From 3a2ed6300d2d31972a5ac633f4e1e9561e163e29 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 24 Oct 2021 15:37:43 -0700 Subject: [PATCH 0001/1103] Run tests on 3.10 during publish, refs #1482 --- .github/workflows/publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 54e582f0..17c6ae9b 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.6, 3.7, 3.8, 3.9] + python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"] steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} From 2c31d1cd9cd3b63458ccbe391866499fa3f44978 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 24 Oct 2021 16:24:41 -0700 Subject: [PATCH 0002/1103] Upgrade Docker base to Debian buster, refs #1497 --- Dockerfile | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/Dockerfile b/Dockerfile index 7c56cf56..42f5529b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,18 +1,11 @@ -FROM python:3.9.2-slim-buster as build +FROM python:3.9.7-slim-bullseye as build # Version of Datasette to install, e.g. 0.55 # docker build . -t datasette --build-arg VERSION=0.55 ARG VERSION -# software-properties-common provides add-apt-repository -# which we need in order to install a more recent release -# of libsqlite3-mod-spatialite from the sid distribution RUN apt-get update && \ - apt-get -y --no-install-recommends install software-properties-common && \ - add-apt-repository "deb http://httpredir.debian.org/debian sid main" && \ - apt-get update && \ - apt-get -t sid install -y --no-install-recommends libsqlite3-mod-spatialite && \ - apt-get remove -y software-properties-common && \ + apt-get install -y --no-install-recommends libsqlite3-mod-spatialite && \ apt clean && \ rm -rf /var/lib/apt && \ rm -rf /var/lib/dpkg/info/* From c92ab51b3ce0c2df002c0c2f10549a43910dd4be Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 12 Nov 2021 06:18:31 -0800 Subject: [PATCH 0003/1103] Logo at top of README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ee9d9a5a..ce15ccf4 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Datasette +Datasette [![PyPI](https://img.shields.io/pypi/v/datasette.svg)](https://pypi.org/project/datasette/) [![Changelog](https://img.shields.io/github/v/release/simonw/datasette?label=changelog)](https://docs.datasette.io/en/stable/changelog.html) From c306b696de0a582e322f9eb7cb4125c83301e3a9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Nov 2021 20:44:54 -0800 Subject: [PATCH 0004/1103] Correct facet links for columns with a leading underscore, closes #1506 --- datasette/facets.py | 9 ++- tests/fixtures.py | 10 +-- tests/test_api.py | 24 +++--- tests/test_csv.py | 2 +- tests/test_facets.py | 133 ++++++++++++++++++++++++++++++- tests/test_html.py | 32 ++++---- tests/test_internals_database.py | 4 +- tests/test_plugins.py | 8 +- 8 files changed, 179 insertions(+), 43 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index f74e2d01..94a1d83d 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -237,14 +237,17 @@ class ColumnFacet(Facet): else: expanded = {} for row in facet_rows: - selected = (column, str(row["value"])) in qs_pairs + column_qs = column + if column.startswith("_"): + column_qs = "{}__exact".format(column) + selected = (column_qs, 
str(row["value"])) in qs_pairs if selected: toggle_path = path_with_removed_args( - self.request, {column: str(row["value"])} + self.request, {column_qs: str(row["value"])} ) else: toggle_path = path_with_added_args( - self.request, {column: row["value"]} + self.request, {column_qs: row["value"]} ) facet_results_values.append( { diff --git a/tests/fixtures.py b/tests/fixtures.py index dc22c609..1a879126 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -355,12 +355,12 @@ METADATA = { "neighborhood_search": { "sql": textwrap.dedent( """ - select neighborhood, facet_cities.name, state + select _neighborhood, facet_cities.name, state from facetable join facet_cities on facetable.city_id = facet_cities.id - where neighborhood like '%' || :text || '%' - order by neighborhood; + where _neighborhood like '%' || :text || '%' + order by _neighborhood; """ ), "title": "Search neighborhoods", @@ -559,14 +559,14 @@ CREATE TABLE facetable ( on_earth integer, state text, city_id integer, - neighborhood text, + _neighborhood text, tags text, complex_array text, distinct_some_null, FOREIGN KEY ("city_id") REFERENCES [facet_cities](id) ); INSERT INTO facetable - (created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array, distinct_some_null) + (created, planet_int, on_earth, state, city_id, _neighborhood, tags, complex_array, distinct_some_null) VALUES ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]', 'one'), ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]', 'two'), diff --git a/tests/test_api.py b/tests/test_api.py index 311ae464..43b52175 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -213,7 +213,7 @@ def test_database_page(app_client): "on_earth", "state", "city_id", - "neighborhood", + "_neighborhood", "tags", "complex_array", "distinct_some_null", @@ -1241,7 +1241,9 @@ def test_table_filter_json_arraynotcontains(app_client): def test_table_filter_extra_where(app_client): - response = app_client.get("/fixtures/facetable.json?_where=neighborhood='Dogpatch'") + response = app_client.get( + "/fixtures/facetable.json?_where=_neighborhood='Dogpatch'" + ) assert [ [ 2, @@ -1259,14 +1261,16 @@ def test_table_filter_extra_where(app_client): def test_table_filter_extra_where_invalid(app_client): - response = app_client.get("/fixtures/facetable.json?_where=neighborhood=Dogpatch'") + response = app_client.get("/fixtures/facetable.json?_where=_neighborhood=Dogpatch'") assert 400 == response.status assert "Invalid SQL" == response.json["title"] def test_table_filter_extra_where_disabled_if_no_sql_allowed(): with make_app_client(metadata={"allow_sql": {}}) as client: - response = client.get("/fixtures/facetable.json?_where=neighborhood='Dogpatch'") + response = client.get( + "/fixtures/facetable.json?_where=_neighborhood='Dogpatch'" + ) assert 403 == response.status assert "_where= is not allowed" == response.json["error"] @@ -1696,7 +1700,7 @@ def test_suggested_facets(app_client): {"name": "on_earth", "querystring": "_facet=on_earth"}, {"name": "state", "querystring": "_facet=state"}, {"name": "city_id", "querystring": "_facet=city_id"}, - {"name": "neighborhood", "querystring": "_facet=neighborhood"}, + {"name": "_neighborhood", "querystring": "_facet=_neighborhood"}, {"name": "tags", "querystring": "_facet=tags"}, {"name": "complex_array", "querystring": "_facet=complex_array"}, {"name": "created", "querystring": "_facet_date=created"}, @@ -1752,7 +1756,7 @@ def 
test_nocount_nofacet_if_shape_is_object(app_client_with_trace): def test_expand_labels(app_client): response = app_client.get( "/fixtures/facetable.json?_shape=object&_labels=1&_size=2" - "&neighborhood__contains=c" + "&_neighborhood__contains=c" ) assert { "2": { @@ -1762,7 +1766,7 @@ def test_expand_labels(app_client): "on_earth": 1, "state": "CA", "city_id": {"value": 1, "label": "San Francisco"}, - "neighborhood": "Dogpatch", + "_neighborhood": "Dogpatch", "tags": '["tag1", "tag3"]', "complex_array": "[]", "distinct_some_null": "two", @@ -1774,7 +1778,7 @@ def test_expand_labels(app_client): "on_earth": 1, "state": "MI", "city_id": {"value": 3, "label": "Detroit"}, - "neighborhood": "Corktown", + "_neighborhood": "Corktown", "tags": "[]", "complex_array": "[]", "distinct_some_null": None, @@ -2125,7 +2129,7 @@ def test_http_options_request(app_client): "on_earth", "state", "city_id", - "neighborhood", + "_neighborhood", "tags", "complex_array", "distinct_some_null", @@ -2152,7 +2156,7 @@ def test_http_options_request(app_client): "planet_int", "on_earth", "city_id", - "neighborhood", + "_neighborhood", "tags", "complex_array", "distinct_some_null", diff --git a/tests/test_csv.py b/tests/test_csv.py index 5e9406e7..5902e9db 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -24,7 +24,7 @@ world ) EXPECTED_TABLE_WITH_LABELS_CSV = """ -pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,complex_array,distinct_some_null +pk,created,planet_int,on_earth,state,city_id,city_id_label,_neighborhood,tags,complex_array,distinct_some_null 1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]",one 2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]",[],two 3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[], diff --git a/tests/test_facets.py b/tests/test_facets.py index 22927512..01d8b8f5 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -23,7 +23,10 @@ async def test_column_facet_suggest(app_client): {"name": "on_earth", "toggle_url": "http://localhost/?_facet=on_earth"}, {"name": "state", "toggle_url": "http://localhost/?_facet=state"}, {"name": "city_id", "toggle_url": "http://localhost/?_facet=city_id"}, - {"name": "neighborhood", "toggle_url": "http://localhost/?_facet=neighborhood"}, + { + "name": "_neighborhood", + "toggle_url": "http://localhost/?_facet=_neighborhood", + }, {"name": "tags", "toggle_url": "http://localhost/?_facet=tags"}, { "name": "complex_array", @@ -56,8 +59,8 @@ async def test_column_facet_suggest_skip_if_already_selected(app_client): "toggle_url": "http://localhost/?_facet=planet_int&_facet=on_earth&_facet=city_id", }, { - "name": "neighborhood", - "toggle_url": "http://localhost/?_facet=planet_int&_facet=on_earth&_facet=neighborhood", + "name": "_neighborhood", + "toggle_url": "http://localhost/?_facet=planet_int&_facet=on_earth&_facet=_neighborhood", }, { "name": "tags", @@ -86,7 +89,7 @@ async def test_column_facet_suggest_skip_if_enabled_by_metadata(app_client): "planet_int", "on_earth", "state", - "neighborhood", + "_neighborhood", "tags", "complex_array", ] == suggestions @@ -144,6 +147,128 @@ async def test_column_facet_results(app_client): } == buckets +@pytest.mark.asyncio +async def test_column_facet_results_column_starts_with_underscore(app_client): + facet = ColumnFacet( + app_client.ds, + Request.fake("/?_facet=_neighborhood"), + database="fixtures", + sql="select * from facetable", + table="facetable", + ) + buckets, timed_out = await 
facet.facet_results() + assert [] == timed_out + assert buckets == { + "_neighborhood": { + "name": "_neighborhood", + "type": "column", + "hideable": True, + "toggle_url": "/", + "results": [ + { + "value": "Downtown", + "label": "Downtown", + "count": 2, + "toggle_url": "http://localhost/?_facet=_neighborhood&_neighborhood__exact=Downtown", + "selected": False, + }, + { + "value": "Arcadia Planitia", + "label": "Arcadia Planitia", + "count": 1, + "toggle_url": "http://localhost/?_facet=_neighborhood&_neighborhood__exact=Arcadia+Planitia", + "selected": False, + }, + { + "value": "Bernal Heights", + "label": "Bernal Heights", + "count": 1, + "toggle_url": "http://localhost/?_facet=_neighborhood&_neighborhood__exact=Bernal+Heights", + "selected": False, + }, + { + "value": "Corktown", + "label": "Corktown", + "count": 1, + "toggle_url": "http://localhost/?_facet=_neighborhood&_neighborhood__exact=Corktown", + "selected": False, + }, + { + "value": "Dogpatch", + "label": "Dogpatch", + "count": 1, + "toggle_url": "http://localhost/?_facet=_neighborhood&_neighborhood__exact=Dogpatch", + "selected": False, + }, + { + "value": "Greektown", + "label": "Greektown", + "count": 1, + "toggle_url": "http://localhost/?_facet=_neighborhood&_neighborhood__exact=Greektown", + "selected": False, + }, + { + "value": "Hayes Valley", + "label": "Hayes Valley", + "count": 1, + "toggle_url": "http://localhost/?_facet=_neighborhood&_neighborhood__exact=Hayes+Valley", + "selected": False, + }, + { + "value": "Hollywood", + "label": "Hollywood", + "count": 1, + "toggle_url": "http://localhost/?_facet=_neighborhood&_neighborhood__exact=Hollywood", + "selected": False, + }, + { + "value": "Koreatown", + "label": "Koreatown", + "count": 1, + "toggle_url": "http://localhost/?_facet=_neighborhood&_neighborhood__exact=Koreatown", + "selected": False, + }, + { + "value": "Los Feliz", + "label": "Los Feliz", + "count": 1, + "toggle_url": "http://localhost/?_facet=_neighborhood&_neighborhood__exact=Los+Feliz", + "selected": False, + }, + { + "value": "Mexicantown", + "label": "Mexicantown", + "count": 1, + "toggle_url": "http://localhost/?_facet=_neighborhood&_neighborhood__exact=Mexicantown", + "selected": False, + }, + { + "value": "Mission", + "label": "Mission", + "count": 1, + "toggle_url": "http://localhost/?_facet=_neighborhood&_neighborhood__exact=Mission", + "selected": False, + }, + { + "value": "SOMA", + "label": "SOMA", + "count": 1, + "toggle_url": "http://localhost/?_facet=_neighborhood&_neighborhood__exact=SOMA", + "selected": False, + }, + { + "value": "Tenderloin", + "label": "Tenderloin", + "count": 1, + "toggle_url": "http://localhost/?_facet=_neighborhood&_neighborhood__exact=Tenderloin", + "selected": False, + }, + ], + "truncated": False, + } + } + + @pytest.mark.asyncio async def test_column_facet_from_metadata_cannot_be_hidden(app_client): facet = ColumnFacet( diff --git a/tests/test_html.py b/tests/test_html.py index 5f2ba2f1..1955e65b 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -235,7 +235,10 @@ def test_table_cell_truncation(): "Corkt…", "Mexic…", "Arcad…", - ] == [td.string for td in table.findAll("td", {"class": "col-neighborhood"})] + ] == [ + td.string + for td in table.findAll("td", {"class": "col-neighborhood-b352a7"}) + ] def test_row_page_does_not_truncate(): @@ -245,7 +248,8 @@ def test_row_page_does_not_truncate(): table = Soup(response.body, "html.parser").find("table") assert table["class"] == ["rows-and-columns"] assert ["Mission"] == [ - td.string for td in 
table.findAll("td", {"class": "col-neighborhood"}) + td.string + for td in table.findAll("td", {"class": "col-neighborhood-b352a7"}) ] @@ -1312,7 +1316,7 @@ def test_canned_query_show_hide_metadata_option( def test_extra_where_clauses(app_client): response = app_client.get( - "/fixtures/facetable?_where=neighborhood='Dogpatch'&_where=city_id=1" + "/fixtures/facetable?_where=_neighborhood='Dogpatch'&_where=city_id=1" ) soup = Soup(response.body, "html.parser") div = soup.select(".extra-wheres")[0] @@ -1320,12 +1324,12 @@ def test_extra_where_clauses(app_client): hrefs = [a["href"] for a in div.findAll("a")] assert [ "/fixtures/facetable?_where=city_id%3D1", - "/fixtures/facetable?_where=neighborhood%3D%27Dogpatch%27", + "/fixtures/facetable?_where=_neighborhood%3D%27Dogpatch%27", ] == hrefs # These should also be persisted as hidden fields inputs = soup.find("form").findAll("input") hiddens = [i for i in inputs if i["type"] == "hidden"] - assert [("_where", "neighborhood='Dogpatch'"), ("_where", "city_id=1")] == [ + assert [("_where", "_neighborhood='Dogpatch'"), ("_where", "city_id=1")] == [ (hidden["name"], hidden["value"]) for hidden in hiddens ] @@ -1634,11 +1638,11 @@ def test_base_url_affects_metadata_extra_css_urls(app_client_base_url_prefix): [ ( "/fixtures/neighborhood_search", - "/fixtures?sql=%0Aselect+neighborhood%2C+facet_cities.name%2C+state%0Afrom+facetable%0A++++join+facet_cities%0A++++++++on+facetable.city_id+%3D+facet_cities.id%0Awhere+neighborhood+like+%27%25%27+%7C%7C+%3Atext+%7C%7C+%27%25%27%0Aorder+by+neighborhood%3B%0A&text=", + "/fixtures?sql=%0Aselect+_neighborhood%2C+facet_cities.name%2C+state%0Afrom+facetable%0A++++join+facet_cities%0A++++++++on+facetable.city_id+%3D+facet_cities.id%0Awhere+_neighborhood+like+%27%25%27+%7C%7C+%3Atext+%7C%7C+%27%25%27%0Aorder+by+_neighborhood%3B%0A&text=", ), ( "/fixtures/neighborhood_search?text=ber", - "/fixtures?sql=%0Aselect+neighborhood%2C+facet_cities.name%2C+state%0Afrom+facetable%0A++++join+facet_cities%0A++++++++on+facetable.city_id+%3D+facet_cities.id%0Awhere+neighborhood+like+%27%25%27+%7C%7C+%3Atext+%7C%7C+%27%25%27%0Aorder+by+neighborhood%3B%0A&text=ber", + "/fixtures?sql=%0Aselect+_neighborhood%2C+facet_cities.name%2C+state%0Afrom+facetable%0A++++join+facet_cities%0A++++++++on+facetable.city_id+%3D+facet_cities.id%0Awhere+_neighborhood+like+%27%25%27+%7C%7C+%3Atext+%7C%7C+%27%25%27%0Aorder+by+_neighborhood%3B%0A&text=ber", ), ("/fixtures/pragma_cache_size", None), ( @@ -1716,23 +1720,23 @@ def test_navigation_menu_links( ( 5, # Default should show 2 facets - "/fixtures/facetable?_facet=neighborhood", + "/fixtures/facetable?_facet=_neighborhood", 2, True, - "/fixtures/facetable?_facet=neighborhood&_facet_size=max", + "/fixtures/facetable?_facet=_neighborhood&_facet_size=max", ), # _facet_size above max_returned_rows should show max_returned_rows (5) ( 5, - "/fixtures/facetable?_facet=neighborhood&_facet_size=50", + "/fixtures/facetable?_facet=_neighborhood&_facet_size=50", 5, True, - "/fixtures/facetable?_facet=neighborhood&_facet_size=max", + "/fixtures/facetable?_facet=_neighborhood&_facet_size=max", ), # If max_returned_rows is high enough, should return all ( 20, - "/fixtures/facetable?_facet=neighborhood&_facet_size=max", + "/fixtures/facetable?_facet=_neighborhood&_facet_size=max", 14, False, None, @@ -1741,7 +1745,7 @@ def test_navigation_menu_links( # _facet_size above max_returned_rows should show max_returned_rows (5) ( 5, - "/fixtures/facetable?_facet=neighborhood&_facet_size=max", + 
"/fixtures/facetable?_facet=_neighborhood&_facet_size=max", 5, True, None, @@ -1760,7 +1764,7 @@ def test_facet_more_links( ) as client: response = client.get(path) soup = Soup(response.body, "html.parser") - lis = soup.select("#facet-neighborhood ul li:not(.facet-truncated)") + lis = soup.select("#facet-neighborhood-b352a7 ul li:not(.facet-truncated)") facet_truncated = soup.select_one(".facet-truncated") assert len(lis) == expected_num_facets if not expected_ellipses: diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index ad829751..2d0cae7f 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -82,7 +82,7 @@ async def test_table_exists(db, tables, exists): "on_earth", "state", "city_id", - "neighborhood", + "_neighborhood", "tags", "complex_array", "distinct_some_null", @@ -170,7 +170,7 @@ async def test_table_columns(db, table, expected): ), Column( cid=6, - name="neighborhood", + name="_neighborhood", type="text", notnull=0, default_value=None, diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 7dac8002..7ac6b173 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -437,7 +437,7 @@ def test_hook_register_output_renderer_all_parameters(app_client): "on_earth", "state", "city_id", - "neighborhood", + "_neighborhood", "tags", "complex_array", "distinct_some_null", @@ -459,7 +459,7 @@ def test_hook_register_output_renderer_all_parameters(app_client): "", "", ], - "sql": "select pk, created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array, distinct_some_null from facetable order by pk limit 51", + "sql": "select pk, created, planet_int, on_earth, state, city_id, _neighborhood, tags, complex_array, distinct_some_null from facetable order by pk limit 51", "query_name": None, "database": "fixtures", "table": "facetable", @@ -526,12 +526,12 @@ def test_hook_register_output_renderer_can_render(app_client): "on_earth", "state", "city_id", - "neighborhood", + "_neighborhood", "tags", "complex_array", "distinct_some_null", ], - "sql": "select pk, created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array, distinct_some_null from facetable order by pk limit 51", + "sql": "select pk, created, planet_int, on_earth, state, city_id, _neighborhood, tags, complex_array, distinct_some_null from facetable order by pk limit 51", "query_name": None, "database": "fixtures", "table": "facetable", From c9e3cfecc8e966e5137d72e3f2150be9602d55f5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Nov 2021 20:53:00 -0800 Subject: [PATCH 0005/1103] Columns in filters now ignore ?_nocol, closes #1503 --- datasette/views/table.py | 2 +- tests/test_html.py | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index efcef4d2..3e8f38f6 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -922,7 +922,7 @@ class TableView(RowTableShared): "use_rowid": use_rowid, "filters": filters, "display_columns": display_columns, - "filter_columns": columns, + "filter_columns": table_columns, "display_rows": display_rows, "facets_timed_out": facets_timed_out, "sorted_facet_results": sorted( diff --git a/tests/test_html.py b/tests/test_html.py index 1955e65b..eb1e3d20 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -923,6 +923,26 @@ def test_table_html_filter_form_column_options( assert expected_column_options == column_options +def 
test_table_html_filter_form_still_shows_nocol_columns(app_client): + # https://github.com/simonw/datasette/issues/1503 + response = app_client.get("/fixtures/sortable?_nocol=sortable") + assert response.status == 200 + form = Soup(response.body, "html.parser").find("form") + assert [ + o.string + for o in form.select("select[name='_filter_column']")[0].select("option") + ] == [ + "- column -", + "pk1", + "pk2", + "content", + "sortable", + "sortable_with_nulls", + "sortable_with_nulls_2", + "text", + ] + + def test_row_html_compound_primary_key(app_client): response = app_client.get("/fixtures/compound_primary_key/a,b") assert response.status == 200 From 1c13e1af0664a4dfb1e69714c56523279cae09e4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Nov 2021 21:08:33 -0800 Subject: [PATCH 0006/1103] Ensure query columns are included too, ref #1503 --- datasette/views/table.py | 10 +++++++++- tests/test_html.py | 3 ++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 3e8f38f6..e6ae67de 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -915,6 +915,14 @@ class TableView(RowTableShared): links.extend(extra_links) return links + # filter_columns combine the columns we know are available + # in the table with any additional columns (such as rowid) + # which are available in the query + filter_columns = list(columns) + [ + table_column + for table_column in table_columns + if table_column not in columns + ] return { "table_actions": table_actions, "supports_search": bool(fts_table), @@ -922,7 +930,7 @@ class TableView(RowTableShared): "use_rowid": use_rowid, "filters": filters, "display_columns": display_columns, - "filter_columns": table_columns, + "filter_columns": filter_columns, "display_rows": display_rows, "facets_timed_out": facets_timed_out, "sorted_facet_results": sorted( diff --git a/tests/test_html.py b/tests/test_html.py index eb1e3d20..f24165bd 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -936,10 +936,11 @@ def test_table_html_filter_form_still_shows_nocol_columns(app_client): "pk1", "pk2", "content", - "sortable", "sortable_with_nulls", "sortable_with_nulls_2", "text", + # Moved to the end because it is no longer returned by the query: + "sortable", ] From de1e031713f47fbd51eb7239db3e7e6025fbf81a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Nov 2021 21:14:43 -0800 Subject: [PATCH 0007/1103] Release 0.59.2 Refs #1497, #1503, #1506 --- datasette/version.py | 2 +- docs/changelog.rst | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index ff1f55e8..db89b418 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.59.1" +__version__ = "0.59.2" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 40f49fb2..47ca3480 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,16 @@ Changelog ========= +.. _v0_59_2: + +0.59.2 (2021-11-13) +------------------- + +- Column names with a leading underscore now work correctly when used as a facet. (:issue:`1506`) +- Applying ``?_nocol=`` to a column no longer removes that column from the filtering interface. (:issue:`1503`) +- Official Datasette Docker container now uses Debian Bullseye as the base image. (:issue:`1497`) +- Datasette is four years old today! Here's the `original release announcement `__ from 2017. + .. 
_v0_59_1: 0.59.1 (2021-10-24) From 030390fd4abcecf1ab80d0528e32d7dbc50d1b5f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Nov 2021 21:29:43 -0800 Subject: [PATCH 0008/1103] .readthedocs.yaml configuration, refs #1507 --- .readthedocs.yaml | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 .readthedocs.yaml diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..816c10e2 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,9 @@ +version: 2 + +build: + os: ubuntu-20.04 + tools: + python: "3.9" + +sphinx: + configuration: docs/conf.py From 502c02fa6dde6a8bb840af6c4c8cf858aa1db687 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Nov 2021 21:37:40 -0800 Subject: [PATCH 0009/1103] Pin to docutils<0.18 in ReadTheDocs, refs #1507 --- .readthedocs.yaml | 4 ++++ docs/readthedocs-requirements.txt | 1 + 2 files changed, 5 insertions(+) create mode 100644 docs/readthedocs-requirements.txt diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 816c10e2..70db5313 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -7,3 +7,7 @@ build: sphinx: configuration: docs/conf.py + +python: + install: + - requirements: docs/readthedocs-requirements.txt diff --git a/docs/readthedocs-requirements.txt b/docs/readthedocs-requirements.txt new file mode 100644 index 00000000..93120e66 --- /dev/null +++ b/docs/readthedocs-requirements.txt @@ -0,0 +1 @@ +docutils<0.18 From 07044bd130542870d5eb2e545988d0a24eb573ec Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 15 Nov 2021 15:41:07 -0800 Subject: [PATCH 0010/1103] SQL view-friendly arraycontains/arraynotcontains implementation, refs #448 --- datasette/filters.py | 10 ++-------- tests/test_filters.py | 11 +++++++---- 2 files changed, 9 insertions(+), 12 deletions(-) diff --git a/datasette/filters.py b/datasette/filters.py index 2b859d99..cbd94415 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -149,19 +149,13 @@ class Filters: TemplatedFilter( "arraycontains", "array contains", - """rowid in ( - select {t}.rowid from {t}, json_each([{t}].[{c}]) j - where j.value = :{p} - )""", + """:{p} in (select value from json_each([{t}].[{c}]))""", '{c} contains "{v}"', ), TemplatedFilter( "arraynotcontains", "array does not contain", - """rowid not in ( - select {t}.rowid from {t}, json_each([{t}].[{c}]) j - where j.value = :{p} - )""", + """:{p} not in (select value from json_each([{t}].[{c}]))""", '{c} does not contain "{v}"', ), ] diff --git a/tests/test_filters.py b/tests/test_filters.py index f22b7b5c..d05ae80f 100644 --- a/tests/test_filters.py +++ b/tests/test_filters.py @@ -56,12 +56,15 @@ import pytest # Not in, and JSON array not in ((("foo__notin", "1,2,3"),), ["foo not in (:p0, :p1, :p2)"], ["1", "2", "3"]), ((("foo__notin", "[1,2,3]"),), ["foo not in (:p0, :p1, :p2)"], [1, 2, 3]), - # JSON arraycontains + # JSON arraycontains, arraynotcontains ( (("Availability+Info__arraycontains", "yes"),), - [ - "rowid in (\n select table.rowid from table, json_each([table].[Availability+Info]) j\n where j.value = :p0\n )" - ], + [":p0 in (select value from json_each([table].[Availability+Info]))"], + ["yes"], + ), + ( + (("Availability+Info__arraynotcontains", "yes"),), + [":p0 not in (select value from json_each([table].[Availability+Info]))"], ["yes"], ), ], From 55024b5301892306b786fc37a8ab3c096be5c227 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 15 Nov 2021 17:19:33 -0800 Subject: [PATCH 0011/1103] _facet_array no longer confused by duplicate array items, closes #448 --- 
datasette/facets.py | 23 +++++++++++++++--- tests/test_facets.py | 58 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 77 insertions(+), 4 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index 94a1d83d..ce2111c9 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -354,11 +354,26 @@ class ArrayFacet(Facet): config = source_and_config["config"] source = source_and_config["source"] column = config.get("column") or config["simple"] + # https://github.com/simonw/datasette/issues/448 facet_sql = """ - select j.value as value, count(*) as count from ( - {sql} - ) join json_each({col}) j - group by j.value order by count desc, value limit {limit} + with inner as ({sql}), + deduped_array_items as ( + select + distinct j.value, + inner.* + from + json_each([inner].{col}) j + join inner + ) + select + value as value, + count(*) as count + from + deduped_array_items + group by + value + order by + count(*) desc limit {limit} """.format( col=escape_sqlite(column), sql=self.sql, limit=facet_size + 1 ) diff --git a/tests/test_facets.py b/tests/test_facets.py index 01d8b8f5..a20c79c4 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -4,6 +4,7 @@ from datasette.facets import ColumnFacet, ArrayFacet, DateFacet from datasette.utils.asgi import Request from datasette.utils import detect_json1 from .fixtures import app_client # noqa +import json import pytest @@ -402,6 +403,63 @@ async def test_array_facet_results(app_client): } == buckets +@pytest.mark.asyncio +@pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module") +async def test_array_facet_handle_duplicate_tags(): + ds = Datasette([], memory=True) + db = ds.add_database(Database(ds, memory_name="test_array_facet")) + await db.execute_write("create table otters(name text, tags text)", block=True) + for name, tags in ( + ("Charles", ["friendly", "cunning", "friendly"]), + ("Shaun", ["cunning", "empathetic", "friendly"]), + ("Tracy", ["empathetic", "eager"]), + ): + await db.execute_write( + "insert into otters (name, tags) values (?, ?)", + [name, json.dumps(tags)], + block=True, + ) + + response = await ds.client.get("/test_array_facet/otters.json?_facet_array=tags") + assert response.json()["facet_results"]["tags"] == { + "name": "tags", + "type": "array", + "results": [ + { + "value": "cunning", + "label": "cunning", + "count": 2, + "toggle_url": "http://localhost/test_array_facet/otters.json?_facet_array=tags&tags__arraycontains=cunning", + "selected": False, + }, + { + "value": "empathetic", + "label": "empathetic", + "count": 2, + "toggle_url": "http://localhost/test_array_facet/otters.json?_facet_array=tags&tags__arraycontains=empathetic", + "selected": False, + }, + { + "value": "friendly", + "label": "friendly", + "count": 2, + "toggle_url": "http://localhost/test_array_facet/otters.json?_facet_array=tags&tags__arraycontains=friendly", + "selected": False, + }, + { + "value": "eager", + "label": "eager", + "count": 1, + "toggle_url": "http://localhost/test_array_facet/otters.json?_facet_array=tags&tags__arraycontains=eager", + "selected": False, + }, + ], + "hideable": True, + "toggle_url": "/test_array_facet/otters.json", + "truncated": False, + } + + @pytest.mark.asyncio async def test_date_facet_results(app_client): facet = DateFacet( From 0156c6b5e52d541e93f0d68e9245f20ae83bc933 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 15 Nov 2021 17:31:33 -0800 Subject: [PATCH 0012/1103] Facet in predictable order for tests, refs #448 --- datasette/facets.py | 2 
+- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/facets.py b/datasette/facets.py index ce2111c9..62e7775e 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -373,7 +373,7 @@ class ArrayFacet(Facet): group by value order by - count(*) desc limit {limit} + count(*) desc, value limit {limit} """.format( col=escape_sqlite(column), sql=self.sql, limit=facet_size + 1 ) From 6e971b4ac175df95ac7fe5dc2b57b53ad7f533fc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Nov 2021 19:07:21 -0800 Subject: [PATCH 0013/1103] Test confirming plugins can over-ride default routes, closes #1517 --- tests/plugins/my_plugin_2.py | 10 +++++++++- tests/test_plugins.py | 19 +++++++++++++++++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index ba298fd4..f5ce36b3 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -176,4 +176,12 @@ def register_routes(datasette): if not config: return path = config["path"] - return [(r"/{}/$".format(path), lambda: Response.text(path.upper()))] + + def new_table(request): + return Response.text("/db/table: {}".format(sorted(request.url_vars.items()))) + + return [ + (r"/{}/$".format(path), lambda: Response.text(path.upper())), + # Also serves to demonstrate over-ride of default paths: + (r"/(?P[^/]+)/(?P[^/]+?$)", new_table), + ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 7ac6b173..c9ff6edb 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -678,6 +678,25 @@ def test_hook_register_routes_with_datasette(configured_path): assert client.get(f"/{other_path}/", follow_redirects=True).status == 404 +def test_hook_register_routes_override(): + "Plugins can over-ride default paths such as /db/table" + with make_app_client( + metadata={ + "plugins": { + "register-route-demo": { + "path": "blah", + } + } + } + ) as client: + response = client.get("/db/table") + assert response.status == 200 + assert ( + response.text + == "/db/table: [('db_name', 'db'), ('table_and_format', 'table')]" + ) + + def test_hook_register_routes_post(app_client): response = app_client.post("/post/", {"this is": "post data"}, csrftoken_from=True) assert 200 == response.status From 30255055150d7bc0affc8156adc18295495020ff Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Nov 2021 19:19:43 -0800 Subject: [PATCH 0014/1103] functools.wraps to help investigate #1517 --- datasette/app.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/datasette/app.py b/datasette/app.py index 52c5e629..28268e42 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -2,6 +2,7 @@ import asyncio import asgi_csrf import collections import datetime +import functools import glob import hashlib import httpx @@ -1354,6 +1355,7 @@ def _cleaner_task_str(task): def wrap_view(view_fn, datasette): + @functools.wraps(view_fn) async def async_view_fn(request, send): if inspect.iscoroutinefunction(view_fn): response = await async_call_with_supported_arguments( From ff0dd4da38d48c2fa9250ecf336002c9ed724e36 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 19 Nov 2021 12:29:37 -0800 Subject: [PATCH 0015/1103] repr() method for Request, refs #1519 --- datasette/utils/asgi.py | 3 +++ tests/test_internals_request.py | 8 ++++++++ 2 files changed, 11 insertions(+) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 696944df..ad137fa9 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -37,6 +37,9 @@ class Request: self.scope 
= scope self.receive = receive + def __repr__(self): + return ''.format(self.method, self.url) + @property def method(self): return self.scope["method"] diff --git a/tests/test_internals_request.py b/tests/test_internals_request.py index c42cfbd3..cd956f3f 100644 --- a/tests/test_internals_request.py +++ b/tests/test_internals_request.py @@ -75,6 +75,14 @@ def test_request_args(): request.args["missing"] +def test_request_repr(): + request = Request.fake("/foo?multi=1&multi=2&single=3") + assert ( + repr(request) + == '' + ) + + def test_request_url_vars(): scope = { "http_version": "1.1", From c76bbd40664f789c45564b7796628e5110cd3b17 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 19 Nov 2021 14:50:06 -0800 Subject: [PATCH 0016/1103] New live demo with Apache proxying, refs #1522 --- demos/apache-proxy/Dockerfile | 42 +++++++++++++++++++++++++++++++++++ demos/apache-proxy/README.md | 5 +++++ demos/apache-proxy/deploy.sh | 13 +++++++++++ 3 files changed, 60 insertions(+) create mode 100644 demos/apache-proxy/Dockerfile create mode 100644 demos/apache-proxy/README.md create mode 100755 demos/apache-proxy/deploy.sh diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile new file mode 100644 index 00000000..2956f913 --- /dev/null +++ b/demos/apache-proxy/Dockerfile @@ -0,0 +1,42 @@ +FROM python:3-alpine + +RUN apk add --no-cache \ + apache2 \ + apache2-proxy \ + bash + +RUN pip install datasette + +ENV TINI_VERSION v0.18.0 +ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-static /tini +RUN chmod +x /tini + +# Append this to the end of the default httpd.conf file +RUN echo $'ServerName localhost\n\ +\n\ +\n\ + Order deny,allow\n\ + Allow from all\n\ +\n\ +\n\ +ProxyPass /prefix/ http://localhost:8001/\n\ +Header add X-Proxied-By "Apache2"' >> /etc/apache2/httpd.conf + +RUN echo $'Datasette' > /var/www/localhost/htdocs/index.html + +WORKDIR /app + +ADD https://latest.datasette.io/fixtures.db /app/fixtures.db + +RUN echo $'#!/usr/bin/env bash\n\ +set -e\n\ +\n\ +httpd -D FOREGROUND &\n\ +datasette fixtures.db --setting base_url "/prefix/" -h 0.0.0.0 -p 8001 &\n\ +\n\ +wait -n' > /app/start.sh + +RUN chmod +x /app/start.sh + +EXPOSE 80 +ENTRYPOINT ["/tini", "--", "/app/start.sh"] diff --git a/demos/apache-proxy/README.md b/demos/apache-proxy/README.md new file mode 100644 index 00000000..9bd3897c --- /dev/null +++ b/demos/apache-proxy/README.md @@ -0,0 +1,5 @@ +# Datasette running behind an Apache proxy + +See also [Running Datasette behind a proxy](https://docs.datasette.io/en/latest/deploying.html#running-datasette-behind-a-proxy) + +This live demo is running at https://apache-proxy-demo.datasette.io/ diff --git a/demos/apache-proxy/deploy.sh b/demos/apache-proxy/deploy.sh new file mode 100755 index 00000000..ae33941c --- /dev/null +++ b/demos/apache-proxy/deploy.sh @@ -0,0 +1,13 @@ +#!/bin/bash +# https://til.simonwillison.net/cloudrun/ship-dockerfile-to-cloud-run + +NAME="datasette-apache-proxy-demo" +PROJECT=$(gcloud config get-value project) +IMAGE="gcr.io/$PROJECT/$NAME" + +gcloud builds submit --tag $IMAGE +gcloud run deploy \ + --allow-unauthenticated \ + --platform=managed \ + --image $IMAGE $NAME \ + --port 80 From c617e1769ea27e045b0f2907ef49a9a1244e577d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 19 Nov 2021 15:13:17 -0800 Subject: [PATCH 0017/1103] Fixed test I broke with new repr() in ##1519 --- tests/test_plugins.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_plugins.py 
b/tests/test_plugins.py index c9ff6edb..697a6b32 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -463,7 +463,7 @@ def test_hook_register_output_renderer_all_parameters(app_client): "query_name": None, "database": "fixtures", "table": "facetable", - "request": "", + "request": '', "view_name": "table", "1+1": 2, } From a1ba6cd6bb86d935cdad240de6be6b37aad683f2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 19 Nov 2021 16:34:35 -0800 Subject: [PATCH 0018/1103] Use build arguments, refs #1522 --- demos/apache-proxy/Dockerfile | 23 +++++++++++++---------- demos/apache-proxy/README.md | 11 +++++++++++ demos/apache-proxy/deploy.sh | 31 ++++++++++++++++++++++++------- 3 files changed, 48 insertions(+), 17 deletions(-) diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile index 2956f913..46697c63 100644 --- a/demos/apache-proxy/Dockerfile +++ b/demos/apache-proxy/Dockerfile @@ -5,38 +5,41 @@ RUN apk add --no-cache \ apache2-proxy \ bash -RUN pip install datasette +ARG DATASETTE_REF + +RUN pip install https://github.com/simonw/datasette/archive/${DATASETTE_REF}.zip ENV TINI_VERSION v0.18.0 ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-static /tini RUN chmod +x /tini # Append this to the end of the default httpd.conf file -RUN echo $'ServerName localhost\n\ +RUN echo -e 'ServerName localhost\n\ \n\ \n\ Order deny,allow\n\ Allow from all\n\ \n\ \n\ -ProxyPass /prefix/ http://localhost:8001/\n\ +ProxyPass /prefix/ http://localhost:8001/\n\ Header add X-Proxied-By "Apache2"' >> /etc/apache2/httpd.conf -RUN echo $'Datasette' > /var/www/localhost/htdocs/index.html +RUN echo 'Datasette' > /var/www/localhost/htdocs/index.html WORKDIR /app ADD https://latest.datasette.io/fixtures.db /app/fixtures.db -RUN echo $'#!/usr/bin/env bash\n\ -set -e\n\ +RUN echo -e "#!/usr/bin/env bash\n\ +datasette /app/fixtures.db --setting base_url '/prefix/' --version-note '${DATASETTE_REF}' -h 0.0.0.0 -p 8001 &\n\ \n\ -httpd -D FOREGROUND &\n\ -datasette fixtures.db --setting base_url "/prefix/" -h 0.0.0.0 -p 8001 &\n\ +httpd -D FOREGROUND & \n\ \n\ -wait -n' > /app/start.sh +wait -n\n\ +exit $?" > /app/start.sh RUN chmod +x /app/start.sh EXPOSE 80 -ENTRYPOINT ["/tini", "--", "/app/start.sh"] + +CMD /tini -- /app/start.sh diff --git a/demos/apache-proxy/README.md b/demos/apache-proxy/README.md index 9bd3897c..08048512 100644 --- a/demos/apache-proxy/README.md +++ b/demos/apache-proxy/README.md @@ -3,3 +3,14 @@ See also [Running Datasette behind a proxy](https://docs.datasette.io/en/latest/deploying.html#running-datasette-behind-a-proxy) This live demo is running at https://apache-proxy-demo.datasette.io/ + +To build locally, passing in a Datasette commit hash (or `main` for the main branch): + + docker build -t datasette-apache-proxy-demo . 
\ + --build-arg DATASETTE_REF=c617e1769ea27e045b0f2907ef49a9a1244e577d + +Then run it like this: + + docker run -p 5000:80 datasette-apache-proxy-demo + +And visit `http://localhost:5000/` or `http://localhost:5000/prefix/` diff --git a/demos/apache-proxy/deploy.sh b/demos/apache-proxy/deploy.sh index ae33941c..2846590a 100755 --- a/demos/apache-proxy/deploy.sh +++ b/demos/apache-proxy/deploy.sh @@ -1,13 +1,30 @@ #!/bin/bash -# https://til.simonwillison.net/cloudrun/ship-dockerfile-to-cloud-run +# https://til.simonwillison.net/cloudrun/using-build-args-with-cloud-run + +if [[ -z "$DATASETTE_REF" ]]; then + echo "Must provide DATASETTE_REF environment variable" 1>&2 + exit 1 +fi NAME="datasette-apache-proxy-demo" PROJECT=$(gcloud config get-value project) IMAGE="gcr.io/$PROJECT/$NAME" -gcloud builds submit --tag $IMAGE -gcloud run deploy \ - --allow-unauthenticated \ - --platform=managed \ - --image $IMAGE $NAME \ - --port 80 +# Need YAML so we can set --build-arg +echo " +steps: +- name: 'gcr.io/cloud-builders/docker' + args: ['build', '-t', '$IMAGE', '.', '--build-arg', 'DATASETTE_REF=$DATASETTE_REF'] +- name: 'gcr.io/cloud-builders/docker' + args: ['push', '$IMAGE'] +" > /tmp/cloudbuild.yml + +gcloud builds submit --config /tmp/cloudbuild.yml + +rm /tmp/cloudbuild.yml + +gcloud run deploy $NAME \ + --allow-unauthenticated \ + --platform=managed \ + --image $IMAGE \ + --port 80 From fe687fd0207c4c56c4778d3e92e3505fc4b18172 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 19 Nov 2021 16:52:33 -0800 Subject: [PATCH 0019/1103] Fixed a whole bunch of broken base_url links Refs #1519, #838 --- datasette/facets.py | 18 +++++++++++++----- datasette/templates/_table.html | 4 ++-- datasette/views/base.py | 10 ++++++---- datasette/views/database.py | 2 +- datasette/views/table.py | 1 + tests/test_html.py | 24 +++++++++++++++++------- 6 files changed, 40 insertions(+), 19 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index 62e7775e..9a43b95e 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -180,7 +180,11 @@ class ColumnFacet(Facet): "name": column, "toggle_url": self.ds.absolute_url( self.request, - path_with_added_args(self.request, {"_facet": column}), + self.ds.urls.path( + path_with_added_args( + self.request, {"_facet": column} + ) + ), ), } ) @@ -334,8 +338,10 @@ class ArrayFacet(Facet): "type": "array", "toggle_url": self.ds.absolute_url( self.request, - path_with_added_args( - self.request, {"_facet_array": column} + self.ds.urls.path( + path_with_added_args( + self.request, {"_facet_array": column} + ) ), ), } @@ -461,8 +467,10 @@ class DateFacet(Facet): "type": "date", "toggle_url": self.ds.absolute_url( self.request, - path_with_added_args( - self.request, {"_facet_date": column} + self.ds.urls.path( + path_with_added_args( + self.request, {"_facet_date": column} + ) ), ), } diff --git a/datasette/templates/_table.html b/datasette/templates/_table.html index 649f5171..d91a1a57 100644 --- a/datasette/templates/_table.html +++ b/datasette/templates/_table.html @@ -9,9 +9,9 @@ {{ column.name }} {% else %} {% if column.name == sort %} - {{ column.name }} ▼ + {{ column.name }} ▼ {% else %} - {{ column.name }}{% if column.name == sort_desc %} ▲{% endif %} + {{ column.name }}{% if column.name == sort_desc %} ▲{% endif %} {% endif %} {% endif %} diff --git a/datasette/views/base.py b/datasette/views/base.py index 01e90220..a9953dfd 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -592,13 +592,15 @@ class DataView(BaseView): ) 
it_can_render = await await_me_maybe(it_can_render) if it_can_render: - renderers[key] = path_with_format( - request=request, format=key, extra_qs={**url_labels_extra} + renderers[key] = self.ds.urls.path( + path_with_format( + request=request, format=key, extra_qs={**url_labels_extra} + ) ) url_csv_args = {"_size": "max", **url_labels_extra} - url_csv = path_with_format( - request=request, format="csv", extra_qs=url_csv_args + url_csv = self.ds.urls.path( + path_with_format(request=request, format="csv", extra_qs=url_csv_args) ) url_csv_path = url_csv.split("?")[0] context = { diff --git a/datasette/views/database.py b/datasette/views/database.py index affded9b..f1901b34 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -459,7 +459,7 @@ class QueryView(DataView): "metadata": metadata, "settings": self.ds.settings_dict(), "request": request, - "show_hide_link": show_hide_link, + "show_hide_link": self.ds.urls.path(show_hide_link), "show_hide_text": show_hide_text, "show_hide_hidden": markupsafe.Markup(show_hide_hidden), "hide_sql": hide_sql, diff --git a/datasette/views/table.py b/datasette/views/table.py index e6ae67de..296e177f 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -942,6 +942,7 @@ class TableView(RowTableShared): "extra_wheres_for_ui": extra_wheres_for_ui, "form_hidden_args": form_hidden_args, "is_sortable": any(c["sortable"] for c in display_columns), + "fix_path": ds.urls.path, "path_with_replaced_args": path_with_replaced_args, "path_with_removed_args": path_with_removed_args, "append_querystring": append_querystring, diff --git a/tests/test_html.py b/tests/test_html.py index f24165bd..3301b91d 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1614,11 +1614,16 @@ def test_metadata_sort_desc(app_client): "/fixtures/compound_three_primary_keys/a,a,a", "/fixtures/paginated_view", "/fixtures/facetable", + "/fixtures?sql=select+1", ], ) -def test_base_url_config(app_client_base_url_prefix, path): +@pytest.mark.parametrize("use_prefix", (True, False)) +def test_base_url_config(app_client_base_url_prefix, path, use_prefix): client = app_client_base_url_prefix - response = client.get("/prefix/" + path.lstrip("/")) + path_to_get = path + if use_prefix: + path_to_get = "/prefix/" + path.lstrip("/") + response = client.get(path_to_get) soup = Soup(response.body, "html.parser") for el in soup.findAll(["a", "link", "script"]): if "href" in el.attrs: @@ -1642,11 +1647,16 @@ def test_base_url_config(app_client_base_url_prefix, path): # If this has been made absolute it may start http://localhost/ if href.startswith("http://localhost/"): href = href[len("http://localost/") :] - assert href.startswith("/prefix/"), { - "path": path, - "href_or_src": href, - "element_parent": str(el.parent), - } + assert href.startswith("/prefix/"), json.dumps( + { + "path": path, + "path_to_get": path_to_get, + "href_or_src": href, + "element_parent": str(el.parent), + }, + indent=4, + default=repr, + ) def test_base_url_affects_metadata_extra_css_urls(app_client_base_url_prefix): From 640031edfd40ba66aee3c4f7008c78c6a78a3e69 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 19 Nov 2021 17:01:17 -0800 Subject: [PATCH 0020/1103] Fixed bug introduced in #1519 --- datasette/views/table.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 296e177f..66447aa0 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -942,7 +942,7 @@ class 
TableView(RowTableShared): "extra_wheres_for_ui": extra_wheres_for_ui, "form_hidden_args": form_hidden_args, "is_sortable": any(c["sortable"] for c in display_columns), - "fix_path": ds.urls.path, + "fix_path": self.ds.urls.path, "path_with_replaced_args": path_with_replaced_args, "path_with_removed_args": path_with_removed_args, "append_querystring": append_querystring, From 24b5006ad7c316d00a1a963db5bfa82a49fab116 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 19 Nov 2021 17:11:13 -0800 Subject: [PATCH 0021/1103] ProxyPreserveHost On for apache-proxy demo, refs #1522 --- demos/apache-proxy/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile index 46697c63..59c20433 100644 --- a/demos/apache-proxy/Dockerfile +++ b/demos/apache-proxy/Dockerfile @@ -21,6 +21,7 @@ RUN echo -e 'ServerName localhost\n\ Allow from all\n\ \n\ \n\ +ProxyPreserveHost On\n\ ProxyPass /prefix/ http://localhost:8001/\n\ Header add X-Proxied-By "Apache2"' >> /etc/apache2/httpd.conf From 494f11d5cc88f05df300f6f41bcf083a736487dc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 20 Nov 2021 10:51:14 -0800 Subject: [PATCH 0022/1103] Switch from Alpine to Debian, refs #1522 --- demos/apache-proxy/Dockerfile | 80 +++++++++++++++++++++-------------- 1 file changed, 49 insertions(+), 31 deletions(-) diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile index 59c20433..40f5e31d 100644 --- a/demos/apache-proxy/Dockerfile +++ b/demos/apache-proxy/Dockerfile @@ -1,46 +1,64 @@ -FROM python:3-alpine +FROM python:3.9.7-slim-bullseye -RUN apk add --no-cache \ - apache2 \ - apache2-proxy \ - bash +RUN apt-get update && \ + apt-get install -y apache2 supervisor && \ + apt clean && \ + rm -rf /var/lib/apt && \ + rm -rf /var/lib/dpkg/info/* + +# Apache environment, copied from +# https://github.com/ijklim/laravel-benfords-law-app/blob/e9bf385dcaddb62ea466a7b245ab6e4ef708c313/docker/os/Dockerfile +ENV APACHE_DOCUMENT_ROOT=/var/www/html/public +ENV APACHE_RUN_USER www-data +ENV APACHE_RUN_GROUP www-data +ENV APACHE_PID_FILE /var/run/apache2.pid +ENV APACHE_RUN_DIR /var/run/apache2 +ENV APACHE_LOCK_DIR /var/lock/apache2 +ENV APACHE_LOG_DIR /var/log +RUN ln -sf /dev/stdout /var/log/apache2-access.log +RUN ln -sf /dev/stderr /var/log/apache2-error.log +RUN mkdir -p $APACHE_RUN_DIR $APACHE_LOCK_DIR + +RUN a2enmod proxy +RUN a2enmod proxy_http +RUN a2enmod headers ARG DATASETTE_REF RUN pip install https://github.com/simonw/datasette/archive/${DATASETTE_REF}.zip -ENV TINI_VERSION v0.18.0 -ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-static /tini -RUN chmod +x /tini - # Append this to the end of the default httpd.conf file -RUN echo -e 'ServerName localhost\n\ +RUN echo '\n\ +\n\ + Options Indexes FollowSymLinks\n\ + AllowOverride None\n\ + Require all granted\n\ +\n\ \n\ -\n\ - Order deny,allow\n\ - Allow from all\n\ -\n\ -\n\ -ProxyPreserveHost On\n\ -ProxyPass /prefix/ http://localhost:8001/\n\ -Header add X-Proxied-By "Apache2"' >> /etc/apache2/httpd.conf - -RUN echo 'Datasette' > /var/www/localhost/htdocs/index.html +\n\ + ServerName localhost\n\ + DocumentRoot /app/html\n\ + ProxyPreserveHost On\n\ + ProxyPass /prefix/ http://127.0.0.1:8001/\n\ + Header add X-Proxied-By "Apache2 Debian"\n\ +\n\ +' > /etc/apache2/sites-enabled/000-default.conf WORKDIR /app +RUN mkdir -p /app/html +RUN echo 'Datasette' > /app/html/index.html ADD https://latest.datasette.io/fixtures.db /app/fixtures.db -RUN echo -e 
"#!/usr/bin/env bash\n\ -datasette /app/fixtures.db --setting base_url '/prefix/' --version-note '${DATASETTE_REF}' -h 0.0.0.0 -p 8001 &\n\ -\n\ -httpd -D FOREGROUND & \n\ -\n\ -wait -n\n\ -exit $?" > /app/start.sh - -RUN chmod +x /app/start.sh - EXPOSE 80 -CMD /tini -- /app/start.sh +RUN echo "[supervisord]" >> /app/supervisord.conf +RUN echo "nodaemon=true" >> /app/supervisord.conf +RUN echo "" >> /app/supervisord.conf +RUN echo "[program:apache2]" >> /app/supervisord.conf +RUN echo "command=apache2 -D FOREGROUND" >> /app/supervisord.conf +RUN echo "" >> /app/supervisord.conf +RUN echo "[program:datasette]" >> /app/supervisord.conf +RUN echo "command=datasette /app/fixtures.db --setting base_url '/prefix/' --version-note '${DATASETTE_REF}' -h 0.0.0.0 -p 8001" >> /app/supervisord.conf + +CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"] From 48951e4304cc39b49e26682836d6961e165bddb1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 20 Nov 2021 10:51:51 -0800 Subject: [PATCH 0023/1103] Switch to hosting demo on Fly, closes #1522 --- demos/apache-proxy/README.md | 24 +++++++++++- .../{deploy.sh => deploy-to-cloud-run.sh} | 0 demos/apache-proxy/fly.toml | 37 +++++++++++++++++++ 3 files changed, 60 insertions(+), 1 deletion(-) rename demos/apache-proxy/{deploy.sh => deploy-to-cloud-run.sh} (100%) create mode 100644 demos/apache-proxy/fly.toml diff --git a/demos/apache-proxy/README.md b/demos/apache-proxy/README.md index 08048512..c76e440d 100644 --- a/demos/apache-proxy/README.md +++ b/demos/apache-proxy/README.md @@ -2,7 +2,7 @@ See also [Running Datasette behind a proxy](https://docs.datasette.io/en/latest/deploying.html#running-datasette-behind-a-proxy) -This live demo is running at https://apache-proxy-demo.datasette.io/ +This live demo is running at https://datasette-apache-proxy-demo.fly.dev/prefix/ To build locally, passing in a Datasette commit hash (or `main` for the main branch): @@ -14,3 +14,25 @@ Then run it like this: docker run -p 5000:80 datasette-apache-proxy-demo And visit `http://localhost:5000/` or `http://localhost:5000/prefix/` + +## Deployment to Fly + +To deploy to [Fly](https://fly.io/) first create an application there by running: + + flyctl apps create --name datasette-apache-proxy-demo + +You will need a different name, since I have already taken that one. + +Then run this command to deploy: + + flyctl deploy --build-arg DATASETTE_REF=main + +This uses `fly.toml` in this directory, which hard-codes the `datasette-apache-proxy-demo` name - so you would need to edit that file to match your application name before running this. + +## Deployment to Cloud Run + +Deployments to Cloud Run currently result in intermittent 503 errors and I'm not sure why, see [issue #1522](https://github.com/simonw/datasette/issues/1522). 
+ +You can deploy like this: + + DATASETTE_REF=main ./deploy-to-cloud-run.sh diff --git a/demos/apache-proxy/deploy.sh b/demos/apache-proxy/deploy-to-cloud-run.sh similarity index 100% rename from demos/apache-proxy/deploy.sh rename to demos/apache-proxy/deploy-to-cloud-run.sh diff --git a/demos/apache-proxy/fly.toml b/demos/apache-proxy/fly.toml new file mode 100644 index 00000000..52e6af5d --- /dev/null +++ b/demos/apache-proxy/fly.toml @@ -0,0 +1,37 @@ +app = "datasette-apache-proxy-demo" + +kill_signal = "SIGINT" +kill_timeout = 5 +processes = [] + +[env] + +[experimental] + allowed_public_ports = [] + auto_rollback = true + +[[services]] + http_checks = [] + internal_port = 80 + processes = ["app"] + protocol = "tcp" + script_checks = [] + + [services.concurrency] + hard_limit = 25 + soft_limit = 20 + type = "connections" + + [[services.ports]] + handlers = ["http"] + port = 80 + + [[services.ports]] + handlers = ["tls", "http"] + port = 443 + + [[services.tcp_checks]] + grace_period = "1s" + interval = "15s" + restart_limit = 0 + timeout = "2s" From 08947fa76433d18988aa1ee1d929bd8320c75fe2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 20 Nov 2021 11:03:08 -0800 Subject: [PATCH 0024/1103] Fix more broken base_url links Refs #1519, #838 --- datasette/facets.py | 10 +++++----- datasette/views/table.py | 2 +- tests/test_html.py | 1 + 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index 9a43b95e..29923ef7 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -225,8 +225,8 @@ class ColumnFacet(Facet): "name": column, "type": self.type, "hideable": source != "metadata", - "toggle_url": path_with_removed_args( - self.request, {"_facet": column} + "toggle_url": ds.urls.path( + path_with_removed_args(self.request, {"_facet": column}) ), "results": facet_results_values, "truncated": len(facet_rows_results) > facet_size, @@ -259,7 +259,7 @@ class ColumnFacet(Facet): "label": expanded.get((column, row["value"]), row["value"]), "count": row["count"], "toggle_url": self.ds.absolute_url( - self.request, toggle_path + self.request, self.ds.urls.path(toggle_path) ), "selected": selected, } @@ -397,8 +397,8 @@ class ArrayFacet(Facet): "type": self.type, "results": facet_results_values, "hideable": source != "metadata", - "toggle_url": path_with_removed_args( - self.request, {"_facet_array": column} + "toggle_url": self.ds.urls.path( + path_with_removed_args(self.request, {"_facet_array": column}) ), "truncated": len(facet_rows_results) > facet_size, } diff --git a/datasette/views/table.py b/datasette/views/table.py index 66447aa0..1960f455 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -839,7 +839,7 @@ class TableView(RowTableShared): else: added_args = {"_next": next_value} next_url = self.ds.absolute_url( - request, path_with_replaced_args(request, added_args) + request, self.ds.urls.path(path_with_replaced_args(request, added_args)) ) rows = rows[:page_size] diff --git a/tests/test_html.py b/tests/test_html.py index 3301b91d..68508d75 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1614,6 +1614,7 @@ def test_metadata_sort_desc(app_client): "/fixtures/compound_three_primary_keys/a,a,a", "/fixtures/paginated_view", "/fixtures/facetable", + "/fixtures/facetable?_facet=state", "/fixtures?sql=select+1", ], ) From 250db8192cb8aba5eb8cd301ccc2a49525bc3d24 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 20 Nov 2021 11:09:05 -0800 Subject: [PATCH 0025/1103] Hopefully last fix relating to 
#1519, #838

---
 datasette/facets.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/datasette/facets.py b/datasette/facets.py
index 29923ef7..8fd2177a 100644
--- a/datasette/facets.py
+++ b/datasette/facets.py
@@ -225,7 +225,7 @@ class ColumnFacet(Facet):
                 "name": column,
                 "type": self.type,
                 "hideable": source != "metadata",
-                "toggle_url": ds.urls.path(
+                "toggle_url": self.ds.urls.path(
                     path_with_removed_args(self.request, {"_facet": column})
                 ),
                 "results": facet_results_values,

From f11a13d73f021906f04b495cd589915e9a926bc5 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 20 Nov 2021 12:23:40 -0800
Subject: [PATCH 0026/1103] Extract out Apache config to separate file, refs
 #1524

---
 demos/apache-proxy/000-default.conf | 13 +++++++++++++
 demos/apache-proxy/Dockerfile       | 20 +++-----------------
 2 files changed, 16 insertions(+), 17 deletions(-)
 create mode 100644 demos/apache-proxy/000-default.conf

diff --git a/demos/apache-proxy/000-default.conf b/demos/apache-proxy/000-default.conf
new file mode 100644
index 00000000..5b6607a3
--- /dev/null
+++ b/demos/apache-proxy/000-default.conf
@@ -0,0 +1,13 @@
+<Directory /app/html/>
+    Options Indexes FollowSymLinks
+    AllowOverride None
+    Require all granted
+</Directory>
+
+<VirtualHost *:80>
+    ServerName localhost
+    DocumentRoot /app/html
+    ProxyPreserveHost On
+    ProxyPass /prefix/ http://127.0.0.1:8001/
+    Header add X-Proxied-By "Apache2 Debian"
+</VirtualHost>
diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile
index 40f5e31d..0854b552 100644
--- a/demos/apache-proxy/Dockerfile
+++ b/demos/apache-proxy/Dockerfile
@@ -27,31 +27,17 @@ ARG DATASETTE_REF

 RUN pip install https://github.com/simonw/datasette/archive/${DATASETTE_REF}.zip

-# Append this to the end of the default httpd.conf file
-RUN echo '\n\
-<Directory /app/html/>\n\
-    Options Indexes FollowSymLinks\n\
-    AllowOverride None\n\
-    Require all granted\n\
-</Directory>\n\
-\n\
-<VirtualHost *:80>\n\
-    ServerName localhost\n\
-    DocumentRoot /app/html\n\
-    ProxyPreserveHost On\n\
-    ProxyPass /prefix/ http://127.0.0.1:8001/\n\
-    Header add X-Proxied-By "Apache2 Debian"\n\
-</VirtualHost>\n\
-' > /etc/apache2/sites-enabled/000-default.conf
+ADD 000-default.conf /etc/apache2/sites-enabled/000-default.conf

 WORKDIR /app

 RUN mkdir -p /app/html
-RUN echo 'Datasette' > /app/html/index.html
+RUN echo '

Demo is at /prefix/

' > /app/html/index.html

 ADD https://latest.datasette.io/fixtures.db /app/fixtures.db

 EXPOSE 80

+# Dynamically build supervisord config since it includes $DATASETTE_REF:
 RUN echo "[supervisord]" >> /app/supervisord.conf
 RUN echo "nodaemon=true" >> /app/supervisord.conf
 RUN echo "" >> /app/supervisord.conf
 RUN echo "[program:apache2]" >> /app/supervisord.conf
 RUN echo "command=apache2 -D FOREGROUND" >> /app/supervisord.conf
 RUN echo "" >> /app/supervisord.conf
 RUN echo "[program:datasette]" >> /app/supervisord.conf
 RUN echo "command=datasette /app/fixtures.db --setting base_url '/prefix/' --version-note '${DATASETTE_REF}' -h 0.0.0.0 -p 8001" >> /app/supervisord.conf

 CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]

From ed77eda6d8f10c63fc0670c7150fc974f786ade5 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 20 Nov 2021 15:30:25 -0800
Subject: [PATCH 0027/1103] Add datasette-redirect-to-https plugin

Also configured supervisord children to log to stdout, so that I can see
them with flyctl logs -a datasette-apache-proxy-demo

Refs #1524
---
 demos/apache-proxy/Dockerfile | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile
index 0854b552..ab7b9d16 100644
--- a/demos/apache-proxy/Dockerfile
+++ b/demos/apache-proxy/Dockerfile
@@ -25,7 +25,9 @@ RUN a2enmod headers

 ARG DATASETTE_REF

-RUN pip install https://github.com/simonw/datasette/archive/${DATASETTE_REF}.zip
+RUN pip install \
+    https://github.com/simonw/datasette/archive/${DATASETTE_REF}.zip \
+    datasette-redirect-to-https

 ADD 000-default.conf /etc/apache2/sites-enabled/000-default.conf

@@ -43,8 +45,12 @@ RUN echo "nodaemon=true" >> /app/supervisord.conf
 RUN echo "" >> /app/supervisord.conf
 RUN echo "[program:apache2]" >> /app/supervisord.conf
 RUN echo "command=apache2 -D FOREGROUND" >> /app/supervisord.conf
+RUN echo "stdout_logfile=/dev/stdout" >> /app/supervisord.conf
+RUN echo "stdout_logfile_maxbytes=0" >> /app/supervisord.conf
 RUN echo "" >> /app/supervisord.conf
 RUN echo "[program:datasette]" >> /app/supervisord.conf
 RUN echo "command=datasette /app/fixtures.db --setting base_url '/prefix/' --version-note '${DATASETTE_REF}' -h 0.0.0.0 -p 8001" >> /app/supervisord.conf
+RUN echo "stdout_logfile=/dev/stdout" >> /app/supervisord.conf
+RUN echo "stdout_logfile_maxbytes=0" >> /app/supervisord.conf

 CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]

From d8c79b1340ceb742077587fb7f76ed8699d4e402 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 20 Nov 2021 15:33:58 -0800
Subject: [PATCH 0028/1103] Link to Apache proxy demo from documentation,
 closes #1524

---
 docs/deploying.rst | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/docs/deploying.rst b/docs/deploying.rst
index 83d9e4dd..d4ad8836 100644
--- a/docs/deploying.rst
+++ b/docs/deploying.rst
@@ -188,6 +188,8 @@ Then add these directives to proxy traffic::

     ProxyPass /my-datasette/ http://127.0.0.1:8009/my-datasette/
     ProxyPreserveHost On

+A live demo of Datasette running behind Apache using this proxy setup can be seen at `datasette-apache-proxy-demo.datasette.io/prefix/ <https://datasette-apache-proxy-demo.datasette.io/prefix/>`__. The code for that demo can be found in the `demos/apache-proxy <https://github.com/simonw/datasette/tree/main/demos/apache-proxy>`__ directory.
+ Using ``--uds`` you can use Unix domain sockets similar to the nginx example:: ProxyPass /my-datasette/ unix:/tmp/datasette.sock|http://localhost/my-datasette/ From 48f11998b73350057b74fe6ab464d4ac3071637c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 20 Nov 2021 15:40:21 -0800 Subject: [PATCH 0029/1103] Release 0.59.3 Refs #448, #838, #1519 --- datasette/version.py | 2 +- docs/changelog.rst | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index db89b418..0ba55573 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.59.2" +__version__ = "0.59.3" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 47ca3480..449ce412 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,15 @@ Changelog ========= +.. _v0_59_3: + +0.59.3 (2021-11-20) +------------------- + +- Fixed numerous bugs when running Datasette :ref:`behind a proxy ` with a prefix URL path using the :ref:`setting_base_url` setting. A live demo of this mode is now available at `datasette-apache-proxy-demo.datasette.io/prefix/ `__. (:issue:`1519`, :issue:`838`) +- ``?column__arraycontains=`` and ``?column__arraynotcontains=`` table parameters now also work against SQL views. (:issue:`448`) +- ``?_facet_array=column`` no longer returns incorrect counts if columns contain the same value more than once. + .. _v0_59_2: 0.59.2 (2021-11-13) From 1beb7d939999da79bb77c4d3c777657c8a16bcd9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Nov 2021 18:29:54 -0800 Subject: [PATCH 0030/1103] Update aiofiles requirement from <0.8,>=0.4 to >=0.4,<0.9 (#1537) Updates the requirements on [aiofiles](https://github.com/Tinche/aiofiles) to permit the latest version. - [Release notes](https://github.com/Tinche/aiofiles/releases) - [Commits](https://github.com/Tinche/aiofiles/compare/v0.4.0...v0.8.0) --- updated-dependencies: - dependency-name: aiofiles dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 17a56a97..9e205ce2 100644 --- a/setup.py +++ b/setup.py @@ -51,7 +51,7 @@ setup( "pint~=0.9", "pluggy>=0.13,<1.1", "uvicorn~=0.11", - "aiofiles>=0.4,<0.8", + "aiofiles>=0.4,<0.9", "janus>=0.6.2,<0.7", "asgi-csrf>=0.9", "PyYAML>=5.3,<7.0", From 3303514a52b7170f2f1e598cd9c5f82c22f26e6c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Nov 2021 18:35:18 -0800 Subject: [PATCH 0031/1103] Update docutils requirement from <0.18 to <0.19 (#1508) Updates the requirements on [docutils](http://docutils.sourceforge.net/) to permit the latest version. --- updated-dependencies: - dependency-name: docutils dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/readthedocs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/readthedocs-requirements.txt b/docs/readthedocs-requirements.txt index 93120e66..db1851ad 100644 --- a/docs/readthedocs-requirements.txt +++ b/docs/readthedocs-requirements.txt @@ -1 +1 @@ -docutils<0.18 +docutils<0.19 From cc4c70b3670ce2a85bb883b8d5626574590efe14 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Nov 2021 18:35:28 -0800 Subject: [PATCH 0032/1103] Bump black from 21.9b0 to 21.11b1 (#1516) Bumps [black](https://github.com/psf/black) from 21.9b0 to 21.11b1. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/commits) --- updated-dependencies: - dependency-name: black dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 9e205ce2..71422d87 100644 --- a/setup.py +++ b/setup.py @@ -71,7 +71,7 @@ setup( "pytest-xdist>=2.2.1,<2.5", "pytest-asyncio>=0.10,<0.17", "beautifulsoup4>=4.8.1,<4.11.0", - "black==21.9b0", + "black==21.11b1", "pytest-timeout>=1.4.2,<2.1", "trustme>=0.7,<0.10", ], From 83eb29deced2430f40c3374ff9085d65d86d8281 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Nov 2021 18:37:13 -0800 Subject: [PATCH 0033/1103] Update janus requirement from <0.7,>=0.6.2 to >=0.6.2,<0.8 (#1529) Updates the requirements on [janus](https://github.com/aio-libs/janus) to permit the latest version. - [Release notes](https://github.com/aio-libs/janus/releases) - [Changelog](https://github.com/aio-libs/janus/blob/master/CHANGES.rst) - [Commits](https://github.com/aio-libs/janus/compare/v0.6.2...v0.7.0) --- updated-dependencies: - dependency-name: janus dependency-type: direct:production ... 
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 71422d87..3cb657e3 100644
--- a/setup.py
+++ b/setup.py
@@ -52,7 +52,7 @@ setup(
         "pluggy>=0.13,<1.1",
         "uvicorn~=0.11",
         "aiofiles>=0.4,<0.9",
-        "janus>=0.6.2,<0.7",
+        "janus>=0.6.2,<0.8",
         "asgi-csrf>=0.9",
         "PyYAML>=5.3,<7.0",
         "mergedeep>=1.1.1,<1.4.0",

From 06762776f712526fdb40a18ed26f259be62bb214 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 29 Nov 2021 19:04:20 -0800
Subject: [PATCH 0034/1103] Fix for incorrect hidden form fields for _columns,
 refs #1527

---
 datasette/views/table.py |  6 +++++-
 tests/test_html.py       | 13 +++++++++++++
 2 files changed, 18 insertions(+), 1 deletion(-)

diff --git a/datasette/views/table.py b/datasette/views/table.py
index 1960f455..9fc6afcf 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -889,7 +889,11 @@ class TableView(RowTableShared):

         form_hidden_args = []
         for key in request.args:
-            if key.startswith("_") and key not in ("_sort", "_search", "_next"):
+            if (
+                key.startswith("_")
+                and key not in ("_sort", "_search", "_next")
+                and not key.endswith("__exact")
+            ):
                 for value in request.args.getlist(key):
                     form_hidden_args.append((key, value))

diff --git a/tests/test_html.py b/tests/test_html.py
index 68508d75..179c3f09 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -326,6 +326,19 @@ def test_existing_filter_redirects(app_client):
     assert "?" not in response.headers["Location"]


+def test_exact_parameter_results_in_correct_hidden_fields(app_client):
+    # https://github.com/simonw/datasette/issues/1527
+    response = app_client.get(
+        "/fixtures/facetable?_facet=_neighborhood&_neighborhood__exact=Downtown"
+    )
+    # In this case we should NOT have a hidden _neighborhood__exact=Downtown field
+    form = Soup(response.body, "html.parser").find("form")
+    hidden_inputs = {
+        input["name"]: input["value"] for input in form.select("input[type=hidden]")
+    }
+    assert hidden_inputs == {"_facet": "_neighborhood"}
+
+
 def test_empty_search_parameter_gets_removed(app_client):
     path_base = "/fixtures/simple_primary_key"
     path = (

From 69244a617b1118dcbd04a8f102173f04680cf08c Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 29 Nov 2021 22:17:27 -0800
Subject: [PATCH 0035/1103] Rename city_id to _city_id in fixtures, refs #1525

---
 tests/fixtures.py                |  8 ++---
 tests/test_api.py                | 50 ++++++++++++++++----------------
 tests/test_facets.py             | 36 +++++++++++------------
 tests/test_html.py               | 34 +++++++++++-----------
 tests/test_internals_database.py |  4 +--
 tests/test_plugins.py            |  8 ++---
 6 files changed, 70 insertions(+), 70 deletions(-)

diff --git a/tests/fixtures.py b/tests/fixtures.py
index 1a879126..37399da0 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -358,7 +358,7 @@ METADATA = {
             select _neighborhood, facet_cities.name, state
             from facetable
                 join facet_cities
-                    on facetable.city_id = facet_cities.id
+                    on facetable._city_id = facet_cities.id
             where _neighborhood like '%' || :text || '%'
             order by _neighborhood;
             """
@@ -558,15 +558,15 @@ CREATE TABLE facetable (
   planet_int integer,
   on_earth integer,
   state text,
-  city_id integer,
+  _city_id integer,
   _neighborhood text,
   tags text,
   complex_array text,
   distinct_some_null,
-  FOREIGN KEY ("city_id") REFERENCES [facet_cities](id)
+  FOREIGN KEY ("_city_id") REFERENCES [facet_cities](id)
 );
 INSERT INTO facetable
-    (created, planet_int, on_earth, state, city_id, 
_neighborhood, tags, complex_array, distinct_some_null) + (created, planet_int, on_earth, state, _city_id, _neighborhood, tags, complex_array, distinct_some_null) VALUES ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]', 'one'), ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]', 'two'), diff --git a/tests/test_api.py b/tests/test_api.py index 43b52175..8b3fcd75 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -197,7 +197,7 @@ def test_database_page(app_client): { "other_table": "facetable", "column": "id", - "other_column": "city_id", + "other_column": "_city_id", } ], "outgoing": [], @@ -212,7 +212,7 @@ def test_database_page(app_client): "planet_int", "on_earth", "state", - "city_id", + "_city_id", "_neighborhood", "tags", "complex_array", @@ -227,7 +227,7 @@ def test_database_page(app_client): "outgoing": [ { "other_table": "facet_cities", - "column": "city_id", + "column": "_city_id", "other_column": "id", } ], @@ -1512,40 +1512,40 @@ def test_page_size_matching_max_returned_rows( "path,expected_facet_results", [ ( - "/fixtures/facetable.json?_facet=state&_facet=city_id", + "/fixtures/facetable.json?_facet=state&_facet=_city_id", { "state": { "name": "state", "hideable": True, "type": "column", - "toggle_url": "/fixtures/facetable.json?_facet=city_id", + "toggle_url": "/fixtures/facetable.json?_facet=_city_id", "results": [ { "value": "CA", "label": "CA", "count": 10, - "toggle_url": "_facet=state&_facet=city_id&state=CA", + "toggle_url": "_facet=state&_facet=_city_id&state=CA", "selected": False, }, { "value": "MI", "label": "MI", "count": 4, - "toggle_url": "_facet=state&_facet=city_id&state=MI", + "toggle_url": "_facet=state&_facet=_city_id&state=MI", "selected": False, }, { "value": "MC", "label": "MC", "count": 1, - "toggle_url": "_facet=state&_facet=city_id&state=MC", + "toggle_url": "_facet=state&_facet=_city_id&state=MC", "selected": False, }, ], "truncated": False, }, - "city_id": { - "name": "city_id", + "_city_id": { + "name": "_city_id", "hideable": True, "type": "column", "toggle_url": "/fixtures/facetable.json?_facet=state", @@ -1554,28 +1554,28 @@ def test_page_size_matching_max_returned_rows( "value": 1, "label": "San Francisco", "count": 6, - "toggle_url": "_facet=state&_facet=city_id&city_id=1", + "toggle_url": "_facet=state&_facet=_city_id&_city_id__exact=1", "selected": False, }, { "value": 2, "label": "Los Angeles", "count": 4, - "toggle_url": "_facet=state&_facet=city_id&city_id=2", + "toggle_url": "_facet=state&_facet=_city_id&_city_id__exact=2", "selected": False, }, { "value": 3, "label": "Detroit", "count": 4, - "toggle_url": "_facet=state&_facet=city_id&city_id=3", + "toggle_url": "_facet=state&_facet=_city_id&_city_id__exact=3", "selected": False, }, { "value": 4, "label": "Memnonia", "count": 1, - "toggle_url": "_facet=state&_facet=city_id&city_id=4", + "toggle_url": "_facet=state&_facet=_city_id&_city_id__exact=4", "selected": False, }, ], @@ -1584,26 +1584,26 @@ def test_page_size_matching_max_returned_rows( }, ), ( - "/fixtures/facetable.json?_facet=state&_facet=city_id&state=MI", + "/fixtures/facetable.json?_facet=state&_facet=_city_id&state=MI", { "state": { "name": "state", "hideable": True, "type": "column", - "toggle_url": "/fixtures/facetable.json?_facet=city_id&state=MI", + "toggle_url": "/fixtures/facetable.json?_facet=_city_id&state=MI", "results": [ { "value": "MI", "label": "MI", "count": 4, "selected": True, - "toggle_url": "_facet=state&_facet=city_id", + "toggle_url": 
"_facet=state&_facet=_city_id", } ], "truncated": False, }, - "city_id": { - "name": "city_id", + "_city_id": { + "name": "_city_id", "hideable": True, "type": "column", "toggle_url": "/fixtures/facetable.json?_facet=state&state=MI", @@ -1613,7 +1613,7 @@ def test_page_size_matching_max_returned_rows( "label": "Detroit", "count": 4, "selected": False, - "toggle_url": "_facet=state&_facet=city_id&state=MI&city_id=3", + "toggle_url": "_facet=state&_facet=_city_id&state=MI&_city_id__exact=3", } ], "truncated": False, @@ -1699,7 +1699,7 @@ def test_suggested_facets(app_client): {"name": "planet_int", "querystring": "_facet=planet_int"}, {"name": "on_earth", "querystring": "_facet=on_earth"}, {"name": "state", "querystring": "_facet=state"}, - {"name": "city_id", "querystring": "_facet=city_id"}, + {"name": "_city_id", "querystring": "_facet=_city_id"}, {"name": "_neighborhood", "querystring": "_facet=_neighborhood"}, {"name": "tags", "querystring": "_facet=tags"}, {"name": "complex_array", "querystring": "_facet=complex_array"}, @@ -1765,7 +1765,7 @@ def test_expand_labels(app_client): "planet_int": 1, "on_earth": 1, "state": "CA", - "city_id": {"value": 1, "label": "San Francisco"}, + "_city_id": {"value": 1, "label": "San Francisco"}, "_neighborhood": "Dogpatch", "tags": '["tag1", "tag3"]', "complex_array": "[]", @@ -1777,7 +1777,7 @@ def test_expand_labels(app_client): "planet_int": 1, "on_earth": 1, "state": "MI", - "city_id": {"value": 3, "label": "Detroit"}, + "_city_id": {"value": 3, "label": "Detroit"}, "_neighborhood": "Corktown", "tags": "[]", "complex_array": "[]", @@ -2128,7 +2128,7 @@ def test_http_options_request(app_client): "planet_int", "on_earth", "state", - "city_id", + "_city_id", "_neighborhood", "tags", "complex_array", @@ -2155,7 +2155,7 @@ def test_http_options_request(app_client): "created", "planet_int", "on_earth", - "city_id", + "_city_id", "_neighborhood", "tags", "complex_array", diff --git a/tests/test_facets.py b/tests/test_facets.py index a20c79c4..429117cb 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -23,7 +23,7 @@ async def test_column_facet_suggest(app_client): {"name": "planet_int", "toggle_url": "http://localhost/?_facet=planet_int"}, {"name": "on_earth", "toggle_url": "http://localhost/?_facet=on_earth"}, {"name": "state", "toggle_url": "http://localhost/?_facet=state"}, - {"name": "city_id", "toggle_url": "http://localhost/?_facet=city_id"}, + {"name": "_city_id", "toggle_url": "http://localhost/?_facet=_city_id"}, { "name": "_neighborhood", "toggle_url": "http://localhost/?_facet=_neighborhood", @@ -56,8 +56,8 @@ async def test_column_facet_suggest_skip_if_already_selected(app_client): "toggle_url": "http://localhost/?_facet=planet_int&_facet=on_earth&_facet=state", }, { - "name": "city_id", - "toggle_url": "http://localhost/?_facet=planet_int&_facet=on_earth&_facet=city_id", + "name": "_city_id", + "toggle_url": "http://localhost/?_facet=planet_int&_facet=on_earth&_facet=_city_id", }, { "name": "_neighborhood", @@ -82,7 +82,7 @@ async def test_column_facet_suggest_skip_if_enabled_by_metadata(app_client): database="fixtures", sql="select * from facetable", table="facetable", - metadata={"facets": ["city_id"]}, + metadata={"facets": ["_city_id"]}, ) suggestions = [s["name"] for s in await facet.suggest()] assert [ @@ -100,7 +100,7 @@ async def test_column_facet_suggest_skip_if_enabled_by_metadata(app_client): async def test_column_facet_results(app_client): facet = ColumnFacet( app_client.ds, - Request.fake("/?_facet=city_id"), + 
Request.fake("/?_facet=_city_id"), database="fixtures", sql="select * from facetable", table="facetable", @@ -108,8 +108,8 @@ async def test_column_facet_results(app_client): buckets, timed_out = await facet.facet_results() assert [] == timed_out assert { - "city_id": { - "name": "city_id", + "_city_id": { + "name": "_city_id", "type": "column", "hideable": True, "toggle_url": "/", @@ -118,28 +118,28 @@ async def test_column_facet_results(app_client): "value": 1, "label": "San Francisco", "count": 6, - "toggle_url": "http://localhost/?_facet=city_id&city_id=1", + "toggle_url": "http://localhost/?_facet=_city_id&_city_id__exact=1", "selected": False, }, { "value": 2, "label": "Los Angeles", "count": 4, - "toggle_url": "http://localhost/?_facet=city_id&city_id=2", + "toggle_url": "http://localhost/?_facet=_city_id&_city_id__exact=2", "selected": False, }, { "value": 3, "label": "Detroit", "count": 4, - "toggle_url": "http://localhost/?_facet=city_id&city_id=3", + "toggle_url": "http://localhost/?_facet=_city_id&_city_id__exact=3", "selected": False, }, { "value": 4, "label": "Memnonia", "count": 1, - "toggle_url": "http://localhost/?_facet=city_id&city_id=4", + "toggle_url": "http://localhost/?_facet=_city_id&_city_id__exact=4", "selected": False, }, ], @@ -278,13 +278,13 @@ async def test_column_facet_from_metadata_cannot_be_hidden(app_client): database="fixtures", sql="select * from facetable", table="facetable", - metadata={"facets": ["city_id"]}, + metadata={"facets": ["_city_id"]}, ) buckets, timed_out = await facet.facet_results() assert [] == timed_out assert { - "city_id": { - "name": "city_id", + "_city_id": { + "name": "_city_id", "type": "column", "hideable": False, "toggle_url": "/", @@ -293,28 +293,28 @@ async def test_column_facet_from_metadata_cannot_be_hidden(app_client): "value": 1, "label": "San Francisco", "count": 6, - "toggle_url": "http://localhost/?city_id=1", + "toggle_url": "http://localhost/?_city_id__exact=1", "selected": False, }, { "value": 2, "label": "Los Angeles", "count": 4, - "toggle_url": "http://localhost/?city_id=2", + "toggle_url": "http://localhost/?_city_id__exact=2", "selected": False, }, { "value": 3, "label": "Detroit", "count": 4, - "toggle_url": "http://localhost/?city_id=3", + "toggle_url": "http://localhost/?_city_id__exact=3", "selected": False, }, { "value": 4, "label": "Memnonia", "count": 1, - "toggle_url": "http://localhost/?city_id=4", + "toggle_url": "http://localhost/?_city_id__exact=4", "selected": False, }, ], diff --git a/tests/test_html.py b/tests/test_html.py index 179c3f09..aaf7da09 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -487,7 +487,7 @@ def test_sort_links(app_client): def test_facet_display(app_client): response = app_client.get( - "/fixtures/facetable?_facet=planet_int&_facet=city_id&_facet=on_earth" + "/fixtures/facetable?_facet=planet_int&_facet=_city_id&_facet=on_earth" ) assert response.status == 200 soup = Soup(response.body, "html.parser") @@ -509,26 +509,26 @@ def test_facet_display(app_client): ) assert actual == [ { - "name": "city_id", + "name": "_city_id", "items": [ { "name": "San Francisco", - "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&city_id=1", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&_city_id__exact=1", "count": 6, }, { "name": "Los Angeles", - "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&city_id=2", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&_city_id__exact=2", "count": 4, }, { "name": "Detroit", - "qs": 
"_facet=planet_int&_facet=city_id&_facet=on_earth&city_id=3", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&_city_id__exact=3", "count": 4, }, { "name": "Memnonia", - "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&city_id=4", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&_city_id__exact=4", "count": 1, }, ], @@ -538,12 +538,12 @@ def test_facet_display(app_client): "items": [ { "name": "1", - "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&planet_int=1", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&planet_int=1", "count": 14, }, { "name": "2", - "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&planet_int=2", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&planet_int=2", "count": 1, }, ], @@ -553,12 +553,12 @@ def test_facet_display(app_client): "items": [ { "name": "1", - "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&on_earth=1", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&on_earth=1", "count": 14, }, { "name": "0", - "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&on_earth=0", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&on_earth=0", "count": 1, }, ], @@ -568,14 +568,14 @@ def test_facet_display(app_client): def test_facets_persist_through_filter_form(app_client): response = app_client.get( - "/fixtures/facetable?_facet=planet_int&_facet=city_id&_facet_array=tags" + "/fixtures/facetable?_facet=planet_int&_facet=_city_id&_facet_array=tags" ) assert response.status == 200 inputs = Soup(response.body, "html.parser").find("form").findAll("input") hiddens = [i for i in inputs if i["type"] == "hidden"] assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == [ ("_facet", "planet_int"), - ("_facet", "city_id"), + ("_facet", "_city_id"), ("_facet_array", "tags"), ] @@ -1350,20 +1350,20 @@ def test_canned_query_show_hide_metadata_option( def test_extra_where_clauses(app_client): response = app_client.get( - "/fixtures/facetable?_where=_neighborhood='Dogpatch'&_where=city_id=1" + "/fixtures/facetable?_where=_neighborhood='Dogpatch'&_where=_city_id=1" ) soup = Soup(response.body, "html.parser") div = soup.select(".extra-wheres")[0] assert "2 extra where clauses" == div.find("h3").text hrefs = [a["href"] for a in div.findAll("a")] assert [ - "/fixtures/facetable?_where=city_id%3D1", + "/fixtures/facetable?_where=_city_id%3D1", "/fixtures/facetable?_where=_neighborhood%3D%27Dogpatch%27", ] == hrefs # These should also be persisted as hidden fields inputs = soup.find("form").findAll("input") hiddens = [i for i in inputs if i["type"] == "hidden"] - assert [("_where", "_neighborhood='Dogpatch'"), ("_where", "city_id=1")] == [ + assert [("_where", "_neighborhood='Dogpatch'"), ("_where", "_city_id=1")] == [ (hidden["name"], hidden["value"]) for hidden in hiddens ] @@ -1683,11 +1683,11 @@ def test_base_url_affects_metadata_extra_css_urls(app_client_base_url_prefix): [ ( "/fixtures/neighborhood_search", - "/fixtures?sql=%0Aselect+_neighborhood%2C+facet_cities.name%2C+state%0Afrom+facetable%0A++++join+facet_cities%0A++++++++on+facetable.city_id+%3D+facet_cities.id%0Awhere+_neighborhood+like+%27%25%27+%7C%7C+%3Atext+%7C%7C+%27%25%27%0Aorder+by+_neighborhood%3B%0A&text=", + "/fixtures?sql=%0Aselect+_neighborhood%2C+facet_cities.name%2C+state%0Afrom+facetable%0A++++join+facet_cities%0A++++++++on+facetable._city_id+%3D+facet_cities.id%0Awhere+_neighborhood+like+%27%25%27+%7C%7C+%3Atext+%7C%7C+%27%25%27%0Aorder+by+_neighborhood%3B%0A&text=", ), ( 
"/fixtures/neighborhood_search?text=ber", - "/fixtures?sql=%0Aselect+_neighborhood%2C+facet_cities.name%2C+state%0Afrom+facetable%0A++++join+facet_cities%0A++++++++on+facetable.city_id+%3D+facet_cities.id%0Awhere+_neighborhood+like+%27%25%27+%7C%7C+%3Atext+%7C%7C+%27%25%27%0Aorder+by+_neighborhood%3B%0A&text=ber", + "/fixtures?sql=%0Aselect+_neighborhood%2C+facet_cities.name%2C+state%0Afrom+facetable%0A++++join+facet_cities%0A++++++++on+facetable._city_id+%3D+facet_cities.id%0Awhere+_neighborhood+like+%27%25%27+%7C%7C+%3Atext+%7C%7C+%27%25%27%0Aorder+by+_neighborhood%3B%0A&text=ber", ), ("/fixtures/pragma_cache_size", None), ( diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 2d0cae7f..a00fe447 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -81,7 +81,7 @@ async def test_table_exists(db, tables, exists): "planet_int", "on_earth", "state", - "city_id", + "_city_id", "_neighborhood", "tags", "complex_array", @@ -161,7 +161,7 @@ async def test_table_columns(db, table, expected): ), Column( cid=5, - name="city_id", + name="_city_id", type="integer", notnull=0, default_value=None, diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 697a6b32..1da28453 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -436,7 +436,7 @@ def test_hook_register_output_renderer_all_parameters(app_client): "planet_int", "on_earth", "state", - "city_id", + "_city_id", "_neighborhood", "tags", "complex_array", @@ -459,7 +459,7 @@ def test_hook_register_output_renderer_all_parameters(app_client): "", "", ], - "sql": "select pk, created, planet_int, on_earth, state, city_id, _neighborhood, tags, complex_array, distinct_some_null from facetable order by pk limit 51", + "sql": "select pk, created, planet_int, on_earth, state, _city_id, _neighborhood, tags, complex_array, distinct_some_null from facetable order by pk limit 51", "query_name": None, "database": "fixtures", "table": "facetable", @@ -525,13 +525,13 @@ def test_hook_register_output_renderer_can_render(app_client): "planet_int", "on_earth", "state", - "city_id", + "_city_id", "_neighborhood", "tags", "complex_array", "distinct_some_null", ], - "sql": "select pk, created, planet_int, on_earth, state, city_id, _neighborhood, tags, complex_array, distinct_some_null from facetable order by pk limit 51", + "sql": "select pk, created, planet_int, on_earth, state, _city_id, _neighborhood, tags, complex_array, distinct_some_null from facetable order by pk limit 51", "query_name": None, "database": "fixtures", "table": "facetable", From a37ee74891f14898d5810127c7ca3355e77ff57d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 29 Nov 2021 22:34:31 -0800 Subject: [PATCH 0036/1103] Correct link to _ prefix on row page, closes #1525 --- datasette/templates/row.html | 2 +- datasette/views/table.py | 10 +++++++++- tests/test_html.py | 28 ++++++++++++++++++++++++++++ 3 files changed, 38 insertions(+), 2 deletions(-) diff --git a/datasette/templates/row.html b/datasette/templates/row.html index 916980b6..c86e979d 100644 --- a/datasette/templates/row.html +++ b/datasette/templates/row.html @@ -38,7 +38,7 @@
    {% for other in foreign_key_tables %}
  • - + {{ "{:,}".format(other.count) }} row{% if other.count == 1 %}{% else %}s{% endif %} from {{ other.other_column }} in {{ other.other_table }}
  • diff --git a/datasette/views/table.py b/datasette/views/table.py index 9fc6afcf..f58b78f5 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -1120,5 +1120,13 @@ class RowView(RowTableShared): count = ( foreign_table_counts.get((fk["other_table"], fk["other_column"])) or 0 ) - foreign_key_tables.append({**fk, **{"count": count}}) + key = fk["other_column"] + if key.startswith("_"): + key += "__exact" + link = "{}?{}={}".format( + self.ds.urls.table(database, fk["other_table"]), + key, + ",".join(pk_values), + ) + foreign_key_tables.append({**fk, **{"count": count, "link": link}}) return foreign_key_tables diff --git a/tests/test_html.py b/tests/test_html.py index aaf7da09..a7cb105c 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -820,6 +820,34 @@ def test_row_html_no_primary_key(app_client): ] +@pytest.mark.parametrize( + "path,expected_text,expected_link", + ( + ( + "/fixtures/facet_cities/1", + "6 rows from _city_id in facetable", + "/fixtures/facetable?_city_id__exact=1", + ), + ( + "/fixtures/attraction_characteristic/2", + "3 rows from characteristic_id in roadside_attraction_characteristics", + "/fixtures/roadside_attraction_characteristics?characteristic_id=2", + ), + ), +) +def test_row_links_from_other_tables(app_client, path, expected_text, expected_link): + response = app_client.get(path) + assert response.status == 200 + soup = Soup(response.body, "html.parser") + h2 = soup.find("h2") + assert h2.text == "Links from other tables" + li = h2.findNext("ul").find("li") + text = re.sub(r"\s+", " ", li.text.strip()) + assert text == expected_text + link = li.find("a")["href"] + assert link == expected_link + + def test_table_html_compound_primary_key(app_client): response = app_client.get("/fixtures/compound_primary_key") assert response.status == 200 From 35b12746ba2bf9f254791bddac03d25b19be9b77 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 29 Nov 2021 22:37:22 -0800 Subject: [PATCH 0037/1103] Fixed CSV test I broke in #1525 --- tests/test_csv.py | 44 +++++++++++++++++++++++--------------------- 1 file changed, 23 insertions(+), 21 deletions(-) diff --git a/tests/test_csv.py b/tests/test_csv.py index 5902e9db..8749cd8b 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -24,7 +24,7 @@ world ) EXPECTED_TABLE_WITH_LABELS_CSV = """ -pk,created,planet_int,on_earth,state,city_id,city_id_label,_neighborhood,tags,complex_array,distinct_some_null +pk,created,planet_int,on_earth,state,_city_id,_city_id_label,_neighborhood,tags,complex_array,distinct_some_null 1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]",one 2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]",[],two 3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[], @@ -57,42 +57,42 @@ def test_table_csv(app_client): response = app_client.get("/fixtures/simple_primary_key.csv?_oh=1") assert response.status == 200 assert not response.headers.get("Access-Control-Allow-Origin") - assert "text/plain; charset=utf-8" == response.headers["content-type"] - assert EXPECTED_TABLE_CSV == response.text + assert response.headers["content-type"] == "text/plain; charset=utf-8" + assert response.text == EXPECTED_TABLE_CSV def test_table_csv_cors_headers(app_client_with_cors): response = app_client_with_cors.get("/fixtures/simple_primary_key.csv") assert response.status == 200 - assert "*" == response.headers["Access-Control-Allow-Origin"] + assert response.headers["Access-Control-Allow-Origin"] == "*" def 
test_table_csv_no_header(app_client): response = app_client.get("/fixtures/simple_primary_key.csv?_header=off") assert response.status == 200 assert not response.headers.get("Access-Control-Allow-Origin") - assert "text/plain; charset=utf-8" == response.headers["content-type"] - assert EXPECTED_TABLE_CSV.split("\r\n", 1)[1] == response.text + assert response.headers["content-type"] == "text/plain; charset=utf-8" + assert response.text == EXPECTED_TABLE_CSV.split("\r\n", 1)[1] def test_table_csv_with_labels(app_client): response = app_client.get("/fixtures/facetable.csv?_labels=1") assert response.status == 200 - assert "text/plain; charset=utf-8" == response.headers["content-type"] - assert EXPECTED_TABLE_WITH_LABELS_CSV == response.text + assert response.headers["content-type"] == "text/plain; charset=utf-8" + assert response.text == EXPECTED_TABLE_WITH_LABELS_CSV def test_table_csv_with_nullable_labels(app_client): response = app_client.get("/fixtures/foreign_key_references.csv?_labels=1") assert response.status == 200 - assert "text/plain; charset=utf-8" == response.headers["content-type"] - assert EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV == response.text + assert response.headers["content-type"] == "text/plain; charset=utf-8" + assert response.text == EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV def test_table_csv_blob_columns(app_client): response = app_client.get("/fixtures/binary_data.csv") assert response.status == 200 - assert "text/plain; charset=utf-8" == response.headers["content-type"] + assert response.headers["content-type"] == "text/plain; charset=utf-8" assert response.text == ( "rowid,data\r\n" "1,http://localhost/fixtures/binary_data/1.blob?_blob_column=data\r\n" @@ -104,7 +104,7 @@ def test_table_csv_blob_columns(app_client): def test_custom_sql_csv_blob_columns(app_client): response = app_client.get("/fixtures.csv?sql=select+rowid,+data+from+binary_data") assert response.status == 200 - assert "text/plain; charset=utf-8" == response.headers["content-type"] + assert response.headers["content-type"] == "text/plain; charset=utf-8" assert response.text == ( "rowid,data\r\n" '1,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_blob_column=data&_blob_hash=f3088978da8f9aea479ffc7f631370b968d2e855eeb172bea7f6c7a04262bb6d"\r\n' @@ -118,16 +118,18 @@ def test_custom_sql_csv(app_client): "/fixtures.csv?sql=select+content+from+simple_primary_key+limit+2" ) assert response.status == 200 - assert "text/plain; charset=utf-8" == response.headers["content-type"] - assert EXPECTED_CUSTOM_CSV == response.text + assert response.headers["content-type"] == "text/plain; charset=utf-8" + assert response.text == EXPECTED_CUSTOM_CSV def test_table_csv_download(app_client): response = app_client.get("/fixtures/simple_primary_key.csv?_dl=1") assert response.status == 200 - assert "text/csv; charset=utf-8" == response.headers["content-type"] - expected_disposition = 'attachment; filename="simple_primary_key.csv"' - assert expected_disposition == response.headers["content-disposition"] + assert response.headers["content-type"] == "text/csv; charset=utf-8" + assert ( + response.headers["content-disposition"] + == 'attachment; filename="simple_primary_key.csv"' + ) def test_csv_with_non_ascii_characters(app_client): @@ -135,8 +137,8 @@ def test_csv_with_non_ascii_characters(app_client): 
"/fixtures.csv?sql=select%0D%0A++%27%F0%9D%90%9C%F0%9D%90%A2%F0%9D%90%AD%F0%9D%90%A2%F0%9D%90%9E%F0%9D%90%AC%27+as+text%2C%0D%0A++1+as+number%0D%0Aunion%0D%0Aselect%0D%0A++%27bob%27+as+text%2C%0D%0A++2+as+number%0D%0Aorder+by%0D%0A++number" ) assert response.status == 200 - assert "text/plain; charset=utf-8" == response.headers["content-type"] - assert "text,number\r\n𝐜𝐢𝐭𝐢𝐞𝐬,1\r\nbob,2\r\n" == response.text + assert response.headers["content-type"] == "text/plain; charset=utf-8" + assert response.text == "text,number\r\n𝐜𝐢𝐭𝐢𝐞𝐬,1\r\nbob,2\r\n" def test_max_csv_mb(app_client_csv_max_mb_one): @@ -156,10 +158,10 @@ def test_max_csv_mb(app_client_csv_max_mb_one): def test_table_csv_stream(app_client): # Without _stream should return header + 100 rows: response = app_client.get("/fixtures/compound_three_primary_keys.csv?_size=max") - assert 101 == len([b for b in response.body.split(b"\r\n") if b]) + assert len([b for b in response.body.split(b"\r\n") if b]) == 101 # With _stream=1 should return header + 1001 rows response = app_client.get("/fixtures/compound_three_primary_keys.csv?_stream=1") - assert 1002 == len([b for b in response.body.split(b"\r\n") if b]) + assert len([b for b in response.body.split(b"\r\n") if b]) == 1002 def test_csv_trace(app_client_with_trace): From ca6624643842f4b80644b83c3f4ad7c2265c15d8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 29 Nov 2021 22:45:04 -0800 Subject: [PATCH 0038/1103] Updated JSON foreign key tables test for #1525 --- tests/test_api.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/test_api.py b/tests/test_api.py index 8b3fcd75..400dae7e 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1357,30 +1357,35 @@ def test_row_foreign_key_tables(app_client): "column": "id", "other_column": "foreign_key_with_blank_label", "count": 0, + "link": "/fixtures/foreign_key_references?foreign_key_with_blank_label=1", }, { "other_table": "foreign_key_references", "column": "id", "other_column": "foreign_key_with_label", "count": 1, + "link": "/fixtures/foreign_key_references?foreign_key_with_label=1", }, { "other_table": "complex_foreign_keys", "column": "id", "other_column": "f3", "count": 1, + "link": "/fixtures/complex_foreign_keys?f3=1", }, { "other_table": "complex_foreign_keys", "column": "id", "other_column": "f2", "count": 0, + "link": "/fixtures/complex_foreign_keys?f2=1", }, { "other_table": "complex_foreign_keys", "column": "id", "other_column": "f1", "count": 1, + "link": "/fixtures/complex_foreign_keys?f1=1", }, ] From 7c02be2ee94cc64b120cc58b7a72cd387031f287 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 29 Nov 2021 22:45:37 -0800 Subject: [PATCH 0039/1103] Release 0.59.4 Refs #1525, #1527 --- datasette/version.py | 2 +- docs/changelog.rst | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 0ba55573..9c85b763 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.59.3" +__version__ = "0.59.4" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 449ce412..9ddc2794 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,15 @@ Changelog ========= +.. _v0_59_4: + +0.59.4 (2021-11-29) +------------------- + +- Fixed bug where columns with a leading underscore could not be removed from the interactive filters list. 
(:issue:`1527`) +- Fixed bug where columns with a leading underscore were not correctly linked to by the "Links from other tables" interface on the row page. (:issue:`1525`) +- Upgraded dependencies ``aiofiles``, ``black`` and ``janus``. + .. _v0_59_3: 0.59.3 (2021-11-20) From 36b596e3832f6126bb0e4e90cf9257b9e9c9a55e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 7 Dec 2021 11:41:56 -0800 Subject: [PATCH 0040/1103] Framework :: Datasette Trove classifier --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 3cb657e3..9b5bab61 100644 --- a/setup.py +++ b/setup.py @@ -80,6 +80,7 @@ setup( tests_require=["datasette[test]"], classifiers=[ "Development Status :: 4 - Beta", + "Framework :: Datasette", "Intended Audience :: Developers", "Intended Audience :: Science/Research", "Intended Audience :: End Users/Desktop", From 737115ea14cd51ffb55dea886e6a684c148db2c9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 7 Dec 2021 12:03:42 -0800 Subject: [PATCH 0041/1103] Label column finder is now case-insensitive Closes #1544 --- datasette/database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/database.py b/datasette/database.py index 9f3bbddc..d1217e18 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -286,7 +286,7 @@ class Database: return explicit_label_column column_names = await self.execute_fn(lambda conn: table_columns(conn, table)) # Is there a name or title column? - name_or_title = [c for c in column_names if c in ("name", "title")] + name_or_title = [c for c in column_names if c.lower() in ("name", "title")] if name_or_title: return name_or_title[0] # If a table has two columns, one of which is ID, then label_column is the other one From 1876975e3b120298cec2ff14825260f4a19a0568 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 11 Dec 2021 19:06:45 -0800 Subject: [PATCH 0042/1103] Refactor table view HTML tests to test_table_html.py Refs #1518 --- tests/test_html.py | 1064 +------------------------------------- tests/test_table_html.py | 1045 +++++++++++++++++++++++++++++++++++++ tests/utils.py | 24 + 3 files changed, 1070 insertions(+), 1063 deletions(-) create mode 100644 tests/test_table_html.py create mode 100644 tests/utils.py diff --git a/tests/test_html.py b/tests/test_html.py index a7cb105c..bfe5c8f9 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -9,11 +9,11 @@ from .fixtures import ( # noqa make_app_client, METADATA, ) +from .utils import assert_footer_links, inner_html import json import pathlib import pytest import re -import textwrap import urllib.parse @@ -180,67 +180,6 @@ def test_row_strange_table_name_with_url_hash(app_client_with_hash): assert response.status == 200 -@pytest.mark.parametrize( - "path,expected_definition_sql", - [ - ( - "/fixtures/facet_cities", - """ -CREATE TABLE facet_cities ( - id integer primary key, - name text -); - """.strip(), - ), - ( - "/fixtures/compound_three_primary_keys", - """ -CREATE TABLE compound_three_primary_keys ( - pk1 varchar(30), - pk2 varchar(30), - pk3 varchar(30), - content text, - PRIMARY KEY (pk1, pk2, pk3) -); -CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_keys(content); - """.strip(), - ), - ], -) -def test_definition_sql(path, expected_definition_sql, app_client): - response = app_client.get(path) - pre = Soup(response.body, "html.parser").select_one("pre.wrapped-sql") - assert expected_definition_sql == pre.string - - -def test_table_cell_truncation(): - with 
make_app_client(settings={"truncate_cells_html": 5}) as client: - response = client.get("/fixtures/facetable") - assert response.status == 200 - table = Soup(response.body, "html.parser").find("table") - assert table["class"] == ["rows-and-columns"] - assert [ - "Missi…", - "Dogpa…", - "SOMA", - "Tende…", - "Berna…", - "Hayes…", - "Holly…", - "Downt…", - "Los F…", - "Korea…", - "Downt…", - "Greek…", - "Corkt…", - "Mexic…", - "Arcad…", - ] == [ - td.string - for td in table.findAll("td", {"class": "col-neighborhood-b352a7"}) - ] - - def test_row_page_does_not_truncate(): with make_app_client(settings={"truncate_cells_html": 5}) as client: response = client.get("/fixtures/facetable/1") @@ -253,343 +192,6 @@ def test_row_page_does_not_truncate(): ] -def test_add_filter_redirects(app_client): - filter_args = urllib.parse.urlencode( - {"_filter_column": "content", "_filter_op": "startswith", "_filter_value": "x"} - ) - path_base = "/fixtures/simple_primary_key" - path = path_base + "?" + filter_args - response = app_client.get(path) - assert response.status == 302 - assert response.headers["Location"].endswith("?content__startswith=x") - - # Adding a redirect to an existing query string: - path = path_base + "?foo=bar&" + filter_args - response = app_client.get(path) - assert response.status == 302 - assert response.headers["Location"].endswith("?foo=bar&content__startswith=x") - - # Test that op with a __x suffix overrides the filter value - path = ( - path_base - + "?" - + urllib.parse.urlencode( - { - "_filter_column": "content", - "_filter_op": "isnull__5", - "_filter_value": "x", - } - ) - ) - response = app_client.get(path) - assert response.status == 302 - assert response.headers["Location"].endswith("?content__isnull=5") - - -def test_existing_filter_redirects(app_client): - filter_args = { - "_filter_column_1": "name", - "_filter_op_1": "contains", - "_filter_value_1": "hello", - "_filter_column_2": "age", - "_filter_op_2": "gte", - "_filter_value_2": "22", - "_filter_column_3": "age", - "_filter_op_3": "lt", - "_filter_value_3": "30", - "_filter_column_4": "name", - "_filter_op_4": "contains", - "_filter_value_4": "world", - } - path_base = "/fixtures/simple_primary_key" - path = path_base + "?" + urllib.parse.urlencode(filter_args) - response = app_client.get(path) - assert response.status == 302 - assert_querystring_equal( - "name__contains=hello&age__gte=22&age__lt=30&name__contains=world", - response.headers["Location"].split("?")[1], - ) - - # Setting _filter_column_3 to empty string should remove *_3 entirely - filter_args["_filter_column_3"] = "" - path = path_base + "?" + urllib.parse.urlencode(filter_args) - response = app_client.get(path) - assert response.status == 302 - assert_querystring_equal( - "name__contains=hello&age__gte=22&name__contains=world", - response.headers["Location"].split("?")[1], - ) - - # ?_filter_op=exact should be removed if unaccompanied by _fiter_column - response = app_client.get(path_base + "?_filter_op=exact") - assert response.status == 302 - assert "?" 
not in response.headers["Location"] - - -def test_exact_parameter_results_in_correct_hidden_fields(app_client): - # https://github.com/simonw/datasette/issues/1527 - response = app_client.get( - "/fixtures/facetable?_facet=_neighborhood&_neighborhood__exact=Downtown" - ) - # In this case we should NOT have a hidden _neighborhood__exact=Downtown field - form = Soup(response.body, "html.parser").find("form") - hidden_inputs = { - input["name"]: input["value"] for input in form.select("input[type=hidden]") - } - assert hidden_inputs == {"_facet": "_neighborhood"} - - -def test_empty_search_parameter_gets_removed(app_client): - path_base = "/fixtures/simple_primary_key" - path = ( - path_base - + "?" - + urllib.parse.urlencode( - { - "_search": "", - "_filter_column": "name", - "_filter_op": "exact", - "_filter_value": "chidi", - } - ) - ) - response = app_client.get(path) - assert response.status == 302 - assert response.headers["Location"].endswith("?name__exact=chidi") - - -def test_searchable_view_persists_fts_table(app_client): - # The search form should persist ?_fts_table as a hidden field - response = app_client.get( - "/fixtures/searchable_view?_fts_table=searchable_fts&_fts_pk=pk" - ) - inputs = Soup(response.body, "html.parser").find("form").findAll("input") - hiddens = [i for i in inputs if i["type"] == "hidden"] - assert [("_fts_table", "searchable_fts"), ("_fts_pk", "pk")] == [ - (hidden["name"], hidden["value"]) for hidden in hiddens - ] - - -def test_sort_by_desc_redirects(app_client): - path_base = "/fixtures/sortable" - path = ( - path_base - + "?" - + urllib.parse.urlencode({"_sort": "sortable", "_sort_by_desc": "1"}) - ) - response = app_client.get(path) - assert response.status == 302 - assert response.headers["Location"].endswith("?_sort_desc=sortable") - - -def test_sort_links(app_client): - response = app_client.get("/fixtures/sortable?_sort=sortable") - assert response.status == 200 - ths = Soup(response.body, "html.parser").findAll("th") - attrs_and_link_attrs = [ - { - "attrs": th.attrs, - "a_href": (th.find("a")["href"] if th.find("a") else None), - } - for th in ths - ] - assert attrs_and_link_attrs == [ - { - "attrs": { - "class": ["col-Link"], - "scope": "col", - "data-column": "Link", - "data-column-type": "", - "data-column-not-null": "0", - "data-is-pk": "0", - }, - "a_href": None, - }, - { - "attrs": { - "class": ["col-pk1"], - "scope": "col", - "data-column": "pk1", - "data-column-type": "varchar(30)", - "data-column-not-null": "0", - "data-is-pk": "1", - }, - "a_href": None, - }, - { - "attrs": { - "class": ["col-pk2"], - "scope": "col", - "data-column": "pk2", - "data-column-type": "varchar(30)", - "data-column-not-null": "0", - "data-is-pk": "1", - }, - "a_href": None, - }, - { - "attrs": { - "class": ["col-content"], - "scope": "col", - "data-column": "content", - "data-column-type": "text", - "data-column-not-null": "0", - "data-is-pk": "0", - }, - "a_href": None, - }, - { - "attrs": { - "class": ["col-sortable"], - "scope": "col", - "data-column": "sortable", - "data-column-type": "integer", - "data-column-not-null": "0", - "data-is-pk": "0", - }, - "a_href": "/fixtures/sortable?_sort_desc=sortable", - }, - { - "attrs": { - "class": ["col-sortable_with_nulls"], - "scope": "col", - "data-column": "sortable_with_nulls", - "data-column-type": "real", - "data-column-not-null": "0", - "data-is-pk": "0", - }, - "a_href": "/fixtures/sortable?_sort=sortable_with_nulls", - }, - { - "attrs": { - "class": ["col-sortable_with_nulls_2"], - "scope": "col", - 
"data-column": "sortable_with_nulls_2", - "data-column-type": "real", - "data-column-not-null": "0", - "data-is-pk": "0", - }, - "a_href": "/fixtures/sortable?_sort=sortable_with_nulls_2", - }, - { - "attrs": { - "class": ["col-text"], - "scope": "col", - "data-column": "text", - "data-column-type": "text", - "data-column-not-null": "0", - "data-is-pk": "0", - }, - "a_href": "/fixtures/sortable?_sort=text", - }, - ] - - -def test_facet_display(app_client): - response = app_client.get( - "/fixtures/facetable?_facet=planet_int&_facet=_city_id&_facet=on_earth" - ) - assert response.status == 200 - soup = Soup(response.body, "html.parser") - divs = soup.find("div", {"class": "facet-results"}).findAll("div") - actual = [] - for div in divs: - actual.append( - { - "name": div.find("strong").text.split()[0], - "items": [ - { - "name": a.text, - "qs": a["href"].split("?")[-1], - "count": int(str(a.parent).split("")[1].split("<")[0]), - } - for a in div.find("ul").findAll("a") - ], - } - ) - assert actual == [ - { - "name": "_city_id", - "items": [ - { - "name": "San Francisco", - "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&_city_id__exact=1", - "count": 6, - }, - { - "name": "Los Angeles", - "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&_city_id__exact=2", - "count": 4, - }, - { - "name": "Detroit", - "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&_city_id__exact=3", - "count": 4, - }, - { - "name": "Memnonia", - "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&_city_id__exact=4", - "count": 1, - }, - ], - }, - { - "name": "planet_int", - "items": [ - { - "name": "1", - "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&planet_int=1", - "count": 14, - }, - { - "name": "2", - "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&planet_int=2", - "count": 1, - }, - ], - }, - { - "name": "on_earth", - "items": [ - { - "name": "1", - "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&on_earth=1", - "count": 14, - }, - { - "name": "0", - "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&on_earth=0", - "count": 1, - }, - ], - }, - ] - - -def test_facets_persist_through_filter_form(app_client): - response = app_client.get( - "/fixtures/facetable?_facet=planet_int&_facet=_city_id&_facet_array=tags" - ) - assert response.status == 200 - inputs = Soup(response.body, "html.parser").find("form").findAll("input") - hiddens = [i for i in inputs if i["type"] == "hidden"] - assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == [ - ("_facet", "planet_int"), - ("_facet", "_city_id"), - ("_facet_array", "tags"), - ] - - -def test_next_does_not_persist_in_hidden_field(app_client): - response = app_client.get("/fixtures/searchable?_size=1&_next=1") - assert response.status == 200 - inputs = Soup(response.body, "html.parser").find("form").findAll("input") - hiddens = [i for i in inputs if i["type"] == "hidden"] - assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == [ - ("_size", "1"), - ] - - @pytest.mark.parametrize( "path,expected_classes", [ @@ -646,74 +248,6 @@ def test_templates_considered(app_client, path, expected_considered): assert f"" in response.text -def test_table_html_simple_primary_key(app_client): - response = app_client.get("/fixtures/simple_primary_key?_size=3") - assert response.status == 200 - table = Soup(response.body, "html.parser").find("table") - assert table["class"] == ["rows-and-columns"] - ths = table.findAll("th") - assert "id\xa0▼" == ths[0].find("a").string.strip() - for expected_col, th in 
zip(("content",), ths[1:]): - a = th.find("a") - assert expected_col == a.string - assert a["href"].endswith(f"/simple_primary_key?_size=3&_sort={expected_col}") - assert ["nofollow"] == a["rel"] - assert [ - [ - '1', - 'hello', - ], - [ - '2', - 'world', - ], - [ - '3', - '\xa0', - ], - ] == [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] - - -def test_table_csv_json_export_interface(app_client): - response = app_client.get("/fixtures/simple_primary_key?id__gt=2") - assert response.status == 200 - # The links at the top of the page - links = ( - Soup(response.body, "html.parser") - .find("p", {"class": "export-links"}) - .findAll("a") - ) - actual = [l["href"] for l in links] - expected = [ - "/fixtures/simple_primary_key.json?id__gt=2", - "/fixtures/simple_primary_key.testall?id__gt=2", - "/fixtures/simple_primary_key.testnone?id__gt=2", - "/fixtures/simple_primary_key.testresponse?id__gt=2", - "/fixtures/simple_primary_key.csv?id__gt=2&_size=max", - "#export", - ] - assert expected == actual - # And the advaced export box at the bottom: - div = Soup(response.body, "html.parser").find("div", {"class": "advanced-export"}) - json_links = [a["href"] for a in div.find("p").findAll("a")] - assert [ - "/fixtures/simple_primary_key.json?id__gt=2", - "/fixtures/simple_primary_key.json?id__gt=2&_shape=array", - "/fixtures/simple_primary_key.json?id__gt=2&_shape=array&_nl=on", - "/fixtures/simple_primary_key.json?id__gt=2&_shape=object", - ] == json_links - # And the CSV form - form = div.find("form") - assert form["action"].endswith("/simple_primary_key.csv") - inputs = [str(input) for input in form.findAll("input")] - assert [ - '', - '', - '', - '', - ] == inputs - - def test_row_json_export_link(app_client): response = app_client.get("/fixtures/simple_primary_key/1") assert response.status == 200 @@ -727,26 +261,6 @@ def test_query_json_csv_export_links(app_client): assert 'CSV' in response.text -def test_csv_json_export_links_include_labels_if_foreign_keys(app_client): - response = app_client.get("/fixtures/facetable") - assert response.status == 200 - links = ( - Soup(response.body, "html.parser") - .find("p", {"class": "export-links"}) - .findAll("a") - ) - actual = [l["href"] for l in links] - expected = [ - "/fixtures/facetable.json?_labels=on", - "/fixtures/facetable.testall?_labels=on", - "/fixtures/facetable.testnone?_labels=on", - "/fixtures/facetable.testresponse?_labels=on", - "/fixtures/facetable.csv?_labels=on&_size=max", - "#export", - ] - assert expected == actual - - def test_row_html_simple_primary_key(app_client): response = app_client.get("/fixtures/simple_primary_key/1") assert response.status == 200 @@ -760,45 +274,6 @@ def test_row_html_simple_primary_key(app_client): ] == [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] -def test_table_not_exists(app_client): - assert "Table not found: blah" in app_client.get("/fixtures/blah").text - - -def test_table_html_no_primary_key(app_client): - response = app_client.get("/fixtures/no_primary_key") - assert response.status == 200 - table = Soup(response.body, "html.parser").find("table") - # We have disabled sorting for this table using metadata.json - assert ["content", "a", "b", "c"] == [ - th.string.strip() for th in table.select("thead th")[2:] - ] - expected = [ - [ - '{}'.format( - i, i - ), - f'{i}', - f'{i}', - f'a{i}', - f'b{i}', - f'c{i}', - ] - for i in range(1, 51) - ] - assert expected == [ - [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") - ] - - 
-def test_rowid_sortable_no_primary_key(app_client): - response = app_client.get("/fixtures/no_primary_key") - assert response.status == 200 - table = Soup(response.body, "html.parser").find("table") - assert table["class"] == ["rows-and-columns"] - ths = table.findAll("th") - assert "rowid\xa0▼" == ths[1].find("a").string.strip() - - def test_row_html_no_primary_key(app_client): response = app_client.get("/fixtures/no_primary_key/1") assert response.status == 200 @@ -848,143 +323,6 @@ def test_row_links_from_other_tables(app_client, path, expected_text, expected_l assert link == expected_link -def test_table_html_compound_primary_key(app_client): - response = app_client.get("/fixtures/compound_primary_key") - assert response.status == 200 - table = Soup(response.body, "html.parser").find("table") - ths = table.findAll("th") - assert "Link" == ths[0].string.strip() - for expected_col, th in zip(("pk1", "pk2", "content"), ths[1:]): - a = th.find("a") - assert expected_col == a.string - assert th["class"] == [f"col-{expected_col}"] - assert a["href"].endswith(f"/compound_primary_key?_sort={expected_col}") - expected = [ - [ - 'a,b', - 'a', - 'b', - 'c', - ] - ] - assert expected == [ - [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") - ] - - -def test_table_html_foreign_key_links(app_client): - response = app_client.get("/fixtures/foreign_key_references") - assert response.status == 200 - table = Soup(response.body, "html.parser").find("table") - actual = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] - assert actual == [ - [ - '1', - 'hello\xa01', - '-\xa03', - '1', - 'a', - 'b', - ], - [ - '2', - '\xa0', - '\xa0', - '\xa0', - '\xa0', - '\xa0', - ], - ] - - -def test_table_html_foreign_key_facets(app_client): - response = app_client.get( - "/fixtures/foreign_key_references?_facet=foreign_key_with_blank_label" - ) - assert response.status == 200 - assert ( - '
  • ' - "- 1
  • " - ) in response.text - - -def test_table_html_disable_foreign_key_links_with_labels(app_client): - response = app_client.get("/fixtures/foreign_key_references?_labels=off&_size=1") - assert response.status == 200 - table = Soup(response.body, "html.parser").find("table") - actual = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] - assert actual == [ - [ - '1', - '1', - '3', - '1', - 'a', - 'b', - ] - ] - - -def test_table_html_foreign_key_custom_label_column(app_client): - response = app_client.get("/fixtures/custom_foreign_key_label") - assert response.status == 200 - table = Soup(response.body, "html.parser").find("table") - expected = [ - [ - '1', - 'world2\xa01', - ] - ] - assert expected == [ - [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") - ] - - -@pytest.mark.parametrize( - "path,expected_column_options", - [ - ("/fixtures/infinity", ["- column -", "rowid", "value"]), - ( - "/fixtures/primary_key_multiple_columns", - ["- column -", "id", "content", "content2"], - ), - ("/fixtures/compound_primary_key", ["- column -", "pk1", "pk2", "content"]), - ], -) -def test_table_html_filter_form_column_options( - path, expected_column_options, app_client -): - response = app_client.get(path) - assert response.status == 200 - form = Soup(response.body, "html.parser").find("form") - column_options = [ - o.attrs.get("value") or o.string - for o in form.select("select[name=_filter_column] option") - ] - assert expected_column_options == column_options - - -def test_table_html_filter_form_still_shows_nocol_columns(app_client): - # https://github.com/simonw/datasette/issues/1503 - response = app_client.get("/fixtures/sortable?_nocol=sortable") - assert response.status == 200 - form = Soup(response.body, "html.parser").find("form") - assert [ - o.string - for o in form.select("select[name='_filter_column']")[0].select("option") - ] == [ - "- column -", - "pk1", - "pk2", - "content", - "sortable_with_nulls", - "sortable_with_nulls_2", - "text", - # Moved to the end because it is no longer returned by the query: - "sortable", - ] - - def test_row_html_compound_primary_key(app_client): response = app_client.get("/fixtures/compound_primary_key/a,b") assert response.status == 200 @@ -1004,58 +342,6 @@ def test_row_html_compound_primary_key(app_client): ] -def test_compound_primary_key_with_foreign_key_references(app_client): - # e.g. 
a many-to-many table with a compound primary key on the two columns - response = app_client.get("/fixtures/searchable_tags") - assert response.status == 200 - table = Soup(response.body, "html.parser").find("table") - expected = [ - [ - '1,feline', - '1\xa01', - 'feline', - ], - [ - '2,canine', - '2\xa02', - 'canine', - ], - ] - assert expected == [ - [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") - ] - - -def test_view_html(app_client): - response = app_client.get("/fixtures/simple_view?_size=3") - assert response.status == 200 - table = Soup(response.body, "html.parser").find("table") - ths = table.select("thead th") - assert 2 == len(ths) - assert ths[0].find("a") is not None - assert ths[0].find("a")["href"].endswith("/simple_view?_size=3&_sort=content") - assert ths[0].find("a").string.strip() == "content" - assert ths[1].find("a") is None - assert ths[1].string.strip() == "upper_content" - expected = [ - [ - 'hello', - 'HELLO', - ], - [ - 'world', - 'WORLD', - ], - [ - '\xa0', - '\xa0', - ], - ] - assert expected == [ - [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") - ] - - def test_index_metadata(app_client): response = app_client.get("/") assert response.status == 200 @@ -1094,20 +380,6 @@ def test_database_metadata_with_custom_sql(app_client): assert_footer_links(soup) -def test_table_metadata(app_client): - response = app_client.get("/fixtures/simple_primary_key") - assert response.status == 200 - soup = Soup(response.body, "html.parser") - # Page title should be custom and should be HTML escaped - assert "This <em>HTML</em> is escaped" == inner_html(soup.find("h1")) - # Description should be custom and NOT escaped (we used description_html) - assert "Simple primary key" == inner_html( - soup.find("div", {"class": "metadata-description"}) - ) - # The source/license should be inherited - assert_footer_links(soup) - - def test_database_download_for_immutable(): with make_app_client(is_immutable=True) as client: assert not client.ds.databases["fixtures"].is_mutable @@ -1169,36 +441,6 @@ def test_allow_sql_off(): assert b"View and edit SQL" not in response.body -def assert_querystring_equal(expected, actual): - assert sorted(expected.split("&")) == sorted(actual.split("&")) - - -def assert_footer_links(soup): - footer_links = soup.find("footer").findAll("a") - assert 4 == len(footer_links) - datasette_link, license_link, source_link, about_link = footer_links - assert "Datasette" == datasette_link.text.strip() - assert "tests/fixtures.py" == source_link.text.strip() - assert "Apache License 2.0" == license_link.text.strip() - assert "About Datasette" == about_link.text.strip() - assert "https://datasette.io/" == datasette_link["href"] - assert ( - "https://github.com/simonw/datasette/blob/main/tests/fixtures.py" - == source_link["href"] - ) - assert ( - "https://github.com/simonw/datasette/blob/main/LICENSE" == license_link["href"] - ) - assert "https://github.com/simonw/datasette" == about_link["href"] - - -def inner_html(soup): - html = str(soup) - # This includes the parent tag - so remove that - inner_html = html.split(">", 1)[1].rsplit("<", 1)[0] - return inner_html.strip() - - @pytest.mark.parametrize("path", ["/404", "/fixtures/404"]) def test_404(app_client, path): response = app_client.get(path) @@ -1249,31 +491,6 @@ def test_canned_query_with_custom_metadata(app_client): ) -@pytest.mark.parametrize( - "path,has_object,has_stream,has_expand", - [ - ("/fixtures/no_primary_key", False, True, False), - 
("/fixtures/complex_foreign_keys", True, False, True), - ], -) -def test_advanced_export_box(app_client, path, has_object, has_stream, has_expand): - response = app_client.get(path) - assert response.status == 200 - soup = Soup(response.body, "html.parser") - # JSON shape options - expected_json_shapes = ["default", "array", "newline-delimited"] - if has_object: - expected_json_shapes.append("object") - div = soup.find("div", {"class": "advanced-export"}) - assert expected_json_shapes == [a.text for a in div.find("p").findAll("a")] - # "stream all rows" option - if has_stream: - assert "stream all rows" in str(div) - # "expand labels" option - if has_expand: - assert "expand labels" in str(div) - - def test_urlify_custom_queries(app_client): path = "/fixtures?" + urllib.parse.urlencode( {"sql": "select ('https://twitter.com/' || 'simonw') as user_url;"} @@ -1376,91 +593,6 @@ def test_canned_query_show_hide_metadata_option( assert '1', - '1', - '<Binary:\xa07\xa0bytes>', - ], - [ - '2', - '2', - '<Binary:\xa07\xa0bytes>', - ], - [ - '3', - '3', - '\xa0', - ], - ] - assert expected_tds == [ - [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") - ] - - def test_binary_data_display_in_query(app_client): response = app_client.get("/fixtures?sql=select+*+from+binary_data") assert response.status == 200 @@ -1525,19 +657,6 @@ def test_metadata_json_html(app_client): assert METADATA == json.loads(pre.text) -def test_custom_table_include(): - with make_app_client( - template_dir=str(pathlib.Path(__file__).parent / "test_templates") - ) as client: - response = client.get("/fixtures/complex_foreign_keys") - assert response.status == 200 - assert ( - '
    ' - '1 - 2 - hello 1' - "
    " - ) == str(Soup(response.text, "html.parser").select_one("div.custom-table-row")) - - @pytest.mark.parametrize( "path", [ @@ -1584,68 +703,6 @@ def test_debug_context_includes_extra_template_vars(): assert "scope_path" in response.text -def test_metadata_sort(app_client): - response = app_client.get("/fixtures/facet_cities") - assert response.status == 200 - table = Soup(response.body, "html.parser").find("table") - assert table["class"] == ["rows-and-columns"] - ths = table.findAll("th") - assert ["id", "name\xa0▼"] == [th.find("a").string.strip() for th in ths] - rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] - expected = [ - [ - '3', - 'Detroit', - ], - [ - '2', - 'Los Angeles', - ], - [ - '4', - 'Memnonia', - ], - [ - '1', - 'San Francisco', - ], - ] - assert expected == rows - # Make sure you can reverse that sort order - response = app_client.get("/fixtures/facet_cities?_sort_desc=name") - assert response.status == 200 - table = Soup(response.body, "html.parser").find("table") - rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] - assert list(reversed(expected)) == rows - - -def test_metadata_sort_desc(app_client): - response = app_client.get("/fixtures/attraction_characteristic") - assert response.status == 200 - table = Soup(response.body, "html.parser").find("table") - assert table["class"] == ["rows-and-columns"] - ths = table.findAll("th") - assert ["pk\xa0▲", "name"] == [th.find("a").string.strip() for th in ths] - rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] - expected = [ - [ - '2', - 'Paranormal', - ], - [ - '1', - 'Museum', - ], - ] - assert expected == rows - # Make sure you can reverse that sort order - response = app_client.get("/fixtures/attraction_characteristic?_sort=pk") - assert response.status == 200 - table = Soup(response.body, "html.parser").find("table") - rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] - assert list(reversed(expected)) == rows - - @pytest.mark.parametrize( "path", [ @@ -1787,126 +844,7 @@ def test_navigation_menu_links( ), f"{link} found but should not have been in nav menu" -@pytest.mark.parametrize( - "max_returned_rows,path,expected_num_facets,expected_ellipses,expected_ellipses_url", - ( - ( - 5, - # Default should show 2 facets - "/fixtures/facetable?_facet=_neighborhood", - 2, - True, - "/fixtures/facetable?_facet=_neighborhood&_facet_size=max", - ), - # _facet_size above max_returned_rows should show max_returned_rows (5) - ( - 5, - "/fixtures/facetable?_facet=_neighborhood&_facet_size=50", - 5, - True, - "/fixtures/facetable?_facet=_neighborhood&_facet_size=max", - ), - # If max_returned_rows is high enough, should return all - ( - 20, - "/fixtures/facetable?_facet=_neighborhood&_facet_size=max", - 14, - False, - None, - ), - # If num facets > max_returned_rows, show ... 
without a link - # _facet_size above max_returned_rows should show max_returned_rows (5) - ( - 5, - "/fixtures/facetable?_facet=_neighborhood&_facet_size=max", - 5, - True, - None, - ), - ), -) -def test_facet_more_links( - max_returned_rows, - path, - expected_num_facets, - expected_ellipses, - expected_ellipses_url, -): - with make_app_client( - settings={"max_returned_rows": max_returned_rows, "default_facet_size": 2} - ) as client: - response = client.get(path) - soup = Soup(response.body, "html.parser") - lis = soup.select("#facet-neighborhood-b352a7 ul li:not(.facet-truncated)") - facet_truncated = soup.select_one(".facet-truncated") - assert len(lis) == expected_num_facets - if not expected_ellipses: - assert facet_truncated is None - else: - if expected_ellipses_url: - assert facet_truncated.find("a")["href"] == expected_ellipses_url - else: - assert facet_truncated.find("a") is None - - -def test_unavailable_table_does_not_break_sort_relationships(): - # https://github.com/simonw/datasette/issues/1305 - with make_app_client( - metadata={ - "databases": { - "fixtures": {"tables": {"foreign_key_references": {"allow": False}}} - } - } - ) as client: - response = client.get("/?_sort=relationships") - assert response.status == 200 - - def test_trace_correctly_escaped(app_client): response = app_client.get("/fixtures?sql=select+'
<h1>
    Hello'&_trace=1") assert "select '
<h1>
    Hello" not in response.text assert "select '<h1>Hello" in response.text - - -def test_column_metadata(app_client): - response = app_client.get("/fixtures/roadside_attractions") - soup = Soup(response.body, "html.parser") - dl = soup.find("dl") - assert [(dt.text, dt.nextSibling.text) for dt in dl.findAll("dt")] == [ - ("name", "The name of the attraction"), - ("address", "The street address for the attraction"), - ] - assert ( - soup.select("th[data-column=name]")[0]["data-column-description"] - == "The name of the attraction" - ) - assert ( - soup.select("th[data-column=address]")[0]["data-column-description"] - == "The street address for the attraction" - ) - - -@pytest.mark.parametrize("use_facet_size_max", (True, False)) -def test_facet_total_shown_if_facet_max_size(use_facet_size_max): - # https://github.com/simonw/datasette/issues/1423 - with make_app_client(settings={"max_returned_rows": 100}) as client: - path = "/fixtures/sortable?_facet=content&_facet=pk1" - if use_facet_size_max: - path += "&_facet_size=max" - response = client.get(path) - assert response.status == 200 - fragments = ( - '>100', - '8', - ) - for fragment in fragments: - if use_facet_size_max: - assert fragment in response.text - else: - assert fragment not in response.text - - -def test_sort_rowid_with_next(app_client): - # https://github.com/simonw/datasette/issues/1470 - response = app_client.get("/fixtures/binary_data?_size=1&_next=1&_sort=rowid") - assert response.status == 200 diff --git a/tests/test_table_html.py b/tests/test_table_html.py new file mode 100644 index 00000000..2fbb53bd --- /dev/null +++ b/tests/test_table_html.py @@ -0,0 +1,1045 @@ +from bs4 import BeautifulSoup as Soup +from .fixtures import ( # noqa + app_client, + make_app_client, +) +import pathlib +import pytest +import urllib.parse +from .utils import assert_footer_links, inner_html + + +@pytest.mark.parametrize( + "path,expected_definition_sql", + [ + ( + "/fixtures/facet_cities", + """ +CREATE TABLE facet_cities ( + id integer primary key, + name text +); + """.strip(), + ), + ( + "/fixtures/compound_three_primary_keys", + """ +CREATE TABLE compound_three_primary_keys ( + pk1 varchar(30), + pk2 varchar(30), + pk3 varchar(30), + content text, + PRIMARY KEY (pk1, pk2, pk3) +); +CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_keys(content); + """.strip(), + ), + ], +) +def test_table_definition_sql(path, expected_definition_sql, app_client): + response = app_client.get(path) + pre = Soup(response.body, "html.parser").select_one("pre.wrapped-sql") + assert expected_definition_sql == pre.string + + +def test_table_cell_truncation(): + with make_app_client(settings={"truncate_cells_html": 5}) as client: + response = client.get("/fixtures/facetable") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + assert table["class"] == ["rows-and-columns"] + assert [ + "Missi…", + "Dogpa…", + "SOMA", + "Tende…", + "Berna…", + "Hayes…", + "Holly…", + "Downt…", + "Los F…", + "Korea…", + "Downt…", + "Greek…", + "Corkt…", + "Mexic…", + "Arcad…", + ] == [ + td.string + for td in table.findAll("td", {"class": "col-neighborhood-b352a7"}) + ] + + +def test_add_filter_redirects(app_client): + filter_args = urllib.parse.urlencode( + {"_filter_column": "content", "_filter_op": "startswith", "_filter_value": "x"} + ) + path_base = "/fixtures/simple_primary_key" + path = path_base + "?" 
+ filter_args + response = app_client.get(path) + assert response.status == 302 + assert response.headers["Location"].endswith("?content__startswith=x") + + # Adding a redirect to an existing query string: + path = path_base + "?foo=bar&" + filter_args + response = app_client.get(path) + assert response.status == 302 + assert response.headers["Location"].endswith("?foo=bar&content__startswith=x") + + # Test that op with a __x suffix overrides the filter value + path = ( + path_base + + "?" + + urllib.parse.urlencode( + { + "_filter_column": "content", + "_filter_op": "isnull__5", + "_filter_value": "x", + } + ) + ) + response = app_client.get(path) + assert response.status == 302 + assert response.headers["Location"].endswith("?content__isnull=5") + + +def test_existing_filter_redirects(app_client): + filter_args = { + "_filter_column_1": "name", + "_filter_op_1": "contains", + "_filter_value_1": "hello", + "_filter_column_2": "age", + "_filter_op_2": "gte", + "_filter_value_2": "22", + "_filter_column_3": "age", + "_filter_op_3": "lt", + "_filter_value_3": "30", + "_filter_column_4": "name", + "_filter_op_4": "contains", + "_filter_value_4": "world", + } + path_base = "/fixtures/simple_primary_key" + path = path_base + "?" + urllib.parse.urlencode(filter_args) + response = app_client.get(path) + assert response.status == 302 + assert_querystring_equal( + "name__contains=hello&age__gte=22&age__lt=30&name__contains=world", + response.headers["Location"].split("?")[1], + ) + + # Setting _filter_column_3 to empty string should remove *_3 entirely + filter_args["_filter_column_3"] = "" + path = path_base + "?" + urllib.parse.urlencode(filter_args) + response = app_client.get(path) + assert response.status == 302 + assert_querystring_equal( + "name__contains=hello&age__gte=22&name__contains=world", + response.headers["Location"].split("?")[1], + ) + + # ?_filter_op=exact should be removed if unaccompanied by _fiter_column + response = app_client.get(path_base + "?_filter_op=exact") + assert response.status == 302 + assert "?" not in response.headers["Location"] + + +def test_exact_parameter_results_in_correct_hidden_fields(app_client): + # https://github.com/simonw/datasette/issues/1527 + response = app_client.get( + "/fixtures/facetable?_facet=_neighborhood&_neighborhood__exact=Downtown" + ) + # In this case we should NOT have a hidden _neighborhood__exact=Downtown field + form = Soup(response.body, "html.parser").find("form") + hidden_inputs = { + input["name"]: input["value"] for input in form.select("input[type=hidden]") + } + assert hidden_inputs == {"_facet": "_neighborhood"} + + +def test_empty_search_parameter_gets_removed(app_client): + path_base = "/fixtures/simple_primary_key" + path = ( + path_base + + "?" 
+ + urllib.parse.urlencode( + { + "_search": "", + "_filter_column": "name", + "_filter_op": "exact", + "_filter_value": "chidi", + } + ) + ) + response = app_client.get(path) + assert response.status == 302 + assert response.headers["Location"].endswith("?name__exact=chidi") + + +def test_searchable_view_persists_fts_table(app_client): + # The search form should persist ?_fts_table as a hidden field + response = app_client.get( + "/fixtures/searchable_view?_fts_table=searchable_fts&_fts_pk=pk" + ) + inputs = Soup(response.body, "html.parser").find("form").findAll("input") + hiddens = [i for i in inputs if i["type"] == "hidden"] + assert [("_fts_table", "searchable_fts"), ("_fts_pk", "pk")] == [ + (hidden["name"], hidden["value"]) for hidden in hiddens + ] + + +def test_sort_by_desc_redirects(app_client): + path_base = "/fixtures/sortable" + path = ( + path_base + + "?" + + urllib.parse.urlencode({"_sort": "sortable", "_sort_by_desc": "1"}) + ) + response = app_client.get(path) + assert response.status == 302 + assert response.headers["Location"].endswith("?_sort_desc=sortable") + + +def test_sort_links(app_client): + response = app_client.get("/fixtures/sortable?_sort=sortable") + assert response.status == 200 + ths = Soup(response.body, "html.parser").findAll("th") + attrs_and_link_attrs = [ + { + "attrs": th.attrs, + "a_href": (th.find("a")["href"] if th.find("a") else None), + } + for th in ths + ] + assert attrs_and_link_attrs == [ + { + "attrs": { + "class": ["col-Link"], + "scope": "col", + "data-column": "Link", + "data-column-type": "", + "data-column-not-null": "0", + "data-is-pk": "0", + }, + "a_href": None, + }, + { + "attrs": { + "class": ["col-pk1"], + "scope": "col", + "data-column": "pk1", + "data-column-type": "varchar(30)", + "data-column-not-null": "0", + "data-is-pk": "1", + }, + "a_href": None, + }, + { + "attrs": { + "class": ["col-pk2"], + "scope": "col", + "data-column": "pk2", + "data-column-type": "varchar(30)", + "data-column-not-null": "0", + "data-is-pk": "1", + }, + "a_href": None, + }, + { + "attrs": { + "class": ["col-content"], + "scope": "col", + "data-column": "content", + "data-column-type": "text", + "data-column-not-null": "0", + "data-is-pk": "0", + }, + "a_href": None, + }, + { + "attrs": { + "class": ["col-sortable"], + "scope": "col", + "data-column": "sortable", + "data-column-type": "integer", + "data-column-not-null": "0", + "data-is-pk": "0", + }, + "a_href": "/fixtures/sortable?_sort_desc=sortable", + }, + { + "attrs": { + "class": ["col-sortable_with_nulls"], + "scope": "col", + "data-column": "sortable_with_nulls", + "data-column-type": "real", + "data-column-not-null": "0", + "data-is-pk": "0", + }, + "a_href": "/fixtures/sortable?_sort=sortable_with_nulls", + }, + { + "attrs": { + "class": ["col-sortable_with_nulls_2"], + "scope": "col", + "data-column": "sortable_with_nulls_2", + "data-column-type": "real", + "data-column-not-null": "0", + "data-is-pk": "0", + }, + "a_href": "/fixtures/sortable?_sort=sortable_with_nulls_2", + }, + { + "attrs": { + "class": ["col-text"], + "scope": "col", + "data-column": "text", + "data-column-type": "text", + "data-column-not-null": "0", + "data-is-pk": "0", + }, + "a_href": "/fixtures/sortable?_sort=text", + }, + ] + + +def test_facet_display(app_client): + response = app_client.get( + "/fixtures/facetable?_facet=planet_int&_facet=_city_id&_facet=on_earth" + ) + assert response.status == 200 + soup = Soup(response.body, "html.parser") + divs = soup.find("div", {"class": "facet-results"}).findAll("div") 
+ actual = [] + for div in divs: + actual.append( + { + "name": div.find("strong").text.split()[0], + "items": [ + { + "name": a.text, + "qs": a["href"].split("?")[-1], + "count": int(str(a.parent).split("")[1].split("<")[0]), + } + for a in div.find("ul").findAll("a") + ], + } + ) + assert actual == [ + { + "name": "_city_id", + "items": [ + { + "name": "San Francisco", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&_city_id__exact=1", + "count": 6, + }, + { + "name": "Los Angeles", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&_city_id__exact=2", + "count": 4, + }, + { + "name": "Detroit", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&_city_id__exact=3", + "count": 4, + }, + { + "name": "Memnonia", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&_city_id__exact=4", + "count": 1, + }, + ], + }, + { + "name": "planet_int", + "items": [ + { + "name": "1", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&planet_int=1", + "count": 14, + }, + { + "name": "2", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&planet_int=2", + "count": 1, + }, + ], + }, + { + "name": "on_earth", + "items": [ + { + "name": "1", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&on_earth=1", + "count": 14, + }, + { + "name": "0", + "qs": "_facet=planet_int&_facet=_city_id&_facet=on_earth&on_earth=0", + "count": 1, + }, + ], + }, + ] + + +def test_facets_persist_through_filter_form(app_client): + response = app_client.get( + "/fixtures/facetable?_facet=planet_int&_facet=_city_id&_facet_array=tags" + ) + assert response.status == 200 + inputs = Soup(response.body, "html.parser").find("form").findAll("input") + hiddens = [i for i in inputs if i["type"] == "hidden"] + assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == [ + ("_facet", "planet_int"), + ("_facet", "_city_id"), + ("_facet_array", "tags"), + ] + + +def test_next_does_not_persist_in_hidden_field(app_client): + response = app_client.get("/fixtures/searchable?_size=1&_next=1") + assert response.status == 200 + inputs = Soup(response.body, "html.parser").find("form").findAll("input") + hiddens = [i for i in inputs if i["type"] == "hidden"] + assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == [ + ("_size", "1"), + ] + + +def test_table_html_simple_primary_key(app_client): + response = app_client.get("/fixtures/simple_primary_key?_size=3") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + assert table["class"] == ["rows-and-columns"] + ths = table.findAll("th") + assert "id\xa0▼" == ths[0].find("a").string.strip() + for expected_col, th in zip(("content",), ths[1:]): + a = th.find("a") + assert expected_col == a.string + assert a["href"].endswith(f"/simple_primary_key?_size=3&_sort={expected_col}") + assert ["nofollow"] == a["rel"] + assert [ + [ + '1', + 'hello', + ], + [ + '2', + 'world', + ], + [ + '3', + '\xa0', + ], + ] == [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] + + +def test_table_csv_json_export_interface(app_client): + response = app_client.get("/fixtures/simple_primary_key?id__gt=2") + assert response.status == 200 + # The links at the top of the page + links = ( + Soup(response.body, "html.parser") + .find("p", {"class": "export-links"}) + .findAll("a") + ) + actual = [l["href"] for l in links] + expected = [ + "/fixtures/simple_primary_key.json?id__gt=2", + "/fixtures/simple_primary_key.testall?id__gt=2", + "/fixtures/simple_primary_key.testnone?id__gt=2", + 
"/fixtures/simple_primary_key.testresponse?id__gt=2", + "/fixtures/simple_primary_key.csv?id__gt=2&_size=max", + "#export", + ] + assert expected == actual + # And the advaced export box at the bottom: + div = Soup(response.body, "html.parser").find("div", {"class": "advanced-export"}) + json_links = [a["href"] for a in div.find("p").findAll("a")] + assert [ + "/fixtures/simple_primary_key.json?id__gt=2", + "/fixtures/simple_primary_key.json?id__gt=2&_shape=array", + "/fixtures/simple_primary_key.json?id__gt=2&_shape=array&_nl=on", + "/fixtures/simple_primary_key.json?id__gt=2&_shape=object", + ] == json_links + # And the CSV form + form = div.find("form") + assert form["action"].endswith("/simple_primary_key.csv") + inputs = [str(input) for input in form.findAll("input")] + assert [ + '', + '', + '', + '', + ] == inputs + + +def test_csv_json_export_links_include_labels_if_foreign_keys(app_client): + response = app_client.get("/fixtures/facetable") + assert response.status == 200 + links = ( + Soup(response.body, "html.parser") + .find("p", {"class": "export-links"}) + .findAll("a") + ) + actual = [l["href"] for l in links] + expected = [ + "/fixtures/facetable.json?_labels=on", + "/fixtures/facetable.testall?_labels=on", + "/fixtures/facetable.testnone?_labels=on", + "/fixtures/facetable.testresponse?_labels=on", + "/fixtures/facetable.csv?_labels=on&_size=max", + "#export", + ] + assert expected == actual + + +def test_table_not_exists(app_client): + assert "Table not found: blah" in app_client.get("/fixtures/blah").text + + +def test_table_html_no_primary_key(app_client): + response = app_client.get("/fixtures/no_primary_key") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + # We have disabled sorting for this table using metadata.json + assert ["content", "a", "b", "c"] == [ + th.string.strip() for th in table.select("thead th")[2:] + ] + expected = [ + [ + '{}'.format( + i, i + ), + f'{i}', + f'{i}', + f'a{i}', + f'b{i}', + f'c{i}', + ] + for i in range(1, 51) + ] + assert expected == [ + [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") + ] + + +def test_rowid_sortable_no_primary_key(app_client): + response = app_client.get("/fixtures/no_primary_key") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + assert table["class"] == ["rows-and-columns"] + ths = table.findAll("th") + assert "rowid\xa0▼" == ths[1].find("a").string.strip() + + +def test_table_html_compound_primary_key(app_client): + response = app_client.get("/fixtures/compound_primary_key") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + ths = table.findAll("th") + assert "Link" == ths[0].string.strip() + for expected_col, th in zip(("pk1", "pk2", "content"), ths[1:]): + a = th.find("a") + assert expected_col == a.string + assert th["class"] == [f"col-{expected_col}"] + assert a["href"].endswith(f"/compound_primary_key?_sort={expected_col}") + expected = [ + [ + 'a,b', + 'a', + 'b', + 'c', + ] + ] + assert expected == [ + [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") + ] + + +def test_table_html_foreign_key_links(app_client): + response = app_client.get("/fixtures/foreign_key_references") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + actual = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] + assert actual == [ + [ + '1', + 'hello\xa01', + '-\xa03', + '1', + 'a', + 'b', + ], + [ + 
'2', + '\xa0', + '\xa0', + '\xa0', + '\xa0', + '\xa0', + ], + ] + + +def test_table_html_foreign_key_facets(app_client): + response = app_client.get( + "/fixtures/foreign_key_references?_facet=foreign_key_with_blank_label" + ) + assert response.status == 200 + assert ( + '
  • ' + "- 1
  • " + ) in response.text + + +def test_table_html_disable_foreign_key_links_with_labels(app_client): + response = app_client.get("/fixtures/foreign_key_references?_labels=off&_size=1") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + actual = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] + assert actual == [ + [ + '1', + '1', + '3', + '1', + 'a', + 'b', + ] + ] + + +def test_table_html_foreign_key_custom_label_column(app_client): + response = app_client.get("/fixtures/custom_foreign_key_label") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + expected = [ + [ + '1', + 'world2\xa01', + ] + ] + assert expected == [ + [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") + ] + + +@pytest.mark.parametrize( + "path,expected_column_options", + [ + ("/fixtures/infinity", ["- column -", "rowid", "value"]), + ( + "/fixtures/primary_key_multiple_columns", + ["- column -", "id", "content", "content2"], + ), + ("/fixtures/compound_primary_key", ["- column -", "pk1", "pk2", "content"]), + ], +) +def test_table_html_filter_form_column_options( + path, expected_column_options, app_client +): + response = app_client.get(path) + assert response.status == 200 + form = Soup(response.body, "html.parser").find("form") + column_options = [ + o.attrs.get("value") or o.string + for o in form.select("select[name=_filter_column] option") + ] + assert expected_column_options == column_options + + +def test_table_html_filter_form_still_shows_nocol_columns(app_client): + # https://github.com/simonw/datasette/issues/1503 + response = app_client.get("/fixtures/sortable?_nocol=sortable") + assert response.status == 200 + form = Soup(response.body, "html.parser").find("form") + assert [ + o.string + for o in form.select("select[name='_filter_column']")[0].select("option") + ] == [ + "- column -", + "pk1", + "pk2", + "content", + "sortable_with_nulls", + "sortable_with_nulls_2", + "text", + # Moved to the end because it is no longer returned by the query: + "sortable", + ] + + +def test_compound_primary_key_with_foreign_key_references(app_client): + # e.g. 
a many-to-many table with a compound primary key on the two columns + response = app_client.get("/fixtures/searchable_tags") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + expected = [ + [ + '1,feline', + '1\xa01', + 'feline', + ], + [ + '2,canine', + '2\xa02', + 'canine', + ], + ] + assert expected == [ + [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") + ] + + +def test_view_html(app_client): + response = app_client.get("/fixtures/simple_view?_size=3") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + ths = table.select("thead th") + assert 2 == len(ths) + assert ths[0].find("a") is not None + assert ths[0].find("a")["href"].endswith("/simple_view?_size=3&_sort=content") + assert ths[0].find("a").string.strip() == "content" + assert ths[1].find("a") is None + assert ths[1].string.strip() == "upper_content" + expected = [ + [ + 'hello', + 'HELLO', + ], + [ + 'world', + 'WORLD', + ], + [ + '\xa0', + '\xa0', + ], + ] + assert expected == [ + [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") + ] + + +def test_table_metadata(app_client): + response = app_client.get("/fixtures/simple_primary_key") + assert response.status == 200 + soup = Soup(response.body, "html.parser") + # Page title should be custom and should be HTML escaped + assert "This <em>HTML</em> is escaped" == inner_html(soup.find("h1")) + # Description should be custom and NOT escaped (we used description_html) + assert "Simple primary key" == inner_html( + soup.find("div", {"class": "metadata-description"}) + ) + # The source/license should be inherited + assert_footer_links(soup) + + +@pytest.mark.parametrize( + "path,has_object,has_stream,has_expand", + [ + ("/fixtures/no_primary_key", False, True, False), + ("/fixtures/complex_foreign_keys", True, False, True), + ], +) +def test_advanced_export_box(app_client, path, has_object, has_stream, has_expand): + response = app_client.get(path) + assert response.status == 200 + soup = Soup(response.body, "html.parser") + # JSON shape options + expected_json_shapes = ["default", "array", "newline-delimited"] + if has_object: + expected_json_shapes.append("object") + div = soup.find("div", {"class": "advanced-export"}) + assert expected_json_shapes == [a.text for a in div.find("p").findAll("a")] + # "stream all rows" option + if has_stream: + assert "stream all rows" in str(div) + # "expand labels" option + if has_expand: + assert "expand labels" in str(div) + + +def test_extra_where_clauses(app_client): + response = app_client.get( + "/fixtures/facetable?_where=_neighborhood='Dogpatch'&_where=_city_id=1" + ) + soup = Soup(response.body, "html.parser") + div = soup.select(".extra-wheres")[0] + assert "2 extra where clauses" == div.find("h3").text + hrefs = [a["href"] for a in div.findAll("a")] + assert [ + "/fixtures/facetable?_where=_city_id%3D1", + "/fixtures/facetable?_where=_neighborhood%3D%27Dogpatch%27", + ] == hrefs + # These should also be persisted as hidden fields + inputs = soup.find("form").findAll("input") + hiddens = [i for i in inputs if i["type"] == "hidden"] + assert [("_where", "_neighborhood='Dogpatch'"), ("_where", "_city_id=1")] == [ + (hidden["name"], hidden["value"]) for hidden in hiddens + ] + + +@pytest.mark.parametrize( + "path,expected_hidden", + [ + ("/fixtures/facetable?_size=10", [("_size", "10")]), + ( + "/fixtures/facetable?_size=10&_ignore=1&_ignore=2", + [ + ("_size", "10"), + ("_ignore", "1"), + ("_ignore", "2"), + ], + ), 
+ ], +) +def test_other_hidden_form_fields(app_client, path, expected_hidden): + response = app_client.get(path) + soup = Soup(response.body, "html.parser") + inputs = soup.find("form").findAll("input") + hiddens = [i for i in inputs if i["type"] == "hidden"] + assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == expected_hidden + + +@pytest.mark.parametrize( + "path,expected_hidden", + [ + ("/fixtures/searchable?_search=terry", []), + ("/fixtures/searchable?_sort=text2", []), + ("/fixtures/searchable?_sort=text2&_where=1", [("_where", "1")]), + ], +) +def test_search_and_sort_fields_not_duplicated(app_client, path, expected_hidden): + # https://github.com/simonw/datasette/issues/1214 + response = app_client.get(path) + soup = Soup(response.body, "html.parser") + inputs = soup.find("form").findAll("input") + hiddens = [i for i in inputs if i["type"] == "hidden"] + assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == expected_hidden + + +def test_binary_data_display_in_table(app_client): + response = app_client.get("/fixtures/binary_data") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + expected_tds = [ + [ + '1', + '1', + '<Binary:\xa07\xa0bytes>', + ], + [ + '2', + '2', + '<Binary:\xa07\xa0bytes>', + ], + [ + '3', + '3', + '\xa0', + ], + ] + assert expected_tds == [ + [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") + ] + + +def test_custom_table_include(): + with make_app_client( + template_dir=str(pathlib.Path(__file__).parent / "test_templates") + ) as client: + response = client.get("/fixtures/complex_foreign_keys") + assert response.status == 200 + assert ( + '
    ' + '1 - 2 - hello 1' + "
    " + ) == str(Soup(response.text, "html.parser").select_one("div.custom-table-row")) + + +def test_metadata_sort(app_client): + response = app_client.get("/fixtures/facet_cities") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + assert table["class"] == ["rows-and-columns"] + ths = table.findAll("th") + assert ["id", "name\xa0▼"] == [th.find("a").string.strip() for th in ths] + rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] + expected = [ + [ + '3', + 'Detroit', + ], + [ + '2', + 'Los Angeles', + ], + [ + '4', + 'Memnonia', + ], + [ + '1', + 'San Francisco', + ], + ] + assert expected == rows + # Make sure you can reverse that sort order + response = app_client.get("/fixtures/facet_cities?_sort_desc=name") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] + assert list(reversed(expected)) == rows + + +def test_metadata_sort_desc(app_client): + response = app_client.get("/fixtures/attraction_characteristic") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + assert table["class"] == ["rows-and-columns"] + ths = table.findAll("th") + assert ["pk\xa0▲", "name"] == [th.find("a").string.strip() for th in ths] + rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] + expected = [ + [ + '2', + 'Paranormal', + ], + [ + '1', + 'Museum', + ], + ] + assert expected == rows + # Make sure you can reverse that sort order + response = app_client.get("/fixtures/attraction_characteristic?_sort=pk") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] + assert list(reversed(expected)) == rows + + +@pytest.mark.parametrize( + "max_returned_rows,path,expected_num_facets,expected_ellipses,expected_ellipses_url", + ( + ( + 5, + # Default should show 2 facets + "/fixtures/facetable?_facet=_neighborhood", + 2, + True, + "/fixtures/facetable?_facet=_neighborhood&_facet_size=max", + ), + # _facet_size above max_returned_rows should show max_returned_rows (5) + ( + 5, + "/fixtures/facetable?_facet=_neighborhood&_facet_size=50", + 5, + True, + "/fixtures/facetable?_facet=_neighborhood&_facet_size=max", + ), + # If max_returned_rows is high enough, should return all + ( + 20, + "/fixtures/facetable?_facet=_neighborhood&_facet_size=max", + 14, + False, + None, + ), + # If num facets > max_returned_rows, show ... 
without a link + # _facet_size above max_returned_rows should show max_returned_rows (5) + ( + 5, + "/fixtures/facetable?_facet=_neighborhood&_facet_size=max", + 5, + True, + None, + ), + ), +) +def test_facet_more_links( + max_returned_rows, + path, + expected_num_facets, + expected_ellipses, + expected_ellipses_url, +): + with make_app_client( + settings={"max_returned_rows": max_returned_rows, "default_facet_size": 2} + ) as client: + response = client.get(path) + soup = Soup(response.body, "html.parser") + lis = soup.select("#facet-neighborhood-b352a7 ul li:not(.facet-truncated)") + facet_truncated = soup.select_one(".facet-truncated") + assert len(lis) == expected_num_facets + if not expected_ellipses: + assert facet_truncated is None + else: + if expected_ellipses_url: + assert facet_truncated.find("a")["href"] == expected_ellipses_url + else: + assert facet_truncated.find("a") is None + + +def test_unavailable_table_does_not_break_sort_relationships(): + # https://github.com/simonw/datasette/issues/1305 + with make_app_client( + metadata={ + "databases": { + "fixtures": {"tables": {"foreign_key_references": {"allow": False}}} + } + } + ) as client: + response = client.get("/?_sort=relationships") + assert response.status == 200 + + +def test_column_metadata(app_client): + response = app_client.get("/fixtures/roadside_attractions") + soup = Soup(response.body, "html.parser") + dl = soup.find("dl") + assert [(dt.text, dt.nextSibling.text) for dt in dl.findAll("dt")] == [ + ("name", "The name of the attraction"), + ("address", "The street address for the attraction"), + ] + assert ( + soup.select("th[data-column=name]")[0]["data-column-description"] + == "The name of the attraction" + ) + assert ( + soup.select("th[data-column=address]")[0]["data-column-description"] + == "The street address for the attraction" + ) + + +@pytest.mark.parametrize("use_facet_size_max", (True, False)) +def test_facet_total_shown_if_facet_max_size(use_facet_size_max): + # https://github.com/simonw/datasette/issues/1423 + with make_app_client(settings={"max_returned_rows": 100}) as client: + path = "/fixtures/sortable?_facet=content&_facet=pk1" + if use_facet_size_max: + path += "&_facet_size=max" + response = client.get(path) + assert response.status == 200 + fragments = ( + '>100', + '8', + ) + for fragment in fragments: + if use_facet_size_max: + assert fragment in response.text + else: + assert fragment not in response.text + + +def test_sort_rowid_with_next(app_client): + # https://github.com/simonw/datasette/issues/1470 + response = app_client.get("/fixtures/binary_data?_size=1&_next=1&_sort=rowid") + assert response.status == 200 + + +def assert_querystring_equal(expected, actual): + assert sorted(expected.split("&")) == sorted(actual.split("&")) diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 00000000..972300db --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,24 @@ +def assert_footer_links(soup): + footer_links = soup.find("footer").findAll("a") + assert 4 == len(footer_links) + datasette_link, license_link, source_link, about_link = footer_links + assert "Datasette" == datasette_link.text.strip() + assert "tests/fixtures.py" == source_link.text.strip() + assert "Apache License 2.0" == license_link.text.strip() + assert "About Datasette" == about_link.text.strip() + assert "https://datasette.io/" == datasette_link["href"] + assert ( + "https://github.com/simonw/datasette/blob/main/tests/fixtures.py" + == source_link["href"] + ) + assert ( + 
"https://github.com/simonw/datasette/blob/main/LICENSE" == license_link["href"] + ) + assert "https://github.com/simonw/datasette" == about_link["href"] + + +def inner_html(soup): + html = str(soup) + # This includes the parent tag - so remove that + inner_html = html.split(">", 1)[1].rsplit("<", 1)[0] + return inner_html.strip() From 492f9835aa7e90540dd0c6324282b109f73df71b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 11 Dec 2021 19:07:19 -0800 Subject: [PATCH 0043/1103] Refactor table view API tests to test_table_api.py Refs #1518 --- tests/test_api.py | 1215 +-------------------------------------- tests/test_table_api.py | 1206 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 1214 insertions(+), 1207 deletions(-) create mode 100644 tests/test_table_api.py diff --git a/tests/test_api.py b/tests/test_api.py index 400dae7e..df9e0fc4 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,29 +1,22 @@ from datasette.app import Datasette from datasette.plugins import DEFAULT_PLUGINS -from datasette.utils import detect_json1 -from datasette.utils.sqlite import sqlite3, sqlite_version, supports_table_xinfo +from datasette.utils.sqlite import supports_table_xinfo from datasette.version import __version__ from .fixtures import ( # noqa app_client, app_client_no_files, - app_client_with_hash, - app_client_shorter_time_limit, - app_client_larger_cache_size, - app_client_returned_rows_matches_page_size, - app_client_two_attached_databases, - app_client_two_attached_databases_one_immutable, - app_client_conflicting_database_names, - app_client_with_cors, app_client_with_dot, - app_client_with_trace, + app_client_shorter_time_limit, + app_client_two_attached_databases_one_immutable, + app_client_larger_cache_size, + app_client_with_cors, + app_client_two_attached_databases, + app_client_conflicting_database_names, app_client_immutable_and_inspect_file, - generate_compound_rows, - generate_sortable_rows, make_app_client, EXPECTED_PLUGINS, METADATA, ) -import json import pathlib import pytest import sys @@ -680,649 +673,6 @@ def test_invalid_custom_sql(app_client): assert "Statement must be a SELECT" == response.json["error"] -def test_table_json(app_client): - response = app_client.get("/fixtures/simple_primary_key.json?_shape=objects") - assert response.status == 200 - data = response.json - assert ( - data["query"]["sql"] - == "select id, content from simple_primary_key order by id limit 51" - ) - assert data["query"]["params"] == {} - assert data["rows"] == [ - {"id": "1", "content": "hello"}, - {"id": "2", "content": "world"}, - {"id": "3", "content": ""}, - {"id": "4", "content": "RENDER_CELL_DEMO"}, - {"id": "5", "content": "RENDER_CELL_ASYNC"}, - ] - - -def test_table_not_exists_json(app_client): - assert { - "ok": False, - "error": "Table not found: blah", - "status": 404, - "title": None, - } == app_client.get("/fixtures/blah.json").json - - -def test_jsono_redirects_to_shape_objects(app_client_with_hash): - response_1 = app_client_with_hash.get("/fixtures/simple_primary_key.jsono") - response = app_client_with_hash.get(response_1.headers["Location"]) - assert response.status == 302 - assert response.headers["Location"].endswith("?_shape=objects") - - -def test_table_shape_arrays(app_client): - response = app_client.get("/fixtures/simple_primary_key.json?_shape=arrays") - assert [ - ["1", "hello"], - ["2", "world"], - ["3", ""], - ["4", "RENDER_CELL_DEMO"], - ["5", "RENDER_CELL_ASYNC"], - ] == response.json["rows"] - - -def test_table_shape_arrayfirst(app_client): - 
response = app_client.get( - "/fixtures.json?" - + urllib.parse.urlencode( - { - "sql": "select content from simple_primary_key order by id", - "_shape": "arrayfirst", - } - ) - ) - assert [ - "hello", - "world", - "", - "RENDER_CELL_DEMO", - "RENDER_CELL_ASYNC", - ] == response.json - - -def test_table_shape_objects(app_client): - response = app_client.get("/fixtures/simple_primary_key.json?_shape=objects") - assert [ - {"id": "1", "content": "hello"}, - {"id": "2", "content": "world"}, - {"id": "3", "content": ""}, - {"id": "4", "content": "RENDER_CELL_DEMO"}, - {"id": "5", "content": "RENDER_CELL_ASYNC"}, - ] == response.json["rows"] - - -def test_table_shape_array(app_client): - response = app_client.get("/fixtures/simple_primary_key.json?_shape=array") - assert [ - {"id": "1", "content": "hello"}, - {"id": "2", "content": "world"}, - {"id": "3", "content": ""}, - {"id": "4", "content": "RENDER_CELL_DEMO"}, - {"id": "5", "content": "RENDER_CELL_ASYNC"}, - ] == response.json - - -def test_table_shape_array_nl(app_client): - response = app_client.get("/fixtures/simple_primary_key.json?_shape=array&_nl=on") - lines = response.text.split("\n") - results = [json.loads(line) for line in lines] - assert [ - {"id": "1", "content": "hello"}, - {"id": "2", "content": "world"}, - {"id": "3", "content": ""}, - {"id": "4", "content": "RENDER_CELL_DEMO"}, - {"id": "5", "content": "RENDER_CELL_ASYNC"}, - ] == results - - -def test_table_shape_invalid(app_client): - response = app_client.get("/fixtures/simple_primary_key.json?_shape=invalid") - assert { - "ok": False, - "error": "Invalid _shape: invalid", - "status": 400, - "title": None, - } == response.json - - -def test_table_shape_object(app_client): - response = app_client.get("/fixtures/simple_primary_key.json?_shape=object") - assert { - "1": {"id": "1", "content": "hello"}, - "2": {"id": "2", "content": "world"}, - "3": {"id": "3", "content": ""}, - "4": {"id": "4", "content": "RENDER_CELL_DEMO"}, - "5": {"id": "5", "content": "RENDER_CELL_ASYNC"}, - } == response.json - - -def test_table_shape_object_compound_primary_key(app_client): - response = app_client.get("/fixtures/compound_primary_key.json?_shape=object") - assert {"a,b": {"pk1": "a", "pk2": "b", "content": "c"}} == response.json - - -def test_table_with_slashes_in_name(app_client): - response = app_client.get( - "/fixtures/table%2Fwith%2Fslashes.csv?_shape=objects&_format=json" - ) - assert response.status == 200 - data = response.json - assert data["rows"] == [{"pk": "3", "content": "hey"}] - - -def test_table_with_reserved_word_name(app_client): - response = app_client.get("/fixtures/select.json?_shape=objects") - assert response.status == 200 - data = response.json - assert data["rows"] == [ - { - "rowid": 1, - "group": "group", - "having": "having", - "and": "and", - "json": '{"href": "http://example.com/", "label":"Example"}', - } - ] - - -@pytest.mark.parametrize( - "path,expected_rows,expected_pages", - [ - ("/fixtures/no_primary_key.json", 201, 5), - ("/fixtures/paginated_view.json", 201, 9), - ("/fixtures/no_primary_key.json?_size=25", 201, 9), - ("/fixtures/paginated_view.json?_size=50", 201, 5), - ("/fixtures/paginated_view.json?_size=max", 201, 3), - ("/fixtures/123_starts_with_digits.json", 0, 1), - # Ensure faceting doesn't break pagination: - ("/fixtures/compound_three_primary_keys.json?_facet=pk1", 1001, 21), - # Paginating while sorted by an expanded foreign key should work - ( - 
"/fixtures/roadside_attraction_characteristics.json?_size=2&_sort=attraction_id&_labels=on", - 5, - 3, - ), - ], -) -def test_paginate_tables_and_views(app_client, path, expected_rows, expected_pages): - fetched = [] - count = 0 - while path: - response = app_client.get(path) - assert 200 == response.status - count += 1 - fetched.extend(response.json["rows"]) - path = response.json["next_url"] - if path: - assert urllib.parse.urlencode({"_next": response.json["next"]}) in path - path = path.replace("http://localhost", "") - assert count < 30, "Possible infinite loop detected" - - assert expected_rows == len(fetched) - assert expected_pages == count - - -@pytest.mark.parametrize( - "path,expected_error", - [ - ("/fixtures/no_primary_key.json?_size=-4", "_size must be a positive integer"), - ("/fixtures/no_primary_key.json?_size=dog", "_size must be a positive integer"), - ("/fixtures/no_primary_key.json?_size=1001", "_size must be <= 100"), - ], -) -def test_validate_page_size(app_client, path, expected_error): - response = app_client.get(path) - assert expected_error == response.json["error"] - assert 400 == response.status - - -def test_page_size_zero(app_client): - """For _size=0 we return the counts, empty rows and no continuation token""" - response = app_client.get("/fixtures/no_primary_key.json?_size=0") - assert 200 == response.status - assert [] == response.json["rows"] - assert 201 == response.json["filtered_table_rows_count"] - assert None is response.json["next"] - assert None is response.json["next_url"] - - -def test_paginate_compound_keys(app_client): - fetched = [] - path = "/fixtures/compound_three_primary_keys.json?_shape=objects" - page = 0 - while path: - page += 1 - response = app_client.get(path) - fetched.extend(response.json["rows"]) - path = response.json["next_url"] - if path: - path = path.replace("http://localhost", "") - assert page < 100 - assert 1001 == len(fetched) - assert 21 == page - # Should be correctly ordered - contents = [f["content"] for f in fetched] - expected = [r[3] for r in generate_compound_rows(1001)] - assert expected == contents - - -def test_paginate_compound_keys_with_extra_filters(app_client): - fetched = [] - path = ( - "/fixtures/compound_three_primary_keys.json?content__contains=d&_shape=objects" - ) - page = 0 - while path: - page += 1 - assert page < 100 - response = app_client.get(path) - fetched.extend(response.json["rows"]) - path = response.json["next_url"] - if path: - path = path.replace("http://localhost", "") - assert 2 == page - expected = [r[3] for r in generate_compound_rows(1001) if "d" in r[3]] - assert expected == [f["content"] for f in fetched] - - -@pytest.mark.parametrize( - "query_string,sort_key,human_description_en", - [ - ("_sort=sortable", lambda row: row["sortable"], "sorted by sortable"), - ( - "_sort_desc=sortable", - lambda row: -row["sortable"], - "sorted by sortable descending", - ), - ( - "_sort=sortable_with_nulls", - lambda row: ( - 1 if row["sortable_with_nulls"] is not None else 0, - row["sortable_with_nulls"], - ), - "sorted by sortable_with_nulls", - ), - ( - "_sort_desc=sortable_with_nulls", - lambda row: ( - 1 if row["sortable_with_nulls"] is None else 0, - -row["sortable_with_nulls"] - if row["sortable_with_nulls"] is not None - else 0, - row["content"], - ), - "sorted by sortable_with_nulls descending", - ), - # text column contains '$null' - ensure it doesn't confuse pagination: - ("_sort=text", lambda row: row["text"], "sorted by text"), - ], -) -def test_sortable(app_client, query_string, 
sort_key, human_description_en): - path = f"/fixtures/sortable.json?_shape=objects&{query_string}" - fetched = [] - page = 0 - while path: - page += 1 - assert page < 100 - response = app_client.get(path) - assert human_description_en == response.json["human_description_en"] - fetched.extend(response.json["rows"]) - path = response.json["next_url"] - if path: - path = path.replace("http://localhost", "") - assert 5 == page - expected = list(generate_sortable_rows(201)) - expected.sort(key=sort_key) - assert [r["content"] for r in expected] == [r["content"] for r in fetched] - - -def test_sortable_and_filtered(app_client): - path = ( - "/fixtures/sortable.json" - "?content__contains=d&_sort_desc=sortable&_shape=objects" - ) - response = app_client.get(path) - fetched = response.json["rows"] - assert ( - 'where content contains "d" sorted by sortable descending' - == response.json["human_description_en"] - ) - expected = [row for row in generate_sortable_rows(201) if "d" in row["content"]] - assert len(expected) == response.json["filtered_table_rows_count"] - expected.sort(key=lambda row: -row["sortable"]) - assert [r["content"] for r in expected] == [r["content"] for r in fetched] - - -def test_sortable_argument_errors(app_client): - response = app_client.get("/fixtures/sortable.json?_sort=badcolumn") - assert "Cannot sort table by badcolumn" == response.json["error"] - response = app_client.get("/fixtures/sortable.json?_sort_desc=badcolumn2") - assert "Cannot sort table by badcolumn2" == response.json["error"] - response = app_client.get( - "/fixtures/sortable.json?_sort=sortable_with_nulls&_sort_desc=sortable" - ) - assert "Cannot use _sort and _sort_desc at the same time" == response.json["error"] - - -def test_sortable_columns_metadata(app_client): - response = app_client.get("/fixtures/sortable.json?_sort=content") - assert "Cannot sort table by content" == response.json["error"] - # no_primary_key has ALL sort options disabled - for column in ("content", "a", "b", "c"): - response = app_client.get(f"/fixtures/sortable.json?_sort={column}") - assert f"Cannot sort table by {column}" == response.json["error"] - - -@pytest.mark.parametrize( - "path,expected_rows", - [ - ( - "/fixtures/searchable.json?_search=dog", - [ - [1, "barry cat", "terry dog", "panther"], - [2, "terry dog", "sara weasel", "puma"], - ], - ), - ( - # Special keyword shouldn't break FTS query - "/fixtures/searchable.json?_search=AND", - [], - ), - ( - # Without _searchmode=raw this should return no results - "/fixtures/searchable.json?_search=te*+AND+do*", - [], - ), - ( - # _searchmode=raw - "/fixtures/searchable.json?_search=te*+AND+do*&_searchmode=raw", - [ - [1, "barry cat", "terry dog", "panther"], - [2, "terry dog", "sara weasel", "puma"], - ], - ), - ( - # _searchmode=raw combined with _search_COLUMN - "/fixtures/searchable.json?_search_text2=te*&_searchmode=raw", - [ - [1, "barry cat", "terry dog", "panther"], - ], - ), - ( - "/fixtures/searchable.json?_search=weasel", - [[2, "terry dog", "sara weasel", "puma"]], - ), - ( - "/fixtures/searchable.json?_search_text2=dog", - [[1, "barry cat", "terry dog", "panther"]], - ), - ( - "/fixtures/searchable.json?_search_name%20with%20.%20and%20spaces=panther", - [[1, "barry cat", "terry dog", "panther"]], - ), - ], -) -def test_searchable(app_client, path, expected_rows): - response = app_client.get(path) - assert expected_rows == response.json["rows"] - - -_SEARCHMODE_RAW_RESULTS = [ - [1, "barry cat", "terry dog", "panther"], - [2, "terry dog", "sara weasel", "puma"], 
-] - - -@pytest.mark.parametrize( - "table_metadata,querystring,expected_rows", - [ - ( - {}, - "_search=te*+AND+do*", - [], - ), - ( - {"searchmode": "raw"}, - "_search=te*+AND+do*", - _SEARCHMODE_RAW_RESULTS, - ), - ( - {}, - "_search=te*+AND+do*&_searchmode=raw", - _SEARCHMODE_RAW_RESULTS, - ), - # Can be over-ridden with _searchmode=escaped - ( - {"searchmode": "raw"}, - "_search=te*+AND+do*&_searchmode=escaped", - [], - ), - ], -) -def test_searchmode(table_metadata, querystring, expected_rows): - with make_app_client( - metadata={"databases": {"fixtures": {"tables": {"searchable": table_metadata}}}} - ) as client: - response = client.get("/fixtures/searchable.json?" + querystring) - assert expected_rows == response.json["rows"] - - -@pytest.mark.parametrize( - "path,expected_rows", - [ - ( - "/fixtures/searchable_view_configured_by_metadata.json?_search=weasel", - [[2, "terry dog", "sara weasel", "puma"]], - ), - # This should return all results because search is not configured: - ( - "/fixtures/searchable_view.json?_search=weasel", - [ - [1, "barry cat", "terry dog", "panther"], - [2, "terry dog", "sara weasel", "puma"], - ], - ), - ( - "/fixtures/searchable_view.json?_search=weasel&_fts_table=searchable_fts&_fts_pk=pk", - [[2, "terry dog", "sara weasel", "puma"]], - ), - ], -) -def test_searchable_views(app_client, path, expected_rows): - response = app_client.get(path) - assert expected_rows == response.json["rows"] - - -def test_searchable_invalid_column(app_client): - response = app_client.get("/fixtures/searchable.json?_search_invalid=x") - assert 400 == response.status - assert { - "ok": False, - "error": "Cannot search by that column", - "status": 400, - "title": None, - } == response.json - - -@pytest.mark.parametrize( - "path,expected_rows", - [ - ("/fixtures/simple_primary_key.json?content=hello", [["1", "hello"]]), - ( - "/fixtures/simple_primary_key.json?content__contains=o", - [ - ["1", "hello"], - ["2", "world"], - ["4", "RENDER_CELL_DEMO"], - ], - ), - ("/fixtures/simple_primary_key.json?content__exact=", [["3", ""]]), - ( - "/fixtures/simple_primary_key.json?content__not=world", - [ - ["1", "hello"], - ["3", ""], - ["4", "RENDER_CELL_DEMO"], - ["5", "RENDER_CELL_ASYNC"], - ], - ), - ], -) -def test_table_filter_queries(app_client, path, expected_rows): - response = app_client.get(path) - assert expected_rows == response.json["rows"] - - -def test_table_filter_queries_multiple_of_same_type(app_client): - response = app_client.get( - "/fixtures/simple_primary_key.json?content__not=world&content__not=hello" - ) - assert [ - ["3", ""], - ["4", "RENDER_CELL_DEMO"], - ["5", "RENDER_CELL_ASYNC"], - ] == response.json["rows"] - - -@pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module") -def test_table_filter_json_arraycontains(app_client): - response = app_client.get("/fixtures/facetable.json?tags__arraycontains=tag1") - assert response.json["rows"] == [ - [ - 1, - "2019-01-14 08:00:00", - 1, - 1, - "CA", - 1, - "Mission", - '["tag1", "tag2"]', - '[{"foo": "bar"}]', - "one", - ], - [ - 2, - "2019-01-14 08:00:00", - 1, - 1, - "CA", - 1, - "Dogpatch", - '["tag1", "tag3"]', - "[]", - "two", - ], - ] - - -@pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module") -def test_table_filter_json_arraynotcontains(app_client): - response = app_client.get( - "/fixtures/facetable.json?tags__arraynotcontains=tag3&tags__not=[]" - ) - assert response.json["rows"] == [ - [ - 1, - "2019-01-14 08:00:00", - 1, - 1, - "CA", - 1, - "Mission", - 
'["tag1", "tag2"]', - '[{"foo": "bar"}]', - "one", - ] - ] - - -def test_table_filter_extra_where(app_client): - response = app_client.get( - "/fixtures/facetable.json?_where=_neighborhood='Dogpatch'" - ) - assert [ - [ - 2, - "2019-01-14 08:00:00", - 1, - 1, - "CA", - 1, - "Dogpatch", - '["tag1", "tag3"]', - "[]", - "two", - ] - ] == response.json["rows"] - - -def test_table_filter_extra_where_invalid(app_client): - response = app_client.get("/fixtures/facetable.json?_where=_neighborhood=Dogpatch'") - assert 400 == response.status - assert "Invalid SQL" == response.json["title"] - - -def test_table_filter_extra_where_disabled_if_no_sql_allowed(): - with make_app_client(metadata={"allow_sql": {}}) as client: - response = client.get( - "/fixtures/facetable.json?_where=_neighborhood='Dogpatch'" - ) - assert 403 == response.status - assert "_where= is not allowed" == response.json["error"] - - -def test_table_through(app_client): - # Just the museums: - response = app_client.get( - '/fixtures/roadside_attractions.json?_through={"table":"roadside_attraction_characteristics","column":"characteristic_id","value":"1"}' - ) - assert [ - [ - 3, - "Burlingame Museum of PEZ Memorabilia", - "214 California Drive, Burlingame, CA 94010", - 37.5793, - -122.3442, - ], - [ - 4, - "Bigfoot Discovery Museum", - "5497 Highway 9, Felton, CA 95018", - 37.0414, - -122.0725, - ], - ] == response.json["rows"] - assert ( - 'where roadside_attraction_characteristics.characteristic_id = "1"' - == response.json["human_description_en"] - ) - - -def test_max_returned_rows(app_client): - response = app_client.get("/fixtures.json?sql=select+content+from+no_primary_key") - data = response.json - assert {"sql": "select content from no_primary_key", "params": {}} == data["query"] - assert data["truncated"] - assert 100 == len(data["rows"]) - - -def test_view(app_client): - response = app_client.get("/fixtures/simple_view.json?_shape=objects") - assert response.status == 200 - data = response.json - assert data["rows"] == [ - {"upper_content": "HELLO", "content": "hello"}, - {"upper_content": "WORLD", "content": "world"}, - {"upper_content": "", "content": ""}, - {"upper_content": "RENDER_CELL_DEMO", "content": "RENDER_CELL_DEMO"}, - {"upper_content": "RENDER_CELL_ASYNC", "content": "RENDER_CELL_ASYNC"}, - ] - - def test_row(app_client): response = app_client.get("/fixtures/simple_primary_key/1.json?_shape=objects") assert response.status == 200 @@ -1390,20 +740,6 @@ def test_row_foreign_key_tables(app_client): ] -def test_unit_filters(app_client): - response = app_client.get( - "/fixtures/units.json?distance__lt=75km&frequency__gt=1kHz" - ) - assert response.status == 200 - data = response.json - - assert data["units"]["distance"] == "m" - assert data["units"]["frequency"] == "Hz" - - assert len(data["rows"]) == 1 - assert data["rows"][0][0] == 2 - - def test_databases_json(app_client_two_attached_databases_one_immutable): response = app_client_two_attached_databases_one_immutable.get("/-/databases.json") databases = response.json @@ -1498,330 +834,6 @@ def test_config_redirects_to_settings(app_client, path, expected_redirect): assert response.headers["Location"] == expected_redirect -def test_page_size_matching_max_returned_rows( - app_client_returned_rows_matches_page_size, -): - fetched = [] - path = "/fixtures/no_primary_key.json" - while path: - response = app_client_returned_rows_matches_page_size.get(path) - fetched.extend(response.json["rows"]) - assert len(response.json["rows"]) in (1, 50) - path = 
response.json["next_url"] - if path: - path = path.replace("http://localhost", "") - assert 201 == len(fetched) - - -@pytest.mark.parametrize( - "path,expected_facet_results", - [ - ( - "/fixtures/facetable.json?_facet=state&_facet=_city_id", - { - "state": { - "name": "state", - "hideable": True, - "type": "column", - "toggle_url": "/fixtures/facetable.json?_facet=_city_id", - "results": [ - { - "value": "CA", - "label": "CA", - "count": 10, - "toggle_url": "_facet=state&_facet=_city_id&state=CA", - "selected": False, - }, - { - "value": "MI", - "label": "MI", - "count": 4, - "toggle_url": "_facet=state&_facet=_city_id&state=MI", - "selected": False, - }, - { - "value": "MC", - "label": "MC", - "count": 1, - "toggle_url": "_facet=state&_facet=_city_id&state=MC", - "selected": False, - }, - ], - "truncated": False, - }, - "_city_id": { - "name": "_city_id", - "hideable": True, - "type": "column", - "toggle_url": "/fixtures/facetable.json?_facet=state", - "results": [ - { - "value": 1, - "label": "San Francisco", - "count": 6, - "toggle_url": "_facet=state&_facet=_city_id&_city_id__exact=1", - "selected": False, - }, - { - "value": 2, - "label": "Los Angeles", - "count": 4, - "toggle_url": "_facet=state&_facet=_city_id&_city_id__exact=2", - "selected": False, - }, - { - "value": 3, - "label": "Detroit", - "count": 4, - "toggle_url": "_facet=state&_facet=_city_id&_city_id__exact=3", - "selected": False, - }, - { - "value": 4, - "label": "Memnonia", - "count": 1, - "toggle_url": "_facet=state&_facet=_city_id&_city_id__exact=4", - "selected": False, - }, - ], - "truncated": False, - }, - }, - ), - ( - "/fixtures/facetable.json?_facet=state&_facet=_city_id&state=MI", - { - "state": { - "name": "state", - "hideable": True, - "type": "column", - "toggle_url": "/fixtures/facetable.json?_facet=_city_id&state=MI", - "results": [ - { - "value": "MI", - "label": "MI", - "count": 4, - "selected": True, - "toggle_url": "_facet=state&_facet=_city_id", - } - ], - "truncated": False, - }, - "_city_id": { - "name": "_city_id", - "hideable": True, - "type": "column", - "toggle_url": "/fixtures/facetable.json?_facet=state&state=MI", - "results": [ - { - "value": 3, - "label": "Detroit", - "count": 4, - "selected": False, - "toggle_url": "_facet=state&_facet=_city_id&state=MI&_city_id__exact=3", - } - ], - "truncated": False, - }, - }, - ), - ( - "/fixtures/facetable.json?_facet=planet_int", - { - "planet_int": { - "name": "planet_int", - "hideable": True, - "type": "column", - "toggle_url": "/fixtures/facetable.json", - "results": [ - { - "value": 1, - "label": 1, - "count": 14, - "selected": False, - "toggle_url": "_facet=planet_int&planet_int=1", - }, - { - "value": 2, - "label": 2, - "count": 1, - "selected": False, - "toggle_url": "_facet=planet_int&planet_int=2", - }, - ], - "truncated": False, - } - }, - ), - ( - # planet_int is an integer field: - "/fixtures/facetable.json?_facet=planet_int&planet_int=1", - { - "planet_int": { - "name": "planet_int", - "hideable": True, - "type": "column", - "toggle_url": "/fixtures/facetable.json?planet_int=1", - "results": [ - { - "value": 1, - "label": 1, - "count": 14, - "selected": True, - "toggle_url": "_facet=planet_int", - } - ], - "truncated": False, - } - }, - ), - ], -) -def test_facets(app_client, path, expected_facet_results): - response = app_client.get(path) - facet_results = response.json["facet_results"] - # We only compare the querystring portion of the taggle_url - for facet_name, facet_info in facet_results.items(): - assert facet_name == 
facet_info["name"] - assert False is facet_info["truncated"] - for facet_value in facet_info["results"]: - facet_value["toggle_url"] = facet_value["toggle_url"].split("?")[1] - assert expected_facet_results == facet_results - - -def test_suggested_facets(app_client): - suggestions = [ - { - "name": suggestion["name"], - "querystring": suggestion["toggle_url"].split("?")[-1], - } - for suggestion in app_client.get("/fixtures/facetable.json").json[ - "suggested_facets" - ] - ] - expected = [ - {"name": "created", "querystring": "_facet=created"}, - {"name": "planet_int", "querystring": "_facet=planet_int"}, - {"name": "on_earth", "querystring": "_facet=on_earth"}, - {"name": "state", "querystring": "_facet=state"}, - {"name": "_city_id", "querystring": "_facet=_city_id"}, - {"name": "_neighborhood", "querystring": "_facet=_neighborhood"}, - {"name": "tags", "querystring": "_facet=tags"}, - {"name": "complex_array", "querystring": "_facet=complex_array"}, - {"name": "created", "querystring": "_facet_date=created"}, - ] - if detect_json1(): - expected.append({"name": "tags", "querystring": "_facet_array=tags"}) - assert expected == suggestions - - -def test_allow_facet_off(): - with make_app_client(settings={"allow_facet": False}) as client: - assert 400 == client.get("/fixtures/facetable.json?_facet=planet_int").status - # Should not suggest any facets either: - assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"] - - -def test_suggest_facets_off(): - with make_app_client(settings={"suggest_facets": False}) as client: - # Now suggested_facets should be [] - assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"] - - -@pytest.mark.parametrize("nofacet", (True, False)) -def test_nofacet(app_client, nofacet): - path = "/fixtures/facetable.json?_facet=state" - if nofacet: - path += "&_nofacet=1" - response = app_client.get(path) - if nofacet: - assert response.json["suggested_facets"] == [] - assert response.json["facet_results"] == {} - else: - assert response.json["suggested_facets"] != [] - assert response.json["facet_results"] != {} - - -@pytest.mark.parametrize("nocount,expected_count", ((True, None), (False, 15))) -def test_nocount(app_client, nocount, expected_count): - path = "/fixtures/facetable.json" - if nocount: - path += "?_nocount=1" - response = app_client.get(path) - assert response.json["filtered_table_rows_count"] == expected_count - - -def test_nocount_nofacet_if_shape_is_object(app_client_with_trace): - response = app_client_with_trace.get( - "/fixtures/facetable.json?_trace=1&_shape=object" - ) - assert "count(*)" not in response.text - - -def test_expand_labels(app_client): - response = app_client.get( - "/fixtures/facetable.json?_shape=object&_labels=1&_size=2" - "&_neighborhood__contains=c" - ) - assert { - "2": { - "pk": 2, - "created": "2019-01-14 08:00:00", - "planet_int": 1, - "on_earth": 1, - "state": "CA", - "_city_id": {"value": 1, "label": "San Francisco"}, - "_neighborhood": "Dogpatch", - "tags": '["tag1", "tag3"]', - "complex_array": "[]", - "distinct_some_null": "two", - }, - "13": { - "pk": 13, - "created": "2019-01-17 08:00:00", - "planet_int": 1, - "on_earth": 1, - "state": "MI", - "_city_id": {"value": 3, "label": "Detroit"}, - "_neighborhood": "Corktown", - "tags": "[]", - "complex_array": "[]", - "distinct_some_null": None, - }, - } == response.json - - -def test_expand_label(app_client): - response = app_client.get( - "/fixtures/foreign_key_references.json?_shape=object" - 
"&_label=foreign_key_with_label&_size=1" - ) - assert response.json == { - "1": { - "pk": "1", - "foreign_key_with_label": {"value": "1", "label": "hello"}, - "foreign_key_with_blank_label": "3", - "foreign_key_with_no_label": "1", - "foreign_key_compound_pk1": "a", - "foreign_key_compound_pk2": "b", - } - } - - -@pytest.mark.parametrize( - "path,expected_cache_control", - [ - ("/fixtures/facetable.json", "max-age=5"), - ("/fixtures/facetable.json?_ttl=invalid", "max-age=5"), - ("/fixtures/facetable.json?_ttl=10", "max-age=10"), - ("/fixtures/facetable.json?_ttl=0", "no-cache"), - ], -) -def test_ttl_parameter(app_client, path, expected_cache_control): - response = app_client.get(path) - assert expected_cache_control == response.headers["Cache-Control"] - - @pytest.mark.parametrize( "path,expected_redirect", [ @@ -1899,29 +911,6 @@ def test_config_force_https_urls(): assert client.ds._last_request.scheme == "https" -def test_infinity_returned_as_null(app_client): - response = app_client.get("/fixtures/infinity.json?_shape=array") - assert [ - {"rowid": 1, "value": None}, - {"rowid": 2, "value": None}, - {"rowid": 3, "value": 1.5}, - ] == response.json - - -def test_infinity_returned_as_invalid_json_if_requested(app_client): - response = app_client.get("/fixtures/infinity.json?_shape=array&_json_infinity=1") - assert [ - {"rowid": 1, "value": float("inf")}, - {"rowid": 2, "value": float("-inf")}, - {"rowid": 3, "value": 1.5}, - ] == response.json - - -def test_custom_query_with_unicode_characters(app_client): - response = app_client.get("/fixtures/𝐜𝐢𝐭𝐢𝐞𝐬.json?_shape=array") - assert [{"id": 1, "name": "San Francisco"}] == response.json - - @pytest.mark.parametrize("trace_debug", (True, False)) def test_trace(trace_debug): with make_app_client(settings={"trace_debug": trace_debug}) as client: @@ -1997,205 +986,17 @@ def test_common_prefix_database_names(app_client_conflicting_database_names): assert db_name == data["database"] -def test_null_and_compound_foreign_keys_are_not_expanded(app_client): - response = app_client.get( - "/fixtures/foreign_key_references.json?_shape=array&_labels=on" - ) - assert response.json == [ - { - "pk": "1", - "foreign_key_with_label": {"value": "1", "label": "hello"}, - "foreign_key_with_blank_label": {"value": "3", "label": ""}, - "foreign_key_with_no_label": {"value": "1", "label": "1"}, - "foreign_key_compound_pk1": "a", - "foreign_key_compound_pk2": "b", - }, - { - "pk": "2", - "foreign_key_with_label": None, - "foreign_key_with_blank_label": None, - "foreign_key_with_no_label": None, - "foreign_key_compound_pk1": None, - "foreign_key_compound_pk2": None, - }, - ] - - def test_inspect_file_used_for_count(app_client_immutable_and_inspect_file): response = app_client_immutable_and_inspect_file.get("/fixtures/sortable.json") assert response.json["filtered_table_rows_count"] == 100 -@pytest.mark.parametrize( - "path,expected_json,expected_text", - [ - ( - "/fixtures/binary_data.json?_shape=array", - [ - {"rowid": 1, "data": {"$base64": True, "encoded": "FRwCx60F/g=="}}, - {"rowid": 2, "data": {"$base64": True, "encoded": "FRwDx60F/g=="}}, - {"rowid": 3, "data": None}, - ], - None, - ), - ( - "/fixtures/binary_data.json?_shape=array&_nl=on", - None, - ( - '{"rowid": 1, "data": {"$base64": true, "encoded": "FRwCx60F/g=="}}\n' - '{"rowid": 2, "data": {"$base64": true, "encoded": "FRwDx60F/g=="}}\n' - '{"rowid": 3, "data": null}' - ), - ), - ], -) -def test_binary_data_in_json(app_client, path, expected_json, expected_text): - response = app_client.get(path) - if 
expected_json: - assert response.json == expected_json - else: - assert response.text == expected_text - - -@pytest.mark.parametrize( - "qs", - [ - "", - "?_shape=arrays", - "?_shape=arrayfirst", - "?_shape=object", - "?_shape=objects", - "?_shape=array", - "?_shape=array&_nl=on", - ], -) -def test_paginate_using_link_header(app_client, qs): - path = f"/fixtures/compound_three_primary_keys.json{qs}" - num_pages = 0 - while path: - response = app_client.get(path) - assert response.status == 200 - num_pages += 1 - link = response.headers.get("link") - if link: - assert link.startswith("<") - assert link.endswith('>; rel="next"') - path = link[1:].split(">")[0] - path = path.replace("http://localhost", "") - else: - path = None - assert num_pages == 21 - - -@pytest.mark.skipif( - sqlite_version() < (3, 31, 0), - reason="generated columns were added in SQLite 3.31.0", -) -def test_generated_columns_are_visible_in_datasette(): - with make_app_client( - extra_databases={ - "generated.db": """ - CREATE TABLE generated_columns ( - body TEXT, - id INT GENERATED ALWAYS AS (json_extract(body, '$.number')) STORED, - consideration INT GENERATED ALWAYS AS (json_extract(body, '$.string')) STORED - ); - INSERT INTO generated_columns (body) VALUES ( - '{"number": 1, "string": "This is a string"}' - );""" - } - ) as client: - response = client.get("/generated/generated_columns.json?_shape=array") - assert response.json == [ - { - "rowid": 1, - "body": '{"number": 1, "string": "This is a string"}', - "id": 1, - "consideration": "This is a string", - } - ] - - def test_http_options_request(app_client): response = app_client.request("/fixtures", method="OPTIONS") assert response.status == 200 assert response.text == "ok" -@pytest.mark.parametrize( - "path,expected_columns", - ( - ("/fixtures/facetable.json?_col=created", ["pk", "created"]), - ( - "/fixtures/facetable.json?_nocol=created", - [ - "pk", - "planet_int", - "on_earth", - "state", - "_city_id", - "_neighborhood", - "tags", - "complex_array", - "distinct_some_null", - ], - ), - ( - "/fixtures/facetable.json?_col=state&_col=created", - ["pk", "state", "created"], - ), - ( - "/fixtures/facetable.json?_col=state&_col=state", - ["pk", "state"], - ), - ( - "/fixtures/facetable.json?_col=state&_col=created&_nocol=created", - ["pk", "state"], - ), - ( - # Ensure faceting doesn't break, https://github.com/simonw/datasette/issues/1345 - "/fixtures/facetable.json?_nocol=state&_facet=state", - [ - "pk", - "created", - "planet_int", - "on_earth", - "_city_id", - "_neighborhood", - "tags", - "complex_array", - "distinct_some_null", - ], - ), - ( - "/fixtures/simple_view.json?_nocol=content", - ["upper_content"], - ), - ("/fixtures/simple_view.json?_col=content", ["content"]), - ), -) -def test_col_nocol(app_client, path, expected_columns): - response = app_client.get(path) - assert response.status == 200 - columns = response.json["columns"] - assert columns == expected_columns - - -@pytest.mark.parametrize( - "path,expected_error", - ( - ("/fixtures/facetable.json?_col=bad", "_col=bad - invalid columns"), - ("/fixtures/facetable.json?_nocol=bad", "_nocol=bad - invalid columns"), - ("/fixtures/facetable.json?_nocol=pk", "_nocol=pk - invalid columns"), - ("/fixtures/simple_view.json?_col=bad", "_col=bad - invalid columns"), - ), -) -def test_col_nocol_errors(app_client, path, expected_error): - response = app_client.get(path) - assert response.status == 400 - assert response.json["error"] == expected_error - - @pytest.mark.asyncio async def test_db_path(app_client): 
db = app_client.ds.get_database() @@ -2205,5 +1006,5 @@ async def test_db_path(app_client): datasette = Datasette([path]) - # this will break with a path + # Previously this broke if path was a pathlib.Path: await datasette.refresh_schemas() diff --git a/tests/test_table_api.py b/tests/test_table_api.py new file mode 100644 index 00000000..a530de44 --- /dev/null +++ b/tests/test_table_api.py @@ -0,0 +1,1206 @@ +from datasette.utils import detect_json1 +from datasette.utils.sqlite import sqlite_version +from .fixtures import ( # noqa + app_client, + app_client_with_hash, + app_client_with_trace, + app_client_returned_rows_matches_page_size, + generate_compound_rows, + generate_sortable_rows, + make_app_client, +) +import json +import pytest +import urllib + + +def test_table_json(app_client): + response = app_client.get("/fixtures/simple_primary_key.json?_shape=objects") + assert response.status == 200 + data = response.json + assert ( + data["query"]["sql"] + == "select id, content from simple_primary_key order by id limit 51" + ) + assert data["query"]["params"] == {} + assert data["rows"] == [ + {"id": "1", "content": "hello"}, + {"id": "2", "content": "world"}, + {"id": "3", "content": ""}, + {"id": "4", "content": "RENDER_CELL_DEMO"}, + {"id": "5", "content": "RENDER_CELL_ASYNC"}, + ] + + +def test_table_not_exists_json(app_client): + assert { + "ok": False, + "error": "Table not found: blah", + "status": 404, + "title": None, + } == app_client.get("/fixtures/blah.json").json + + +def test_jsono_redirects_to_shape_objects(app_client_with_hash): + response_1 = app_client_with_hash.get("/fixtures/simple_primary_key.jsono") + response = app_client_with_hash.get(response_1.headers["Location"]) + assert response.status == 302 + assert response.headers["Location"].endswith("?_shape=objects") + + +def test_table_shape_arrays(app_client): + response = app_client.get("/fixtures/simple_primary_key.json?_shape=arrays") + assert [ + ["1", "hello"], + ["2", "world"], + ["3", ""], + ["4", "RENDER_CELL_DEMO"], + ["5", "RENDER_CELL_ASYNC"], + ] == response.json["rows"] + + +def test_table_shape_arrayfirst(app_client): + response = app_client.get( + "/fixtures.json?" 
+ + urllib.parse.urlencode( + { + "sql": "select content from simple_primary_key order by id", + "_shape": "arrayfirst", + } + ) + ) + assert [ + "hello", + "world", + "", + "RENDER_CELL_DEMO", + "RENDER_CELL_ASYNC", + ] == response.json + + +def test_table_shape_objects(app_client): + response = app_client.get("/fixtures/simple_primary_key.json?_shape=objects") + assert [ + {"id": "1", "content": "hello"}, + {"id": "2", "content": "world"}, + {"id": "3", "content": ""}, + {"id": "4", "content": "RENDER_CELL_DEMO"}, + {"id": "5", "content": "RENDER_CELL_ASYNC"}, + ] == response.json["rows"] + + +def test_table_shape_array(app_client): + response = app_client.get("/fixtures/simple_primary_key.json?_shape=array") + assert [ + {"id": "1", "content": "hello"}, + {"id": "2", "content": "world"}, + {"id": "3", "content": ""}, + {"id": "4", "content": "RENDER_CELL_DEMO"}, + {"id": "5", "content": "RENDER_CELL_ASYNC"}, + ] == response.json + + +def test_table_shape_array_nl(app_client): + response = app_client.get("/fixtures/simple_primary_key.json?_shape=array&_nl=on") + lines = response.text.split("\n") + results = [json.loads(line) for line in lines] + assert [ + {"id": "1", "content": "hello"}, + {"id": "2", "content": "world"}, + {"id": "3", "content": ""}, + {"id": "4", "content": "RENDER_CELL_DEMO"}, + {"id": "5", "content": "RENDER_CELL_ASYNC"}, + ] == results + + +def test_table_shape_invalid(app_client): + response = app_client.get("/fixtures/simple_primary_key.json?_shape=invalid") + assert { + "ok": False, + "error": "Invalid _shape: invalid", + "status": 400, + "title": None, + } == response.json + + +def test_table_shape_object(app_client): + response = app_client.get("/fixtures/simple_primary_key.json?_shape=object") + assert { + "1": {"id": "1", "content": "hello"}, + "2": {"id": "2", "content": "world"}, + "3": {"id": "3", "content": ""}, + "4": {"id": "4", "content": "RENDER_CELL_DEMO"}, + "5": {"id": "5", "content": "RENDER_CELL_ASYNC"}, + } == response.json + + +def test_table_shape_object_compound_primary_key(app_client): + response = app_client.get("/fixtures/compound_primary_key.json?_shape=object") + assert {"a,b": {"pk1": "a", "pk2": "b", "content": "c"}} == response.json + + +def test_table_with_slashes_in_name(app_client): + response = app_client.get( + "/fixtures/table%2Fwith%2Fslashes.csv?_shape=objects&_format=json" + ) + assert response.status == 200 + data = response.json + assert data["rows"] == [{"pk": "3", "content": "hey"}] + + +def test_table_with_reserved_word_name(app_client): + response = app_client.get("/fixtures/select.json?_shape=objects") + assert response.status == 200 + data = response.json + assert data["rows"] == [ + { + "rowid": 1, + "group": "group", + "having": "having", + "and": "and", + "json": '{"href": "http://example.com/", "label":"Example"}', + } + ] + + +@pytest.mark.parametrize( + "path,expected_rows,expected_pages", + [ + ("/fixtures/no_primary_key.json", 201, 5), + ("/fixtures/paginated_view.json", 201, 9), + ("/fixtures/no_primary_key.json?_size=25", 201, 9), + ("/fixtures/paginated_view.json?_size=50", 201, 5), + ("/fixtures/paginated_view.json?_size=max", 201, 3), + ("/fixtures/123_starts_with_digits.json", 0, 1), + # Ensure faceting doesn't break pagination: + ("/fixtures/compound_three_primary_keys.json?_facet=pk1", 1001, 21), + # Paginating while sorted by an expanded foreign key should work + ( + "/fixtures/roadside_attraction_characteristics.json?_size=2&_sort=attraction_id&_labels=on", + 5, + 3, + ), + ], +) +def 
test_paginate_tables_and_views(app_client, path, expected_rows, expected_pages): + fetched = [] + count = 0 + while path: + response = app_client.get(path) + assert 200 == response.status + count += 1 + fetched.extend(response.json["rows"]) + path = response.json["next_url"] + if path: + assert urllib.parse.urlencode({"_next": response.json["next"]}) in path + path = path.replace("http://localhost", "") + assert count < 30, "Possible infinite loop detected" + + assert expected_rows == len(fetched) + assert expected_pages == count + + +@pytest.mark.parametrize( + "path,expected_error", + [ + ("/fixtures/no_primary_key.json?_size=-4", "_size must be a positive integer"), + ("/fixtures/no_primary_key.json?_size=dog", "_size must be a positive integer"), + ("/fixtures/no_primary_key.json?_size=1001", "_size must be <= 100"), + ], +) +def test_validate_page_size(app_client, path, expected_error): + response = app_client.get(path) + assert expected_error == response.json["error"] + assert 400 == response.status + + +def test_page_size_zero(app_client): + """For _size=0 we return the counts, empty rows and no continuation token""" + response = app_client.get("/fixtures/no_primary_key.json?_size=0") + assert 200 == response.status + assert [] == response.json["rows"] + assert 201 == response.json["filtered_table_rows_count"] + assert None is response.json["next"] + assert None is response.json["next_url"] + + +def test_paginate_compound_keys(app_client): + fetched = [] + path = "/fixtures/compound_three_primary_keys.json?_shape=objects" + page = 0 + while path: + page += 1 + response = app_client.get(path) + fetched.extend(response.json["rows"]) + path = response.json["next_url"] + if path: + path = path.replace("http://localhost", "") + assert page < 100 + assert 1001 == len(fetched) + assert 21 == page + # Should be correctly ordered + contents = [f["content"] for f in fetched] + expected = [r[3] for r in generate_compound_rows(1001)] + assert expected == contents + + +def test_paginate_compound_keys_with_extra_filters(app_client): + fetched = [] + path = ( + "/fixtures/compound_three_primary_keys.json?content__contains=d&_shape=objects" + ) + page = 0 + while path: + page += 1 + assert page < 100 + response = app_client.get(path) + fetched.extend(response.json["rows"]) + path = response.json["next_url"] + if path: + path = path.replace("http://localhost", "") + assert 2 == page + expected = [r[3] for r in generate_compound_rows(1001) if "d" in r[3]] + assert expected == [f["content"] for f in fetched] + + +@pytest.mark.parametrize( + "query_string,sort_key,human_description_en", + [ + ("_sort=sortable", lambda row: row["sortable"], "sorted by sortable"), + ( + "_sort_desc=sortable", + lambda row: -row["sortable"], + "sorted by sortable descending", + ), + ( + "_sort=sortable_with_nulls", + lambda row: ( + 1 if row["sortable_with_nulls"] is not None else 0, + row["sortable_with_nulls"], + ), + "sorted by sortable_with_nulls", + ), + ( + "_sort_desc=sortable_with_nulls", + lambda row: ( + 1 if row["sortable_with_nulls"] is None else 0, + -row["sortable_with_nulls"] + if row["sortable_with_nulls"] is not None + else 0, + row["content"], + ), + "sorted by sortable_with_nulls descending", + ), + # text column contains '$null' - ensure it doesn't confuse pagination: + ("_sort=text", lambda row: row["text"], "sorted by text"), + ], +) +def test_sortable(app_client, query_string, sort_key, human_description_en): + path = f"/fixtures/sortable.json?_shape=objects&{query_string}" + fetched = [] + page = 0 
+ while path: + page += 1 + assert page < 100 + response = app_client.get(path) + assert human_description_en == response.json["human_description_en"] + fetched.extend(response.json["rows"]) + path = response.json["next_url"] + if path: + path = path.replace("http://localhost", "") + assert 5 == page + expected = list(generate_sortable_rows(201)) + expected.sort(key=sort_key) + assert [r["content"] for r in expected] == [r["content"] for r in fetched] + + +def test_sortable_and_filtered(app_client): + path = ( + "/fixtures/sortable.json" + "?content__contains=d&_sort_desc=sortable&_shape=objects" + ) + response = app_client.get(path) + fetched = response.json["rows"] + assert ( + 'where content contains "d" sorted by sortable descending' + == response.json["human_description_en"] + ) + expected = [row for row in generate_sortable_rows(201) if "d" in row["content"]] + assert len(expected) == response.json["filtered_table_rows_count"] + expected.sort(key=lambda row: -row["sortable"]) + assert [r["content"] for r in expected] == [r["content"] for r in fetched] + + +def test_sortable_argument_errors(app_client): + response = app_client.get("/fixtures/sortable.json?_sort=badcolumn") + assert "Cannot sort table by badcolumn" == response.json["error"] + response = app_client.get("/fixtures/sortable.json?_sort_desc=badcolumn2") + assert "Cannot sort table by badcolumn2" == response.json["error"] + response = app_client.get( + "/fixtures/sortable.json?_sort=sortable_with_nulls&_sort_desc=sortable" + ) + assert "Cannot use _sort and _sort_desc at the same time" == response.json["error"] + + +def test_sortable_columns_metadata(app_client): + response = app_client.get("/fixtures/sortable.json?_sort=content") + assert "Cannot sort table by content" == response.json["error"] + # no_primary_key has ALL sort options disabled + for column in ("content", "a", "b", "c"): + response = app_client.get(f"/fixtures/sortable.json?_sort={column}") + assert f"Cannot sort table by {column}" == response.json["error"] + + +@pytest.mark.parametrize( + "path,expected_rows", + [ + ( + "/fixtures/searchable.json?_search=dog", + [ + [1, "barry cat", "terry dog", "panther"], + [2, "terry dog", "sara weasel", "puma"], + ], + ), + ( + # Special keyword shouldn't break FTS query + "/fixtures/searchable.json?_search=AND", + [], + ), + ( + # Without _searchmode=raw this should return no results + "/fixtures/searchable.json?_search=te*+AND+do*", + [], + ), + ( + # _searchmode=raw + "/fixtures/searchable.json?_search=te*+AND+do*&_searchmode=raw", + [ + [1, "barry cat", "terry dog", "panther"], + [2, "terry dog", "sara weasel", "puma"], + ], + ), + ( + # _searchmode=raw combined with _search_COLUMN + "/fixtures/searchable.json?_search_text2=te*&_searchmode=raw", + [ + [1, "barry cat", "terry dog", "panther"], + ], + ), + ( + "/fixtures/searchable.json?_search=weasel", + [[2, "terry dog", "sara weasel", "puma"]], + ), + ( + "/fixtures/searchable.json?_search_text2=dog", + [[1, "barry cat", "terry dog", "panther"]], + ), + ( + "/fixtures/searchable.json?_search_name%20with%20.%20and%20spaces=panther", + [[1, "barry cat", "terry dog", "panther"]], + ), + ], +) +def test_searchable(app_client, path, expected_rows): + response = app_client.get(path) + assert expected_rows == response.json["rows"] + + +_SEARCHMODE_RAW_RESULTS = [ + [1, "barry cat", "terry dog", "panther"], + [2, "terry dog", "sara weasel", "puma"], +] + + +@pytest.mark.parametrize( + "table_metadata,querystring,expected_rows", + [ + ( + {}, + "_search=te*+AND+do*", + [], 
+ ), + ( + {"searchmode": "raw"}, + "_search=te*+AND+do*", + _SEARCHMODE_RAW_RESULTS, + ), + ( + {}, + "_search=te*+AND+do*&_searchmode=raw", + _SEARCHMODE_RAW_RESULTS, + ), + # Can be over-ridden with _searchmode=escaped + ( + {"searchmode": "raw"}, + "_search=te*+AND+do*&_searchmode=escaped", + [], + ), + ], +) +def test_searchmode(table_metadata, querystring, expected_rows): + with make_app_client( + metadata={"databases": {"fixtures": {"tables": {"searchable": table_metadata}}}} + ) as client: + response = client.get("/fixtures/searchable.json?" + querystring) + assert expected_rows == response.json["rows"] + + +@pytest.mark.parametrize( + "path,expected_rows", + [ + ( + "/fixtures/searchable_view_configured_by_metadata.json?_search=weasel", + [[2, "terry dog", "sara weasel", "puma"]], + ), + # This should return all results because search is not configured: + ( + "/fixtures/searchable_view.json?_search=weasel", + [ + [1, "barry cat", "terry dog", "panther"], + [2, "terry dog", "sara weasel", "puma"], + ], + ), + ( + "/fixtures/searchable_view.json?_search=weasel&_fts_table=searchable_fts&_fts_pk=pk", + [[2, "terry dog", "sara weasel", "puma"]], + ), + ], +) +def test_searchable_views(app_client, path, expected_rows): + response = app_client.get(path) + assert expected_rows == response.json["rows"] + + +def test_searchable_invalid_column(app_client): + response = app_client.get("/fixtures/searchable.json?_search_invalid=x") + assert 400 == response.status + assert { + "ok": False, + "error": "Cannot search by that column", + "status": 400, + "title": None, + } == response.json + + +@pytest.mark.parametrize( + "path,expected_rows", + [ + ("/fixtures/simple_primary_key.json?content=hello", [["1", "hello"]]), + ( + "/fixtures/simple_primary_key.json?content__contains=o", + [ + ["1", "hello"], + ["2", "world"], + ["4", "RENDER_CELL_DEMO"], + ], + ), + ("/fixtures/simple_primary_key.json?content__exact=", [["3", ""]]), + ( + "/fixtures/simple_primary_key.json?content__not=world", + [ + ["1", "hello"], + ["3", ""], + ["4", "RENDER_CELL_DEMO"], + ["5", "RENDER_CELL_ASYNC"], + ], + ), + ], +) +def test_table_filter_queries(app_client, path, expected_rows): + response = app_client.get(path) + assert expected_rows == response.json["rows"] + + +def test_table_filter_queries_multiple_of_same_type(app_client): + response = app_client.get( + "/fixtures/simple_primary_key.json?content__not=world&content__not=hello" + ) + assert [ + ["3", ""], + ["4", "RENDER_CELL_DEMO"], + ["5", "RENDER_CELL_ASYNC"], + ] == response.json["rows"] + + +@pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module") +def test_table_filter_json_arraycontains(app_client): + response = app_client.get("/fixtures/facetable.json?tags__arraycontains=tag1") + assert response.json["rows"] == [ + [ + 1, + "2019-01-14 08:00:00", + 1, + 1, + "CA", + 1, + "Mission", + '["tag1", "tag2"]', + '[{"foo": "bar"}]', + "one", + ], + [ + 2, + "2019-01-14 08:00:00", + 1, + 1, + "CA", + 1, + "Dogpatch", + '["tag1", "tag3"]', + "[]", + "two", + ], + ] + + +@pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module") +def test_table_filter_json_arraynotcontains(app_client): + response = app_client.get( + "/fixtures/facetable.json?tags__arraynotcontains=tag3&tags__not=[]" + ) + assert response.json["rows"] == [ + [ + 1, + "2019-01-14 08:00:00", + 1, + 1, + "CA", + 1, + "Mission", + '["tag1", "tag2"]', + '[{"foo": "bar"}]', + "one", + ] + ] + + +def test_table_filter_extra_where(app_client): + response = 
app_client.get( + "/fixtures/facetable.json?_where=_neighborhood='Dogpatch'" + ) + assert [ + [ + 2, + "2019-01-14 08:00:00", + 1, + 1, + "CA", + 1, + "Dogpatch", + '["tag1", "tag3"]', + "[]", + "two", + ] + ] == response.json["rows"] + + +def test_table_filter_extra_where_invalid(app_client): + response = app_client.get("/fixtures/facetable.json?_where=_neighborhood=Dogpatch'") + assert 400 == response.status + assert "Invalid SQL" == response.json["title"] + + +def test_table_filter_extra_where_disabled_if_no_sql_allowed(): + with make_app_client(metadata={"allow_sql": {}}) as client: + response = client.get( + "/fixtures/facetable.json?_where=_neighborhood='Dogpatch'" + ) + assert 403 == response.status + assert "_where= is not allowed" == response.json["error"] + + +def test_table_through(app_client): + # Just the museums: + response = app_client.get( + '/fixtures/roadside_attractions.json?_through={"table":"roadside_attraction_characteristics","column":"characteristic_id","value":"1"}' + ) + assert [ + [ + 3, + "Burlingame Museum of PEZ Memorabilia", + "214 California Drive, Burlingame, CA 94010", + 37.5793, + -122.3442, + ], + [ + 4, + "Bigfoot Discovery Museum", + "5497 Highway 9, Felton, CA 95018", + 37.0414, + -122.0725, + ], + ] == response.json["rows"] + assert ( + 'where roadside_attraction_characteristics.characteristic_id = "1"' + == response.json["human_description_en"] + ) + + +def test_max_returned_rows(app_client): + response = app_client.get("/fixtures.json?sql=select+content+from+no_primary_key") + data = response.json + assert {"sql": "select content from no_primary_key", "params": {}} == data["query"] + assert data["truncated"] + assert 100 == len(data["rows"]) + + +def test_view(app_client): + response = app_client.get("/fixtures/simple_view.json?_shape=objects") + assert response.status == 200 + data = response.json + assert data["rows"] == [ + {"upper_content": "HELLO", "content": "hello"}, + {"upper_content": "WORLD", "content": "world"}, + {"upper_content": "", "content": ""}, + {"upper_content": "RENDER_CELL_DEMO", "content": "RENDER_CELL_DEMO"}, + {"upper_content": "RENDER_CELL_ASYNC", "content": "RENDER_CELL_ASYNC"}, + ] + + +def test_unit_filters(app_client): + response = app_client.get( + "/fixtures/units.json?distance__lt=75km&frequency__gt=1kHz" + ) + assert response.status == 200 + data = response.json + + assert data["units"]["distance"] == "m" + assert data["units"]["frequency"] == "Hz" + + assert len(data["rows"]) == 1 + assert data["rows"][0][0] == 2 + + +def test_page_size_matching_max_returned_rows( + app_client_returned_rows_matches_page_size, +): + fetched = [] + path = "/fixtures/no_primary_key.json" + while path: + response = app_client_returned_rows_matches_page_size.get(path) + fetched.extend(response.json["rows"]) + assert len(response.json["rows"]) in (1, 50) + path = response.json["next_url"] + if path: + path = path.replace("http://localhost", "") + assert 201 == len(fetched) + + +@pytest.mark.parametrize( + "path,expected_facet_results", + [ + ( + "/fixtures/facetable.json?_facet=state&_facet=_city_id", + { + "state": { + "name": "state", + "hideable": True, + "type": "column", + "toggle_url": "/fixtures/facetable.json?_facet=_city_id", + "results": [ + { + "value": "CA", + "label": "CA", + "count": 10, + "toggle_url": "_facet=state&_facet=_city_id&state=CA", + "selected": False, + }, + { + "value": "MI", + "label": "MI", + "count": 4, + "toggle_url": "_facet=state&_facet=_city_id&state=MI", + "selected": False, + }, + { + "value": "MC", 
+ "label": "MC", + "count": 1, + "toggle_url": "_facet=state&_facet=_city_id&state=MC", + "selected": False, + }, + ], + "truncated": False, + }, + "_city_id": { + "name": "_city_id", + "hideable": True, + "type": "column", + "toggle_url": "/fixtures/facetable.json?_facet=state", + "results": [ + { + "value": 1, + "label": "San Francisco", + "count": 6, + "toggle_url": "_facet=state&_facet=_city_id&_city_id__exact=1", + "selected": False, + }, + { + "value": 2, + "label": "Los Angeles", + "count": 4, + "toggle_url": "_facet=state&_facet=_city_id&_city_id__exact=2", + "selected": False, + }, + { + "value": 3, + "label": "Detroit", + "count": 4, + "toggle_url": "_facet=state&_facet=_city_id&_city_id__exact=3", + "selected": False, + }, + { + "value": 4, + "label": "Memnonia", + "count": 1, + "toggle_url": "_facet=state&_facet=_city_id&_city_id__exact=4", + "selected": False, + }, + ], + "truncated": False, + }, + }, + ), + ( + "/fixtures/facetable.json?_facet=state&_facet=_city_id&state=MI", + { + "state": { + "name": "state", + "hideable": True, + "type": "column", + "toggle_url": "/fixtures/facetable.json?_facet=_city_id&state=MI", + "results": [ + { + "value": "MI", + "label": "MI", + "count": 4, + "selected": True, + "toggle_url": "_facet=state&_facet=_city_id", + } + ], + "truncated": False, + }, + "_city_id": { + "name": "_city_id", + "hideable": True, + "type": "column", + "toggle_url": "/fixtures/facetable.json?_facet=state&state=MI", + "results": [ + { + "value": 3, + "label": "Detroit", + "count": 4, + "selected": False, + "toggle_url": "_facet=state&_facet=_city_id&state=MI&_city_id__exact=3", + } + ], + "truncated": False, + }, + }, + ), + ( + "/fixtures/facetable.json?_facet=planet_int", + { + "planet_int": { + "name": "planet_int", + "hideable": True, + "type": "column", + "toggle_url": "/fixtures/facetable.json", + "results": [ + { + "value": 1, + "label": 1, + "count": 14, + "selected": False, + "toggle_url": "_facet=planet_int&planet_int=1", + }, + { + "value": 2, + "label": 2, + "count": 1, + "selected": False, + "toggle_url": "_facet=planet_int&planet_int=2", + }, + ], + "truncated": False, + } + }, + ), + ( + # planet_int is an integer field: + "/fixtures/facetable.json?_facet=planet_int&planet_int=1", + { + "planet_int": { + "name": "planet_int", + "hideable": True, + "type": "column", + "toggle_url": "/fixtures/facetable.json?planet_int=1", + "results": [ + { + "value": 1, + "label": 1, + "count": 14, + "selected": True, + "toggle_url": "_facet=planet_int", + } + ], + "truncated": False, + } + }, + ), + ], +) +def test_facets(app_client, path, expected_facet_results): + response = app_client.get(path) + facet_results = response.json["facet_results"] + # We only compare the querystring portion of the taggle_url + for facet_name, facet_info in facet_results.items(): + assert facet_name == facet_info["name"] + assert False is facet_info["truncated"] + for facet_value in facet_info["results"]: + facet_value["toggle_url"] = facet_value["toggle_url"].split("?")[1] + assert expected_facet_results == facet_results + + +def test_suggested_facets(app_client): + suggestions = [ + { + "name": suggestion["name"], + "querystring": suggestion["toggle_url"].split("?")[-1], + } + for suggestion in app_client.get("/fixtures/facetable.json").json[ + "suggested_facets" + ] + ] + expected = [ + {"name": "created", "querystring": "_facet=created"}, + {"name": "planet_int", "querystring": "_facet=planet_int"}, + {"name": "on_earth", "querystring": "_facet=on_earth"}, + {"name": "state", 
"querystring": "_facet=state"}, + {"name": "_city_id", "querystring": "_facet=_city_id"}, + {"name": "_neighborhood", "querystring": "_facet=_neighborhood"}, + {"name": "tags", "querystring": "_facet=tags"}, + {"name": "complex_array", "querystring": "_facet=complex_array"}, + {"name": "created", "querystring": "_facet_date=created"}, + ] + if detect_json1(): + expected.append({"name": "tags", "querystring": "_facet_array=tags"}) + assert expected == suggestions + + +def test_allow_facet_off(): + with make_app_client(settings={"allow_facet": False}) as client: + assert 400 == client.get("/fixtures/facetable.json?_facet=planet_int").status + # Should not suggest any facets either: + assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"] + + +def test_suggest_facets_off(): + with make_app_client(settings={"suggest_facets": False}) as client: + # Now suggested_facets should be [] + assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"] + + +@pytest.mark.parametrize("nofacet", (True, False)) +def test_nofacet(app_client, nofacet): + path = "/fixtures/facetable.json?_facet=state" + if nofacet: + path += "&_nofacet=1" + response = app_client.get(path) + if nofacet: + assert response.json["suggested_facets"] == [] + assert response.json["facet_results"] == {} + else: + assert response.json["suggested_facets"] != [] + assert response.json["facet_results"] != {} + + +@pytest.mark.parametrize("nocount,expected_count", ((True, None), (False, 15))) +def test_nocount(app_client, nocount, expected_count): + path = "/fixtures/facetable.json" + if nocount: + path += "?_nocount=1" + response = app_client.get(path) + assert response.json["filtered_table_rows_count"] == expected_count + + +def test_nocount_nofacet_if_shape_is_object(app_client_with_trace): + response = app_client_with_trace.get( + "/fixtures/facetable.json?_trace=1&_shape=object" + ) + assert "count(*)" not in response.text + + +def test_expand_labels(app_client): + response = app_client.get( + "/fixtures/facetable.json?_shape=object&_labels=1&_size=2" + "&_neighborhood__contains=c" + ) + assert { + "2": { + "pk": 2, + "created": "2019-01-14 08:00:00", + "planet_int": 1, + "on_earth": 1, + "state": "CA", + "_city_id": {"value": 1, "label": "San Francisco"}, + "_neighborhood": "Dogpatch", + "tags": '["tag1", "tag3"]', + "complex_array": "[]", + "distinct_some_null": "two", + }, + "13": { + "pk": 13, + "created": "2019-01-17 08:00:00", + "planet_int": 1, + "on_earth": 1, + "state": "MI", + "_city_id": {"value": 3, "label": "Detroit"}, + "_neighborhood": "Corktown", + "tags": "[]", + "complex_array": "[]", + "distinct_some_null": None, + }, + } == response.json + + +def test_expand_label(app_client): + response = app_client.get( + "/fixtures/foreign_key_references.json?_shape=object" + "&_label=foreign_key_with_label&_size=1" + ) + assert response.json == { + "1": { + "pk": "1", + "foreign_key_with_label": {"value": "1", "label": "hello"}, + "foreign_key_with_blank_label": "3", + "foreign_key_with_no_label": "1", + "foreign_key_compound_pk1": "a", + "foreign_key_compound_pk2": "b", + } + } + + +@pytest.mark.parametrize( + "path,expected_cache_control", + [ + ("/fixtures/facetable.json", "max-age=5"), + ("/fixtures/facetable.json?_ttl=invalid", "max-age=5"), + ("/fixtures/facetable.json?_ttl=10", "max-age=10"), + ("/fixtures/facetable.json?_ttl=0", "no-cache"), + ], +) +def test_ttl_parameter(app_client, path, expected_cache_control): + response = app_client.get(path) + assert expected_cache_control == 
response.headers["Cache-Control"] + + +def test_infinity_returned_as_null(app_client): + response = app_client.get("/fixtures/infinity.json?_shape=array") + assert [ + {"rowid": 1, "value": None}, + {"rowid": 2, "value": None}, + {"rowid": 3, "value": 1.5}, + ] == response.json + + +def test_infinity_returned_as_invalid_json_if_requested(app_client): + response = app_client.get("/fixtures/infinity.json?_shape=array&_json_infinity=1") + assert [ + {"rowid": 1, "value": float("inf")}, + {"rowid": 2, "value": float("-inf")}, + {"rowid": 3, "value": 1.5}, + ] == response.json + + +def test_custom_query_with_unicode_characters(app_client): + response = app_client.get("/fixtures/𝐜𝐢𝐭𝐢𝐞𝐬.json?_shape=array") + assert [{"id": 1, "name": "San Francisco"}] == response.json + + +def test_null_and_compound_foreign_keys_are_not_expanded(app_client): + response = app_client.get( + "/fixtures/foreign_key_references.json?_shape=array&_labels=on" + ) + assert response.json == [ + { + "pk": "1", + "foreign_key_with_label": {"value": "1", "label": "hello"}, + "foreign_key_with_blank_label": {"value": "3", "label": ""}, + "foreign_key_with_no_label": {"value": "1", "label": "1"}, + "foreign_key_compound_pk1": "a", + "foreign_key_compound_pk2": "b", + }, + { + "pk": "2", + "foreign_key_with_label": None, + "foreign_key_with_blank_label": None, + "foreign_key_with_no_label": None, + "foreign_key_compound_pk1": None, + "foreign_key_compound_pk2": None, + }, + ] + + +@pytest.mark.parametrize( + "path,expected_json,expected_text", + [ + ( + "/fixtures/binary_data.json?_shape=array", + [ + {"rowid": 1, "data": {"$base64": True, "encoded": "FRwCx60F/g=="}}, + {"rowid": 2, "data": {"$base64": True, "encoded": "FRwDx60F/g=="}}, + {"rowid": 3, "data": None}, + ], + None, + ), + ( + "/fixtures/binary_data.json?_shape=array&_nl=on", + None, + ( + '{"rowid": 1, "data": {"$base64": true, "encoded": "FRwCx60F/g=="}}\n' + '{"rowid": 2, "data": {"$base64": true, "encoded": "FRwDx60F/g=="}}\n' + '{"rowid": 3, "data": null}' + ), + ), + ], +) +def test_binary_data_in_json(app_client, path, expected_json, expected_text): + response = app_client.get(path) + if expected_json: + assert response.json == expected_json + else: + assert response.text == expected_text + + +@pytest.mark.parametrize( + "qs", + [ + "", + "?_shape=arrays", + "?_shape=arrayfirst", + "?_shape=object", + "?_shape=objects", + "?_shape=array", + "?_shape=array&_nl=on", + ], +) +def test_paginate_using_link_header(app_client, qs): + path = f"/fixtures/compound_three_primary_keys.json{qs}" + num_pages = 0 + while path: + response = app_client.get(path) + assert response.status == 200 + num_pages += 1 + link = response.headers.get("link") + if link: + assert link.startswith("<") + assert link.endswith('>; rel="next"') + path = link[1:].split(">")[0] + path = path.replace("http://localhost", "") + else: + path = None + assert num_pages == 21 + + +@pytest.mark.skipif( + sqlite_version() < (3, 31, 0), + reason="generated columns were added in SQLite 3.31.0", +) +def test_generated_columns_are_visible_in_datasette(): + with make_app_client( + extra_databases={ + "generated.db": """ + CREATE TABLE generated_columns ( + body TEXT, + id INT GENERATED ALWAYS AS (json_extract(body, '$.number')) STORED, + consideration INT GENERATED ALWAYS AS (json_extract(body, '$.string')) STORED + ); + INSERT INTO generated_columns (body) VALUES ( + '{"number": 1, "string": "This is a string"}' + );""" + } + ) as client: + response = 
client.get("/generated/generated_columns.json?_shape=array") + assert response.json == [ + { + "rowid": 1, + "body": '{"number": 1, "string": "This is a string"}', + "id": 1, + "consideration": "This is a string", + } + ] + + +@pytest.mark.parametrize( + "path,expected_columns", + ( + ("/fixtures/facetable.json?_col=created", ["pk", "created"]), + ( + "/fixtures/facetable.json?_nocol=created", + [ + "pk", + "planet_int", + "on_earth", + "state", + "_city_id", + "_neighborhood", + "tags", + "complex_array", + "distinct_some_null", + ], + ), + ( + "/fixtures/facetable.json?_col=state&_col=created", + ["pk", "state", "created"], + ), + ( + "/fixtures/facetable.json?_col=state&_col=state", + ["pk", "state"], + ), + ( + "/fixtures/facetable.json?_col=state&_col=created&_nocol=created", + ["pk", "state"], + ), + ( + # Ensure faceting doesn't break, https://github.com/simonw/datasette/issues/1345 + "/fixtures/facetable.json?_nocol=state&_facet=state", + [ + "pk", + "created", + "planet_int", + "on_earth", + "_city_id", + "_neighborhood", + "tags", + "complex_array", + "distinct_some_null", + ], + ), + ( + "/fixtures/simple_view.json?_nocol=content", + ["upper_content"], + ), + ("/fixtures/simple_view.json?_col=content", ["content"]), + ), +) +def test_col_nocol(app_client, path, expected_columns): + response = app_client.get(path) + assert response.status == 200 + columns = response.json["columns"] + assert columns == expected_columns + + +@pytest.mark.parametrize( + "path,expected_error", + ( + ("/fixtures/facetable.json?_col=bad", "_col=bad - invalid columns"), + ("/fixtures/facetable.json?_nocol=bad", "_nocol=bad - invalid columns"), + ("/fixtures/facetable.json?_nocol=pk", "_nocol=pk - invalid columns"), + ("/fixtures/simple_view.json?_col=bad", "_col=bad - invalid columns"), + ), +) +def test_col_nocol_errors(app_client, path, expected_error): + response = app_client.get(path) + assert response.status == 400 + assert response.json["error"] == expected_error From a6ff123de5464806441f6a6f95145c9a83b7f20b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 12 Dec 2021 12:01:51 -0800 Subject: [PATCH 0044/1103] keep_blank_values=True when parsing query_string, closes #1551 Refs #1518 --- datasette/utils/asgi.py | 2 +- datasette/views/table.py | 11 +++-------- tests/test_internals_request.py | 16 ++++++++++++++++ 3 files changed, 20 insertions(+), 9 deletions(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index ad137fa9..cd3ec654 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -97,7 +97,7 @@ class Request: @property def args(self): - return MultiParams(parse_qs(qs=self.query_string)) + return MultiParams(parse_qs(qs=self.query_string, keep_blank_values=True)) @property def actor(self): diff --git a/datasette/views/table.py b/datasette/views/table.py index f58b78f5..59010723 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -393,21 +393,16 @@ class TableView(RowTableShared): nocount = True nofacet = True - # Ensure we don't drop anything with an empty value e.g. 
?name__exact= - args = MultiParams( - urllib.parse.parse_qs(request.query_string, keep_blank_values=True) - ) - # Special args start with _ and do not contain a __ # That's so if there is a column that starts with _ # it can still be queried using ?_col__exact=blah special_args = {} other_args = [] - for key in args: + for key in request.args: if key.startswith("_") and "__" not in key: - special_args[key] = args[key] + special_args[key] = request.args[key] else: - for v in args.getlist(key): + for v in request.args.getlist(key): other_args.append((key, v)) # Handle ?_filter_column and redirect, if present diff --git a/tests/test_internals_request.py b/tests/test_internals_request.py index cd956f3f..01c93eec 100644 --- a/tests/test_internals_request.py +++ b/tests/test_internals_request.py @@ -121,3 +121,19 @@ def test_request_properties(path, query_string, expected_full_path): assert request.path == path assert request.query_string == query_string assert request.full_path == expected_full_path + + +def test_request_blank_values(): + query_string = "a=b&foo=bar&foo=bar2&baz=" + path_with_query_string = "/?" + query_string + scope = { + "http_version": "1.1", + "method": "POST", + "path": "/", + "raw_path": path_with_query_string.encode("latin-1"), + "query_string": query_string.encode("latin-1"), + "scheme": "http", + "type": "http", + } + request = Request(scope, None) + assert request.args._data == {"a": ["b"], "foo": ["bar", "bar2"], "baz": [""]} From 8b411a6b70e93e044820d613a28607ba5d6fe416 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Dec 2021 15:22:21 -0800 Subject: [PATCH 0045/1103] Update pytest-xdist requirement from <2.5,>=2.2.1 to >=2.2.1,<2.6 (#1548) Updates the requirements on [pytest-xdist](https://github.com/pytest-dev/pytest-xdist) to permit the latest version. - [Release notes](https://github.com/pytest-dev/pytest-xdist/releases) - [Changelog](https://github.com/pytest-dev/pytest-xdist/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-xdist/compare/v2.2.1...v2.5.0) --- updated-dependencies: - dependency-name: pytest-xdist dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 9b5bab61..da8dea49 100644 --- a/setup.py +++ b/setup.py @@ -68,7 +68,7 @@ setup( "docs": ["sphinx_rtd_theme", "sphinx-autobuild", "codespell"], "test": [ "pytest>=5.2.2,<6.3.0", - "pytest-xdist>=2.2.1,<2.5", + "pytest-xdist>=2.2.1,<2.6", "pytest-asyncio>=0.10,<0.17", "beautifulsoup4>=4.8.1,<4.11.0", "black==21.11b1", From f5538e7161cce92a4dfaa7c5b71fcb6755d96c05 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Dec 2021 15:22:29 -0800 Subject: [PATCH 0046/1103] Bump black from 21.11b1 to 21.12b0 (#1543) Bumps [black](https://github.com/psf/black) from 21.11b1 to 21.12b0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/commits) --- updated-dependencies: - dependency-name: black dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index da8dea49..534265c2 100644 --- a/setup.py +++ b/setup.py @@ -71,7 +71,7 @@ setup( "pytest-xdist>=2.2.1,<2.6", "pytest-asyncio>=0.10,<0.17", "beautifulsoup4>=4.8.1,<4.11.0", - "black==21.11b1", + "black==21.12b0", "pytest-timeout>=1.4.2,<2.1", "trustme>=0.7,<0.10", ], From 4f02c8d4d7f8672cc98e5f8d435b5dc8fb5211dc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 14 Dec 2021 12:28:34 -0800 Subject: [PATCH 0047/1103] Test for JSON in query_string name, refs #621 Plus simplified implementation of test_request_blank_values --- tests/test_internals_request.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/tests/test_internals_request.py b/tests/test_internals_request.py index 01c93eec..44aaa153 100644 --- a/tests/test_internals_request.py +++ b/tests/test_internals_request.py @@ -124,16 +124,18 @@ def test_request_properties(path, query_string, expected_full_path): def test_request_blank_values(): - query_string = "a=b&foo=bar&foo=bar2&baz=" - path_with_query_string = "/?" + query_string - scope = { - "http_version": "1.1", - "method": "POST", - "path": "/", - "raw_path": path_with_query_string.encode("latin-1"), - "query_string": query_string.encode("latin-1"), - "scheme": "http", - "type": "http", - } - request = Request(scope, None) + request = Request.fake("/?a=b&foo=bar&foo=bar2&baz=") assert request.args._data == {"a": ["b"], "foo": ["bar", "bar2"], "baz": [""]} + + +def test_json_in_query_string_name(): + query_string = ( + '?_through.["roadside_attraction_characteristics"%2C"characteristic_id"]=1' + ) + request = Request.fake("/" + query_string) + assert ( + request.args[ + '_through.["roadside_attraction_characteristics","characteristic_id"]' + ] + == "1" + ) From eb53837d2aeacaffd8d37f81a6639139c6a0b4d4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 15 Dec 2021 09:58:01 -0800 Subject: [PATCH 0048/1103] Always show count of distinct facet values, closes #1556 Refs #1423 --- datasette/templates/table.html | 2 +- datasette/views/table.py | 1 - tests/test_table_html.py | 13 ++++--------- 3 files changed, 5 insertions(+), 11 deletions(-) diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 4b9df8e1..f3749b57 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -157,7 +157,7 @@

    {{ facet_info.name }}{% if facet_info.type != "column" %} ({{ facet_info.type }}){% endif %} - {% if show_facet_counts %} {% if facet_info.truncated %}>{% endif %}{{ facet_info.results|length }}{% endif %} + {% if facet_info.truncated %}>{% endif %}{{ facet_info.results|length }} {% if facet_info.hideable %} diff --git a/datasette/views/table.py b/datasette/views/table.py index 59010723..bb5876cc 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -937,7 +937,6 @@ class TableView(RowTableShared): key=lambda f: (len(f["results"]), f["name"]), reverse=True, ), - "show_facet_counts": special_args.get("_facet_size") == "max", "extra_wheres_for_ui": extra_wheres_for_ui, "form_hidden_args": form_hidden_args, "is_sortable": any(c["sortable"] for c in display_columns), diff --git a/tests/test_table_html.py b/tests/test_table_html.py index 2fbb53bd..50d679a0 100644 --- a/tests/test_table_html.py +++ b/tests/test_table_html.py @@ -1015,24 +1015,19 @@ def test_column_metadata(app_client): ) -@pytest.mark.parametrize("use_facet_size_max", (True, False)) -def test_facet_total_shown_if_facet_max_size(use_facet_size_max): +def test_facet_total(): # https://github.com/simonw/datasette/issues/1423 + # https://github.com/simonw/datasette/issues/1556 with make_app_client(settings={"max_returned_rows": 100}) as client: path = "/fixtures/sortable?_facet=content&_facet=pk1" - if use_facet_size_max: - path += "&_facet_size=max" response = client.get(path) assert response.status == 200 fragments = ( - '>100', + '>30', '8', ) for fragment in fragments: - if use_facet_size_max: - assert fragment in response.text - else: - assert fragment not in response.text + assert fragment in response.text def test_sort_rowid_with_next(app_client): From 40e5b0a5b5cbbe7ec9b1a525d61f58227061597e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 16 Dec 2021 10:03:10 -0800 Subject: [PATCH 0049/1103] How to create indexes with sqlite-utils --- docs/facets.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/facets.rst b/docs/facets.rst index 7730e4ac..4bbfa16f 100644 --- a/docs/facets.rst +++ b/docs/facets.rst @@ -133,6 +133,10 @@ The performance of facets can be greatly improved by adding indexes on the colum Enter ".help" for usage hints. sqlite> CREATE INDEX Food_Trucks_state ON Food_Trucks("state"); +Or using the `sqlite-utils `__ command-line utility:: + + $ sqlite-utils create-index mydatabase.db Food_Trucks state + .. _facet_by_json_array: Facet by JSON array From 20a2ed6bec367d2f6759be4a879364a72780b59d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 16 Dec 2021 10:47:22 -0800 Subject: [PATCH 0050/1103] Fixed bug with metadata config of array/date facets, closes #1552 Thanks @davidbgk for spotting the fix for the bug. 
--- datasette/facets.py | 2 +- docs/facets.rst | 20 ++++++++++++++++++-- tests/test_facets.py | 25 ++++++++++++++++++++++++- 3 files changed, 43 insertions(+), 4 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index 8fd2177a..51fccb01 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -30,7 +30,7 @@ def load_facet_configs(request, table_metadata): assert ( len(metadata_config.values()) == 1 ), "Metadata config dicts should be {type: config}" - type, metadata_config = metadata_config.items()[0] + type, metadata_config = list(metadata_config.items())[0] if isinstance(metadata_config, str): metadata_config = {"simple": metadata_config} facet_configs.setdefault(type, []).append( diff --git a/docs/facets.rst b/docs/facets.rst index 4bbfa16f..0228aa84 100644 --- a/docs/facets.rst +++ b/docs/facets.rst @@ -16,7 +16,9 @@ To turn on faceting for specific columns on a Datasette table view, add one or m /dbname/tablename?_facet=state&_facet=city_id -This works for both the HTML interface and the ``.json`` view. When enabled, facets will cause a ``facet_results`` block to be added to the JSON output, looking something like this:: +This works for both the HTML interface and the ``.json`` view. When enabled, facets will cause a ``facet_results`` block to be added to the JSON output, looking something like this: + +.. code-block:: json { "state": { @@ -93,7 +95,9 @@ Facets in metadata.json You can turn facets on by default for specific tables by adding them to a ``"facets"`` key in a Datasette :ref:`metadata` file. -Here's an example that turns on faceting by default for the ``qLegalStatus`` column in the ``Street_Tree_List`` table in the ``sf-trees`` database:: +Here's an example that turns on faceting by default for the ``qLegalStatus`` column in the ``Street_Tree_List`` table in the ``sf-trees`` database: + +.. code-block:: json { "databases": { @@ -109,6 +113,18 @@ Here's an example that turns on faceting by default for the ``qLegalStatus`` col Facets defined in this way will always be shown in the interface and returned in the API, regardless of the ``_facet`` arguments passed to the view. +You can specify :ref:`array ` or :ref:`date ` facets in metadata using JSON objects with a single key of ``array`` or ``date`` and a value specifying the column, like this: + +.. 
code-block:: json + + { + "facets": [ + {"array": "tags"}, + {"date": "created"} + ] + } + + Suggested facets ---------------- diff --git a/tests/test_facets.py b/tests/test_facets.py index 429117cb..5b1aa935 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -3,7 +3,7 @@ from datasette.database import Database from datasette.facets import ColumnFacet, ArrayFacet, DateFacet from datasette.utils.asgi import Request from datasette.utils import detect_json1 -from .fixtures import app_client # noqa +from .fixtures import app_client, make_app_client # noqa import json import pytest @@ -588,3 +588,26 @@ async def test_facet_size(): ) data5 = response5.json() assert len(data5["facet_results"]["city"]["results"]) == 20 + + +def test_other_types_of_facet_in_metadata(): + with make_app_client( + metadata={ + "databases": { + "fixtures": { + "tables": { + "facetable": { + "facets": ["state", {"array": "tags"}, {"date": "created"}] + } + } + } + } + } + ) as client: + response = client.get("/fixtures/facetable") + for fragment in ( + "created (date)\n", + "tags (array)\n", + "state\n", + ): + assert fragment in response.text From 992496f2611a72bd51e94bfd0b17c1d84e732487 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 16 Dec 2021 11:24:54 -0800 Subject: [PATCH 0051/1103] ?_nosuggest=1 parameter for table views, closes #1557 --- datasette/views/table.py | 2 ++ docs/json_api.rst | 3 +++ tests/test_table_api.py | 15 +++++++++++++++ 3 files changed, 20 insertions(+) diff --git a/datasette/views/table.py b/datasette/views/table.py index bb5876cc..f294ffb1 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -388,6 +388,7 @@ class TableView(RowTableShared): nocount = request.args.get("_nocount") nofacet = request.args.get("_nofacet") + nosuggest = request.args.get("_nosuggest") if request.args.get("_shape") in ("array", "object"): nocount = True @@ -846,6 +847,7 @@ class TableView(RowTableShared): and self.ds.setting("allow_facet") and not _next and not nofacet + and not nosuggest ): for facet in facet_instances: suggested_facets.extend(await facet.suggest()) diff --git a/docs/json_api.rst b/docs/json_api.rst index 7d3123b7..bd55c163 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -397,6 +397,9 @@ Special table arguments ``?_nofacet=1`` Disable all facets and facet suggestions for this page, including any defined by :ref:`facets_metadata`. +``?_nosuggest=1`` + Disable facet suggestions for this page. + ``?_nocount=1`` Disable the ``select count(*)`` query used on this page - a count of ``None`` will be returned instead. 
diff --git a/tests/test_table_api.py b/tests/test_table_api.py index a530de44..6a6daed5 100644 --- a/tests/test_table_api.py +++ b/tests/test_table_api.py @@ -915,6 +915,21 @@ def test_nofacet(app_client, nofacet): assert response.json["facet_results"] != {} +@pytest.mark.parametrize("nosuggest", (True, False)) +def test_nosuggest(app_client, nosuggest): + path = "/fixtures/facetable.json?_facet=state" + if nosuggest: + path += "&_nosuggest=1" + response = app_client.get(path) + if nosuggest: + assert response.json["suggested_facets"] == [] + # But facets should still be returned: + assert response.json["facet_results"] != {} + else: + assert response.json["suggested_facets"] != [] + assert response.json["facet_results"] != {} + + @pytest.mark.parametrize("nocount,expected_count", ((True, None), (False, 15))) def test_nocount(app_client, nocount, expected_count): path = "/fixtures/facetable.json" From 95d0dd7a1cf6be6b7da41e1404184217eb93f64a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 16 Dec 2021 12:12:04 -0800 Subject: [PATCH 0052/1103] Fix for colliding facet types bug, closes #625 Refs #830 --- datasette/facets.py | 74 ++++++++++++++++++++++------------------ datasette/views/table.py | 9 ++++- docs/plugin_hooks.rst | 6 ++-- tests/test_facets.py | 30 ++++++++-------- 4 files changed, 67 insertions(+), 52 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index 51fccb01..a1bb4a5f 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -193,7 +193,7 @@ class ColumnFacet(Facet): return suggested_facets async def facet_results(self): - facet_results = {} + facet_results = [] facets_timed_out = [] qs_pairs = self.get_querystring_pairs() @@ -221,16 +221,18 @@ class ColumnFacet(Facet): custom_time_limit=self.ds.setting("facet_time_limit_ms"), ) facet_results_values = [] - facet_results[column] = { - "name": column, - "type": self.type, - "hideable": source != "metadata", - "toggle_url": self.ds.urls.path( - path_with_removed_args(self.request, {"_facet": column}) - ), - "results": facet_results_values, - "truncated": len(facet_rows_results) > facet_size, - } + facet_results.append( + { + "name": column, + "type": self.type, + "hideable": source != "metadata", + "toggle_url": self.ds.urls.path( + path_with_removed_args(self.request, {"_facet": column}) + ), + "results": facet_results_values, + "truncated": len(facet_rows_results) > facet_size, + } + ) facet_rows = facet_rows_results.rows[:facet_size] if self.table: # Attempt to expand foreign keys into labels @@ -352,7 +354,7 @@ class ArrayFacet(Facet): async def facet_results(self): # self.configs should be a plain list of columns - facet_results = {} + facet_results = [] facets_timed_out = [] facet_size = self.get_facet_size() @@ -392,16 +394,20 @@ class ArrayFacet(Facet): custom_time_limit=self.ds.setting("facet_time_limit_ms"), ) facet_results_values = [] - facet_results[column] = { - "name": column, - "type": self.type, - "results": facet_results_values, - "hideable": source != "metadata", - "toggle_url": self.ds.urls.path( - path_with_removed_args(self.request, {"_facet_array": column}) - ), - "truncated": len(facet_rows_results) > facet_size, - } + facet_results.append( + { + "name": column, + "type": self.type, + "results": facet_results_values, + "hideable": source != "metadata", + "toggle_url": self.ds.urls.path( + path_with_removed_args( + self.request, {"_facet_array": column} + ) + ), + "truncated": len(facet_rows_results) > facet_size, + } + ) facet_rows = facet_rows_results.rows[:facet_size] 
pairs = self.get_querystring_pairs() for row in facet_rows: @@ -480,7 +486,7 @@ class DateFacet(Facet): return suggested_facets async def facet_results(self): - facet_results = {} + facet_results = [] facets_timed_out = [] args = dict(self.get_querystring_pairs()) facet_size = self.get_facet_size() @@ -507,16 +513,18 @@ class DateFacet(Facet): custom_time_limit=self.ds.setting("facet_time_limit_ms"), ) facet_results_values = [] - facet_results[column] = { - "name": column, - "type": self.type, - "results": facet_results_values, - "hideable": source != "metadata", - "toggle_url": path_with_removed_args( - self.request, {"_facet_date": column} - ), - "truncated": len(facet_rows_results) > facet_size, - } + facet_results.append( + { + "name": column, + "type": self.type, + "results": facet_results_values, + "hideable": source != "metadata", + "toggle_url": path_with_removed_args( + self.request, {"_facet_date": column} + ), + "truncated": len(facet_rows_results) > facet_size, + } + ) facet_rows = facet_rows_results.rows[:facet_size] for row in facet_rows: selected = str(args.get(f"{column}__date")) == str(row["value"]) diff --git a/datasette/views/table.py b/datasette/views/table.py index f294ffb1..3d0e27cb 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -754,7 +754,14 @@ class TableView(RowTableShared): instance_facet_results, instance_facets_timed_out, ) = await facet.facet_results() - facet_results.update(instance_facet_results) + for facet_info in instance_facet_results: + base_key = facet_info["name"] + key = base_key + i = 1 + while key in facet_results: + i += 1 + key = f"{base_key}_{i}" + facet_results[key] = facet_info facets_timed_out.extend(instance_facets_timed_out) # Figure out columns and rows for the query diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 23f19e38..4a7c36c3 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -668,7 +668,7 @@ Each Facet subclass implements a new type of facet operation. The class should l async def facet_results(self): # This should execute the facet operation and return results, again # using self.sql and self.params as the starting point - facet_results = {} + facet_results = [] facets_timed_out = [] facet_size = self.get_facet_size() # Do some calculations here... @@ -683,11 +683,11 @@ Each Facet subclass implements a new type of facet operation. 
The class should l "toggle_url": self.ds.absolute_url(self.request, toggle_path), "selected": selected, }) - facet_results[column] = { + facet_results.append({ "name": column, "results": facet_results_values, "truncated": len(facet_rows_results) > facet_size, - } + }) except QueryInterrupted: facets_timed_out.append(column) diff --git a/tests/test_facets.py b/tests/test_facets.py index 5b1aa935..a99979d3 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -107,8 +107,8 @@ async def test_column_facet_results(app_client): ) buckets, timed_out = await facet.facet_results() assert [] == timed_out - assert { - "_city_id": { + assert [ + { "name": "_city_id", "type": "column", "hideable": True, @@ -145,7 +145,7 @@ async def test_column_facet_results(app_client): ], "truncated": False, } - } == buckets + ] == buckets @pytest.mark.asyncio @@ -159,8 +159,8 @@ async def test_column_facet_results_column_starts_with_underscore(app_client): ) buckets, timed_out = await facet.facet_results() assert [] == timed_out - assert buckets == { - "_neighborhood": { + assert buckets == [ + { "name": "_neighborhood", "type": "column", "hideable": True, @@ -267,7 +267,7 @@ async def test_column_facet_results_column_starts_with_underscore(app_client): ], "truncated": False, } - } + ] @pytest.mark.asyncio @@ -282,8 +282,8 @@ async def test_column_facet_from_metadata_cannot_be_hidden(app_client): ) buckets, timed_out = await facet.facet_results() assert [] == timed_out - assert { - "_city_id": { + assert [ + { "name": "_city_id", "type": "column", "hideable": False, @@ -320,7 +320,7 @@ async def test_column_facet_from_metadata_cannot_be_hidden(app_client): ], "truncated": False, } - } == buckets + ] == buckets @pytest.mark.asyncio @@ -369,8 +369,8 @@ async def test_array_facet_results(app_client): ) buckets, timed_out = await facet.facet_results() assert [] == timed_out - assert { - "tags": { + assert [ + { "name": "tags", "type": "array", "results": [ @@ -400,7 +400,7 @@ async def test_array_facet_results(app_client): "toggle_url": "/", "truncated": False, } - } == buckets + ] == buckets @pytest.mark.asyncio @@ -471,8 +471,8 @@ async def test_date_facet_results(app_client): ) buckets, timed_out = await facet.facet_results() assert [] == timed_out - assert { - "created": { + assert [ + { "name": "created", "type": "date", "results": [ @@ -509,7 +509,7 @@ async def test_date_facet_results(app_client): "toggle_url": "/", "truncated": False, } - } == buckets + ] == buckets @pytest.mark.asyncio From 0d4145d0f4d8b2a7edc1ba4aac1be56cd536a10a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 16 Dec 2021 12:30:31 -0800 Subject: [PATCH 0053/1103] Additional test for #625 --- tests/test_facets.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/tests/test_facets.py b/tests/test_facets.py index a99979d3..3f292a3b 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -611,3 +611,16 @@ def test_other_types_of_facet_in_metadata(): "state\n", ): assert fragment in response.text + + +def test_conflicting_facet_names_json(app_client): + response = app_client.get( + "/fixtures/facetable.json?_facet=created&_facet_date=created" + "&_facet=tags&_facet_array=tags" + ) + assert set(response.json["facet_results"].keys()) == { + "created", + "tags", + "created_2", + "tags_2", + } From 2c07327d23d9c5cf939ada9ba4091c1b8b2ba42d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 16 Dec 2021 13:43:44 -0800 Subject: [PATCH 0054/1103] Move columns_to_select to TableView class, add lots of comments, 
refs #1518 --- datasette/views/table.py | 117 +++++++++++++++++++++------------------ 1 file changed, 63 insertions(+), 54 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 3d0e27cb..d37a3066 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -64,41 +64,6 @@ class Row: class RowTableShared(DataView): - async def columns_to_select(self, db, table, request): - table_columns = await db.table_columns(table) - pks = await db.primary_keys(table) - columns = list(table_columns) - if "_col" in request.args: - columns = list(pks) - _cols = request.args.getlist("_col") - bad_columns = [column for column in _cols if column not in table_columns] - if bad_columns: - raise DatasetteError( - "_col={} - invalid columns".format(", ".join(bad_columns)), - status=400, - ) - # De-duplicate maintaining order: - columns.extend(dict.fromkeys(_cols)) - if "_nocol" in request.args: - # Return all columns EXCEPT these - bad_columns = [ - column - for column in request.args.getlist("_nocol") - if (column not in table_columns) or (column in pks) - ] - if bad_columns: - raise DatasetteError( - "_nocol={} - invalid columns".format(", ".join(bad_columns)), - status=400, - ) - tmp_columns = [ - column - for column in columns - if column not in request.args.getlist("_nocol") - ] - columns = tmp_columns - return columns - async def sortable_columns_for_table(self, database, table, use_rowid): db = self.ds.databases[database] table_metadata = self.ds.table_metadata(database, table) @@ -321,6 +286,39 @@ class TableView(RowTableShared): write=bool(canned_query.get("write")), ) + async def columns_to_select(self, table_columns, pks, request): + columns = list(table_columns) + if "_col" in request.args: + columns = list(pks) + _cols = request.args.getlist("_col") + bad_columns = [column for column in _cols if column not in table_columns] + if bad_columns: + raise DatasetteError( + "_col={} - invalid columns".format(", ".join(bad_columns)), + status=400, + ) + # De-duplicate maintaining order: + columns.extend(dict.fromkeys(_cols)) + if "_nocol" in request.args: + # Return all columns EXCEPT these + bad_columns = [ + column + for column in request.args.getlist("_nocol") + if (column not in table_columns) or (column in pks) + ] + if bad_columns: + raise DatasetteError( + "_nocol={} - invalid columns".format(", ".join(bad_columns)), + status=400, + ) + tmp_columns = [ + column + for column in columns + if column not in request.args.getlist("_nocol") + ] + columns = tmp_columns + return columns + async def data( self, request, @@ -331,6 +329,7 @@ class TableView(RowTableShared): _next=None, _size=None, ): + # If this is a canned query, not a table, then dispatch to QueryView instead canned_query = await self.ds.get_canned_query(database, table, request.actor) if canned_query: return await QueryView(self.ds).data( @@ -348,9 +347,12 @@ class TableView(RowTableShared): db = self.ds.databases[database] is_view = bool(await db.get_view_definition(table)) table_exists = bool(await db.table_exists(table)) + + # If table or view not found, return 404 if not is_view and not table_exists: raise NotFound(f"Table not found: {table}") + # Ensure user has permission to view this table await self.check_permissions( request, [ @@ -364,15 +366,18 @@ class TableView(RowTableShared): None, "view-table", (database, table), default=True ) + # Introspect columns and primary keys for table pks = await db.primary_keys(table) table_columns = await db.table_columns(table) - specified_columns = await 
self.columns_to_select(db, table, request) + # Take ?_col= and ?_nocol= into account + specified_columns = await self.columns_to_select(table_columns, pks, request) select_specified_columns = ", ".join( escape_sqlite(t) for t in specified_columns ) select_all_columns = ", ".join(escape_sqlite(t) for t in table_columns) + # rowid tables (no specified primary key) need a different SELECT use_rowid = not pks and not is_view if use_rowid: select_specified_columns = f"rowid, {select_specified_columns}" @@ -487,7 +492,7 @@ class TableView(RowTableShared): f'{through_table}.{other_column} = "{value}"' ) - # _search support: + # _search= support: fts_table = special_args.get("_fts_table") fts_table = fts_table or table_metadata.get("fts_table") fts_table = fts_table or await db.fts_table(table) @@ -541,8 +546,6 @@ class TableView(RowTableShared): ) params[f"search_{i}"] = search_text - sortable_columns = set() - sortable_columns = await self.sortable_columns_for_table( database, table, use_rowid ) @@ -581,6 +584,7 @@ class TableView(RowTableShared): count_sql = f"select count(*) {from_sql}" + # Handl pagination driven by ?_next= _next = _next or special_args.get("_next") offset = "" if _next: @@ -679,6 +683,7 @@ class TableView(RowTableShared): else: page_size = self.ds.page_size + # Facets are calculated against SQL without order by or limit sql_no_order_no_limit = ( "select {select_all_columns} from {table_name} {where}".format( select_all_columns=select_all_columns, @@ -686,6 +691,8 @@ class TableView(RowTableShared): where=where_clause, ) ) + + # This is the SQL that populates the main table on the page sql = "select {select_specified_columns} from {table_name} {where}{order_by} limit {page_size}{offset}".format( select_specified_columns=select_specified_columns, table_name=escape_sqlite(table), @@ -698,15 +705,17 @@ class TableView(RowTableShared): if request.args.get("_timelimit"): extra_args["custom_time_limit"] = int(request.args.get("_timelimit")) + # Execute the main query! results = await db.execute(sql, params, truncate=True, **extra_args) - # Number of filtered rows in whole set: + # Calculate the total count for this query filtered_table_rows_count = None if ( not db.is_mutable and self.ds.inspect_data and count_sql == f"select count(*) from {table} " ): + # We can use a previously cached table row count try: filtered_table_rows_count = self.ds.inspect_data[database]["tables"][ table @@ -714,6 +723,7 @@ class TableView(RowTableShared): except KeyError: pass + # Otherwise run a select count(*) ... 
if count_sql and filtered_table_rows_count is None and not nocount: try: count_rows = list(await db.execute(count_sql, from_sql_params)) @@ -721,7 +731,7 @@ class TableView(RowTableShared): except QueryInterrupted: pass - # facets support + # Faceting if not self.ds.setting("allow_facet") and any( arg.startswith("_facet") for arg in request.args ): @@ -764,6 +774,18 @@ class TableView(RowTableShared): facet_results[key] = facet_info facets_timed_out.extend(instance_facets_timed_out) + # Calculate suggested facets + suggested_facets = [] + if ( + self.ds.setting("suggest_facets") + and self.ds.setting("allow_facet") + and not _next + and not nofacet + and not nosuggest + ): + for facet in facet_instances: + suggested_facets.extend(await facet.suggest()) + # Figure out columns and rows for the query columns = [r[0] for r in results.description] rows = list(results.rows) @@ -846,19 +868,6 @@ class TableView(RowTableShared): ) rows = rows[:page_size] - # Detect suggested facets - suggested_facets = [] - - if ( - self.ds.setting("suggest_facets") - and self.ds.setting("allow_facet") - and not _next - and not nofacet - and not nosuggest - ): - for facet in facet_instances: - suggested_facets.extend(await facet.suggest()) - # human_description_en combines filters AND search, if provided human_description_en = filters.human_description_en( extra=extra_human_descriptions From 0663d5525cc41e9260ac7d1f6386d3a6eb5ad2a9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 16 Dec 2021 14:00:29 -0800 Subject: [PATCH 0055/1103] More comments in TableView.data(), refs #1518 --- datasette/views/table.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index d37a3066..da263966 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -437,6 +437,8 @@ class TableView(RowTableShared): table_metadata = self.ds.table_metadata(database, table) units = table_metadata.get("units", {}) + + # Build where clauses from query string arguments filters = Filters(sorted(other_args), units, ureg) where_clauses, params = filters.build_where_clauses(table) @@ -584,7 +586,7 @@ class TableView(RowTableShared): count_sql = f"select count(*) {from_sql}" - # Handl pagination driven by ?_next= + # Handle pagination driven by ?_next= _next = _next or special_args.get("_next") offset = "" if _next: From aa7f0037a46eb76ae6fe9bf2a1f616c58738ecdf Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 17 Dec 2021 11:02:14 -0800 Subject: [PATCH 0056/1103] filters_from_request plugin hook, now used in TableView - New `filters_from_request` plugin hook, closes #473 - Used it to extract the logic from TableView that handles `_search` and `_through` and `_where` - refs #1518 Also needed for this plugin work: https://github.com/simonw/datasette-leaflet-freedraw/issues/7 --- datasette/filters.py | 167 ++++++++++++++++++++++++++++++++++++++- datasette/hookspecs.py | 11 +++ datasette/plugins.py | 1 + datasette/views/table.py | 127 +++++------------------------ docs/plugin_hooks.rst | 53 +++++++++++++ tests/test_filters.py | 87 +++++++++++++++++++- tests/test_plugins.py | 18 +++++ 7 files changed, 353 insertions(+), 111 deletions(-) diff --git a/datasette/filters.py b/datasette/filters.py index cbd94415..5ea3488b 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -1,7 +1,172 @@ +from datasette import hookimpl +from datasette.views.base import DatasetteError +from datasette.utils.asgi import BadRequest import json import numbers +from .utils 
import detect_json1, escape_sqlite, path_with_removed_args -from .utils import detect_json1, escape_sqlite + +@hookimpl(specname="filters_from_request") +def where_filters(request, database, datasette): + # This one deals with ?_where= + async def inner(): + where_clauses = [] + extra_wheres_for_ui = [] + if "_where" in request.args: + if not await datasette.permission_allowed( + request.actor, + "execute-sql", + resource=database, + default=True, + ): + raise DatasetteError("_where= is not allowed", status=403) + else: + where_clauses.extend(request.args.getlist("_where")) + extra_wheres_for_ui = [ + { + "text": text, + "remove_url": path_with_removed_args(request, {"_where": text}), + } + for text in request.args.getlist("_where") + ] + + return FilterArguments( + where_clauses, + extra_context={ + "extra_wheres_for_ui": extra_wheres_for_ui, + }, + ) + + return inner + + +@hookimpl(specname="filters_from_request") +def search_filters(request, database, table, datasette): + # ?_search= and _search_colname= + async def inner(): + where_clauses = [] + params = {} + human_descriptions = [] + extra_context = {} + + # Figure out which fts_table to use + table_metadata = datasette.table_metadata(database, table) + db = datasette.get_database(database) + fts_table = request.args.get("_fts_table") + fts_table = fts_table or table_metadata.get("fts_table") + fts_table = fts_table or await db.fts_table(table) + fts_pk = request.args.get("_fts_pk", table_metadata.get("fts_pk", "rowid")) + search_args = { + key: request.args[key] + for key in request.args + if key.startswith("_search") and key != "_searchmode" + } + search = "" + search_mode_raw = table_metadata.get("searchmode") == "raw" + # Or set search mode from the querystring + qs_searchmode = request.args.get("_searchmode") + if qs_searchmode == "escaped": + search_mode_raw = False + if qs_searchmode == "raw": + search_mode_raw = True + + extra_context["supports_search"] = bool(fts_table) + + if fts_table and search_args: + if "_search" in search_args: + # Simple ?_search=xxx + search = search_args["_search"] + where_clauses.append( + "{fts_pk} in (select rowid from {fts_table} where {fts_table} match {match_clause})".format( + fts_table=escape_sqlite(fts_table), + fts_pk=escape_sqlite(fts_pk), + match_clause=":search" + if search_mode_raw + else "escape_fts(:search)", + ) + ) + human_descriptions.append(f'search matches "{search}"') + params["search"] = search + extra_context["search"] = search + else: + # More complex: search against specific columns + for i, (key, search_text) in enumerate(search_args.items()): + search_col = key.split("_search_", 1)[1] + if search_col not in await db.table_columns(fts_table): + raise BadRequest("Cannot search by that column") + + where_clauses.append( + "rowid in (select rowid from {fts_table} where {search_col} match {match_clause})".format( + fts_table=escape_sqlite(fts_table), + search_col=escape_sqlite(search_col), + match_clause=":search_{}".format(i) + if search_mode_raw + else "escape_fts(:search_{})".format(i), + ) + ) + human_descriptions.append( + f'search column "{search_col}" matches "{search_text}"' + ) + params[f"search_{i}"] = search_text + extra_context["search"] = search_text + + return FilterArguments(where_clauses, params, human_descriptions, extra_context) + + return inner + + +@hookimpl(specname="filters_from_request") +def through_filters(request, database, table, datasette): + # ?_search= and _search_colname= + async def inner(): + where_clauses = [] + params = {} + 
human_descriptions = [] + extra_context = {} + + # Support for ?_through={table, column, value} + if "_through" in request.args: + for through in request.args.getlist("_through"): + through_data = json.loads(through) + through_table = through_data["table"] + other_column = through_data["column"] + value = through_data["value"] + db = datasette.get_database(database) + outgoing_foreign_keys = await db.foreign_keys_for_table(through_table) + try: + fk_to_us = [ + fk for fk in outgoing_foreign_keys if fk["other_table"] == table + ][0] + except IndexError: + raise DatasetteError( + "Invalid _through - could not find corresponding foreign key" + ) + param = f"p{len(params)}" + where_clauses.append( + "{our_pk} in (select {our_column} from {through_table} where {other_column} = :{param})".format( + through_table=escape_sqlite(through_table), + our_pk=escape_sqlite(fk_to_us["other_column"]), + our_column=escape_sqlite(fk_to_us["column"]), + other_column=escape_sqlite(other_column), + param=param, + ) + ) + params[param] = value + human_descriptions.append(f'{through_table}.{other_column} = "{value}"') + + return FilterArguments(where_clauses, params, human_descriptions, extra_context) + + return inner + + +class FilterArguments: + def __init__( + self, where_clauses, params=None, human_descriptions=None, extra_context=None + ): + self.where_clauses = where_clauses + self.params = params or {} + self.human_descriptions = human_descriptions or [] + self.extra_context = extra_context or {} class Filter: diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 1d4e3b27..8f4fecab 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -89,6 +89,17 @@ def actor_from_request(datasette, request): """Return an actor dictionary based on the incoming request""" +@hookspec +def filters_from_request(request, database, table, datasette): + """ + Return datasette.filters.FilterArguments( + where_clauses=[str, str, str], + params={}, + human_descriptions=[str, str, str], + extra_context={} + ) based on the request""" + + @hookspec def permission_allowed(datasette, actor, action, resource): """Check if actor is allowed to perform this action - return True, False or None""" diff --git a/datasette/plugins.py b/datasette/plugins.py index 50791988..76b46a47 100644 --- a/datasette/plugins.py +++ b/datasette/plugins.py @@ -8,6 +8,7 @@ DEFAULT_PLUGINS = ( "datasette.publish.heroku", "datasette.publish.cloudrun", "datasette.facets", + "datasette.filters", "datasette.sql_functions", "datasette.actor_auth_cookie", "datasette.default_permissions", diff --git a/datasette/views/table.py b/datasette/views/table.py index da263966..cfd31bd3 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -442,117 +442,27 @@ class TableView(RowTableShared): filters = Filters(sorted(other_args), units, ureg) where_clauses, params = filters.build_where_clauses(table) - extra_wheres_for_ui = [] - # Add _where= from querystring - if "_where" in request.args: - if not await self.ds.permission_allowed( - request.actor, - "execute-sql", - resource=database, - default=True, - ): - raise DatasetteError("_where= is not allowed", status=403) - else: - where_clauses.extend(request.args.getlist("_where")) - extra_wheres_for_ui = [ - { - "text": text, - "remove_url": path_with_removed_args(request, {"_where": text}), - } - for text in request.args.getlist("_where") - ] - - # Support for ?_through={table, column, value} + # Execute filters_from_request plugin hooks + extra_context_from_filters = {} 
extra_human_descriptions = [] - if "_through" in request.args: - for through in request.args.getlist("_through"): - through_data = json.loads(through) - through_table = through_data["table"] - other_column = through_data["column"] - value = through_data["value"] - outgoing_foreign_keys = await db.foreign_keys_for_table(through_table) - try: - fk_to_us = [ - fk for fk in outgoing_foreign_keys if fk["other_table"] == table - ][0] - except IndexError: - raise DatasetteError( - "Invalid _through - could not find corresponding foreign key" - ) - param = f"p{len(params)}" - where_clauses.append( - "{our_pk} in (select {our_column} from {through_table} where {other_column} = :{param})".format( - through_table=escape_sqlite(through_table), - our_pk=escape_sqlite(fk_to_us["other_column"]), - our_column=escape_sqlite(fk_to_us["column"]), - other_column=escape_sqlite(other_column), - param=param, - ) - ) - params[param] = value - extra_human_descriptions.append( - f'{through_table}.{other_column} = "{value}"' - ) - # _search= support: - fts_table = special_args.get("_fts_table") - fts_table = fts_table or table_metadata.get("fts_table") - fts_table = fts_table or await db.fts_table(table) - fts_pk = special_args.get("_fts_pk", table_metadata.get("fts_pk", "rowid")) - search_args = dict( - pair - for pair in special_args.items() - if pair[0].startswith("_search") and pair[0] != "_searchmode" - ) - search = "" - search_mode_raw = table_metadata.get("searchmode") == "raw" - # Or set it from the querystring - qs_searchmode = special_args.get("_searchmode") - if qs_searchmode == "escaped": - search_mode_raw = False - if qs_searchmode == "raw": - search_mode_raw = True - if fts_table and search_args: - if "_search" in search_args: - # Simple ?_search=xxx - search = search_args["_search"] - where_clauses.append( - "{fts_pk} in (select rowid from {fts_table} where {fts_table} match {match_clause})".format( - fts_table=escape_sqlite(fts_table), - fts_pk=escape_sqlite(fts_pk), - match_clause=":search" - if search_mode_raw - else "escape_fts(:search)", - ) - ) - extra_human_descriptions.append(f'search matches "{search}"') - params["search"] = search - else: - # More complex: search against specific columns - for i, (key, search_text) in enumerate(search_args.items()): - search_col = key.split("_search_", 1)[1] - if search_col not in await db.table_columns(fts_table): - raise BadRequest("Cannot search by that column") - - where_clauses.append( - "rowid in (select rowid from {fts_table} where {search_col} match {match_clause})".format( - fts_table=escape_sqlite(fts_table), - search_col=escape_sqlite(search_col), - match_clause=":search_{}".format(i) - if search_mode_raw - else "escape_fts(:search_{})".format(i), - ) - ) - extra_human_descriptions.append( - f'search column "{search_col}" matches "{search_text}"' - ) - params[f"search_{i}"] = search_text + for hook in pm.hook.filters_from_request( + request=request, + table=table, + database=database, + datasette=self.ds, + ): + filter_arguments = await await_me_maybe(hook) + if filter_arguments: + where_clauses.extend(filter_arguments.where_clauses) + params.update(filter_arguments.params) + extra_human_descriptions.extend(filter_arguments.human_descriptions) + extra_context_from_filters.update(filter_arguments.extra_context) + # Deal with custom sort orders sortable_columns = await self.sortable_columns_for_table( database, table, use_rowid ) - - # Allow for custom sort order sort = special_args.get("_sort") sort_desc = special_args.get("_sort_desc") @@ -942,10 
+852,8 @@ class TableView(RowTableShared): for table_column in table_columns if table_column not in columns ] - return { + d = { "table_actions": table_actions, - "supports_search": bool(fts_table), - "search": search or "", "use_rowid": use_rowid, "filters": filters, "display_columns": display_columns, @@ -957,7 +865,6 @@ class TableView(RowTableShared): key=lambda f: (len(f["results"]), f["name"]), reverse=True, ), - "extra_wheres_for_ui": extra_wheres_for_ui, "form_hidden_args": form_hidden_args, "is_sortable": any(c["sortable"] for c in display_columns), "fix_path": self.ds.urls.path, @@ -977,6 +884,8 @@ class TableView(RowTableShared): "view_definition": await db.get_view_definition(table), "table_definition": await db.get_table_definition(table), } + d.update(extra_context_from_filters) + return d return ( { diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 4a7c36c3..d76f70e5 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -923,6 +923,59 @@ Instead of returning a dictionary, this function can return an awaitable functio Example: `datasette-auth-tokens `_ +.. _plugin_hook_filters_from_request: + +filters_from_request(request, database, table, datasette) +--------------------------------------------------------- + +``request`` - object + The current HTTP :ref:`internals_request`. + +``database`` - string + The name of the database. + +``table`` - string + The name of the table. + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + +This hook runs on the :ref:`table ` page, and can influence the ``where`` clause of the SQL query used to populate that page, based on query string arguments on the incoming request. + +The hook should return an instance of ``datasette.filters.FilterArguments`` which has one required and three optional arguments: + +.. code-block:: python + + return FilterArguments( + where_clauses=["id > :max_id"], + params={"max_id": 5}, + human_descriptions=["max_id is greater than 5"], + extra_context={} + ) + +The arguments to the ``FilterArguments`` class constructor are as follows: + +``where_clauses`` - list of strings, required + A list of SQL fragments that will be inserted into the SQL query, joined by the ``and`` operator. These can include ``:named`` parameters which will be populated using data in ``params``. +``params`` - dictionary, optional + Additional keyword arguments to be used when the query is executed. These should match any ``:arguments`` in the where clauses. +``human_descriptions`` - list of strings, optional + These strings will be included in the human-readable description at the top of the page and the page ````. +``extra_context`` - dictionary, optional + Additional context variables that should be made available to the ``table.html`` template when it is rendered. + +This example plugin causes 0 results to be returned if ``?_nothing=1`` is added to the URL: + +.. code-block:: python + + from datasette import hookimpl + from datasette.filters import FilterArguments + + @hookimpl + def filters_from_request(self, request): + if request.args.get("_nothing"): + return FilterArguments(["1 = 0"], human_descriptions=["NOTHING"]) + .. 
_plugin_hook_permission_allowed: permission_allowed(datasette, actor, action, resource) diff --git a/tests/test_filters.py b/tests/test_filters.py index d05ae80f..2ff57489 100644 --- a/tests/test_filters.py +++ b/tests/test_filters.py @@ -1,4 +1,6 @@ -from datasette.filters import Filters +from datasette.filters import Filters, through_filters, where_filters, search_filters +from datasette.utils.asgi import Request +from .fixtures import app_client import pytest @@ -74,3 +76,86 @@ def test_build_where(args, expected_where, expected_params): sql_bits, actual_params = f.build_where_clauses("table") assert expected_where == sql_bits assert {f"p{i}": param for i, param in enumerate(expected_params)} == actual_params + + +@pytest.mark.asyncio +async def test_through_filters_from_request(app_client): + request = Request.fake( + '/?_through={"table":"roadside_attraction_characteristics","column":"characteristic_id","value":"1"}' + ) + filter_args = await ( + through_filters( + request=request, + datasette=app_client.ds, + table="roadside_attractions", + database="fixtures", + ) + )() + assert filter_args.where_clauses == [ + "pk in (select attraction_id from roadside_attraction_characteristics where characteristic_id = :p0)" + ] + assert filter_args.params == {"p0": "1"} + assert filter_args.human_descriptions == [ + 'roadside_attraction_characteristics.characteristic_id = "1"' + ] + assert filter_args.extra_context == {} + + +@pytest.mark.asyncio +async def test_through_filters_from_request(app_client): + request = Request.fake( + '/?_through={"table":"roadside_attraction_characteristics","column":"characteristic_id","value":"1"}' + ) + filter_args = await ( + through_filters( + request=request, + datasette=app_client.ds, + table="roadside_attractions", + database="fixtures", + ) + )() + assert filter_args.where_clauses == [ + "pk in (select attraction_id from roadside_attraction_characteristics where characteristic_id = :p0)" + ] + assert filter_args.params == {"p0": "1"} + assert filter_args.human_descriptions == [ + 'roadside_attraction_characteristics.characteristic_id = "1"' + ] + assert filter_args.extra_context == {} + + +@pytest.mark.asyncio +async def test_where_filters_from_request(app_client): + request = Request.fake("/?_where=pk+>+3") + filter_args = await ( + where_filters( + request=request, + datasette=app_client.ds, + database="fixtures", + ) + )() + assert filter_args.where_clauses == ["pk > 3"] + assert filter_args.params == {} + assert filter_args.human_descriptions == [] + assert filter_args.extra_context == { + "extra_wheres_for_ui": [{"text": "pk > 3", "remove_url": "/"}] + } + + +@pytest.mark.asyncio +async def test_search_filters_from_request(app_client): + request = Request.fake("/?_search=bobcat") + filter_args = await ( + search_filters( + request=request, + datasette=app_client.ds, + database="fixtures", + table="searchable", + ) + )() + assert filter_args.where_clauses == [ + "rowid in (select rowid from searchable_fts where searchable_fts match escape_fts(:search))" + ] + assert filter_args.params == {"search": "bobcat"} + assert filter_args.human_descriptions == ['search matches "bobcat"'] + assert filter_args.extra_context == {"supports_search": True, "search": "bobcat"} diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 1da28453..656f39e4 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -9,6 +9,7 @@ from .fixtures import ( from click.testing import CliRunner from datasette.app import Datasette from datasette import cli, hookimpl 
+from datasette.filters import FilterArguments from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm from datasette.utils.sqlite import sqlite3 from datasette.utils import CustomRow @@ -977,3 +978,20 @@ def test_hook_register_commands(): } pm.unregister(name="verify") importlib.reload(cli) + + +def test_hook_filters_from_request(app_client): + class ReturnNothingPlugin: + __name__ = "ReturnNothingPlugin" + + @hookimpl + def filters_from_request(self, request): + if request.args.get("_nothing"): + return FilterArguments(["1 = 0"], human_descriptions=["NOTHING"]) + + pm.register(ReturnNothingPlugin(), name="ReturnNothingPlugin") + response = app_client.get("/fixtures/facetable?_nothing=1") + assert "0 rows\n where NOTHING" in response.text + json_response = app_client.get("/fixtures/facetable.json?_nothing=1") + assert json_response.json["rows"] == [] + pm.unregister(name="ReturnNothingPlugin") From 92a5280d2e75c39424a75ad6226fc74400ae984f Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 17 Dec 2021 11:13:02 -0800 Subject: [PATCH 0057/1103] Release 0.60a0 Refs #473, #625, #1544, #1551, #1552, #1556, #1557 --- datasette/version.py | 2 +- docs/changelog.rst | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 9c85b763..2fce006c 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.59.4" +__version__ = "0.60a0" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 9ddc2794..92a9d941 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,19 @@ Changelog ========= +.. _v0_60a0: + +0.60a0 (2021-12-17) +------------------- + +- New plugin hook: :ref:`plugin_hook_filters_from_request`, which runs on the table page and can be used to support new custom query string parameters that modify the SQL query. (:issue:`473`) +- The number of unique values in a facet is now always displayed. Previously it was only displayed if the user specified ``?_facet_size=max``. (:issue:`1556`) +- Fixed bug where ``?_facet_array=tags&_facet=tags`` would only display one of the two selected facets. (:issue:`625`) +- Facets of type ``date`` or ``array`` can now be configured in ``metadata.json``, see :ref:`facets_metadata`. Thanks, David Larlet. (:issue:`1552`) +- New ``?_nosuggest=1`` parameter for table views, which disables facet suggestion. (:issue:`1557`) +- Label columns detected for foreign keys are now case-insensitive, so ``Name`` or ``TITLE`` will be detected in the same way as ``name`` or ``title``. (:issue:`1544`) +- The query string variables exposed by ``request.args`` will now include blank strings for arguments such as ``foo`` in ``?foo=&bar=1`` rather than ignoring those parameters entirely. (:issue:`1551`) + .. _v0_59_4: 0.59.4 (2021-11-29) From f000a7bd75ac512478070f2e2a09c8fb9604c82d Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 17 Dec 2021 12:15:29 -0800 Subject: [PATCH 0058/1103] Use load_extension(?) 
instead of fstring --- datasette/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index 28268e42..715506bd 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -553,7 +553,7 @@ class Datasette: if self.sqlite_extensions: conn.enable_load_extension(True) for extension in self.sqlite_extensions: - conn.execute(f"SELECT load_extension('{extension}')") + conn.execute("SELECT load_extension(?)", [extension]) if self.setting("cache_size_kb"): conn.execute(f"PRAGMA cache_size=-{self.setting('cache_size_kb')}") # pylint: disable=no-member From 35cba9e85a574cebf2986b64107fa84d02bd86ad Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 17 Dec 2021 15:08:28 -0800 Subject: [PATCH 0059/1103] Update janus requirement from <0.8,>=0.6.2 to >=0.6.2,<1.1 (#1562) Updates the requirements on [janus](https://github.com/aio-libs/janus) to permit the latest version. - [Release notes](https://github.com/aio-libs/janus/releases) - [Changelog](https://github.com/aio-libs/janus/blob/master/CHANGES.rst) - [Commits](https://github.com/aio-libs/janus/compare/v0.6.2...v1.0.0) --- updated-dependencies: - dependency-name: janus dependency-type: direct:production ... Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 534265c2..f8cd3e5b 100644 --- a/setup.py +++ b/setup.py @@ -52,7 +52,7 @@ setup( "pluggy>=0.13,<1.1", "uvicorn~=0.11", "aiofiles>=0.4,<0.9", - "janus>=0.6.2,<0.8", + "janus>=0.6.2,<1.1", "asgi-csrf>=0.9", "PyYAML>=5.3,<7.0", "mergedeep>=1.1.1,<1.4.0", From d0f24f9bbc596873f261ed4e0267c4aa5a0bac2b Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 17 Dec 2021 15:28:26 -0800 Subject: [PATCH 0060/1103] Clarifying comment The new filters stuff is a little bit action-at-a-distance --- datasette/views/table.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index cfd31bd3..c3bcf01d 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -442,7 +442,8 @@ class TableView(RowTableShared): filters = Filters(sorted(other_args), units, ureg) where_clauses, params = filters.build_where_clauses(table) - # Execute filters_from_request plugin hooks + # Execute filters_from_request plugin hooks - including the default + # ones that live in datasette/filters.py extra_context_from_filters = {} extra_human_descriptions = [] From 0c91e59d2bbfc08884cfcf5d1b902a2f4968b7ff Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 17 Dec 2021 15:28:44 -0800 Subject: [PATCH 0061/1103] datasette-leaflet-freedraw is an example of filters_from_request --- docs/plugin_hooks.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index d76f70e5..cbaf4c54 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -976,6 +976,8 @@ This example plugin causes 0 results to be returned if ``?_nothing=1`` is added if request.args.get("_nothing"): return FilterArguments(["1 = 0"], human_descriptions=["NOTHING"]) +Example: `datasette-leaflet-freedraw <https://datasette.io/plugins/datasette-leaflet-freedraw>`_ + .. 
_plugin_hook_permission_allowed: permission_allowed(datasette, actor, action, resource) From c35b84a2aabe2f14aeacf6cda4110ae1e94d6059 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 17 Dec 2021 17:54:39 -0800 Subject: [PATCH 0062/1103] Remove undocumented sqlite_functions mechanism, closes #1567 --- datasette/app.py | 3 --- tests/fixtures.py | 1 - tests/plugins/sleep_sql_function.py | 7 +++++++ 3 files changed, 7 insertions(+), 4 deletions(-) create mode 100644 tests/plugins/sleep_sql_function.py diff --git a/datasette/app.py b/datasette/app.py index 715506bd..d94cd5a2 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -259,7 +259,6 @@ class Datasette: with metadata_files[0].open() as fp: metadata = parse_metadata(fp.read()) self._metadata_local = metadata or {} - self.sqlite_functions = [] self.sqlite_extensions = [] for extension in sqlite_extensions or []: # Resolve spatialite, if requested @@ -548,8 +547,6 @@ class Datasette: def _prepare_connection(self, conn, database): conn.row_factory = sqlite3.Row conn.text_factory = lambda x: str(x, "utf-8", "replace") - for name, num_args, func in self.sqlite_functions: - conn.create_function(name, num_args, func) if self.sqlite_extensions: conn.enable_load_extension(True) for extension in self.sqlite_extensions: diff --git a/tests/fixtures.py b/tests/fixtures.py index 37399da0..76f794c6 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -153,7 +153,6 @@ def make_app_client( template_dir=template_dir, crossdb=crossdb, ) - ds.sqlite_functions.append(("sleep", 1, lambda n: time.sleep(float(n)))) yield TestClient(ds) diff --git a/tests/plugins/sleep_sql_function.py b/tests/plugins/sleep_sql_function.py new file mode 100644 index 00000000..d4b32a09 --- /dev/null +++ b/tests/plugins/sleep_sql_function.py @@ -0,0 +1,7 @@ +from datasette import hookimpl +import time + + +@hookimpl +def prepare_connection(conn): + conn.create_function("sleep", 1, lambda n: time.sleep(float(n))) From 83bacfa9452babe7bd66e3579e23af988d00f6ac Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 17 Dec 2021 17:58:39 -0800 Subject: [PATCH 0063/1103] Call _prepare_connection() on write connections, closes #1564 --- datasette/database.py | 1 + tests/test_internals_database.py | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/datasette/database.py b/datasette/database.py index d1217e18..0a0c104a 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -128,6 +128,7 @@ class Database: conn = None try: conn = self.connect(write=True) + self.ds._prepare_connection(conn, self.name) except Exception as e: conn_exception = e while True: diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index a00fe447..609caabf 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -396,6 +396,12 @@ async def test_execute_write_block_false(db): assert "Mystery!" 
== rows.rows[0][0] +@pytest.mark.asyncio +async def test_execute_write_has_correctly_prepared_connection(db): + # The sleep() function is only available if ds._prepare_connection() was called + await db.execute_write("select sleep(0.01)", block=True) + + @pytest.mark.asyncio async def test_execute_write_fn_block_false(db): def write_fn(conn): From 359140cedaf69242d6356479fb8a9d3aa591e618 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 17 Dec 2021 18:09:00 -0800 Subject: [PATCH 0064/1103] Datasette() constructor no longer requires files=, closes #1563 --- datasette/app.py | 4 ++-- tests/test_internals_datasette.py | 17 +++++++++++++++++ 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index d94cd5a2..17fa06a5 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -191,7 +191,7 @@ class Datasette: def __init__( self, - files, + files=None, immutables=None, cache_headers=True, cors=False, @@ -214,7 +214,7 @@ class Datasette: ), "config_dir= should be a pathlib.Path" self.pdb = pdb self._secret = secret or secrets.token_hex(32) - self.files = tuple(files) + tuple(immutables or []) + self.files = tuple(files or []) + tuple(immutables or []) if config_dir: self.files += tuple([str(p) for p in config_dir.glob("*.db")]) if ( diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index 56bc2fb4..692312a7 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -1,6 +1,7 @@ """ Tests for the datasette.app.Datasette class """ +from datasette.app import Datasette from itsdangerous import BadSignature from .fixtures import app_client import pytest @@ -45,3 +46,19 @@ def test_sign_unsign(datasette, value, namespace): ) def test_datasette_setting(datasette, setting, expected): assert datasette.setting(setting) == expected + + +@pytest.mark.asyncio +async def test_datasette_constructor(): + ds = Datasette(memory=True) + databases = (await ds.client.get("/-/databases.json")).json() + assert databases == [ + { + "name": "_memory", + "path": None, + "size": 0, + "is_mutable": False, + "is_memory": True, + "hash": None, + } + ] From 3a0cae4d7f77b5c2a103ea74ca7fa7a0d9ff2e66 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 17 Dec 2021 18:19:09 -0800 Subject: [PATCH 0065/1103] Fix bug introduced by refactor in c35b84a2aabe2f14aeacf6cda4110ae1e94d6059 --- tests/fixtures.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/fixtures.py b/tests/fixtures.py index 76f794c6..26f0cf7b 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -83,6 +83,13 @@ EXPECTED_PLUGINS = [ "version": None, "hooks": ["register_output_renderer"], }, + { + "name": "sleep_sql_function.py", + "static": False, + "templates": False, + "version": None, + "hooks": ["prepare_connection"], + }, { "name": "view_name.py", "static": False, From 7c8f8aa209e4ba7bf83976f8495d67c28fbfca24 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 17 Dec 2021 18:19:36 -0800 Subject: [PATCH 0066/1103] Documentation for Datasette() constructor, closes #1563 --- docs/internals.rst | 21 +++++++++++++++++++++ tests/test_internals_datasette.py | 2 +- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/docs/internals.rst b/docs/internals.rst index 411327eb..c706031b 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -196,6 +196,27 @@ Datasette class This object is an instance of the ``Datasette`` class, passed to many plugin hooks as an 
argument called ``datasette``. +You can create your own instance of this - for example to help write tests for a plugin - like so: + +.. code-block:: python + + from datasette.app import Datasette + + # With no arguments a single in-memory database will be attached + datasette = Datasette() + + # The files= argument can load files from disk + datasette = Datasette(files="/path/to/my-database.db") + + # Pass metadata as a JSON dictionary like this + datasette = Datasette(files="/path/to/my-database.db", metadata={ + "databases": { + "my-database": { + "description": "This is my database" + } + } + }) + .. _datasette_databases: .databases diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index 692312a7..adf84be9 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -50,7 +50,7 @@ def test_datasette_setting(datasette, setting, expected): @pytest.mark.asyncio async def test_datasette_constructor(): - ds = Datasette(memory=True) + ds = Datasette() databases = (await ds.client.get("/-/databases.json")).json() assert databases == [ { From f81d9d0cd9f567e73a1a54be34b653db8ae2c1cf Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 17 Dec 2021 18:42:29 -0800 Subject: [PATCH 0067/1103] Trace write SQL queries in addition to read ones, closes #1568 --- datasette/database.py | 4 +++- tests/test_api.py | 13 +++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/datasette/database.py b/datasette/database.py index 0a0c104a..468e9360 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -99,7 +99,9 @@ class Database: with conn: return conn.execute(sql, params or []) - return await self.execute_write_fn(_inner, block=block) + with trace("sql", database=self.name, sql=sql.strip(), params=params): + results = await self.execute_write_fn(_inner, block=block) + return results async def execute_write_fn(self, fn, block=False): task_id = uuid.uuid5(uuid.NAMESPACE_DNS, "datasette.io") diff --git a/tests/test_api.py b/tests/test_api.py index df9e0fc4..9ad7d569 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -939,6 +939,19 @@ def test_trace(trace_debug): assert isinstance(trace["sql"], str) assert isinstance(trace["params"], (list, dict, None.__class__)) + sqls = [trace["sql"] for trace in trace_info["traces"] if "sql" in trace] + # There should be a mix of different types of SQL statement + expected = ( + "CREATE TABLE ", + "PRAGMA ", + "INSERT OR REPLACE INTO ", + "DELETE FROM ", + "INSERT INTO", + "select ", + ) + for prefix in expected: + assert any(sql.startswith(prefix) for sql in sqls) + @pytest.mark.parametrize( "path,status_code", From 85c22f4fbccb7b35fbc16d3ef035ca71b1a5a20a Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 18 Dec 2021 10:10:37 -0800 Subject: [PATCH 0068/1103] Corrected Datasette(files=) example from #1563 --- docs/internals.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/internals.rst b/docs/internals.rst index c706031b..8788b26a 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -206,10 +206,10 @@ You can create your own instance of this - for example to help write tests for a datasette = Datasette() # The files= argument can load files from disk - datasette = Datasette(files="/path/to/my-database.db") + datasette = Datasette(files=["/path/to/my-database.db"]) # Pass metadata as a JSON dictionary like this - datasette = Datasette(files="/path/to/my-database.db", metadata={ + 
datasette = Datasette(files=["/path/to/my-database.db"], metadata={ "databases": { "my-database": { "description": "This is my database" From 9e094b7c9d575320a2f0c956eb547bfcf6d64d39 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 18 Dec 2021 10:28:25 -0800 Subject: [PATCH 0069/1103] db.execute_write(executescript=True) option, closes #1569 --- datasette/database.py | 8 ++++++-- docs/internals.rst | 6 ++++-- tests/test_internals_database.py | 21 +++++++++++++++++++++ 3 files changed, 31 insertions(+), 4 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 468e9360..350c4e9c 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -94,10 +94,14 @@ class Database: f"file:{self.path}{qs}", uri=True, check_same_thread=False ) - async def execute_write(self, sql, params=None, block=False): + async def execute_write(self, sql, params=None, executescript=False, block=False): + assert not (executescript and params), "Cannot use params with executescript=True" def _inner(conn): with conn: - return conn.execute(sql, params or []) + if executescript: + return conn.executescript(sql) + else: + return conn.execute(sql, params or []) with trace("sql", database=self.name, sql=sql.strip(), params=params): results = await self.execute_write_fn(_inner, block=block) diff --git a/docs/internals.rst b/docs/internals.rst index 8788b26a..d40e679b 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -663,8 +663,8 @@ Example usage: .. _database_execute_write: -await db.execute_write(sql, params=None, block=False) ------------------------------------------------------ +await db.execute_write(sql, params=None, executescript=False, block=False) +-------------------------------------------------------------------------- SQLite only allows one database connection to write at a time. Datasette handles this for you by maintaining a queue of writes to be executed against a given database. Plugins can submit write operations to this queue and they will be executed in the order in which they are received. @@ -676,6 +676,8 @@ By default queries are considered to be "fire and forget" - they will be added t If you pass ``block=True`` this behaviour changes: the method will block until the write operation has completed, and the return value will be the return from calling ``conn.execute(...)`` using the underlying ``sqlite3`` Python library. +If you pass ``executescript=True`` your SQL will be executed using the ``sqlite3`` `conn.executescript() <https://docs.python.org/3/library/sqlite3.html#sqlite3.Cursor.executescript>`__ method. This allows multiple SQL statements to be separated by semicolons, but cannot be used with the ``params=`` option. + .. _database_execute_write_fn: await db.execute_write_fn(fn, block=False) diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 609caabf..0a5c01a3 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -396,6 +396,27 @@ async def test_execute_write_block_false(db): assert "Mystery!" 
== rows.rows[0][0] +@pytest.mark.asyncio +async def test_execute_write_executescript(db): + await db.execute_write( + "create table foo (id integer primary key); create table bar (id integer primary key); ", + executescript=True, + block=True + ) + table_names = await db.table_names() + assert {"foo", "bar"}.issubset(table_names) + + +@pytest.mark.asyncio +async def test_execute_write_executescript_not_allowed_with_params(db): + with pytest.raises(AssertionError): + await db.execute_write( + "update roadside_attractions set name = ? where pk = ?", + ["Mystery!", 1], + executescript=True + ) + + @pytest.mark.asyncio async def test_execute_write_has_correctly_prepared_connection(db): # The sleep() function is only available if ds._prepare_connection() was called From 2e4ba71b53a45a7d2273afd30e400002c7f39755 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 18 Dec 2021 10:30:53 -0800 Subject: [PATCH 0070/1103] Optimize create table calls using executescript=True Refs #1555, #1569 --- datasette/database.py | 5 +++- datasette/utils/internal_db.py | 48 ++++++-------------------------- tests/test_api.py | 4 ++- tests/test_internals_database.py | 4 +-- 4 files changed, 18 insertions(+), 43 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 350c4e9c..f8365f5c 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -95,7 +95,10 @@ class Database: ) async def execute_write(self, sql, params=None, executescript=False, block=False): - assert not (executescript and params), "Cannot use params with executescript=True" + assert not ( + executescript and params + ), "Cannot use params with executescript=True" + def _inner(conn): with conn: if executescript: diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py index 40fe719e..80babff8 100644 --- a/datasette/utils/internal_db.py +++ b/datasette/utils/internal_db.py @@ -2,22 +2,14 @@ import textwrap async def init_internal_db(db): - await db.execute_write( - textwrap.dedent( - """ + create_tables_sql = textwrap.dedent( + """ CREATE TABLE IF NOT EXISTS databases ( database_name TEXT PRIMARY KEY, path TEXT, is_memory INTEGER, schema_version INTEGER - ) - """ - ), - block=True, - ) - await db.execute_write( - textwrap.dedent( - """ + ); CREATE TABLE IF NOT EXISTS tables ( database_name TEXT, table_name TEXT, @@ -25,14 +17,7 @@ async def init_internal_db(db): sql TEXT, PRIMARY KEY (database_name, table_name), FOREIGN KEY (database_name) REFERENCES databases(database_name) - ) - """ - ), - block=True, - ) - await db.execute_write( - textwrap.dedent( - """ + ); CREATE TABLE IF NOT EXISTS columns ( database_name TEXT, table_name TEXT, @@ -46,14 +31,7 @@ async def init_internal_db(db): PRIMARY KEY (database_name, table_name, name), FOREIGN KEY (database_name) REFERENCES databases(database_name), FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) - ) - """ - ), - block=True, - ) - await db.execute_write( - textwrap.dedent( - """ + ); CREATE TABLE IF NOT EXISTS indexes ( database_name TEXT, table_name TEXT, @@ -65,14 +43,7 @@ async def init_internal_db(db): PRIMARY KEY (database_name, table_name, name), FOREIGN KEY (database_name) REFERENCES databases(database_name), FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) - ) - """ - ), - block=True, - ) - await db.execute_write( - textwrap.dedent( - """ + ); CREATE TABLE IF NOT EXISTS foreign_keys ( database_name TEXT, table_name TEXT, @@ -87,11 +58,10 @@ async def 
init_internal_db(db): PRIMARY KEY (database_name, table_name, id, seq), FOREIGN KEY (database_name) REFERENCES databases(database_name), FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) - ) + ); """ - ), - block=True, - ) + ).strip() + await db.execute_write(create_tables_sql, block=True, executescript=True) async def populate_schema_tables(internal_db, db): diff --git a/tests/test_api.py b/tests/test_api.py index 9ad7d569..29c92920 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -950,7 +950,9 @@ def test_trace(trace_debug): "select ", ) for prefix in expected: - assert any(sql.startswith(prefix) for sql in sqls) + assert any( + sql.startswith(prefix) for sql in sqls + ), "No trace beginning with: {}".format(prefix) @pytest.mark.parametrize( diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 0a5c01a3..aa5676e7 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -401,7 +401,7 @@ async def test_execute_write_executescript(db): await db.execute_write( "create table foo (id integer primary key); create table bar (id integer primary key); ", executescript=True, - block=True + block=True, ) table_names = await db.table_names() assert {"foo", "bar"}.issubset(table_names) @@ -413,7 +413,7 @@ async def test_execute_write_executescript_not_allowed_with_params(db): await db.execute_write( "update roadside_attractions set name = ? where pk = ?", ["Mystery!", 1], - executescript=True + executescript=True, ) From 5cadc244895fc47e0534c6e90df976d34293921e Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 18 Dec 2021 10:57:22 -0800 Subject: [PATCH 0071/1103] db.execute_write_script() and db.execute_write_many(), closes #1570 Refs #1555 --- datasette/database.py | 29 ++++++++++++++++++++--------- datasette/utils/internal_db.py | 2 +- docs/internals.rst | 26 +++++++++++++++++++++++--- tests/test_internals_database.py | 24 ++++++++++++++---------- 4 files changed, 58 insertions(+), 23 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index f8365f5c..1de1d5ec 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -94,22 +94,33 @@ class Database: f"file:{self.path}{qs}", uri=True, check_same_thread=False ) - async def execute_write(self, sql, params=None, executescript=False, block=False): - assert not ( - executescript and params - ), "Cannot use params with executescript=True" - + async def execute_write(self, sql, params=None, block=False): def _inner(conn): with conn: - if executescript: - return conn.executescript(sql) - else: - return conn.execute(sql, params or []) + return conn.execute(sql, params or []) with trace("sql", database=self.name, sql=sql.strip(), params=params): results = await self.execute_write_fn(_inner, block=block) return results + async def execute_write_script(self, sql, block=False): + def _inner(conn): + with conn: + return conn.executescript(sql) + + with trace("sql", database=self.name, sql=sql.strip(), executescript=True): + results = await self.execute_write_fn(_inner, block=block) + return results + + async def execute_write_many(self, sql, params_seq, block=False): + def _inner(conn): + with conn: + return conn.executemany(sql, params_seq) + + with trace("sql", database=self.name, sql=sql.strip(), executemany=True): + results = await self.execute_write_fn(_inner, block=block) + return results + async def execute_write_fn(self, fn, block=False): task_id = uuid.uuid5(uuid.NAMESPACE_DNS, "datasette.io") 
if self._write_queue is None: diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py index 80babff8..8a145767 100644 --- a/datasette/utils/internal_db.py +++ b/datasette/utils/internal_db.py @@ -61,7 +61,7 @@ async def init_internal_db(db): ); """ ).strip() - await db.execute_write(create_tables_sql, block=True, executescript=True) + await db.execute_write_script(create_tables_sql, block=True) async def populate_schema_tables(internal_db, db): diff --git a/docs/internals.rst b/docs/internals.rst index d40e679b..bc0174a8 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -663,8 +663,8 @@ Example usage: .. _database_execute_write: -await db.execute_write(sql, params=None, executescript=False, block=False) --------------------------------------------------------------------------- +await db.execute_write(sql, params=None, block=False) +----------------------------------------------------- SQLite only allows one database connection to write at a time. Datasette handles this for you by maintaining a queue of writes to be executed against a given database. Plugins can submit write operations to this queue and they will be executed in the order in which they are received. @@ -676,7 +676,27 @@ By default queries are considered to be "fire and forget" - they will be added t If you pass ``block=True`` this behaviour changes: the method will block until the write operation has completed, and the return value will be the return from calling ``conn.execute(...)`` using the underlying ``sqlite3`` Python library. -If you pass ``executescript=True`` your SQL will be executed using the ``sqlite3`` `conn.executescript() <https://docs.python.org/3/library/sqlite3.html#sqlite3.Cursor.executescript>`__ method. This allows multiple SQL statements to be separated by semicolons, but cannot be used with the ``params=`` option. +.. _database_execute_write_script: + +await db.execute_write_script(sql, block=False) +----------------------------------------------- + +Like ``execute_write()`` but can be used to send multiple SQL statements in a single string separated by semicolons, using the ``sqlite3`` `conn.executescript() <https://docs.python.org/3/library/sqlite3.html#sqlite3.Cursor.executescript>`__ method. + +.. _database_execute_write_many: + +await db.execute_write_many(sql, params_seq, block=False) +--------------------------------------------------------- + +Like ``execute_write()`` but uses the ``sqlite3`` `conn.executemany() <https://docs.python.org/3/library/sqlite3.html#sqlite3.Cursor.executemany>`__ method. This will efficiently execute the same SQL statement against each of the parameters in the ``params_seq`` iterator, for example: + +.. code-block:: python + + await db.execute_write_many( + "insert into characters (id, name) values (?, ?)", + [(1, "Melanie"), (2, "Selma"), (2, "Viktor")], + block=True, + ) .. 
_database_execute_write_fn: diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index aa5676e7..f751bf9a 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -397,10 +397,9 @@ async def test_execute_write_block_false(db): @pytest.mark.asyncio -async def test_execute_write_executescript(db): - await db.execute_write( +async def test_execute_write_script(db): + await db.execute_write_script( "create table foo (id integer primary key); create table bar (id integer primary key); ", - executescript=True, block=True, ) table_names = await db.table_names() @@ -408,13 +407,18 @@ async def test_execute_write_executescript(db): @pytest.mark.asyncio -async def test_execute_write_executescript_not_allowed_with_params(db): - with pytest.raises(AssertionError): - await db.execute_write( - "update roadside_attractions set name = ? where pk = ?", - ["Mystery!", 1], - executescript=True, - ) +async def test_execute_write_many(db): + await db.execute_write_script( + "create table foomany (id integer primary key)", + block=True, + ) + await db.execute_write_many( + "insert into foomany (id) values (?)", + [(1,), (10,), (100,)], + block=True, + ) + result = await db.execute("select * from foomany") + assert [r[0] for r in result.rows] == [1, 10, 100] @pytest.mark.asyncio From d637ed46762fdbbd8e32b86f258cd9a53c1cfdc7 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 18 Dec 2021 11:11:08 -0800 Subject: [PATCH 0072/1103] Use execute_write_many to optimize internal DB, refs #1555, #1570 --- datasette/utils/internal_db.py | 142 +++++++++++++++++++-------------- tests/test_api.py | 2 +- 2 files changed, 81 insertions(+), 63 deletions(-) diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py index 8a145767..95055d8b 100644 --- a/datasette/utils/internal_db.py +++ b/datasette/utils/internal_db.py @@ -70,84 +70,102 @@ async def populate_schema_tables(internal_db, db): "DELETE FROM tables WHERE database_name = ?", [database_name], block=True ) tables = (await db.execute("select * from sqlite_master WHERE type = 'table'")).rows + tables_to_insert = [] + columns_to_delete = [] + columns_to_insert = [] + foreign_keys_to_delete = [] + foreign_keys_to_insert = [] + indexes_to_delete = [] + indexes_to_insert = [] + for table in tables: table_name = table["name"] - await internal_db.execute_write( - """ - INSERT INTO tables (database_name, table_name, rootpage, sql) - values (?, ?, ?, ?) - """, - [database_name, table_name, table["rootpage"], table["sql"]], - block=True, - ) - # And the columns - await internal_db.execute_write( - "DELETE FROM columns WHERE database_name = ? and table_name = ?", - [database_name, table_name], - block=True, + tables_to_insert.append( + (database_name, table_name, table["rootpage"], table["sql"]) ) + columns_to_delete.append((database_name, table_name)) columns = await db.table_column_details(table_name) - for column in columns: - params = { + columns_to_insert.extend( + { **{"database_name": database_name, "table_name": table_name}, **column._asdict(), } - await internal_db.execute_write( - """ - INSERT INTO columns ( - database_name, table_name, cid, name, type, "notnull", default_value, is_pk, hidden - ) VALUES ( - :database_name, :table_name, :cid, :name, :type, :notnull, :default_value, :is_pk, :hidden - ) - """, - params, - block=True, - ) - # And the foreign_keys - await internal_db.execute_write( - "DELETE FROM foreign_keys WHERE database_name = ? 
and table_name = ?", - [database_name, table_name], - block=True, + for column in columns ) + foreign_keys_to_delete.append((database_name, table_name)) foreign_keys = ( await db.execute(f"PRAGMA foreign_key_list([{table_name}])") ).rows - for foreign_key in foreign_keys: - params = { + foreign_keys_to_insert.extend( + { **{"database_name": database_name, "table_name": table_name}, **dict(foreign_key), } - await internal_db.execute_write( - """ - INSERT INTO foreign_keys ( - database_name, table_name, "id", seq, "table", "from", "to", on_update, on_delete, match - ) VALUES ( - :database_name, :table_name, :id, :seq, :table, :from, :to, :on_update, :on_delete, :match - ) - """, - params, - block=True, - ) - # And the indexes - await internal_db.execute_write( - "DELETE FROM indexes WHERE database_name = ? and table_name = ?", - [database_name, table_name], - block=True, + for foreign_key in foreign_keys ) + indexes_to_delete.append((database_name, table_name)) indexes = (await db.execute(f"PRAGMA index_list([{table_name}])")).rows - for index in indexes: - params = { + indexes_to_insert.extend( + { **{"database_name": database_name, "table_name": table_name}, **dict(index), } - await internal_db.execute_write( - """ - INSERT INTO indexes ( - database_name, table_name, seq, name, "unique", origin, partial - ) VALUES ( - :database_name, :table_name, :seq, :name, :unique, :origin, :partial - ) - """, - params, - block=True, - ) + for index in indexes + ) + + await internal_db.execute_write_many( + """ + INSERT INTO tables (database_name, table_name, rootpage, sql) + values (?, ?, ?, ?) + """, + tables_to_insert, + block=True, + ) + await internal_db.execute_write_many( + "DELETE FROM columns WHERE database_name = ? and table_name = ?", + columns_to_delete, + block=True, + ) + await internal_db.execute_write_many( + """ + INSERT INTO columns ( + database_name, table_name, cid, name, type, "notnull", default_value, is_pk, hidden + ) VALUES ( + :database_name, :table_name, :cid, :name, :type, :notnull, :default_value, :is_pk, :hidden + ) + """, + columns_to_insert, + block=True, + ) + await internal_db.execute_write_many( + "DELETE FROM foreign_keys WHERE database_name = ? and table_name = ?", + foreign_keys_to_delete, + block=True, + ) + await internal_db.execute_write_many( + """ + INSERT INTO foreign_keys ( + database_name, table_name, "id", seq, "table", "from", "to", on_update, on_delete, match + ) VALUES ( + :database_name, :table_name, :id, :seq, :table, :from, :to, :on_update, :on_delete, :match + ) + """, + foreign_keys_to_insert, + block=True, + ) + await internal_db.execute_write_many( + "DELETE FROM indexes WHERE database_name = ? 
and table_name = ?", + indexes_to_delete, + block=True, + ) + await internal_db.execute_write_many( + """ + INSERT INTO indexes ( + database_name, table_name, seq, name, "unique", origin, partial + ) VALUES ( + :database_name, :table_name, :seq, :name, :unique, :origin, :partial + ) + """, + indexes_to_insert, + block=True, + ) diff --git a/tests/test_api.py b/tests/test_api.py index 29c92920..f198c1f9 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -937,7 +937,7 @@ def test_trace(trace_debug): assert isinstance(trace["traceback"], list) assert isinstance(trace["database"], str) assert isinstance(trace["sql"], str) - assert isinstance(trace["params"], (list, dict, None.__class__)) + assert isinstance(trace.get("params"), (list, dict, None.__class__)) sqls = [trace["sql"] for trace in trace_info["traces"] if "sql" in trace] # There should be a mix of different types of SQL statement From 97b1723dd09cf000485d4e050efc5bb4f5184a06 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 18 Dec 2021 19:49:11 -0800 Subject: [PATCH 0073/1103] Optimize init_internal_db by running PRAGMA in a single function Refs #1555 --- datasette/utils/internal_db.py | 102 ++++++++++++++++++++------------- 1 file changed, 62 insertions(+), 40 deletions(-) diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py index 95055d8b..58f99825 100644 --- a/datasette/utils/internal_db.py +++ b/datasette/utils/internal_db.py @@ -1,4 +1,5 @@ import textwrap +from datasette.utils import table_column_details async def init_internal_db(db): @@ -70,49 +71,70 @@ async def populate_schema_tables(internal_db, db): "DELETE FROM tables WHERE database_name = ?", [database_name], block=True ) tables = (await db.execute("select * from sqlite_master WHERE type = 'table'")).rows - tables_to_insert = [] - columns_to_delete = [] - columns_to_insert = [] - foreign_keys_to_delete = [] - foreign_keys_to_insert = [] - indexes_to_delete = [] - indexes_to_insert = [] - for table in tables: - table_name = table["name"] - tables_to_insert.append( - (database_name, table_name, table["rootpage"], table["sql"]) - ) - columns_to_delete.append((database_name, table_name)) - columns = await db.table_column_details(table_name) - columns_to_insert.extend( - { - **{"database_name": database_name, "table_name": table_name}, - **column._asdict(), - } - for column in columns - ) - foreign_keys_to_delete.append((database_name, table_name)) - foreign_keys = ( - await db.execute(f"PRAGMA foreign_key_list([{table_name}])") - ).rows - foreign_keys_to_insert.extend( - { - **{"database_name": database_name, "table_name": table_name}, - **dict(foreign_key), - } - for foreign_key in foreign_keys - ) - indexes_to_delete.append((database_name, table_name)) - indexes = (await db.execute(f"PRAGMA index_list([{table_name}])")).rows - indexes_to_insert.extend( - { - **{"database_name": database_name, "table_name": table_name}, - **dict(index), - } - for index in indexes + def collect_info(conn): + tables_to_insert = [] + columns_to_delete = [] + columns_to_insert = [] + foreign_keys_to_delete = [] + foreign_keys_to_insert = [] + indexes_to_delete = [] + indexes_to_insert = [] + + for table in tables: + table_name = table["name"] + tables_to_insert.append( + (database_name, table_name, table["rootpage"], table["sql"]) + ) + columns_to_delete.append((database_name, table_name)) + columns = table_column_details(conn, table_name) + columns_to_insert.extend( + { + **{"database_name": database_name, "table_name": table_name}, + 
**column._asdict(), + } + for column in columns + ) + foreign_keys_to_delete.append((database_name, table_name)) + foreign_keys = conn.execute( + f"PRAGMA foreign_key_list([{table_name}])" + ).fetchall() + foreign_keys_to_insert.extend( + { + **{"database_name": database_name, "table_name": table_name}, + **dict(foreign_key), + } + for foreign_key in foreign_keys + ) + indexes_to_delete.append((database_name, table_name)) + indexes = conn.execute(f"PRAGMA index_list([{table_name}])").fetchall() + indexes_to_insert.extend( + { + **{"database_name": database_name, "table_name": table_name}, + **dict(index), + } + for index in indexes + ) + return ( + tables_to_insert, + columns_to_delete, + columns_to_insert, + foreign_keys_to_delete, + foreign_keys_to_insert, + indexes_to_delete, + indexes_to_insert, ) + ( + tables_to_insert, + columns_to_delete, + columns_to_insert, + foreign_keys_to_delete, + foreign_keys_to_insert, + indexes_to_delete, + indexes_to_insert, + ) = await db.execute_fn(collect_info) + await internal_db.execute_write_many( """ INSERT INTO tables (database_name, table_name, rootpage, sql) From c6ff1f23e6a0b26dde8f5b30be3b868b031b6ecf Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 18 Dec 2021 20:03:21 -0800 Subject: [PATCH 0074/1103] Queries took rather than query took, closes #1572 --- datasette/templates/_footer.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/templates/_footer.html b/datasette/templates/_footer.html index b1380ae9..074270f1 100644 --- a/datasette/templates/_footer.html +++ b/datasette/templates/_footer.html @@ -1,5 +1,5 @@ Powered by <a href="https://datasette.io/" title="Datasette v{{ datasette_version }}">Datasette</a> -{% if query_ms %}· Query took {{ query_ms|round(3) }}ms{% endif %} +{% if query_ms %}· Queries took {{ query_ms|round(3) }}ms{% endif %} {% if metadata %} {% if metadata.license or metadata.license_url %}· Data license: {% if metadata.license_url %} From f65817000fdf87ce8a0c23edc40784ebe33b5842 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 19 Dec 2021 12:30:34 -0800 Subject: [PATCH 0075/1103] Include count in execute_write_many traces, closes #1571 --- datasette/database.py | 19 +++++++++++++++---- datasette/tracer.py | 6 +++--- tests/test_api.py | 14 +++++++++++--- 3 files changed, 29 insertions(+), 10 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 1de1d5ec..0e41ff32 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -114,11 +114,22 @@ class Database: async def execute_write_many(self, sql, params_seq, block=False): def _inner(conn): - with conn: - return conn.executemany(sql, params_seq) + count = 0 - with trace("sql", database=self.name, sql=sql.strip(), executemany=True): - results = await self.execute_write_fn(_inner, block=block) + def count_params(params): + nonlocal count + for param in params: + count += 1 + yield param + + with conn: + return conn.executemany(sql, count_params(params_seq)), count + + with trace( + "sql", database=self.name, sql=sql.strip(), executemany=True + ) as kwargs: + results, count = await self.execute_write_fn(_inner, block=block) + kwargs["count"] = count return results async def execute_write_fn(self, fn, block=False): diff --git a/datasette/tracer.py b/datasette/tracer.py index 62c3c90c..6703f060 100644 --- a/datasette/tracer.py +++ b/datasette/tracer.py @@ -32,14 +32,14 @@ def trace(type, **kwargs): ), f".trace() keyword parameters cannot include 
{TRACE_RESERVED_KEYS}" task_id = get_task_id() if task_id is None: - yield + yield kwargs return tracer = tracers.get(task_id) if tracer is None: - yield + yield kwargs return start = time.perf_counter() - yield + yield kwargs end = time.perf_counter() trace_info = { "type": type, diff --git a/tests/test_api.py b/tests/test_api.py index f198c1f9..8ecaef43 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -928,8 +928,9 @@ def test_trace(trace_debug): assert isinstance(trace_info["sum_trace_duration_ms"], float) assert isinstance(trace_info["num_traces"], int) assert isinstance(trace_info["traces"], list) - assert len(trace_info["traces"]) == trace_info["num_traces"] - for trace in trace_info["traces"]: + traces = trace_info["traces"] + assert len(traces) == trace_info["num_traces"] + for trace in traces: assert isinstance(trace["type"], str) assert isinstance(trace["start"], float) assert isinstance(trace["end"], float) @@ -939,7 +940,7 @@ def test_trace(trace_debug): assert isinstance(trace["sql"], str) assert isinstance(trace.get("params"), (list, dict, None.__class__)) - sqls = [trace["sql"] for trace in trace_info["traces"] if "sql" in trace] + sqls = [trace["sql"] for trace in traces if "sql" in trace] # There should be a mix of different types of SQL statement expected = ( "CREATE TABLE ", @@ -954,6 +955,13 @@ def test_trace(trace_debug): sql.startswith(prefix) for sql in sqls ), "No trace beginning with: {}".format(prefix) + # Should be at least one executescript + assert any(trace for trace in traces if trace.get("executescript")) + # And at least one executemany + execute_manys = [trace for trace in traces if trace.get("executemany")] + assert execute_manys + assert all(isinstance(trace["count"], int) for trace in execute_manys) + @pytest.mark.parametrize( "path,status_code", From 5fac26aa221a111d7633f2dd92014641f7c0ade9 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 19 Dec 2021 12:54:12 -0800 Subject: [PATCH 0076/1103] Another populate_schema_tables optimization, refs #1555 --- datasette/utils/internal_db.py | 41 +++++++++------------------------- tests/test_api.py | 1 - 2 files changed, 11 insertions(+), 31 deletions(-) diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py index 58f99825..ed589a7a 100644 --- a/datasette/utils/internal_db.py +++ b/datasette/utils/internal_db.py @@ -67,18 +67,23 @@ async def init_internal_db(db): async def populate_schema_tables(internal_db, db): database_name = db.name - await internal_db.execute_write( - "DELETE FROM tables WHERE database_name = ?", [database_name], block=True - ) + + def delete_everything(conn): + conn.execute("DELETE FROM tables WHERE database_name = ?", [database_name]) + conn.execute("DELETE FROM columns WHERE database_name = ?", [database_name]) + conn.execute( + "DELETE FROM foreign_keys WHERE database_name = ?", [database_name] + ) + conn.execute("DELETE FROM indexes WHERE database_name = ?", [database_name]) + + await internal_db.execute_write_fn(delete_everything, block=True) + tables = (await db.execute("select * from sqlite_master WHERE type = 'table'")).rows def collect_info(conn): tables_to_insert = [] - columns_to_delete = [] columns_to_insert = [] - foreign_keys_to_delete = [] foreign_keys_to_insert = [] - indexes_to_delete = [] indexes_to_insert = [] for table in tables: @@ -86,7 +91,6 @@ async def populate_schema_tables(internal_db, db): tables_to_insert.append( (database_name, table_name, table["rootpage"], table["sql"]) ) - 
columns_to_delete.append((database_name, table_name)) columns = table_column_details(conn, table_name) columns_to_insert.extend( { @@ -95,7 +99,6 @@ async def populate_schema_tables(internal_db, db): } for column in columns ) - foreign_keys_to_delete.append((database_name, table_name)) foreign_keys = conn.execute( f"PRAGMA foreign_key_list([{table_name}])" ).fetchall() @@ -106,7 +109,6 @@ async def populate_schema_tables(internal_db, db): } for foreign_key in foreign_keys ) - indexes_to_delete.append((database_name, table_name)) indexes = conn.execute(f"PRAGMA index_list([{table_name}])").fetchall() indexes_to_insert.extend( { @@ -117,21 +119,15 @@ async def populate_schema_tables(internal_db, db): ) return ( tables_to_insert, - columns_to_delete, columns_to_insert, - foreign_keys_to_delete, foreign_keys_to_insert, - indexes_to_delete, indexes_to_insert, ) ( tables_to_insert, - columns_to_delete, columns_to_insert, - foreign_keys_to_delete, foreign_keys_to_insert, - indexes_to_delete, indexes_to_insert, ) = await db.execute_fn(collect_info) @@ -143,11 +139,6 @@ async def populate_schema_tables(internal_db, db): tables_to_insert, block=True, ) - await internal_db.execute_write_many( - "DELETE FROM columns WHERE database_name = ? and table_name = ?", - columns_to_delete, - block=True, - ) await internal_db.execute_write_many( """ INSERT INTO columns ( @@ -159,11 +150,6 @@ async def populate_schema_tables(internal_db, db): columns_to_insert, block=True, ) - await internal_db.execute_write_many( - "DELETE FROM foreign_keys WHERE database_name = ? and table_name = ?", - foreign_keys_to_delete, - block=True, - ) await internal_db.execute_write_many( """ INSERT INTO foreign_keys ( @@ -175,11 +161,6 @@ async def populate_schema_tables(internal_db, db): foreign_keys_to_insert, block=True, ) - await internal_db.execute_write_many( - "DELETE FROM indexes WHERE database_name = ? 
and table_name = ?", - indexes_to_delete, - block=True, - ) await internal_db.execute_write_many( """ INSERT INTO indexes ( diff --git a/tests/test_api.py b/tests/test_api.py index 8ecaef43..574ebb41 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -946,7 +946,6 @@ def test_trace(trace_debug): "CREATE TABLE ", "PRAGMA ", "INSERT OR REPLACE INTO ", - "DELETE FROM ", "INSERT INTO", "select ", ) From 4094741c2881c2ada3f3f878b532fdaec7914953 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 19 Dec 2021 13:11:57 -0800 Subject: [PATCH 0077/1103] Fixed bug with custom templates for writable canned queries, closes #1547 --- datasette/views/database.py | 11 +++++------ tests/test_canned_queries.py | 22 +++++++++++++++++++++- 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index f1901b34..aa8d27ec 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -246,6 +246,11 @@ class QueryView(DataView): extra_args["page_size"] = _size templates = [f"query-{to_css_class(database)}.html", "query.html"] + if canned_query: + templates.insert( + 0, + f"query-{to_css_class(database)}-{to_css_class(canned_query)}.html", + ) query_error = None @@ -340,12 +345,6 @@ class QueryView(DataView): results = None columns = [] - if canned_query: - templates.insert( - 0, - f"query-{to_css_class(database)}-{to_css_class(canned_query)}.html", - ) - allow_execute_sql = await self.ds.permission_allowed( request.actor, "execute-sql", database, default=True ) diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index cea81ec7..c5ccaf5c 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -6,9 +6,19 @@ from .fixtures import make_app_client, app_client @pytest.fixture -def canned_write_client(): +def canned_write_client(tmpdir): + template_dir = tmpdir / "canned_write_templates" + template_dir.mkdir() + (template_dir / "query-data-update_name.html").write_text( + """ + {% extends "query.html" %} + {% block content %}!!!CUSTOM_UPDATE_NAME_TEMPLATE!!!{{ super() }}{% endblock %} + """, + "utf-8", + ) with make_app_client( extra_databases={"data.db": "create table names (name text)"}, + template_dir=str(template_dir), metadata={ "databases": { "data": { @@ -344,3 +354,13 @@ def test_magic_parameters_cannot_be_used_in_arbitrary_queries(magic_parameters_c ) assert 400 == response.status assert response.json["error"].startswith("You did not supply a value for binding") + + +def test_canned_write_custom_template(canned_write_client): + response = canned_write_client.get("/data/update_name") + assert response.status == 200 + assert ( + "<!-- Templates considered: *query-data-update_name.html, query-data.html, query.html -->" + in response.text + ) + assert "!!!CUSTOM_UPDATE_NAME_TEMPLATE!!!" 
in response.text From dbaac79946034e0b00714e2da39f934d693883d2 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 19 Dec 2021 14:08:10 -0800 Subject: [PATCH 0078/1103] Release 0.60a1 Refs #1547, #1555, #1562, #1563, #1564, #1567, #1568, #1569, #1570, #1571, #1572 --- datasette/version.py | 2 +- docs/changelog.rst | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 2fce006c..290fbcf3 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.60a0" +__version__ = "0.60a1" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 92a9d941..99d3315e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,18 @@ Changelog ========= +.. _v0_60a1: + +0.60a1 (2021-12-19) +------------------- + +- Database write connections now execute the :ref:`plugin_hook_prepare_connection` plugin hook. (:issue:`1564`) +- The ``Datasette()`` constructor no longer requires the ``files=`` argument, and is now documented at :ref:`internals_datasette`. (:issue:`1563`) +- The tracing feature now traces write queries, not just read queries. (:issue:`1568`) +- Added two methods for writing to the database: :ref:`database_execute_write_script` and :ref:`database_execute_write_many`. (:issue:`1570`) +- Made several performance improvements to the database schema introspection code that runs when Datasette first starts up. (:issue:`1555`) +- Fixed bug where writable canned queries could not be used with custom templates. (:issue:`1547`) + .. _v0_60a0: 0.60a0 (2021-12-17) From f36e010b3b69ada104b79d83c7685caf9359049e Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 19 Dec 2021 17:25:40 -0800 Subject: [PATCH 0079/1103] Upgrade to Pluggy>=1.0, refs #1575 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index f8cd3e5b..1f8855cf 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,7 @@ setup( "hupper~=1.9", "httpx>=0.20", "pint~=0.9", - "pluggy>=0.13,<1.1", + "pluggy>=1.0,<1.1", "uvicorn~=0.11", "aiofiles>=0.4,<0.9", "janus>=0.6.2,<1.1", From 554aae5c51b7ce1b570e0e9bbe45f4cf9200f2bf Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 20 Dec 2021 09:23:05 -0800 Subject: [PATCH 0080/1103] Plausible analytics for the documentation --- docs/_templates/layout.html | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html index b7b6f794..e44f3b56 100644 --- a/docs/_templates/layout.html +++ b/docs/_templates/layout.html @@ -1,5 +1,10 @@ {%- extends "!layout.html" %} +{% block htmltitle %} +{{ super() }} +<script defer data-domain="datasette.io" src="https://plausible.io/js/plausible.js"></script> +{% endblock %} + {% block sidebartitle %} <a href="https://datasette.io/"> From 6b1384b2f529134998fb507e63307609a5b7f5c0 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 20 Dec 2021 15:55:17 -0800 Subject: [PATCH 0081/1103] Track plausible for docs.datasette.io not datasette.io --- docs/_templates/layout.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html index e44f3b56..db16b428 100644 --- a/docs/_templates/layout.html +++ b/docs/_templates/layout.html @@ -2,7 +2,7 @@ {% block htmltitle %} {{ super() }} -<script defer data-domain="datasette.io" 
src="https://plausible.io/js/plausible.js"></script> +<script defer data-domain="docs.datasette.io" src="https://plausible.io/js/plausible.js"></script> {% endblock %} {% block sidebartitle %} From ace86566b28280091b3844cf5fbecd20158e9004 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 22 Dec 2021 12:22:44 -0800 Subject: [PATCH 0082/1103] Remove concept of special_args, re-arrange TableView a bit, refs #1518 --- datasette/views/table.py | 79 +++++++++++++++++++--------------------- 1 file changed, 38 insertions(+), 41 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index c3bcf01d..9808fd24 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -366,6 +366,30 @@ class TableView(RowTableShared): None, "view-table", (database, table), default=True ) + # Handle ?_filter_column and redirect, if present + redirect_params = filters_should_redirect(request.args) + if redirect_params: + return self.redirect( + request, + path_with_added_args(request, redirect_params), + forward_querystring=False, + ) + + # If ?_sort_by_desc=on (from checkbox) redirect to _sort_desc=(_sort) + if "_sort_by_desc" in request.args: + return self.redirect( + request, + path_with_added_args( + request, + { + "_sort_desc": request.args.get("_sort"), + "_sort_by_desc": None, + "_sort": None, + }, + ), + forward_querystring=False, + ) + # Introspect columns and primary keys for table pks = await db.primary_keys(table) table_columns = await db.table_columns(table) @@ -399,47 +423,20 @@ class TableView(RowTableShared): nocount = True nofacet = True - # Special args start with _ and do not contain a __ - # That's so if there is a column that starts with _ - # it can still be queried using ?_col__exact=blah - special_args = {} - other_args = [] - for key in request.args: - if key.startswith("_") and "__" not in key: - special_args[key] = request.args[key] - else: - for v in request.args.getlist(key): - other_args.append((key, v)) - - # Handle ?_filter_column and redirect, if present - redirect_params = filters_should_redirect(special_args) - if redirect_params: - return self.redirect( - request, - path_with_added_args(request, redirect_params), - forward_querystring=False, - ) - - # If ?_sort_by_desc=on (from checkbox) redirect to _sort_desc=(_sort) - if "_sort_by_desc" in special_args: - return self.redirect( - request, - path_with_added_args( - request, - { - "_sort_desc": special_args.get("_sort"), - "_sort_by_desc": None, - "_sort": None, - }, - ), - forward_querystring=False, - ) - table_metadata = self.ds.table_metadata(database, table) units = table_metadata.get("units", {}) + # Arguments that start with _ and don't contain a __ are + # special - things like ?_search= - and should not be + # treated as filters. 
+ filter_args = [] + for key in request.args: + if not (key.startswith("_") and "__" not in key): + for v in request.args.getlist(key): + filter_args.append((key, v)) + # Build where clauses from query string arguments - filters = Filters(sorted(other_args), units, ureg) + filters = Filters(sorted(filter_args), units, ureg) where_clauses, params = filters.build_where_clauses(table) # Execute filters_from_request plugin hooks - including the default @@ -464,8 +461,8 @@ class TableView(RowTableShared): sortable_columns = await self.sortable_columns_for_table( database, table, use_rowid ) - sort = special_args.get("_sort") - sort_desc = special_args.get("_sort_desc") + sort = request.args.get("_sort") + sort_desc = request.args.get("_sort_desc") if not sort and not sort_desc: sort = table_metadata.get("sort") @@ -498,7 +495,7 @@ class TableView(RowTableShared): count_sql = f"select count(*) {from_sql}" # Handle pagination driven by ?_next= - _next = _next or special_args.get("_next") + _next = _next or request.args.get("_next") offset = "" if _next: sort_value = None @@ -708,7 +705,7 @@ class TableView(RowTableShared): expandable_columns = await self.expandable_columns(database, table) columns_to_expand = None try: - all_labels = value_as_boolean(special_args.get("_labels", "")) + all_labels = value_as_boolean(request.args.get("_labels", "")) except ValueError: all_labels = default_labels # Check for explicit _label= From 00a2895cd2dc42c63846216b36b2dc9f41170129 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 23 Dec 2021 11:03:49 -0800 Subject: [PATCH 0083/1103] execute_write defaut is now block=True, closes #1579 --- datasette/database.py | 8 ++++---- docs/internals.rst | 14 +++++++------- tests/test_internals_database.py | 2 +- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 0e41ff32..e908d1ea 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -94,7 +94,7 @@ class Database: f"file:{self.path}{qs}", uri=True, check_same_thread=False ) - async def execute_write(self, sql, params=None, block=False): + async def execute_write(self, sql, params=None, block=True): def _inner(conn): with conn: return conn.execute(sql, params or []) @@ -103,7 +103,7 @@ class Database: results = await self.execute_write_fn(_inner, block=block) return results - async def execute_write_script(self, sql, block=False): + async def execute_write_script(self, sql, block=True): def _inner(conn): with conn: return conn.executescript(sql) @@ -112,7 +112,7 @@ class Database: results = await self.execute_write_fn(_inner, block=block) return results - async def execute_write_many(self, sql, params_seq, block=False): + async def execute_write_many(self, sql, params_seq, block=True): def _inner(conn): count = 0 @@ -132,7 +132,7 @@ class Database: kwargs["count"] = count return results - async def execute_write_fn(self, fn, block=False): + async def execute_write_fn(self, fn, block=True): task_id = uuid.uuid5(uuid.NAMESPACE_DNS, "datasette.io") if self._write_queue is None: self._write_queue = queue.Queue() diff --git a/docs/internals.rst b/docs/internals.rst index bc0174a8..667ac33a 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -663,7 +663,7 @@ Example usage: .. 
_database_execute_write: -await db.execute_write(sql, params=None, block=False) +await db.execute_write(sql, params=None, block=True) ----------------------------------------------------- SQLite only allows one database connection to write at a time. Datasette handles this for you by maintaining a queue of writes to be executed against a given database. Plugins can submit write operations to this queue and they will be executed in the order in which they are received. @@ -672,20 +672,20 @@ This method can be used to queue up a non-SELECT SQL query to be executed agains You can pass additional SQL parameters as a tuple or dictionary. -By default queries are considered to be "fire and forget" - they will be added to the queue and executed in a separate thread while your code can continue to do other things. The method will return a UUID representing the queued task. +The method will block until the operation is completed, and the return value will be the return from calling ``conn.execute(...)`` using the underlying ``sqlite3`` Python library. -If you pass ``block=True`` this behaviour changes: the method will block until the write operation has completed, and the return value will be the return from calling ``conn.execute(...)`` using the underlying ``sqlite3`` Python library. +If you pass ``block=False`` this behaviour changes to "fire and forget" - queries will be added to the write queue and executed in a separate thread while your code can continue to do other things. The method will return a UUID representing the queued task. .. _database_execute_write_script: -await db.execute_write_script(sql, block=False) +await db.execute_write_script(sql, block=True) ----------------------------------------------- Like ``execute_write()`` but can be used to send multiple SQL statements in a single string separated by semicolons, using the ``sqlite3`` `conn.executescript() <https://docs.python.org/3/library/sqlite3.html#sqlite3.Cursor.executescript>`__ method. .. _database_execute_write_many: -await db.execute_write_many(sql, params_seq, block=False) +await db.execute_write_many(sql, params_seq, block=True) --------------------------------------------------------- Like ``execute_write()`` but uses the ``sqlite3`` `conn.executemany() <https://docs.python.org/3/library/sqlite3.html#sqlite3.Cursor.executemany>`__ method. This will efficiently execute the same SQL statement against each of the parameters in the ``params_seq`` iterator, for example: @@ -700,7 +700,7 @@ Like ``execute_write()`` but uses the ``sqlite3`` `conn.executemany() <https://d .. _database_execute_write_fn: -await db.execute_write_fn(fn, block=False) +await db.execute_write_fn(fn, block=True) ------------------------------------------ This method works like ``.execute_write()``, but instead of a SQL statement you give it a callable Python function. This function will be queued up and then called when the write connection is available, passing that connection as the argument to the function. @@ -725,7 +725,7 @@ This method is fire-and-forget, queueing your function to be executed and then a If you pass ``block=True`` your calling code will block until the function has been executed. The return value to the ``await`` will be the return value of your function. -If your function raises an exception and you specified ``block=True``, that exception will be propagated up to the ``await`` line. With ``block=False`` any exceptions will be silently ignored. 
+If your function raises an exception and you specified ``block=True``, that exception will be propagated up to the ``await`` line. With ``block=True`` any exceptions will be silently ignored. Here's an example of ``block=True`` in action: diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index f751bf9a..80f47ab9 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -435,7 +435,7 @@ async def test_execute_write_fn_block_false(db): row = conn.execute("select count(*) from roadside_attractions").fetchone() return row[0] - task_id = await db.execute_write_fn(write_fn) + task_id = await db.execute_write_fn(write_fn, block=False) assert isinstance(task_id, uuid.UUID) From 75153ea9b94d09ec3d61f7c6ebdf378e0c0c7a0b Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 23 Dec 2021 11:16:31 -0800 Subject: [PATCH 0084/1103] Updated db.execute_write_fn() docs for block=True default, refs #1579 --- docs/internals.rst | 35 ++++++++++++----------------------- 1 file changed, 12 insertions(+), 23 deletions(-) diff --git a/docs/internals.rst b/docs/internals.rst index 667ac33a..6a5666fd 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -331,7 +331,7 @@ This will add a mutable database and serve it at ``/my-new-database``. .. code-block:: python db = datasette.add_database(Database(datasette, memory_name="statistics")) - await db.execute_write("CREATE TABLE foo(id integer primary key)", block=True) + await db.execute_write("CREATE TABLE foo(id integer primary key)") .. _datasette_add_memory_database: @@ -694,8 +694,7 @@ Like ``execute_write()`` but uses the ``sqlite3`` `conn.executemany() <https://d await db.execute_write_many( "insert into characters (id, name) values (?, ?)", - [(1, "Melanie"), (2, "Selma"), (2, "Viktor")], - block=True, + [(1, "Melanie"), (2, "Selma"), (2, "Viktor")] ) .. _database_execute_write_fn: @@ -703,9 +702,9 @@ Like ``execute_write()`` but uses the ``sqlite3`` `conn.executemany() <https://d await db.execute_write_fn(fn, block=True) ------------------------------------------ -This method works like ``.execute_write()``, but instead of a SQL statement you give it a callable Python function. This function will be queued up and then called when the write connection is available, passing that connection as the argument to the function. +This method works like ``.execute_write()``, but instead of a SQL statement you give it a callable Python function. Your function will be queued up and then called when the write connection is available, passing that connection as the argument to the function. -The function can then perform multiple actions, safe in the knowledge that it has exclusive access to the single writable connection as long as it is executing. +The function can then perform multiple actions, safe in the knowledge that it has exclusive access to the single writable connection for as long as it is executing. .. warning:: @@ -715,31 +714,21 @@ For example: .. code-block:: python - def my_action(conn): - conn.execute("delete from some_table") - conn.execute("delete from other_table") - - await database.execute_write_fn(my_action) - -This method is fire-and-forget, queueing your function to be executed and then allowing your code after the call to ``.execute_write_fn()`` to continue running while the underlying thread waits for an opportunity to run your function. A UUID representing the queued task will be returned. 
- -If you pass ``block=True`` your calling code will block until the function has been executed. The return value to the ``await`` will be the return value of your function. - -If your function raises an exception and you specified ``block=True``, that exception will be propagated up to the ``await`` line. With ``block=True`` any exceptions will be silently ignored. - -Here's an example of ``block=True`` in action: - -.. code-block:: python - - def my_action(conn): + def delete_and_return_count(conn): conn.execute("delete from some_table where id > 5") return conn.execute("select count(*) from some_table").fetchone()[0] try: - num_rows_left = await database.execute_write_fn(my_action, block=True) + num_rows_left = await database.execute_write_fn(delete_and_return_count) except Exception as e: print("An error occurred:", e) +The value returned from ``await database.execute_write_fn(...)`` will be the return value from your function. + +If your function raises an exception that exception will be propagated up to the ``await`` line. + +If you specify ``block=False`` the method becomes fire-and-forget, queueing your function to be executed and then allowing your code after the call to ``.execute_write_fn()`` to continue running while the underlying thread waits for an opportunity to run your function. A UUID representing the queued task will be returned. Any exceptions in your code will be silently swallowed. + .. _internals_database_introspection: Database introspection From 8c401ee0f054de2f568c3a8302c9223555146407 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 23 Dec 2021 11:18:20 -0800 Subject: [PATCH 0085/1103] Fixed remaining code and docs for new block=True default, closes #1579 --- datasette/app.py | 1 - datasette/utils/internal_db.py | 8 ++------ datasette/views/database.py | 2 +- docs/plugin_hooks.rst | 2 +- tests/test_facets.py | 17 +++++------------ tests/test_internals_database.py | 28 ++++++++++------------------ 6 files changed, 19 insertions(+), 39 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 17fa06a5..bd663509 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -367,7 +367,6 @@ class Datasette: VALUES (?, ?, ?, ?) """, [database_name, str(db.path), db.is_memory, schema_version], - block=True, ) await populate_schema_tables(internal_db, db) diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py index ed589a7a..e4b49e80 100644 --- a/datasette/utils/internal_db.py +++ b/datasette/utils/internal_db.py @@ -62,7 +62,7 @@ async def init_internal_db(db): ); """ ).strip() - await db.execute_write_script(create_tables_sql, block=True) + await db.execute_write_script(create_tables_sql) async def populate_schema_tables(internal_db, db): @@ -76,7 +76,7 @@ async def populate_schema_tables(internal_db, db): ) conn.execute("DELETE FROM indexes WHERE database_name = ?", [database_name]) - await internal_db.execute_write_fn(delete_everything, block=True) + await internal_db.execute_write_fn(delete_everything) tables = (await db.execute("select * from sqlite_master WHERE type = 'table'")).rows @@ -137,7 +137,6 @@ async def populate_schema_tables(internal_db, db): values (?, ?, ?, ?) 
""", tables_to_insert, - block=True, ) await internal_db.execute_write_many( """ @@ -148,7 +147,6 @@ async def populate_schema_tables(internal_db, db): ) """, columns_to_insert, - block=True, ) await internal_db.execute_write_many( """ @@ -159,7 +157,6 @@ async def populate_schema_tables(internal_db, db): ) """, foreign_keys_to_insert, - block=True, ) await internal_db.execute_write_many( """ @@ -170,5 +167,4 @@ async def populate_schema_tables(internal_db, db): ) """, indexes_to_insert, - block=True, ) diff --git a/datasette/views/database.py b/datasette/views/database.py index aa8d27ec..e26706e7 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -279,7 +279,7 @@ class QueryView(DataView): ok = None try: cursor = await self.ds.databases[database].execute_write( - sql, params_for_query, block=True + sql, params_for_query ) message = metadata.get( "on_success_message" diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index cbaf4c54..88e1def0 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -770,7 +770,7 @@ Or you can return an async function which will be awaited on startup. Use this o if "my_table" not in await db.table_names(): await db.execute_write(""" create table my_table (mycol text) - """, block=True) + """) return inner Potential use-cases: diff --git a/tests/test_facets.py b/tests/test_facets.py index 3f292a3b..c28dc43c 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -408,16 +408,14 @@ async def test_array_facet_results(app_client): async def test_array_facet_handle_duplicate_tags(): ds = Datasette([], memory=True) db = ds.add_database(Database(ds, memory_name="test_array_facet")) - await db.execute_write("create table otters(name text, tags text)", block=True) + await db.execute_write("create table otters(name text, tags text)") for name, tags in ( ("Charles", ["friendly", "cunning", "friendly"]), ("Shaun", ["cunning", "empathetic", "friendly"]), ("Tracy", ["empathetic", "eager"]), ): await db.execute_write( - "insert into otters (name, tags) values (?, ?)", - [name, json.dumps(tags)], - block=True, + "insert into otters (name, tags) values (?, ?)", [name, json.dumps(tags)] ) response = await ds.client.get("/test_array_facet/otters.json?_facet_array=tags") @@ -516,11 +514,9 @@ async def test_date_facet_results(app_client): async def test_json_array_with_blanks_and_nulls(): ds = Datasette([], memory=True) db = ds.add_database(Database(ds, memory_name="test_json_array")) - await db.execute_write("create table foo(json_column text)", block=True) + await db.execute_write("create table foo(json_column text)") for value in ('["a", "b", "c"]', '["a", "b"]', "", None): - await db.execute_write( - "insert into foo (json_column) values (?)", [value], block=True - ) + await db.execute_write("insert into foo (json_column) values (?)", [value]) response = await ds.client.get("/test_json_array/foo.json") data = response.json() assert data["suggested_facets"] == [ @@ -536,15 +532,12 @@ async def test_json_array_with_blanks_and_nulls(): async def test_facet_size(): ds = Datasette([], memory=True, settings={"max_returned_rows": 50}) db = ds.add_database(Database(ds, memory_name="test_facet_size")) - await db.execute_write( - "create table neighbourhoods(city text, neighbourhood text)", block=True - ) + await db.execute_write("create table neighbourhoods(city text, neighbourhood text)") for i in range(1, 51): for j in range(1, 4): await db.execute_write( "insert into neighbourhoods (city, neighbourhood) values (?, ?)", ["City 
{}".format(i), "Neighbourhood {}".format(j)], - block=True, ) response = await ds.client.get("/test_facet_size/neighbourhoods.json") data = response.json() diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 80f47ab9..bcecb486 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -377,9 +377,7 @@ async def test_table_names(db): @pytest.mark.asyncio async def test_execute_write_block_true(db): await db.execute_write( - "update roadside_attractions set name = ? where pk = ?", - ["Mystery!", 1], - block=True, + "update roadside_attractions set name = ? where pk = ?", ["Mystery!", 1] ) rows = await db.execute("select name from roadside_attractions where pk = 1") assert "Mystery!" == rows.rows[0][0] @@ -399,8 +397,7 @@ async def test_execute_write_block_false(db): @pytest.mark.asyncio async def test_execute_write_script(db): await db.execute_write_script( - "create table foo (id integer primary key); create table bar (id integer primary key); ", - block=True, + "create table foo (id integer primary key); create table bar (id integer primary key);" ) table_names = await db.table_names() assert {"foo", "bar"}.issubset(table_names) @@ -408,14 +405,9 @@ async def test_execute_write_script(db): @pytest.mark.asyncio async def test_execute_write_many(db): - await db.execute_write_script( - "create table foomany (id integer primary key)", - block=True, - ) + await db.execute_write_script("create table foomany (id integer primary key)") await db.execute_write_many( - "insert into foomany (id) values (?)", - [(1,), (10,), (100,)], - block=True, + "insert into foomany (id) values (?)", [(1,), (10,), (100,)] ) result = await db.execute("select * from foomany") assert [r[0] for r in result.rows] == [1, 10, 100] @@ -424,7 +416,7 @@ async def test_execute_write_many(db): @pytest.mark.asyncio async def test_execute_write_has_correctly_prepared_connection(db): # The sleep() function is only available if ds._prepare_connection() was called - await db.execute_write("select sleep(0.01)", block=True) + await db.execute_write("select sleep(0.01)") @pytest.mark.asyncio @@ -447,7 +439,7 @@ async def test_execute_write_fn_block_true(db): row = conn.execute("select count(*) from roadside_attractions").fetchone() return row[0] - new_count = await db.execute_write_fn(write_fn, block=True) + new_count = await db.execute_write_fn(write_fn) assert 3 == new_count @@ -457,7 +449,7 @@ async def test_execute_write_fn_exception(db): assert False with pytest.raises(AssertionError): - await db.execute_write_fn(write_fn, block=True) + await db.execute_write_fn(write_fn) @pytest.mark.asyncio @@ -472,7 +464,7 @@ async def test_execute_write_fn_connection_exception(tmpdir, app_client): assert False with pytest.raises(AssertionError): - await db.execute_write_fn(write_fn, block=True) + await db.execute_write_fn(write_fn) app_client.ds.remove_database("immutable-db") @@ -513,7 +505,7 @@ async def test_database_memory_name(app_client): table_names = await db.table_names() assert table_names == [] # Now create a table in foo - await foo1.execute_write("create table foo (t text)", block=True) + await foo1.execute_write("create table foo (t text)") assert await foo1.table_names() == ["foo"] assert await foo2.table_names() == ["foo"] assert await bar1.table_names() == [] @@ -528,5 +520,5 @@ async def test_in_memory_databases_forbid_writes(app_client): await db.execute("create table foo (t text)") assert await db.table_names() == [] # Using db.execute_write() should work: - 
await db.execute_write("create table foo (t text)", block=True) + await db.execute_write("create table foo (t text)") assert await db.table_names() == ["foo"] From 63537dd3decfd59636f4a42b336785ef49f0cec0 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 12:34:55 -0800 Subject: [PATCH 0086/1103] Allow 'explain query plan' with more whitespace, closes #1588 --- datasette/utils/__init__.py | 8 ++++---- tests/test_utils.py | 2 ++ 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index c339113c..bc3155a5 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -162,11 +162,11 @@ class InvalidSql(Exception): allowed_sql_res = [ re.compile(r"^select\b"), - re.compile(r"^explain select\b"), - re.compile(r"^explain query plan select\b"), + re.compile(r"^explain\s+select\b"), + re.compile(r"^explain\s+query\s+plan\s+select\b"), re.compile(r"^with\b"), - re.compile(r"^explain with\b"), - re.compile(r"^explain query plan with\b"), + re.compile(r"^explain\s+with\b"), + re.compile(r"^explain\s+query\s+plan\s+with\b"), ] allowed_pragmas = ( "database_list", diff --git a/tests/test_utils.py b/tests/test_utils.py index e1b61072..e7d67045 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -157,7 +157,9 @@ def test_validate_sql_select_bad(bad_sql): "select '# Hello there\n\n* This is a list\n* of items\n--\n[And a link](https://github.com/simonw/datasette-render-markdown).'\nas demo_markdown", "select 1 + 1", "explain select 1 + 1", + "explain\nselect 1 + 1", "explain query plan select 1 + 1", + "explain query plan\nselect 1 + 1", "SELECT\nblah FROM foo", "WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;", "explain WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;", From 4b23f01f3e668c8f2a2f1a294be49f49b4073969 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 13:35:54 -0800 Subject: [PATCH 0087/1103] CLI reference docs, maintained by cog - refs #1594 --- .github/workflows/test.yml | 3 + docs/cli-reference.rst | 355 +++++++++++++++++++++++++++++++++++++ docs/index.rst | 1 + setup.py | 1 + 4 files changed, 360 insertions(+) create mode 100644 docs/cli-reference.rst diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 0b3635fe..704931a6 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -28,3 +28,6 @@ jobs: run: | pytest -n auto -m "not serial" pytest -m "serial" + - name: Check if cog needs to be run + run: | + cog --check docs/*.rst diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst new file mode 100644 index 00000000..adf89633 --- /dev/null +++ b/docs/cli-reference.rst @@ -0,0 +1,355 @@ +.. _cli_reference: + +=============== + CLI reference +=============== + +This page lists the ``--help`` for every ``datasette`` CLI command. + +.. 
[[[cog + from datasette import cli + from click.testing import CliRunner + import textwrap + commands = [ + ["--help"], + ["serve", "--help"], + ["serve", "--help-settings"], + ["plugins", "--help"], + ["publish", "--help"], + ["publish", "cloudrun", "--help"], + ["publish", "heroku", "--help"], + ["package", "--help"], + ["inspect", "--help"], + ["install", "--help"], + ["uninstall", "--help"], + ] + for command in commands: + title = "datasette " + " ".join(command) + cog.out(title + "\n") + cog.out(("=" * len(title)) + "\n\n") + cog.out("::\n\n") + result = CliRunner().invoke(cli.cli, command) + output = result.output.replace("Usage: cli ", "Usage: datasette ") + cog.out(textwrap.indent(output, ' ')) + cog.out("\n\n") +.. ]]] +datasette --help +================ + +:: + + Usage: datasette [OPTIONS] COMMAND [ARGS]... + + Datasette is an open source multi-tool for exploring and publishing data + + About Datasette: https://datasette.io/ + Full documentation: https://docs.datasette.io/ + + Options: + --version Show the version and exit. + --help Show this message and exit. + + Commands: + serve* Serve up specified SQLite database files with a web UI + inspect + install Install Python packages - e.g. + package Package specified SQLite files into a new datasette Docker... + plugins List currently available plugins + publish Publish specified SQLite database files to the internet along... + uninstall Uninstall Python packages (e.g. + + +datasette serve --help +====================== + +:: + + Usage: datasette serve [OPTIONS] [FILES]... + + Serve up specified SQLite database files with a web UI + + Options: + -i, --immutable PATH Database files to open in immutable mode + -h, --host TEXT Host for server. Defaults to 127.0.0.1 which means + only connections from the local machine will be + allowed. Use 0.0.0.0 to listen to all IPs and allow + access from other machines. + -p, --port INTEGER RANGE Port for server, defaults to 8001. Use -p 0 to + automatically assign an available port. + [0<=x<=65535] + --uds TEXT Bind to a Unix domain socket + --reload Automatically reload if code or metadata change + detected - useful for development + --cors Enable CORS by serving Access-Control-Allow-Origin: + * + --load-extension TEXT Path to a SQLite extension to load + --inspect-file TEXT Path to JSON file created using "datasette inspect" + -m, --metadata FILENAME Path to JSON/YAML file containing license/source + metadata + --template-dir DIRECTORY Path to directory containing custom templates + --plugins-dir DIRECTORY Path to directory containing custom plugins + --static MOUNT:DIRECTORY Serve static files from this directory at /MOUNT/... + --memory Make /_memory database available + --config CONFIG Deprecated: set config option using + configname:value. Use --setting instead. + --setting SETTING... Setting, see docs.datasette.io/en/stable/config.html + --secret TEXT Secret used for signing secure values, such as + signed cookies + --root Output URL that sets a cookie authenticating the + root user + --get TEXT Run an HTTP GET request against this path, print + results and exit + --version-note TEXT Additional note to show on /-/versions + --help-settings Show available settings + --pdb Launch debugger on any errors + -o, --open Open Datasette in your web browser + --create Create database files if they do not exist + --crossdb Enable cross-database joins using the /_memory + database + --ssl-keyfile TEXT SSL key file + --ssl-certfile TEXT SSL certificate file + --help Show this message and exit. 
+ + +datasette serve --help-settings +=============================== + +:: + + Settings: + default_page_size Default page size for the table view + (default=100) + max_returned_rows Maximum rows that can be returned from a table or + custom query (default=1000) + num_sql_threads Number of threads in the thread pool for + executing SQLite queries (default=3) + sql_time_limit_ms Time limit for a SQL query in milliseconds + (default=1000) + default_facet_size Number of values to return for requested facets + (default=30) + facet_time_limit_ms Time limit for calculating a requested facet + (default=200) + facet_suggest_time_limit_ms Time limit for calculating a suggested facet + (default=50) + hash_urls Include DB file contents hash in URLs, for far- + future caching (default=False) + allow_facet Allow users to specify columns to facet using + ?_facet= parameter (default=True) + allow_download Allow users to download the original SQLite + database files (default=True) + suggest_facets Calculate and display suggested facets + (default=True) + default_cache_ttl Default HTTP cache TTL (used in Cache-Control: + max-age= header) (default=5) + default_cache_ttl_hashed Default HTTP cache TTL for hashed URL pages + (default=31536000) + cache_size_kb SQLite cache size in KB (0 == use SQLite default) + (default=0) + allow_csv_stream Allow .csv?_stream=1 to download all rows + (ignoring max_returned_rows) (default=True) + max_csv_mb Maximum size allowed for CSV export in MB - set 0 + to disable this limit (default=100) + truncate_cells_html Truncate cells longer than this in HTML table + view - set 0 to disable (default=2048) + force_https_urls Force URLs in API output to always use https:// + protocol (default=False) + template_debug Allow display of template debug information with + ?_context=1 (default=False) + trace_debug Allow display of SQL trace debug information with + ?_trace=1 (default=False) + base_url Datasette URLs should use this base path + (default=/) + + + +datasette plugins --help +======================== + +:: + + Usage: datasette plugins [OPTIONS] + + List currently available plugins + + Options: + --all Include built-in default plugins + --plugins-dir DIRECTORY Path to directory containing custom plugins + --help Show this message and exit. + + +datasette publish --help +======================== + +:: + + Usage: datasette publish [OPTIONS] COMMAND [ARGS]... + + Publish specified SQLite database files to the internet along with a + Datasette-powered interface and API + + Options: + --help Show this message and exit. + + Commands: + cloudrun + heroku + + +datasette publish cloudrun --help +================================= + +:: + + Usage: datasette publish cloudrun [OPTIONS] [FILES]... + + Options: + -m, --metadata FILENAME Path to JSON/YAML file containing metadata to + publish + --extra-options TEXT Extra options to pass to datasette serve + --branch TEXT Install datasette from a GitHub branch e.g. + main + --template-dir DIRECTORY Path to directory containing custom templates + --plugins-dir DIRECTORY Path to directory containing custom plugins + --static MOUNT:DIRECTORY Serve static files from this directory at + /MOUNT/... + --install TEXT Additional packages (e.g. plugins) to install + --plugin-secret <TEXT TEXT TEXT>... + Secrets to pass to plugins, e.g. 
--plugin- + secret datasette-auth-github client_id xxx + --version-note TEXT Additional note to show on /-/versions + --secret TEXT Secret used for signing secure values, such as + signed cookies + --title TEXT Title for metadata + --license TEXT License label for metadata + --license_url TEXT License URL for metadata + --source TEXT Source label for metadata + --source_url TEXT Source URL for metadata + --about TEXT About label for metadata + --about_url TEXT About URL for metadata + -n, --name TEXT Application name to use when building + --service TEXT Cloud Run service to deploy (or over-write) + --spatialite Enable SpatialLite extension + --show-files Output the generated Dockerfile and + metadata.json + --memory TEXT Memory to allocate in Cloud Run, e.g. 1Gi + --cpu [1|2|4] Number of vCPUs to allocate in Cloud Run + --apt-get-install TEXT Additional packages to apt-get install + --help Show this message and exit. + + +datasette publish heroku --help +=============================== + +:: + + Usage: datasette publish heroku [OPTIONS] [FILES]... + + Options: + -m, --metadata FILENAME Path to JSON/YAML file containing metadata to + publish + --extra-options TEXT Extra options to pass to datasette serve + --branch TEXT Install datasette from a GitHub branch e.g. + main + --template-dir DIRECTORY Path to directory containing custom templates + --plugins-dir DIRECTORY Path to directory containing custom plugins + --static MOUNT:DIRECTORY Serve static files from this directory at + /MOUNT/... + --install TEXT Additional packages (e.g. plugins) to install + --plugin-secret <TEXT TEXT TEXT>... + Secrets to pass to plugins, e.g. --plugin- + secret datasette-auth-github client_id xxx + --version-note TEXT Additional note to show on /-/versions + --secret TEXT Secret used for signing secure values, such as + signed cookies + --title TEXT Title for metadata + --license TEXT License label for metadata + --license_url TEXT License URL for metadata + --source TEXT Source label for metadata + --source_url TEXT Source URL for metadata + --about TEXT About label for metadata + --about_url TEXT About URL for metadata + -n, --name TEXT Application name to use when deploying + --tar TEXT --tar option to pass to Heroku, e.g. + --tar=/usr/local/bin/gtar + --help Show this message and exit. + + +datasette package --help +======================== + +:: + + Usage: datasette package [OPTIONS] FILES... + + Package specified SQLite files into a new datasette Docker container + + Options: + -t, --tag TEXT Name for the resulting Docker container, can + optionally use name:tag format + -m, --metadata FILENAME Path to JSON/YAML file containing metadata to + publish + --extra-options TEXT Extra options to pass to datasette serve + --branch TEXT Install datasette from a GitHub branch e.g. main + --template-dir DIRECTORY Path to directory containing custom templates + --plugins-dir DIRECTORY Path to directory containing custom plugins + --static MOUNT:DIRECTORY Serve static files from this directory at /MOUNT/... + --install TEXT Additional packages (e.g. 
plugins) to install + --spatialite Enable SpatialLite extension + --version-note TEXT Additional note to show on /-/versions + --secret TEXT Secret used for signing secure values, such as + signed cookies + -p, --port INTEGER RANGE Port to run the server on, defaults to 8001 + [1<=x<=65535] + --title TEXT Title for metadata + --license TEXT License label for metadata + --license_url TEXT License URL for metadata + --source TEXT Source label for metadata + --source_url TEXT Source URL for metadata + --about TEXT About label for metadata + --about_url TEXT About URL for metadata + --help Show this message and exit. + + +datasette inspect --help +======================== + +:: + + Usage: datasette inspect [OPTIONS] [FILES]... + + Options: + --inspect-file TEXT + --load-extension TEXT Path to a SQLite extension to load + --help Show this message and exit. + + +datasette install --help +======================== + +:: + + Usage: datasette install [OPTIONS] PACKAGES... + + Install Python packages - e.g. Datasette plugins - into the same environment + as Datasette + + Options: + -U, --upgrade Upgrade packages to latest version + --help Show this message and exit. + + +datasette uninstall --help +========================== + +:: + + Usage: datasette uninstall [OPTIONS] PACKAGES... + + Uninstall Python packages (e.g. plugins) from the Datasette environment + + Options: + -y, --yes Don't ask for confirmation + --help Show this message and exit. + + +.. [[[end]]] diff --git a/docs/index.rst b/docs/index.rst index eafc5bdb..36e42848 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -60,4 +60,5 @@ Contents testing_plugins internals contributing + cli-reference changelog diff --git a/setup.py b/setup.py index 1f8855cf..e9ef082a 100644 --- a/setup.py +++ b/setup.py @@ -74,6 +74,7 @@ setup( "black==21.12b0", "pytest-timeout>=1.4.2,<2.1", "trustme>=0.7,<0.10", + "cogapp>=3.3.0", ], "rich": ["rich"], }, From 5698e2af0182677c0f1f7f5b3bc61415bb6c93f8 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 13:55:13 -0800 Subject: [PATCH 0088/1103] Promote Datasette Desktop in installation docs, closes #1466 --- docs/installation.rst | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/docs/installation.rst b/docs/installation.rst index 723f1e3f..ac3dcca2 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -7,8 +7,7 @@ .. note:: If you just want to try Datasette out you don't need to install anything: see :ref:`getting_started_glitch` -There are two main options for installing Datasette. You can install it directly -on to your machine, or you can install it using Docker. +There are two main options for installing Datasette. You can install it directly on to your machine, or you can install it using Docker. If you want to start making contributions to the Datasette project by installing a copy that lets you directly modify the code, take a look at our guide to :ref:`devenvironment`. @@ -20,6 +19,13 @@ If you want to start making contributions to the Datasette project by installing Basic installation ================== +.. _installation_datasette_desktop: + +Datasette Desktop for Mac +------------------------- + +`Datasette Desktop <https://datasette.io/desktop>`__ is a packaged Mac application which bundles Datasette together with Python and allows you to install and run Datasette directly on your laptop. This is the best option for local installation if you are not comfortable using the command line. + .. 
_installation_homebrew: Using Homebrew From 3658e57ac2de0bec0ea5de36e3ddd09784ecf65e Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 14:20:07 -0800 Subject: [PATCH 0089/1103] Fixed bug with table title element, closes #1560 --- datasette/templates/table.html | 3 +-- tests/test_table_html.py | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/datasette/templates/table.html b/datasette/templates/table.html index f3749b57..e3c6f38d 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -1,7 +1,6 @@ {% extends "base.html" %} -{% block title %}{{ database }}: {{ table }}: {% if filtered_table_rows_count or filtered_table_rows_count == 0 %}{{ "{:,}".format(filtered_table_rows_count) }} row{% if filtered_table_rows_count == 1 %}{% else %}s{% endif %}{% endif %} - {% if human_description_en %}where {{ human_description_en }}{% endif %}{% endblock %} +{% block title %}{{ database }}: {{ table }}: {% if filtered_table_rows_count or filtered_table_rows_count == 0 %}{{ "{:,}".format(filtered_table_rows_count) }} row{% if filtered_table_rows_count == 1 %}{% else %}s{% endif %}{% endif %}{% if human_description_en %} {{ human_description_en }}{% endif %}{% endblock %} {% block extra_head %} {{ super() }} diff --git a/tests/test_table_html.py b/tests/test_table_html.py index 50d679a0..f68e05a5 100644 --- a/tests/test_table_html.py +++ b/tests/test_table_html.py @@ -1038,3 +1038,22 @@ def test_sort_rowid_with_next(app_client): def assert_querystring_equal(expected, actual): assert sorted(expected.split("&")) == sorted(actual.split("&")) + + +@pytest.mark.parametrize( + "path,expected", + ( + ( + "/fixtures/facetable", + "fixtures: facetable: 15 rows", + ), + ( + "/fixtures/facetable?on_earth__exact=1", + "fixtures: facetable: 14 rows where on_earth = 1", + ), + ), +) +def test_table_page_title(app_client, path, expected): + response = app_client.get(path) + title = Soup(response.text, "html.parser").find("title").text + assert title == expected From 88bc2ceae1151ec859f477d527b40f7e36012017 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 16:07:30 -0800 Subject: [PATCH 0090/1103] --help summary for 'datasette inspect', closes #1597 --- datasette/cli.py | 6 ++++++ docs/cli-reference.rst | 7 ++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/datasette/cli.py b/datasette/cli.py index 22e2338a..12d3d728 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -136,6 +136,12 @@ def cli(): @click.option("--inspect-file", default="-") @sqlite_extensions def inspect(files, inspect_file, sqlite_extensions): + """ + Generate JSON summary of provided database files + + This can then be passed to "datasette --inspect-file" to speed up count + operations against immutable database files. + """ app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions) loop = asyncio.get_event_loop() inspect_data = loop.run_until_complete(inspect_(files, sqlite_extensions)) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index adf89633..7ac6debe 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -51,7 +51,7 @@ datasette --help Commands: serve* Serve up specified SQLite database files with a web UI - inspect + inspect Generate JSON summary of provided database files install Install Python packages - e.g. package Package specified SQLite files into a new datasette Docker... 
plugins List currently available plugins @@ -317,6 +317,11 @@ datasette inspect --help Usage: datasette inspect [OPTIONS] [FILES]... + Generate JSON summary of provided database files + + This can then be passed to "datasette --inspect-file" to speed up count + operations against immutable database files. + Options: --inspect-file TEXT --load-extension TEXT Path to a SQLite extension to load From 8f5c44a1669427019b288f5b5debec67a90f908b Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 16:09:38 -0800 Subject: [PATCH 0091/1103] Better --help summaries for install and uninstall --- datasette/cli.py | 4 ++-- docs/cli-reference.rst | 9 ++++----- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 12d3d728..18054448 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -307,7 +307,7 @@ def package( "-U", "--upgrade", is_flag=True, help="Upgrade packages to latest version" ) def install(packages, upgrade): - """Install Python packages - e.g. Datasette plugins - into the same environment as Datasette""" + """Install plugins and packages from PyPI into the same environment as Datasette""" args = ["pip", "install"] if upgrade: args += ["--upgrade"] @@ -320,7 +320,7 @@ def install(packages, upgrade): @click.argument("packages", nargs=-1, required=True) @click.option("-y", "--yes", is_flag=True, help="Don't ask for confirmation") def uninstall(packages, yes): - """Uninstall Python packages (e.g. plugins) from the Datasette environment""" + """Uninstall plugins and Python packages from the Datasette environment""" sys.argv = ["pip", "uninstall"] + list(packages) + (["-y"] if yes else []) run_module("pip", run_name="__main__") diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 7ac6debe..37a30606 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -52,11 +52,11 @@ datasette --help Commands: serve* Serve up specified SQLite database files with a web UI inspect Generate JSON summary of provided database files - install Install Python packages - e.g. + install Install plugins and packages from PyPI into the same... package Package specified SQLite files into a new datasette Docker... plugins List currently available plugins publish Publish specified SQLite database files to the internet along... - uninstall Uninstall Python packages (e.g. + uninstall Uninstall plugins and Python packages from the Datasette... datasette serve --help @@ -335,8 +335,7 @@ datasette install --help Usage: datasette install [OPTIONS] PACKAGES... - Install Python packages - e.g. Datasette plugins - into the same environment - as Datasette + Install plugins and packages from PyPI into the same environment as Datasette Options: -U, --upgrade Upgrade packages to latest version @@ -350,7 +349,7 @@ datasette uninstall --help Usage: datasette uninstall [OPTIONS] PACKAGES... - Uninstall Python packages (e.g. 
plugins) from the Datasette environment + Uninstall plugins and Python packages from the Datasette environment Options: -y, --yes Don't ask for confirmation From 8cf4b77a92f5170c33e0079f2bab48a4f36b6934 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 16:10:52 -0800 Subject: [PATCH 0092/1103] Better copy for 'datasette plugins --help' --- datasette/cli.py | 2 +- docs/cli-reference.rst | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 18054448..af09453f 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -190,7 +190,7 @@ pm.hook.publish_subcommand(publish=publish) help="Path to directory containing custom plugins", ) def plugins(all, plugins_dir): - """List currently available plugins""" + """List currently installed plugins""" app = Datasette([], plugins_dir=plugins_dir) click.echo(json.dumps(app._plugins(all=all), indent=4)) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 37a30606..69a7cdd9 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -54,7 +54,7 @@ datasette --help inspect Generate JSON summary of provided database files install Install plugins and packages from PyPI into the same... package Package specified SQLite files into a new datasette Docker... - plugins List currently available plugins + plugins List currently installed plugins publish Publish specified SQLite database files to the internet along... uninstall Uninstall plugins and Python packages from the Datasette... @@ -169,7 +169,7 @@ datasette plugins --help Usage: datasette plugins [OPTIONS] - List currently available plugins + List currently installed plugins Options: --all Include built-in default plugins From 515f8d38ebae203efc15ca79a8b42848276b35e5 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 16:12:54 -0800 Subject: [PATCH 0093/1103] Help summaries for publish cloudrun/heroku --- datasette/publish/cloudrun.py | 1 + datasette/publish/heroku.py | 1 + docs/cli-reference.rst | 8 ++++++-- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 1fabcafd..a1e2f580 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -74,6 +74,7 @@ def publish_subcommand(publish): cpu, apt_get_extras, ): + "Publish databases to Datasette running on Cloud Run" fail_if_publish_binary_not_installed( "gcloud", "Google Cloud", "https://cloud.google.com/sdk/" ) diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index 2ebbd4bd..171252ce 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -50,6 +50,7 @@ def publish_subcommand(publish): name, tar, ): + "Publish databases to Datasette running on Heroku" fail_if_publish_binary_not_installed( "heroku", "Heroku", "https://cli.heroku.com" ) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 69a7cdd9..f3279f6d 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -191,8 +191,8 @@ datasette publish --help --help Show this message and exit. Commands: - cloudrun - heroku + cloudrun Publish databases to Datasette running on Cloud Run + heroku Publish databases to Datasette running on Heroku datasette publish cloudrun --help @@ -202,6 +202,8 @@ datasette publish cloudrun --help Usage: datasette publish cloudrun [OPTIONS] [FILES]... 
+ Publish databases to Datasette running on Cloud Run + Options: -m, --metadata FILENAME Path to JSON/YAML file containing metadata to publish @@ -244,6 +246,8 @@ datasette publish heroku --help Usage: datasette publish heroku [OPTIONS] [FILES]... + Publish databases to Datasette running on Heroku + Options: -m, --metadata FILENAME Path to JSON/YAML file containing metadata to publish From 3a0f7d64889cd79d5d00d3251e8ab77ff52de60d Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 16:27:21 -0800 Subject: [PATCH 0094/1103] Fixed hidden form fields bug #1527 --- datasette/views/table.py | 2 +- tests/test_table_html.py | 22 +++++++++++++++++----- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 9808fd24..77fb2850 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -815,7 +815,7 @@ class TableView(RowTableShared): if ( key.startswith("_") and key not in ("_sort", "_search", "_next") - and not key.endswith("__exact") + and "__" not in key ): for value in request.args.getlist(key): form_hidden_args.append((key, value)) diff --git a/tests/test_table_html.py b/tests/test_table_html.py index f68e05a5..021268c3 100644 --- a/tests/test_table_html.py +++ b/tests/test_table_html.py @@ -143,17 +143,29 @@ def test_existing_filter_redirects(app_client): assert "?" not in response.headers["Location"] -def test_exact_parameter_results_in_correct_hidden_fields(app_client): +@pytest.mark.parametrize( + "qs,expected_hidden", + ( + # Things that should be reflected in hidden form fields: + ("_facet=_neighborhood", {"_facet": "_neighborhood"}), + ("_where=1+=+1&_col=_city_id", {"_where": "1 = 1", "_col": "_city_id"}), + # Things that should NOT be reflected in hidden form fields: + ( + "_facet=_neighborhood&_neighborhood__exact=Downtown", + {"_facet": "_neighborhood"}, + ), + ("_facet=_neighborhood&_city_id__gt=1", {"_facet": "_neighborhood"}), + ), +) +def test_reflected_hidden_form_fields(app_client, qs, expected_hidden): # https://github.com/simonw/datasette/issues/1527 - response = app_client.get( - "/fixtures/facetable?_facet=_neighborhood&_neighborhood__exact=Downtown" - ) + response = app_client.get("/fixtures/facetable?{}".format(qs)) # In this case we should NOT have a hidden _neighborhood__exact=Downtown field form = Soup(response.body, "html.parser").find("form") hidden_inputs = { input["name"]: input["value"] for input in form.select("input[type=hidden]") } - assert hidden_inputs == {"_facet": "_neighborhood"} + assert hidden_inputs == expected_hidden def test_empty_search_parameter_gets_removed(app_client): From 76d66d5b2bf10249c0beaac0999b93ac8d757f48 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 16:30:00 -0800 Subject: [PATCH 0095/1103] Tweak order of documentation contents --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 36e42848..acca943f 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -59,6 +59,6 @@ Contents plugin_hooks testing_plugins internals - contributing cli-reference + contributing changelog From 714b4df1b1b2aeab8cde3a309627c42355439dda Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 16:36:28 -0800 Subject: [PATCH 0096/1103] Fixed reStructuredText warning, refs #1594 --- docs/cli-reference.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst 
index f3279f6d..f529782d 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -23,6 +23,7 @@ This page lists the ``--help`` for every ``datasette`` CLI command. ["install", "--help"], ["uninstall", "--help"], ] + cog.out("\n") for command in commands: title = "datasette " + " ".join(command) cog.out(title + "\n") @@ -33,6 +34,7 @@ This page lists the ``--help`` for every ``datasette`` CLI command. cog.out(textwrap.indent(output, ' ')) cog.out("\n\n") .. ]]] + datasette --help ================ From ab7d6a7179e9939c2764989e508bfa8eba31f3b1 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 16:38:16 -0800 Subject: [PATCH 0097/1103] Updated settings help URL to avoid redirect --- datasette/cli.py | 2 +- datasette/views/base.py | 2 +- docs/cli-reference.rst | 3 ++- tests/test_html.py | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index af09453f..9d1b5ee5 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -400,7 +400,7 @@ def uninstall(packages, yes): "--setting", "settings", type=Setting(), - help="Setting, see docs.datasette.io/en/stable/config.html", + help="Setting, see docs.datasette.io/en/stable/settings.html", multiple=True, ) @click.option( diff --git a/datasette/views/base.py b/datasette/views/base.py index a9953dfd..b1cacb3f 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -493,7 +493,7 @@ class DataView(BaseView): raise DatasetteError( """ SQL query took too long. The time limit is controlled by the - <a href="https://docs.datasette.io/en/stable/config.html#sql-time-limit-ms">sql_time_limit_ms</a> + <a href="https://docs.datasette.io/en/stable/settings.html#sql-time-limit-ms">sql_time_limit_ms</a> configuration option. """, title="SQL Interrupted", diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index f529782d..74adb92d 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -94,7 +94,8 @@ datasette serve --help --memory Make /_memory database available --config CONFIG Deprecated: set config option using configname:value. Use --setting instead. - --setting SETTING... Setting, see docs.datasette.io/en/stable/config.html + --setting SETTING... 
Setting, see + docs.datasette.io/en/stable/settings.html --secret TEXT Secret used for signing secure values, such as signed cookies --root Output URL that sets a cookie authenticating the diff --git a/tests/test_html.py b/tests/test_html.py index bfe5c8f9..3f0a88a9 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -155,7 +155,7 @@ def test_sql_time_limit(app_client_shorter_time_limit): response = app_client_shorter_time_limit.get("/fixtures?sql=select+sleep(0.5)") assert 400 == response.status expected_html_fragment = """ - <a href="https://docs.datasette.io/en/stable/config.html#sql-time-limit-ms">sql_time_limit_ms</a> + <a href="https://docs.datasette.io/en/stable/settings.html#sql-time-limit-ms">sql_time_limit_ms</a> """.strip() assert expected_html_fragment in response.text From 10659c3f1f82458adfa65c61f4dcc8d9af5467ed Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 16:38:53 -0800 Subject: [PATCH 0098/1103] datasette-debug-asgi plugin to help investigate #1590 --- demos/apache-proxy/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile index ab7b9d16..6c921963 100644 --- a/demos/apache-proxy/Dockerfile +++ b/demos/apache-proxy/Dockerfile @@ -27,7 +27,7 @@ ARG DATASETTE_REF RUN pip install \ https://github.com/simonw/datasette/archive/${DATASETTE_REF}.zip \ - datasette-redirect-to-https + datasette-redirect-to-https datasette-debug-asgi ADD 000-default.conf /etc/apache2/sites-enabled/000-default.conf From 3664ddd400062123e99500d28b160c7944408c1a Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 16:47:53 -0800 Subject: [PATCH 0099/1103] Replace update-docs-help.py with cog, closes #1598 --- docs/cli-reference.rst | 24 ++++++++++++++ docs/datasette-package-help.txt | 29 ---------------- docs/datasette-publish-cloudrun-help.txt | 33 ------------------- docs/datasette-publish-heroku-help.txt | 29 ---------------- docs/datasette-serve-help.txt | 42 ------------------------ docs/getting_started.rst | 9 +---- docs/publish.rst | 6 ++-- tests/test_docs.py | 20 ----------- update-docs-help.py | 25 -------------- 9 files changed, 28 insertions(+), 189 deletions(-) delete mode 100644 docs/datasette-package-help.txt delete mode 100644 docs/datasette-publish-cloudrun-help.txt delete mode 100644 docs/datasette-publish-heroku-help.txt delete mode 100644 docs/datasette-serve-help.txt delete mode 100644 update-docs-help.py diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 74adb92d..155a005d 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -26,6 +26,8 @@ This page lists the ``--help`` for every ``datasette`` CLI command. cog.out("\n") for command in commands: title = "datasette " + " ".join(command) + ref = "_cli_help_" + ("_".join(command).replace("-", "_")) + cog.out(".. {}:\n\n".format(ref)) cog.out(title + "\n") cog.out(("=" * len(title)) + "\n\n") cog.out("::\n\n") @@ -35,6 +37,8 @@ This page lists the ``--help`` for every ``datasette`` CLI command. cog.out("\n\n") .. ]]] +.. _cli_help___help: + datasette --help ================ @@ -61,6 +65,8 @@ datasette --help uninstall Uninstall plugins and Python packages from the Datasette... +.. _cli_help_serve___help: + datasette serve --help ====================== @@ -114,6 +120,8 @@ datasette serve --help --help Show this message and exit. +.. 
_cli_help_serve___help_settings: + datasette serve --help-settings =============================== @@ -165,6 +173,8 @@ datasette serve --help-settings +.. _cli_help_plugins___help: + datasette plugins --help ======================== @@ -180,6 +190,8 @@ datasette plugins --help --help Show this message and exit. +.. _cli_help_publish___help: + datasette publish --help ======================== @@ -198,6 +210,8 @@ datasette publish --help heroku Publish databases to Datasette running on Heroku +.. _cli_help_publish_cloudrun___help: + datasette publish cloudrun --help ================================= @@ -242,6 +256,8 @@ datasette publish cloudrun --help --help Show this message and exit. +.. _cli_help_publish_heroku___help: + datasette publish heroku --help =============================== @@ -281,6 +297,8 @@ datasette publish heroku --help --help Show this message and exit. +.. _cli_help_package___help: + datasette package --help ======================== @@ -317,6 +335,8 @@ datasette package --help --help Show this message and exit. +.. _cli_help_inspect___help: + datasette inspect --help ======================== @@ -335,6 +355,8 @@ datasette inspect --help --help Show this message and exit. +.. _cli_help_install___help: + datasette install --help ======================== @@ -349,6 +371,8 @@ datasette install --help --help Show this message and exit. +.. _cli_help_uninstall___help: + datasette uninstall --help ========================== diff --git a/docs/datasette-package-help.txt b/docs/datasette-package-help.txt deleted file mode 100644 index 7cfac1b1..00000000 --- a/docs/datasette-package-help.txt +++ /dev/null @@ -1,29 +0,0 @@ -$ datasette package --help - -Usage: datasette package [OPTIONS] FILES... - - Package specified SQLite files into a new datasette Docker container - -Options: - -t, --tag TEXT Name for the resulting Docker container, can optionally use - name:tag format - -m, --metadata FILENAME Path to JSON/YAML file containing metadata to publish - --extra-options TEXT Extra options to pass to datasette serve - --branch TEXT Install datasette from a GitHub branch e.g. main - --template-dir DIRECTORY Path to directory containing custom templates - --plugins-dir DIRECTORY Path to directory containing custom plugins - --static MOUNT:DIRECTORY Serve static files from this directory at /MOUNT/... - --install TEXT Additional packages (e.g. plugins) to install - --spatialite Enable SpatialLite extension - --version-note TEXT Additional note to show on /-/versions - --secret TEXT Secret used for signing secure values, such as signed - cookies - -p, --port INTEGER RANGE Port to run the server on, defaults to 8001 [1<=x<=65535] - --title TEXT Title for metadata - --license TEXT License label for metadata - --license_url TEXT License URL for metadata - --source TEXT Source label for metadata - --source_url TEXT Source URL for metadata - --about TEXT About label for metadata - --about_url TEXT About URL for metadata - --help Show this message and exit. diff --git a/docs/datasette-publish-cloudrun-help.txt b/docs/datasette-publish-cloudrun-help.txt deleted file mode 100644 index 34481b40..00000000 --- a/docs/datasette-publish-cloudrun-help.txt +++ /dev/null @@ -1,33 +0,0 @@ -$ datasette publish cloudrun --help - -Usage: datasette publish cloudrun [OPTIONS] [FILES]... - -Options: - -m, --metadata FILENAME Path to JSON/YAML file containing metadata to publish - --extra-options TEXT Extra options to pass to datasette serve - --branch TEXT Install datasette from a GitHub branch e.g. 
main - --template-dir DIRECTORY Path to directory containing custom templates - --plugins-dir DIRECTORY Path to directory containing custom plugins - --static MOUNT:DIRECTORY Serve static files from this directory at /MOUNT/... - --install TEXT Additional packages (e.g. plugins) to install - --plugin-secret <TEXT TEXT TEXT>... - Secrets to pass to plugins, e.g. --plugin-secret - datasette-auth-github client_id xxx - --version-note TEXT Additional note to show on /-/versions - --secret TEXT Secret used for signing secure values, such as signed - cookies - --title TEXT Title for metadata - --license TEXT License label for metadata - --license_url TEXT License URL for metadata - --source TEXT Source label for metadata - --source_url TEXT Source URL for metadata - --about TEXT About label for metadata - --about_url TEXT About URL for metadata - -n, --name TEXT Application name to use when building - --service TEXT Cloud Run service to deploy (or over-write) - --spatialite Enable SpatialLite extension - --show-files Output the generated Dockerfile and metadata.json - --memory TEXT Memory to allocate in Cloud Run, e.g. 1Gi - --cpu [1|2|4] Number of vCPUs to allocate in Cloud Run - --apt-get-install TEXT Additional packages to apt-get install - --help Show this message and exit. diff --git a/docs/datasette-publish-heroku-help.txt b/docs/datasette-publish-heroku-help.txt deleted file mode 100644 index 9d633e95..00000000 --- a/docs/datasette-publish-heroku-help.txt +++ /dev/null @@ -1,29 +0,0 @@ -$ datasette publish heroku --help - -Usage: datasette publish heroku [OPTIONS] [FILES]... - -Options: - -m, --metadata FILENAME Path to JSON/YAML file containing metadata to publish - --extra-options TEXT Extra options to pass to datasette serve - --branch TEXT Install datasette from a GitHub branch e.g. main - --template-dir DIRECTORY Path to directory containing custom templates - --plugins-dir DIRECTORY Path to directory containing custom plugins - --static MOUNT:DIRECTORY Serve static files from this directory at /MOUNT/... - --install TEXT Additional packages (e.g. plugins) to install - --plugin-secret <TEXT TEXT TEXT>... - Secrets to pass to plugins, e.g. --plugin-secret - datasette-auth-github client_id xxx - --version-note TEXT Additional note to show on /-/versions - --secret TEXT Secret used for signing secure values, such as signed - cookies - --title TEXT Title for metadata - --license TEXT License label for metadata - --license_url TEXT License URL for metadata - --source TEXT Source label for metadata - --source_url TEXT Source URL for metadata - --about TEXT About label for metadata - --about_url TEXT About URL for metadata - -n, --name TEXT Application name to use when deploying - --tar TEXT --tar option to pass to Heroku, e.g. - --tar=/usr/local/bin/gtar - --help Show this message and exit. diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt deleted file mode 100644 index 2911977a..00000000 --- a/docs/datasette-serve-help.txt +++ /dev/null @@ -1,42 +0,0 @@ -$ datasette serve --help - -Usage: datasette serve [OPTIONS] [FILES]... - - Serve up specified SQLite database files with a web UI - -Options: - -i, --immutable PATH Database files to open in immutable mode - -h, --host TEXT Host for server. Defaults to 127.0.0.1 which means only - connections from the local machine will be allowed. Use - 0.0.0.0 to listen to all IPs and allow access from other - machines. - -p, --port INTEGER RANGE Port for server, defaults to 8001. 
Use -p 0 to automatically - assign an available port. [0<=x<=65535] - --uds TEXT Bind to a Unix domain socket - --reload Automatically reload if code or metadata change detected - - useful for development - --cors Enable CORS by serving Access-Control-Allow-Origin: * - --load-extension TEXT Path to a SQLite extension to load - --inspect-file TEXT Path to JSON file created using "datasette inspect" - -m, --metadata FILENAME Path to JSON/YAML file containing license/source metadata - --template-dir DIRECTORY Path to directory containing custom templates - --plugins-dir DIRECTORY Path to directory containing custom plugins - --static MOUNT:DIRECTORY Serve static files from this directory at /MOUNT/... - --memory Make /_memory database available - --config CONFIG Deprecated: set config option using configname:value. Use - --setting instead. - --setting SETTING... Setting, see docs.datasette.io/en/stable/config.html - --secret TEXT Secret used for signing secure values, such as signed - cookies - --root Output URL that sets a cookie authenticating the root user - --get TEXT Run an HTTP GET request against this path, print results and - exit - --version-note TEXT Additional note to show on /-/versions - --help-settings Show available settings - --pdb Launch debugger on any errors - -o, --open Open Datasette in your web browser - --create Create database files if they do not exist - --crossdb Enable cross-database joins using the /_memory database - --ssl-keyfile TEXT SSL key file - --ssl-certfile TEXT SSL certificate file - --help Show this message and exit. diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 52434fdc..3e357afb 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -161,11 +161,4 @@ The ``--get`` option can specify the path to a page within Datasette and cause D The exit code will be 0 if the request succeeds and 1 if the request produced an HTTP status code other than 200 - e.g. a 404 or 500 error. This means you can use ``datasette --get /`` to run tests against a Datasette application in a continuous integration environment such as GitHub Actions. -.. _getting_started_serve_help: - -datasette serve --help ----------------------- - -Running ``datasette downloads.db`` executes the default ``serve`` sub-command, and is equivalent to running ``datasette serve downloads.db``. The full list of options to that command is shown below. - -.. literalinclude:: datasette-serve-help.txt +Running ``datasette`` without specifying a command runs the default command, ``datasette serve``. See :ref:`cli_help_serve___help` for the full list of options for that command. diff --git a/docs/publish.rst b/docs/publish.rst index f6895f53..1d9664e7 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -47,7 +47,7 @@ Once it has finished it will output a URL like this one:: Cloud Run provides a URL on the ``.run.app`` domain, but you can also point your own domain or subdomain at your Cloud Run service - see `mapping custom domains <https://cloud.google.com/run/docs/mapping-custom-domains>`__ in the Cloud Run documentation for details. -.. literalinclude:: datasette-publish-cloudrun-help.txt +See :ref:`cli_help_publish_cloudrun___help` for the full list of options for this command. Publishing to Heroku -------------------- @@ -64,7 +64,7 @@ This will output some details about the new deployment, including a URL like thi You can specify a custom app name by passing ``-n my-app-name`` to the publish command. This will also allow you to overwrite an existing app. -.. 
literalinclude:: datasette-publish-heroku-help.txt +See :ref:`cli_help_publish_heroku___help` for the full list of options for this command. .. _publish_vercel: @@ -171,4 +171,4 @@ You can customize the port that is exposed by the container using the ``--port`` A full list of options can be seen by running ``datasette package --help``: -.. literalinclude:: datasette-package-help.txt +See :ref:`cli_help_package___help` for the full list of options for this command. \ No newline at end of file diff --git a/tests/test_docs.py b/tests/test_docs.py index d0cb036d..0d17b8e3 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -33,26 +33,6 @@ def test_settings_are_documented(settings_headings, setting): assert setting.name in settings_headings -@pytest.mark.parametrize( - "name,filename", - ( - ("serve", "datasette-serve-help.txt"), - ("package", "datasette-package-help.txt"), - ("publish heroku", "datasette-publish-heroku-help.txt"), - ("publish cloudrun", "datasette-publish-cloudrun-help.txt"), - ), -) -def test_help_includes(name, filename): - expected = (docs_path / filename).read_text() - runner = CliRunner() - result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88) - actual = f"$ datasette {name} --help\n\n{result.output}" - # actual has "Usage: cli package [OPTIONS] FILES" - # because it doesn't know that cli will be aliased to datasette - expected = expected.replace("Usage: datasette", "Usage: cli") - assert expected == actual, "Run python update-docs-help.py to fix this" - - @pytest.fixture(scope="session") def plugin_hooks_content(): return (docs_path / "plugin_hooks.rst").read_text() diff --git a/update-docs-help.py b/update-docs-help.py deleted file mode 100644 index 292d1dcd..00000000 --- a/update-docs-help.py +++ /dev/null @@ -1,25 +0,0 @@ -from click.testing import CliRunner -from datasette.cli import cli -from pathlib import Path - -docs_path = Path(__file__).parent / "docs" - -includes = ( - ("serve", "datasette-serve-help.txt"), - ("package", "datasette-package-help.txt"), - ("publish heroku", "datasette-publish-heroku-help.txt"), - ("publish cloudrun", "datasette-publish-cloudrun-help.txt"), -) - - -def update_help_includes(): - for name, filename in includes: - runner = CliRunner() - result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88) - actual = f"$ datasette {name} --help\n\n{result.output}" - actual = actual.replace("Usage: cli ", "Usage: datasette ") - (docs_path / filename).write_text(actual) - - -if __name__ == "__main__": - update_help_includes() From cb29119db9115b1f40de2fb45263ed77e3bfbb3e Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Jan 2022 17:36:51 -0800 Subject: [PATCH 0100/1103] Release 0.60 Refs #473, #625, #1527, #1544, #1547, #1551, #1552, #1555, #1556, #1557, #1563, #1564, #1568, #1570, #1575, #1579, #1588, #1594 --- datasette/version.py | 2 +- docs/changelog.rst | 37 ++++++++++++++++++++++++------------- 2 files changed, 25 insertions(+), 14 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index 290fbcf3..a4e340b3 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.60a1" +__version__ = "0.60" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 99d3315e..d7e2af39 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,30 +4,41 @@ Changelog ========= -.. _v0_60a1: +.. 
_v0_60: -0.60a1 (2021-12-19) -------------------- +0.60 (2022-01-13) +----------------- +Plugins and internals +~~~~~~~~~~~~~~~~~~~~~ + +- New plugin hook: :ref:`plugin_hook_filters_from_request`, which runs on the table page and can be used to support new custom query string parameters that modify the SQL query. (:issue:`473`) +- Added two additional methods for writing to the database: :ref:`database_execute_write_script` and :ref:`database_execute_write_many`. (:issue:`1570`) +- The :ref:`db.execute_write() <database_execute_write>` internal method now defaults to blocking until the write operation has completed. Previously it defaulted to queuing the write and then continuing to run code while the write was in the queue. (:issue:`1579`) - Database write connections now execute the :ref:`plugin_hook_prepare_connection` plugin hook. (:issue:`1564`) - The ``Datasette()`` constructor no longer requires the ``files=`` argument, and is now documented at :ref:`internals_datasette`. (:issue:`1563`) - The tracing feature now traces write queries, not just read queries. (:issue:`1568`) -- Added two methods for writing to the database: :ref:`database_execute_write_script` and :ref:`database_execute_write_many`. (:issue:`1570`) -- Made several performance improvements to the database schema introspection code that runs when Datasette first starts up. (:issue:`1555`) -- Fixed bug where writable canned queries could not be used with custom templates. (:issue:`1547`) +- The query string variables exposed by ``request.args`` will now include blank strings for arguments such as ``foo`` in ``?foo=&bar=1`` rather than ignoring those parameters entirely. (:issue:`1551`) -.. _v0_60a0: +Faceting +~~~~~~~~ -0.60a0 (2021-12-17) -------------------- - -- New plugin hook: :ref:`plugin_hook_filters_from_request`, which runs on the table page and can be used to support new custom query string parameters that modify the SQL query. (:issue:`473`) - The number of unique values in a facet is now always displayed. Previously it was only displayed if the user specified ``?_facet_size=max``. (:issue:`1556`) -- Fixed bug where ``?_facet_array=tags&_facet=tags`` would only display one of the two selected facets. (:issue:`625`) - Facets of type ``date`` or ``array`` can now be configured in ``metadata.json``, see :ref:`facets_metadata`. Thanks, David Larlet. (:issue:`1552`) - New ``?_nosuggest=1`` parameter for table views, which disables facet suggestion. (:issue:`1557`) +- Fixed bug where ``?_facet_array=tags&_facet=tags`` would only display one of the two selected facets. (:issue:`625`) + +Other small fixes +~~~~~~~~~~~~~~~~~ + +- Made several performance improvements to the database schema introspection code that runs when Datasette first starts up. (:issue:`1555`) - Label columns detected for foreign keys are now case-insensitive, so ``Name`` or ``TITLE`` will be detected in the same way as ``name`` or ``title``. (:issue:`1544`) -- The query string variables exposed by ``request.args`` will now include blank strings for arguments such as ``foo`` in ``?foo=&bar=1`` rather than ignoring those parameters entirely. (:issue:`1551`) +- Upgraded Pluggy dependency to 1.0. (:issue:`1575`) +- Now using `Plausible analytics <https://plausible.io/>`__ for the Datasette documentation. +- ``explain query plan`` is now allowed with varying amounts of whitespace in the query. (:issue:`1588`) +- New :ref:`cli_reference` page showing the output of ``--help`` for each of the ``datasette`` sub-commands. 
This led to several small improvements to the help copy. (:issue:`1594`)
+- Fixed bug where writable canned queries could not be used with custom templates. (:issue:`1547`)
+- Improved fix for a bug where columns with an underscore prefix could result in unnecessary hidden form fields. (:issue:`1527`)
 
 .. _v0_59_4:
 
From 58652dd925bb7509b43905423ec00083bd374dc1 Mon Sep 17 00:00:00 2001
From: Simon Willison <swillison@gmail.com>
Date: Wed, 19 Jan 2022 20:12:46 -0800
Subject: [PATCH 0101/1103] Hidden tables sqlite1/2/3/4, closes #1587

---
 datasette/database.py |  4 +++-
 tests/test_api.py     | 12 ++++++++++++
 2 files changed, 15 insertions(+), 1 deletion(-)

diff --git a/datasette/database.py b/datasette/database.py
index e908d1ea..06dc8da5 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -345,7 +345,9 @@ class Database:
                 """
                 select name from sqlite_master
                 where rootpage = 0
-                and sql like '%VIRTUAL TABLE%USING FTS%'
+                and (
+                    sql like '%VIRTUAL TABLE%USING FTS%'
+                ) or name in ('sqlite_stat1', 'sqlite_stat2', 'sqlite_stat3', 'sqlite_stat4')
                 """
             )
         ).rows
diff --git a/tests/test_api.py b/tests/test_api.py
index 574ebb41..47ec3a8c 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1030,3 +1030,15 @@ async def test_db_path(app_client):
     # Previously this broke if path was a pathlib.Path:
     await datasette.refresh_schemas()
+
+
+@pytest.mark.asyncio
+async def test_hidden_sqlite_stat1_table():
+    ds = Datasette()
+    db = ds.add_memory_database("db")
+    await db.execute_write("create table normal (id integer primary key, name text)")
+    await db.execute_write("create index idx on normal (name)")
+    await db.execute_write("analyze")
+    data = (await ds.client.get("/db.json?_show_hidden=1")).json()
+    tables = [(t["name"], t["hidden"]) for t in data["tables"]]
+    assert tables == [("normal", False), ("sqlite_stat1", True)]

From fae3983c51f4a3aca8335f3e01ff85ef27076fbf Mon Sep 17 00:00:00 2001
From: Simon Willison <swillison@gmail.com>
Date: Wed, 19 Jan 2022 20:31:22 -0800
Subject: [PATCH 0102/1103] Drop support for Python 3.6, closes #1577

Refs #1606
---
 .github/workflows/publish.yml | 6 +++---
 .github/workflows/test.yml    | 2 +-
 README.md                     | 2 +-
 docs/contributing.rst         | 2 +-
 docs/installation.rst         | 2 +-
 docs/introspection.rst        | 8 ++++----
 setup.py                      | 3 +--
 7 files changed, 12 insertions(+), 13 deletions(-)

diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 17c6ae9b..3cfc67da 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -9,7 +9,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
+        python-version: ["3.7", "3.8", "3.9", "3.10"]
     steps:
     - uses: actions/checkout@v2
     - name: Set up Python ${{ matrix.python-version }}
@@ -38,7 +38,7 @@ jobs:
     - name: Set up Python
       uses: actions/setup-python@v2
       with:
-        python-version: '3.9'
+        python-version: '3.10'
    - uses: actions/cache@v2
      name: Configure pip caching
      with:
@@ -66,7 +66,7 @@ jobs:
    - name: Set up Python
      uses: actions/setup-python@v2
      with:
-        python-version: '3.9'
+        python-version: '3.10'
    - uses: actions/cache@v2
      name: Configure pip caching
      with:
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 704931a6..78c289bb 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -7,7 +7,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
+        python-version: ["3.7", "3.8", "3.9", "3.10"]
     steps:
     - uses: actions/checkout@v2
     - name: Set up Python ${{ 
matrix.python-version }} diff --git a/README.md b/README.md index ce15ccf4..107d81da 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ You can also install it using `pip` or `pipx`: pip install datasette -Datasette requires Python 3.6 or higher. We also have [detailed installation instructions](https://docs.datasette.io/en/stable/installation.html) covering other options such as Docker. +Datasette requires Python 3.7 or higher. We also have [detailed installation instructions](https://docs.datasette.io/en/stable/installation.html) covering other options such as Docker. ## Basic usage diff --git a/docs/contributing.rst b/docs/contributing.rst index 07f2a0e4..b74f2f36 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -19,7 +19,7 @@ General guidelines Setting up a development environment ------------------------------------ -If you have Python 3.6 or higher installed on your computer (on OS X the quickest way to do this `is using homebrew <https://docs.python-guide.org/starting/install3/osx/>`__) you can install an editable copy of Datasette using the following steps. +If you have Python 3.7 or higher installed on your computer (on OS X the quickest way to do this `is using homebrew <https://docs.python-guide.org/starting/install3/osx/>`__) you can install an editable copy of Datasette using the following steps. If you want to use GitHub to publish your changes, first `create a fork of datasette <https://github.com/simonw/datasette/fork>`__ under your own GitHub account. diff --git a/docs/installation.rst b/docs/installation.rst index ac3dcca2..e8bef9cd 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -56,7 +56,7 @@ If the latest packaged release of Datasette has not yet been made available thro Using pip --------- -Datasette requires Python 3.6 or higher. Visit `InstallPython3.com <https://installpython3.com/>`__ for step-by-step installation guides for your operating system. +Datasette requires Python 3.7 or higher. Visit `InstallPython3.com <https://installpython3.com/>`__ for step-by-step installation guides for your operating system. You can install Datasette and its dependencies using ``pip``:: diff --git a/docs/introspection.rst b/docs/introspection.rst index d1a0a854..e08ca911 100644 --- a/docs/introspection.rst +++ b/docs/introspection.rst @@ -38,11 +38,11 @@ Shows the version of Datasette, Python and SQLite. `Versions example <https://la { "datasette": { - "version": "0.21" + "version": "0.60" }, "python": { - "full": "3.6.5 (default, May 5 2018, 03:07:21) \n[GCC 6.3.0 20170516]", - "version": "3.6.5" + "full": "3.8.12 (default, Dec 21 2021, 10:45:09) \n[GCC 10.2.1 20210110]", + "version": "3.8.12" }, "sqlite": { "extensions": { @@ -62,7 +62,7 @@ Shows the version of Datasette, Python and SQLite. 
`Versions example <https://la "ENABLE_RTREE", "THREADSAFE=1" ], - "version": "3.16.2" + "version": "3.37.0" } } diff --git a/setup.py b/setup.py index e9ef082a..dade0a88 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,7 @@ setup( packages=find_packages(exclude=("tests",)), package_data={"datasette": ["templates/*.html"]}, include_package_data=True, - python_requires=">=3.6", + python_requires=">=3.7", install_requires=[ "asgiref>=3.2.10,<3.5.0", "click>=7.1.1,<8.1.0", @@ -91,6 +91,5 @@ setup( "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.6", ], ) From 14e320329f756b7d8e298c4e2251d8a0b194c9c4 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 19 Jan 2022 20:38:49 -0800 Subject: [PATCH 0103/1103] Hidden tables data_licenses, KNN, KNN2 for SpatiaLite, closes #1601 --- datasette/database.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/datasette/database.py b/datasette/database.py index 06dc8da5..6ce87215 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -365,6 +365,9 @@ class Database: "sqlite_sequence", "views_geometry_columns", "virts_geometry_columns", + "data_licenses", + "KNN", + "KNN2", ] + [ r[0] for r in ( From 43c30ce0236ebbc7e9cec98a3822265eb2691430 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 19 Jan 2022 21:04:09 -0800 Subject: [PATCH 0104/1103] Use cog to maintain default plugin list in plugins.rst, closes #1600 Also fixed a bug I spotted where datasette.filters showed the same hook three times. --- datasette/app.py | 2 +- docs/plugins.rst | 97 ++++++++++++++++++++++++++++++++++++++++++++---- 2 files changed, 91 insertions(+), 8 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index bd663509..0a89a9f3 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -770,7 +770,7 @@ class Datasette: "static": p["static_path"] is not None, "templates": p["templates_path"] is not None, "version": p.get("version"), - "hooks": p["hooks"], + "hooks": list(set(p["hooks"])), } for p in ps ] diff --git a/docs/plugins.rst b/docs/plugins.rst index 020030f1..4a2c0194 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -91,36 +91,119 @@ You can also use the ``datasette plugins`` command:: } ] +.. [[[cog + from datasette import cli + from click.testing import CliRunner + import textwrap, json + cog.out("\n") + result = CliRunner().invoke(cli.cli, ["plugins", "--all"]) + # cog.out() with text containing newlines was unindenting for some reason + cog.outl("If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette::\n") + plugins = [p for p in json.loads(result.output) if p["name"].startswith("datasette.")] + indented = textwrap.indent(json.dumps(plugins, indent=4), " ") + for line in indented.split("\n"): + cog.outl(line) + cog.out("\n\n") +.. 
]]] + If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette:: - $ datasette plugins --all [ + { + "name": "datasette.publish.heroku", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "publish_subcommand" + ] + }, { "name": "datasette.sql_functions", "static": false, "templates": false, - "version": null + "version": null, + "hooks": [ + "prepare_connection" + ] }, { - "name": "datasette.publish.cloudrun", + "name": "datasette.actor_auth_cookie", "static": false, "templates": false, - "version": null + "version": null, + "hooks": [ + "actor_from_request" + ] + }, + { + "name": "datasette.blob_renderer", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "register_output_renderer" + ] }, { "name": "datasette.facets", "static": false, "templates": false, - "version": null + "version": null, + "hooks": [ + "register_facet_classes" + ] }, { - "name": "datasette.publish.heroku", + "name": "datasette.default_magic_parameters", "static": false, "templates": false, - "version": null + "version": null, + "hooks": [ + "register_magic_parameters" + ] + }, + { + "name": "datasette.default_permissions", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "permission_allowed" + ] + }, + { + "name": "datasette.default_menu_links", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "menu_links" + ] + }, + { + "name": "datasette.filters", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "filters_from_request" + ] + }, + { + "name": "datasette.publish.cloudrun", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "publish_subcommand" + ] } ] + +.. [[[end]]] + You can add the ``--plugins-dir=`` option to include any plugins found in that directory. .. 
_plugins_configuration: From e1770766ce3ae6669305662ba618be610367af77 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 19 Jan 2022 21:14:04 -0800 Subject: [PATCH 0105/1103] Return plugins and hooks in predictable order --- datasette/app.py | 3 ++- docs/plugins.rst | 58 +++++++++++++++++++++++------------------------ tests/test_cli.py | 4 +--- 3 files changed, 32 insertions(+), 33 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 0a89a9f3..49858a4a 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -764,13 +764,14 @@ class Datasette: should_show_all = all if not should_show_all: ps = [p for p in ps if p["name"] not in DEFAULT_PLUGINS] + ps.sort(key=lambda p: p["name"]) return [ { "name": p["name"], "static": p["static_path"] is not None, "templates": p["templates_path"] is not None, "version": p.get("version"), - "hooks": list(set(p["hooks"])), + "hooks": list(sorted(set(p["hooks"]))), } for p in ps ] diff --git a/docs/plugins.rst b/docs/plugins.rst index 4a2c0194..f2ed02f7 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -109,24 +109,6 @@ You can also use the ``datasette plugins`` command:: If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette:: [ - { - "name": "datasette.publish.heroku", - "static": false, - "templates": false, - "version": null, - "hooks": [ - "publish_subcommand" - ] - }, - { - "name": "datasette.sql_functions", - "static": false, - "templates": false, - "version": null, - "hooks": [ - "prepare_connection" - ] - }, { "name": "datasette.actor_auth_cookie", "static": false, @@ -145,15 +127,6 @@ If you run ``datasette plugins --all`` it will include default plugins that ship "register_output_renderer" ] }, - { - "name": "datasette.facets", - "static": false, - "templates": false, - "version": null, - "hooks": [ - "register_facet_classes" - ] - }, { "name": "datasette.default_magic_parameters", "static": false, @@ -163,6 +136,15 @@ If you run ``datasette plugins --all`` it will include default plugins that ship "register_magic_parameters" ] }, + { + "name": "datasette.default_menu_links", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "menu_links" + ] + }, { "name": "datasette.default_permissions", "static": false, @@ -173,12 +155,12 @@ If you run ``datasette plugins --all`` it will include default plugins that ship ] }, { - "name": "datasette.default_menu_links", + "name": "datasette.facets", "static": false, "templates": false, "version": null, "hooks": [ - "menu_links" + "register_facet_classes" ] }, { @@ -198,6 +180,24 @@ If you run ``datasette plugins --all`` it will include default plugins that ship "hooks": [ "publish_subcommand" ] + }, + { + "name": "datasette.publish.heroku", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "publish_subcommand" + ] + }, + { + "name": "datasette.sql_functions", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "prepare_connection" + ] } ] diff --git a/tests/test_cli.py b/tests/test_cli.py index 763fe2e7..bbc5df30 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -106,9 +106,7 @@ def test_spatialite_error_if_cannot_find_load_extension_spatialite(): def test_plugins_cli(app_client): runner = CliRunner() result1 = runner.invoke(cli, ["plugins"]) - assert sorted(EXPECTED_PLUGINS, key=lambda p: p["name"]) == sorted( - json.loads(result1.output), key=lambda p: p["name"] - ) + assert json.loads(result1.output) == EXPECTED_PLUGINS # Try with --all 
result2 = runner.invoke(cli, ["plugins", "--all"]) names = [p["name"] for p in json.loads(result2.output)] From 0467723ee57c2cbc0f02daa47cef632dd4651df0 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 19 Jan 2022 21:46:03 -0800 Subject: [PATCH 0106/1103] New, improved favicon - refs #1603 --- datasette/app.py | 11 ++++++++++- datasette/static/favicon.png | Bin 0 -> 1207 bytes tests/test_html.py | 4 +++- 3 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 datasette/static/favicon.png diff --git a/datasette/app.py b/datasette/app.py index 49858a4a..b2942cd9 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -70,6 +70,7 @@ from .utils.asgi import ( Response, asgi_static, asgi_send, + asgi_send_file, asgi_send_html, asgi_send_json, asgi_send_redirect, @@ -178,9 +179,17 @@ SETTINGS = ( DEFAULT_SETTINGS = {option.name: option.default for option in SETTINGS} +FAVICON_PATH = app_root / "datasette" / "static" / "favicon.png" + async def favicon(request, send): - await asgi_send(send, "", 200) + await asgi_send_file( + send, + str(FAVICON_PATH), + content_type="image/png", + chunk_size=4096, + headers={"Cache-Control": "max-age=3600, immutable, public"}, + ) class Datasette: diff --git a/datasette/static/favicon.png b/datasette/static/favicon.png new file mode 100644 index 0000000000000000000000000000000000000000..36d6334fd7714da87aad44bba0d9c2a869cd0b86 GIT binary patch literal 1207 zcmeAS@N?(olHy`uVBq!ia0vp^86eET1|(%=wk`xxoCO|{#S9GG!XV7ZFl&wkP>``W z$lZxy-8q?;Kn_c~qpu?a!^VE@KZ&eBu8*gSV@L(#+gXPB%B~{E@6V2KU$RE|a=`mo z=TfEn3fnR!zH@Oseo)=wmh*$NvI}i5UR0HpZD-+iO=*9nWijR7qiH4)<$WST)$;i@ z7Pn&6W#%ORmVfzw^0n8MkLM=U{Z2c!d+{f!Nh;ZFoU_%gg<K8^2~y1O^E`j0N^4WS z)t0-HC$IUQZ-4!?)*=bf+}{^PjLiQWdq0o&?c~!gYhy%W{N?P*4DbJ%$Td@YV}E$l zz53T~-v6xLzVW{<o?)`-SjGzf8K$QX&1;>t_1E7u_MKO{uKo<L)=N)*Wi*l5{#(e? 
z19Rm#E0fQ>nKnPKDs9jE<8O=B=Jm9FI<kL@+QiR_=i0Y!`~P6ESF!uD#ORRy!B?Hv z+lv_q-|MgM`yEtze)7W$`G3k|LyF?`XZ<R;{^8-f=i#Z#H_lpGsVg%rded{MCyw*j zy=H2E>zh^cE<R6m@y;h}4=TKFi;%clvb^Zx#Lm3nb4nMKr+zqK{IWmjO8AAxpIdJV z=iJt@5#8jGxBBMwqYh^lO}PFg&d?%GAmE%)2lvM2B!wSYRuu)CCBArO`k$C|At=ek zJn_$uWe$4ZVonqVR7dG$Nv%46EkygXV5Gv-|1;jHaWu9y9atI4yzk2IKn|WW3I!S* zGF(jQS$tK7KY~)-UVU|!7dfpTU-xnHnG>PgJ8#X~AG#rFrX&j=YooK{g8%^w5yH&A zu=@`Lb!Nbnmw#>8dOX(lIu939d((l1P%H5<cVx22q=BsR^F6U?rJvzaTg5vjOxNVh zdt;2j?!~7WqCo#m+JXCb=j^xMe5!j}(p{Z`%N!Omk3hCtu>Z9eGn54i6jk3bDLT6V z(?LnOy0g1j|L5P)W(y179CBjQ4UMUHd=0k78I~oh-Vxm-($q3(!3(t?#dH69edwFe z8oYk{U(GqSF+Hy4isG-QHQ(}?v_>-}(Im|H|CQYv-*<n!yzp%sFs`FqgEnlqoRPw- zSbY4q<-EKsIXUsi@0Ju7P5OGszIE-TP_0b6D}3TSpH)3?ef&}uclmwteDT-k+P-Dl z^s^q-De}EPMgKZ)yg|vUuj?(;V-@QzpZ;}Ad8K-^-O&tfd+z<m_UZjUwJqt}k^JZH zo8x$HegC`YyVv6d(V?g7KU9S4ym@By>+cVXYPIVVUa9r1zW3x~{B3L5XDjDF@~uA} zp6fgF*VMny^fq~}43T|4*J6vWv}(7)Icsgx-y80q|26qW>$K~W*PdB@%So*DsOB-_ z=bN*RF3o)Fy}u;n^4wRK`eQW3!Y|a>)NJLO@oekkJdvlXd#cw}1p8iomBlP^=5O7< pA8K}rANu~Q{$t<kbEaiK<ImRe6uW@kXMyDmgQu&X%Q~loCIC6tDUtvH literal 0 HcmV?d00001 diff --git a/tests/test_html.py b/tests/test_html.py index 3f0a88a9..735d12ff 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -63,7 +63,9 @@ def test_homepage_options(app_client): def test_favicon(app_client): response = app_client.get("/favicon.ico") assert response.status == 200 - assert "" == response.text + assert response.headers["cache-control"] == "max-age=3600, immutable, public" + assert response.headers["content-length"] == "1207" + assert response.headers["content-type"] == "image/png" def test_static(app_client): From b2eebf5ebf222b61a21625527851b77347d3d662 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 19 Jan 2022 21:52:00 -0800 Subject: [PATCH 0107/1103] No need to send this, it's got a default, refs #1603 --- datasette/app.py | 1 - 1 file changed, 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index b2942cd9..09d7d034 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -187,7 +187,6 @@ async def favicon(request, send): send, str(FAVICON_PATH), content_type="image/png", - chunk_size=4096, headers={"Cache-Control": "max-age=3600, immutable, public"}, ) From b01c9b68d151e1656fc180815c0d8480e35fc961 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 19 Jan 2022 21:54:41 -0800 Subject: [PATCH 0108/1103] Oops I pushed the wrong favicon, refs #1603 --- datasette/static/favicon.png | Bin 1207 -> 1358 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/datasette/static/favicon.png b/datasette/static/favicon.png index 36d6334fd7714da87aad44bba0d9c2a869cd0b86..a27139ae6ecb09660f96d262d3779de29f7b5d4f 100644 GIT binary patch literal 1358 zcmV-U1+n^xP)<h;3K|Lk000e1NJLTq004jh004jp0{{R3^x%>C0000;P)t-s0001% zmb`Fqr$j`OPfwIOI*uG1hYt>g|NsB|{QdCp_1@p^(bD6=!P>XE+_AIRsjA42k-T+v zrB+v&Dk+Hr0*3$qfB*n>9JfE)00008bW%=J07QSN(*FMbUcFRk000E7Nkl<Zc-rk- zje4sf486525v_s>@%`WQlKxzNVr%CWzi#uW>voplSwP4@{_OZL9ExLB6vg4y?TeGK zxnWMl)7zgdqJ`^`=cF%a;XEnfZ~_DX0hkjY04$p0#Qg^g4mhGP`9S3ZfVHOsyFUU7 z#G%tFCv-O;3Iy9H0!|1B<DlF8=+1O68`nZ%>HhuALbX|-?j<*B7==YsgcaaKiLv)L zR~e4ruNx2mhbRC<c#EtdAW$ucAO!R?YO?W`YeNO!G#vuildl268^F5pk3*oYm4N2p zYrq0_83Vw}>_!pP!O;Q=uhM_!kwFpBR0sbAoGjp31uE6rmB2!vqppAi3=e@S_*n0v zGty(YfGe8&)i!jlbNTZ8k6gf6ZwrwM*c>(yU^fP)5dafI5T+Q;E%LO90Kjouuqqgu z7@dQT7){iyRiobWOrHWV_GJXwv`t$_RNXqRjB#<?CGl(>SH)J`Q=U;o0<f1M%8L4n zx(vCRdXy@nWaF$ZW$dZi#Iw9Ot0pO*3t)c-6%*nrK*nY_0TC+2JZ}O*W>$dY0_27Q 
z$RHIUH;sTSKQ96sw}B@SP)h{JpA><;4XoA%KC&m=ek@?4VT8>`!{HJ`ce-oqfT)#Y zG_zt{LtImb)Rd<|@j4?y#CNGBW;`PT5d#3;r52f4oE0EAMGWT_dBSW`iX%=j3pmE` zD&UQ<=^Ru<DWcLw=qNqm+0I&-$;VLAHS-vHBU#G(V<>&E_v$f}|CN-MwPV)mi1yKN z&k6T7@C`De4~@XjZUetOC*%{AUGI*Bany_z!BPFZm06OIaB4as!7nEye4bD&o+lK` z<_Xc_#$W<qTUbnU!YV?ol+u@ox@gB$F<Py3lXz<8**GheSWP~LlJ|r%?T9=@pN+Gk zoG|Ues(+lkbwoZVeBKe2RJgpIkDidoJNFRqps2h@N93)1M|3~@VC2i+bHe|~2Y!77 z+Fzd$?QI~d067%_`Fg!TJ{KUpBNrgQK^u5?rnKb(5>G!Ey%GRu`WS=5JJd8cZ<Q`y zTjVo)lhgt;ob<GBtR26(PQ1v|@(2XSU3SEICgGNNj>sZlW@~p?wG9Bmwok!=u6XmF za9=zA4x{0o59~SN-Uc%Fgx*9x%?T?{h5sFgo{D*{mJ_}|7*$_P$@mOOwek#!nw%k- z&xkn1IkU*qe%uR<%OW5Mi#%ceeL~O7P>ama&b-vxJEU{P4q7v=TjVJa)|f4CwtJt@ zPtyg|m(~l)4}YI<kkdJ#XVw*9*n&m1?PwUymLDwu)}_p!5BTwE6c&nyfC8);n^JBT z0a@zN1UoDa2B{Hjm=jh80nfoUCkui?kUs)G0MS;bf&@`3$lV5f3jj2Sc&C@32SLj0 z5pbV1B2WO*tyXBRWj$LtUF9W~t7=36I8XpQ3PB3Su9lfsPUWQ^5qt$i3P6n5cPe;C zzD?heS51hvBaoGc=AIql{>xz;1prQLx-0;}3cxWlgs^^}S-R&EEeN*$DIOO*C~-R< zX5L0jdwPaSLA1l+>8@9c&tY)|*l^D)M{zuxv&jcn`mvbX`kWVkc6=QE0tPO=y`~{k QD*ylh07*qoM6N<$g1B&Oe*gdg literal 1207 zcmeAS@N?(olHy`uVBq!ia0vp^86eET1|(%=wk`xxoCO|{#S9GG!XV7ZFl&wkP>``W z$lZxy-8q?;Kn_c~qpu?a!^VE@KZ&eBu8*gSV@L(#+gXPB%B~{E@6V2KU$RE|a=`mo z=TfEn3fnR!zH@Oseo)=wmh*$NvI}i5UR0HpZD-+iO=*9nWijR7qiH4)<$WST)$;i@ z7Pn&6W#%ORmVfzw^0n8MkLM=U{Z2c!d+{f!Nh;ZFoU_%gg<K8^2~y1O^E`j0N^4WS z)t0-HC$IUQZ-4!?)*=bf+}{^PjLiQWdq0o&?c~!gYhy%W{N?P*4DbJ%$Td@YV}E$l zz53T~-v6xLzVW{<o?)`-SjGzf8K$QX&1;>t_1E7u_MKO{uKo<L)=N)*Wi*l5{#(e? z19Rm#E0fQ>nKnPKDs9jE<8O=B=Jm9FI<kL@+QiR_=i0Y!`~P6ESF!uD#ORRy!B?Hv z+lv_q-|MgM`yEtze)7W$`G3k|LyF?`XZ<R;{^8-f=i#Z#H_lpGsVg%rded{MCyw*j zy=H2E>zh^cE<R6m@y;h}4=TKFi;%clvb^Zx#Lm3nb4nMKr+zqK{IWmjO8AAxpIdJV z=iJt@5#8jGxBBMwqYh^lO}PFg&d?%GAmE%)2lvM2B!wSYRuu)CCBArO`k$C|At=ek zJn_$uWe$4ZVonqVR7dG$Nv%46EkygXV5Gv-|1;jHaWu9y9atI4yzk2IKn|WW3I!S* zGF(jQS$tK7KY~)-UVU|!7dfpTU-xnHnG>PgJ8#X~AG#rFrX&j=YooK{g8%^w5yH&A zu=@`Lb!Nbnmw#>8dOX(lIu939d((l1P%H5<cVx22q=BsR^F6U?rJvzaTg5vjOxNVh zdt;2j?!~7WqCo#m+JXCb=j^xMe5!j}(p{Z`%N!Omk3hCtu>Z9eGn54i6jk3bDLT6V z(?LnOy0g1j|L5P)W(y179CBjQ4UMUHd=0k78I~oh-Vxm-($q3(!3(t?#dH69edwFe z8oYk{U(GqSF+Hy4isG-QHQ(}?v_>-}(Im|H|CQYv-*<n!yzp%sFs`FqgEnlqoRPw- zSbY4q<-EKsIXUsi@0Ju7P5OGszIE-TP_0b6D}3TSpH)3?ef&}uclmwteDT-k+P-Dl z^s^q-De}EPMgKZ)yg|vUuj?(;V-@QzpZ;}Ad8K-^-O&tfd+z<m_UZjUwJqt}k^JZH zo8x$HegC`YyVv6d(V?g7KU9S4ym@By>+cVXYPIVVUa9r1zW3x~{B3L5XDjDF@~uA} zp6fgF*VMny^fq~}43T|4*J6vWv}(7)Icsgx-y80q|26qW>$K~W*PdB@%So*DsOB-_ z=bN*RF3o)Fy}u;n^4wRK`eQW3!Y|a>)NJLO@oekkJdvlXd#cw}1p8iomBlP^=5O7< pA8K}rANu~Q{$t<kbEaiK<ImRe6uW@kXMyDmgQu&X%Q~loCIC6tDUtvH From 7c67483f5e61f7c46410a433a55280d62280327f Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 19 Jan 2022 21:57:14 -0800 Subject: [PATCH 0109/1103] Make test_favicon flexible to changing icon sizes, refs #1603 --- tests/test_html.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_html.py b/tests/test_html.py index 735d12ff..aa718857 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -64,7 +64,7 @@ def test_favicon(app_client): response = app_client.get("/favicon.ico") assert response.status == 200 assert response.headers["cache-control"] == "max-age=3600, immutable, public" - assert response.headers["content-length"] == "1207" + assert int(response.headers["content-length"]) > 100 assert response.headers["content-type"] == "image/png" From 150967d98ef6c5b6064587e7ed30cbdd9b992b8e Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 20 Jan 2022 10:43:15 -0800 Subject: [PATCH 0110/1103] Hand-edited pixel favicon, refs #1603 --- 
datasette/static/favicon.png | Bin 1358 -> 208 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/datasette/static/favicon.png b/datasette/static/favicon.png index a27139ae6ecb09660f96d262d3779de29f7b5d4f..4993163f703c425c6512dec380dafe2ca52051a7 100644 GIT binary patch literal 208 zcmeAS@N?(olHy`uVBq!ia0vp^3LwnF3?v&v(vJfvi2$DvS0Mc#47TsQvvJdnm8(v& zGBmL;l)e{q>Icd(l?3?(GYEgu?Rf^|rFyzJhE&{2KERTb>7mfr*l2iDf^!C=sKnw1 z79FNc4F-(`OP(xAS+ay@QOJ}j3QP)SnoLihzI2EP<5Qdv5^(o|!kVa_4lxb=X6adN yPl8f;beC`>@CQ#ili=ZWZBd)t&CVH}j0{Q7(vQX;{-qAIkipZ{&t;ucLK6Tps7T!a literal 1358 zcmV-U1+n^xP)<h;3K|Lk000e1NJLTq004jh004jp0{{R3^x%>C0000;P)t-s0001% zmb`Fqr$j`OPfwIOI*uG1hYt>g|NsB|{QdCp_1@p^(bD6=!P>XE+_AIRsjA42k-T+v zrB+v&Dk+Hr0*3$qfB*n>9JfE)00008bW%=J07QSN(*FMbUcFRk000E7Nkl<Zc-rk- zje4sf486525v_s>@%`WQlKxzNVr%CWzi#uW>voplSwP4@{_OZL9ExLB6vg4y?TeGK zxnWMl)7zgdqJ`^`=cF%a;XEnfZ~_DX0hkjY04$p0#Qg^g4mhGP`9S3ZfVHOsyFUU7 z#G%tFCv-O;3Iy9H0!|1B<DlF8=+1O68`nZ%>HhuALbX|-?j<*B7==YsgcaaKiLv)L zR~e4ruNx2mhbRC<c#EtdAW$ucAO!R?YO?W`YeNO!G#vuildl268^F5pk3*oYm4N2p zYrq0_83Vw}>_!pP!O;Q=uhM_!kwFpBR0sbAoGjp31uE6rmB2!vqppAi3=e@S_*n0v zGty(YfGe8&)i!jlbNTZ8k6gf6ZwrwM*c>(yU^fP)5dafI5T+Q;E%LO90Kjouuqqgu z7@dQT7){iyRiobWOrHWV_GJXwv`t$_RNXqRjB#<?CGl(>SH)J`Q=U;o0<f1M%8L4n zx(vCRdXy@nWaF$ZW$dZi#Iw9Ot0pO*3t)c-6%*nrK*nY_0TC+2JZ}O*W>$dY0_27Q z$RHIUH;sTSKQ96sw}B@SP)h{JpA><;4XoA%KC&m=ek@?4VT8>`!{HJ`ce-oqfT)#Y zG_zt{LtImb)Rd<|@j4?y#CNGBW;`PT5d#3;r52f4oE0EAMGWT_dBSW`iX%=j3pmE` zD&UQ<=^Ru<DWcLw=qNqm+0I&-$;VLAHS-vHBU#G(V<>&E_v$f}|CN-MwPV)mi1yKN z&k6T7@C`De4~@XjZUetOC*%{AUGI*Bany_z!BPFZm06OIaB4as!7nEye4bD&o+lK` z<_Xc_#$W<qTUbnU!YV?ol+u@ox@gB$F<Py3lXz<8**GheSWP~LlJ|r%?T9=@pN+Gk zoG|Ues(+lkbwoZVeBKe2RJgpIkDidoJNFRqps2h@N93)1M|3~@VC2i+bHe|~2Y!77 z+Fzd$?QI~d067%_`Fg!TJ{KUpBNrgQK^u5?rnKb(5>G!Ey%GRu`WS=5JJd8cZ<Q`y zTjVo)lhgt;ob<GBtR26(PQ1v|@(2XSU3SEICgGNNj>sZlW@~p?wG9Bmwok!=u6XmF za9=zA4x{0o59~SN-Uc%Fgx*9x%?T?{h5sFgo{D*{mJ_}|7*$_P$@mOOwek#!nw%k- z&xkn1IkU*qe%uR<%OW5Mi#%ceeL~O7P>ama&b-vxJEU{P4q7v=TjVJa)|f4CwtJt@ zPtyg|m(~l)4}YI<kkdJ#XVw*9*n&m1?PwUymLDwu)}_p!5BTwE6c&nyfC8);n^JBT z0a@zN1UoDa2B{Hjm=jh80nfoUCkui?kUs)G0MS;bf&@`3$lV5f3jj2Sc&C@32SLj0 z5pbV1B2WO*tyXBRWj$LtUF9W~t7=36I8XpQ3PB3Su9lfsPUWQ^5qt$i3P6n5cPe;C zzD?heS51hvBaoGc=AIql{>xz;1prQLx-0;}3cxWlgs^^}S-R&EEeN*$DIOO*C~-R< zX5L0jdwPaSLA1l+>8@9c&tY)|*l^D)M{zuxv&jcn`mvbX`kWVkc6=QE0tPO=y`~{k QD*ylh07*qoM6N<$g1B&Oe*gdg From ffca55dfd7cc9b53522c2e5a2fa1ff67c9beadf2 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 20 Jan 2022 14:40:44 -0800 Subject: [PATCH 0111/1103] Show link to /stable/ on /latest/ pages, refs #1608 --- docs/_templates/layout.html | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html index db16b428..785cdc7c 100644 --- a/docs/_templates/layout.html +++ b/docs/_templates/layout.html @@ -26,3 +26,36 @@ {% include "searchbox.html" %} {% endblock %} + +{% block footer %} +{{ super() }} +<script> +jQuery(function ($) { + // Show banner linking to /stable/ if this is a /latest/ page + if (!/\/latest\//.test(location.pathname)) { + return; + } + var stableUrl = location.pathname.replace("/latest/", "/stable/"); + // Check it's not a 404 + fetch(stableUrl, { method: "HEAD" }).then((response) => { + if (response.status == 200) { + var warning = $( + `<div class="admonition warning"> + <p class="first admonition-title">Note</p> + <p class="last"> + This documentation covers the <strong>development version</strong> of Datasette.</p> + <p>See <a href="${stableUrl}">this page</a> 
for the current stable release. + </p> + </div>` + ); + warning.find("a").attr("href", stableUrl); + var body = $("div.body"); + if (!body.length) { + body = $("div.document"); + } + body.prepend(warning); + } + }); +}); +</script> +{% endblock %} From d194db4204b732af57138e1fb0924ec77354dd58 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 20 Jan 2022 18:01:47 -0800 Subject: [PATCH 0112/1103] Output pip freeze to show installed packages, refs #1609 --- .github/workflows/test.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 78c289bb..2caf9447 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -24,6 +24,7 @@ jobs: - name: Install dependencies run: | pip install -e '.[test]' + pip freeze - name: Run tests run: | pytest -n auto -m "not serial" From 68cc1e2dbb0b841af7a7691ea6b4e7d31b09cc5e Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 25 Jan 2022 10:28:05 -0800 Subject: [PATCH 0113/1103] Move queries to top of database page, refs #1612 --- datasette/templates/database.html | 24 ++++++++++++++---------- tests/test_html.py | 30 +++++++++++++++++++++++++----- 2 files changed, 39 insertions(+), 15 deletions(-) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 2d182d1b..c1e39bd1 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -67,10 +67,23 @@ </div> {% endif %} +{% if queries %} + <h2 id="queries">Queries</h2> + <ul class="bullets"> + {% for query in queries %} + <li><a href="{{ urls.query(database, query.name) }}{% if query.fragment %}#{{ query.fragment }}{% endif %}" title="{{ query.description or query.sql }}">{{ query.title or query.name }}</a>{% if query.private %} 🔒{% endif %}</li> + {% endfor %} + </ul> +{% endif %} + +{% if tables %} +<h2 id="tables">Tables</h2> +{% endif %} + {% for table in tables %} {% if show_hidden or not table.hidden %} <div class="db-table"> - <h2><a href="{{ urls.table(database, table.name) }}">{{ table.name }}</a>{% if table.private %} 🔒{% endif %}{% if table.hidden %}<em> (hidden)</em>{% endif %}</h2> + <h3><a href="{{ urls.table(database, table.name) }}">{{ table.name }}</a>{% if table.private %} 🔒{% endif %}{% if table.hidden %}<em> (hidden)</em>{% endif %}</h3> <p><em>{% for column in table.columns %}{{ column }}{% if not loop.last %}, {% endif %}{% endfor %}</em></p> <p>{% if table.count is none %}Many rows{% else %}{{ "{:,}".format(table.count) }} row{% if table.count == 1 %}{% else %}s{% endif %}{% endif %}</p> </div> @@ -90,15 +103,6 @@ </ul> {% endif %} -{% if queries %} - <h2 id="queries">Queries</h2> - <ul class="bullets"> - {% for query in queries %} - <li><a href="{{ urls.query(database, query.name) }}{% if query.fragment %}#{{ query.fragment }}{% endif %}" title="{{ query.description or query.sql }}">{{ query.title or query.name }}</a>{% if query.private %} 🔒{% endif %}</li> - {% endfor %} - </ul> -{% endif %} - {% if allow_download %} <p class="download-sqlite">Download SQLite DB: <a href="{{ urls.database(database) }}.db">{{ database }}.db</a> <em>{{ format_bytes(size) }}</em></p> {% endif %} diff --git a/tests/test_html.py b/tests/test_html.py index aa718857..1bbf335c 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -110,12 +110,32 @@ def test_database_page_redirects_with_url_hash(app_client_with_hash): def test_database_page(app_client): response = app_client.get("/fixtures") - assert ( - b"<p><em>pk, 
foreign_key_with_label, foreign_key_with_blank_label, " - b"foreign_key_with_no_label, foreign_key_compound_pk1, " - b"foreign_key_compound_pk2</em></p>" - ) in response.body soup = Soup(response.body, "html.parser") + # Should have a <textarea> for executing SQL + assert "<textarea" in response.text + + # And a list of tables + for fragment in ( + '<h2 id="tables">Tables</h2>', + '<h3><a href="/fixtures/sortable">sortable</a></h3>', + "<p><em>pk, foreign_key_with_label, foreign_key_with_blank_label, ", + ): + assert fragment in response.text + + # And views + views_ul = soup.find("h2", text="Views").find_next_sibling("ul") + assert views_ul is not None + assert [ + ("/fixtures/paginated_view", "paginated_view"), + ("/fixtures/searchable_view", "searchable_view"), + ( + "/fixtures/searchable_view_configured_by_metadata", + "searchable_view_configured_by_metadata", + ), + ("/fixtures/simple_view", "simple_view"), + ] == sorted([(a["href"], a.text) for a in views_ul.find_all("a")]) + + # And a list of canned queries queries_ul = soup.find("h2", text="Queries").find_next_sibling("ul") assert queries_ul is not None assert [ From 84391763a8d5911c387c9965c86c8d45f39b31fb Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 25 Jan 2022 10:39:03 -0800 Subject: [PATCH 0114/1103] Clarify that magic parameters don't work for custom SQL --- docs/sql_queries.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index f9a36490..010e3205 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -275,6 +275,8 @@ Magic parameters Named parameters that start with an underscore are special: they can be used to automatically add values created by Datasette that are not contained in the incoming form fields or query string. +These magic parameters are only supported for canned queries: to avoid security issues (such as queries that extract the user's private cookies) they are not available to SQL that is executed by the user as a custom SQL query. + Available magic parameters are: ``_actor_*`` - e.g. ``_actor_id``, ``_actor_name`` From 2aa686c6554bf6b8230eb5b3019574df6cc99225 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 26 Jan 2022 10:21:05 -0800 Subject: [PATCH 0115/1103] It's not a weekly newsletter --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 107d81da..557d9290 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover * Live demo of current main: https://latest.datasette.io/ * Support questions, feedback? Join our [GitHub Discussions forum](https://github.com/simonw/datasette/discussions) -Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly newsletter](https://datasette.substack.com/) for tips, tricks and news on what's new in the Datasette ecosystem. +Want to stay up-to-date with the project? Subscribe to the [Datasette newsletter](https://datasette.substack.com/) for tips, tricks and news on what's new in the Datasette ecosystem. ## Installation From 3ef47a0896c7e63404a34e465b7160c80eaa571d Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 27 Nov 2021 12:08:42 -0800 Subject: [PATCH 0116/1103] Link rel=alternate header for tables and rows Also added Access-Control-Expose-Headers: Link to --cors mode. 
Closes #1533 Refs https://github.com/simonw/datasette-notebook/issues/2 LL# metadata.json.1 --- datasette/templates/base.html | 2 +- datasette/templates/row.html | 3 ++- datasette/templates/table.html | 3 ++- datasette/utils/__init__.py | 1 + datasette/views/base.py | 12 ++++++++++-- datasette/views/table.py | 22 +++++++++++++++++++++- docs/json_api.rst | 20 ++++++++++++++++++++ tests/test_api.py | 1 + tests/test_table_html.py | 28 ++++++++++++++++++++++++++++ 9 files changed, 86 insertions(+), 6 deletions(-) diff --git a/datasette/templates/base.html b/datasette/templates/base.html index c9aa7e31..836b7bb7 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -10,7 +10,7 @@ {% for url in extra_js_urls %} <script {% if url.module %}type="module" {% endif %}src="{{ url.url }}"{% if url.sri %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}></script> {% endfor %} -{% block extra_head %}{% endblock %} +{%- block extra_head %}{% endblock -%} </head> <body class="{% block body_class %}{% endblock %}"> <div class="not-footer"> diff --git a/datasette/templates/row.html b/datasette/templates/row.html index c86e979d..1ac16268 100644 --- a/datasette/templates/row.html +++ b/datasette/templates/row.html @@ -3,7 +3,8 @@ {% block title %}{{ database }}: {{ table }}{% endblock %} {% block extra_head %} -{{ super() }} +{{- super() -}} +<link rel="alternate" type="application/json+datasette" href="{{ alternate_url_json }}"> <style> @media only screen and (max-width: 576px) { {% for column in columns %} diff --git a/datasette/templates/table.html b/datasette/templates/table.html index e3c6f38d..403e1d5b 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -3,7 +3,8 @@ {% block title %}{{ database }}: {{ table }}: {% if filtered_table_rows_count or filtered_table_rows_count == 0 %}{{ "{:,}".format(filtered_table_rows_count) }} row{% if filtered_table_rows_count == 1 %}{% else %}s{% endif %}{% endif %}{% if human_description_en %} {{ human_description_en }}{% endif %}{% endblock %} {% block extra_head %} -{{ super() }} +{{- super() -}} +<link rel="alternate" type="application/json+datasette" href="{{ alternate_url_json }}"> <script src="{{ urls.static('table.js') }}" defer></script> <style> @media only screen and (max-width: 576px) { diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index bc3155a5..dc4e1c99 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1094,3 +1094,4 @@ async def derive_named_parameters(db, sql): def add_cors_headers(headers): headers["Access-Control-Allow-Origin"] = "*" headers["Access-Control-Allow-Headers"] = "Authorization" + headers["Access-Control-Expose-Headers"] = "Link" diff --git a/datasette/views/base.py b/datasette/views/base.py index b1cacb3f..a414892a 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -137,10 +137,18 @@ class BaseView: ], }, } + # Hacky cheat to add extra headers + headers = {} + if "_extra_headers" in context: + headers.update(context["_extra_headers"]) return Response.html( await self.ds.render_template( - template, template_context, request=request, view_name=self.name - ) + template, + template_context, + request=request, + view_name=self.name, + ), + headers=headers, ) @classmethod diff --git a/datasette/views/table.py b/datasette/views/table.py index 77fb2850..6bbee352 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -17,6 +17,7 @@ from datasette.utils import ( is_url, path_from_row_pks, 
path_with_added_args, + path_with_format, path_with_removed_args, path_with_replaced_args, to_css_class, @@ -850,7 +851,12 @@ class TableView(RowTableShared): for table_column in table_columns if table_column not in columns ] + alternate_url_json = self.ds.absolute_url( + request, + self.ds.urls.path(path_with_format(request=request, format="json")), + ) d = { + "alternate_url_json": alternate_url_json, "table_actions": table_actions, "use_rowid": use_rowid, "filters": filters, @@ -881,6 +887,11 @@ class TableView(RowTableShared): "metadata": metadata, "view_definition": await db.get_view_definition(table), "table_definition": await db.get_table_definition(table), + "_extra_headers": { + "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( + alternate_url_json + ) + }, } d.update(extra_context_from_filters) return d @@ -964,8 +975,12 @@ class RowView(RowTableShared): ) for column in display_columns: column["sortable"] = False - + alternate_url_json = self.ds.absolute_url( + request, + self.ds.urls.path(path_with_format(request=request, format="json")), + ) return { + "alternate_url_json": alternate_url_json, "foreign_key_tables": await self.foreign_key_tables( database, table, pk_values ), @@ -980,6 +995,11 @@ class RowView(RowTableShared): .get(database, {}) .get("tables", {}) .get(table, {}), + "_extra_headers": { + "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( + alternate_url_json + ) + }, } data = { diff --git a/docs/json_api.rst b/docs/json_api.rst index bd55c163..b5a6744b 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -14,6 +14,7 @@ served with the following additional HTTP headers:: Access-Control-Allow-Origin: * Access-Control-Allow-Headers: Authorization + Access-Control-Expose-Headers: Link This means JavaScript running on any domain will be able to make cross-origin requests to fetch the data. @@ -435,3 +436,22 @@ looks like:: The column in the foreign key table that is used for the label can be specified in ``metadata.json`` - see :ref:`label_columns`. + +.. _json_api_discover_alternate: + +Discovering the JSON for a page +------------------------------- + +The :ref:`table <TableView>` and :ref:`row <RowView>` HTML pages both provide a mechanism for discovering their JSON equivalents using the HTML ``link`` mechanism. + +You can find this near the top of those pages, looking like this: + +.. 
code-block:: python + + <link rel="alternate" + type="application/json+datasette" + href="https://latest.datasette.io/fixtures/sortable.json"> + +The JSON URL is also made available in a ``Link`` HTTP header for the page:: + + Link: https://latest.datasette.io/fixtures/sortable.json; rel="alternate"; type="application/json+datasette" diff --git a/tests/test_api.py b/tests/test_api.py index 47ec3a8c..9741ffc5 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -977,6 +977,7 @@ def test_cors(app_client_with_cors, path, status_code): assert response.status == status_code assert response.headers["Access-Control-Allow-Origin"] == "*" assert response.headers["Access-Control-Allow-Headers"] == "Authorization" + assert response.headers["Access-Control-Expose-Headers"] == "Link" @pytest.mark.parametrize( diff --git a/tests/test_table_html.py b/tests/test_table_html.py index 021268c3..7d08d230 100644 --- a/tests/test_table_html.py +++ b/tests/test_table_html.py @@ -1069,3 +1069,31 @@ def test_table_page_title(app_client, path, expected): response = app_client.get(path) title = Soup(response.text, "html.parser").find("title").text assert title == expected + + +@pytest.mark.parametrize( + "path,expected", + ( + ( + "/fixtures/table%2Fwith%2Fslashes.csv", + "http://localhost/fixtures/table%2Fwith%2Fslashes.csv?_format=json", + ), + ("/fixtures/facetable", "http://localhost/fixtures/facetable.json"), + ( + "/fixtures/no_primary_key/1", + "http://localhost/fixtures/no_primary_key/1.json", + ), + ), +) +def test_alternate_url_json(app_client, path, expected): + response = app_client.get(path) + link = response.headers["link"] + assert link == '{}; rel="alternate"; type="application/json+datasette"'.format( + expected + ) + assert ( + '<link rel="alternate" type="application/json+datasette" href="{}">'.format( + expected + ) + in response.text + ) From b72b2423c79dea4600b2337949db98269d0b6215 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 2 Feb 2022 13:21:11 -0800 Subject: [PATCH 0117/1103] rel=alternate JSON for queries and database pages, closes #1620 --- datasette/templates/database.html | 3 ++- datasette/templates/query.html | 3 ++- datasette/views/database.py | 32 ++++++++++++++++++++++ docs/json_api.rst | 2 +- tests/test_canned_queries.py | 9 +++++++ tests/test_html.py | 45 +++++++++++++++++++++++++++++++ tests/test_table_html.py | 28 ------------------- 7 files changed, 91 insertions(+), 31 deletions(-) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index c1e39bd1..8f0c65d7 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -3,7 +3,8 @@ {% block title %}{{ database }}{% endblock %} {% block extra_head %} -{{ super() }} +{{- super() -}} +<link rel="alternate" type="application/json+datasette" href="{{ alternate_url_json }}"> {% include "_codemirror.html" %} {% endblock %} diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 75f7f1b1..d0121976 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -3,7 +3,8 @@ {% block title %}{{ database }}{% if query and query.sql %}: {{ query.sql }}{% endif %}{% endblock %} {% block extra_head %} -{{ super() }} +{{- super() -}} +<link rel="alternate" type="application/json+datasette" href="{{ alternate_url_json }}"> {% if columns %} <style> @media only screen and (max-width: 576px) { diff --git a/datasette/views/database.py b/datasette/views/database.py index e26706e7..f3641dc5 100644 --- 
a/datasette/views/database.py +++ b/datasette/views/database.py @@ -123,6 +123,10 @@ class DatabaseView(DataView): attached_databases = [d.name for d in await db.attached_databases()] + alternate_url_json = self.ds.absolute_url( + request, + self.ds.urls.path(path_with_format(request=request, format="json")), + ) return ( { "database": database, @@ -140,6 +144,7 @@ class DatabaseView(DataView): ), }, { + "alternate_url_json": alternate_url_json, "database_actions": database_actions, "show_hidden": request.args.get("_show_hidden"), "editable": True, @@ -148,6 +153,11 @@ class DatabaseView(DataView): and not db.is_mutable and not db.is_memory, "attached_databases": attached_databases, + "_extra_headers": { + "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( + alternate_url_json + ) + }, }, (f"database-{to_css_class(database)}.html", "database.html"), ) @@ -308,7 +318,14 @@ class QueryView(DataView): else: async def extra_template(): + alternate_url_json = self.ds.absolute_url( + request, + self.ds.urls.path( + path_with_format(request=request, format="json") + ), + ) return { + "alternate_url_json": alternate_url_json, "request": request, "path_with_added_args": path_with_added_args, "path_with_removed_args": path_with_removed_args, @@ -316,6 +333,11 @@ class QueryView(DataView): "canned_query": canned_query, "success_message": request.args.get("_success") or "", "canned_write": True, + "_extra_headers": { + "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( + alternate_url_json + ) + }, } return ( @@ -448,7 +470,12 @@ class QueryView(DataView): show_hide_link = path_with_added_args(request, {"_hide_sql": 1}) show_hide_text = "hide" hide_sql = show_hide_text == "show" + alternate_url_json = self.ds.absolute_url( + request, + self.ds.urls.path(path_with_format(request=request, format="json")), + ) return { + "alternate_url_json": alternate_url_json, "display_rows": display_rows, "custom_sql": True, "named_parameter_values": named_parameter_values, @@ -462,6 +489,11 @@ class QueryView(DataView): "show_hide_text": show_hide_text, "show_hide_hidden": markupsafe.Markup(show_hide_hidden), "hide_sql": hide_sql, + "_extra_headers": { + "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( + alternate_url_json + ) + }, } return ( diff --git a/docs/json_api.rst b/docs/json_api.rst index b5a6744b..4f9eaddb 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -442,7 +442,7 @@ in ``metadata.json`` - see :ref:`label_columns`. Discovering the JSON for a page ------------------------------- -The :ref:`table <TableView>` and :ref:`row <RowView>` HTML pages both provide a mechanism for discovering their JSON equivalents using the HTML ``link`` mechanism. +The :ref:`database <DatabaseView>`, :ref:`table <TableView>`, :ref:`custom/canned query <sql>` and :ref:`row <RowView>` HTML pages all provide a mechanism for discovering their JSON equivalents using the HTML ``link`` mechanism. You can find this near the top of those pages, looking like this: diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index c5ccaf5c..5abffdcc 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -364,3 +364,12 @@ def test_canned_write_custom_template(canned_write_client): in response.text ) assert "!!!CUSTOM_UPDATE_NAME_TEMPLATE!!!" 
in response.text + # And test for link rel=alternate while we're here: + assert ( + '<link rel="alternate" type="application/json+datasette" href="http://localhost/data/update_name.json">' + in response.text + ) + assert ( + response.headers["link"] + == 'http://localhost/data/update_name.json; rel="alternate"; type="application/json+datasette"' + ) diff --git a/tests/test_html.py b/tests/test_html.py index 1bbf335c..273e4914 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -870,3 +870,48 @@ def test_trace_correctly_escaped(app_client): response = app_client.get("/fixtures?sql=select+'<h1>Hello'&_trace=1") assert "select '<h1>Hello" not in response.text assert "select '<h1>Hello" in response.text + + +@pytest.mark.parametrize( + "path,expected", + ( + # Table page + ( + "/fixtures/table%2Fwith%2Fslashes.csv", + "http://localhost/fixtures/table%2Fwith%2Fslashes.csv?_format=json", + ), + ("/fixtures/facetable", "http://localhost/fixtures/facetable.json"), + # Row page + ( + "/fixtures/no_primary_key/1", + "http://localhost/fixtures/no_primary_key/1.json", + ), + # Database index page + ( + "/fixtures", + "http://localhost/fixtures.json", + ), + # Custom query page + ( + "/fixtures?sql=select+*+from+facetable", + "http://localhost/fixtures.json?sql=select+*+from+facetable", + ), + # Canned query page + ( + "/fixtures/neighborhood_search?text=town", + "http://localhost/fixtures/neighborhood_search.json?text=town", + ), + ), +) +def test_alternate_url_json(app_client, path, expected): + response = app_client.get(path) + link = response.headers["link"] + assert link == '{}; rel="alternate"; type="application/json+datasette"'.format( + expected + ) + assert ( + '<link rel="alternate" type="application/json+datasette" href="{}">'.format( + expected + ) + in response.text + ) diff --git a/tests/test_table_html.py b/tests/test_table_html.py index 7d08d230..021268c3 100644 --- a/tests/test_table_html.py +++ b/tests/test_table_html.py @@ -1069,31 +1069,3 @@ def test_table_page_title(app_client, path, expected): response = app_client.get(path) title = Soup(response.text, "html.parser").find("title").text assert title == expected - - -@pytest.mark.parametrize( - "path,expected", - ( - ( - "/fixtures/table%2Fwith%2Fslashes.csv", - "http://localhost/fixtures/table%2Fwith%2Fslashes.csv?_format=json", - ), - ("/fixtures/facetable", "http://localhost/fixtures/facetable.json"), - ( - "/fixtures/no_primary_key/1", - "http://localhost/fixtures/no_primary_key/1.json", - ), - ), -) -def test_alternate_url_json(app_client, path, expected): - response = app_client.get(path) - link = response.headers["link"] - assert link == '{}; rel="alternate"; type="application/json+datasette"'.format( - expected - ) - assert ( - '<link rel="alternate" type="application/json+datasette" href="{}">'.format( - expected - ) - in response.text - ) From 8d5779acf0041cfd0db7f68f468419f9008b86ec Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 2 Feb 2022 13:32:47 -0800 Subject: [PATCH 0118/1103] Refactored alternate_url_json mechanism, refs #1620, #1533 --- datasette/templates/base.html | 3 +++ datasette/templates/database.html | 1 - datasette/templates/query.html | 1 - datasette/templates/row.html | 1 - datasette/templates/table.html | 1 - datasette/views/base.py | 17 +++++++++++++--- datasette/views/database.py | 32 ------------------------------- datasette/views/table.py | 20 ------------------- docs/json_api.rst | 4 ++-- tests/test_html.py | 9 ++++++++- 10 files changed, 27 insertions(+), 62 
deletions(-) diff --git a/datasette/templates/base.html b/datasette/templates/base.html index 836b7bb7..c3a71acb 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -10,6 +10,9 @@ {% for url in extra_js_urls %} <script {% if url.module %}type="module" {% endif %}src="{{ url.url }}"{% if url.sri %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}></script> {% endfor %} +{%- if alternate_url_json -%} + <link rel="alternate" type="application/json+datasette" href="{{ alternate_url_json }}"> +{%- endif -%} {%- block extra_head %}{% endblock -%} </head> <body class="{% block body_class %}{% endblock %}"> diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 8f0c65d7..e76bc49e 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -4,7 +4,6 @@ {% block extra_head %} {{- super() -}} -<link rel="alternate" type="application/json+datasette" href="{{ alternate_url_json }}"> {% include "_codemirror.html" %} {% endblock %} diff --git a/datasette/templates/query.html b/datasette/templates/query.html index d0121976..8c920527 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -4,7 +4,6 @@ {% block extra_head %} {{- super() -}} -<link rel="alternate" type="application/json+datasette" href="{{ alternate_url_json }}"> {% if columns %} <style> @media only screen and (max-width: 576px) { diff --git a/datasette/templates/row.html b/datasette/templates/row.html index 1ac16268..10770ce9 100644 --- a/datasette/templates/row.html +++ b/datasette/templates/row.html @@ -4,7 +4,6 @@ {% block extra_head %} {{- super() -}} -<link rel="alternate" type="application/json+datasette" href="{{ alternate_url_json }}"> <style> @media only screen and (max-width: 576px) { {% for column in columns %} diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 403e1d5b..81bd044a 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -4,7 +4,6 @@ {% block extra_head %} {{- super() -}} -<link rel="alternate" type="application/json+datasette" href="{{ alternate_url_json }}"> <script src="{{ urls.static('table.js') }}" defer></script> <style> @media only screen and (max-width: 576px) { diff --git a/datasette/views/base.py b/datasette/views/base.py index a414892a..c74d6141 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -55,6 +55,7 @@ class DatasetteError(Exception): class BaseView: ds = None + has_json_alternate = True def __init__(self, datasette): self.ds = datasette @@ -137,10 +138,20 @@ class BaseView: ], }, } - # Hacky cheat to add extra headers headers = {} - if "_extra_headers" in context: - headers.update(context["_extra_headers"]) + if self.has_json_alternate: + alternate_url_json = self.ds.absolute_url( + request, + self.ds.urls.path(path_with_format(request=request, format="json")), + ) + template_context["alternate_url_json"] = alternate_url_json + headers.update( + { + "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( + alternate_url_json + ) + } + ) return Response.html( await self.ds.render_template( template, diff --git a/datasette/views/database.py b/datasette/views/database.py index f3641dc5..e26706e7 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -123,10 +123,6 @@ class DatabaseView(DataView): attached_databases = [d.name for d in await db.attached_databases()] - alternate_url_json = self.ds.absolute_url( - request, - 
self.ds.urls.path(path_with_format(request=request, format="json")), - ) return ( { "database": database, @@ -144,7 +140,6 @@ class DatabaseView(DataView): ), }, { - "alternate_url_json": alternate_url_json, "database_actions": database_actions, "show_hidden": request.args.get("_show_hidden"), "editable": True, @@ -153,11 +148,6 @@ class DatabaseView(DataView): and not db.is_mutable and not db.is_memory, "attached_databases": attached_databases, - "_extra_headers": { - "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( - alternate_url_json - ) - }, }, (f"database-{to_css_class(database)}.html", "database.html"), ) @@ -318,14 +308,7 @@ class QueryView(DataView): else: async def extra_template(): - alternate_url_json = self.ds.absolute_url( - request, - self.ds.urls.path( - path_with_format(request=request, format="json") - ), - ) return { - "alternate_url_json": alternate_url_json, "request": request, "path_with_added_args": path_with_added_args, "path_with_removed_args": path_with_removed_args, @@ -333,11 +316,6 @@ class QueryView(DataView): "canned_query": canned_query, "success_message": request.args.get("_success") or "", "canned_write": True, - "_extra_headers": { - "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( - alternate_url_json - ) - }, } return ( @@ -470,12 +448,7 @@ class QueryView(DataView): show_hide_link = path_with_added_args(request, {"_hide_sql": 1}) show_hide_text = "hide" hide_sql = show_hide_text == "show" - alternate_url_json = self.ds.absolute_url( - request, - self.ds.urls.path(path_with_format(request=request, format="json")), - ) return { - "alternate_url_json": alternate_url_json, "display_rows": display_rows, "custom_sql": True, "named_parameter_values": named_parameter_values, @@ -489,11 +462,6 @@ class QueryView(DataView): "show_hide_text": show_hide_text, "show_hide_hidden": markupsafe.Markup(show_hide_hidden), "hide_sql": hide_sql, - "_extra_headers": { - "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( - alternate_url_json - ) - }, } return ( diff --git a/datasette/views/table.py b/datasette/views/table.py index 6bbee352..be9e9c3b 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -851,12 +851,7 @@ class TableView(RowTableShared): for table_column in table_columns if table_column not in columns ] - alternate_url_json = self.ds.absolute_url( - request, - self.ds.urls.path(path_with_format(request=request, format="json")), - ) d = { - "alternate_url_json": alternate_url_json, "table_actions": table_actions, "use_rowid": use_rowid, "filters": filters, @@ -887,11 +882,6 @@ class TableView(RowTableShared): "metadata": metadata, "view_definition": await db.get_view_definition(table), "table_definition": await db.get_table_definition(table), - "_extra_headers": { - "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( - alternate_url_json - ) - }, } d.update(extra_context_from_filters) return d @@ -975,12 +965,7 @@ class RowView(RowTableShared): ) for column in display_columns: column["sortable"] = False - alternate_url_json = self.ds.absolute_url( - request, - self.ds.urls.path(path_with_format(request=request, format="json")), - ) return { - "alternate_url_json": alternate_url_json, "foreign_key_tables": await self.foreign_key_tables( database, table, pk_values ), @@ -995,11 +980,6 @@ class RowView(RowTableShared): .get(database, {}) .get("tables", {}) .get(table, {}), - "_extra_headers": { - "Link": '{}; rel="alternate"; 
type="application/json+datasette"'.format( - alternate_url_json - ) - }, } data = { diff --git a/docs/json_api.rst b/docs/json_api.rst index 4f9eaddb..aa6fcdaa 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -442,9 +442,9 @@ in ``metadata.json`` - see :ref:`label_columns`. Discovering the JSON for a page ------------------------------- -The :ref:`database <DatabaseView>`, :ref:`table <TableView>`, :ref:`custom/canned query <sql>` and :ref:`row <RowView>` HTML pages all provide a mechanism for discovering their JSON equivalents using the HTML ``link`` mechanism. +Most of the HTML pages served by Datasette provide a mechanism for discovering their JSON equivalents using the HTML ``link`` mechanism. -You can find this near the top of those pages, looking like this: +You can find this near the top of the source code of those pages, looking like this: .. code-block:: python diff --git a/tests/test_html.py b/tests/test_html.py index 273e4914..4b6cbd13 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -875,12 +875,14 @@ def test_trace_correctly_escaped(app_client): @pytest.mark.parametrize( "path,expected", ( + # Instance index page + ("/", "http://localhost/.json"), # Table page + ("/fixtures/facetable", "http://localhost/fixtures/facetable.json"), ( "/fixtures/table%2Fwith%2Fslashes.csv", "http://localhost/fixtures/table%2Fwith%2Fslashes.csv?_format=json", ), - ("/fixtures/facetable", "http://localhost/fixtures/facetable.json"), # Row page ( "/fixtures/no_primary_key/1", @@ -901,6 +903,11 @@ def test_trace_correctly_escaped(app_client): "/fixtures/neighborhood_search?text=town", "http://localhost/fixtures/neighborhood_search.json?text=town", ), + # /-/ pages + ( + "/-/plugins", + "http://localhost/-/plugins.json", + ), ), ) def test_alternate_url_json(app_client, path, expected): From 23a09b0f6af33c52acf8c1d9002fe475b42fee10 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 2 Feb 2022 13:48:52 -0800 Subject: [PATCH 0119/1103] Remove JSON rel=alternate from some pages, closes #1623 --- datasette/views/special.py | 6 ++++++ tests/test_html.py | 12 ++++++++++++ 2 files changed, 18 insertions(+) diff --git a/datasette/views/special.py b/datasette/views/special.py index 3cb626a5..cdd530f0 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -43,6 +43,7 @@ class JsonDataView(BaseView): class PatternPortfolioView(BaseView): name = "patterns" + has_json_alternate = False async def get(self, request): await self.check_permission(request, "view-instance") @@ -51,6 +52,7 @@ class PatternPortfolioView(BaseView): class AuthTokenView(BaseView): name = "auth_token" + has_json_alternate = False async def get(self, request): token = request.args.get("token") or "" @@ -69,6 +71,7 @@ class AuthTokenView(BaseView): class LogoutView(BaseView): name = "logout" + has_json_alternate = False async def get(self, request): if not request.actor: @@ -88,6 +91,7 @@ class LogoutView(BaseView): class PermissionsDebugView(BaseView): name = "permissions_debug" + has_json_alternate = False async def get(self, request): await self.check_permission(request, "view-instance") @@ -103,6 +107,7 @@ class PermissionsDebugView(BaseView): class AllowDebugView(BaseView): name = "allow_debug" + has_json_alternate = False async def get(self, request): errors = [] @@ -137,6 +142,7 @@ class AllowDebugView(BaseView): class MessagesDebugView(BaseView): name = "messages_debug" + has_json_alternate = False async def get(self, request): await self.check_permission(request, 
"view-instance") diff --git a/tests/test_html.py b/tests/test_html.py index 4b6cbd13..d5f4250d 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -922,3 +922,15 @@ def test_alternate_url_json(app_client, path, expected): ) in response.text ) + + +@pytest.mark.parametrize( + "path", + ("/-/patterns", "/-/messages", "/-/allow-debug", "/fixtures.db"), +) +def test_no_alternate_url_json(app_client, path): + response = app_client.get(path) + assert "link" not in response.headers + assert ( + '<link rel="alternate" type="application/json+datasette"' not in response.text + ) From a9d8824617268c4d214dd3be2174ac452044f737 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 2 Feb 2022 13:58:52 -0800 Subject: [PATCH 0120/1103] Test against Python 3.11-dev Closes #1621 --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2caf9447..478e1f34 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,7 +7,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11-dev"] steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} From b5e6b1a9e1332fca3effe45d55dd06ee4249f163 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 2 Feb 2022 14:23:51 -0800 Subject: [PATCH 0121/1103] Bump black from 21.12b0 to 22.1.0 (#1616) Bumps [black](https://github.com/psf/black) from 21.12b0 to 22.1.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/commits/22.1.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development ... Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index dade0a88..6accaa30 100644 --- a/setup.py +++ b/setup.py @@ -71,7 +71,7 @@ setup( "pytest-xdist>=2.2.1,<2.6", "pytest-asyncio>=0.10,<0.17", "beautifulsoup4>=4.8.1,<4.11.0", - "black==21.12b0", + "black==22.1.0", "pytest-timeout>=1.4.2,<2.1", "trustme>=0.7,<0.10", "cogapp>=3.3.0", From 1af1041f91a9b91b321078d354132d1df5204660 Mon Sep 17 00:00:00 2001 From: Robert Christie <robc@pobox.com> Date: Thu, 3 Feb 2022 01:58:35 +0000 Subject: [PATCH 0122/1103] Jinja template_name should use "/" even on Windows (#1617) Closes #1545. Thanks, Robert Christie --- datasette/app.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 09d7d034..7bdf076c 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1212,9 +1212,10 @@ class DatasetteRouter: else: # Is there a pages/* template matching this path? 
route_path = request.scope.get("route_path", request.scope["path"]) - template_path = os.path.join("pages", *route_path.split("/")) + ".html" + # Jinja requires template names to use "/" even on Windows + template_name = "pages" + route_path + ".html" try: - template = self.ds.jinja_env.select_template([template_path]) + template = self.ds.jinja_env.select_template([template_name]) except TemplateNotFound: template = None if template is None: From ac239d34ab2de6987afac43f5d38b576b26e9457 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 4 Feb 2022 20:45:13 -0800 Subject: [PATCH 0123/1103] Refactor test_trace into separate test module, refs #1576 --- tests/test_api.py | 51 ------------------------------------------ tests/test_tracer.py | 53 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+), 51 deletions(-) create mode 100644 tests/test_tracer.py diff --git a/tests/test_api.py b/tests/test_api.py index 9741ffc5..57471af2 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -911,57 +911,6 @@ def test_config_force_https_urls(): assert client.ds._last_request.scheme == "https" -@pytest.mark.parametrize("trace_debug", (True, False)) -def test_trace(trace_debug): - with make_app_client(settings={"trace_debug": trace_debug}) as client: - response = client.get("/fixtures/simple_primary_key.json?_trace=1") - assert response.status == 200 - - data = response.json - if not trace_debug: - assert "_trace" not in data - return - - assert "_trace" in data - trace_info = data["_trace"] - assert isinstance(trace_info["request_duration_ms"], float) - assert isinstance(trace_info["sum_trace_duration_ms"], float) - assert isinstance(trace_info["num_traces"], int) - assert isinstance(trace_info["traces"], list) - traces = trace_info["traces"] - assert len(traces) == trace_info["num_traces"] - for trace in traces: - assert isinstance(trace["type"], str) - assert isinstance(trace["start"], float) - assert isinstance(trace["end"], float) - assert trace["duration_ms"] == (trace["end"] - trace["start"]) * 1000 - assert isinstance(trace["traceback"], list) - assert isinstance(trace["database"], str) - assert isinstance(trace["sql"], str) - assert isinstance(trace.get("params"), (list, dict, None.__class__)) - - sqls = [trace["sql"] for trace in traces if "sql" in trace] - # There should be a mix of different types of SQL statement - expected = ( - "CREATE TABLE ", - "PRAGMA ", - "INSERT OR REPLACE INTO ", - "INSERT INTO", - "select ", - ) - for prefix in expected: - assert any( - sql.startswith(prefix) for sql in sqls - ), "No trace beginning with: {}".format(prefix) - - # Should be at least one executescript - assert any(trace for trace in traces if trace.get("executescript")) - # And at least one executemany - execute_manys = [trace for trace in traces if trace.get("executemany")] - assert execute_manys - assert all(isinstance(trace["count"], int) for trace in execute_manys) - - @pytest.mark.parametrize( "path,status_code", [ diff --git a/tests/test_tracer.py b/tests/test_tracer.py new file mode 100644 index 00000000..20a4427e --- /dev/null +++ b/tests/test_tracer.py @@ -0,0 +1,53 @@ +import pytest +from .fixtures import make_app_client + + +@pytest.mark.parametrize("trace_debug", (True, False)) +def test_trace(trace_debug): + with make_app_client(settings={"trace_debug": trace_debug}) as client: + response = client.get("/fixtures/simple_primary_key.json?_trace=1") + assert response.status == 200 + + data = response.json + if not trace_debug: + assert 
"_trace" not in data + return + + assert "_trace" in data + trace_info = data["_trace"] + assert isinstance(trace_info["request_duration_ms"], float) + assert isinstance(trace_info["sum_trace_duration_ms"], float) + assert isinstance(trace_info["num_traces"], int) + assert isinstance(trace_info["traces"], list) + traces = trace_info["traces"] + assert len(traces) == trace_info["num_traces"] + for trace in traces: + assert isinstance(trace["type"], str) + assert isinstance(trace["start"], float) + assert isinstance(trace["end"], float) + assert trace["duration_ms"] == (trace["end"] - trace["start"]) * 1000 + assert isinstance(trace["traceback"], list) + assert isinstance(trace["database"], str) + assert isinstance(trace["sql"], str) + assert isinstance(trace.get("params"), (list, dict, None.__class__)) + + sqls = [trace["sql"] for trace in traces if "sql" in trace] + # There should be a mix of different types of SQL statement + expected = ( + "CREATE TABLE ", + "PRAGMA ", + "INSERT OR REPLACE INTO ", + "INSERT INTO", + "select ", + ) + for prefix in expected: + assert any( + sql.startswith(prefix) for sql in sqls + ), "No trace beginning with: {}".format(prefix) + + # Should be at least one executescript + assert any(trace for trace in traces if trace.get("executescript")) + # And at least one executemany + execute_manys = [trace for trace in traces if trace.get("executemany")] + assert execute_manys + assert all(isinstance(trace["count"], int) for trace in execute_manys) From da53e0360da4771ffb56a8e3eb3f7476f3168299 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 4 Feb 2022 21:19:49 -0800 Subject: [PATCH 0124/1103] tracer.trace_child_tasks() for asyncio.gather tracing Also added documentation for datasette.tracer module. Closes #1576 --- datasette/tracer.py | 20 +++++++---- docs/internals.rst | 71 ++++++++++++++++++++++++++++++++++++++ tests/plugins/my_plugin.py | 12 +++++++ tests/test_tracer.py | 15 ++++++++ 4 files changed, 111 insertions(+), 7 deletions(-) diff --git a/datasette/tracer.py b/datasette/tracer.py index 6703f060..fc7338b0 100644 --- a/datasette/tracer.py +++ b/datasette/tracer.py @@ -1,5 +1,6 @@ import asyncio from contextlib import contextmanager +from contextvars import ContextVar from markupsafe import escape import time import json @@ -9,20 +10,25 @@ tracers = {} TRACE_RESERVED_KEYS = {"type", "start", "end", "duration_ms", "traceback"} - -# asyncio.current_task was introduced in Python 3.7: -for obj in (asyncio, asyncio.Task): - current_task = getattr(obj, "current_task", None) - if current_task is not None: - break +trace_task_id = ContextVar("trace_task_id", default=None) def get_task_id(): + current = trace_task_id.get(None) + if current is not None: + return current try: loop = asyncio.get_event_loop() except RuntimeError: return None - return id(current_task(loop=loop)) + return id(asyncio.current_task(loop=loop)) + + +@contextmanager +def trace_child_tasks(): + token = trace_task_id.set(get_task_id()) + yield + trace_task_id.reset(token) @contextmanager diff --git a/docs/internals.rst b/docs/internals.rst index 6a5666fd..a5dbdfb4 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -864,3 +864,74 @@ parse_metadata(content) This function accepts a string containing either JSON or YAML, expected to be of the format described in :ref:`metadata`. It returns a nested Python dictionary representing the parsed data from that string. 
If the metadata cannot be parsed as either JSON or YAML the function will raise a ``utils.BadMetadataError`` exception. + +.. _internals_tracer + +datasette.tracer +================ + +Running Datasette with ``--setting trace_debug 1`` enables trace debug output, which can then be viewed by adding ``?_trace=1`` to the query string for any page. + +You can see an example of this at the bottom of `latest.datasette.io/fixtures/facetable?_trace=1 <https://latest.datasette.io/fixtures/facetable?_trace=1>`__. The JSON output shows full details of every SQL query that was executed to generate the page. + +The `datasette-pretty-traces <https://datasette.io/plugins/datasette-pretty-traces>`__ plugin can be installed to provide a more readable display of this information. You can see `a demo of that here <https://latest-with-plugins.datasette.io/github/commits?_trace=1>`__. + +You can add your own custom traces to the JSON output using the ``trace()`` context manager. This takes a string that identifies the type of trace being recorded, and records any keyword arguments as additional JSON keys on the resulting trace object. + +The start and end time, duration and a traceback of where the trace was executed will be automatically attached to the JSON object. + +This example uses trace to record the start, end and duration of any HTTP GET requests made using the function: + +.. code-block:: python + + from datasette.tracer import trace + import httpx + + async def fetch_url(url): + with trace("fetch-url", url=url): + async with httpx.AsyncClient() as client: + return await client.get(url) + +.. _internals_tracer_trace_child_tasks + +Tracing child tasks +------------------- + +If your code uses a mechanism such as ``asyncio.gather()`` to execute code in additional tasks you may find that some of the traces are missing from the display. + +You can use the ``trace_child_tasks()`` context manager to ensure these child tasks are correctly handled. + +.. code-block:: python + + from datasette import tracer + + with tracer.trace_child_tasks(): + results = await asyncio.gather( + # ... async tasks here + ) + +This example uses the :ref:`register_routes() <plugin_register_routes>` plugin hook to add a page at ``/parallel-queries`` which executes two SQL queries in parallel using ``asyncio.gather()`` and returns their results. + +.. code-block:: python + + from datasette import hookimpl + from datasette import tracer + + @hookimpl + def register_routes(): + + async def parallel_queries(datasette): + db = datasette.get_database() + with tracer.trace_child_tasks(): + one, two = await asyncio.gather( + db.execute("select 1"), + db.execute("select 2"), + ) + return Response.json({"one": one.single_value(), "two": two.single_value()}) + + return [ + (r"/parallel-queries$", parallel_queries), + ] + + +Adding ``?_trace=1`` will show that the trace covers both of those child tasks. 
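For reference, a self-contained version of the ``register_routes()`` example above might look like the following. This is a minimal sketch rather than part of the committed patch: it adds the ``import asyncio`` and ``Response`` imports the example needs, and it assumes ``Response`` is imported from ``datasette.utils.asgi`` (the ``from datasette import Response`` shortcut only arrives a few patches later).

.. code-block:: python

    import asyncio

    from datasette import hookimpl, tracer
    from datasette.utils.asgi import Response


    @hookimpl
    def register_routes():
        async def parallel_queries(datasette):
            # Run two queries against the same database in parallel,
            # keeping their traces attached to the current request
            db = datasette.get_database()
            with tracer.trace_child_tasks():
                one, two = await asyncio.gather(
                    db.execute("select 1"),
                    db.execute("select 2"),
                )
            return Response.json(
                {"one": one.single_value(), "two": two.single_value()}
            )

        return [
            (r"/parallel-queries$", parallel_queries),
        ]

With ``trace_debug`` enabled, visiting ``/parallel-queries?_trace=1`` should then show both ``select`` queries in the trace output.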
diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 75c76ea8..610cea17 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -1,5 +1,7 @@ +import asyncio from datasette import hookimpl from datasette.facets import Facet +from datasette import tracer from datasette.utils import path_with_added_args from datasette.utils.asgi import asgi_send_json, Response import base64 @@ -270,6 +272,15 @@ def register_routes(): def asgi_scope(scope): return Response.json(scope, default=repr) + async def parallel_queries(datasette): + db = datasette.get_database() + with tracer.trace_child_tasks(): + one, two = await asyncio.gather( + db.execute("select coalesce(sleep(0.1), 1)"), + db.execute("select coalesce(sleep(0.1), 2)"), + ) + return Response.json({"one": one.single_value(), "two": two.single_value()}) + return [ (r"/one/$", one), (r"/two/(?P<name>.*)$", two), @@ -281,6 +292,7 @@ def register_routes(): (r"/add-message/$", add_message), (r"/render-message/$", render_message), (r"/asgi-scope$", asgi_scope), + (r"/parallel-queries$", parallel_queries), ] diff --git a/tests/test_tracer.py b/tests/test_tracer.py index 20a4427e..ceadee50 100644 --- a/tests/test_tracer.py +++ b/tests/test_tracer.py @@ -51,3 +51,18 @@ def test_trace(trace_debug): execute_manys = [trace for trace in traces if trace.get("executemany")] assert execute_manys assert all(isinstance(trace["count"], int) for trace in execute_manys) + + +def test_trace_parallel_queries(): + with make_app_client(settings={"trace_debug": True}) as client: + response = client.get("/parallel-queries?_trace=1") + assert response.status == 200 + + data = response.json + assert data["one"] == 1 + assert data["two"] == 2 + trace_info = data["_trace"] + traces = [trace for trace in trace_info["traces"] if "sql" in trace] + one, two = traces + # "two" should have started before "one" ended + assert two["start"] < one["end"] From 1c6b297e3ec288cf1f838796df499a9c21c31664 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 4 Feb 2022 21:28:35 -0800 Subject: [PATCH 0125/1103] Link to datasette.tracer from trace_debug docs, refs #1576 --- docs/settings.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/settings.rst b/docs/settings.rst index 7cc4bae0..da06d6a0 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -302,6 +302,8 @@ Some examples: * https://latest.datasette.io/?_trace=1 * https://latest.datasette.io/fixtures/roadside_attractions?_trace=1 +See :ref:`internals_tracer` for details on how to hook into this mechanism as a plugin author. + .. _setting_base_url: base_url From d25b55ab5e4d7368d374ea752b2232755869d40d Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 5 Feb 2022 22:32:23 -0800 Subject: [PATCH 0126/1103] Fixed rST warnings --- docs/internals.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/internals.rst b/docs/internals.rst index a5dbdfb4..0b010295 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -865,7 +865,7 @@ This function accepts a string containing either JSON or YAML, expected to be of If the metadata cannot be parsed as either JSON or YAML the function will raise a ``utils.BadMetadataError`` exception. -.. _internals_tracer +.. _internals_tracer: datasette.tracer ================ @@ -892,7 +892,7 @@ This example uses trace to record the start, end and duration of any HTTP GET re async with httpx.AsyncClient() as client: return await client.get(url) -.. 
_internals_tracer_trace_child_tasks +.. _internals_tracer_trace_child_tasks: Tracing child tasks ------------------- From 8a25ea9bcae7ae4c9a4bd99f90c955828ff5676d Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 5 Feb 2022 22:34:33 -0800 Subject: [PATCH 0127/1103] Implemented import shortcuts, closes #957 --- datasette/__init__.py | 2 ++ docs/internals.rst | 15 +++++++++++++++ docs/plugin_hooks.rst | 4 +++- tests/plugins/my_plugin.py | 9 +++++++++ 4 files changed, 29 insertions(+), 1 deletion(-) diff --git a/datasette/__init__.py b/datasette/__init__.py index 0e59760a..faa36051 100644 --- a/datasette/__init__.py +++ b/datasette/__init__.py @@ -1,3 +1,5 @@ from datasette.version import __version_info__, __version__ # noqa +from datasette.utils.asgi import Forbidden, NotFound, Response # noqa +from datasette.utils import actor_matches_allow # noqa from .hookspecs import hookimpl # noqa from .hookspecs import hookspec # noqa diff --git a/docs/internals.rst b/docs/internals.rst index 0b010295..632f7d7a 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -935,3 +935,18 @@ This example uses the :ref:`register_routes() <plugin_register_routes>` plugin h Adding ``?_trace=1`` will show that the trace covers both of those child tasks. + +.. _internals_shortcuts: + +Import shortcuts +================ + +The following commonly used symbols can be imported directly from the ``datasette`` module: + +.. code-block:: python + + from datasette import Response + from datasette import Forbidden + from datasette import NotFound + from datasette import hookimpl + from datasette import actor_matches_allow diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 88e1def0..1308b704 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -542,7 +542,7 @@ Return a list of ``(regex, view_function)`` pairs, something like this: .. code-block:: python - from datasette.utils.asgi import Response + from datasette import Response import html @@ -582,6 +582,8 @@ The view function can be a regular function or an ``async def`` function, depend The function can either return a :ref:`internals_response` or it can return nothing and instead respond directly to the request using the ASGI ``send`` function (for advanced uses only). +It can also rase the ``datasette.NotFound`` exception to return a 404 not found error, or the ``datasette.Forbidden`` exception for a 403 forbidden. + See :ref:`writing_plugins_designing_urls` for tips on designing the URL routes used by your plugin. 
Examples: `datasette-auth-github <https://datasette.io/plugins/datasette-auth-github>`__, `datasette-psutil <https://datasette.io/plugins/datasette-psutil>`__ diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 610cea17..1c9b0575 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -300,6 +300,15 @@ def register_routes(): def startup(datasette): datasette._startup_hook_fired = True + # And test some import shortcuts too + from datasette import Response + from datasette import Forbidden + from datasette import NotFound + from datasette import hookimpl + from datasette import actor_matches_allow + + _ = (Response, Forbidden, NotFound, hookimpl, actor_matches_allow) + @hookimpl def canned_queries(datasette, database, actor): From 9b83ff2ee4d3cb5bfc5cb09a3ec99819ac214434 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 5 Feb 2022 22:46:33 -0800 Subject: [PATCH 0128/1103] Fixed spelling of "raise" --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 1308b704..a63d441e 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -582,7 +582,7 @@ The view function can be a regular function or an ``async def`` function, depend The function can either return a :ref:`internals_response` or it can return nothing and instead respond directly to the request using the ASGI ``send`` function (for advanced uses only). -It can also rase the ``datasette.NotFound`` exception to return a 404 not found error, or the ``datasette.Forbidden`` exception for a 403 forbidden. +It can also raise the ``datasette.NotFound`` exception to return a 404 not found error, or the ``datasette.Forbidden`` exception for a 403 forbidden. See :ref:`writing_plugins_designing_urls` for tips on designing the URL routes used by your plugin. From d9b508ffaa91f9f1840b366f5d282712d445f16b Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 6 Feb 2022 22:30:00 -0800 Subject: [PATCH 0129/1103] @documented decorator plus unit test plus sphinx.ext.autodoc New mechanism for marking datasette.utils functions that should be covered by the documentation, then testing that they have indeed been documented. Also enabled sphinx.ext.autodoc which can now be used to embed the documented versions of those functions. Refs #1176 --- datasette/utils/__init__.py | 16 ++++++++++++++-- docs/conf.py | 2 +- docs/internals.rst | 11 +++++++++++ tests/test_docs.py | 18 +++++++++++++++++- 4 files changed, 43 insertions(+), 4 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index dc4e1c99..610e916f 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -12,6 +12,7 @@ import os import re import shlex import tempfile +import typing import time import types import shutil @@ -59,8 +60,17 @@ Column = namedtuple( "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk", "hidden") ) +functions_marked_as_documented = [] -async def await_me_maybe(value): + +def documented(fn): + functions_marked_as_documented.append(fn) + return fn + + +@documented +async def await_me_maybe(value: typing.Any) -> typing.Any: + "If value is callable, call it. If awaitable, await it. Otherwise return it." 
if callable(value): value = value() if asyncio.iscoroutine(value): @@ -915,7 +925,9 @@ class BadMetadataError(Exception): pass -def parse_metadata(content): +@documented +def parse_metadata(content: str) -> dict: + "Detects if content is JSON or YAML and parses it appropriately." # content can be JSON or YAML try: return json.loads(content) diff --git a/docs/conf.py b/docs/conf.py index 89009ea9..d114bc52 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -31,7 +31,7 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ["sphinx.ext.extlinks"] +extensions = ["sphinx.ext.extlinks", "sphinx.ext.autodoc"] extlinks = { "issue": ("https://github.com/simonw/datasette/issues/%s", "#"), diff --git a/docs/internals.rst b/docs/internals.rst index 632f7d7a..12ef5c54 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -865,6 +865,17 @@ This function accepts a string containing either JSON or YAML, expected to be of If the metadata cannot be parsed as either JSON or YAML the function will raise a ``utils.BadMetadataError`` exception. +.. autofunction:: datasette.utils.parse_metadata + +.. _internals_utils_await_me_maybe: + +await_me_maybe(value) +--------------------- + +Utility function for calling ``await`` on a return value if it is awaitable, otherwise returning the value. This is used by Datasette to support plugin hooks that can optionally return awaitable functions. Read more about this function in `The “await me maybe” pattern for Python asyncio <https://simonwillison.net/2020/Sep/2/await-me-maybe/>`__. + +.. autofunction:: datasette.utils.await_me_maybe + .. _internals_tracer: datasette.tracer diff --git a/tests/test_docs.py b/tests/test_docs.py index 0d17b8e3..cd5a6c13 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -2,7 +2,7 @@ Tests to ensure certain things are documented. """ from click.testing import CliRunner -from datasette import app +from datasette import app, utils from datasette.cli import cli from datasette.filters import Filters from pathlib import Path @@ -86,3 +86,19 @@ def documented_table_filters(): @pytest.mark.parametrize("filter", [f.key for f in Filters._filters]) def test_table_filters_are_documented(documented_table_filters, filter): assert filter in documented_table_filters + + +@pytest.fixture(scope="session") +def documented_fns(): + internals_rst = (docs_path / "internals.rst").read_text() + # Any line that starts .. _internals_utils_X + lines = internals_rst.split("\n") + prefix = ".. 
_internals_utils_" + return { + line.split(prefix)[1].split(":")[0] for line in lines if line.startswith(prefix) + } + + +@pytest.mark.parametrize("fn", utils.functions_marked_as_documented) +def test_functions_marked_with_documented_are_documented(documented_fns, fn): + assert fn.__name__ in documented_fns From fdce6f29e19c3c6b477b72f86e187abee9627b92 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 6 Feb 2022 22:38:27 -0800 Subject: [PATCH 0130/1103] Reconfigure ReadTheDocs, refs #1176 --- .readthedocs.yaml | 8 ++++++-- docs/readthedocs-requirements.txt | 1 - 2 files changed, 6 insertions(+), 3 deletions(-) delete mode 100644 docs/readthedocs-requirements.txt diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 70db5313..60b73b30 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -9,5 +9,9 @@ sphinx: configuration: docs/conf.py python: - install: - - requirements: docs/readthedocs-requirements.txt + version: "3.9" + install: + - method: pip + path: . + extra_requirements: + - docs diff --git a/docs/readthedocs-requirements.txt b/docs/readthedocs-requirements.txt deleted file mode 100644 index db1851ad..00000000 --- a/docs/readthedocs-requirements.txt +++ /dev/null @@ -1 +0,0 @@ -docutils<0.19 From 03305ea183b1534bc4cef3a721fe5f3700273b84 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 6 Feb 2022 22:40:47 -0800 Subject: [PATCH 0131/1103] Remove python.version, refs #1176 --- .readthedocs.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 60b73b30..e157fb9c 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -9,7 +9,6 @@ sphinx: configuration: docs/conf.py python: - version: "3.9" install: - method: pip path: . From 0cd982fc6af45b60e0c9306516dd412ae948c89b Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 7 Feb 2022 15:28:46 -0800 Subject: [PATCH 0132/1103] De-duplicate 'datasette db.db db.db', closes #1632 Refs https://github.com/simonw/datasette-publish-fly/pull/12 --- datasette/cli.py | 3 +++ tests/test_cli.py | 12 ++++++++++++ 2 files changed, 15 insertions(+) diff --git a/datasette/cli.py b/datasette/cli.py index 9d1b5ee5..61e7ce91 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -549,6 +549,9 @@ def serve( ) ) + # De-duplicate files so 'datasette db.db db.db' only attaches one /db + files = list(dict.fromkeys(files)) + try: ds = Datasette(files, **kwargs) except SpatialiteNotFound: diff --git a/tests/test_cli.py b/tests/test_cli.py index bbc5df30..3fbfdee2 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -257,6 +257,7 @@ def test_serve_create(ensure_eventloop, tmpdir): def test_serve_duplicate_database_names(ensure_eventloop, tmpdir): + "'datasette db.db nested/db.db' should attach two databases, /db and /db_2" runner = CliRunner() db_1_path = str(tmpdir / "db.db") nested = tmpdir / "nested" @@ -270,6 +271,17 @@ def test_serve_duplicate_database_names(ensure_eventloop, tmpdir): assert {db["name"] for db in databases} == {"db", "db_2"} +def test_serve_deduplicate_same_database_path(ensure_eventloop, tmpdir): + "'datasette db.db db.db' should only attach one database, /db" + runner = CliRunner() + db_path = str(tmpdir / "db.db") + sqlite3.connect(db_path).execute("vacuum") + result = runner.invoke(cli, [db_path, db_path, "--get", "/-/databases.json"]) + assert result.exit_code == 0, result.output + databases = json.loads(result.output) + assert {db["name"] for db in databases} == {"db"} + + @pytest.mark.parametrize( 
"filename", ["test-database (1).sqlite", "database (1).sqlite"] ) From fa5fc327adbbf70656ac533912f3fc0526a3873d Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 7 Feb 2022 15:32:54 -0800 Subject: [PATCH 0133/1103] Release 0.60.2 Refs #1632 --- datasette/version.py | 2 +- docs/changelog.rst | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index a4e340b3..91224615 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.60" +__version__ = "0.60.2" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index d7e2af39..c58c8444 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,20 @@ Changelog ========= +.. _v0_60.2: + +0.60.2 (2022-02-07) +------------------- + +- Fixed a bug where Datasette would open the same file twice with two different database names if you ran ``datasette file.db file.db``. (:issue:`1632`) + +.. _v0_60.1: + +0.60.1 (2022-01-20) +------------------- + +- Fixed a bug where installation on Python 3.6 stopped working due to a change to an underlying dependency. This release can now be installed on Python 3.6, but is the last release of Datasette that will support anything less than Python 3.7. (:issue:`1609`) + .. _v0_60: 0.60 (2022-01-13) From 5bfd001b55357106dba090c83a1c88912a004665 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 7 Feb 2022 15:42:37 -0800 Subject: [PATCH 0134/1103] Use de-dupe idiom that works with Python 3.6, refs #1632 --- datasette/cli.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/datasette/cli.py b/datasette/cli.py index 61e7ce91..a8da0741 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -550,7 +550,9 @@ def serve( ) # De-duplicate files so 'datasette db.db db.db' only attaches one /db - files = list(dict.fromkeys(files)) + files_seen = set() + deduped_files = [f for f in files if f not in files_seen and not files_seen.add(f)] + files = deduped_files try: ds = Datasette(files, **kwargs) From 1b2f0ab6bbc9274dac1ba5fe126b1d6b8587ea96 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 7 Feb 2022 15:43:20 -0800 Subject: [PATCH 0135/1103] Revert "Use de-dupe idiom that works with Python 3.6, refs #1632" This reverts commit 5bfd001b55357106dba090c83a1c88912a004665. No need for this on the main branch because it doesn't support Python 3.6 any more. 
--- datasette/cli.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index a8da0741..61e7ce91 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -550,9 +550,7 @@ def serve( ) # De-duplicate files so 'datasette db.db db.db' only attaches one /db - files_seen = set() - deduped_files = [f for f in files if f not in files_seen and not files_seen.add(f)] - files = deduped_files + files = list(dict.fromkeys(files)) try: ds = Datasette(files, **kwargs) From 458f03ad3a454d271f47a643f4530bd8b60ddb76 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 8 Feb 2022 22:32:19 -0800 Subject: [PATCH 0136/1103] More SpatiaLite details on /-/versions, closes #1607 --- datasette/app.py | 12 ++++++++++++ datasette/utils/__init__.py | 32 ++++++++++++++++++++++++++++++++ tests/test_spatialite.py | 21 +++++++++++++++++++++ 3 files changed, 65 insertions(+) create mode 100644 tests/test_spatialite.py diff --git a/datasette/app.py b/datasette/app.py index 7bdf076c..8c5480cf 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -46,6 +46,7 @@ from .database import Database, QueryInterrupted from .utils import ( PrefixedUrlString, + SPATIALITE_FUNCTIONS, StartupError, add_cors_headers, async_call_with_supported_arguments, @@ -724,6 +725,17 @@ class Datasette: sqlite_extensions[extension] = None except Exception: pass + # More details on SpatiaLite + if "spatialite" in sqlite_extensions: + spatialite_details = {} + for fn in SPATIALITE_FUNCTIONS: + try: + result = conn.execute("select {}()".format(fn)) + spatialite_details[fn] = result.fetchone()[0] + except Exception as e: + spatialite_details[fn] = {"error": str(e)} + sqlite_extensions["spatialite"] = spatialite_details + # Figure out supported FTS versions fts_versions = [] for fts in ("FTS5", "FTS4", "FTS3"): diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 610e916f..e17b4d7f 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -52,9 +52,41 @@ SPATIALITE_PATHS = ( "/usr/local/lib/mod_spatialite.dylib", "/usr/local/lib/mod_spatialite.so", ) +# Used to display /-/versions.json SpatiaLite information +SPATIALITE_FUNCTIONS = ( + "spatialite_version", + "spatialite_target_cpu", + "check_strict_sql_quoting", + "freexl_version", + "proj_version", + "geos_version", + "rttopo_version", + "libxml2_version", + "HasIconv", + "HasMathSQL", + "HasGeoCallbacks", + "HasProj", + "HasProj6", + "HasGeos", + "HasGeosAdvanced", + "HasGeosTrunk", + "HasGeosReentrant", + "HasGeosOnlyReentrant", + "HasMiniZip", + "HasRtTopo", + "HasLibXML2", + "HasEpsg", + "HasFreeXL", + "HasGeoPackage", + "HasGCP", + "HasTopology", + "HasKNN", + "HasRouting", +) # Length of hash subset used in hashed URLs: HASH_LENGTH = 7 + # Can replace this with Column from sqlite_utils when I add that dependency Column = namedtuple( "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk", "hidden") diff --git a/tests/test_spatialite.py b/tests/test_spatialite.py new file mode 100644 index 00000000..8b98c5d6 --- /dev/null +++ b/tests/test_spatialite.py @@ -0,0 +1,21 @@ +from datasette.app import Datasette +from datasette.utils import find_spatialite, SpatialiteNotFound, SPATIALITE_FUNCTIONS +import pytest + + +def has_spatialite(): + try: + find_spatialite() + return True + except SpatialiteNotFound: + return False + + +@pytest.mark.asyncio +@pytest.mark.skipif(not has_spatialite(), reason="Requires SpatiaLite") +async def test_spatialite_version_info(): + ds = 
Datasette(sqlite_extensions=["spatialite"]) + response = await ds.client.get("/-/versions.json") + assert response.status_code == 200 + spatialite = response.json()["sqlite"]["extensions"]["spatialite"] + assert set(SPATIALITE_FUNCTIONS) == set(spatialite) From 7d24fd405f3c60e4c852c5d746c91aa2ba23cf5b Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 9 Feb 2022 09:47:54 -0800 Subject: [PATCH 0137/1103] datasette-auth-passwords is now an example of register_commands Refs https://github.com/simonw/datasette-auth-passwords/issues/19 --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index a63d441e..92cf662f 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -633,7 +633,7 @@ Note that ``register_commands()`` plugins cannot used with the :ref:`--plugins-d pip install -e path/to/my/datasette-plugin -Example: `datasette-verify <https://datasette.io/plugins/datasette-verify>`_ +Examples: `datasette-auth-passwords <https://datasette.io/plugins/datasette-auth-passwords>`__, `datasette-verify <https://datasette.io/plugins/datasette-verify>`__ .. _plugin_register_facet_classes: From dd94157f8958bdfe9f45575add934ccf1aba6d63 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 27 Feb 2022 10:04:03 -0800 Subject: [PATCH 0138/1103] Link to tutorials from documentation index page --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index acca943f..a2888822 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -25,7 +25,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover `Explore a demo <https://fivethirtyeight.datasettes.com/fivethirtyeight>`__, watch `a presentation about the project <https://static.simonwillison.net/static/2018/pybay-datasette/>`__ or :ref:`getting_started_glitch`. -More examples: https://datasette.io/examples +Interested in learning Datasette? Start with `the official tutorials <https://datasette.io/tutorials>`__. Support questions, feedback? Join our `GitHub Discussions forum <https://github.com/simonw/datasette/discussions>`__. 
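To illustrate the ``/-/versions`` SpatiaLite change in patch 0136 above: the block below is a minimal sketch, not part of any patch, showing how the newly exposed detail functions can be read programmatically. It assumes ``mod_spatialite`` is installed where Datasette can find it, and the ``show_spatialite_details()`` helper name is made up for this example.

.. code-block:: python

    import asyncio

    from datasette.app import Datasette


    async def show_spatialite_details():
        ds = Datasette(sqlite_extensions=["spatialite"])
        response = await ds.client.get("/-/versions.json")
        # Each entry corresponds to one of the SPATIALITE_FUNCTIONS names
        details = response.json()["sqlite"]["extensions"]["spatialite"]
        for fn, value in details.items():
            print(fn, value)


    asyncio.run(show_spatialite_details())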
From 5010d1359b9e9db90a5a69a3ca22d12862893e00 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 5 Mar 2022 11:45:04 -0800 Subject: [PATCH 0139/1103] Fix for test failure caused by SQLite 3.37.0+, closes #1647 --- datasette/templates/_table.html | 2 +- tests/test_internals_database.py | 10 +++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/datasette/templates/_table.html b/datasette/templates/_table.html index d91a1a57..5332f831 100644 --- a/datasette/templates/_table.html +++ b/datasette/templates/_table.html @@ -4,7 +4,7 @@ <thead> <tr> {% for column in display_columns %} - <th {% if column.description %}data-column-description="{{ column.description }}" {% endif %}class="col-{{ column.name|to_css_class }}" scope="col" data-column="{{ column.name }}" data-column-type="{{ column.type }}" data-column-not-null="{{ column.notnull }}" data-is-pk="{% if column.is_pk %}1{% else %}0{% endif %}"> + <th {% if column.description %}data-column-description="{{ column.description }}" {% endif %}class="col-{{ column.name|to_css_class }}" scope="col" data-column="{{ column.name }}" data-column-type="{{ column.type.lower() }}" data-column-not-null="{{ column.notnull }}" data-is-pk="{% if column.is_pk %}1{% else %}0{% endif %}"> {% if not column.sortable %} {{ column.name }} {% else %} diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index bcecb486..31538a24 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -279,7 +279,15 @@ async def test_table_columns(db, table, expected): @pytest.mark.asyncio async def test_table_column_details(db, table, expected): columns = await db.table_column_details(table) - assert columns == expected + # Convert "type" to lowercase before comparison + # https://github.com/simonw/datasette/issues/1647 + compare_columns = [ + Column( + c.cid, c.name, c.type.lower(), c.notnull, c.default_value, c.is_pk, c.hidden + ) + for c in columns + ] + assert compare_columns == expected @pytest.mark.asyncio From a22ec96c3ac555337eb49121450723a273fb52d1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 5 Mar 2022 17:29:53 -0800 Subject: [PATCH 0140/1103] Update pytest-asyncio requirement from <0.17,>=0.10 to >=0.10,<0.19 (#1631) Updates the requirements on [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) to permit the latest version. - [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) - [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.10.0...v0.18.0) --- updated-dependencies: - dependency-name: pytest-asyncio dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 6accaa30..6a097e0f 100644 --- a/setup.py +++ b/setup.py @@ -69,7 +69,7 @@ setup( "test": [ "pytest>=5.2.2,<6.3.0", "pytest-xdist>=2.2.1,<2.6", - "pytest-asyncio>=0.10,<0.17", + "pytest-asyncio>=0.10,<0.19", "beautifulsoup4>=4.8.1,<4.11.0", "black==22.1.0", "pytest-timeout>=1.4.2,<2.1", From b21839dd1a005f6269c4e9a9f763195fe7aa9c86 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 5 Mar 2022 17:30:05 -0800 Subject: [PATCH 0141/1103] Update pytest requirement from <6.3.0,>=5.2.2 to >=5.2.2,<7.1.0 (#1629) Updates the requirements on [pytest](https://github.com/pytest-dev/pytest) to permit the latest version. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/5.2.2...7.0.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development ... Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 6a097e0f..b9db0700 100644 --- a/setup.py +++ b/setup.py @@ -67,7 +67,7 @@ setup( extras_require={ "docs": ["sphinx_rtd_theme", "sphinx-autobuild", "codespell"], "test": [ - "pytest>=5.2.2,<6.3.0", + "pytest>=5.2.2,<7.1.0", "pytest-xdist>=2.2.1,<2.6", "pytest-asyncio>=0.10,<0.19", "beautifulsoup4>=4.8.1,<4.11.0", From 73f2d25f70d741c6b53f7312674c91f0aec83e17 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 5 Mar 2022 17:30:27 -0800 Subject: [PATCH 0142/1103] Update asgiref requirement from <3.5.0,>=3.2.10 to >=3.2.10,<3.6.0 (#1610) Updates the requirements on [asgiref](https://github.com/django/asgiref) to permit the latest version. - [Release notes](https://github.com/django/asgiref/releases) - [Changelog](https://github.com/django/asgiref/blob/main/CHANGELOG.txt) - [Commits](https://github.com/django/asgiref/compare/3.2.10...3.5.0) --- updated-dependencies: - dependency-name: asgiref dependency-type: direct:production ... Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index b9db0700..b13f7496 100644 --- a/setup.py +++ b/setup.py @@ -42,7 +42,7 @@ setup( include_package_data=True, python_requires=">=3.7", install_requires=[ - "asgiref>=3.2.10,<3.5.0", + "asgiref>=3.2.10,<3.6.0", "click>=7.1.1,<8.1.0", "click-default-group~=1.2.2", "Jinja2>=2.10.3,<3.1.0", From 7b78279b93b6e7a5fce6b53e5a85ca421a801496 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 5 Mar 2022 17:41:49 -0800 Subject: [PATCH 0143/1103] Update pytest-timeout requirement from <2.1,>=1.4.2 to >=1.4.2,<2.2 (#1602) Updates the requirements on [pytest-timeout](https://github.com/pytest-dev/pytest-timeout) to permit the latest version. 
- [Release notes](https://github.com/pytest-dev/pytest-timeout/releases) - [Commits](https://github.com/pytest-dev/pytest-timeout/compare/1.4.2...2.1.0) --- updated-dependencies: - dependency-name: pytest-timeout dependency-type: direct:development ... Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index b13f7496..8e69c2f5 100644 --- a/setup.py +++ b/setup.py @@ -72,7 +72,7 @@ setup( "pytest-asyncio>=0.10,<0.19", "beautifulsoup4>=4.8.1,<4.11.0", "black==22.1.0", - "pytest-timeout>=1.4.2,<2.1", + "pytest-timeout>=1.4.2,<2.2", "trustme>=0.7,<0.10", "cogapp>=3.3.0", ], From 0499f174c063283aa9b589d475a32077aaf7adc5 Mon Sep 17 00:00:00 2001 From: David Larlet <3556+davidbgk@users.noreply.github.com> Date: Sat, 5 Mar 2022 20:58:31 -0500 Subject: [PATCH 0144/1103] Typo in docs about default redirect status code (#1589) --- docs/custom_templates.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/custom_templates.rst b/docs/custom_templates.rst index 3e4eb633..97dea2af 100644 --- a/docs/custom_templates.rst +++ b/docs/custom_templates.rst @@ -428,7 +428,7 @@ You can use the ``custom_redirect(location)`` function to redirect users to anot Now requests to ``http://localhost:8001/datasette`` will result in a redirect. -These redirects are served with a ``301 Found`` status code by default. You can send a ``301 Moved Permanently`` code by passing ``301`` as the second argument to the function: +These redirects are served with a ``302 Found`` status code by default. You can send a ``301 Moved Permanently`` code by passing ``301`` as the second argument to the function: .. code-block:: jinja From de810f49cc57a4f88e4a1553d26c579253ce4531 Mon Sep 17 00:00:00 2001 From: Dan Peterson <danp@danp.net> Date: Sun, 6 Mar 2022 15:39:15 -0400 Subject: [PATCH 0145/1103] Add /opt/homebrew to where spatialite extension can be found (#1649) Helps homebrew on Apple Silicon setups find spatialite without needing a full path. 
Similar to #1114 Thanks, @danp --- datasette/utils/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index e17b4d7f..133b9bc7 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -51,6 +51,7 @@ SPATIALITE_PATHS = ( "/usr/lib/x86_64-linux-gnu/mod_spatialite.so", "/usr/local/lib/mod_spatialite.dylib", "/usr/local/lib/mod_spatialite.so", + "/opt/homebrew/lib/mod_spatialite.dylib", ) # Used to display /-/versions.json SpatiaLite information SPATIALITE_FUNCTIONS = ( From 1baa030eca375f839f3471237547ab403523e643 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 7 Mar 2022 07:38:29 -0800 Subject: [PATCH 0146/1103] Switch to dash encoding for table/database/row-pk in paths * Dash encoding functions, tests and docs, refs #1439 * dash encoding is now like percent encoding but with dashes * Use dash-encoding for row PKs and ?_next=, refs #1439 * Use dash encoding for table names, refs #1439 * Use dash encoding for database names, too, refs #1439 See also https://simonwillison.net/2022/Mar/5/dash-encoding/ --- datasette/url_builder.py | 10 ++++---- datasette/utils/__init__.py | 41 ++++++++++++++++++++++++++--- datasette/views/base.py | 24 ++++++++--------- datasette/views/table.py | 9 ++++--- docs/internals.rst | 26 +++++++++++++++++++ tests/fixtures.py | 1 + tests/test_api.py | 19 +++++++++++--- tests/test_cli.py | 5 ++-- tests/test_html.py | 50 ++++++++++++++++++++++++------------ tests/test_internals_urls.py | 2 +- tests/test_table_api.py | 7 +++-- tests/test_table_html.py | 12 ++++++--- tests/test_utils.py | 20 ++++++++++++++- 13 files changed, 173 insertions(+), 53 deletions(-) diff --git a/datasette/url_builder.py b/datasette/url_builder.py index 2bcda869..eebfe31e 100644 --- a/datasette/url_builder.py +++ b/datasette/url_builder.py @@ -1,4 +1,4 @@ -from .utils import path_with_format, HASH_LENGTH, PrefixedUrlString +from .utils import dash_encode, path_with_format, HASH_LENGTH, PrefixedUrlString import urllib @@ -31,20 +31,20 @@ class Urls: db = self.ds.databases[database] if self.ds.setting("hash_urls") and db.hash: path = self.path( - f"{urllib.parse.quote(database)}-{db.hash[:HASH_LENGTH]}", format=format + f"{dash_encode(database)}-{db.hash[:HASH_LENGTH]}", format=format ) else: - path = self.path(urllib.parse.quote(database), format=format) + path = self.path(dash_encode(database), format=format) return path def table(self, database, table, format=None): - path = f"{self.database(database)}/{urllib.parse.quote_plus(table)}" + path = f"{self.database(database)}/{dash_encode(table)}" if format is not None: path = path_with_format(path=path, format=format) return PrefixedUrlString(path) def query(self, database, query, format=None): - path = f"{self.database(database)}/{urllib.parse.quote_plus(query)}" + path = f"{self.database(database)}/{dash_encode(query)}" if format is not None: path = path_with_format(path=path, format=format) return PrefixedUrlString(path) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 133b9bc7..79feeef6 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -112,12 +112,12 @@ async def await_me_maybe(value: typing.Any) -> typing.Any: def urlsafe_components(token): - """Splits token on commas and URL decodes each component""" - return [urllib.parse.unquote_plus(b) for b in token.split(",")] + """Splits token on commas and dash-decodes each component""" + return [dash_decode(b) for b in 
token.split(",")] def path_from_row_pks(row, pks, use_rowid, quote=True): - """Generate an optionally URL-quoted unique identifier + """Generate an optionally dash-quoted unique identifier for a row from its primary keys.""" if use_rowid: bits = [row["rowid"]] @@ -126,7 +126,7 @@ def path_from_row_pks(row, pks, use_rowid, quote=True): row[pk]["value"] if isinstance(row[pk], dict) else row[pk] for pk in pks ] if quote: - bits = [urllib.parse.quote_plus(str(bit)) for bit in bits] + bits = [dash_encode(str(bit)) for bit in bits] else: bits = [str(bit) for bit in bits] @@ -1140,3 +1140,36 @@ def add_cors_headers(headers): headers["Access-Control-Allow-Origin"] = "*" headers["Access-Control-Allow-Headers"] = "Authorization" headers["Access-Control-Expose-Headers"] = "Link" + + +_DASH_ENCODING_SAFE = frozenset( + b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" + b"abcdefghijklmnopqrstuvwxyz" + b"0123456789_" + # This is the same as Python percent-encoding but I removed + # '.' and '-' and '~' +) + + +class DashEncoder(dict): + # Keeps a cache internally, via __missing__ + def __missing__(self, b): + # Handle a cache miss, store encoded string in cache and return. + res = chr(b) if b in _DASH_ENCODING_SAFE else "-{:02X}".format(b) + self[b] = res + return res + + +_dash_encoder = DashEncoder().__getitem__ + + +@documented +def dash_encode(s: str) -> str: + "Returns dash-encoded string - for example ``/foo/bar`` -> ``-2Ffoo-2Fbar``" + return "".join(_dash_encoder(char) for char in s.encode("utf-8")) + + +@documented +def dash_decode(s: str) -> str: + "Decodes a dash-encoded string, so ``-2Ffoo-2Fbar`` -> ``/foo/bar``" + return urllib.parse.unquote(s.replace("-", "%")) diff --git a/datasette/views/base.py b/datasette/views/base.py index c74d6141..7cd385b7 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -17,6 +17,8 @@ from datasette.utils import ( InvalidSql, LimitedWriter, call_with_supported_arguments, + dash_decode, + dash_encode, path_from_row_pks, path_with_added_args, path_with_removed_args, @@ -203,17 +205,17 @@ class DataView(BaseView): async def resolve_db_name(self, request, db_name, **kwargs): hash = None name = None - db_name = urllib.parse.unquote_plus(db_name) - if db_name not in self.ds.databases and "-" in db_name: + decoded_name = dash_decode(db_name) + if decoded_name not in self.ds.databases and "-" in db_name: # No matching DB found, maybe it's a name-hash? 
name_bit, hash_bit = db_name.rsplit("-", 1) - if name_bit not in self.ds.databases: + if dash_decode(name_bit) not in self.ds.databases: raise NotFound(f"Database not found: {name}") else: - name = name_bit + name = dash_decode(name_bit) hash = hash_bit else: - name = db_name + name = decoded_name try: db = self.ds.databases[name] @@ -233,9 +235,7 @@ class DataView(BaseView): return await db.table_exists(t) table, _format = await resolve_table_and_format( - table_and_format=urllib.parse.unquote_plus( - kwargs["table_and_format"] - ), + table_and_format=dash_decode(kwargs["table_and_format"]), table_exists=async_table_exists, allowed_formats=self.ds.renderers.keys(), ) @@ -243,11 +243,11 @@ class DataView(BaseView): if _format: kwargs["as_format"] = f".{_format}" elif kwargs.get("table"): - kwargs["table"] = urllib.parse.unquote_plus(kwargs["table"]) + kwargs["table"] = dash_decode(kwargs["table"]) should_redirect = self.ds.urls.path(f"{name}-{expected}") if kwargs.get("table"): - should_redirect += "/" + urllib.parse.quote_plus(kwargs["table"]) + should_redirect += "/" + dash_encode(kwargs["table"]) if kwargs.get("pk_path"): should_redirect += "/" + kwargs["pk_path"] if kwargs.get("as_format"): @@ -467,7 +467,7 @@ class DataView(BaseView): return await db.table_exists(t) table, _ext_format = await resolve_table_and_format( - table_and_format=urllib.parse.unquote_plus(args["table_and_format"]), + table_and_format=dash_decode(args["table_and_format"]), table_exists=async_table_exists, allowed_formats=self.ds.renderers.keys(), ) @@ -475,7 +475,7 @@ class DataView(BaseView): args["table"] = table del args["table_and_format"] elif "table" in args: - args["table"] = urllib.parse.unquote_plus(args["table"]) + args["table"] = dash_decode(args["table"]) return _format, args async def view_get(self, request, database, hash, correct_hash_provided, **kwargs): diff --git a/datasette/views/table.py b/datasette/views/table.py index be9e9c3b..1d81755e 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -12,6 +12,7 @@ from datasette.utils import ( MultiParams, append_querystring, compound_keys_after_sql, + dash_encode, escape_sqlite, filters_should_redirect, is_url, @@ -142,7 +143,7 @@ class RowTableShared(DataView): '<a href="{base_url}{database}/{table}/{flat_pks_quoted}">{flat_pks}</a>'.format( base_url=base_url, database=database, - table=urllib.parse.quote_plus(table), + table=dash_encode(table), flat_pks=str(markupsafe.escape(pk_path)), flat_pks_quoted=path_from_row_pks(row, pks, not pks), ) @@ -199,8 +200,8 @@ class RowTableShared(DataView): link_template.format( database=database, base_url=base_url, - table=urllib.parse.quote_plus(other_table), - link_id=urllib.parse.quote_plus(str(value)), + table=dash_encode(other_table), + link_id=dash_encode(str(value)), id=str(markupsafe.escape(value)), label=str(markupsafe.escape(label)) or "-", ) @@ -765,7 +766,7 @@ class TableView(RowTableShared): if prefix is None: prefix = "$null" else: - prefix = urllib.parse.quote_plus(str(prefix)) + prefix = dash_encode(str(prefix)) next_value = f"{prefix},{next_value}" added_args = {"_next": next_value} if sort: diff --git a/docs/internals.rst b/docs/internals.rst index 12ef5c54..d035e1f1 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -876,6 +876,32 @@ Utility function for calling ``await`` on a return value if it is awaitable, oth .. autofunction:: datasette.utils.await_me_maybe +.. 
_internals_dash_encoding: + +Dash encoding +------------- + +Datasette uses a custom encoding scheme in some places, called **dash encoding**. This is primarily used for table names and row primary keys, to avoid any confusion between ``/`` characters in those values and the Datasette URLs that reference them. + +Dash encoding uses the same algorithm as `URL percent-encoding <https://developer.mozilla.org/en-US/docs/Glossary/percent-encoding>`__, but with the ``-`` hyphen character used in place of ``%``. + +Any character other than ``ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz 0123456789_`` will be replaced by the numeric equivalent preceded by a hyphen. For example: + +- ``/`` becomes ``-2F`` +- ``.`` becomes ``-2E`` +- ``%`` becomes ``-25`` +- ``-`` becomes ``-2D`` +- Space character becomes ``-20`` +- ``polls/2022.primary`` becomes ``polls-2F2022-2Eprimary`` + +.. _internals_utils_dash_encode: + +.. autofunction:: datasette.utils.dash_encode + +.. _internals_utils_dash_decode: + +.. autofunction:: datasette.utils.dash_decode + .. _internals_tracer: datasette.tracer diff --git a/tests/fixtures.py b/tests/fixtures.py index 26f0cf7b..11f09c41 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -406,6 +406,7 @@ CREATE TABLE compound_primary_key ( ); INSERT INTO compound_primary_key VALUES ('a', 'b', 'c'); +INSERT INTO compound_primary_key VALUES ('a/b', '.c-d', 'c'); CREATE TABLE compound_three_primary_keys ( pk1 varchar(30), diff --git a/tests/test_api.py b/tests/test_api.py index 57471af2..dd916cf0 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -143,7 +143,7 @@ def test_database_page(app_client): "name": "compound_primary_key", "columns": ["pk1", "pk2", "content"], "primary_keys": ["pk1", "pk2"], - "count": 1, + "count": 2, "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, @@ -942,7 +942,7 @@ def test_cors(app_client_with_cors, path, status_code): ) def test_database_with_space_in_name(app_client_two_attached_databases, path): response = app_client_two_attached_databases.get( - "/extra database" + path, follow_redirects=True + "/extra-20database" + path, follow_redirects=True ) assert response.status == 200 @@ -953,7 +953,7 @@ def test_common_prefix_database_names(app_client_conflicting_database_names): d["name"] for d in app_client_conflicting_database_names.get("/-/databases.json").json ] - for db_name, path in (("foo", "/foo.json"), ("foo-bar", "/foo-bar.json")): + for db_name, path in (("foo", "/foo.json"), ("foo-bar", "/foo-2Dbar.json")): data = app_client_conflicting_database_names.get(path).json assert db_name == data["database"] @@ -992,3 +992,16 @@ async def test_hidden_sqlite_stat1_table(): data = (await ds.client.get("/db.json?_show_hidden=1")).json() tables = [(t["name"], t["hidden"]) for t in data["tables"]] assert tables == [("normal", False), ("sqlite_stat1", True)] + + +@pytest.mark.asyncio +@pytest.mark.parametrize("db_name", ("foo", r"fo%o", "f~/c.d")) +async def test_dash_encoded_database_names(db_name): + ds = Datasette() + ds.add_memory_database(db_name) + response = await ds.client.get("/.json") + assert db_name in response.json().keys() + path = response.json()[db_name]["path"] + # And the JSON for that database + response2 = await ds.client.get(path + ".json") + assert response2.status_code == 200 diff --git a/tests/test_cli.py b/tests/test_cli.py index 3fbfdee2..e30c2ad3 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -9,6 +9,7 @@ from datasette.app import SETTINGS from datasette.plugins 
import DEFAULT_PLUGINS from datasette.cli import cli, serve from datasette.version import __version__ +from datasette.utils import dash_encode from datasette.utils.sqlite import sqlite3 from click.testing import CliRunner import io @@ -294,12 +295,12 @@ def test_weird_database_names(ensure_eventloop, tmpdir, filename): assert result1.exit_code == 0, result1.output filename_no_stem = filename.rsplit(".", 1)[0] expected_link = '<a href="/{}">{}</a>'.format( - urllib.parse.quote(filename_no_stem), filename_no_stem + dash_encode(filename_no_stem), filename_no_stem ) assert expected_link in result1.output # Now try hitting that database page result2 = runner.invoke( - cli, [db_path, "--get", "/{}".format(urllib.parse.quote(filename_no_stem))] + cli, [db_path, "--get", "/{}".format(dash_encode(filename_no_stem))] ) assert result2.exit_code == 0, result2.output diff --git a/tests/test_html.py b/tests/test_html.py index d5f4250d..b4a12b8a 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -29,7 +29,7 @@ def test_homepage(app_client_two_attached_databases): ) # Should be two attached databases assert [ - {"href": r"/extra%20database", "text": "extra database"}, + {"href": r"/extra-20database", "text": "extra database"}, {"href": "/fixtures", "text": "fixtures"}, ] == [{"href": a["href"], "text": a.text.strip()} for a in soup.select("h2 a")] # Database should show count text and attached tables @@ -44,8 +44,8 @@ def test_homepage(app_client_two_attached_databases): {"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a") ] assert [ - {"href": r"/extra%20database/searchable", "text": "searchable"}, - {"href": r"/extra%20database/searchable_view", "text": "searchable_view"}, + {"href": r"/extra-20database/searchable", "text": "searchable"}, + {"href": r"/extra-20database/searchable_view", "text": "searchable_view"}, ] == table_links @@ -140,7 +140,7 @@ def test_database_page(app_client): assert queries_ul is not None assert [ ( - "/fixtures/%F0%9D%90%9C%F0%9D%90%A2%F0%9D%90%AD%F0%9D%90%A2%F0%9D%90%9E%F0%9D%90%AC", + "/fixtures/-F0-9D-90-9C-F0-9D-90-A2-F0-9D-90-AD-F0-9D-90-A2-F0-9D-90-9E-F0-9D-90-AC", "𝐜𝐢𝐭𝐢𝐞𝐬", ), ("/fixtures/from_async_hook", "from_async_hook"), @@ -193,11 +193,11 @@ def test_row_redirects_with_url_hash(app_client_with_hash): def test_row_strange_table_name_with_url_hash(app_client_with_hash): - response = app_client_with_hash.get("/fixtures/table%2Fwith%2Fslashes.csv/3") + response = app_client_with_hash.get("/fixtures/table-2Fwith-2Fslashes-2Ecsv/3") assert response.status == 302 - assert response.headers["Location"].endswith("/table%2Fwith%2Fslashes.csv/3") + assert response.headers["Location"].endswith("/table-2Fwith-2Fslashes-2Ecsv/3") response = app_client_with_hash.get( - "/fixtures/table%2Fwith%2Fslashes.csv/3", follow_redirects=True + "/fixtures/table-2Fwith-2Fslashes-2Ecsv/3", follow_redirects=True ) assert response.status == 200 @@ -345,20 +345,38 @@ def test_row_links_from_other_tables(app_client, path, expected_text, expected_l assert link == expected_link -def test_row_html_compound_primary_key(app_client): - response = app_client.get("/fixtures/compound_primary_key/a,b") +@pytest.mark.parametrize( + "path,expected", + ( + ( + "/fixtures/compound_primary_key/a,b", + [ + [ + '<td class="col-pk1 type-str">a</td>', + '<td class="col-pk2 type-str">b</td>', + '<td class="col-content type-str">c</td>', + ] + ], + ), + ( + "/fixtures/compound_primary_key/a-2Fb,-2Ec-2Dd", + [ + [ + '<td class="col-pk1 type-str">a/b</td>', + '<td class="col-pk2 
type-str">.c-d</td>', + '<td class="col-content type-str">c</td>', + ] + ], + ), + ), +) +def test_row_html_compound_primary_key(app_client, path, expected): + response = app_client.get(path) assert response.status == 200 table = Soup(response.body, "html.parser").find("table") assert ["pk1", "pk2", "content"] == [ th.string.strip() for th in table.select("thead th") ] - expected = [ - [ - '<td class="col-pk1 type-str">a</td>', - '<td class="col-pk2 type-str">b</td>', - '<td class="col-content type-str">c</td>', - ] - ] assert expected == [ [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") ] diff --git a/tests/test_internals_urls.py b/tests/test_internals_urls.py index e486e4c9..16515ad6 100644 --- a/tests/test_internals_urls.py +++ b/tests/test_internals_urls.py @@ -121,7 +121,7 @@ def test_database(ds, base_url, format, expected): ("/", "name", None, "/_memory/name"), ("/prefix/", "name", None, "/prefix/_memory/name"), ("/", "name", "json", "/_memory/name.json"), - ("/", "name.json", "json", "/_memory/name.json?_format=json"), + ("/", "name.json", "json", "/_memory/name-2Ejson.json"), ], ) def test_table_and_query(ds, base_url, name, format, expected): diff --git a/tests/test_table_api.py b/tests/test_table_api.py index 6a6daed5..cc38d392 100644 --- a/tests/test_table_api.py +++ b/tests/test_table_api.py @@ -136,7 +136,10 @@ def test_table_shape_object(app_client): def test_table_shape_object_compound_primary_key(app_client): response = app_client.get("/fixtures/compound_primary_key.json?_shape=object") - assert {"a,b": {"pk1": "a", "pk2": "b", "content": "c"}} == response.json + assert response.json == { + "a,b": {"pk1": "a", "pk2": "b", "content": "c"}, + "a-2Fb,-2Ec-2Dd": {"pk1": "a/b", "pk2": ".c-d", "content": "c"}, + } def test_table_with_slashes_in_name(app_client): @@ -308,7 +311,7 @@ def test_sortable(app_client, query_string, sort_key, human_description_en): path = response.json["next_url"] if path: path = path.replace("http://localhost", "") - assert 5 == page + assert page == 5 expected = list(generate_sortable_rows(201)) expected.sort(key=sort_key) assert [r["content"] for r in expected] == [r["content"] for r in fetched] diff --git a/tests/test_table_html.py b/tests/test_table_html.py index 021268c3..77d97d80 100644 --- a/tests/test_table_html.py +++ b/tests/test_table_html.py @@ -563,11 +563,17 @@ def test_table_html_compound_primary_key(app_client): '<td class="col-pk1 type-str">a</td>', '<td class="col-pk2 type-str">b</td>', '<td class="col-content type-str">c</td>', - ] + ], + [ + '<td class="col-Link type-pk"><a href="/fixtures/compound_primary_key/a-2Fb,-2Ec-2Dd">a/b,.c-d</a></td>', + '<td class="col-pk1 type-str">a/b</td>', + '<td class="col-pk2 type-str">.c-d</td>', + '<td class="col-content type-str">c</td>', + ], ] - assert expected == [ + assert [ [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") - ] + ] == expected def test_table_html_foreign_key_links(app_client): diff --git a/tests/test_utils.py b/tests/test_utils.py index e7d67045..1c3ab495 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -93,7 +93,7 @@ def test_path_with_replaced_args(path, args, expected): "row,pks,expected_path", [ ({"A": "foo", "B": "bar"}, ["A", "B"], "foo,bar"), - ({"A": "f,o", "B": "bar"}, ["A", "B"], "f%2Co,bar"), + ({"A": "f,o", "B": "bar"}, ["A", "B"], "f-2Co,bar"), ({"A": 123}, ["A"], "123"), ( utils.CustomRow( @@ -646,3 +646,21 @@ async def test_derive_named_parameters(sql, expected): db = ds.get_database("_memory") params = 
await utils.derive_named_parameters(db, sql) assert params == expected + + +@pytest.mark.parametrize( + "original,expected", + ( + ("abc", "abc"), + ("/foo/bar", "-2Ffoo-2Fbar"), + ("/-/bar", "-2F-2D-2Fbar"), + ("-/db-/table.csv", "-2D-2Fdb-2D-2Ftable-2Ecsv"), + (r"%~-/", "-25-7E-2D-2F"), + ("-25-7E-2D-2F", "-2D25-2D7E-2D2D-2D2F"), + ), +) +def test_dash_encoding(original, expected): + actual = utils.dash_encode(original) + assert actual == expected + # And test round-trip + assert original == utils.dash_decode(actual) From 644d25d1de78a36b105cca479e7b3e4375a6eadc Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 7 Mar 2022 08:01:03 -0800 Subject: [PATCH 0147/1103] Redirect old % URLs to new - encoded URLs, closes #1650 Refs #1439 --- datasette/app.py | 7 +++++++ tests/test_html.py | 6 ++++++ 2 files changed, 13 insertions(+) diff --git a/datasette/app.py b/datasette/app.py index 8c5480cf..2907d90e 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1211,6 +1211,13 @@ class DatasetteRouter: return await self.handle_404(request, send) async def handle_404(self, request, send, exception=None): + # If path contains % encoding, redirect to dash encoding + if "%" in request.path: + # Try the same path but with "%" replaced by "-" + # and "-" replaced with "-2D" + new_path = request.path.replace("-", "-2D").replace("%", "-") + await asgi_send_redirect(send, new_path) + return # If URL has a trailing slash, redirect to URL without it path = request.scope.get( "raw_path", request.scope["path"].encode("utf8") diff --git a/tests/test_html.py b/tests/test_html.py index b4a12b8a..3e24009e 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -952,3 +952,9 @@ def test_no_alternate_url_json(app_client, path): assert ( '<link rel="alternate" type="application/json+datasette"' not in response.text ) + + +def test_redirect_percent_encoding_to_dash_encoding(app_client): + response = app_client.get("/fivethirtyeight/twitter-ratio%2Fsenators") + assert response.status == 302 + assert response.headers["location"] == "/fivethirtyeight/twitter-2Dratio-2Fsenators" From d714c67d656c46e012b24ccca53b59409440334f Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 7 Mar 2022 08:09:15 -0800 Subject: [PATCH 0148/1103] asyncio_mode = strict to avoid pytest warnings --- pytest.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/pytest.ini b/pytest.ini index d702ce5f..559e518c 100644 --- a/pytest.ini +++ b/pytest.ini @@ -8,3 +8,4 @@ filterwarnings= ignore:.*current_task.*:PendingDeprecationWarning markers = serial: tests to avoid using with pytest-xdist +asyncio_mode = strict From 020effe47bf89f35182960a9645f2383a42ebd54 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 7 Mar 2022 08:18:07 -0800 Subject: [PATCH 0149/1103] Preserve query string in % to - redirects, refs #1650 --- datasette/app.py | 2 ++ tests/test_html.py | 17 ++++++++++++++--- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 2907d90e..7abccc05 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1216,6 +1216,8 @@ class DatasetteRouter: # Try the same path but with "%" replaced by "-" # and "-" replaced with "-2D" new_path = request.path.replace("-", "-2D").replace("%", "-") + if request.query_string: + new_path += "?{}".format(request.query_string) await asgi_send_redirect(send, new_path) return # If URL has a trailing slash, redirect to URL without it diff --git a/tests/test_html.py 
b/tests/test_html.py index 3e24009e..de703284 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -954,7 +954,18 @@ def test_no_alternate_url_json(app_client, path): ) -def test_redirect_percent_encoding_to_dash_encoding(app_client): - response = app_client.get("/fivethirtyeight/twitter-ratio%2Fsenators") +@pytest.mark.parametrize( + "path,expected", + ( + ( + "/fivethirtyeight/twitter-ratio%2Fsenators", + "/fivethirtyeight/twitter-2Dratio-2Fsenators", + ), + # query string should be preserved + ("/foo/bar%2Fbaz?id=5", "/foo/bar-2Fbaz?id=5"), + ), +) +def test_redirect_percent_encoding_to_dash_encoding(app_client, path, expected): + response = app_client.get(path) assert response.status == 302 - assert response.headers["location"] == "/fivethirtyeight/twitter-2Dratio-2Fsenators" + assert response.headers["location"] == expected From c85d669de387b40e667fd6942c6cc1c15b4f5964 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 7 Mar 2022 11:26:08 -0800 Subject: [PATCH 0150/1103] Fix bug with percentage redirects, close #1650 --- datasette/utils/__init__.py | 7 ++++++- tests/test_html.py | 4 ++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 79feeef6..e7c9fb1c 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -10,6 +10,7 @@ import markupsafe import mergedeep import os import re +import secrets import shlex import tempfile import typing @@ -1172,4 +1173,8 @@ def dash_encode(s: str) -> str: @documented def dash_decode(s: str) -> str: "Decodes a dash-encoded string, so ``-2Ffoo-2Fbar`` -> ``/foo/bar``" - return urllib.parse.unquote(s.replace("-", "%")) + # Avoid accidentally decoding a %2f style sequence + temp = secrets.token_hex(16) + s = s.replace("%", temp) + decoded = urllib.parse.unquote(s.replace("-", "%")) + return decoded.replace(temp, "%") diff --git a/tests/test_html.py b/tests/test_html.py index de703284..55d78c05 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -961,6 +961,10 @@ def test_no_alternate_url_json(app_client, path): "/fivethirtyeight/twitter-ratio%2Fsenators", "/fivethirtyeight/twitter-2Dratio-2Fsenators", ), + ( + "/fixtures/table%2Fwith%2Fslashes", + "/fixtures/table-2Fwith-2Fslashes", + ), # query string should be preserved ("/foo/bar%2Fbaz?id=5", "/foo/bar-2Fbaz?id=5"), ), From bb499942c15c4e2cfa4b6afab8f8debe5948c009 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 7 Mar 2022 11:33:31 -0800 Subject: [PATCH 0151/1103] Fixed tests for urlsafe_components, refs #1650 --- tests/test_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 1c3ab495..ff4f649a 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -19,8 +19,8 @@ from unittest.mock import patch ("foo", ["foo"]), ("foo,bar", ["foo", "bar"]), ("123,433,112", ["123", "433", "112"]), - ("123%2C433,112", ["123,433", "112"]), - ("123%2F433%2F112", ["123/433/112"]), + ("123-2C433,112", ["123,433", "112"]), + ("123-2F433-2F112", ["123/433/112"]), ], ) def test_urlsafe_components(path, expected): From c5791156d92615f25696ba93dae5bb2dcc192c98 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 7 Mar 2022 14:04:10 -0800 Subject: [PATCH 0152/1103] Code of conduct, refs #1654 --- CODE_OF_CONDUCT.md | 128 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 128 insertions(+) create mode 100644 CODE_OF_CONDUCT.md diff --git 
a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..14d4c567 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,128 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, or sexual identity +and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. + +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the + overall community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or + advances of any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email + address, without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +`swillison+datasette-code-of-conduct@gmail.com`. +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. 
A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series +of actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or +permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within +the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct +enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at +https://www.contributor-covenant.org/translations. From 239aed182053903ed69108776b6864d42bfe1eb4 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 15 Mar 2022 08:36:35 -0700 Subject: [PATCH 0153/1103] Revert "Code of conduct, refs #1654" This reverts commit c5791156d92615f25696ba93dae5bb2dcc192c98. Refs #1658 --- CODE_OF_CONDUCT.md | 128 --------------------------------------------- 1 file changed, 128 deletions(-) delete mode 100644 CODE_OF_CONDUCT.md diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md deleted file mode 100644 index 14d4c567..00000000 --- a/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,128 +0,0 @@ -# Contributor Covenant Code of Conduct - -## Our Pledge - -We as members, contributors, and leaders pledge to make participation in our -community a harassment-free experience for everyone, regardless of age, body -size, visible or invisible disability, ethnicity, sex characteristics, gender -identity and expression, level of experience, education, socio-economic status, -nationality, personal appearance, race, religion, or sexual identity -and orientation. - -We pledge to act and interact in ways that contribute to an open, welcoming, -diverse, inclusive, and healthy community. 
- -## Our Standards - -Examples of behavior that contributes to a positive environment for our -community include: - -* Demonstrating empathy and kindness toward other people -* Being respectful of differing opinions, viewpoints, and experiences -* Giving and gracefully accepting constructive feedback -* Accepting responsibility and apologizing to those affected by our mistakes, - and learning from the experience -* Focusing on what is best not just for us as individuals, but for the - overall community - -Examples of unacceptable behavior include: - -* The use of sexualized language or imagery, and sexual attention or - advances of any kind -* Trolling, insulting or derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or email - address, without their explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Enforcement Responsibilities - -Community leaders are responsible for clarifying and enforcing our standards of -acceptable behavior and will take appropriate and fair corrective action in -response to any behavior that they deem inappropriate, threatening, offensive, -or harmful. - -Community leaders have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are -not aligned to this Code of Conduct, and will communicate reasons for moderation -decisions when appropriate. - -## Scope - -This Code of Conduct applies within all community spaces, and also applies when -an individual is officially representing the community in public spaces. -Examples of representing our community include using an official e-mail address, -posting via an official social media account, or acting as an appointed -representative at an online or offline event. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported to the community leaders responsible for enforcement at -`swillison+datasette-code-of-conduct@gmail.com`. -All complaints will be reviewed and investigated promptly and fairly. - -All community leaders are obligated to respect the privacy and security of the -reporter of any incident. - -## Enforcement Guidelines - -Community leaders will follow these Community Impact Guidelines in determining -the consequences for any action they deem in violation of this Code of Conduct: - -### 1. Correction - -**Community Impact**: Use of inappropriate language or other behavior deemed -unprofessional or unwelcome in the community. - -**Consequence**: A private, written warning from community leaders, providing -clarity around the nature of the violation and an explanation of why the -behavior was inappropriate. A public apology may be requested. - -### 2. Warning - -**Community Impact**: A violation through a single incident or series -of actions. - -**Consequence**: A warning with consequences for continued behavior. No -interaction with the people involved, including unsolicited interaction with -those enforcing the Code of Conduct, for a specified period of time. This -includes avoiding interactions in community spaces as well as external channels -like social media. Violating these terms may lead to a temporary or -permanent ban. - -### 3. Temporary Ban - -**Community Impact**: A serious violation of community standards, including -sustained inappropriate behavior. 
- -**Consequence**: A temporary ban from any sort of interaction or public -communication with the community for a specified period of time. No public or -private interaction with the people involved, including unsolicited interaction -with those enforcing the Code of Conduct, is allowed during this period. -Violating these terms may lead to a permanent ban. - -### 4. Permanent Ban - -**Community Impact**: Demonstrating a pattern of violation of community -standards, including sustained inappropriate behavior, harassment of an -individual, or aggression toward or disparagement of classes of individuals. - -**Consequence**: A permanent ban from any sort of public interaction within -the community. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], -version 2.0, available at -https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. - -Community Impact Guidelines were inspired by [Mozilla's code of conduct -enforcement ladder](https://github.com/mozilla/diversity). - -[homepage]: https://www.contributor-covenant.org - -For answers to common questions about this code of conduct, see the FAQ at -https://www.contributor-covenant.org/faq. Translations are available at -https://www.contributor-covenant.org/translations. From 5a353a32b9c4d75acbe3193fd72f735a8e78516a Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 15 Mar 2022 08:37:14 -0700 Subject: [PATCH 0154/1103] Revert "Fixed tests for urlsafe_components, refs #1650" This reverts commit bb499942c15c4e2cfa4b6afab8f8debe5948c009. Refs #1658 --- tests/test_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index ff4f649a..1c3ab495 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -19,8 +19,8 @@ from unittest.mock import patch ("foo", ["foo"]), ("foo,bar", ["foo", "bar"]), ("123,433,112", ["123", "433", "112"]), - ("123-2C433,112", ["123,433", "112"]), - ("123-2F433-2F112", ["123/433/112"]), + ("123%2C433,112", ["123,433", "112"]), + ("123%2F433%2F112", ["123/433/112"]), ], ) def test_urlsafe_components(path, expected): From 77e718c3ffb30473759a8b1ed347f73cb2ff5cfe Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 15 Mar 2022 08:37:31 -0700 Subject: [PATCH 0155/1103] Revert "Fix bug with percentage redirects, close #1650" This reverts commit c85d669de387b40e667fd6942c6cc1c15b4f5964. 
Refs #1658 --- datasette/utils/__init__.py | 7 +------ tests/test_html.py | 4 ---- 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index e7c9fb1c..79feeef6 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -10,7 +10,6 @@ import markupsafe import mergedeep import os import re -import secrets import shlex import tempfile import typing @@ -1173,8 +1172,4 @@ def dash_encode(s: str) -> str: @documented def dash_decode(s: str) -> str: "Decodes a dash-encoded string, so ``-2Ffoo-2Fbar`` -> ``/foo/bar``" - # Avoid accidentally decoding a %2f style sequence - temp = secrets.token_hex(16) - s = s.replace("%", temp) - decoded = urllib.parse.unquote(s.replace("-", "%")) - return decoded.replace(temp, "%") + return urllib.parse.unquote(s.replace("-", "%")) diff --git a/tests/test_html.py b/tests/test_html.py index 55d78c05..de703284 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -961,10 +961,6 @@ def test_no_alternate_url_json(app_client, path): "/fivethirtyeight/twitter-ratio%2Fsenators", "/fivethirtyeight/twitter-2Dratio-2Fsenators", ), - ( - "/fixtures/table%2Fwith%2Fslashes", - "/fixtures/table-2Fwith-2Fslashes", - ), # query string should be preserved ("/foo/bar%2Fbaz?id=5", "/foo/bar-2Fbaz?id=5"), ), From 645381a5ed23c016281e8c6c7d141518f91b67e5 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 15 Mar 2022 08:36:35 -0700 Subject: [PATCH 0156/1103] Add code of conduct again Refs #1658 --- CODE_OF_CONDUCT.md | 128 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 128 insertions(+) create mode 100644 CODE_OF_CONDUCT.md diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..14d4c567 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,128 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, or sexual identity +and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. 
+ +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the + overall community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or + advances of any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email + address, without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +`swillison+datasette-code-of-conduct@gmail.com`. +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series +of actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or +permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. 
+ +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within +the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct +enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at +https://www.contributor-covenant.org/translations. From c10cd48baf106659bf3f129ad7bfb2226be73821 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 7 Mar 2022 11:56:59 -0800 Subject: [PATCH 0157/1103] Min pytest-asyncio of 0.17 So that the asyncio_mode in pytest.ini does not produce a warning on older versions of that library. --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 8e69c2f5..e70839d6 100644 --- a/setup.py +++ b/setup.py @@ -69,7 +69,7 @@ setup( "test": [ "pytest>=5.2.2,<7.1.0", "pytest-xdist>=2.2.1,<2.6", - "pytest-asyncio>=0.10,<0.19", + "pytest-asyncio>=0.17,<0.19", "beautifulsoup4>=4.8.1,<4.11.0", "black==22.1.0", "pytest-timeout>=1.4.2,<2.2", From a35393b29cfb5b8abdc6a94e577af1c9a5c13652 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 15 Mar 2022 11:01:57 -0700 Subject: [PATCH 0158/1103] Tilde encoding (#1659) Closes #1657 Refs #1439 --- datasette/app.py | 11 +++++---- datasette/url_builder.py | 10 ++++---- datasette/utils/__init__.py | 37 ++++++++++++++++------------- datasette/views/base.py | 25 +++++++++++--------- datasette/views/table.py | 14 +++++++---- docs/csv_export.rst | 18 --------------- docs/internals.rst | 34 +++++++++++++-------------- tests/test_api.py | 17 ++++---------- tests/test_cli.py | 6 ++--- tests/test_html.py | 45 +++++++++++++++++++++--------------- tests/test_internals_urls.py | 2 +- tests/test_table_api.py | 9 +++++--- tests/test_table_html.py | 2 +- tests/test_utils.py | 36 +++++++++-------------------- 14 files changed, 125 insertions(+), 141 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 7abccc05..b39ef7cd 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1211,11 +1211,14 @@ class DatasetteRouter: return await self.handle_404(request, send) async def handle_404(self, request, send, exception=None): - # If path contains % encoding, redirect to dash encoding + # If path contains % encoding, redirect to tilde encoding if "%" in request.path: - # Try the same path but with "%" replaced by "-" - # and "-" replaced with "-2D" - new_path = request.path.replace("-", "-2D").replace("%", "-") + # Try the same path but with "%" replaced by "~" + # and "~" replaced with "~7E" + # and "." 
replaced with "~2E" + new_path = ( + request.path.replace("~", "~7E").replace("%", "~").replace(".", "~2E") + ) if request.query_string: new_path += "?{}".format(request.query_string) await asgi_send_redirect(send, new_path) diff --git a/datasette/url_builder.py b/datasette/url_builder.py index eebfe31e..9f072462 100644 --- a/datasette/url_builder.py +++ b/datasette/url_builder.py @@ -1,4 +1,4 @@ -from .utils import dash_encode, path_with_format, HASH_LENGTH, PrefixedUrlString +from .utils import tilde_encode, path_with_format, HASH_LENGTH, PrefixedUrlString import urllib @@ -31,20 +31,20 @@ class Urls: db = self.ds.databases[database] if self.ds.setting("hash_urls") and db.hash: path = self.path( - f"{dash_encode(database)}-{db.hash[:HASH_LENGTH]}", format=format + f"{tilde_encode(database)}-{db.hash[:HASH_LENGTH]}", format=format ) else: - path = self.path(dash_encode(database), format=format) + path = self.path(tilde_encode(database), format=format) return path def table(self, database, table, format=None): - path = f"{self.database(database)}/{dash_encode(table)}" + path = f"{self.database(database)}/{tilde_encode(table)}" if format is not None: path = path_with_format(path=path, format=format) return PrefixedUrlString(path) def query(self, database, query, format=None): - path = f"{self.database(database)}/{dash_encode(query)}" + path = f"{self.database(database)}/{tilde_encode(query)}" if format is not None: path = path_with_format(path=path, format=format) return PrefixedUrlString(path) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 79feeef6..bd591459 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -15,6 +15,7 @@ import tempfile import typing import time import types +import secrets import shutil import urllib import yaml @@ -112,12 +113,12 @@ async def await_me_maybe(value: typing.Any) -> typing.Any: def urlsafe_components(token): - """Splits token on commas and dash-decodes each component""" - return [dash_decode(b) for b in token.split(",")] + """Splits token on commas and tilde-decodes each component""" + return [tilde_decode(b) for b in token.split(",")] def path_from_row_pks(row, pks, use_rowid, quote=True): - """Generate an optionally dash-quoted unique identifier + """Generate an optionally tilde-encoded unique identifier for a row from its primary keys.""" if use_rowid: bits = [row["rowid"]] @@ -126,7 +127,7 @@ def path_from_row_pks(row, pks, use_rowid, quote=True): row[pk]["value"] if isinstance(row[pk], dict) else row[pk] for pk in pks ] if quote: - bits = [dash_encode(str(bit)) for bit in bits] + bits = [tilde_encode(str(bit)) for bit in bits] else: bits = [str(bit) for bit in bits] @@ -1142,34 +1143,38 @@ def add_cors_headers(headers): headers["Access-Control-Expose-Headers"] = "Link" -_DASH_ENCODING_SAFE = frozenset( +_TILDE_ENCODING_SAFE = frozenset( b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" b"abcdefghijklmnopqrstuvwxyz" - b"0123456789_" + b"0123456789_-" # This is the same as Python percent-encoding but I removed - # '.' and '-' and '~' + # '.' and '~' ) -class DashEncoder(dict): +class TildeEncoder(dict): # Keeps a cache internally, via __missing__ def __missing__(self, b): # Handle a cache miss, store encoded string in cache and return. 
- res = chr(b) if b in _DASH_ENCODING_SAFE else "-{:02X}".format(b) + res = chr(b) if b in _TILDE_ENCODING_SAFE else "~{:02X}".format(b) self[b] = res return res -_dash_encoder = DashEncoder().__getitem__ +_tilde_encoder = TildeEncoder().__getitem__ @documented -def dash_encode(s: str) -> str: - "Returns dash-encoded string - for example ``/foo/bar`` -> ``-2Ffoo-2Fbar``" - return "".join(_dash_encoder(char) for char in s.encode("utf-8")) +def tilde_encode(s: str) -> str: + "Returns tilde-encoded string - for example ``/foo/bar`` -> ``~2Ffoo~2Fbar``" + return "".join(_tilde_encoder(char) for char in s.encode("utf-8")) @documented -def dash_decode(s: str) -> str: - "Decodes a dash-encoded string, so ``-2Ffoo-2Fbar`` -> ``/foo/bar``" - return urllib.parse.unquote(s.replace("-", "%")) +def tilde_decode(s: str) -> str: + "Decodes a tilde-encoded string, so ``~2Ffoo~2Fbar`` -> ``/foo/bar``" + # Avoid accidentally decoding a %2f style sequence + temp = secrets.token_hex(16) + s = s.replace("%", temp) + decoded = urllib.parse.unquote(s.replace("~", "%")) + return decoded.replace(temp, "%") diff --git a/datasette/views/base.py b/datasette/views/base.py index 7cd385b7..1c0c3f9b 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -10,6 +10,7 @@ import pint from datasette import __version__ from datasette.database import QueryInterrupted +from datasette.utils.asgi import Request from datasette.utils import ( add_cors_headers, await_me_maybe, @@ -17,8 +18,8 @@ from datasette.utils import ( InvalidSql, LimitedWriter, call_with_supported_arguments, - dash_decode, - dash_encode, + tilde_decode, + tilde_encode, path_from_row_pks, path_with_added_args, path_with_removed_args, @@ -205,14 +206,14 @@ class DataView(BaseView): async def resolve_db_name(self, request, db_name, **kwargs): hash = None name = None - decoded_name = dash_decode(db_name) + decoded_name = tilde_decode(db_name) if decoded_name not in self.ds.databases and "-" in db_name: # No matching DB found, maybe it's a name-hash? 
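The renamed helpers work like their dash-encoding predecessors, but use "~" as the escape character and treat "-" as safe, so hyphenated database names such as foo-bar keep their original URLs. A minimal sketch (illustrative only, not part of the patch):

from datasette.utils import tilde_encode, tilde_decode

assert tilde_encode("/foo/bar") == "~2Ffoo~2Fbar"
assert tilde_encode("table/with/slashes.csv") == "table~2Fwith~2Fslashes~2Ecsv"
# "-" is in the safe set, so existing hyphens are left alone
assert tilde_encode("foo-bar") == "foo-bar"
assert tilde_decode("~2Ffoo~2Fbar") == "/foo/bar"
# Literal "%" sequences are deliberately not decoded
assert tilde_decode("%2Ffoo") == "%2Ffoo"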
name_bit, hash_bit = db_name.rsplit("-", 1) - if dash_decode(name_bit) not in self.ds.databases: + if tilde_decode(name_bit) not in self.ds.databases: raise NotFound(f"Database not found: {name}") else: - name = dash_decode(name_bit) + name = tilde_decode(name_bit) hash = hash_bit else: name = decoded_name @@ -235,7 +236,7 @@ class DataView(BaseView): return await db.table_exists(t) table, _format = await resolve_table_and_format( - table_and_format=dash_decode(kwargs["table_and_format"]), + table_and_format=tilde_decode(kwargs["table_and_format"]), table_exists=async_table_exists, allowed_formats=self.ds.renderers.keys(), ) @@ -243,11 +244,11 @@ class DataView(BaseView): if _format: kwargs["as_format"] = f".{_format}" elif kwargs.get("table"): - kwargs["table"] = dash_decode(kwargs["table"]) + kwargs["table"] = tilde_decode(kwargs["table"]) should_redirect = self.ds.urls.path(f"{name}-{expected}") if kwargs.get("table"): - should_redirect += "/" + dash_encode(kwargs["table"]) + should_redirect += "/" + tilde_encode(kwargs["table"]) if kwargs.get("pk_path"): should_redirect += "/" + kwargs["pk_path"] if kwargs.get("as_format"): @@ -291,6 +292,7 @@ class DataView(BaseView): if not request.args.get(key) ] if extra_parameters: + # Replace request object with a new one with modified scope if not request.query_string: new_query_string = "&".join(extra_parameters) else: @@ -300,7 +302,8 @@ class DataView(BaseView): new_scope = dict( request.scope, query_string=new_query_string.encode("latin-1") ) - request.scope = new_scope + receive = request.receive + request = Request(new_scope, receive) if stream: # Some quick soundness checks if not self.ds.setting("allow_csv_stream"): @@ -467,7 +470,7 @@ class DataView(BaseView): return await db.table_exists(t) table, _ext_format = await resolve_table_and_format( - table_and_format=dash_decode(args["table_and_format"]), + table_and_format=tilde_decode(args["table_and_format"]), table_exists=async_table_exists, allowed_formats=self.ds.renderers.keys(), ) @@ -475,7 +478,7 @@ class DataView(BaseView): args["table"] = table del args["table_and_format"] elif "table" in args: - args["table"] = dash_decode(args["table"]) + args["table"] = tilde_decode(args["table"]) return _format, args async def view_get(self, request, database, hash, correct_hash_provided, **kwargs): diff --git a/datasette/views/table.py b/datasette/views/table.py index 1d81755e..72b8e9a4 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -12,7 +12,8 @@ from datasette.utils import ( MultiParams, append_querystring, compound_keys_after_sql, - dash_encode, + tilde_decode, + tilde_encode, escape_sqlite, filters_should_redirect, is_url, @@ -143,7 +144,7 @@ class RowTableShared(DataView): '<a href="{base_url}{database}/{table}/{flat_pks_quoted}">{flat_pks}</a>'.format( base_url=base_url, database=database, - table=dash_encode(table), + table=tilde_encode(table), flat_pks=str(markupsafe.escape(pk_path)), flat_pks_quoted=path_from_row_pks(row, pks, not pks), ) @@ -200,8 +201,8 @@ class RowTableShared(DataView): link_template.format( database=database, base_url=base_url, - table=dash_encode(other_table), - link_id=dash_encode(str(value)), + table=tilde_encode(other_table), + link_id=tilde_encode(str(value)), id=str(markupsafe.escape(value)), label=str(markupsafe.escape(label)) or "-", ) @@ -346,6 +347,8 @@ class TableView(RowTableShared): write=bool(canned_query.get("write")), ) + table = tilde_decode(table) + db = self.ds.databases[database] is_view = bool(await 
db.get_view_definition(table)) table_exists = bool(await db.table_exists(table)) @@ -766,7 +769,7 @@ class TableView(RowTableShared): if prefix is None: prefix = "$null" else: - prefix = dash_encode(str(prefix)) + prefix = tilde_encode(str(prefix)) next_value = f"{prefix},{next_value}" added_args = {"_next": next_value} if sort: @@ -938,6 +941,7 @@ class RowView(RowTableShared): name = "row" async def data(self, request, database, hash, table, pk_path, default_labels=False): + table = tilde_decode(table) await self.check_permissions( request, [ diff --git a/docs/csv_export.rst b/docs/csv_export.rst index b1cc673c..023fa05e 100644 --- a/docs/csv_export.rst +++ b/docs/csv_export.rst @@ -59,21 +59,3 @@ truncation error message. You can increase or remove this limit using the :ref:`setting_max_csv_mb` config setting. You can also disable the CSV export feature entirely using :ref:`setting_allow_csv_stream`. - -A note on URLs --------------- - -The default URL for the CSV representation of a table is that table with -``.csv`` appended to it: - -* https://latest.datasette.io/fixtures/facetable - HTML interface -* https://latest.datasette.io/fixtures/facetable.csv - CSV export -* https://latest.datasette.io/fixtures/facetable.json - JSON API - -This pattern doesn't work for tables with names that already end in ``.csv`` or -``.json``. For those tables, you can instead use the ``_format=`` query string -parameter: - -* https://latest.datasette.io/fixtures/table%2Fwith%2Fslashes.csv - HTML interface -* https://latest.datasette.io/fixtures/table%2Fwith%2Fslashes.csv?_format=csv - CSV export -* https://latest.datasette.io/fixtures/table%2Fwith%2Fslashes.csv?_format=json - JSON API diff --git a/docs/internals.rst b/docs/internals.rst index d035e1f1..3d223603 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -545,7 +545,7 @@ These functions can be accessed via the ``{{ urls }}`` object in Datasette templ <a href="{{ urls.table("fixtures", "facetable") }}">facetable table</a> <a href="{{ urls.query("fixtures", "pragma_cache_size") }}">pragma_cache_size query</a> -Use the ``format="json"`` (or ``"csv"`` or other formats supported by plugins) arguments to get back URLs to the JSON representation. This is usually the path with ``.json`` added on the end, but it may use ``?_format=json`` in cases where the path already includes ``.json``, for example a URL to a table named ``table.json``. +Use the ``format="json"`` (or ``"csv"`` or other formats supported by plugins) arguments to get back URLs to the JSON representation. This is the path with ``.json`` added on the end. These methods each return a ``datasette.utils.PrefixedUrlString`` object, which is a subclass of the Python ``str`` type. This allows the logic that considers the ``base_url`` setting to detect if that prefix has already been applied to the path. @@ -876,31 +876,31 @@ Utility function for calling ``await`` on a return value if it is awaitable, oth .. autofunction:: datasette.utils.await_me_maybe -.. _internals_dash_encoding: +.. _internals_tilde_encoding: -Dash encoding -------------- +Tilde encoding +-------------- -Datasette uses a custom encoding scheme in some places, called **dash encoding**. This is primarily used for table names and row primary keys, to avoid any confusion between ``/`` characters in those values and the Datasette URLs that reference them. +Datasette uses a custom encoding scheme in some places, called **tilde encoding**. 
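Old percent-encoded URLs are mapped onto this scheme by the 404 handler changed earlier in this patch. A standalone sketch of that rewrite (illustrative only; the real logic lives in DatasetteRouter.handle_404, and the redirect_target name is invented here):

def redirect_target(path, query_string=""):
    # Escape any existing "~", then swap "%" for "~" and "." for "~2E",
    # preserving the query string if one was supplied
    new_path = path.replace("~", "~7E").replace("%", "~").replace(".", "~2E")
    if query_string:
        new_path += "?{}".format(query_string)
    return new_path

assert (
    redirect_target("/fixtures/table%2Fwith%2Fslashes.csv")
    == "/fixtures/table~2Fwith~2Fslashes~2Ecsv"
)
assert redirect_target("/foo/bar%2Fbaz", "id=5") == "/foo/bar~2Fbaz?id=5"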
This is primarily used for table names and row primary keys, to avoid any confusion between ``/`` characters in those values and the Datasette URLs that reference them. -Dash encoding uses the same algorithm as `URL percent-encoding <https://developer.mozilla.org/en-US/docs/Glossary/percent-encoding>`__, but with the ``-`` hyphen character used in place of ``%``. +Tilde encoding uses the same algorithm as `URL percent-encoding <https://developer.mozilla.org/en-US/docs/Glossary/percent-encoding>`__, but with the ``~`` tilde character used in place of ``%``. -Any character other than ``ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz 0123456789_`` will be replaced by the numeric equivalent preceded by a hyphen. For example: +Any character other than ``ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz 0123456789_-`` will be replaced by the numeric equivalent preceded by a tilde. For example: -- ``/`` becomes ``-2F`` -- ``.`` becomes ``-2E`` -- ``%`` becomes ``-25`` -- ``-`` becomes ``-2D`` -- Space character becomes ``-20`` -- ``polls/2022.primary`` becomes ``polls-2F2022-2Eprimary`` +- ``/`` becomes ``~2F`` +- ``.`` becomes ``~2E`` +- ``%`` becomes ``~25`` +- ``~`` becomes ``~7E`` +- Space character becomes ``~20`` +- ``polls/2022.primary`` becomes ``polls~2F2022~2Eprimary`` -.. _internals_utils_dash_encode: +.. _internals_utils_tilde_encode: -.. autofunction:: datasette.utils.dash_encode +.. autofunction:: datasette.utils.tilde_encode -.. _internals_utils_dash_decode: +.. _internals_utils_tilde_decode: -.. autofunction:: datasette.utils.dash_decode +.. autofunction:: datasette.utils.tilde_decode .. _internals_tracer: diff --git a/tests/test_api.py b/tests/test_api.py index dd916cf0..87d91e56 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -679,18 +679,9 @@ def test_row(app_client): assert [{"id": "1", "content": "hello"}] == response.json["rows"] -def test_row_format_in_querystring(app_client): - # regression test for https://github.com/simonw/datasette/issues/563 - response = app_client.get( - "/fixtures/simple_primary_key/1?_format=json&_shape=objects" - ) - assert response.status == 200 - assert [{"id": "1", "content": "hello"}] == response.json["rows"] - - def test_row_strange_table_name(app_client): response = app_client.get( - "/fixtures/table%2Fwith%2Fslashes.csv/3.json?_shape=objects" + "/fixtures/table~2Fwith~2Fslashes~2Ecsv/3.json?_shape=objects" ) assert response.status == 200 assert [{"pk": "3", "content": "hey"}] == response.json["rows"] @@ -942,7 +933,7 @@ def test_cors(app_client_with_cors, path, status_code): ) def test_database_with_space_in_name(app_client_two_attached_databases, path): response = app_client_two_attached_databases.get( - "/extra-20database" + path, follow_redirects=True + "/extra~20database" + path, follow_redirects=True ) assert response.status == 200 @@ -953,7 +944,7 @@ def test_common_prefix_database_names(app_client_conflicting_database_names): d["name"] for d in app_client_conflicting_database_names.get("/-/databases.json").json ] - for db_name, path in (("foo", "/foo.json"), ("foo-bar", "/foo-2Dbar.json")): + for db_name, path in (("foo", "/foo.json"), ("foo-bar", "/foo-bar.json")): data = app_client_conflicting_database_names.get(path).json assert db_name == data["database"] @@ -996,7 +987,7 @@ async def test_hidden_sqlite_stat1_table(): @pytest.mark.asyncio @pytest.mark.parametrize("db_name", ("foo", r"fo%o", "f~/c.d")) -async def test_dash_encoded_database_names(db_name): +async def test_tilde_encoded_database_names(db_name): ds 
= Datasette() ds.add_memory_database(db_name) response = await ds.client.get("/.json") diff --git a/tests/test_cli.py b/tests/test_cli.py index e30c2ad3..5afe72c1 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -9,7 +9,7 @@ from datasette.app import SETTINGS from datasette.plugins import DEFAULT_PLUGINS from datasette.cli import cli, serve from datasette.version import __version__ -from datasette.utils import dash_encode +from datasette.utils import tilde_encode from datasette.utils.sqlite import sqlite3 from click.testing import CliRunner import io @@ -295,12 +295,12 @@ def test_weird_database_names(ensure_eventloop, tmpdir, filename): assert result1.exit_code == 0, result1.output filename_no_stem = filename.rsplit(".", 1)[0] expected_link = '<a href="/{}">{}</a>'.format( - dash_encode(filename_no_stem), filename_no_stem + tilde_encode(filename_no_stem), filename_no_stem ) assert expected_link in result1.output # Now try hitting that database page result2 = runner.invoke( - cli, [db_path, "--get", "/{}".format(dash_encode(filename_no_stem))] + cli, [db_path, "--get", "/{}".format(tilde_encode(filename_no_stem))] ) assert result2.exit_code == 0, result2.output diff --git a/tests/test_html.py b/tests/test_html.py index de703284..76a8423a 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -29,7 +29,7 @@ def test_homepage(app_client_two_attached_databases): ) # Should be two attached databases assert [ - {"href": r"/extra-20database", "text": "extra database"}, + {"href": "/extra~20database", "text": "extra database"}, {"href": "/fixtures", "text": "fixtures"}, ] == [{"href": a["href"], "text": a.text.strip()} for a in soup.select("h2 a")] # Database should show count text and attached tables @@ -44,8 +44,8 @@ def test_homepage(app_client_two_attached_databases): {"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a") ] assert [ - {"href": r"/extra-20database/searchable", "text": "searchable"}, - {"href": r"/extra-20database/searchable_view", "text": "searchable_view"}, + {"href": r"/extra~20database/searchable", "text": "searchable"}, + {"href": r"/extra~20database/searchable_view", "text": "searchable_view"}, ] == table_links @@ -139,15 +139,15 @@ def test_database_page(app_client): queries_ul = soup.find("h2", text="Queries").find_next_sibling("ul") assert queries_ul is not None assert [ - ( - "/fixtures/-F0-9D-90-9C-F0-9D-90-A2-F0-9D-90-AD-F0-9D-90-A2-F0-9D-90-9E-F0-9D-90-AC", - "𝐜𝐢𝐭𝐢𝐞𝐬", - ), ("/fixtures/from_async_hook", "from_async_hook"), ("/fixtures/from_hook", "from_hook"), ("/fixtures/magic_parameters", "magic_parameters"), ("/fixtures/neighborhood_search#fragment-goes-here", "Search neighborhoods"), ("/fixtures/pragma_cache_size", "pragma_cache_size"), + ( + "/fixtures/~F0~9D~90~9C~F0~9D~90~A2~F0~9D~90~AD~F0~9D~90~A2~F0~9D~90~9E~F0~9D~90~AC", + "𝐜𝐢𝐭𝐢𝐞𝐬", + ), ] == sorted( [(a["href"], a.text) for a in queries_ul.find_all("a")], key=lambda p: p[0] ) @@ -193,11 +193,11 @@ def test_row_redirects_with_url_hash(app_client_with_hash): def test_row_strange_table_name_with_url_hash(app_client_with_hash): - response = app_client_with_hash.get("/fixtures/table-2Fwith-2Fslashes-2Ecsv/3") + response = app_client_with_hash.get("/fixtures/table~2Fwith~2Fslashes~2Ecsv/3") assert response.status == 302 - assert response.headers["Location"].endswith("/table-2Fwith-2Fslashes-2Ecsv/3") + assert response.headers["Location"].endswith("/table~2Fwith~2Fslashes~2Ecsv/3") response = app_client_with_hash.get( - "/fixtures/table-2Fwith-2Fslashes-2Ecsv/3", 
follow_redirects=True + "/fixtures/table~2Fwith~2Fslashes~2Ecsv/3", follow_redirects=True ) assert response.status == 200 @@ -229,7 +229,7 @@ def test_row_page_does_not_truncate(): ["query", "db-fixtures", "query-neighborhood_search"], ), ( - "/fixtures/table%2Fwith%2Fslashes.csv", + "/fixtures/table~2Fwith~2Fslashes~2Ecsv", ["table", "db-fixtures", "table-tablewithslashescsv-fa7563"], ), ( @@ -255,7 +255,7 @@ def test_css_classes_on_body(app_client, path, expected_classes): "table-fixtures-simple_primary_key.html, *table.html", ), ( - "/fixtures/table%2Fwith%2Fslashes.csv", + "/fixtures/table~2Fwith~2Fslashes~2Ecsv", "table-fixtures-tablewithslashescsv-fa7563.html, *table.html", ), ( @@ -359,7 +359,7 @@ def test_row_links_from_other_tables(app_client, path, expected_text, expected_l ], ), ( - "/fixtures/compound_primary_key/a-2Fb,-2Ec-2Dd", + "/fixtures/compound_primary_key/a~2Fb,~2Ec~2Dd", [ [ '<td class="col-pk1 type-str">a/b</td>', @@ -816,7 +816,8 @@ def test_base_url_affects_metadata_extra_css_urls(app_client_base_url_prefix): ), ("/fixtures/pragma_cache_size", None), ( - "/fixtures/𝐜𝐢𝐭𝐢𝐞𝐬", + # /fixtures/𝐜𝐢𝐭𝐢𝐞𝐬 + "/fixtures/~F0~9D~90~9C~F0~9D~90~A2~F0~9D~90~AD~F0~9D~90~A2~F0~9D~90~9E~F0~9D~90~AC", "/fixtures?sql=select+id%2C+name+from+facet_cities+order+by+id+limit+1%3B", ), ("/fixtures/magic_parameters", None), @@ -824,6 +825,7 @@ def test_base_url_affects_metadata_extra_css_urls(app_client_base_url_prefix): ) def test_edit_sql_link_on_canned_queries(app_client, path, expected): response = app_client.get(path) + assert response.status == 200 expected_link = f'<a href="{expected}" class="canned-query-edit-sql">Edit SQL</a>' if expected: assert expected_link in response.text @@ -898,8 +900,8 @@ def test_trace_correctly_escaped(app_client): # Table page ("/fixtures/facetable", "http://localhost/fixtures/facetable.json"), ( - "/fixtures/table%2Fwith%2Fslashes.csv", - "http://localhost/fixtures/table%2Fwith%2Fslashes.csv?_format=json", + "/fixtures/table~2Fwith~2Fslashes~2Ecsv", + "http://localhost/fixtures/table~2Fwith~2Fslashes~2Ecsv.json", ), # Row page ( @@ -930,6 +932,7 @@ def test_trace_correctly_escaped(app_client): ) def test_alternate_url_json(app_client, path, expected): response = app_client.get(path) + assert response.status == 200 link = response.headers["link"] assert link == '{}; rel="alternate"; type="application/json+datasette"'.format( expected @@ -959,13 +962,17 @@ def test_no_alternate_url_json(app_client, path): ( ( "/fivethirtyeight/twitter-ratio%2Fsenators", - "/fivethirtyeight/twitter-2Dratio-2Fsenators", + "/fivethirtyeight/twitter-ratio~2Fsenators", + ), + ( + "/fixtures/table%2Fwith%2Fslashes.csv", + "/fixtures/table~2Fwith~2Fslashes~2Ecsv", ), # query string should be preserved - ("/foo/bar%2Fbaz?id=5", "/foo/bar-2Fbaz?id=5"), + ("/foo/bar%2Fbaz?id=5", "/foo/bar~2Fbaz?id=5"), ), ) -def test_redirect_percent_encoding_to_dash_encoding(app_client, path, expected): +def test_redirect_percent_encoding_to_tilde_encoding(app_client, path, expected): response = app_client.get(path) assert response.status == 302 assert response.headers["location"] == expected diff --git a/tests/test_internals_urls.py b/tests/test_internals_urls.py index 16515ad6..4307789c 100644 --- a/tests/test_internals_urls.py +++ b/tests/test_internals_urls.py @@ -121,7 +121,7 @@ def test_database(ds, base_url, format, expected): ("/", "name", None, "/_memory/name"), ("/prefix/", "name", None, "/prefix/_memory/name"), ("/", "name", "json", "/_memory/name.json"), - ("/", "name.json", "json", 
"/_memory/name-2Ejson.json"), + ("/", "name.json", "json", "/_memory/name~2Ejson.json"), ], ) def test_table_and_query(ds, base_url, name, format, expected): diff --git a/tests/test_table_api.py b/tests/test_table_api.py index cc38d392..3ab369b3 100644 --- a/tests/test_table_api.py +++ b/tests/test_table_api.py @@ -138,13 +138,13 @@ def test_table_shape_object_compound_primary_key(app_client): response = app_client.get("/fixtures/compound_primary_key.json?_shape=object") assert response.json == { "a,b": {"pk1": "a", "pk2": "b", "content": "c"}, - "a-2Fb,-2Ec-2Dd": {"pk1": "a/b", "pk2": ".c-d", "content": "c"}, + "a~2Fb,~2Ec-d": {"pk1": "a/b", "pk2": ".c-d", "content": "c"}, } def test_table_with_slashes_in_name(app_client): response = app_client.get( - "/fixtures/table%2Fwith%2Fslashes.csv?_shape=objects&_format=json" + "/fixtures/table~2Fwith~2Fslashes~2Ecsv.json?_shape=objects" ) assert response.status == 200 data = response.json @@ -1032,7 +1032,10 @@ def test_infinity_returned_as_invalid_json_if_requested(app_client): def test_custom_query_with_unicode_characters(app_client): - response = app_client.get("/fixtures/𝐜𝐢𝐭𝐢𝐞𝐬.json?_shape=array") + # /fixtures/𝐜𝐢𝐭𝐢𝐞𝐬.json + response = app_client.get( + "/fixtures/~F0~9D~90~9C~F0~9D~90~A2~F0~9D~90~AD~F0~9D~90~A2~F0~9D~90~9E~F0~9D~90~AC.json?_shape=array" + ) assert [{"id": 1, "name": "San Francisco"}] == response.json diff --git a/tests/test_table_html.py b/tests/test_table_html.py index 77d97d80..d40f017a 100644 --- a/tests/test_table_html.py +++ b/tests/test_table_html.py @@ -565,7 +565,7 @@ def test_table_html_compound_primary_key(app_client): '<td class="col-content type-str">c</td>', ], [ - '<td class="col-Link type-pk"><a href="/fixtures/compound_primary_key/a-2Fb,-2Ec-2Dd">a/b,.c-d</a></td>', + '<td class="col-Link type-pk"><a href="/fixtures/compound_primary_key/a~2Fb,~2Ec-d">a/b,.c-d</a></td>', '<td class="col-pk1 type-str">a/b</td>', '<td class="col-pk2 type-str">.c-d</td>', '<td class="col-content type-str">c</td>', diff --git a/tests/test_utils.py b/tests/test_utils.py index 1c3ab495..790aadc7 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -19,8 +19,8 @@ from unittest.mock import patch ("foo", ["foo"]), ("foo,bar", ["foo", "bar"]), ("123,433,112", ["123", "433", "112"]), - ("123%2C433,112", ["123,433", "112"]), - ("123%2F433%2F112", ["123/433/112"]), + ("123~2C433,112", ["123,433", "112"]), + ("123~2F433~2F112", ["123/433/112"]), ], ) def test_urlsafe_components(path, expected): @@ -93,7 +93,7 @@ def test_path_with_replaced_args(path, args, expected): "row,pks,expected_path", [ ({"A": "foo", "B": "bar"}, ["A", "B"], "foo,bar"), - ({"A": "f,o", "B": "bar"}, ["A", "B"], "f-2Co,bar"), + ({"A": "f,o", "B": "bar"}, ["A", "B"], "f~2Co,bar"), ({"A": 123}, ["A"], "123"), ( utils.CustomRow( @@ -393,9 +393,7 @@ def test_table_columns(): ("/foo?sql=select+1", "json", {}, "/foo.json?sql=select+1"), ("/foo/bar", "json", {}, "/foo/bar.json"), ("/foo/bar", "csv", {}, "/foo/bar.csv"), - ("/foo/bar.csv", "json", {}, "/foo/bar.csv?_format=json"), ("/foo/bar", "csv", {"_dl": 1}, "/foo/bar.csv?_dl=1"), - ("/foo/b.csv", "json", {"_dl": 1}, "/foo/b.csv?_dl=1&_format=json"), ( "/sf-trees/Street_Tree_List?_search=cherry&_size=1000", "csv", @@ -410,18 +408,6 @@ def test_path_with_format(path, format, extra_qs, expected): assert expected == actual -def test_path_with_format_replace_format(): - request = Request.fake("/foo/bar.csv") - assert ( - utils.path_with_format(request=request, format="blob") - == "/foo/bar.csv?_format=blob" - ) - 
assert ( - utils.path_with_format(request=request, format="blob", replace_format="csv") - == "/foo/bar.blob" - ) - - @pytest.mark.parametrize( "bytes,expected", [ @@ -652,15 +638,15 @@ async def test_derive_named_parameters(sql, expected): "original,expected", ( ("abc", "abc"), - ("/foo/bar", "-2Ffoo-2Fbar"), - ("/-/bar", "-2F-2D-2Fbar"), - ("-/db-/table.csv", "-2D-2Fdb-2D-2Ftable-2Ecsv"), - (r"%~-/", "-25-7E-2D-2F"), - ("-25-7E-2D-2F", "-2D25-2D7E-2D2D-2D2F"), + ("/foo/bar", "~2Ffoo~2Fbar"), + ("/-/bar", "~2F-~2Fbar"), + ("-/db-/table.csv", "-~2Fdb-~2Ftable~2Ecsv"), + (r"%~-/", "~25~7E-~2F"), + ("~25~7E~2D~2F", "~7E25~7E7E~7E2D~7E2F"), ), ) -def test_dash_encoding(original, expected): - actual = utils.dash_encode(original) +def test_tilde_encoding(original, expected): + actual = utils.tilde_encode(original) assert actual == expected # And test round-trip - assert original == utils.dash_decode(actual) + assert original == utils.tilde_decode(actual) From 77a904fea14f743560af9cc668146339bdbbd0a9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 15 Mar 2022 11:03:01 -0700 Subject: [PATCH 0159/1103] Update pytest requirement from <7.1.0,>=5.2.2 to >=5.2.2,<7.2.0 (#1656) Updates the requirements on [pytest](https://github.com/pytest-dev/pytest) to permit the latest version. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/5.2.2...7.1.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development ... Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index e70839d6..4b58b8c4 100644 --- a/setup.py +++ b/setup.py @@ -67,7 +67,7 @@ setup( extras_require={ "docs": ["sphinx_rtd_theme", "sphinx-autobuild", "codespell"], "test": [ - "pytest>=5.2.2,<7.1.0", + "pytest>=5.2.2,<7.2.0", "pytest-xdist>=2.2.1,<2.6", "pytest-asyncio>=0.17,<0.19", "beautifulsoup4>=4.8.1,<4.11.0", From 30e5f0e67c38054a8087a2a4eae3fc4d1779af90 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 17 Mar 2022 14:30:02 -0700 Subject: [PATCH 0160/1103] Documented internals used by datasette-hashed-urls Closes #1663 --- docs/internals.rst | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/docs/internals.rst b/docs/internals.rst index 3d223603..117cb95c 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -217,12 +217,18 @@ You can create your own instance of this - for example to help write tests for a } }) +Constructor parameters include: + +- ``files=[...]`` - a list of database files to open +- ``immutables=[...]`` - a list of database files to open in immutable mode +- ``metadata={...}`` - a dictionary of :ref:`metadata` + .. _datasette_databases: .databases ---------- -Property exposing an ordered dictionary of databases currently connected to Datasette. +Property exposing a ``collections.OrderedDict`` of databases currently connected to Datasette. The dictionary keys are the name of the database that is used in the URL - e.g. ``/fixtures`` would have a key of ``"fixtures"``. The values are :ref:`internals_database` instances. 
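A minimal sketch of how these documented constructor parameters and the ``.databases`` mapping fit together - the file names and metadata values here are illustrative assumptions::

    from datasette.app import Datasette

    # "fixtures.db" and "archive.db" are hypothetical local SQLite files
    ds = Datasette(
        files=["fixtures.db"],
        immutables=["archive.db"],
        metadata={"title": "My Datasette instance"},
    )

    # Keys are the database names used in URLs, e.g. /fixtures -> "fixtures"
    db = ds.databases["fixtures"]
    assert db.is_mutable  # databases passed via files= are mutable

    # Databases opened with immutables= are read-only; their db.hash
    # (documented below) is the SHA-256 of the file contents, otherwise None
    assert not ds.databases["archive"].is_mutable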
@@ -582,6 +588,13 @@ The arguments are as follows: The first argument is the ``datasette`` instance you are attaching to, the second is a ``path=``, then ``is_mutable`` and ``is_memory`` are both optional arguments. +.. _database_hash: + +db.hash +------- + +If the database was opened in immutable mode, this property returns the 64 character SHA-256 hash of the database contents as a string. Otherwise it returns ``None``. + .. _database_execute: await db.execute(sql, ...) From d4f60c2388c01ddce1b16f95c16d310e037c9912 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 18 Mar 2022 17:12:03 -0700 Subject: [PATCH 0161/1103] Remove hashed URL mode Also simplified how view class routing works. Refs #1661 --- datasette/app.py | 2 +- datasette/views/base.py | 153 ++++++----------------------------- datasette/views/database.py | 19 +++-- datasette/views/index.py | 3 +- datasette/views/special.py | 3 +- datasette/views/table.py | 34 ++++---- tests/fixtures.py | 6 -- tests/test_api.py | 29 ------- tests/test_custom_pages.py | 42 +++++----- tests/test_html.py | 28 ------- tests/test_internals_urls.py | 18 ----- tests/test_table_api.py | 8 -- 12 files changed, 79 insertions(+), 266 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index b39ef7cd..3099ada7 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1097,7 +1097,7 @@ class Datasette: ) add_route( TableView.as_view(self), - r"/(?P<db_name>[^/]+)/(?P<table_and_format>[^/]+?$)", + r"/(?P<db_name>[^/]+)/(?P<table>[^\/\.]+)(\.[a-zA-Z0-9_]+)?$", ) add_route( RowView.as_view(self), diff --git a/datasette/views/base.py b/datasette/views/base.py index 1c0c3f9b..e31beb19 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -122,11 +122,11 @@ class BaseView: async def delete(self, request, *args, **kwargs): return Response.text("Method not allowed", status=405) - async def dispatch_request(self, request, *args, **kwargs): + async def dispatch_request(self, request): if self.ds: await self.ds.refresh_schemas() handler = getattr(self, request.method.lower(), None) - return await handler(request, *args, **kwargs) + return await handler(request) async def render(self, templates, request, context=None): context = context or {} @@ -169,9 +169,7 @@ class BaseView: def as_view(cls, *class_args, **class_kwargs): async def view(request, send): self = view.view_class(*class_args, **class_kwargs) - return await self.dispatch_request( - request, **request.scope["url_route"]["kwargs"] - ) + return await self.dispatch_request(request) view.view_class = cls view.__doc__ = cls.__doc__ @@ -200,90 +198,14 @@ class DataView(BaseView): add_cors_headers(r.headers) return r - async def data(self, request, database, hash, **kwargs): + async def data(self, request): raise NotImplementedError - async def resolve_db_name(self, request, db_name, **kwargs): - hash = None - name = None - decoded_name = tilde_decode(db_name) - if decoded_name not in self.ds.databases and "-" in db_name: - # No matching DB found, maybe it's a name-hash? 
- name_bit, hash_bit = db_name.rsplit("-", 1) - if tilde_decode(name_bit) not in self.ds.databases: - raise NotFound(f"Database not found: {name}") - else: - name = tilde_decode(name_bit) - hash = hash_bit - else: - name = decoded_name - - try: - db = self.ds.databases[name] - except KeyError: - raise NotFound(f"Database not found: {name}") - - # Verify the hash - expected = "000" - if db.hash is not None: - expected = db.hash[:HASH_LENGTH] - correct_hash_provided = expected == hash - - if not correct_hash_provided: - if "table_and_format" in kwargs: - - async def async_table_exists(t): - return await db.table_exists(t) - - table, _format = await resolve_table_and_format( - table_and_format=tilde_decode(kwargs["table_and_format"]), - table_exists=async_table_exists, - allowed_formats=self.ds.renderers.keys(), - ) - kwargs["table"] = table - if _format: - kwargs["as_format"] = f".{_format}" - elif kwargs.get("table"): - kwargs["table"] = tilde_decode(kwargs["table"]) - - should_redirect = self.ds.urls.path(f"{name}-{expected}") - if kwargs.get("table"): - should_redirect += "/" + tilde_encode(kwargs["table"]) - if kwargs.get("pk_path"): - should_redirect += "/" + kwargs["pk_path"] - if kwargs.get("as_format"): - should_redirect += kwargs["as_format"] - if kwargs.get("as_db"): - should_redirect += kwargs["as_db"] - - if ( - (self.ds.setting("hash_urls") or "_hash" in request.args) - and - # Redirect only if database is immutable - not self.ds.databases[name].is_mutable - ): - return name, expected, correct_hash_provided, should_redirect - - return name, expected, correct_hash_provided, None - def get_templates(self, database, table=None): assert NotImplemented - async def get(self, request, db_name, **kwargs): - ( - database, - hash, - correct_hash_provided, - should_redirect, - ) = await self.resolve_db_name(request, db_name, **kwargs) - if should_redirect: - return self.redirect(request, should_redirect, remove_args={"_hash"}) - - return await self.view_get( - request, database, hash, correct_hash_provided, **kwargs - ) - - async def as_csv(self, request, database, hash, **kwargs): + async def as_csv(self, request, database): + kwargs = {} stream = request.args.get("_stream") # Do not calculate facets or counts: extra_parameters = [ @@ -313,9 +235,7 @@ class DataView(BaseView): kwargs["_size"] = "max" # Fetch the first page try: - response_or_template_contexts = await self.data( - request, database, hash, **kwargs - ) + response_or_template_contexts = await self.data(request) if isinstance(response_or_template_contexts, Response): return response_or_template_contexts elif len(response_or_template_contexts) == 4: @@ -367,10 +287,11 @@ class DataView(BaseView): next = None while first or (next and stream): try: + kwargs = {} if next: kwargs["_next"] = next if not first: - data, _, _ = await self.data(request, database, hash, **kwargs) + data, _, _ = await self.data(request, **kwargs) if first: if request.args.get("_header") != "off": await writer.writerow(headings) @@ -445,60 +366,39 @@ class DataView(BaseView): if not trace: content_type = "text/csv; charset=utf-8" disposition = 'attachment; filename="{}.csv"'.format( - kwargs.get("table", database) + request.url_vars.get("table", database) ) headers["content-disposition"] = disposition return AsgiStream(stream_fn, headers=headers, content_type=content_type) - async def get_format(self, request, database, args): - """Determine the format of the response from the request, from URL - parameters or from a file extension. 
- - `args` is a dict of the path components parsed from the URL by the router. - """ - # If ?_format= is provided, use that as the format - _format = request.args.get("_format", None) - if not _format: - _format = (args.pop("as_format", None) or "").lstrip(".") + def get_format(self, request): + # Format is the bit from the path following the ., if one exists + last_path_component = request.path.split("/")[-1] + if "." in last_path_component: + return last_path_component.split(".")[-1] else: - args.pop("as_format", None) - if "table_and_format" in args: - db = self.ds.databases[database] + return None - async def async_table_exists(t): - return await db.table_exists(t) - - table, _ext_format = await resolve_table_and_format( - table_and_format=tilde_decode(args["table_and_format"]), - table_exists=async_table_exists, - allowed_formats=self.ds.renderers.keys(), - ) - _format = _format or _ext_format - args["table"] = table - del args["table_and_format"] - elif "table" in args: - args["table"] = tilde_decode(args["table"]) - return _format, args - - async def view_get(self, request, database, hash, correct_hash_provided, **kwargs): - _format, kwargs = await self.get_format(request, database, kwargs) + async def get(self, request): + db_name = request.url_vars["db_name"] + database = tilde_decode(db_name) + _format = self.get_format(request) + data_kwargs = {} if _format == "csv": - return await self.as_csv(request, database, hash, **kwargs) + return await self.as_csv(request, database) if _format is None: # HTML views default to expanding all foreign key labels - kwargs["default_labels"] = True + data_kwargs["default_labels"] = True extra_template_data = {} start = time.perf_counter() status_code = None templates = [] try: - response_or_template_contexts = await self.data( - request, database, hash, **kwargs - ) + response_or_template_contexts = await self.data(request, **data_kwargs) if isinstance(response_or_template_contexts, Response): return response_or_template_contexts # If it has four items, it includes an HTTP status code @@ -650,10 +550,7 @@ class DataView(BaseView): ttl = request.args.get("_ttl", None) if ttl is None or not ttl.isdigit(): - if correct_hash_provided: - ttl = self.ds.setting("default_cache_ttl_hashed") - else: - ttl = self.ds.setting("default_cache_ttl") + ttl = self.ds.setting("default_cache_ttl") return self.set_response_headers(r, ttl) diff --git a/datasette/views/database.py b/datasette/views/database.py index e26706e7..48635e01 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -12,6 +12,7 @@ from datasette.utils import ( await_me_maybe, check_visibility, derive_named_parameters, + tilde_decode, to_css_class, validate_sql_select, is_url, @@ -21,7 +22,7 @@ from datasette.utils import ( sqlite3, InvalidSql, ) -from datasette.utils.asgi import AsgiFileDownload, Response, Forbidden +from datasette.utils.asgi import AsgiFileDownload, NotFound, Response, Forbidden from datasette.plugins import pm from .base import DatasetteError, DataView @@ -30,7 +31,8 @@ from .base import DatasetteError, DataView class DatabaseView(DataView): name = "database" - async def data(self, request, database, hash, default_labels=False, _size=None): + async def data(self, request, default_labels=False, _size=None): + database = tilde_decode(request.url_vars["db_name"]) await self.check_permissions( request, [ @@ -45,10 +47,13 @@ class DatabaseView(DataView): sql = request.args.get("sql") validate_sql_select(sql) return await QueryView(self.ds).data( - request, 
database, hash, sql, _size=_size, metadata=metadata + request, sql, _size=_size, metadata=metadata ) - db = self.ds.databases[database] + try: + db = self.ds.databases[database] + except KeyError: + raise NotFound("Database not found: {}".format(database)) table_counts = await db.table_counts(5) hidden_table_names = set(await db.hidden_table_names()) @@ -156,7 +161,8 @@ class DatabaseView(DataView): class DatabaseDownload(DataView): name = "database_download" - async def view_get(self, request, database, hash, correct_hash_present, **kwargs): + async def get(self, request): + database = tilde_decode(request.url_vars["db_name"]) await self.check_permissions( request, [ @@ -191,8 +197,6 @@ class QueryView(DataView): async def data( self, request, - database, - hash, sql, editable=True, canned_query=None, @@ -201,6 +205,7 @@ class QueryView(DataView): named_parameters=None, write=False, ): + database = tilde_decode(request.url_vars["db_name"]) params = {key: request.args.get(key) for key in request.args} if "sql" in params: params.pop("sql") diff --git a/datasette/views/index.py b/datasette/views/index.py index 18454759..311a49db 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -18,7 +18,8 @@ COUNT_DB_SIZE_LIMIT = 100 * 1024 * 1024 class IndexView(BaseView): name = "index" - async def get(self, request, as_format): + async def get(self, request): + as_format = request.url_vars["as_format"] await self.check_permission(request, "view-instance") databases = [] for name, db in self.ds.databases.items(): diff --git a/datasette/views/special.py b/datasette/views/special.py index cdd530f0..c7b5061f 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -14,7 +14,8 @@ class JsonDataView(BaseView): self.data_callback = data_callback self.needs_request = needs_request - async def get(self, request, as_format): + async def get(self, request): + as_format = request.url_vars["as_format"] await self.check_permission(request, "view-instance") if self.needs_request: data = self.data_callback(request) diff --git a/datasette/views/table.py b/datasette/views/table.py index 72b8e9a4..8bdc7417 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -271,20 +271,18 @@ class RowTableShared(DataView): class TableView(RowTableShared): name = "table" - async def post(self, request, db_name, table_and_format): + async def post(self, request): + db_name = tilde_decode(request.url_vars["db_name"]) + table = tilde_decode(request.url_vars["table"]) # Handle POST to a canned query - canned_query = await self.ds.get_canned_query( - db_name, table_and_format, request.actor - ) + canned_query = await self.ds.get_canned_query(db_name, table, request.actor) assert canned_query, "You may only POST to a canned query" return await QueryView(self.ds).data( request, - db_name, - None, canned_query["sql"], metadata=canned_query, editable=False, - canned_query=table_and_format, + canned_query=table, named_parameters=canned_query.get("params"), write=bool(canned_query.get("write")), ) @@ -325,20 +323,22 @@ class TableView(RowTableShared): async def data( self, request, - database, - hash, - table, default_labels=False, _next=None, _size=None, ): + database = tilde_decode(request.url_vars["db_name"]) + table = tilde_decode(request.url_vars["table"]) + try: + db = self.ds.databases[database] + except KeyError: + raise NotFound("Database not found: {}".format(database)) + # If this is a canned query, not a table, then dispatch to QueryView instead canned_query = await 
self.ds.get_canned_query(database, table, request.actor) if canned_query: return await QueryView(self.ds).data( request, - database, - hash, canned_query["sql"], metadata=canned_query, editable=False, @@ -347,9 +347,6 @@ class TableView(RowTableShared): write=bool(canned_query.get("write")), ) - table = tilde_decode(table) - - db = self.ds.databases[database] is_view = bool(await db.get_view_definition(table)) table_exists = bool(await db.table_exists(table)) @@ -940,8 +937,9 @@ async def _sql_params_pks(db, table, pk_values): class RowView(RowTableShared): name = "row" - async def data(self, request, database, hash, table, pk_path, default_labels=False): - table = tilde_decode(table) + async def data(self, request, default_labels=False): + database = tilde_decode(request.url_vars["db_name"]) + table = tilde_decode(request.url_vars["table"]) await self.check_permissions( request, [ @@ -950,7 +948,7 @@ class RowView(RowTableShared): "view-instance", ], ) - pk_values = urlsafe_components(pk_path) + pk_values = urlsafe_components(request.url_vars["pk_path"]) db = self.ds.databases[database] sql, params, pks = await _sql_params_pks(db, table, pk_values) results = await db.execute(sql, params, truncate=True) diff --git a/tests/fixtures.py b/tests/fixtures.py index 11f09c41..342a3020 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -214,12 +214,6 @@ def app_client_two_attached_databases_one_immutable(): yield client -@pytest.fixture(scope="session") -def app_client_with_hash(): - with make_app_client(settings={"hash_urls": True}, is_immutable=True) as client: - yield client - - @pytest.fixture(scope="session") def app_client_with_trace(): with make_app_client(settings={"trace_debug": True}, is_immutable=True) as client: diff --git a/tests/test_api.py b/tests/test_api.py index 87d91e56..46e41afb 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -825,35 +825,6 @@ def test_config_redirects_to_settings(app_client, path, expected_redirect): assert response.headers["Location"] == expected_redirect -@pytest.mark.parametrize( - "path,expected_redirect", - [ - ("/fixtures/facetable.json?_hash=1", "/fixtures-HASH/facetable.json"), - ( - "/fixtures/facetable.json?city_id=1&_hash=1", - "/fixtures-HASH/facetable.json?city_id=1", - ), - ], -) -def test_hash_parameter( - app_client_two_attached_databases_one_immutable, path, expected_redirect -): - # First get the current hash for the fixtures database - current_hash = app_client_two_attached_databases_one_immutable.ds.databases[ - "fixtures" - ].hash[:7] - response = app_client_two_attached_databases_one_immutable.get(path) - assert response.status == 302 - location = response.headers["Location"] - assert expected_redirect.replace("HASH", current_hash) == location - - -def test_hash_parameter_ignored_for_mutable_databases(app_client): - path = "/fixtures/facetable.json?_hash=1" - response = app_client.get(path) - assert response.status == 200 - - test_json_columns_default_expected = [ {"intval": 1, "strval": "s", "floatval": 0.5, "jsonval": '{"foo": "bar"}'} ] diff --git a/tests/test_custom_pages.py b/tests/test_custom_pages.py index 66b7437a..f2cfe394 100644 --- a/tests/test_custom_pages.py +++ b/tests/test_custom_pages.py @@ -21,61 +21,61 @@ def custom_pages_client_with_base_url(): def test_custom_pages_view_name(custom_pages_client): response = custom_pages_client.get("/about") - assert 200 == response.status - assert "ABOUT! view_name:page" == response.text + assert response.status == 200 + assert response.text == "ABOUT! 
view_name:page" def test_request_is_available(custom_pages_client): response = custom_pages_client.get("/request") - assert 200 == response.status - assert "path:/request" == response.text + assert response.status == 200 + assert response.text == "path:/request" def test_custom_pages_with_base_url(custom_pages_client_with_base_url): response = custom_pages_client_with_base_url.get("/prefix/request") - assert 200 == response.status - assert "path:/prefix/request" == response.text + assert response.status == 200 + assert response.text == "path:/prefix/request" def test_custom_pages_nested(custom_pages_client): response = custom_pages_client.get("/nested/nest") - assert 200 == response.status - assert "Nest!" == response.text + assert response.status == 200 + assert response.text == "Nest!" response = custom_pages_client.get("/nested/nest2") - assert 404 == response.status + assert response.status == 404 def test_custom_status(custom_pages_client): response = custom_pages_client.get("/202") - assert 202 == response.status - assert "202!" == response.text + assert response.status == 202 + assert response.text == "202!" def test_custom_headers(custom_pages_client): response = custom_pages_client.get("/headers") - assert 200 == response.status - assert "foo" == response.headers["x-this-is-foo"] - assert "bar" == response.headers["x-this-is-bar"] - assert "FOOBAR" == response.text + assert response.status == 200 + assert response.headers["x-this-is-foo"] == "foo" + assert response.headers["x-this-is-bar"] == "bar" + assert response.text == "FOOBAR" def test_custom_content_type(custom_pages_client): response = custom_pages_client.get("/atom") - assert 200 == response.status + assert response.status == 200 assert response.headers["content-type"] == "application/xml" - assert "<?xml ...>" == response.text + assert response.text == "<?xml ...>" def test_redirect(custom_pages_client): response = custom_pages_client.get("/redirect") - assert 302 == response.status - assert "/example" == response.headers["Location"] + assert response.status == 302 + assert response.headers["Location"] == "/example" def test_redirect2(custom_pages_client): response = custom_pages_client.get("/redirect2") - assert 301 == response.status - assert "/example" == response.headers["Location"] + assert response.status == 301 + assert response.headers["Location"] == "/example" @pytest.mark.parametrize( diff --git a/tests/test_html.py b/tests/test_html.py index 76a8423a..6e4c22b1 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -5,7 +5,6 @@ from .fixtures import ( # noqa app_client_base_url_prefix, app_client_shorter_time_limit, app_client_two_attached_databases, - app_client_with_hash, make_app_client, METADATA, ) @@ -101,13 +100,6 @@ def test_not_allowed_methods(): assert response.status == 405 -def test_database_page_redirects_with_url_hash(app_client_with_hash): - response = app_client_with_hash.get("/fixtures") - assert response.status == 302 - response = app_client_with_hash.get("/fixtures", follow_redirects=True) - assert "fixtures" in response.text - - def test_database_page(app_client): response = app_client.get("/fixtures") soup = Soup(response.body, "html.parser") @@ -182,26 +174,6 @@ def test_sql_time_limit(app_client_shorter_time_limit): assert expected_html_fragment in response.text -def test_row_redirects_with_url_hash(app_client_with_hash): - response = app_client_with_hash.get("/fixtures/simple_primary_key/1") - assert response.status == 302 - assert response.headers["Location"].endswith("/1") - 
response = app_client_with_hash.get( - "/fixtures/simple_primary_key/1", follow_redirects=True - ) - assert response.status == 200 - - -def test_row_strange_table_name_with_url_hash(app_client_with_hash): - response = app_client_with_hash.get("/fixtures/table~2Fwith~2Fslashes~2Ecsv/3") - assert response.status == 302 - assert response.headers["Location"].endswith("/table~2Fwith~2Fslashes~2Ecsv/3") - response = app_client_with_hash.get( - "/fixtures/table~2Fwith~2Fslashes~2Ecsv/3", follow_redirects=True - ) - assert response.status == 200 - - def test_row_page_does_not_truncate(): with make_app_client(settings={"truncate_cells_html": 5}) as client: response = client.get("/fixtures/facetable/1") diff --git a/tests/test_internals_urls.py b/tests/test_internals_urls.py index 4307789c..d60aafcf 100644 --- a/tests/test_internals_urls.py +++ b/tests/test_internals_urls.py @@ -1,6 +1,5 @@ from datasette.app import Datasette from datasette.utils import PrefixedUrlString -from .fixtures import app_client_with_hash import pytest @@ -147,20 +146,3 @@ def test_row(ds, base_url, format, expected): actual = ds.urls.row("_memory", "facetable", "1", format=format) assert actual == expected assert isinstance(actual, PrefixedUrlString) - - -@pytest.mark.parametrize("base_url", ["/", "/prefix/"]) -def test_database_hashed(app_client_with_hash, base_url): - ds = app_client_with_hash.ds - original_base_url = ds._settings["base_url"] - try: - ds._settings["base_url"] = base_url - db_hash = ds.get_database("fixtures").hash - assert len(db_hash) == 64 - expected = f"{base_url}fixtures-{db_hash[:7]}" - assert ds.urls.database("fixtures") == expected - assert ds.urls.table("fixtures", "name") == expected + "/name" - assert ds.urls.query("fixtures", "name") == expected + "/name" - finally: - # Reset this since fixture is shared with other tests - ds._settings["base_url"] = original_base_url diff --git a/tests/test_table_api.py b/tests/test_table_api.py index 3ab369b3..3d0a7fbd 100644 --- a/tests/test_table_api.py +++ b/tests/test_table_api.py @@ -2,7 +2,6 @@ from datasette.utils import detect_json1 from datasette.utils.sqlite import sqlite_version from .fixtures import ( # noqa app_client, - app_client_with_hash, app_client_with_trace, app_client_returned_rows_matches_page_size, generate_compound_rows, @@ -41,13 +40,6 @@ def test_table_not_exists_json(app_client): } == app_client.get("/fixtures/blah.json").json -def test_jsono_redirects_to_shape_objects(app_client_with_hash): - response_1 = app_client_with_hash.get("/fixtures/simple_primary_key.jsono") - response = app_client_with_hash.get(response_1.headers["Location"]) - assert response.status == 302 - assert response.headers["Location"].endswith("?_shape=objects") - - def test_table_shape_arrays(app_client): response = app_client.get("/fixtures/simple_primary_key.json?_shape=arrays") assert [ From 8658c66438ec71edc7e9adc495f4692b937a0f57 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 18 Mar 2022 17:19:31 -0700 Subject: [PATCH 0162/1103] Show error if --setting hash_urls 1 used, refs #1661 --- datasette/app.py | 15 ++++++++++----- datasette/cli.py | 21 ++++++++++++++++++--- tests/test_cli.py | 7 +++++++ 3 files changed, 35 insertions(+), 8 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 3099ada7..c1c0663d 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -118,11 +118,6 @@ SETTINGS = ( 50, "Time limit for calculating a suggested facet", ), - Setting( - "hash_urls", - False, - "Include DB file contents hash in 
URLs, for far-future caching", - ), Setting( "allow_facet", True, @@ -177,6 +172,16 @@ SETTINGS = ( ), Setting("base_url", "/", "Datasette URLs should use this base path"), ) +OBSOLETE_SETTINGS = { + option.name: option + for option in ( + Setting( + "hash_urls", + False, + "The hash_urls setting has been removed, try the datasette-hashed-urls plugin instead", + ), + ) +} DEFAULT_SETTINGS = {option.name: option.default for option in SETTINGS} diff --git a/datasette/cli.py b/datasette/cli.py index 61e7ce91..b94ac192 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -12,7 +12,14 @@ from subprocess import call import sys from runpy import run_module import webbrowser -from .app import Datasette, DEFAULT_SETTINGS, SETTINGS, SQLITE_LIMIT_ATTACHED, pm +from .app import ( + OBSOLETE_SETTINGS, + Datasette, + DEFAULT_SETTINGS, + SETTINGS, + SQLITE_LIMIT_ATTACHED, + pm, +) from .utils import ( StartupError, check_connection, @@ -50,8 +57,12 @@ class Config(click.ParamType): return name, value = config.split(":", 1) if name not in DEFAULT_SETTINGS: + if name in OBSOLETE_SETTINGS: + msg = OBSOLETE_SETTINGS[name].help + else: + msg = f"{name} is not a valid option (--help-settings to see all)" self.fail( - f"{name} is not a valid option (--help-settings to see all)", + msg, param, ctx, ) @@ -83,8 +94,12 @@ class Setting(CompositeParamType): def convert(self, config, param, ctx): name, value = config if name not in DEFAULT_SETTINGS: + if name in OBSOLETE_SETTINGS: + msg = OBSOLETE_SETTINGS[name].help + else: + msg = f"{name} is not a valid option (--help-settings to see all)" self.fail( - f"{name} is not a valid option (--help-settings to see all)", + msg, param, ctx, ) diff --git a/tests/test_cli.py b/tests/test_cli.py index 5afe72c1..89e8d044 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -310,3 +310,10 @@ def test_help_settings(): result = runner.invoke(cli, ["--help-settings"]) for setting in SETTINGS: assert setting.name in result.output + + +def test_help_error_on_hash_urls_setting(): + runner = CliRunner() + result = runner.invoke(cli, ["--setting", "hash_urls", 1]) + assert result.exit_code == 2 + assert 'The hash_urls setting has been removed' in result.output From 9979dcd07f9921ac30c4c0b5ea60d09cd1e10556 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 18 Mar 2022 17:25:14 -0700 Subject: [PATCH 0163/1103] Also remove default_cache_ttl_hashed setting, refs #1661 --- datasette/app.py | 17 +++-------------- datasette/cli.py | 16 ++++++++-------- datasette/url_builder.py | 9 +-------- tests/test_api.py | 2 -- tests/test_cli.py | 7 ++++--- 5 files changed, 16 insertions(+), 35 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index c1c0663d..f52e3283 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -134,11 +134,6 @@ SETTINGS = ( 5, "Default HTTP cache TTL (used in Cache-Control: max-age= header)", ), - Setting( - "default_cache_ttl_hashed", - 365 * 24 * 60 * 60, - "Default HTTP cache TTL for hashed URL pages", - ), Setting("cache_size_kb", 0, "SQLite cache size in KB (0 == use SQLite default)"), Setting( "allow_csv_stream", @@ -172,17 +167,11 @@ SETTINGS = ( ), Setting("base_url", "/", "Datasette URLs should use this base path"), ) +_HASH_URLS_REMOVED = "The hash_urls setting has been removed, try the datasette-hashed-urls plugin instead" OBSOLETE_SETTINGS = { - option.name: option - for option in ( - Setting( - "hash_urls", - False, - "The hash_urls setting has been removed, try the datasette-hashed-urls plugin instead", - ), - ) + 
"hash_urls": _HASH_URLS_REMOVED, + "default_cache_ttl_hashed": _HASH_URLS_REMOVED, } - DEFAULT_SETTINGS = {option.name: option.default for option in SETTINGS} FAVICON_PATH = app_root / "datasette" / "static" / "favicon.png" diff --git a/datasette/cli.py b/datasette/cli.py index b94ac192..3c6e1b2c 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -57,10 +57,10 @@ class Config(click.ParamType): return name, value = config.split(":", 1) if name not in DEFAULT_SETTINGS: - if name in OBSOLETE_SETTINGS: - msg = OBSOLETE_SETTINGS[name].help - else: - msg = f"{name} is not a valid option (--help-settings to see all)" + msg = ( + OBSOLETE_SETTINGS.get(name) + or f"{name} is not a valid option (--help-settings to see all)" + ) self.fail( msg, param, @@ -94,10 +94,10 @@ class Setting(CompositeParamType): def convert(self, config, param, ctx): name, value = config if name not in DEFAULT_SETTINGS: - if name in OBSOLETE_SETTINGS: - msg = OBSOLETE_SETTINGS[name].help - else: - msg = f"{name} is not a valid option (--help-settings to see all)" + msg = ( + OBSOLETE_SETTINGS.get(name) + or f"{name} is not a valid option (--help-settings to see all)" + ) self.fail( msg, param, diff --git a/datasette/url_builder.py b/datasette/url_builder.py index 9f072462..498ec85d 100644 --- a/datasette/url_builder.py +++ b/datasette/url_builder.py @@ -28,14 +28,7 @@ class Urls: return self.path("-/logout") def database(self, database, format=None): - db = self.ds.databases[database] - if self.ds.setting("hash_urls") and db.hash: - path = self.path( - f"{tilde_encode(database)}-{db.hash[:HASH_LENGTH]}", format=format - ) - else: - path = self.path(tilde_encode(database), format=format) - return path + return self.path(tilde_encode(database), format=format) def table(self, database, table, format=None): path = f"{self.database(database)}/{tilde_encode(table)}" diff --git a/tests/test_api.py b/tests/test_api.py index 46e41afb..d3c94023 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -798,14 +798,12 @@ def test_settings_json(app_client): "allow_facet": True, "suggest_facets": True, "default_cache_ttl": 5, - "default_cache_ttl_hashed": 365 * 24 * 60 * 60, "num_sql_threads": 1, "cache_size_kb": 0, "allow_csv_stream": True, "max_csv_mb": 100, "truncate_cells_html": 2048, "force_https_urls": False, - "hash_urls": False, "template_debug": False, "trace_debug": False, "base_url": "/", diff --git a/tests/test_cli.py b/tests/test_cli.py index 89e8d044..dca65f26 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -312,8 +312,9 @@ def test_help_settings(): assert setting.name in result.output -def test_help_error_on_hash_urls_setting(): +@pytest.mark.parametrize("setting", ("hash_urls", "default_cache_ttl_hashed")) +def test_help_error_on_hash_urls_setting(setting): runner = CliRunner() - result = runner.invoke(cli, ["--setting", "hash_urls", 1]) + result = runner.invoke(cli, ["--setting", setting, 1]) assert result.exit_code == 2 - assert 'The hash_urls setting has been removed' in result.output + assert "The hash_urls setting has been removed" in result.output From 32963018e7edfab1233de7c7076c428d0e5c7813 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 18 Mar 2022 17:33:06 -0700 Subject: [PATCH 0164/1103] Updated documentation to remove hash_urls, refs #1661 --- docs/cli-reference.rst | 4 ---- docs/performance.rst | 18 +++++++++++------- docs/settings.rst | 27 --------------------------- 3 files changed, 11 insertions(+), 38 deletions(-) diff --git a/docs/cli-reference.rst 
b/docs/cli-reference.rst index 155a005d..69670d8a 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -142,8 +142,6 @@ datasette serve --help-settings (default=200) facet_suggest_time_limit_ms Time limit for calculating a suggested facet (default=50) - hash_urls Include DB file contents hash in URLs, for far- - future caching (default=False) allow_facet Allow users to specify columns to facet using ?_facet= parameter (default=True) allow_download Allow users to download the original SQLite @@ -152,8 +150,6 @@ datasette serve --help-settings (default=True) default_cache_ttl Default HTTP cache TTL (used in Cache-Control: max-age= header) (default=5) - default_cache_ttl_hashed Default HTTP cache TTL for hashed URL pages - (default=31536000) cache_size_kb SQLite cache size in KB (0 == use SQLite default) (default=0) allow_csv_stream Allow .csv?_stream=1 to download all rows diff --git a/docs/performance.rst b/docs/performance.rst index bcf3208e..d37f1804 100644 --- a/docs/performance.rst +++ b/docs/performance.rst @@ -60,18 +60,22 @@ The :ref:`setting_default_cache_ttl` setting sets the default HTTP cache TTL for You can also change the cache timeout on a per-request basis using the ``?_ttl=10`` query string parameter. This can be useful when you are working with the Datasette JSON API - you may decide that a specific query can be cached for a longer time, or maybe you need to set ``?_ttl=0`` for some requests for example if you are running a SQL ``order by random()`` query. -Hashed URL mode ---------------- +datasette-hashed-urls +--------------------- -When you open a database file in immutable mode using the ``-i`` option, Datasette calculates a SHA-256 hash of the contents of that file on startup. This content hash can then optionally be used to create URLs that are guaranteed to change if the contents of the file changes in the future. This results in URLs that can then be cached indefinitely by both browsers and caching proxies - an enormous potential performance optimization. +If you open a database file in immutable mode using the ``-i`` option, you can be assured that the content of that database will not change for the lifetime of the Datasette server. -You can enable these hashed URLs in two ways: using the :ref:`setting_hash_urls` configuration setting (which affects all requests to Datasette) or via the ``?_hash=1`` query string parameter (which only applies to the current request). +The `datasette-hashed-urls plugin <https://datasette.io/plugins/datasette-hashed-urls>`__ implements an optimization where your database is served with part of the SHA-256 hash of the database contents baked into the URL. -With hashed URLs enabled, any request to e.g. ``/mydatabase/mytable`` will 302 redirect to ``mydatabase-455fe3a/mytable``. The URL containing the hash will be served with a very long cache expire header - configured using :ref:`setting_default_cache_ttl_hashed` which defaults to 365 days. +A database at ``/fixtures`` will instead be served at ``/fixtures-aa7318b``, and a year-long cache expiry header will be returned with those pages. -Since these responses are cached for a long time, you may wish to build API clients against the non-hashed version of these URLs. These 302 redirects are served extremely quickly, so this should still be a performant way to work against the Datasette API. +This will then be cached by both browsers and caching proxies such as Cloudflare or Fastly, providing a potentially significant performance boost. 
-If you run Datasette behind an `HTTP/2 server push <https://en.wikipedia.org/wiki/HTTP/2_Server_Push>`__ aware proxy such as Cloudflare Datasette will serve the 302 redirects in such a way that the redirected page will be efficiently "pushed" to the browser as part of the response, without the browser needing to make a second HTTP request to fetch the redirected resource. +To install the plugin, run the following:: + + datasette install datasette-hashed-urls .. note:: + Prior to Datasette 0.61 hashed URL mode was a core Datasette feature, enabled using the ``hash_urls`` setting. This implementation has now been removed in favor of the ``datasette-hashed-urls`` plugin. + Prior to Datasette 0.28 hashed URL mode was the default behaviour for Datasette, since all database files were assumed to be immutable and unchanging. From 0.28 onwards the default has been to treat database files as mutable unless explicitly configured otherwise. diff --git a/docs/settings.rst b/docs/settings.rst index da06d6a0..60c4b36d 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -178,17 +178,6 @@ Default HTTP caching max-age header in seconds, used for ``Cache-Control: max-ag datasette mydatabase.db --setting default_cache_ttl 60 -.. _setting_default_cache_ttl_hashed: - -default_cache_ttl_hashed -~~~~~~~~~~~~~~~~~~~~~~~~ - -Default HTTP caching max-age for responses served using using the :ref:`hashed-urls mechanism <setting_hash_urls>`. Defaults to 365 days (31536000 seconds). - -:: - - datasette mydatabase.db --setting default_cache_ttl_hashed 10000 - .. _setting_cache_size_kb: cache_size_kb @@ -251,22 +240,6 @@ HTTP but is served to the outside world via a proxy that enables HTTPS. datasette mydatabase.db --setting force_https_urls 1 -.. _setting_hash_urls: - -hash_urls -~~~~~~~~~ - -When enabled, this setting causes Datasette to append a content hash of the -database file to the URL path for every table and query within that database. - -When combined with far-future expire headers this ensures that queries can be -cached forever, safe in the knowledge that any modifications to the database -itself will result in new, uncached URL paths. - -:: - - datasette mydatabase.db --setting hash_urls 1 - .. 
_setting_template_debug: template_debug From 4e47a2d894b96854348343374c8e97c9d7055cf6 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 18 Mar 2022 18:37:54 -0700 Subject: [PATCH 0165/1103] Fixed bug where tables with a column called n caused 500 errors Closes #1228 --- datasette/facets.py | 6 +++--- tests/fixtures.py | 33 ++++++++++++++++---------------- tests/test_api.py | 1 + tests/test_csv.py | 32 +++++++++++++++---------------- tests/test_internals_database.py | 10 ++++++++++ tests/test_plugins.py | 6 ++++-- tests/test_table_api.py | 8 ++++++++ 7 files changed, 59 insertions(+), 37 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index a1bb4a5f..b15a758c 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -151,10 +151,10 @@ class ColumnFacet(Facet): if column in already_enabled: continue suggested_facet_sql = """ - select {column}, count(*) as n from ( + select {column} as value, count(*) as n from ( {sql} - ) where {column} is not null - group by {column} + ) where value is not null + group by value limit {limit} """.format( column=escape_sqlite(column), sql=self.sql, limit=facet_size + 1 diff --git a/tests/fixtures.py b/tests/fixtures.py index 342a3020..e0e4ec7b 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -564,26 +564,27 @@ CREATE TABLE facetable ( tags text, complex_array text, distinct_some_null, + n text, FOREIGN KEY ("_city_id") REFERENCES [facet_cities](id) ); INSERT INTO facetable - (created, planet_int, on_earth, state, _city_id, _neighborhood, tags, complex_array, distinct_some_null) + (created, planet_int, on_earth, state, _city_id, _neighborhood, tags, complex_array, distinct_some_null, n) VALUES - ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]', 'one'), - ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]', 'two'), - ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]', '[]', null), - ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]', null), - ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]', null), - ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]', null), - ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Hollywood', '[]', '[]', null), - ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]', '[]', null), - ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]', null), - ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]', '[]', null), - ("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]', '[]', null), - ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]', '[]', null), - ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]', '[]', null), - ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]', null), - ("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]', null) + ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]', 'one', 'n1'), + ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]', 'two', 'n2'), + ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]', '[]', null, null), + ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]', null, null), + ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]', null, null), + ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]', null, null), + ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Hollywood', '[]', '[]', null, null), + ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]', '[]', null, null), + ("2019-01-16 
08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]', null, null), + ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]', '[]', null, null), + ("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]', '[]', null, null), + ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]', '[]', null, null), + ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]', '[]', null, null), + ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]', null, null), + ("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]', null, null) ; CREATE TABLE binary_data ( diff --git a/tests/test_api.py b/tests/test_api.py index d3c94023..421bb1fe 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -210,6 +210,7 @@ def test_database_page(app_client): "tags", "complex_array", "distinct_some_null", + "n", ], "primary_keys": ["pk"], "count": 15, diff --git a/tests/test_csv.py b/tests/test_csv.py index 8749cd8b..7fc25a09 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -24,22 +24,22 @@ world ) EXPECTED_TABLE_WITH_LABELS_CSV = """ -pk,created,planet_int,on_earth,state,_city_id,_city_id_label,_neighborhood,tags,complex_array,distinct_some_null -1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]",one -2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]",[],two -3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[], -4,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Tenderloin,[],[], -5,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Bernal Heights,[],[], -6,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Hayes Valley,[],[], -7,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Hollywood,[],[], -8,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Downtown,[],[], -9,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Los Feliz,[],[], -10,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Koreatown,[],[], -11,2019-01-16 08:00:00,1,1,MI,3,Detroit,Downtown,[],[], -12,2019-01-17 08:00:00,1,1,MI,3,Detroit,Greektown,[],[], -13,2019-01-17 08:00:00,1,1,MI,3,Detroit,Corktown,[],[], -14,2019-01-17 08:00:00,1,1,MI,3,Detroit,Mexicantown,[],[], -15,2019-01-17 08:00:00,2,0,MC,4,Memnonia,Arcadia Planitia,[],[], +pk,created,planet_int,on_earth,state,_city_id,_city_id_label,_neighborhood,tags,complex_array,distinct_some_null,n +1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]",one,n1 +2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]",[],two,n2 +3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[],, +4,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Tenderloin,[],[],, +5,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Bernal Heights,[],[],, +6,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Hayes Valley,[],[],, +7,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Hollywood,[],[],, +8,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Downtown,[],[],, +9,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Los Feliz,[],[],, +10,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Koreatown,[],[],, +11,2019-01-16 08:00:00,1,1,MI,3,Detroit,Downtown,[],[],, +12,2019-01-17 08:00:00,1,1,MI,3,Detroit,Greektown,[],[],, +13,2019-01-17 08:00:00,1,1,MI,3,Detroit,Corktown,[],[],, +14,2019-01-17 08:00:00,1,1,MI,3,Detroit,Mexicantown,[],[],, +15,2019-01-17 08:00:00,2,0,MC,4,Memnonia,Arcadia Planitia,[],[],, """.lstrip().replace( "\n", "\r\n" ) diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 31538a24..551f67e1 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -86,6 +86,7 @@ async def test_table_exists(db, 
tables, exists): "tags", "complex_array", "distinct_some_null", + "n", ], ), ( @@ -204,6 +205,15 @@ async def test_table_columns(db, table, expected): is_pk=0, hidden=0, ), + Column( + cid=10, + name="n", + type="text", + notnull=0, + default_value=None, + is_pk=0, + hidden=0, + ), ], ), ( diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 656f39e4..15bde962 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -442,6 +442,7 @@ def test_hook_register_output_renderer_all_parameters(app_client): "tags", "complex_array", "distinct_some_null", + "n", ], "rows": [ "<sqlite3.Row object at 0xXXX>", @@ -460,7 +461,7 @@ def test_hook_register_output_renderer_all_parameters(app_client): "<sqlite3.Row object at 0xXXX>", "<sqlite3.Row object at 0xXXX>", ], - "sql": "select pk, created, planet_int, on_earth, state, _city_id, _neighborhood, tags, complex_array, distinct_some_null from facetable order by pk limit 51", + "sql": "select pk, created, planet_int, on_earth, state, _city_id, _neighborhood, tags, complex_array, distinct_some_null, n from facetable order by pk limit 51", "query_name": None, "database": "fixtures", "table": "facetable", @@ -531,8 +532,9 @@ def test_hook_register_output_renderer_can_render(app_client): "tags", "complex_array", "distinct_some_null", + "n", ], - "sql": "select pk, created, planet_int, on_earth, state, _city_id, _neighborhood, tags, complex_array, distinct_some_null from facetable order by pk limit 51", + "sql": "select pk, created, planet_int, on_earth, state, _city_id, _neighborhood, tags, complex_array, distinct_some_null, n from facetable order by pk limit 51", "query_name": None, "database": "fixtures", "table": "facetable", diff --git a/tests/test_table_api.py b/tests/test_table_api.py index 3d0a7fbd..9db383c3 100644 --- a/tests/test_table_api.py +++ b/tests/test_table_api.py @@ -532,6 +532,7 @@ def test_table_filter_json_arraycontains(app_client): '["tag1", "tag2"]', '[{"foo": "bar"}]', "one", + "n1", ], [ 2, @@ -544,6 +545,7 @@ def test_table_filter_json_arraycontains(app_client): '["tag1", "tag3"]', "[]", "two", + "n2", ], ] @@ -565,6 +567,7 @@ def test_table_filter_json_arraynotcontains(app_client): '["tag1", "tag2"]', '[{"foo": "bar"}]', "one", + "n1", ] ] @@ -585,6 +588,7 @@ def test_table_filter_extra_where(app_client): '["tag1", "tag3"]', "[]", "two", + "n2", ] ] == response.json["rows"] @@ -958,6 +962,7 @@ def test_expand_labels(app_client): "tags": '["tag1", "tag3"]', "complex_array": "[]", "distinct_some_null": "two", + "n": "n2", }, "13": { "pk": 13, @@ -970,6 +975,7 @@ def test_expand_labels(app_client): "tags": "[]", "complex_array": "[]", "distinct_some_null": None, + "n": None, }, } == response.json @@ -1161,6 +1167,7 @@ def test_generated_columns_are_visible_in_datasette(): "tags", "complex_array", "distinct_some_null", + "n", ], ), ( @@ -1188,6 +1195,7 @@ def test_generated_columns_are_visible_in_datasette(): "tags", "complex_array", "distinct_some_null", + "n", ], ), ( From 711767bcd3c1e76a0861fe7f24069ff1c8efc97a Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 18 Mar 2022 21:03:08 -0700 Subject: [PATCH 0166/1103] Refactored URL routing to add tests, closes #1666 Refs #1660 --- datasette/app.py | 54 ++++++++++++++++++++----------------- datasette/utils/__init__.py | 8 ++++++ tests/test_routes.py | 34 +++++++++++++++++++++++ 3 files changed, 72 insertions(+), 24 deletions(-) create mode 100644 tests/test_routes.py diff --git a/datasette/app.py b/datasette/app.py index f52e3283..8987112c 
100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -60,6 +60,7 @@ from .utils import ( module_from_path, parse_metadata, resolve_env_secrets, + resolve_routes, to_css_class, ) from .utils.asgi import ( @@ -974,8 +975,7 @@ class Datasette: output.append(script) return output - def app(self): - """Returns an ASGI app function that serves the whole of Datasette""" + def _routes(self): routes = [] for routes_to_add in pm.hook.register_routes(datasette=self): @@ -1099,6 +1099,15 @@ class Datasette: + renderer_regex + r")?$", ) + return [ + # Compile any strings to regular expressions + ((re.compile(pattern) if isinstance(pattern, str) else pattern), view) + for pattern, view in routes + ] + + def app(self): + """Returns an ASGI app function that serves the whole of Datasette""" + routes = self._routes() self._register_custom_units() async def setup_db(): @@ -1129,12 +1138,7 @@ class Datasette: class DatasetteRouter: def __init__(self, datasette, routes): self.ds = datasette - routes = routes or [] - self.routes = [ - # Compile any strings to regular expressions - ((re.compile(pattern) if isinstance(pattern, str) else pattern), view) - for pattern, view in routes - ] + self.routes = routes or [] # Build a list of pages/blah/{name}.html matching expressions pattern_templates = [ filepath @@ -1187,22 +1191,24 @@ class DatasetteRouter: break scope_modifications["actor"] = actor or default_actor scope = dict(scope, **scope_modifications) - for regex, view in self.routes: - match = regex.match(path) - if match is not None: - new_scope = dict(scope, url_route={"kwargs": match.groupdict()}) - request.scope = new_scope - try: - response = await view(request, send) - if response: - self.ds._write_messages_to_response(request, response) - await response.asgi_send(send) - return - except NotFound as exception: - return await self.handle_404(request, send, exception) - except Exception as exception: - return await self.handle_500(request, send, exception) - return await self.handle_404(request, send) + + match, view = resolve_routes(self.routes, path) + + if match is None: + return await self.handle_404(request, send) + + new_scope = dict(scope, url_route={"kwargs": match.groupdict()}) + request.scope = new_scope + try: + response = await view(request, send) + if response: + self.ds._write_messages_to_response(request, response) + await response.asgi_send(send) + return + except NotFound as exception: + return await self.handle_404(request, send, exception) + except Exception as exception: + return await self.handle_500(request, send, exception) async def handle_404(self, request, send, exception=None): # If path contains % encoding, redirect to tilde encoding diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index bd591459..ccdf8ad4 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1178,3 +1178,11 @@ def tilde_decode(s: str) -> str: s = s.replace("%", temp) decoded = urllib.parse.unquote(s.replace("~", "%")) return decoded.replace(temp, "%") + + +def resolve_routes(routes, path): + for regex, view in routes: + match = regex.match(path) + if match is not None: + return match, view + return None, None diff --git a/tests/test_routes.py b/tests/test_routes.py new file mode 100644 index 00000000..a1960f14 --- /dev/null +++ b/tests/test_routes.py @@ -0,0 +1,34 @@ +from datasette.app import Datasette +from datasette.utils import resolve_routes +import pytest + + +@pytest.fixture(scope="session") +def routes(): + ds = Datasette() + return ds._routes() + + 
+@pytest.mark.parametrize( + "path,expected", + ( + ("/", "IndexView"), + ("/foo", "DatabaseView"), + ("/foo.csv", "DatabaseView"), + ("/foo.json", "DatabaseView"), + ("/foo.humbug", "DatabaseView"), + ("/foo/humbug", "TableView"), + ("/foo/humbug.json", "TableView"), + ("/foo/humbug.blah", "TableView"), + ("/foo/humbug/1", "RowView"), + ("/foo/humbug/1.json", "RowView"), + ("/-/metadata.json", "JsonDataView"), + ("/-/metadata", "JsonDataView"), + ), +) +def test_routes(routes, path, expected): + match, view = resolve_routes(routes, path) + if expected is None: + assert match is None + else: + assert view.view_class.__name__ == expected From 764738dfcb16cd98b0987d443f59d5baa9d3c332 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 19 Mar 2022 09:30:22 -0700 Subject: [PATCH 0167/1103] test_routes also now asserts matches, refs #1666 --- tests/test_routes.py | 41 +++++++++++++++++++++++++---------------- 1 file changed, 25 insertions(+), 16 deletions(-) diff --git a/tests/test_routes.py b/tests/test_routes.py index a1960f14..6718c232 100644 --- a/tests/test_routes.py +++ b/tests/test_routes.py @@ -10,25 +10,34 @@ def routes(): @pytest.mark.parametrize( - "path,expected", + "path,expected_class,expected_matches", ( - ("/", "IndexView"), - ("/foo", "DatabaseView"), - ("/foo.csv", "DatabaseView"), - ("/foo.json", "DatabaseView"), - ("/foo.humbug", "DatabaseView"), - ("/foo/humbug", "TableView"), - ("/foo/humbug.json", "TableView"), - ("/foo/humbug.blah", "TableView"), - ("/foo/humbug/1", "RowView"), - ("/foo/humbug/1.json", "RowView"), - ("/-/metadata.json", "JsonDataView"), - ("/-/metadata", "JsonDataView"), + ("/", "IndexView", {"as_format": ""}), + ("/foo", "DatabaseView", {"as_format": None, "db_name": "foo"}), + ("/foo.csv", "DatabaseView", {"as_format": ".csv", "db_name": "foo"}), + ("/foo.json", "DatabaseView", {"as_format": ".json", "db_name": "foo"}), + ("/foo.humbug", "DatabaseView", {"as_format": None, "db_name": "foo.humbug"}), + ("/foo/humbug", "TableView", {"db_name": "foo", "table": "humbug"}), + ("/foo/humbug.json", "TableView", {"db_name": "foo", "table": "humbug"}), + ("/foo/humbug.blah", "TableView", {"db_name": "foo", "table": "humbug"}), + ( + "/foo/humbug/1", + "RowView", + {"as_format": None, "db_name": "foo", "pk_path": "1", "table": "humbug"}, + ), + ( + "/foo/humbug/1.json", + "RowView", + {"as_format": ".json", "db_name": "foo", "pk_path": "1", "table": "humbug"}, + ), + ("/-/metadata.json", "JsonDataView", {"as_format": ".json"}), + ("/-/metadata", "JsonDataView", {"as_format": ""}), ), ) -def test_routes(routes, path, expected): +def test_routes(routes, path, expected_class, expected_matches): match, view = resolve_routes(routes, path) - if expected is None: + if expected_class is None: assert match is None else: - assert view.view_class.__name__ == expected + assert view.view_class.__name__ == expected_class + assert match.groupdict() == expected_matches From 61419388c134001118aaf7dfb913562d467d7913 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 19 Mar 2022 09:52:08 -0700 Subject: [PATCH 0168/1103] Rename route match groups for consistency, refs #1667, #1660 --- datasette/app.py | 28 ++++++++++++---------------- datasette/blob_renderer.py | 4 ++-- datasette/views/base.py | 2 +- datasette/views/database.py | 6 +++--- datasette/views/index.py | 2 +- datasette/views/special.py | 2 +- datasette/views/table.py | 8 ++++---- tests/test_routes.py | 24 ++++++++++++------------ 8 files changed, 36 insertions(+), 40 
deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 8987112c..5259c50c 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -988,7 +988,7 @@ class Datasette: # Generate a regex snippet to match all registered renderer file extensions renderer_regex = "|".join(r"\." + key for key in self.renderers.keys()) - add_route(IndexView.as_view(self), r"/(?P<as_format>(\.jsono?)?$)") + add_route(IndexView.as_view(self), r"/(?P<format>(\.jsono?)?$)") # TODO: /favicon.ico and /-/static/ deserve far-future cache expires add_route(favicon, "/favicon.ico") @@ -1020,21 +1020,21 @@ class Datasette: ) add_route( JsonDataView.as_view(self, "metadata.json", lambda: self.metadata()), - r"/-/metadata(?P<as_format>(\.json)?)$", + r"/-/metadata(?P<format>(\.json)?)$", ) add_route( JsonDataView.as_view(self, "versions.json", self._versions), - r"/-/versions(?P<as_format>(\.json)?)$", + r"/-/versions(?P<format>(\.json)?)$", ) add_route( JsonDataView.as_view( self, "plugins.json", self._plugins, needs_request=True ), - r"/-/plugins(?P<as_format>(\.json)?)$", + r"/-/plugins(?P<format>(\.json)?)$", ) add_route( JsonDataView.as_view(self, "settings.json", lambda: self._settings), - r"/-/settings(?P<as_format>(\.json)?)$", + r"/-/settings(?P<format>(\.json)?)$", ) add_route( permanent_redirect("/-/settings.json"), @@ -1046,15 +1046,15 @@ class Datasette: ) add_route( JsonDataView.as_view(self, "threads.json", self._threads), - r"/-/threads(?P<as_format>(\.json)?)$", + r"/-/threads(?P<format>(\.json)?)$", ) add_route( JsonDataView.as_view(self, "databases.json", self._connected_databases), - r"/-/databases(?P<as_format>(\.json)?)$", + r"/-/databases(?P<format>(\.json)?)$", ) add_route( JsonDataView.as_view(self, "actor.json", self._actor, needs_request=True), - r"/-/actor(?P<as_format>(\.json)?)$", + r"/-/actor(?P<format>(\.json)?)$", ) add_route( AuthTokenView.as_view(self), @@ -1080,22 +1080,18 @@ class Datasette: PatternPortfolioView.as_view(self), r"/-/patterns$", ) - add_route( - DatabaseDownload.as_view(self), r"/(?P<db_name>[^/]+?)(?P<as_db>\.db)$" - ) + add_route(DatabaseDownload.as_view(self), r"/(?P<database>[^/]+?)\.db$") add_route( DatabaseView.as_view(self), - r"/(?P<db_name>[^/]+?)(?P<as_format>" - + renderer_regex - + r"|.jsono|\.csv)?$", + r"/(?P<database>[^/]+?)(?P<format>" + renderer_regex + r"|.jsono|\.csv)?$", ) add_route( TableView.as_view(self), - r"/(?P<db_name>[^/]+)/(?P<table>[^\/\.]+)(\.[a-zA-Z0-9_]+)?$", + r"/(?P<database>[^/]+)/(?P<table>[^\/\.]+)(\.[a-zA-Z0-9_]+)?$", ) add_route( RowView.as_view(self), - r"/(?P<db_name>[^/]+)/(?P<table>[^/]+?)/(?P<pk_path>[^/]+?)(?P<as_format>" + r"/(?P<database>[^/]+)/(?P<table>[^/]+?)/(?P<pks>[^/]+?)(?P<format>" + renderer_regex + r")?$", ) diff --git a/datasette/blob_renderer.py b/datasette/blob_renderer.py index 217b3638..4d8c6bea 100644 --- a/datasette/blob_renderer.py +++ b/datasette/blob_renderer.py @@ -34,8 +34,8 @@ async def render_blob(datasette, database, rows, columns, request, table, view_n filename_bits = [] if table: filename_bits.append(to_css_class(table)) - if "pk_path" in request.url_vars: - filename_bits.append(request.url_vars["pk_path"]) + if "pks" in request.url_vars: + filename_bits.append(request.url_vars["pks"]) filename_bits.append(to_css_class(blob_column)) if blob_hash: filename_bits.append(blob_hash[:6]) diff --git a/datasette/views/base.py b/datasette/views/base.py index e31beb19..0bbf98bb 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -381,7 +381,7 @@ class DataView(BaseView): return 
None async def get(self, request): - db_name = request.url_vars["db_name"] + db_name = request.url_vars["database"] database = tilde_decode(db_name) _format = self.get_format(request) data_kwargs = {} diff --git a/datasette/views/database.py b/datasette/views/database.py index 48635e01..93bd1011 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -32,7 +32,7 @@ class DatabaseView(DataView): name = "database" async def data(self, request, default_labels=False, _size=None): - database = tilde_decode(request.url_vars["db_name"]) + database = tilde_decode(request.url_vars["database"]) await self.check_permissions( request, [ @@ -162,7 +162,7 @@ class DatabaseDownload(DataView): name = "database_download" async def get(self, request): - database = tilde_decode(request.url_vars["db_name"]) + database = tilde_decode(request.url_vars["database"]) await self.check_permissions( request, [ @@ -205,7 +205,7 @@ class QueryView(DataView): named_parameters=None, write=False, ): - database = tilde_decode(request.url_vars["db_name"]) + database = tilde_decode(request.url_vars["database"]) params = {key: request.args.get(key) for key in request.args} if "sql" in params: params.pop("sql") diff --git a/datasette/views/index.py b/datasette/views/index.py index 311a49db..f5e31181 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -19,7 +19,7 @@ class IndexView(BaseView): name = "index" async def get(self, request): - as_format = request.url_vars["as_format"] + as_format = request.url_vars["format"] await self.check_permission(request, "view-instance") databases = [] for name, db in self.ds.databases.items(): diff --git a/datasette/views/special.py b/datasette/views/special.py index c7b5061f..395ee587 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -15,7 +15,7 @@ class JsonDataView(BaseView): self.needs_request = needs_request async def get(self, request): - as_format = request.url_vars["as_format"] + as_format = request.url_vars["format"] await self.check_permission(request, "view-instance") if self.needs_request: data = self.data_callback(request) diff --git a/datasette/views/table.py b/datasette/views/table.py index 8bdc7417..ea4f24b7 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -272,7 +272,7 @@ class TableView(RowTableShared): name = "table" async def post(self, request): - db_name = tilde_decode(request.url_vars["db_name"]) + db_name = tilde_decode(request.url_vars["database"]) table = tilde_decode(request.url_vars["table"]) # Handle POST to a canned query canned_query = await self.ds.get_canned_query(db_name, table, request.actor) @@ -327,7 +327,7 @@ class TableView(RowTableShared): _next=None, _size=None, ): - database = tilde_decode(request.url_vars["db_name"]) + database = tilde_decode(request.url_vars["database"]) table = tilde_decode(request.url_vars["table"]) try: db = self.ds.databases[database] @@ -938,7 +938,7 @@ class RowView(RowTableShared): name = "row" async def data(self, request, default_labels=False): - database = tilde_decode(request.url_vars["db_name"]) + database = tilde_decode(request.url_vars["database"]) table = tilde_decode(request.url_vars["table"]) await self.check_permissions( request, @@ -948,7 +948,7 @@ class RowView(RowTableShared): "view-instance", ], ) - pk_values = urlsafe_components(request.url_vars["pk_path"]) + pk_values = urlsafe_components(request.url_vars["pks"]) db = self.ds.databases[database] sql, params, pks = await _sql_params_pks(db, table, pk_values) results = 
await db.execute(sql, params, truncate=True) diff --git a/tests/test_routes.py b/tests/test_routes.py index 6718c232..349ac302 100644 --- a/tests/test_routes.py +++ b/tests/test_routes.py @@ -12,26 +12,26 @@ def routes(): @pytest.mark.parametrize( "path,expected_class,expected_matches", ( - ("/", "IndexView", {"as_format": ""}), - ("/foo", "DatabaseView", {"as_format": None, "db_name": "foo"}), - ("/foo.csv", "DatabaseView", {"as_format": ".csv", "db_name": "foo"}), - ("/foo.json", "DatabaseView", {"as_format": ".json", "db_name": "foo"}), - ("/foo.humbug", "DatabaseView", {"as_format": None, "db_name": "foo.humbug"}), - ("/foo/humbug", "TableView", {"db_name": "foo", "table": "humbug"}), - ("/foo/humbug.json", "TableView", {"db_name": "foo", "table": "humbug"}), - ("/foo/humbug.blah", "TableView", {"db_name": "foo", "table": "humbug"}), + ("/", "IndexView", {"format": ""}), + ("/foo", "DatabaseView", {"format": None, "database": "foo"}), + ("/foo.csv", "DatabaseView", {"format": ".csv", "database": "foo"}), + ("/foo.json", "DatabaseView", {"format": ".json", "database": "foo"}), + ("/foo.humbug", "DatabaseView", {"format": None, "database": "foo.humbug"}), + ("/foo/humbug", "TableView", {"database": "foo", "table": "humbug"}), + ("/foo/humbug.json", "TableView", {"database": "foo", "table": "humbug"}), + ("/foo/humbug.blah", "TableView", {"database": "foo", "table": "humbug"}), ( "/foo/humbug/1", "RowView", - {"as_format": None, "db_name": "foo", "pk_path": "1", "table": "humbug"}, + {"format": None, "database": "foo", "pks": "1", "table": "humbug"}, ), ( "/foo/humbug/1.json", "RowView", - {"as_format": ".json", "db_name": "foo", "pk_path": "1", "table": "humbug"}, + {"format": ".json", "database": "foo", "pks": "1", "table": "humbug"}, ), - ("/-/metadata.json", "JsonDataView", {"as_format": ".json"}), - ("/-/metadata", "JsonDataView", {"as_format": ""}), + ("/-/metadata.json", "JsonDataView", {"format": ".json"}), + ("/-/metadata", "JsonDataView", {"format": ""}), ), ) def test_routes(routes, path, expected_class, expected_matches): From b9c2b1cfc8692b9700416db98721fa3ec982f6be Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 19 Mar 2022 13:29:10 -0700 Subject: [PATCH 0169/1103] Consistent treatment of format in route capturing, refs #1667 Also refs #1660 --- datasette/app.py | 30 ++++++++++++------------------ tests/test_api.py | 4 ++-- tests/test_routes.py | 32 ++++++++++++++++++++++---------- 3 files changed, 36 insertions(+), 30 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 5259c50c..edef34e9 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -985,10 +985,7 @@ class Datasette: def add_route(view, regex): routes.append((regex, view)) - # Generate a regex snippet to match all registered renderer file extensions - renderer_regex = "|".join(r"\." 
+ key for key in self.renderers.keys()) - - add_route(IndexView.as_view(self), r"/(?P<format>(\.jsono?)?$)") + add_route(IndexView.as_view(self), r"/(\.(?P<format>jsono?))?$") # TODO: /favicon.ico and /-/static/ deserve far-future cache expires add_route(favicon, "/favicon.ico") @@ -1020,21 +1017,21 @@ class Datasette: ) add_route( JsonDataView.as_view(self, "metadata.json", lambda: self.metadata()), - r"/-/metadata(?P<format>(\.json)?)$", + r"/-/metadata(\.(?P<format>json))?$", ) add_route( JsonDataView.as_view(self, "versions.json", self._versions), - r"/-/versions(?P<format>(\.json)?)$", + r"/-/versions(\.(?P<format>json))?$", ) add_route( JsonDataView.as_view( self, "plugins.json", self._plugins, needs_request=True ), - r"/-/plugins(?P<format>(\.json)?)$", + r"/-/plugins(\.(?P<format>json))?$", ) add_route( JsonDataView.as_view(self, "settings.json", lambda: self._settings), - r"/-/settings(?P<format>(\.json)?)$", + r"/-/settings(\.(?P<format>json))?$", ) add_route( permanent_redirect("/-/settings.json"), @@ -1046,15 +1043,15 @@ class Datasette: ) add_route( JsonDataView.as_view(self, "threads.json", self._threads), - r"/-/threads(?P<format>(\.json)?)$", + r"/-/threads(\.(?P<format>json))?$", ) add_route( JsonDataView.as_view(self, "databases.json", self._connected_databases), - r"/-/databases(?P<format>(\.json)?)$", + r"/-/databases(\.(?P<format>json))?$", ) add_route( JsonDataView.as_view(self, "actor.json", self._actor, needs_request=True), - r"/-/actor(?P<format>(\.json)?)$", + r"/-/actor(\.(?P<format>json))?$", ) add_route( AuthTokenView.as_view(self), @@ -1080,20 +1077,17 @@ class Datasette: PatternPortfolioView.as_view(self), r"/-/patterns$", ) - add_route(DatabaseDownload.as_view(self), r"/(?P<database>[^/]+?)\.db$") + add_route(DatabaseDownload.as_view(self), r"/(?P<database>[^\/\.]+)\.db$") add_route( - DatabaseView.as_view(self), - r"/(?P<database>[^/]+?)(?P<format>" + renderer_regex + r"|.jsono|\.csv)?$", + DatabaseView.as_view(self), r"/(?P<database>[^\/\.]+)(\.(?P<format>\w+))?$" ) add_route( TableView.as_view(self), - r"/(?P<database>[^/]+)/(?P<table>[^\/\.]+)(\.[a-zA-Z0-9_]+)?$", + r"/(?P<database>[^\/\.]+)/(?P<table>[^\/\.]+)(\.(?P<format>\w+))?$", ) add_route( RowView.as_view(self), - r"/(?P<database>[^/]+)/(?P<table>[^/]+?)/(?P<pks>[^/]+?)(?P<format>" - + renderer_regex - + r")?$", + r"/(?P<database>[^\/\.]+)/(?P<table>[^/]+?)/(?P<pks>[^/]+?)(\.(?P<format>\w+))?$", ) return [ # Compile any strings to regular expressions diff --git a/tests/test_api.py b/tests/test_api.py index 421bb1fe..253c1718 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -629,8 +629,8 @@ def test_old_memory_urls_redirect(app_client_no_files, path, expected_redirect): def test_database_page_for_database_with_dot_in_name(app_client_with_dot): - response = app_client_with_dot.get("/fixtures.dot.json") - assert 200 == response.status + response = app_client_with_dot.get("/fixtures~2Edot.json") + assert response.status == 200 def test_custom_sql(app_client): diff --git a/tests/test_routes.py b/tests/test_routes.py index 349ac302..1fa55018 100644 --- a/tests/test_routes.py +++ b/tests/test_routes.py @@ -12,14 +12,26 @@ def routes(): @pytest.mark.parametrize( "path,expected_class,expected_matches", ( - ("/", "IndexView", {"format": ""}), + ("/", "IndexView", {"format": None}), ("/foo", "DatabaseView", {"format": None, "database": "foo"}), - ("/foo.csv", "DatabaseView", {"format": ".csv", "database": "foo"}), - ("/foo.json", "DatabaseView", {"format": ".json", "database": "foo"}), - 
("/foo.humbug", "DatabaseView", {"format": None, "database": "foo.humbug"}), - ("/foo/humbug", "TableView", {"database": "foo", "table": "humbug"}), - ("/foo/humbug.json", "TableView", {"database": "foo", "table": "humbug"}), - ("/foo/humbug.blah", "TableView", {"database": "foo", "table": "humbug"}), + ("/foo.csv", "DatabaseView", {"format": "csv", "database": "foo"}), + ("/foo.json", "DatabaseView", {"format": "json", "database": "foo"}), + ("/foo.humbug", "DatabaseView", {"format": "humbug", "database": "foo"}), + ( + "/foo/humbug", + "TableView", + {"database": "foo", "table": "humbug", "format": None}, + ), + ( + "/foo/humbug.json", + "TableView", + {"database": "foo", "table": "humbug", "format": "json"}, + ), + ( + "/foo/humbug.blah", + "TableView", + {"database": "foo", "table": "humbug", "format": "blah"}, + ), ( "/foo/humbug/1", "RowView", @@ -28,10 +40,10 @@ def routes(): ( "/foo/humbug/1.json", "RowView", - {"format": ".json", "database": "foo", "pks": "1", "table": "humbug"}, + {"format": "json", "database": "foo", "pks": "1", "table": "humbug"}, ), - ("/-/metadata.json", "JsonDataView", {"format": ".json"}), - ("/-/metadata", "JsonDataView", {"format": ""}), + ("/-/metadata.json", "JsonDataView", {"format": "json"}), + ("/-/metadata", "JsonDataView", {"format": None}), ), ) def test_routes(routes, path, expected_class, expected_matches): From 798f075ef9b98819fdb564f9f79c78975a0f71e8 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 19 Mar 2022 13:32:29 -0700 Subject: [PATCH 0170/1103] Read format from route captures, closes #1667 Refs #1660 --- datasette/utils/__init__.py | 20 -------------------- datasette/views/base.py | 12 +----------- tests/test_utils.py | 25 ------------------------- 3 files changed, 1 insertion(+), 56 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index ccdf8ad4..c89b9d23 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -731,26 +731,6 @@ def module_from_path(path, name): return mod -async def resolve_table_and_format( - table_and_format, table_exists, allowed_formats=None -): - if allowed_formats is None: - allowed_formats = [] - if "." in table_and_format: - # Check if a table exists with this exact name - it_exists = await table_exists(table_and_format) - if it_exists: - return table_and_format, None - - # Check if table ends with a known format - formats = list(allowed_formats) + ["csv", "jsono"] - for _format in formats: - if table_and_format.endswith(f".{_format}"): - table = table_and_format[: -(len(_format) + 1)] - return table, _format - return table_and_format, None - - def path_with_format( *, request=None, path=None, format=None, extra_qs=None, replace_format=None ): diff --git a/datasette/views/base.py b/datasette/views/base.py index 0bbf98bb..24e97d95 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -19,12 +19,10 @@ from datasette.utils import ( LimitedWriter, call_with_supported_arguments, tilde_decode, - tilde_encode, path_from_row_pks, path_with_added_args, path_with_removed_args, path_with_format, - resolve_table_and_format, sqlite3, HASH_LENGTH, ) @@ -372,18 +370,10 @@ class DataView(BaseView): return AsgiStream(stream_fn, headers=headers, content_type=content_type) - def get_format(self, request): - # Format is the bit from the path following the ., if one exists - last_path_component = request.path.split("/")[-1] - if "." 
in last_path_component: - return last_path_component.split(".")[-1] - else: - return None - async def get(self, request): db_name = request.url_vars["database"] database = tilde_decode(db_name) - _format = self.get_format(request) + _format = request.url_vars["format"] data_kwargs = {} if _format == "csv": diff --git a/tests/test_utils.py b/tests/test_utils.py index 790aadc7..7b41a87f 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -351,31 +351,6 @@ def test_compound_keys_after_sql(): ) -async def table_exists(table): - return table == "exists.csv" - - -@pytest.mark.asyncio -@pytest.mark.parametrize( - "table_and_format,expected_table,expected_format", - [ - ("blah", "blah", None), - ("blah.csv", "blah", "csv"), - ("blah.json", "blah", "json"), - ("blah.baz", "blah.baz", None), - ("exists.csv", "exists.csv", None), - ], -) -async def test_resolve_table_and_format( - table_and_format, expected_table, expected_format -): - actual_table, actual_format = await utils.resolve_table_and_format( - table_and_format, table_exists, ["json"] - ) - assert expected_table == actual_table - assert expected_format == actual_format - - def test_table_columns(): conn = sqlite3.connect(":memory:") conn.executescript( From 7a6654a253dee243518dc542ce4c06dbb0d0801d Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 19 Mar 2022 17:11:17 -0700 Subject: [PATCH 0171/1103] Databases can now have a .route separate from their .name, refs #1668 --- datasette/app.py | 13 ++++++-- datasette/database.py | 1 + datasette/views/base.py | 12 +++++-- datasette/views/database.py | 18 ++++++----- datasette/views/table.py | 29 ++++++++++++----- docs/internals.rst | 11 ++++--- tests/test_internals_datasette.py | 1 + tests/test_routes.py | 52 ++++++++++++++++++++++++++++++- 8 files changed, 111 insertions(+), 26 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index edef34e9..5c8101a3 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -388,13 +388,18 @@ class Datasette: def unsign(self, signed, namespace="default"): return URLSafeSerializer(self._secret, namespace).loads(signed) - def get_database(self, name=None): + def get_database(self, name=None, route=None): + if route is not None: + matches = [db for db in self.databases.values() if db.route == route] + if not matches: + raise KeyError + return matches[0] if name is None: - # Return first no-_schemas database + # Return first database that isn't "_internal" name = [key for key in self.databases.keys() if key != "_internal"][0] return self.databases[name] - def add_database(self, db, name=None): + def add_database(self, db, name=None, route=None): new_databases = self.databases.copy() if name is None: # Pick a unique name for this database @@ -407,6 +412,7 @@ class Datasette: name = "{}_{}".format(suggestion, i) i += 1 db.name = name + db.route = route or name new_databases[name] = db # don't mutate! 
that causes race conditions with live import self.databases = new_databases @@ -693,6 +699,7 @@ class Datasette: return [ { "name": d.name, + "route": d.route, "path": d.path, "size": d.size, "is_mutable": d.is_mutable, diff --git a/datasette/database.py b/datasette/database.py index 6ce87215..ba594a8c 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -31,6 +31,7 @@ class Database: self, ds, path=None, is_mutable=False, is_memory=False, memory_name=None ): self.name = None + self.route = None self.ds = ds self.path = path self.is_mutable = is_mutable diff --git a/datasette/views/base.py b/datasette/views/base.py index 24e97d95..afa9eaa6 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -371,13 +371,19 @@ class DataView(BaseView): return AsgiStream(stream_fn, headers=headers, content_type=content_type) async def get(self, request): - db_name = request.url_vars["database"] - database = tilde_decode(db_name) + database_route = tilde_decode(request.url_vars["database"]) + + try: + db = self.ds.get_database(route=database_route) + except KeyError: + raise NotFound("Database not found: {}".format(database_route)) + database = db.name + _format = request.url_vars["format"] data_kwargs = {} if _format == "csv": - return await self.as_csv(request, database) + return await self.as_csv(request, database_route) if _format is None: # HTML views default to expanding all foreign key labels diff --git a/datasette/views/database.py b/datasette/views/database.py index 93bd1011..2563c5b2 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -32,7 +32,13 @@ class DatabaseView(DataView): name = "database" async def data(self, request, default_labels=False, _size=None): - database = tilde_decode(request.url_vars["database"]) + database_route = tilde_decode(request.url_vars["database"]) + try: + db = self.ds.get_database(route=database_route) + except KeyError: + raise NotFound("Database not found: {}".format(database_route)) + database = db.name + await self.check_permissions( request, [ @@ -50,11 +56,6 @@ class DatabaseView(DataView): request, sql, _size=_size, metadata=metadata ) - try: - db = self.ds.databases[database] - except KeyError: - raise NotFound("Database not found: {}".format(database)) - table_counts = await db.table_counts(5) hidden_table_names = set(await db.hidden_table_names()) all_foreign_keys = await db.get_all_foreign_keys() @@ -171,9 +172,10 @@ class DatabaseDownload(DataView): "view-instance", ], ) - if database not in self.ds.databases: + try: + db = self.ds.get_database(route=database) + except KeyError: raise DatasetteError("Invalid database", status=404) - db = self.ds.databases[database] if db.is_memory: raise DatasetteError("Cannot download in-memory databases", status=404) if not self.ds.setting("allow_download") or db.is_mutable: diff --git a/datasette/views/table.py b/datasette/views/table.py index ea4f24b7..7fa1da3a 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -272,10 +272,15 @@ class TableView(RowTableShared): name = "table" async def post(self, request): - db_name = tilde_decode(request.url_vars["database"]) + database_route = tilde_decode(request.url_vars["database"]) + try: + db = self.ds.get_database(route=database_route) + except KeyError: + raise NotFound("Database not found: {}".format(database_route)) + database = db.name table = tilde_decode(request.url_vars["table"]) # Handle POST to a canned query - canned_query = await self.ds.get_canned_query(db_name, table, request.actor) + 
canned_query = await self.ds.get_canned_query(database, table, request.actor) assert canned_query, "You may only POST to a canned query" return await QueryView(self.ds).data( request, @@ -327,12 +332,13 @@ class TableView(RowTableShared): _next=None, _size=None, ): - database = tilde_decode(request.url_vars["database"]) + database_route = tilde_decode(request.url_vars["database"]) table = tilde_decode(request.url_vars["table"]) try: - db = self.ds.databases[database] + db = self.ds.get_database(route=database_route) except KeyError: - raise NotFound("Database not found: {}".format(database)) + raise NotFound("Database not found: {}".format(database_route)) + database = db.name # If this is a canned query, not a table, then dispatch to QueryView instead canned_query = await self.ds.get_canned_query(database, table, request.actor) @@ -938,8 +944,13 @@ class RowView(RowTableShared): name = "row" async def data(self, request, default_labels=False): - database = tilde_decode(request.url_vars["database"]) + database_route = tilde_decode(request.url_vars["database"]) table = tilde_decode(request.url_vars["table"]) + try: + db = self.ds.get_database(route=database_route) + except KeyError: + raise NotFound("Database not found: {}".format(database_route)) + database = db.name await self.check_permissions( request, [ @@ -949,7 +960,11 @@ class RowView(RowTableShared): ], ) pk_values = urlsafe_components(request.url_vars["pks"]) - db = self.ds.databases[database] + try: + db = self.ds.get_database(route=database_route) + except KeyError: + raise NotFound("Database not found: {}".format(database_route)) + database = db.name sql, params, pks = await _sql_params_pks(db, table, pk_values) results = await db.execute(sql, params, truncate=True) columns = [r[0] for r in results.description] diff --git a/docs/internals.rst b/docs/internals.rst index 117cb95c..323256c7 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -307,14 +307,17 @@ Returns the specified database object. Raises a ``KeyError`` if the database doe .. _datasette_add_database: -.add_database(db, name=None) ----------------------------- +.add_database(db, name=None, route=None) +---------------------------------------- ``db`` - datasette.database.Database instance The database to be attached. ``name`` - string, optional - The name to be used for this database - this will be used in the URL path, e.g. ``/dbname``. If not specified Datasette will pick one based on the filename or memory name. + The name to be used for this database . If not specified Datasette will pick one based on the filename or memory name. + +``route`` - string, optional + This will be used in the URL path. If not specified, it will default to the same thing as the ``name``. The ``datasette.add_database(db)`` method lets you add a new database to the current Datasette instance. @@ -371,7 +374,7 @@ Using either of these pattern will result in the in-memory database being served ``name`` - string The name of the database to be removed. -This removes a database that has been previously added. ``name=`` is the unique name of that database, used in its URL path. +This removes a database that has been previously added. ``name=`` is the unique name of that database. .. 
_datasette_sign: diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index adf84be9..cc200a2d 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -55,6 +55,7 @@ async def test_datasette_constructor(): assert databases == [ { "name": "_memory", + "route": "_memory", "path": None, "size": 0, "is_mutable": False, diff --git a/tests/test_routes.py b/tests/test_routes.py index 1fa55018..dd3bc644 100644 --- a/tests/test_routes.py +++ b/tests/test_routes.py @@ -1,6 +1,7 @@ -from datasette.app import Datasette +from datasette.app import Datasette, Database from datasette.utils import resolve_routes import pytest +import pytest_asyncio @pytest.fixture(scope="session") @@ -53,3 +54,52 @@ def test_routes(routes, path, expected_class, expected_matches): else: assert view.view_class.__name__ == expected_class assert match.groupdict() == expected_matches + + +@pytest_asyncio.fixture +async def ds_with_route(): + ds = Datasette() + ds.remove_database("_memory") + db = Database(ds, is_memory=True, memory_name="route-name-db") + ds.add_database(db, name="name", route="route-name") + await db.execute_write_script( + """ + create table if not exists t (id integer primary key); + insert or replace into t (id) values (1); + """ + ) + return ds + + +@pytest.mark.asyncio +async def test_db_with_route_databases(ds_with_route): + response = await ds_with_route.client.get("/-/databases.json") + assert response.json()[0] == { + "name": "name", + "route": "route-name", + "path": None, + "size": 0, + "is_mutable": True, + "is_memory": True, + "hash": None, + } + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "path,expected_status", + ( + ("/", 200), + ("/name", 404), + ("/name/t", 404), + ("/name/t/1", 404), + ("/route-name", 200), + ("/route-name/t", 200), + ("/route-name/t/1", 200), + ), +) +async def test_db_with_route_that_does_not_match_name( + ds_with_route, path, expected_status +): + response = await ds_with_route.client.get(path) + assert response.status_code == expected_status From e10da9af3595c0a4e09c6f370103571aa4ea106e Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 19 Mar 2022 17:21:56 -0700 Subject: [PATCH 0172/1103] alternative-route demo, refs #1668 --- .github/workflows/deploy-latest.yml | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 1ae96e89..92aa1c6b 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -42,6 +42,17 @@ jobs: sphinx-build -b xml . _build sphinx-to-sqlite ../docs.db _build cd .. 
+ - name: Set up the alternate-route demo + run: | + echo ' + from datasette import hookimpl + + @hookimpl + def startup(datasette): + db = datasette.get_database("fixtures2") + db.route = "alternative-route" + ' > plugins/alternative_route.py + cp fixtures.db fixtures2.db - name: Set up Cloud Run uses: google-github-actions/setup-gcloud@master with: @@ -54,7 +65,7 @@ jobs: gcloud config set project datasette-222320 export SUFFIX="-${GITHUB_REF#refs/heads/}" export SUFFIX=${SUFFIX#-main} - datasette publish cloudrun fixtures.db extra_database.db \ + datasette publish cloudrun fixtures.db fixtures2.db extra_database.db \ -m fixtures.json \ --plugins-dir=plugins \ --branch=$GITHUB_SHA \ From cdbae2b93f441653616dd889644c63e4150ceec1 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 19 Mar 2022 17:31:23 -0700 Subject: [PATCH 0173/1103] Fixed internal links to respect db.route, refs #1668 --- datasette/url_builder.py | 3 ++- datasette/views/table.py | 5 ++--- tests/test_routes.py | 22 +++++++++++++--------- 3 files changed, 17 insertions(+), 13 deletions(-) diff --git a/datasette/url_builder.py b/datasette/url_builder.py index 498ec85d..574bf3c1 100644 --- a/datasette/url_builder.py +++ b/datasette/url_builder.py @@ -28,7 +28,8 @@ class Urls: return self.path("-/logout") def database(self, database, format=None): - return self.path(tilde_encode(database), format=format) + db = self.ds.get_database(database) + return self.path(tilde_encode(db.route), format=format) def table(self, database, table, format=None): path = f"{self.database(database)}/{tilde_encode(table)}" diff --git a/datasette/views/table.py b/datasette/views/table.py index 7fa1da3a..8745c28a 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -141,10 +141,9 @@ class RowTableShared(DataView): "is_special_link_column": is_special_link_column, "raw": pk_path, "value": markupsafe.Markup( - '<a href="{base_url}{database}/{table}/{flat_pks_quoted}">{flat_pks}</a>'.format( + '<a href="{table_path}/{flat_pks_quoted}">{flat_pks}</a>'.format( base_url=base_url, - database=database, - table=tilde_encode(table), + table_path=self.ds.urls.table(database, table), flat_pks=str(markupsafe.escape(pk_path)), flat_pks_quoted=path_from_row_pks(row, pks, not pks), ) diff --git a/tests/test_routes.py b/tests/test_routes.py index dd3bc644..211b77b5 100644 --- a/tests/test_routes.py +++ b/tests/test_routes.py @@ -61,7 +61,7 @@ async def ds_with_route(): ds = Datasette() ds.remove_database("_memory") db = Database(ds, is_memory=True, memory_name="route-name-db") - ds.add_database(db, name="name", route="route-name") + ds.add_database(db, name="original-name", route="custom-route-name") await db.execute_write_script( """ create table if not exists t (id integer primary key); @@ -75,8 +75,8 @@ async def ds_with_route(): async def test_db_with_route_databases(ds_with_route): response = await ds_with_route.client.get("/-/databases.json") assert response.json()[0] == { - "name": "name", - "route": "route-name", + "name": "original-name", + "route": "custom-route-name", "path": None, "size": 0, "is_mutable": True, @@ -90,12 +90,12 @@ async def test_db_with_route_databases(ds_with_route): "path,expected_status", ( ("/", 200), - ("/name", 404), - ("/name/t", 404), - ("/name/t/1", 404), - ("/route-name", 200), - ("/route-name/t", 200), - ("/route-name/t/1", 200), + ("/original-name", 404), + ("/original-name/t", 404), + ("/original-name/t/1", 404), + ("/custom-route-name", 200), + ("/custom-route-name/t", 200), + 
("/custom-route-name/t/1", 200), ), ) async def test_db_with_route_that_does_not_match_name( @@ -103,3 +103,7 @@ async def test_db_with_route_that_does_not_match_name( ): response = await ds_with_route.client.get(path) assert response.status_code == expected_status + # There should be links to custom-route-name but none to original-name + if response.status_code == 200: + assert "/custom-route-name" in response.text + assert "/original-name" not in response.text From 5471e3c4914837de957e206d8fb80c9ec383bc2e Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 19 Mar 2022 18:14:40 -0700 Subject: [PATCH 0174/1103] Release 0.61a0 Refs #957, #1533, #1545, #1576, #1577, #1587, #1601, #1603, #1607, #1612, #1621, #1649, #1654, #1657, #1661, #1668 --- datasette/version.py | 2 +- docs/changelog.rst | 29 +++++++++++++++++++++++++++-- docs/performance.rst | 2 ++ 3 files changed, 30 insertions(+), 3 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index 91224615..ccc1e04b 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.60.2" +__version__ = "0.61a0" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index c58c8444..0f3d3aff 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,14 +4,39 @@ Changelog ========= -.. _v0_60.2: +.. _v0_61_a0: + +0.61a0 (2022-03-19) +------------------- + +- Removed hashed URL mode from Datasette. The new ``datasette-hashed-urls`` plugin can be used to achieve the same result, see :ref:`performance_hashed_urls` for details. (:issue:`1661`) +- Databases can now have a custom path within the Datasette instance that is indpendent of the database name, using the ``db.route`` property. (:issue:`1668`) +- URLs within Datasette now use a different encoding scheme for tables or databases that include "special" characters outside of the range of ``a-zA-Z0-9_-``. This scheme is explained here: :ref:`internals_tilde_encoding`. (:issue:`1657`) +- Table and row HTML pages now include a ``<link rel="alternate" type="application/json+datasette" href="...">`` element and return a ``Link: URL; rel="alternate"; type="application/json+datasette"`` HTTP header pointing to the JSON version of those pages. (:issue:`1533`) +- ``Access-Control-Expose-Headers: Link`` is now added to the CORS headers, allowing remote JavaScript to access that header. +- Canned queries are now shown at the top of the database page, directly below the SQL editor. Previously they were shown at the bottom, below the list of tables. (:issue:`1612`) +- Datasette now has a default favicon. (:issue:`1603`) +- ``sqlite_stat`` tables are now hidden by default. (:issue:`1587`) +- SpatiaLite tables ``data_licenses``, ``KNN`` and ``KNN2`` are now hidden by default. (:issue:`1601`) +- Python 3.6 is no longer supported. (:issue:`1577`) +- Tests now run against Python 3.11-dev. (:issue:`1621`) +- Fixed bug where :ref:`custom pages <custom_pages>` did not work on Windows. Thanks, Robert Christie. (:issue:`1545`) +- SQL query tracing mechanism now works for queries executed in ``asyncio`` sub-tasks, such as those created by ``asyncio.gather()``. (:issue:`1576`) +- :ref:`internals_tracer` mechanism is now documented. +- Common Datasette symbols can now be imported directly from the top-level ``datasette`` package, see :ref:`internals_shortcuts`. Those symbols are ``Response``, ``Forbidden``, ``NotFound``, ``hookimpl``, ``actor_matches_allow``. 
(:issue:`957`) +- ``/-/versions`` page now returns additional details for libraries used by SpatiaLite. (:issue:`1607`) +- Documentation now links to the `Datasette Tutorials <https://datasette.io/tutorials>`__. +- Datasette will now also look for SpatiaLite in ``/opt/homebrew`` - thanks, Dan Peterson. (`#1649 <https://github.com/simonw/datasette/pull/1649>`__) +- Datasette is now covered by a `Code of Conduct <https://github.com/simonw/datasette/blob/main/CODE_OF_CONDUCT.md>`__. (:issue:`1654`) + +.. _v0_60_2: 0.60.2 (2022-02-07) ------------------- - Fixed a bug where Datasette would open the same file twice with two different database names if you ran ``datasette file.db file.db``. (:issue:`1632`) -.. _v0_60.1: +.. _v0_60_1: 0.60.1 (2022-01-20) ------------------- diff --git a/docs/performance.rst b/docs/performance.rst index d37f1804..89bbf5ae 100644 --- a/docs/performance.rst +++ b/docs/performance.rst @@ -60,6 +60,8 @@ The :ref:`setting_default_cache_ttl` setting sets the default HTTP cache TTL for You can also change the cache timeout on a per-request basis using the ``?_ttl=10`` query string parameter. This can be useful when you are working with the Datasette JSON API - you may decide that a specific query can be cached for a longer time, or maybe you need to set ``?_ttl=0`` for some requests for example if you are running a SQL ``order by random()`` query. +.. _performance_hashed_urls: + datasette-hashed-urls --------------------- From cb4854a435cc1418665edec2a73664ad74a32017 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 19 Mar 2022 18:17:58 -0700 Subject: [PATCH 0175/1103] Fixed typo --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 0f3d3aff..9f5a143c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -10,7 +10,7 @@ Changelog ------------------- - Removed hashed URL mode from Datasette. The new ``datasette-hashed-urls`` plugin can be used to achieve the same result, see :ref:`performance_hashed_urls` for details. (:issue:`1661`) -- Databases can now have a custom path within the Datasette instance that is indpendent of the database name, using the ``db.route`` property. (:issue:`1668`) +- Databases can now have a custom path within the Datasette instance that is independent of the database name, using the ``db.route`` property. (:issue:`1668`) - URLs within Datasette now use a different encoding scheme for tables or databases that include "special" characters outside of the range of ``a-zA-Z0-9_-``. This scheme is explained here: :ref:`internals_tilde_encoding`. (:issue:`1657`) - Table and row HTML pages now include a ``<link rel="alternate" type="application/json+datasette" href="...">`` element and return a ``Link: URL; rel="alternate"; type="application/json+datasette"`` HTTP header pointing to the JSON version of those pages. (:issue:`1533`) - ``Access-Control-Expose-Headers: Link`` is now added to the CORS headers, allowing remote JavaScript to access that header. 
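A rough sketch of the ``db.route`` behaviour described in the changelog entries above, based on the tests added for #1668 (illustrative only, not part of the patch series; the database name, route and memory name are invented):

    from datasette.app import Datasette, Database

    datasette = Datasette()
    db = Database(datasette, is_memory=True, memory_name="example")
    # Serve this database at /custom-route-name instead of /original-name
    datasette.add_database(db, name="original-name", route="custom-route-name")
    # Look it up again by route rather than by name
    db = datasette.get_database(route="custom-route-name")
    # Requests to /custom-route-name now return 200, while /original-name returns 404
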
From 4a4164b81191dec35e423486a208b05a9edc65e4 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 19 Mar 2022 18:23:03 -0700 Subject: [PATCH 0176/1103] Added another note to the 0.61a0 release notes, refs #1228 --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 9f5a143c..05ad85f2 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -28,6 +28,7 @@ Changelog - Documentation now links to the `Datasette Tutorials <https://datasette.io/tutorials>`__. - Datasette will now also look for SpatiaLite in ``/opt/homebrew`` - thanks, Dan Peterson. (`#1649 <https://github.com/simonw/datasette/pull/1649>`__) - Datasette is now covered by a `Code of Conduct <https://github.com/simonw/datasette/blob/main/CODE_OF_CONDUCT.md>`__. (:issue:`1654`) +- Fixed error caused when a table had a column named ``n``. (:issue:`1228`) .. _v0_60_2: From e627510b760198ccedba9e5af47a771e847785c9 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 21 Mar 2022 10:13:16 -0700 Subject: [PATCH 0177/1103] BaseView.check_permissions is now datasette.ensure_permissions, closes #1675 Refs #1660 --- datasette/app.py | 35 +++++++++++++++++++++++++++++++++++ datasette/views/base.py | 26 -------------------------- datasette/views/database.py | 12 ++++++------ datasette/views/table.py | 8 ++++---- docs/internals.rst | 26 ++++++++++++++++++++++++++ 5 files changed, 71 insertions(+), 36 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 5c8101a3..9e509e96 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1,4 +1,5 @@ import asyncio +from typing import Sequence, Union, Tuple import asgi_csrf import collections import datetime @@ -628,6 +629,40 @@ class Datasette: ) return result + async def ensure_permissions( + self, + actor: dict, + permissions: Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]], + ): + """ + permissions is a list of (action, resource) tuples or 'action' strings + + Raises datasette.Forbidden() if any of the checks fail + """ + for permission in permissions: + if isinstance(permission, str): + action = permission + resource = None + elif isinstance(permission, (tuple, list)) and len(permission) == 2: + action, resource = permission + else: + assert ( + False + ), "permission should be string or tuple of two items: {}".format( + repr(permission) + ) + ok = await self.permission_allowed( + actor, + action, + resource=resource, + default=None, + ) + if ok is not None: + if ok: + return + else: + raise Forbidden(action) + async def execute( self, db_name, diff --git a/datasette/views/base.py b/datasette/views/base.py index afa9eaa6..d1e684a2 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -76,32 +76,6 @@ class BaseView: if not ok: raise Forbidden(action) - async def check_permissions(self, request, permissions): - """permissions is a list of (action, resource) tuples or 'action' strings""" - for permission in permissions: - if isinstance(permission, str): - action = permission - resource = None - elif isinstance(permission, (tuple, list)) and len(permission) == 2: - action, resource = permission - else: - assert ( - False - ), "permission should be string or tuple of two items: {}".format( - repr(permission) - ) - ok = await self.ds.permission_allowed( - request.actor, - action, - resource=resource, - default=None, - ) - if ok is not None: - if ok: - return - else: - raise Forbidden(action) - def database_color(self, database): return "ff0000" 
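A minimal sketch of how the new ``datasette.ensure_permissions()`` method added above is meant to be called (illustrative only, not part of this patch; the wrapper function name is invented, but the list of checks mirrors the one used in the view code that follows):

    from datasette import Forbidden

    async def actor_can_view_table(datasette, actor, database, table):
        # ensure_permissions() walks the checks in order: it raises
        # datasette.Forbidden as soon as a check is explicitly denied, returns as
        # soon as one is explicitly allowed, and falls through to the next check
        # when a check expresses no opinion.
        try:
            await datasette.ensure_permissions(
                actor,
                [
                    ("view-table", (database, table)),
                    ("view-database", database),
                    "view-instance",
                ],
            )
            return True
        except Forbidden:
            return False
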
diff --git a/datasette/views/database.py b/datasette/views/database.py index 2563c5b2..69ed1233 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -39,8 +39,8 @@ class DatabaseView(DataView): raise NotFound("Database not found: {}".format(database_route)) database = db.name - await self.check_permissions( - request, + await self.ds.ensure_permissions( + request.actor, [ ("view-database", database), "view-instance", @@ -164,8 +164,8 @@ class DatabaseDownload(DataView): async def get(self, request): database = tilde_decode(request.url_vars["database"]) - await self.check_permissions( - request, + await self.ds.ensure_permissions( + request.actor, [ ("view-database-download", database), ("view-database", database), @@ -217,8 +217,8 @@ class QueryView(DataView): private = False if canned_query: # Respect canned query permissions - await self.check_permissions( - request, + await self.ds.ensure_permissions( + request.actor, [ ("view-query", (database, canned_query)), ("view-database", database), diff --git a/datasette/views/table.py b/datasette/views/table.py index 8745c28a..84169820 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -360,8 +360,8 @@ class TableView(RowTableShared): raise NotFound(f"Table not found: {table}") # Ensure user has permission to view this table - await self.check_permissions( - request, + await self.ds.ensure_permissions( + request.actor, [ ("view-table", (database, table)), ("view-database", database), @@ -950,8 +950,8 @@ class RowView(RowTableShared): except KeyError: raise NotFound("Database not found: {}".format(database_route)) database = db.name - await self.check_permissions( - request, + await self.ds.ensure_permissions( + request.actor, [ ("view-table", (database, table)), ("view-database", database), diff --git a/docs/internals.rst b/docs/internals.rst index 323256c7..12adde00 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -295,6 +295,32 @@ If neither ``metadata.json`` nor any of the plugins provide an answer to the per See :ref:`permissions` for a full list of permission actions included in Datasette core. +.. _datasette_permission_allowed: + +await .ensure_permissions(actor, permissions) +--------------------------------------------- + +``actor`` - dictionary + The authenticated actor. This is usually ``request.actor``. + +``permissions`` - list + A list of permissions to check. Each permission in that list can be a string ``action`` name or a 2-tuple of ``(action, resource)``. + +This method allows multiple permissions to be checked at onced. It raises a ``datasette.Forbidden`` exception if any of the checks are denied before one of them is explicitly granted. + +This is useful when you need to check multiple permissions at once. For example, an actor should be able to view a table if either one of the following checks returns ``True`` or not a single one of them returns ``False``: + +.. code-block:: python + + await self.ds.ensure_permissions( + request.actor, + [ + ("view-table", (database, table)), + ("view-database", database), + "view-instance", + ] + ) + .. 
_datasette_get_database: .get_database(name) From dfafce6d962d615d98a7080e546c7b3662ae7d34 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 21 Mar 2022 11:37:27 -0700 Subject: [PATCH 0178/1103] Display no-opinion permission checks on /-/permissions --- datasette/templates/permissions_debug.html | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/datasette/templates/permissions_debug.html b/datasette/templates/permissions_debug.html index d898ea8c..db709c14 100644 --- a/datasette/templates/permissions_debug.html +++ b/datasette/templates/permissions_debug.html @@ -10,6 +10,9 @@ .check-result-false { color: red; } +.check-result-no-opinion { + color: #aaa; +} .check h2 { font-size: 1em } @@ -38,6 +41,8 @@ <span class="check-when">{{ check.when }}</span> {% if check.result %} <span class="check-result check-result-true">✓</span> + {% elif check.result is none %} + <span class="check-result check-result-no-opinion">none</span> {% else %} <span class="check-result check-result-false">✗</span> {% endif %} From 194e4f6c3fffde69eb196f8535ca45386b40ec2d Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 21 Mar 2022 11:41:56 -0700 Subject: [PATCH 0179/1103] Removed check_permission() from BaseView, closes #1677 Refs #1660 --- datasette/app.py | 1 + datasette/views/base.py | 10 ---------- datasette/views/database.py | 2 +- datasette/views/index.py | 2 +- datasette/views/special.py | 10 +++++----- tests/test_permissions.py | 13 ++++++++----- 6 files changed, 16 insertions(+), 22 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 9e509e96..22ae211f 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -639,6 +639,7 @@ class Datasette: Raises datasette.Forbidden() if any of the checks fail """ + assert actor is None or isinstance(actor, dict) for permission in permissions: if isinstance(permission, str): action = permission diff --git a/datasette/views/base.py b/datasette/views/base.py index d1e684a2..221e1882 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -66,16 +66,6 @@ class BaseView: response.body = b"" return response - async def check_permission(self, request, action, resource=None): - ok = await self.ds.permission_allowed( - request.actor, - action, - resource=resource, - default=True, - ) - if not ok: - raise Forbidden(action) - def database_color(self, database): return "ff0000" diff --git a/datasette/views/database.py b/datasette/views/database.py index 69ed1233..31a1839f 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -229,7 +229,7 @@ class QueryView(DataView): None, "view-query", (database, canned_query), default=True ) else: - await self.check_permission(request, "execute-sql", database) + await self.ds.ensure_permissions(request.actor, [("execute-sql", database)]) # Extract any :named parameters named_parameters = named_parameters or await derive_named_parameters( diff --git a/datasette/views/index.py b/datasette/views/index.py index f5e31181..1c391e26 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -20,7 +20,7 @@ class IndexView(BaseView): async def get(self, request): as_format = request.url_vars["format"] - await self.check_permission(request, "view-instance") + await self.ds.ensure_permissions(request.actor, ["view-instance"]) databases = [] for name, db in self.ds.databases.items(): visible, database_private = await check_visibility( diff --git a/datasette/views/special.py b/datasette/views/special.py index 395ee587..dd834528 
100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -16,7 +16,7 @@ class JsonDataView(BaseView): async def get(self, request): as_format = request.url_vars["format"] - await self.check_permission(request, "view-instance") + await self.ds.ensure_permissions(request.actor, ["view-instance"]) if self.needs_request: data = self.data_callback(request) else: @@ -47,7 +47,7 @@ class PatternPortfolioView(BaseView): has_json_alternate = False async def get(self, request): - await self.check_permission(request, "view-instance") + await self.ds.ensure_permissions(request.actor, ["view-instance"]) return await self.render(["patterns.html"], request=request) @@ -95,7 +95,7 @@ class PermissionsDebugView(BaseView): has_json_alternate = False async def get(self, request): - await self.check_permission(request, "view-instance") + await self.ds.ensure_permissions(request.actor, ["view-instance"]) if not await self.ds.permission_allowed(request.actor, "permissions-debug"): raise Forbidden("Permission denied") return await self.render( @@ -146,11 +146,11 @@ class MessagesDebugView(BaseView): has_json_alternate = False async def get(self, request): - await self.check_permission(request, "view-instance") + await self.ds.ensure_permissions(request.actor, ["view-instance"]) return await self.render(["messages_debug.html"], request) async def post(self, request): - await self.check_permission(request, "view-instance") + await self.ds.ensure_permissions(request.actor, ["view-instance"]) post = await request.post_vars() message = post.get("message", "") message_type = post.get("message_type") or "INFO" diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 788523b0..f4169dbe 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -321,17 +321,20 @@ def test_permissions_debug(app_client): checks = [ { "action": div.select_one(".check-action").text, - "result": bool(div.select(".check-result-true")), + # True = green tick, False = red cross, None = gray None + "result": None + if div.select(".check-result-no-opinion") + else bool(div.select(".check-result-true")), "used_default": bool(div.select(".check-used-default")), } for div in check_divs ] - assert [ + assert checks == [ {"action": "permissions-debug", "result": True, "used_default": False}, - {"action": "view-instance", "result": True, "used_default": True}, + {"action": "view-instance", "result": None, "used_default": True}, {"action": "permissions-debug", "result": False, "used_default": True}, - {"action": "view-instance", "result": True, "used_default": True}, - ] == checks + {"action": "view-instance", "result": None, "used_default": True}, + ] @pytest.mark.parametrize( From 1a7750eb29fd15dd2eea3b9f6e33028ce441b143 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 21 Mar 2022 12:01:37 -0700 Subject: [PATCH 0180/1103] Documented datasette.check_visibility() method, closes #1678 --- datasette/app.py | 18 ++++++++++++++++++ datasette/utils/__init__.py | 19 ------------------- datasette/views/database.py | 10 +++------- datasette/views/index.py | 11 ++++------- docs/internals.rst | 28 +++++++++++++++++++++++++++- 5 files changed, 52 insertions(+), 34 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 22ae211f..c9eede26 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -664,6 +664,24 @@ class Datasette: else: raise Forbidden(action) + async def check_visibility(self, actor, action, resource): + """Returns (visible, private) - visible = can you see it, 
private = can others see it too""" + visible = await self.permission_allowed( + actor, + action, + resource=resource, + default=True, + ) + if not visible: + return False, False + private = not await self.permission_allowed( + None, + action, + resource=resource, + default=True, + ) + return visible, private + async def execute( self, db_name, diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index c89b9d23..cd8e3d61 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1002,25 +1002,6 @@ def actor_matches_allow(actor, allow): return False -async def check_visibility(datasette, actor, action, resource, default=True): - """Returns (visible, private) - visible = can you see it, private = can others see it too""" - visible = await datasette.permission_allowed( - actor, - action, - resource=resource, - default=default, - ) - if not visible: - return False, False - private = not await datasette.permission_allowed( - None, - action, - resource=resource, - default=default, - ) - return visible, private - - def resolve_env_secrets(config, environ): """Create copy that recursively replaces {"$env": "NAME"} with values from environ""" if isinstance(config, dict): diff --git a/datasette/views/database.py b/datasette/views/database.py index 31a1839f..103bd575 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -10,7 +10,6 @@ import markupsafe from datasette.utils import ( add_cors_headers, await_me_maybe, - check_visibility, derive_named_parameters, tilde_decode, to_css_class, @@ -62,8 +61,7 @@ class DatabaseView(DataView): views = [] for view_name in await db.view_names(): - visible, private = await check_visibility( - self.ds, + visible, private = await self.ds.check_visibility( request.actor, "view-table", (database, view_name), @@ -78,8 +76,7 @@ class DatabaseView(DataView): tables = [] for table in table_counts: - visible, private = await check_visibility( - self.ds, + visible, private = await self.ds.check_visibility( request.actor, "view-table", (database, table), @@ -105,8 +102,7 @@ class DatabaseView(DataView): for query in ( await self.ds.get_canned_queries(database, request.actor) ).values(): - visible, private = await check_visibility( - self.ds, + visible, private = await self.ds.check_visibility( request.actor, "view-query", (database, query["name"]), diff --git a/datasette/views/index.py b/datasette/views/index.py index 1c391e26..aec78814 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -1,7 +1,7 @@ import hashlib import json -from datasette.utils import add_cors_headers, check_visibility, CustomJSONEncoder +from datasette.utils import add_cors_headers, CustomJSONEncoder from datasette.utils.asgi import Response from datasette.version import __version__ @@ -23,8 +23,7 @@ class IndexView(BaseView): await self.ds.ensure_permissions(request.actor, ["view-instance"]) databases = [] for name, db in self.ds.databases.items(): - visible, database_private = await check_visibility( - self.ds, + visible, database_private = await self.ds.check_visibility( request.actor, "view-database", name, @@ -36,8 +35,7 @@ class IndexView(BaseView): views = [] for view_name in await db.view_names(): - visible, private = await check_visibility( - self.ds, + visible, private = await self.ds.check_visibility( request.actor, "view-table", (name, view_name), @@ -55,8 +53,7 @@ class IndexView(BaseView): tables = {} for table in table_names: - visible, private = await check_visibility( - self.ds, + visible, private = await 
self.ds.check_visibility( request.actor, "view-table", (name, table), diff --git a/docs/internals.rst b/docs/internals.rst index 12adde00..f9a24fea 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -295,7 +295,7 @@ If neither ``metadata.json`` nor any of the plugins provide an answer to the per See :ref:`permissions` for a full list of permission actions included in Datasette core. -.. _datasette_permission_allowed: +.. _datasette_ensure_permissions: await .ensure_permissions(actor, permissions) --------------------------------------------- @@ -321,6 +321,32 @@ This is useful when you need to check multiple permissions at once. For example, ] ) +.. _datasette_check_visibilty: + +await .check_visibility(actor, action, resource=None) +----------------------------------------------------- + +``actor`` - dictionary + The authenticated actor. This is usually ``request.actor``. + +``action`` - string + The name of the action that is being permission checked. + +``resource`` - string or tuple, optional + The resource, e.g. the name of the database, or a tuple of two strings containing the name of the database and the name of the table. Only some permissions apply to a resource. + +This convenience method can be used to answer the question "should this item be considered private, in that it is visible to me but it is not visible to anonymous users?" + +It returns a tuple of two booleans, ``(visible, private)``. ``visible`` indicates if the actor can see this resource. ``private`` will be ``True`` if an anonymous user would not be able to view the resource. + +This example checks if the user can access a specific table, and sets ``private`` so that a padlock icon can later be displayed: + +.. code-block:: python + + visible, private = await self.ds.check_visibility( + request.actor, "view-table", (database, table) + ) + .. _datasette_get_database: .get_database(name) From 72bfd75fb7241893c931348e6aca712edc67ab04 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 21 Mar 2022 14:55:50 -0700 Subject: [PATCH 0181/1103] Drop n=1 threshold down to <= 20ms, closes #1679 --- datasette/utils/__init__.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index cd8e3d61..9109f823 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -182,15 +182,16 @@ class CustomJSONEncoder(json.JSONEncoder): def sqlite_timelimit(conn, ms): deadline = time.perf_counter() + (ms / 1000) # n is the number of SQLite virtual machine instructions that will be - # executed between each check. It's hard to know what to pick here. - # After some experimentation, I've decided to go with 1000 by default and - # 1 for time limits that are less than 50ms + # executed between each check. It takes about 0.08ms to execute 1000. 
+ # https://github.com/simonw/datasette/issues/1679 n = 1000 - if ms < 50: + if ms <= 20: + # This mainly happens while executing our test suite n = 1 def handler(): if time.perf_counter() >= deadline: + # Returning 1 terminates the query with an error return 1 conn.set_progress_handler(handler, n) From 12f3ca79956ed9003c874f67748432adcacc6fd2 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 21 Mar 2022 18:42:03 -0700 Subject: [PATCH 0182/1103] google-github-actions/setup-gcloud@v0 --- .github/workflows/deploy-latest.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 92aa1c6b..a61f6629 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -14,7 +14,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" - uses: actions/cache@v2 name: Configure pip caching with: @@ -54,7 +54,7 @@ jobs: ' > plugins/alternative_route.py cp fixtures.db fixtures2.db - name: Set up Cloud Run - uses: google-github-actions/setup-gcloud@master + uses: google-github-actions/setup-gcloud@v0 with: version: '275.0.0' service_account_email: ${{ secrets.GCP_SA_EMAIL }} From c4c9dbd0386e46d2bf199f0ed34e4895c98cb78c Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 22 Mar 2022 09:49:26 -0700 Subject: [PATCH 0183/1103] google-github-actions/setup-gcloud@v0 --- .github/workflows/publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 3cfc67da..3e4f8146 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -85,7 +85,7 @@ jobs: sphinx-to-sqlite ../docs.db _build cd .. - name: Set up Cloud Run - uses: google-github-actions/setup-gcloud@master + uses: google-github-actions/setup-gcloud@v0 with: version: '275.0.0' service_account_email: ${{ secrets.GCP_SA_EMAIL }} From d7c793d7998388d915f8d270079c68a77a785051 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 23 Mar 2022 11:12:26 -0700 Subject: [PATCH 0184/1103] Release 0.61 Refs #957, #1228, #1533, #1545, #1576, #1577, #1587, #1601, #1603, #1607, #1612, #1621, #1649, #1654, #1657, #1661, #1668, #1675, #1678 --- datasette/version.py | 2 +- docs/changelog.rst | 22 ++++++++++++++-------- 2 files changed, 15 insertions(+), 9 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index ccc1e04b..f9b10696 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.61a0" +__version__ = "0.61" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 05ad85f2..d2de8da1 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,30 +4,36 @@ Changelog ========= -.. _v0_61_a0: +.. _v0_61: -0.61a0 (2022-03-19) -------------------- +0.61 (2022-03-23) +----------------- +In preparation for Datasette 1.0, this release includes two potentially backwards-incompatible changes. Hashed URL mode has been moved to a separate plugin, and the way Datasette generates URLs to databases and tables with special characters in their name such as ``/`` and ``.`` has changed. + +Datasette also now requires Python 3.7 or higher. + +- URLs within Datasette now use a different encoding scheme for tables or databases that include "special" characters outside of the range of ``a-zA-Z0-9_-``. 
This scheme is explained here: :ref:`internals_tilde_encoding`. (:issue:`1657`) - Removed hashed URL mode from Datasette. The new ``datasette-hashed-urls`` plugin can be used to achieve the same result, see :ref:`performance_hashed_urls` for details. (:issue:`1661`) - Databases can now have a custom path within the Datasette instance that is independent of the database name, using the ``db.route`` property. (:issue:`1668`) -- URLs within Datasette now use a different encoding scheme for tables or databases that include "special" characters outside of the range of ``a-zA-Z0-9_-``. This scheme is explained here: :ref:`internals_tilde_encoding`. (:issue:`1657`) +- Datasette is now covered by a `Code of Conduct <https://github.com/simonw/datasette/blob/main/CODE_OF_CONDUCT.md>`__. (:issue:`1654`) +- Python 3.6 is no longer supported. (:issue:`1577`) +- Tests now run against Python 3.11-dev. (:issue:`1621`) +- New :ref:`datasette.ensure_permissions(actor, permissions) <datasette_ensure_permissions>` internal method for checking multiple permissions at once. (:issue:`1675`) +- New :ref:`datasette.check_visibility(actor, action, resource=None) <datasette_check_visibilty>` internal method for checking if a user can see a resource that would otherwise be invisible to unauthenticated users. (:issue:`1678`) - Table and row HTML pages now include a ``<link rel="alternate" type="application/json+datasette" href="...">`` element and return a ``Link: URL; rel="alternate"; type="application/json+datasette"`` HTTP header pointing to the JSON version of those pages. (:issue:`1533`) - ``Access-Control-Expose-Headers: Link`` is now added to the CORS headers, allowing remote JavaScript to access that header. - Canned queries are now shown at the top of the database page, directly below the SQL editor. Previously they were shown at the bottom, below the list of tables. (:issue:`1612`) - Datasette now has a default favicon. (:issue:`1603`) - ``sqlite_stat`` tables are now hidden by default. (:issue:`1587`) - SpatiaLite tables ``data_licenses``, ``KNN`` and ``KNN2`` are now hidden by default. (:issue:`1601`) -- Python 3.6 is no longer supported. (:issue:`1577`) -- Tests now run against Python 3.11-dev. (:issue:`1621`) -- Fixed bug where :ref:`custom pages <custom_pages>` did not work on Windows. Thanks, Robert Christie. (:issue:`1545`) - SQL query tracing mechanism now works for queries executed in ``asyncio`` sub-tasks, such as those created by ``asyncio.gather()``. (:issue:`1576`) - :ref:`internals_tracer` mechanism is now documented. - Common Datasette symbols can now be imported directly from the top-level ``datasette`` package, see :ref:`internals_shortcuts`. Those symbols are ``Response``, ``Forbidden``, ``NotFound``, ``hookimpl``, ``actor_matches_allow``. (:issue:`957`) - ``/-/versions`` page now returns additional details for libraries used by SpatiaLite. (:issue:`1607`) - Documentation now links to the `Datasette Tutorials <https://datasette.io/tutorials>`__. - Datasette will now also look for SpatiaLite in ``/opt/homebrew`` - thanks, Dan Peterson. (`#1649 <https://github.com/simonw/datasette/pull/1649>`__) -- Datasette is now covered by a `Code of Conduct <https://github.com/simonw/datasette/blob/main/CODE_OF_CONDUCT.md>`__. (:issue:`1654`) +- Fixed bug where :ref:`custom pages <custom_pages>` did not work on Windows. Thanks, Robert Christie. (:issue:`1545`) - Fixed error caused when a table had a column named ``n``. (:issue:`1228`) .. 
_v0_60_2: From 0159662ab8ccb363c59647861360e0cb7a6f930d Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 23 Mar 2022 11:48:10 -0700 Subject: [PATCH 0185/1103] Fix for bug running ?sql= against databases with a different route, closes #1682 --- datasette/views/database.py | 7 ++++++- tests/test_routes.py | 1 + 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index 103bd575..bdd433cc 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -203,7 +203,12 @@ class QueryView(DataView): named_parameters=None, write=False, ): - database = tilde_decode(request.url_vars["database"]) + database_route = tilde_decode(request.url_vars["database"]) + try: + db = self.ds.get_database(route=database_route) + except KeyError: + raise NotFound("Database not found: {}".format(database_route)) + database = db.name params = {key: request.args.get(key) for key in request.args} if "sql" in params: params.pop("sql") diff --git a/tests/test_routes.py b/tests/test_routes.py index 211b77b5..5ae55d21 100644 --- a/tests/test_routes.py +++ b/tests/test_routes.py @@ -94,6 +94,7 @@ async def test_db_with_route_databases(ds_with_route): ("/original-name/t", 404), ("/original-name/t/1", 404), ("/custom-route-name", 200), + ("/custom-route-name?sql=select+id+from+t", 200), ("/custom-route-name/t", 200), ("/custom-route-name/t/1", 200), ), From d431a9055e977aefe48689a2e5866ea8d3558a6c Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 23 Mar 2022 11:54:10 -0700 Subject: [PATCH 0186/1103] Release 0.61.1 Refs #1682 Refs https://github.com/simonw/datasette-hashed-urls/issues/13 --- datasette/version.py | 2 +- docs/changelog.rst | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index f9b10696..02451a1e 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.61" +__version__ = "0.61.1" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index d2de8da1..03cf62b6 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,13 @@ Changelog ========= +.. _v0_61_1: + +0.61.1 (2022-03-23) +------------------- + +- Fixed a bug where databases with a different route from their name (as used by the `datasette-hashed-urls plugin <https://datasette.io/plugins/datasette-hashed-urls>`__) returned errors when executing custom SQL queries. (:issue:`1682`) + .. 
_v0_61: 0.61 (2022-03-23) From c496f2b663ff0cef908ffaaa68b8cb63111fb5f2 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 24 Mar 2022 12:16:19 -0700 Subject: [PATCH 0187/1103] Don't show facet in cog menu if not allow_facet, closes #1683 --- datasette/static/table.js | 10 ++++++++-- datasette/templates/table.html | 1 + datasette/views/table.py | 3 +++ tests/test_table_html.py | 14 ++++++++++++++ 4 files changed, 26 insertions(+), 2 deletions(-) diff --git a/datasette/static/table.js b/datasette/static/table.js index 3c88cc40..096a27ac 100644 --- a/datasette/static/table.js +++ b/datasette/static/table.js @@ -128,7 +128,8 @@ var DROPDOWN_ICON_SVG = `<svg xmlns="http://www.w3.org/2000/svg" width="14" heig } else { hideColumn.parentNode.style.display = "none"; } - /* Only show facet if it's not the first column, not selected, not a single PK */ + /* Only show "Facet by this" if it's not the first column, not selected, + not a single PK and the Datasette allow_facet setting is True */ var displayedFacets = Array.from( document.querySelectorAll(".facet-info") ).map((el) => el.dataset.column); @@ -137,7 +138,12 @@ var DROPDOWN_ICON_SVG = `<svg xmlns="http://www.w3.org/2000/svg" width="14" heig var isSinglePk = th.getAttribute("data-is-pk") == "1" && document.querySelectorAll('th[data-is-pk="1"]').length == 1; - if (isFirstColumn || displayedFacets.includes(column) || isSinglePk) { + if ( + !DATASETTE_ALLOW_FACET || + isFirstColumn || + displayedFacets.includes(column) || + isSinglePk + ) { facetItem.parentNode.style.display = "none"; } else { facetItem.parentNode.style.display = "block"; diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 81bd044a..a9e88330 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -5,6 +5,7 @@ {% block extra_head %} {{- super() -}} <script src="{{ urls.static('table.js') }}" defer></script> +<script>DATASETTE_ALLOW_FACET = {{ datasette_allow_facet }};</script> <style> @media only screen and (max-width: 576px) { {% for column in display_columns -%} diff --git a/datasette/views/table.py b/datasette/views/table.py index 84169820..cd7afea6 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -888,6 +888,9 @@ class TableView(RowTableShared): "metadata": metadata, "view_definition": await db.get_view_definition(table), "table_definition": await db.get_table_definition(table), + "datasette_allow_facet": "true" + if self.ds.setting("allow_facet") + else "false", } d.update(extra_context_from_filters) return d diff --git a/tests/test_table_html.py b/tests/test_table_html.py index d40f017a..6dc26434 100644 --- a/tests/test_table_html.py +++ b/tests/test_table_html.py @@ -1075,3 +1075,17 @@ def test_table_page_title(app_client, path, expected): response = app_client.get(path) title = Soup(response.text, "html.parser").find("title").text assert title == expected + + +@pytest.mark.parametrize("allow_facet", (True, False)) +def test_allow_facet_off(allow_facet): + with make_app_client(settings={"allow_facet": allow_facet}) as client: + response = client.get("/fixtures/facetable") + expected = "DATASETTE_ALLOW_FACET = {};".format( + "true" if allow_facet else "false" + ) + assert expected in response.text + if allow_facet: + assert "Suggested facets" in response.text + else: + assert "Suggested facets" not in response.text From 6b99e4a66ba0ed8fca8ee41ceb7206928b60d5d1 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 25 Mar 2022 16:44:35 -0700 
Subject: [PATCH 0188/1103] Added missing hookimpl import Useful for copying and pasting to create a quick plugin --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 92cf662f..9c1f4402 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -542,7 +542,7 @@ Return a list of ``(regex, view_function)`` pairs, something like this: .. code-block:: python - from datasette import Response + from datasette import hookimpl, Response import html From bd8a58ae61b2c986ef04ea721897906e0852e33e Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 26 Mar 2022 13:51:20 -0700 Subject: [PATCH 0189/1103] Fix message_type in documentation, closes #1689 --- docs/internals.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/internals.rst b/docs/internals.rst index f9a24fea..0ba3fa69 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -458,8 +458,8 @@ Returns the original, decoded object that was passed to :ref:`datasette_sign`. I .. _datasette_add_message: -.add_message(request, message, message_type=datasette.INFO) ------------------------------------------------------------ +.add_message(request, message, type=datasette.INFO) +--------------------------------------------------- ``request`` - Request The current Request object @@ -467,7 +467,7 @@ Returns the original, decoded object that was passed to :ref:`datasette_sign`. I ``message`` - string The message string -``message_type`` - constant, optional +``type`` - constant, optional The message type - ``datasette.INFO``, ``datasette.WARNING`` or ``datasette.ERROR`` Datasette's flash messaging mechanism allows you to add a message that will be displayed to the user on the next page that they visit. Messages are persisted in a ``ds_messages`` cookie. This method adds a message to that cookie. From e73fa72917ca28c152208d62d07a490c81cadf52 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 26 Mar 2022 15:46:08 -0700 Subject: [PATCH 0190/1103] Fixed bug in httpx_mock example, closes #1691 --- docs/testing_plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 1291a875..8e4e3f91 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -198,7 +198,7 @@ Here's a test for that plugin that mocks the HTTPX outbound request: async def test_outbound_http_call(httpx_mock): httpx_mock.add_response( url='https://www.example.com/', - data='Hello world', + text='Hello world', ) datasette = Datasette([], memory=True) response = await datasette.client.post("/-/fetch-url", data={ From 5c5e9b365790d7c75cf2611e650d1013f587d316 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 31 Mar 2022 19:01:58 -0700 Subject: [PATCH 0191/1103] Request.fake(... url_vars), plus .fake() is now documented Also made 'from datasette import Request' shortcut work. 
Closes #1697 --- datasette/__init__.py | 2 +- datasette/utils/asgi.py | 4 +++- docs/internals.rst | 27 +++++++++++++++++++++++++++ tests/test_internals_request.py | 7 +++++++ 4 files changed, 38 insertions(+), 2 deletions(-) diff --git a/datasette/__init__.py b/datasette/__init__.py index faa36051..ea10c13d 100644 --- a/datasette/__init__.py +++ b/datasette/__init__.py @@ -1,5 +1,5 @@ from datasette.version import __version_info__, __version__ # noqa -from datasette.utils.asgi import Forbidden, NotFound, Response # noqa +from datasette.utils.asgi import Forbidden, NotFound, Request, Response # noqa from datasette.utils import actor_matches_allow # noqa from .hookspecs import hookimpl # noqa from .hookspecs import hookspec # noqa diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index cd3ec654..8a2fa060 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -118,7 +118,7 @@ class Request: return dict(parse_qsl(body.decode("utf-8"), keep_blank_values=True)) @classmethod - def fake(cls, path_with_query_string, method="GET", scheme="http"): + def fake(cls, path_with_query_string, method="GET", scheme="http", url_vars=None): """Useful for constructing Request objects for tests""" path, _, query_string = path_with_query_string.partition("?") scope = { @@ -130,6 +130,8 @@ class Request: "scheme": scheme, "type": "http", } + if url_vars: + scope["url_route"] = {"kwargs": url_vars} return cls(scope, None) diff --git a/docs/internals.rst b/docs/internals.rst index 0ba3fa69..854b96f8 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -60,6 +60,33 @@ The object also has two awaitable methods: ``await request.post_body()`` - bytes Returns the un-parsed body of a request submitted by ``POST`` - useful for things like incoming JSON data. +And a class method that can be used to create fake request objects for use in tests: + +``fake(path_with_query_string, method="GET", scheme="http", url_vars=None)`` + Returns a ``Request`` instance for the specified path and method. For example: + + .. code-block:: python + + from datasette import Request + from pprint import pprint + + request = Request.fake("/fixtures/facetable/", url_vars={ + "database": "fixtures", + "table": "facetable" + }) + pprint(request.scope) + + This outputs:: + + {'http_version': '1.1', + 'method': 'GET', + 'path': '/fixtures/facetable/', + 'query_string': b'', + 'raw_path': b'/fixtures/facetable/', + 'scheme': 'http', + 'type': 'http', + 'url_route': {'kwargs': {'database': 'fixtures', 'table': 'facetable'}}} + .. 
_internals_multiparams: The MultiParams class diff --git a/tests/test_internals_request.py b/tests/test_internals_request.py index 44aaa153..d1ca1f46 100644 --- a/tests/test_internals_request.py +++ b/tests/test_internals_request.py @@ -75,6 +75,13 @@ def test_request_args(): request.args["missing"] +def test_request_fake_url_vars(): + request = Request.fake("/") + assert request.url_vars == {} + request = Request.fake("/", url_vars={"database": "fixtures"}) + assert request.url_vars == {"database": "fixtures"} + + def test_request_repr(): request = Request.fake("/foo?multi=1&multi=2&single=3") assert ( From df88d03298fa34d141ace7d6d8c35ca5e70576da Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 2 Apr 2022 23:05:10 -0700 Subject: [PATCH 0192/1103] Warn about Cloud Run and bots Refs #1698 --- docs/publish.rst | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/docs/publish.rst b/docs/publish.rst index 1d9664e7..166f2883 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -20,7 +20,14 @@ You will need a hosting account with `Heroku <https://www.heroku.com/>`__ or `Go Publishing to Google Cloud Run ------------------------------ -`Google Cloud Run <https://cloud.google.com/run/>`__ launched as a GA in in November 2019. It allows you to publish data in a scale-to-zero environment, so your application will start running when the first request is received and will shut down again when traffic ceases. This means you only pay for time spent serving traffic. +`Google Cloud Run <https://cloud.google.com/run/>`__ allows you to publish data in a scale-to-zero environment, so your application will start running when the first request is received and will shut down again when traffic ceases. This means you only pay for time spent serving traffic. + +.. warning:: + Cloud Run is a great option for inexpensively hosting small, low traffic projects - but costs can add up for projects that serve a lot of requests. + + Be particularly careful if your project has tables with large numbers of rows. Search engine crawlers that index a page for every row could result in a high bill. + + The `datasette-block-robots <https://datasette.io/plugins/datasette-block-robots>`__ plugin can be used to request search engine crawlers omit crawling your site, which can help avoid this issue. You will first need to install and configure the Google Cloud CLI tools by following `these instructions <https://cloud.google.com/sdk/>`__. @@ -171,4 +178,4 @@ You can customize the port that is exposed by the container using the ``--port`` A full list of options can be seen by running ``datasette package --help``: -See :ref:`cli_help_package___help` for the full list of options for this command. \ No newline at end of file +See :ref:`cli_help_package___help` for the full list of options for this command. From 90d1be9952db9aaddc21a536e4d00a8de44765d7 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 6 Apr 2022 08:55:01 -0700 Subject: [PATCH 0193/1103] Tilde encoding now encodes space as plus, closes #1701 Refs #1657 --- datasette/utils/__init__.py | 12 ++++++++++-- docs/internals.rst | 6 ++++-- tests/test_html.py | 6 +++--- tests/test_utils.py | 1 + 4 files changed, 18 insertions(+), 7 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 9109f823..4745254e 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1113,12 +1113,20 @@ _TILDE_ENCODING_SAFE = frozenset( # '.' 
and '~' ) +_space = ord(" ") + class TildeEncoder(dict): # Keeps a cache internally, via __missing__ def __missing__(self, b): + print("b is ", b) # Handle a cache miss, store encoded string in cache and return. - res = chr(b) if b in _TILDE_ENCODING_SAFE else "~{:02X}".format(b) + if b in _TILDE_ENCODING_SAFE: + res = chr(b) + elif b == _space: + res = "+" + else: + res = "~{:02X}".format(b) self[b] = res return res @@ -1138,7 +1146,7 @@ def tilde_decode(s: str) -> str: # Avoid accidentally decoding a %2f style sequence temp = secrets.token_hex(16) s = s.replace("%", temp) - decoded = urllib.parse.unquote(s.replace("~", "%")) + decoded = urllib.parse.unquote_plus(s.replace("~", "%")) return decoded.replace(temp, "%") diff --git a/docs/internals.rst b/docs/internals.rst index 854b96f8..76e27e5f 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -980,15 +980,17 @@ Datasette uses a custom encoding scheme in some places, called **tilde encoding* Tilde encoding uses the same algorithm as `URL percent-encoding <https://developer.mozilla.org/en-US/docs/Glossary/percent-encoding>`__, but with the ``~`` tilde character used in place of ``%``. -Any character other than ``ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz 0123456789_-`` will be replaced by the numeric equivalent preceded by a tilde. For example: +Any character other than ``ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz0123456789_-`` will be replaced by the numeric equivalent preceded by a tilde. For example: - ``/`` becomes ``~2F`` - ``.`` becomes ``~2E`` - ``%`` becomes ``~25`` - ``~`` becomes ``~7E`` -- Space character becomes ``~20`` +- Space becomes ``+`` - ``polls/2022.primary`` becomes ``polls~2F2022~2Eprimary`` +Note that the space character is a special case: it will be replaced with a ``+`` symbol. + .. _internals_utils_tilde_encode: .. 
autofunction:: datasette.utils.tilde_encode diff --git a/tests/test_html.py b/tests/test_html.py index 6e4c22b1..42f1a3ee 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -28,7 +28,7 @@ def test_homepage(app_client_two_attached_databases): ) # Should be two attached databases assert [ - {"href": "/extra~20database", "text": "extra database"}, + {"href": "/extra+database", "text": "extra database"}, {"href": "/fixtures", "text": "fixtures"}, ] == [{"href": a["href"], "text": a.text.strip()} for a in soup.select("h2 a")] # Database should show count text and attached tables @@ -43,8 +43,8 @@ def test_homepage(app_client_two_attached_databases): {"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a") ] assert [ - {"href": r"/extra~20database/searchable", "text": "searchable"}, - {"href": r"/extra~20database/searchable_view", "text": "searchable_view"}, + {"href": r"/extra+database/searchable", "text": "searchable"}, + {"href": r"/extra+database/searchable_view", "text": "searchable_view"}, ] == table_links diff --git a/tests/test_utils.py b/tests/test_utils.py index 7b41a87f..df788767 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -618,6 +618,7 @@ async def test_derive_named_parameters(sql, expected): ("-/db-/table.csv", "-~2Fdb-~2Ftable~2Ecsv"), (r"%~-/", "~25~7E-~2F"), ("~25~7E~2D~2F", "~7E25~7E7E~7E2D~7E2F"), + ("with space", "with+space"), ), ) def test_tilde_encoding(original, expected): From 247e460e08bf823142f7b84058fe44e43626787f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Apr 2022 15:51:04 -0700 Subject: [PATCH 0194/1103] Update beautifulsoup4 requirement (#1703) Updates the requirements on [beautifulsoup4](https://www.crummy.com/software/BeautifulSoup/bs4/) to permit the latest version. --- updated-dependencies: - dependency-name: beautifulsoup4 dependency-type: direct:development ... Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 4b58b8c4..77fca8cd 100644 --- a/setup.py +++ b/setup.py @@ -70,7 +70,7 @@ setup( "pytest>=5.2.2,<7.2.0", "pytest-xdist>=2.2.1,<2.6", "pytest-asyncio>=0.17,<0.19", - "beautifulsoup4>=4.8.1,<4.11.0", + "beautifulsoup4>=4.8.1,<4.12.0", "black==22.1.0", "pytest-timeout>=1.4.2,<2.2", "trustme>=0.7,<0.10", From 138e4d9a53e3982137294ba383303c3a848cfca4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Apr 2022 16:05:09 -0700 Subject: [PATCH 0195/1103] Update click requirement from <8.1.0,>=7.1.1 to >=7.1.1,<8.2.0 (#1694) Updates the requirements on [click](https://github.com/pallets/click) to permit the latest version. - [Release notes](https://github.com/pallets/click/releases) - [Changelog](https://github.com/pallets/click/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/click/compare/7.1.1...8.1.0) --- updated-dependencies: - dependency-name: click dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 77fca8cd..e5dd55fd 100644 --- a/setup.py +++ b/setup.py @@ -43,7 +43,7 @@ setup( python_requires=">=3.7", install_requires=[ "asgiref>=3.2.10,<3.6.0", - "click>=7.1.1,<8.1.0", + "click>=7.1.1,<8.2.0", "click-default-group~=1.2.2", "Jinja2>=2.10.3,<3.1.0", "hupper~=1.9", From 143c105f875f4c8d4512233fa856477a938b38ca Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 12 Apr 2022 11:43:32 -0700 Subject: [PATCH 0196/1103] Removed rogue print --- datasette/utils/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 4745254e..77768112 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1119,7 +1119,6 @@ _space = ord(" ") class TildeEncoder(dict): # Keeps a cache internally, via __missing__ def __missing__(self, b): - print("b is ", b) # Handle a cache miss, store encoded string in cache and return. if b in _TILDE_ENCODING_SAFE: res = chr(b) From 0bc5186b7bb4fc82392df08f99a9132f84dcb331 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 12 Apr 2022 11:44:12 -0700 Subject: [PATCH 0197/1103] Tooltip and commas for byte length display, closes #1712 --- datasette/views/database.py | 12 +++++++++--- datasette/views/table.py | 8 +++++++- tests/test_table_html.py | 26 ++++++++++++++++++++++++++ 3 files changed, 42 insertions(+), 4 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index bdd433cc..9a8aca32 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -11,6 +11,7 @@ from datasette.utils import ( add_cors_headers, await_me_maybe, derive_named_parameters, + format_bytes, tilde_decode, to_css_class, validate_sql_select, @@ -399,13 +400,18 @@ class QueryView(DataView): ).hexdigest(), }, ) - display_value = Markup( - '<a class="blob-download" href="{}"><Binary: {} byte{}></a>'.format( + formatted = format_bytes(len(value)) + display_value = markupsafe.Markup( + '<a class="blob-download" href="{}"{}><Binary: {:,} byte{}></a>'.format( blob_url, - len(display_value), + ' title="{}"'.format(formatted) + if "bytes" not in formatted + else "", + len(value), "" if len(value) == 1 else "s", ) ) + display_row.append(display_value) display_rows.append(display_row) diff --git a/datasette/views/table.py b/datasette/views/table.py index cd7afea6..dc85165e 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -12,10 +12,12 @@ from datasette.utils import ( MultiParams, append_querystring, compound_keys_after_sql, + format_bytes, tilde_decode, tilde_encode, escape_sqlite, filters_should_redirect, + format_bytes, is_url, path_from_row_pks, path_with_added_args, @@ -175,14 +177,18 @@ class RowTableShared(DataView): if plugin_display_value: display_value = plugin_display_value elif isinstance(value, bytes): + formatted = format_bytes(len(value)) display_value = markupsafe.Markup( - '<a class="blob-download" href="{}"><Binary: {} byte{}></a>'.format( + '<a class="blob-download" href="{}"{}><Binary: {:,} byte{}></a>'.format( self.ds.urls.row_blob( database, table, path_from_row_pks(row, pks, not pks), column, ), + ' title="{}"'.format(formatted) + if "bytes" not in formatted + else "", len(value), "" if len(value) == 1 else "s", ) diff --git 
a/tests/test_table_html.py b/tests/test_table_html.py index 6dc26434..d3cb3e17 100644 --- a/tests/test_table_html.py +++ b/tests/test_table_html.py @@ -1,3 +1,4 @@ +from datasette.app import Datasette, Database from bs4 import BeautifulSoup as Soup from .fixtures import ( # noqa app_client, @@ -1089,3 +1090,28 @@ def test_allow_facet_off(allow_facet): assert "Suggested facets" in response.text else: assert "Suggested facets" not in response.text + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "size,title,length_bytes", + ( + (2000, ' title="2.0 KB"', "2,000"), + (20000, ' title="19.5 KB"', "20,000"), + (20, "", "20"), + ), +) +async def test_format_of_binary_links(size, title, length_bytes): + ds = Datasette() + db_name = "binary-links-{}".format(size) + db = ds.add_memory_database(db_name) + sql = "select zeroblob({}) as blob".format(size) + await db.execute_write("create table blobs as {}".format(sql)) + response = await ds.client.get("/{}/blobs".format(db_name)) + assert response.status_code == 200 + expected = "{}><Binary: {} bytes></a>".format(title, length_bytes) + assert expected in response.text + # And test with arbitrary SQL query too + sql_response = await ds.client.get("/{}".format(db_name), params={"sql": sql}) + assert sql_response.status_code == 200 + assert expected in sql_response.text From 8338c66a57502ef27c3d7afb2527fbc0663b2570 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 21 Apr 2022 11:05:43 -0700 Subject: [PATCH 0198/1103] datasette-geojson is an example of register_output_renderer --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 9c1f4402..67842fc4 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -526,7 +526,7 @@ And here is an example ``can_render`` function which returns ``True`` only if th def can_render_demo(columns): return {"atom_id", "atom_title", "atom_updated"}.issubset(columns) -Examples: `datasette-atom <https://datasette.io/plugins/datasette-atom>`_, `datasette-ics <https://datasette.io/plugins/datasette-ics>`_ +Examples: `datasette-atom <https://datasette.io/plugins/datasette-atom>`_, `datasette-ics <https://datasette.io/plugins/datasette-ics>`_, `datasette-geojson <https://datasette.io/plugins/datasette-geojson>`__ .. 
_plugin_register_routes: From d57c347f35bcd8cff15f913da851b4b8eb030867 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 22 Apr 2022 14:58:46 -0700 Subject: [PATCH 0199/1103] Ignore Black commits in git blame, refs #1716 --- .git-blame-ignore-revs | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 .git-blame-ignore-revs diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 00000000..84e574fd --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,4 @@ +# Applying Black +35d6ee2790e41e96f243c1ff58be0c9c0519a8ce +368638555160fb9ac78f462d0f79b1394163fa30 +2b344f6a34d2adaa305996a1a580ece06397f6e4 From 3001e1e394b6cb605c2cd81eed671a7da419c1b3 Mon Sep 17 00:00:00 2001 From: Tim Sherratt <tim@discontents.com.au> Date: Mon, 25 Apr 2022 00:03:08 +1000 Subject: [PATCH 0200/1103] Add timeout option to Cloudrun build (#1717) * Add timeout option for build phase * Make the --timeout setting optional * Add test for --timeout setting Thanks, @wragge --- datasette/publish/cloudrun.py | 12 +++++++++++- tests/test_publish_cloudrun.py | 31 +++++++++++++++++++------------ 2 files changed, 30 insertions(+), 13 deletions(-) diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index a1e2f580..11a39fb2 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -41,6 +41,10 @@ def publish_subcommand(publish): type=click.Choice(["1", "2", "4"]), help="Number of vCPUs to allocate in Cloud Run", ) + @click.option( + "--timeout", + help="Build timeout in seconds", + ) @click.option( "--apt-get-install", "apt_get_extras", @@ -72,6 +76,7 @@ def publish_subcommand(publish): show_files, memory, cpu, + timeout, apt_get_extras, ): "Publish databases to Datasette running on Cloud Run" @@ -156,7 +161,12 @@ def publish_subcommand(publish): print("\n====================\n") image_id = f"gcr.io/{project}/{name}" - check_call(f"gcloud builds submit --tag {image_id}", shell=True) + check_call( + "gcloud builds submit --tag {}{}".format( + image_id, " --timeout {}".format(timeout) if timeout else "" + ), + shell=True, + ) check_call( "gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}{}".format( image_id, diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index 9c8c38cf..3427f4f7 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -105,18 +105,19 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which, tmp_path_factory): @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @pytest.mark.parametrize( - "memory,cpu,expected_gcloud_args", + "memory,cpu,timeout,expected_gcloud_args", [ - ["1Gi", None, "--memory 1Gi"], - ["2G", None, "--memory 2G"], - ["256Mi", None, "--memory 256Mi"], - ["4", None, None], - ["GB", None, None], - [None, 1, "--cpu 1"], - [None, 2, "--cpu 2"], - [None, 3, None], - [None, 4, "--cpu 4"], - ["2G", 4, "--memory 2G --cpu 4"], + ["1Gi", None, None, "--memory 1Gi"], + ["2G", None, None, "--memory 2G"], + ["256Mi", None, None, "--memory 256Mi"], + ["4", None, None, None], + ["GB", None, None, None], + [None, 1, None, "--cpu 1"], + [None, 2, None, "--cpu 2"], + [None, 3, None, None], + [None, 4, None, "--cpu 4"], + ["2G", 4, None, "--memory 2G --cpu 4"], + [None, None, 1800, "--timeout 1800"], ], ) def test_publish_cloudrun_memory_cpu( @@ -125,6 +126,7 @@ def test_publish_cloudrun_memory_cpu( mock_which, memory, cpu, + timeout, expected_gcloud_args, 
tmp_path_factory, ): @@ -139,6 +141,8 @@ def test_publish_cloudrun_memory_cpu( args.extend(["--memory", memory]) if cpu: args.extend(["--cpu", str(cpu)]) + if timeout: + args.extend(["--timeout", str(timeout)]) result = runner.invoke(cli.cli, args) if expected_gcloud_args is None: assert 2 == result.exit_code @@ -149,13 +153,16 @@ def test_publish_cloudrun_memory_cpu( "gcloud run deploy --allow-unauthenticated --platform=managed" " --image {} test".format(tag) ) + expected_build_call = f"gcloud builds submit --tag {tag}" if memory: expected_call += " --memory {}".format(memory) if cpu: expected_call += " --cpu {}".format(cpu) + if timeout: + expected_build_call += f" --timeout {timeout}" mock_call.assert_has_calls( [ - mock.call(f"gcloud builds submit --tag {tag}", shell=True), + mock.call(expected_build_call, shell=True), mock.call( expected_call, shell=True, From 4bd3a30e1ea460e17011c11c16408300b87d1106 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 24 Apr 2022 07:04:11 -0700 Subject: [PATCH 0201/1103] Update cog docs for publish cloudrun, refs #1717 --- docs/cli-reference.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 69670d8a..3ca48aa2 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -248,6 +248,7 @@ datasette publish cloudrun --help metadata.json --memory TEXT Memory to allocate in Cloud Run, e.g. 1Gi --cpu [1|2|4] Number of vCPUs to allocate in Cloud Run + --timeout TEXT Build timeout in seconds --apt-get-install TEXT Additional packages to apt-get install --help Show this message and exit. From e64d14e413a955a10df88e106a8b5f1572ec8613 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 24 Apr 2022 07:09:08 -0700 Subject: [PATCH 0202/1103] Use type integer for --timeout, refs #1717 --- datasette/publish/cloudrun.py | 1 + docs/cli-reference.rst | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 11a39fb2..50b2b2fd 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -43,6 +43,7 @@ def publish_subcommand(publish): ) @click.option( "--timeout", + type=int, help="Build timeout in seconds", ) @click.option( diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 3ca48aa2..2a6fbfc8 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -248,7 +248,7 @@ datasette publish cloudrun --help metadata.json --memory TEXT Memory to allocate in Cloud Run, e.g. 1Gi --cpu [1|2|4] Number of vCPUs to allocate in Cloud Run - --timeout TEXT Build timeout in seconds + --timeout INTEGER Build timeout in seconds --apt-get-install TEXT Additional packages to apt-get install --help Show this message and exit. 
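
The conditional flag construction added in the patches above is easy to sketch in isolation. The following is a rough, standalone approximation; build_submit_command is a hypothetical helper invented only for this example, while in the actual patch the option is declared through Click and the formatted string is passed straight to check_call:

    def build_submit_command(image_id, timeout=None):
        # Append " --timeout <seconds>" only when a timeout was supplied,
        # mirroring the conditional formatting used in publish cloudrun
        return "gcloud builds submit --tag {}{}".format(
            image_id, " --timeout {}".format(timeout) if timeout else ""
        )


    assert (
        build_submit_command("gcr.io/my-project/my-app")
        == "gcloud builds submit --tag gcr.io/my-project/my-app"
    )
    assert (
        build_submit_command("gcr.io/my-project/my-app", timeout=1800)
        == "gcloud builds submit --tag gcr.io/my-project/my-app --timeout 1800"
    )

Leaving the flag off entirely when no --timeout is given keeps the default gcloud build timeout unchanged, which matches the option being declared as optional above.
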
From 40ef8ebac2d83c34f467fd2d7bf80f0549b6f6c3 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 24 Apr 2022 07:10:13 -0700 Subject: [PATCH 0203/1103] Run tests on pull requests --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 478e1f34..c11bfa2e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,6 +1,6 @@ name: Test -on: [push] +on: [push, pull_request] jobs: test: From 36573638b0948174ae237d62e6369b7d55220d7f Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 24 Apr 2022 08:50:43 -0700 Subject: [PATCH 0204/1103] Apply Black to code examples in documentation, refs #1718 Uses blacken-docs. This has a deliberate error which I hope will fail CI. --- .github/workflows/test.yml | 5 ++++ docs/contributing.rst | 9 +++++++ docs/spatialite.rst | 50 ++++++++++++++++++++++------------ docs/writing_plugins.rst | 55 ++++++++++++++++++++++---------------- setup.py | 3 ++- 5 files changed, 81 insertions(+), 41 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index c11bfa2e..38b62995 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -32,3 +32,8 @@ jobs: - name: Check if cog needs to be run run: | cog --check docs/*.rst + - name: Check if blacken-docs needs to be run + run: | + blacken-docs -l 60 docs/*.rst + # This fails if a diff was generated: + git diff-index --quiet HEAD -- diff --git a/docs/contributing.rst b/docs/contributing.rst index b74f2f36..c193ba49 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -144,6 +144,15 @@ If any of your code does not conform to Black you can run this to automatically All done! ✨ 🍰 ✨ 1 file reformatted, 94 files left unchanged. +.. _contributing_formatting_blacken_docs: + +blacken-docs +~~~~~~~~~~~~ + +The `blacken-docs <https://pypi.org/project/blacken-docs/>`__ command applies Black formatting rules to code examples in the documentation. Run it like this:: + + blacken-docs -l 60 docs/*.rst + .. _contributing_formatting_prettier: Prettier diff --git a/docs/spatialite.rst b/docs/spatialite.rst index d1b300b2..52b6747e 100644 --- a/docs/spatialite.rst +++ b/docs/spatialite.rst @@ -58,21 +58,28 @@ Here's a recipe for taking a table with existing latitude and longitude columns, .. 
code-block:: python import sqlite3 - conn = sqlite3.connect('museums.db') + + conn = sqlite3.connect("museums.db") # Lead the spatialite extension: conn.enable_load_extension(True) - conn.load_extension('/usr/local/lib/mod_spatialite.dylib') + conn.load_extension("/usr/local/lib/mod_spatialite.dylib") # Initialize spatial metadata for this database: - conn.execute('select InitSpatialMetadata(1)') + conn.execute("select InitSpatialMetadata(1)") # Add a geometry column called point_geom to our museums table: - conn.execute("SELECT AddGeometryColumn('museums', 'point_geom', 4326, 'POINT', 2);") + conn.execute( + "SELECT AddGeometryColumn('museums', 'point_geom', 4326, 'POINT', 2);" + ) # Now update that geometry column with the lat/lon points - conn.execute(''' + conn.execute( + """ UPDATE museums SET point_geom = GeomFromText('POINT('||"longitude"||' '||"latitude"||')',4326); - ''') + """ + ) # Now add a spatial index to that column - conn.execute('select CreateSpatialIndex("museums", "point_geom");') + conn.execute( + 'select CreateSpatialIndex("museums", "point_geom");' + ) # If you don't commit your changes will not be persisted: conn.commit() conn.close() @@ -186,28 +193,37 @@ Here's Python code to create a SQLite database, enable SpatiaLite, create a plac .. code-block:: python import sqlite3 - conn = sqlite3.connect('places.db') + + conn = sqlite3.connect("places.db") # Enable SpatialLite extension conn.enable_load_extension(True) - conn.load_extension('/usr/local/lib/mod_spatialite.dylib') + conn.load_extension("/usr/local/lib/mod_spatialite.dylib") # Create the masic countries table - conn.execute('select InitSpatialMetadata(1)') - conn.execute('create table places (id integer primary key, name text);') + conn.execute("select InitSpatialMetadata(1)") + conn.execute( + "create table places (id integer primary key, name text);" + ) # Add a MULTIPOLYGON Geometry column - conn.execute("SELECT AddGeometryColumn('places', 'geom', 4326, 'MULTIPOLYGON', 2);") + conn.execute( + "SELECT AddGeometryColumn('places', 'geom', 4326, 'MULTIPOLYGON', 2);" + ) # Add a spatial index against the new column conn.execute("SELECT CreateSpatialIndex('places', 'geom');") # Now populate the table from shapely.geometry.multipolygon import MultiPolygon from shapely.geometry import shape import requests - geojson = requests.get('https://data.whosonfirst.org/404/227/475/404227475.geojson').json() + + geojson = requests.get( + "https://data.whosonfirst.org/404/227/475/404227475.geojson" + ).json() # Convert to "Well Known Text" format - wkt = shape(geojson['geometry']).wkt + wkt = shape(geojson["geometry"]).wkt # Insert and commit the record - conn.execute("INSERT INTO places (id, name, geom) VALUES(null, ?, GeomFromText(?, 4326))", ( - "Wales", wkt - )) + conn.execute( + "INSERT INTO places (id, name, geom) VALUES(null, ?, GeomFromText(?, 4326))", + ("Wales", wkt), + ) conn.commit() Querying polygons using within() diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index bd60a4b6..89f7f5eb 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -18,9 +18,12 @@ The quickest way to start writing a plugin is to create a ``my_plugin.py`` file from datasette import hookimpl + @hookimpl def prepare_connection(conn): - conn.create_function('hello_world', 0, lambda: 'Hello world!') + conn.create_function( + "hello_world", 0, lambda: "Hello world!" 
+ ) If you save this in ``plugins/my_plugin.py`` you can then start Datasette like this:: @@ -60,22 +63,22 @@ The example consists of two files: a ``setup.py`` file that defines the plugin: from setuptools import setup - VERSION = '0.1' + VERSION = "0.1" setup( - name='datasette-plugin-demos', - description='Examples of plugins for Datasette', - author='Simon Willison', - url='https://github.com/simonw/datasette-plugin-demos', - license='Apache License, Version 2.0', + name="datasette-plugin-demos", + description="Examples of plugins for Datasette", + author="Simon Willison", + url="https://github.com/simonw/datasette-plugin-demos", + license="Apache License, Version 2.0", version=VERSION, - py_modules=['datasette_plugin_demos'], + py_modules=["datasette_plugin_demos"], entry_points={ - 'datasette': [ - 'plugin_demos = datasette_plugin_demos' + "datasette": [ + "plugin_demos = datasette_plugin_demos" ] }, - install_requires=['datasette'] + install_requires=["datasette"], ) And a Python module file, ``datasette_plugin_demos.py``, that implements the plugin: @@ -88,12 +91,14 @@ And a Python module file, ``datasette_plugin_demos.py``, that implements the plu @hookimpl def prepare_jinja2_environment(env): - env.filters['uppercase'] = lambda u: u.upper() + env.filters["uppercase"] = lambda u: u.upper() @hookimpl def prepare_connection(conn): - conn.create_function('random_integer', 2, random.randint) + conn.create_function( + "random_integer", 2, random.randint + ) Having built a plugin in this way you can turn it into an installable package using the following command:: @@ -123,11 +128,13 @@ To bundle the static assets for a plugin in the package that you publish to PyPI .. code-block:: python - package_data={ - 'datasette_plugin_name': [ - 'static/plugin.js', - ], - }, + package_data = ( + { + "datasette_plugin_name": [ + "static/plugin.js", + ], + }, + ) Where ``datasette_plugin_name`` is the name of the plugin package (note that it uses underscores, not hyphens) and ``static/plugin.js`` is the path within that package to the static file. @@ -152,11 +159,13 @@ Templates should be bundled for distribution using the same ``package_data`` mec .. code-block:: python - package_data={ - 'datasette_plugin_name': [ - 'templates/my_template.html', - ], - }, + package_data = ( + { + "datasette_plugin_name": [ + "templates/my_template.html", + ], + }, + ) You can also use wildcards here such as ``templates/*.html``. See `datasette-edit-schema <https://github.com/simonw/datasette-edit-schema>`__ for an example of this pattern. 
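
To show how the static asset and template examples above fit into a complete packaging file, here is a minimal sketch of a plugin setup.py. The names datasette-plugin-name and datasette_plugin_name are placeholders, and the sketch assumes the plugin is laid out as a package directory rather than a single module, since package_data only applies to packages:

    from setuptools import setup

    setup(
        name="datasette-plugin-name",
        version="0.1",
        packages=["datasette_plugin_name"],
        entry_points={
            "datasette": [
                "plugin_name = datasette_plugin_name"
            ]
        },
        install_requires=["datasette"],
        # package_data maps the package name (underscores, not hyphens)
        # to the asset paths bundled inside it
        package_data={
            "datasette_plugin_name": [
                "static/plugin.js",
                "templates/*.html",
            ],
        },
    )

In a real plugin, package_data is passed as one of the keyword arguments to setup(), alongside the other arguments shown here.
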
diff --git a/setup.py b/setup.py index e5dd55fd..7f0562fd 100644 --- a/setup.py +++ b/setup.py @@ -65,13 +65,14 @@ setup( """, setup_requires=["pytest-runner"], extras_require={ - "docs": ["sphinx_rtd_theme", "sphinx-autobuild", "codespell"], + "docs": ["sphinx_rtd_theme", "sphinx-autobuild", "codespell", "blacken-docs"], "test": [ "pytest>=5.2.2,<7.2.0", "pytest-xdist>=2.2.1,<2.6", "pytest-asyncio>=0.17,<0.19", "beautifulsoup4>=4.8.1,<4.12.0", "black==22.1.0", + "blacken-docs==1.12.1", "pytest-timeout>=1.4.2,<2.2", "trustme>=0.7,<0.10", "cogapp>=3.3.0", From 92b26673d86a663050c9a40a8ffd5b56c25be85f Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 24 Apr 2022 08:51:09 -0700 Subject: [PATCH 0205/1103] Fix blacken-docs errors and warnings, refs #1718 --- docs/authentication.rst | 25 ++-- docs/internals.rst | 98 ++++++++----- docs/json_api.rst | 2 +- docs/plugin_hooks.rst | 305 +++++++++++++++++++++++++++------------- 4 files changed, 289 insertions(+), 141 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 0d98cf82..24960733 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -381,11 +381,10 @@ Authentication plugins can set signed ``ds_actor`` cookies themselves like so: .. code-block:: python response = Response.redirect("/") - response.set_cookie("ds_actor", datasette.sign({ - "a": { - "id": "cleopaws" - } - }, "actor")) + response.set_cookie( + "ds_actor", + datasette.sign({"a": {"id": "cleopaws"}}, "actor"), + ) Note that you need to pass ``"actor"`` as the namespace to :ref:`datasette_sign`. @@ -412,12 +411,16 @@ To include an expiry, add a ``"e"`` key to the cookie value containing a `base62 expires_at = int(time.time()) + (24 * 60 * 60) response = Response.redirect("/") - response.set_cookie("ds_actor", datasette.sign({ - "a": { - "id": "cleopaws" - }, - "e": baseconv.base62.encode(expires_at), - }, "actor")) + response.set_cookie( + "ds_actor", + datasette.sign( + { + "a": {"id": "cleopaws"}, + "e": baseconv.base62.encode(expires_at), + }, + "actor", + ), + ) The resulting cookie will encode data that looks something like this: diff --git a/docs/internals.rst b/docs/internals.rst index 76e27e5f..aad608dc 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -70,10 +70,10 @@ And a class method that can be used to create fake request objects for use in te from datasette import Request from pprint import pprint - request = Request.fake("/fixtures/facetable/", url_vars={ - "database": "fixtures", - "table": "facetable" - }) + request = Request.fake( + "/fixtures/facetable/", + url_vars={"database": "fixtures", "table": "facetable"}, + ) pprint(request.scope) This outputs:: @@ -146,7 +146,7 @@ For example: response = Response( "<xml>This is XML</xml>", - content_type="application/xml; charset=utf-8" + content_type="application/xml; charset=utf-8", ) The quickest way to create responses is using the ``Response.text(...)``, ``Response.html(...)``, ``Response.json(...)`` or ``Response.redirect(...)`` helper methods: @@ -157,9 +157,13 @@ The quickest way to create responses is using the ``Response.text(...)``, ``Resp html_response = Response.html("This is HTML") json_response = Response.json({"this_is": "json"}) - text_response = Response.text("This will become utf-8 encoded text") + text_response = Response.text( + "This will become utf-8 encoded text" + ) # Redirects are served as 302, unless you pass status=301: - redirect_response = Response.redirect("https://latest.datasette.io/") + redirect_response = 
Response.redirect( + "https://latest.datasette.io/" + ) Each of these responses will use the correct corresponding content-type - ``text/html; charset=utf-8``, ``application/json; charset=utf-8`` or ``text/plain; charset=utf-8`` respectively. @@ -207,13 +211,17 @@ To set cookies on the response, use the ``response.set_cookie(...)`` method. The httponly=False, samesite="lax", ): + ... You can use this with :ref:`datasette.sign() <datasette_sign>` to set signed cookies. Here's how you would set the :ref:`ds_actor cookie <authentication_ds_actor>` for use with Datasette :ref:`authentication <authentication>`: .. code-block:: python response = Response.redirect("/") - response.set_cookie("ds_actor", datasette.sign({"a": {"id": "cleopaws"}}, "actor")) + response.set_cookie( + "ds_actor", + datasette.sign({"a": {"id": "cleopaws"}}, "actor"), + ) return response .. _internals_datasette: @@ -236,13 +244,16 @@ You can create your own instance of this - for example to help write tests for a datasette = Datasette(files=["/path/to/my-database.db"]) # Pass metadata as a JSON dictionary like this - datasette = Datasette(files=["/path/to/my-database.db"], metadata={ - "databases": { - "my-database": { - "description": "This is my database" + datasette = Datasette( + files=["/path/to/my-database.db"], + metadata={ + "databases": { + "my-database": { + "description": "This is my database" + } } - } - }) + }, + ) Constructor parameters include: @@ -345,7 +356,7 @@ This is useful when you need to check multiple permissions at once. For example, ("view-table", (database, table)), ("view-database", database), "view-instance", - ] + ], ) .. _datasette_check_visibilty: @@ -406,11 +417,13 @@ The ``db`` parameter should be an instance of the ``datasette.database.Database` from datasette.database import Database - datasette.add_database(Database( - datasette, - path="path/to/my-new-database.db", - is_mutable=True - )) + datasette.add_database( + Database( + datasette, + path="path/to/my-new-database.db", + is_mutable=True, + ) + ) This will add a mutable database and serve it at ``/my-new-database``. @@ -418,8 +431,12 @@ This will add a mutable database and serve it at ``/my-new-database``. .. code-block:: python - db = datasette.add_database(Database(datasette, memory_name="statistics")) - await db.execute_write("CREATE TABLE foo(id integer primary key)") + db = datasette.add_database( + Database(datasette, memory_name="statistics") + ) + await db.execute_write( + "CREATE TABLE foo(id integer primary key)" + ) .. _datasette_add_memory_database: @@ -438,10 +455,9 @@ This is a shortcut for the following: from datasette.database import Database - datasette.add_database(Database( - datasette, - memory_name="statistics" - )) + datasette.add_database( + Database(datasette, memory_name="statistics") + ) Using either of these pattern will result in the in-memory database being served at ``/statistics``. @@ -516,7 +532,9 @@ Returns the absolute URL for the given path, including the protocol and host. Fo .. code-block:: python - absolute_url = datasette.absolute_url(request, "/dbname/table.json") + absolute_url = datasette.absolute_url( + request, "/dbname/table.json" + ) # Would return "http://localhost:8001/dbname/table.json" The current request object is used to determine the hostname and protocol that should be used for the returned URL. The :ref:`setting_force_https_urls` configuration setting is taken into account. 
@@ -578,7 +596,9 @@ These methods can be used with :ref:`internals_datasette_urls` - for example: table_json = ( await datasette.client.get( - datasette.urls.table("fixtures", "facetable", format="json") + datasette.urls.table( + "fixtures", "facetable", format="json" + ) ) ).json() @@ -754,6 +774,7 @@ Example usage: "select sqlite_version()" ).fetchall()[0][0] + version = await db.execute_fn(get_version) .. _database_execute_write: @@ -789,7 +810,7 @@ Like ``execute_write()`` but uses the ``sqlite3`` `conn.executemany() <https://d await db.execute_write_many( "insert into characters (id, name) values (?, ?)", - [(1, "Melanie"), (2, "Selma"), (2, "Viktor")] + [(1, "Melanie"), (2, "Selma"), (2, "Viktor")], ) .. _database_execute_write_fn: @@ -811,10 +832,15 @@ For example: def delete_and_return_count(conn): conn.execute("delete from some_table where id > 5") - return conn.execute("select count(*) from some_table").fetchone()[0] + return conn.execute( + "select count(*) from some_table" + ).fetchone()[0] + try: - num_rows_left = await database.execute_write_fn(delete_and_return_count) + num_rows_left = await database.execute_write_fn( + delete_and_return_count + ) except Exception as e: print("An error occurred:", e) @@ -1021,6 +1047,7 @@ This example uses trace to record the start, end and duration of any HTTP GET re from datasette.tracer import trace import httpx + async def fetch_url(url): with trace("fetch-url", url=url): async with httpx.AsyncClient() as client: @@ -1051,9 +1078,9 @@ This example uses the :ref:`register_routes() <plugin_register_routes>` plugin h from datasette import hookimpl from datasette import tracer + @hookimpl def register_routes(): - async def parallel_queries(datasette): db = datasette.get_database() with tracer.trace_child_tasks(): @@ -1061,7 +1088,12 @@ This example uses the :ref:`register_routes() <plugin_register_routes>` plugin h db.execute("select 1"), db.execute("select 2"), ) - return Response.json({"one": one.single_value(), "two": two.single_value()}) + return Response.json( + { + "one": one.single_value(), + "two": two.single_value(), + } + ) return [ (r"/parallel-queries$", parallel_queries), diff --git a/docs/json_api.rst b/docs/json_api.rst index aa6fcdaa..d3fdb1e4 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -446,7 +446,7 @@ Most of the HTML pages served by Datasette provide a mechanism for discovering t You can find this near the top of the source code of those pages, looking like this: -.. code-block:: python +.. code-block:: html <link rel="alternate" type="application/json+datasette" diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 67842fc4..ace206b7 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -44,9 +44,12 @@ aggregates and collations. 
For example: from datasette import hookimpl import random + @hookimpl def prepare_connection(conn): - conn.create_function('random_integer', 2, random.randint) + conn.create_function( + "random_integer", 2, random.randint + ) This registers a SQL function called ``random_integer`` which takes two arguments and can be called like this:: @@ -72,9 +75,10 @@ example: from datasette import hookimpl + @hookimpl def prepare_jinja2_environment(env): - env.filters['uppercase'] = lambda u: u.upper() + env.filters["uppercase"] = lambda u: u.upper() You can now use this filter in your custom templates like so:: @@ -127,9 +131,7 @@ Here's an example plugin that adds a ``"user_agent"`` variable to the template c @hookimpl def extra_template_vars(request): - return { - "user_agent": request.headers.get("user-agent") - } + return {"user_agent": request.headers.get("user-agent")} This example returns an awaitable function which adds a list of ``hidden_table_names`` to the context: @@ -140,9 +142,12 @@ This example returns an awaitable function which adds a list of ``hidden_table_n async def hidden_table_names(): if database: db = datasette.databases[database] - return {"hidden_table_names": await db.hidden_table_names()} + return { + "hidden_table_names": await db.hidden_table_names() + } else: return {} + return hidden_table_names And here's an example which adds a ``sql_first(sql_query)`` function which executes a SQL statement and returns the first column of the first row of results: @@ -152,8 +157,15 @@ And here's an example which adds a ``sql_first(sql_query)`` function which execu @hookimpl def extra_template_vars(datasette, database): async def sql_first(sql, dbname=None): - dbname = dbname or database or next(iter(datasette.databases.keys())) - return (await datasette.execute(dbname, sql)).rows[0][0] + dbname = ( + dbname + or database + or next(iter(datasette.databases.keys())) + ) + return (await datasette.execute(dbname, sql)).rows[ + 0 + ][0] + return {"sql_first": sql_first} You can then use the new function in a template like so:: @@ -178,6 +190,7 @@ This can be a list of URLs: from datasette import hookimpl + @hookimpl def extra_css_urls(): return [ @@ -191,10 +204,12 @@ Or a list of dictionaries defining both a URL and an @hookimpl def extra_css_urls(): - return [{ - "url": "https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css", - "sri": "sha384-9gVQ4dYFwwWSjIDZnLEWnxCjeSWFphJiwGPXr1jddIhOegiu1FwO5qRGvFXOdJZ4", - }] + return [ + { + "url": "https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css", + "sri": "sha384-9gVQ4dYFwwWSjIDZnLEWnxCjeSWFphJiwGPXr1jddIhOegiu1FwO5qRGvFXOdJZ4", + } + ] This function can also return an awaitable function, useful if it needs to run any async code: @@ -204,7 +219,9 @@ This function can also return an awaitable function, useful if it needs to run a def extra_css_urls(datasette): async def inner(): db = datasette.get_database() - results = await db.execute("select url from css_files") + results = await db.execute( + "select url from css_files" + ) return [r[0] for r in results] return inner @@ -225,12 +242,15 @@ return a list of URLs, a list of dictionaries or an awaitable function that retu from datasette import hookimpl + @hookimpl def extra_js_urls(): - return [{ - "url": "https://code.jquery.com/jquery-3.3.1.slim.min.js", - "sri": "sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo", - }] + return [ + { + "url": "https://code.jquery.com/jquery-3.3.1.slim.min.js", + "sri": 
"sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo", + } + ] You can also return URLs to files from your plugin's ``static/`` directory, if you have one: @@ -239,9 +259,7 @@ you have one: @hookimpl def extra_js_urls(): - return [ - "/-/static-plugins/your-plugin/app.js" - ] + return ["/-/static-plugins/your-plugin/app.js"] Note that `your-plugin` here should be the hyphenated plugin name - the name that is displayed in the list on the `/-/plugins` debug page. @@ -251,9 +269,11 @@ If your code uses `JavaScript modules <https://developer.mozilla.org/en-US/docs/ @hookimpl def extra_js_urls(): - return [{ - "url": "/-/static-plugins/your-plugin/app.js", - "module": True + return [ + { + "url": "/-/static-plugins/your-plugin/app.js", + "module": True, + } ] Examples: `datasette-cluster-map <https://datasette.io/plugins/datasette-cluster-map>`_, `datasette-vega <https://datasette.io/plugins/datasette-vega>`_ @@ -281,7 +301,7 @@ Use a dictionary if you want to specify that the code should be placed in a ``<s def extra_body_script(): return { "module": True, - "script": "console.log('Your JavaScript goes here...')" + "script": "console.log('Your JavaScript goes here...')", } This will add the following to the end of your page: @@ -311,7 +331,9 @@ Let's say you want to build a plugin that adds a ``datasette publish my_hosting_ .. code-block:: python from datasette import hookimpl - from datasette.publish.common import add_common_publish_arguments_and_options + from datasette.publish.common import ( + add_common_publish_arguments_and_options, + ) import click @@ -345,7 +367,7 @@ Let's say you want to build a plugin that adds a ``datasette publish my_hosting_ about_url, api_key, ): - # Your implementation goes here + ... Examples: `datasette-publish-fly <https://datasette.io/plugins/datasette-publish-fly>`_, `datasette-publish-vercel <https://datasette.io/plugins/datasette-publish-vercel>`_ @@ -400,7 +422,9 @@ If the value matches that pattern, the plugin returns an HTML link element: if not isinstance(value, str): return None stripped = value.strip() - if not stripped.startswith("{") and stripped.endswith("}"): + if not stripped.startswith("{") and stripped.endswith( + "}" + ): return None try: data = json.loads(value) @@ -412,14 +436,18 @@ If the value matches that pattern, the plugin returns an HTML link element: return None href = data["href"] if not ( - href.startswith("/") or href.startswith("http://") + href.startswith("/") + or href.startswith("http://") or href.startswith("https://") ): return None - return markupsafe.Markup('<a href="{href}">{label}</a>'.format( - href=markupsafe.escape(data["href"]), - label=markupsafe.escape(data["label"] or "") or " " - )) + return markupsafe.Markup( + '<a href="{href}">{label}</a>'.format( + href=markupsafe.escape(data["href"]), + label=markupsafe.escape(data["label"] or "") + or " ", + ) + ) Examples: `datasette-render-binary <https://datasette.io/plugins/datasette-render-binary>`_, `datasette-render-markdown <https://datasette.io/plugins/datasette-render-markdown>`__, `datasette-json-html <https://datasette.io/plugins/datasette-json-html>`__ @@ -516,7 +544,7 @@ Here is a more complex example: return Response( "\n".join(lines), content_type="text/plain; charset=utf-8", - headers={"x-sqlite-version": result.first()[0]} + headers={"x-sqlite-version": result.first()[0]}, ) And here is an example ``can_render`` function which returns ``True`` only if the query results contain the columns ``atom_id``, ``atom_title`` and 
``atom_updated``: @@ -524,7 +552,11 @@ And here is an example ``can_render`` function which returns ``True`` only if th .. code-block:: python def can_render_demo(columns): - return {"atom_id", "atom_title", "atom_updated"}.issubset(columns) + return { + "atom_id", + "atom_title", + "atom_updated", + }.issubset(columns) Examples: `datasette-atom <https://datasette.io/plugins/datasette-atom>`_, `datasette-ics <https://datasette.io/plugins/datasette-ics>`_, `datasette-geojson <https://datasette.io/plugins/datasette-geojson>`__ @@ -548,16 +580,14 @@ Return a list of ``(regex, view_function)`` pairs, something like this: async def hello_from(request): name = request.url_vars["name"] - return Response.html("Hello from {}".format( - html.escape(name) - )) + return Response.html( + "Hello from {}".format(html.escape(name)) + ) @hookimpl def register_routes(): - return [ - (r"^/hello-from/(?P<name>.*)$", hello_from) - ] + return [(r"^/hello-from/(?P<name>.*)$", hello_from)] The view functions can take a number of different optional arguments. The corresponding argument will be passed to your function depending on its named parameters - a form of dependency injection. @@ -606,10 +636,13 @@ This example registers a new ``datasette verify file1.db file2.db`` command that import click import sqlite3 + @hookimpl def register_commands(cli): @cli.command() - @click.argument("files", type=click.Path(exists=True), nargs=-1) + @click.argument( + "files", type=click.Path(exists=True), nargs=-1 + ) def verify(files): "Verify that files can be opened by Datasette" for file in files: @@ -617,7 +650,9 @@ This example registers a new ``datasette verify file1.db file2.db`` command that try: conn.execute("select * from sqlite_master") except sqlite3.DatabaseError: - raise click.ClickException("Invalid database: {}".format(file)) + raise click.ClickException( + "Invalid database: {}".format(file) + ) The new command can then be executed like so:: @@ -656,15 +691,18 @@ Each Facet subclass implements a new type of facet operation. The class should l async def suggest(self): # Use self.sql and self.params to suggest some facets suggested_facets = [] - suggested_facets.append({ - "name": column, # Or other unique name - # Construct the URL that will enable this facet: - "toggle_url": self.ds.absolute_url( - self.request, path_with_added_args( - self.request, {"_facet": column} - ) - ), - }) + suggested_facets.append( + { + "name": column, # Or other unique name + # Construct the URL that will enable this facet: + "toggle_url": self.ds.absolute_url( + self.request, + path_with_added_args( + self.request, {"_facet": column} + ), + ), + } + ) return suggested_facets async def facet_results(self): @@ -678,18 +716,25 @@ Each Facet subclass implements a new type of facet operation. The class should l try: facet_results_values = [] # More calculations... 
- facet_results_values.append({ - "value": value, - "label": label, - "count": count, - "toggle_url": self.ds.absolute_url(self.request, toggle_path), - "selected": selected, - }) - facet_results.append({ - "name": column, - "results": facet_results_values, - "truncated": len(facet_rows_results) > facet_size, - }) + facet_results_values.append( + { + "value": value, + "label": label, + "count": count, + "toggle_url": self.ds.absolute_url( + self.request, toggle_path + ), + "selected": selected, + } + ) + facet_results.append( + { + "name": column, + "results": facet_results_values, + "truncated": len(facet_rows_results) + > facet_size, + } + ) except QueryInterrupted: facets_timed_out.append(column) @@ -728,21 +773,33 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att def asgi_wrapper(datasette): def wrap_with_databases_header(app): @wraps(app) - async def add_x_databases_header(scope, receive, send): + async def add_x_databases_header( + scope, receive, send + ): async def wrapped_send(event): if event["type"] == "http.response.start": - original_headers = event.get("headers") or [] + original_headers = ( + event.get("headers") or [] + ) event = { "type": event["type"], "status": event["status"], - "headers": original_headers + [ - [b"x-databases", - ", ".join(datasette.databases.keys()).encode("utf-8")] + "headers": original_headers + + [ + [ + b"x-databases", + ", ".join( + datasette.databases.keys() + ).encode("utf-8"), + ] ], } await send(event) + await app(scope, receive, wrapped_send) + return add_x_databases_header + return wrap_with_databases_header Examples: `datasette-cors <https://datasette.io/plugins/datasette-cors>`__, `datasette-pyinstrument <https://datasette.io/plugins/datasette-pyinstrument>`__ @@ -759,7 +816,9 @@ This hook fires when the Datasette application server first starts up. You can i @hookimpl def startup(datasette): config = datasette.plugin_config("my-plugin") or {} - assert "required-setting" in config, "my-plugin requires setting required-setting" + assert ( + "required-setting" in config + ), "my-plugin requires setting required-setting" Or you can return an async function which will be awaited on startup. Use this option if you need to make any database queries: @@ -770,9 +829,12 @@ Or you can return an async function which will be awaited on startup. Use this o async def inner(): db = datasette.get_database() if "my_table" not in await db.table_names(): - await db.execute_write(""" + await db.execute_write( + """ create table my_table (mycol text) - """) + """ + ) + return inner Potential use-cases: @@ -815,6 +877,7 @@ Ues this hook to return a dictionary of additional :ref:`canned query <canned_qu from datasette import hookimpl + @hookimpl def canned_queries(datasette, database): if database == "mydb": @@ -830,15 +893,20 @@ The hook can alternatively return an awaitable function that returns a list. Her from datasette import hookimpl + @hookimpl def canned_queries(datasette, database): async def inner(): db = datasette.get_database(database) if await db.table_exists("saved_queries"): - results = await db.execute("select name, sql from saved_queries") - return {result["name"]: { - "sql": result["sql"] - } for result in results} + results = await db.execute( + "select name, sql from saved_queries" + ) + return { + result["name"]: {"sql": result["sql"]} + for result in results + } + return inner The actor parameter can be used to include the currently authenticated actor in your decision. 
Here's an example that returns saved queries that were saved by that actor: @@ -847,19 +915,23 @@ The actor parameter can be used to include the currently authenticated actor in from datasette import hookimpl + @hookimpl def canned_queries(datasette, database, actor): async def inner(): db = datasette.get_database(database) - if actor is not None and await db.table_exists("saved_queries"): + if actor is not None and await db.table_exists( + "saved_queries" + ): results = await db.execute( - "select name, sql from saved_queries where actor_id = :id", { - "id": actor["id"] - } + "select name, sql from saved_queries where actor_id = :id", + {"id": actor["id"]}, ) - return {result["name"]: { - "sql": result["sql"] - } for result in results} + return { + result["name"]: {"sql": result["sql"]} + for result in results + } + return inner Example: `datasette-saved-queries <https://datasette.io/plugins/datasette-saved-queries>`__ @@ -888,9 +960,12 @@ Here's an example that authenticates the actor based on an incoming API key: SECRET_KEY = "this-is-a-secret" + @hookimpl def actor_from_request(datasette, request): - authorization = request.headers.get("authorization") or "" + authorization = ( + request.headers.get("authorization") or "" + ) expected = "Bearer {}".format(SECRET_KEY) if secrets.compare_digest(authorization, expected): @@ -906,6 +981,7 @@ Instead of returning a dictionary, this function can return an awaitable functio from datasette import hookimpl + @hookimpl def actor_from_request(datasette, request): async def inner(): @@ -914,7 +990,8 @@ Instead of returning a dictionary, this function can return an awaitable functio return None # Look up ?_token=xxx in sessions table result = await datasette.get_database().execute( - "select count(*) from sessions where token = ?", [token] + "select count(*) from sessions where token = ?", + [token], ) if result.first()[0]: return {"token": token} @@ -952,7 +1029,7 @@ The hook should return an instance of ``datasette.filters.FilterArguments`` whic where_clauses=["id > :max_id"], params={"max_id": 5}, human_descriptions=["max_id is greater than 5"], - extra_context={} + extra_context={}, ) The arguments to the ``FilterArguments`` class constructor are as follows: @@ -973,10 +1050,13 @@ This example plugin causes 0 results to be returned if ``?_nothing=1`` is added from datasette import hookimpl from datasette.filters import FilterArguments + @hookimpl def filters_from_request(self, request): if request.args.get("_nothing"): - return FilterArguments(["1 = 0"], human_descriptions=["NOTHING"]) + return FilterArguments( + ["1 = 0"], human_descriptions=["NOTHING"] + ) Example: `datasette-leaflet-freedraw <https://datasette.io/plugins/datasette-leaflet-freedraw>`_ @@ -1006,6 +1086,7 @@ Here's an example plugin which randomly selects if a permission should be allowe from datasette import hookimpl import random + @hookimpl def permission_allowed(action): if action != "view-instance": @@ -1024,11 +1105,16 @@ Here's an example that allows users to view the ``admin_log`` table only if thei async def inner(): if action == "execute-sql" and resource == "staff": return False - if action == "view-table" and resource == ("staff", "admin_log"): + if action == "view-table" and resource == ( + "staff", + "admin_log", + ): if not actor: return False user_id = actor["id"] - return await datasette.get_database("staff").execute( + return await datasette.get_database( + "staff" + ).execute( "select count(*) from admin_users where user_id = :user_id", {"user_id": user_id}, 
) @@ -1059,18 +1145,21 @@ This example registers two new magic parameters: ``:_request_http_version`` retu from uuid import uuid4 + def uuid(key, request): if key == "new": return str(uuid4()) else: raise KeyError + def request(key, request): if key == "http_version": return request.scope["http_version"] else: raise KeyError + @hookimpl def register_magic_parameters(datasette): return [ @@ -1103,9 +1192,12 @@ This example returns a redirect to a ``/-/login`` page: from datasette import hookimpl from urllib.parse import urlencode + @hookimpl def forbidden(request, message): - return Response.redirect("/-/login?=" + urlencode({"message": message})) + return Response.redirect( + "/-/login?=" + urlencode({"message": message}) + ) The function can alternatively return an awaitable function if it needs to make any asynchronous method calls. This example renders a template: @@ -1114,10 +1206,15 @@ The function can alternatively return an awaitable function if it needs to make from datasette import hookimpl from datasette.utils.asgi import Response + @hookimpl def forbidden(datasette): async def inner(): - return Response.html(await datasette.render_template("forbidden.html")) + return Response.html( + await datasette.render_template( + "forbidden.html" + ) + ) return inner @@ -1147,11 +1244,17 @@ This example adds a new menu item but only if the signed in user is ``"root"``: from datasette import hookimpl + @hookimpl def menu_links(datasette, actor): if actor and actor.get("id") == "root": return [ - {"href": datasette.urls.path("/-/edit-schema"), "label": "Edit schema"}, + { + "href": datasette.urls.path( + "/-/edit-schema" + ), + "label": "Edit schema", + }, ] Using :ref:`internals_datasette_urls` here ensures that links in the menu will take the :ref:`setting_base_url` setting into account. @@ -1188,13 +1291,20 @@ This example adds a new table action if the signed in user is ``"root"``: from datasette import hookimpl + @hookimpl def table_actions(datasette, actor): if actor and actor.get("id") == "root": - return [{ - "href": datasette.urls.path("/-/edit-schema/{}/{}".format(database, table)), - "label": "Edit schema for this table", - }] + return [ + { + "href": datasette.urls.path( + "/-/edit-schema/{}/{}".format( + database, table + ) + ), + "label": "Edit schema for this table", + } + ] Example: `datasette-graphql <https://datasette.io/plugins/datasette-graphql>`_ @@ -1238,6 +1348,7 @@ This example will disable CSRF protection for that specific URL path: from datasette import hookimpl + @hookimpl def skip_csrf(scope): return scope["path"] == "/submit-comment" @@ -1278,7 +1389,9 @@ This hook is responsible for returning a dictionary corresponding to Datasette : "description": get_instance_description(datasette), "databases": [], } - for db_name, db_data_dict in get_my_database_meta(datasette, database, table, key): + for db_name, db_data_dict in get_my_database_meta( + datasette, database, table, key + ): metadata["databases"][db_name] = db_data_dict # whatever we return here will be merged with any other plugins using this hook and # will be overwritten by a local metadata.yaml if one exists! 
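(Patches 0205 through 0208 in this series, all refs #1718, come from running blacken-docs over the Sphinx documentation so that the Python examples embedded in the .rst files are formatted by Black. A minimal sketch of reproducing the same check locally, assuming the pinned blacken-docs==1.12.1 dependency from the setup.py change earlier in the series is installed:

    pip install blacken-docs==1.12.1
    blacken-docs -l 60 docs/*.rst

The -l 60 line length matches the step added to .github/workflows/test.yml in patch 0207; the command rewrites code blocks in place and fails on Python syntax errors or when a diff was applied.)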
From 498e1536f5f3e69c50934c0c031055e0af770bf6 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 24 Apr 2022 09:08:56 -0700 Subject: [PATCH 0206/1103] One more blacken-docs test, refs #1718 --- docs/testing_plugins.rst | 45 ++++++++++++++++++++++++---------------- 1 file changed, 27 insertions(+), 18 deletions(-) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 8e4e3f91..6361d744 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -19,7 +19,10 @@ If you use the template described in :ref:`writing_plugins_cookiecutter` your pl response = await datasette.client.get("/-/plugins.json") assert response.status_code == 200 installed_plugins = {p["name"] for p in response.json()} - assert "datasette-plugin-template-demo" in installed_plugins + assert ( + "datasette-plugin-template-demo" + in installed_plugins + ) This test uses the :ref:`internals_datasette_client` object to exercise a test instance of Datasette. ``datasette.client`` is a wrapper around the `HTTPX <https://www.python-httpx.org/>`__ Python library which can imitate HTTP requests using ASGI. This is the recommended way to write tests against a Datasette instance. @@ -37,9 +40,7 @@ If you are building an installable package you can add them as test dependencies setup( name="datasette-my-plugin", # ... - extras_require={ - "test": ["pytest", "pytest-asyncio"] - }, + extras_require={"test": ["pytest", "pytest-asyncio"]}, tests_require=["datasette-my-plugin[test]"], ) @@ -87,31 +88,34 @@ Here's an example that uses the `sqlite-utils library <https://sqlite-utils.data import pytest import sqlite_utils + @pytest.fixture(scope="session") def datasette(tmp_path_factory): db_directory = tmp_path_factory.mktemp("dbs") db_path = db_directory / "test.db" db = sqlite_utils.Database(db_path) - db["dogs"].insert_all([ - {"id": 1, "name": "Cleo", "age": 5}, - {"id": 2, "name": "Pancakes", "age": 4} - ], pk="id") + db["dogs"].insert_all( + [ + {"id": 1, "name": "Cleo", "age": 5}, + {"id": 2, "name": "Pancakes", "age": 4}, + ], + pk="id", + ) datasette = Datasette( [db_path], metadata={ "databases": { "test": { "tables": { - "dogs": { - "title": "Some dogs" - } + "dogs": {"title": "Some dogs"} } } } - } + }, ) return datasette + @pytest.mark.asyncio async def test_example_table_json(datasette): response = await datasette.client.get("/test/dogs.json?_shape=array") @@ -121,6 +125,7 @@ Here's an example that uses the `sqlite-utils library <https://sqlite-utils.data {"id": 2, "name": "Pancakes", "age": 4}, ] + @pytest.mark.asyncio async def test_example_table_html(datasette): response = await datasette.client.get("/test/dogs") @@ -137,6 +142,7 @@ If you want to create that test database repeatedly for every individual test fu @pytest.fixture def datasette(tmp_path_factory): # This fixture will be executed repeatedly for every test + ... .. 
_testing_plugins_pytest_httpx: @@ -197,14 +203,17 @@ Here's a test for that plugin that mocks the HTTPX outbound request: async def test_outbound_http_call(httpx_mock): httpx_mock.add_response( - url='https://www.example.com/', - text='Hello world', + url="https://www.example.com/", + text="Hello world", ) datasette = Datasette([], memory=True) - response = await datasette.client.post("/-/fetch-url", data={ - "url": "https://www.example.com/" - }) + response = await datasette.client.post( + "/-/fetch-url", + data={"url": "https://www.example.com/"}, + ) assert response.text == "Hello world" outbound_request = httpx_mock.get_request() - assert outbound_request.url == "https://www.example.com/" + assert ( + outbound_request.url == "https://www.example.com/" + ) From 289e4cf80a14f05f791b218f092556148b49a0fa Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 24 Apr 2022 09:17:59 -0700 Subject: [PATCH 0207/1103] Finished applying blacken-docs, closes #1718 --- .github/workflows/test.yml | 3 +-- docs/testing_plugins.rst | 4 +++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 38b62995..8d916e49 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -34,6 +34,5 @@ jobs: cog --check docs/*.rst - name: Check if blacken-docs needs to be run run: | + # This fails on syntax errors, or a diff was applied blacken-docs -l 60 docs/*.rst - # This fails if a diff was generated: - git diff-index --quiet HEAD -- diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 6361d744..1bbaaac1 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -118,7 +118,9 @@ Here's an example that uses the `sqlite-utils library <https://sqlite-utils.data @pytest.mark.asyncio async def test_example_table_json(datasette): - response = await datasette.client.get("/test/dogs.json?_shape=array") + response = await datasette.client.get( + "/test/dogs.json?_shape=array" + ) assert response.status_code == 200 assert response.json() == [ {"id": 1, "name": "Cleo", "age": 5}, From 7463b051cf8d7f856df5eba9f7aa944183ebabe5 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 24 Apr 2022 09:59:20 -0700 Subject: [PATCH 0208/1103] Cosmetic tweaks after blacken-docs, refs #1718 --- docs/plugin_hooks.rst | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index ace206b7..4560ec9a 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -162,9 +162,8 @@ And here's an example which adds a ``sql_first(sql_query)`` function which execu or database or next(iter(datasette.databases.keys())) ) - return (await datasette.execute(dbname, sql)).rows[ - 0 - ][0] + result = await datasette.execute(dbname, sql) + return result.rows[0][0] return {"sql_first": sql_first} @@ -422,8 +421,8 @@ If the value matches that pattern, the plugin returns an HTML link element: if not isinstance(value, str): return None stripped = value.strip() - if not stripped.startswith("{") and stripped.endswith( - "}" + if not ( + stripped.startswith("{") and stripped.endswith("}") ): return None try: From 579f59dcec43a91dd7d404e00b87a00afd8515f2 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 25 Apr 2022 11:33:35 -0700 Subject: [PATCH 0209/1103] Refactor to remove RowTableShared class, closes #1719 Refs #1715 --- datasette/app.py | 3 +- datasette/views/row.py | 142 +++++++++++ 
datasette/views/table.py | 497 +++++++++++++++------------------------ 3 files changed, 328 insertions(+), 314 deletions(-) create mode 100644 datasette/views/row.py diff --git a/datasette/app.py b/datasette/app.py index c9eede26..d269372c 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -40,7 +40,8 @@ from .views.special import ( PermissionsDebugView, MessagesDebugView, ) -from .views.table import RowView, TableView +from .views.table import TableView +from .views.row import RowView from .renderer import json_renderer from .url_builder import Urls from .database import Database, QueryInterrupted diff --git a/datasette/views/row.py b/datasette/views/row.py new file mode 100644 index 00000000..b1c7362d --- /dev/null +++ b/datasette/views/row.py @@ -0,0 +1,142 @@ +from datasette.utils.asgi import NotFound +from datasette.database import QueryInterrupted +from .base import DataView +from datasette.utils import ( + tilde_decode, + urlsafe_components, + to_css_class, + escape_sqlite, +) +from .table import _sql_params_pks, display_columns_and_rows + + +class RowView(DataView): + name = "row" + + async def data(self, request, default_labels=False): + database_route = tilde_decode(request.url_vars["database"]) + table = tilde_decode(request.url_vars["table"]) + try: + db = self.ds.get_database(route=database_route) + except KeyError: + raise NotFound("Database not found: {}".format(database_route)) + database = db.name + await self.ds.ensure_permissions( + request.actor, + [ + ("view-table", (database, table)), + ("view-database", database), + "view-instance", + ], + ) + pk_values = urlsafe_components(request.url_vars["pks"]) + try: + db = self.ds.get_database(route=database_route) + except KeyError: + raise NotFound("Database not found: {}".format(database_route)) + database = db.name + sql, params, pks = await _sql_params_pks(db, table, pk_values) + results = await db.execute(sql, params, truncate=True) + columns = [r[0] for r in results.description] + rows = list(results.rows) + if not rows: + raise NotFound(f"Record not found: {pk_values}") + + async def template_data(): + display_columns, display_rows = await display_columns_and_rows( + self.ds, + database, + table, + results.description, + rows, + link_column=False, + truncate_cells=0, + ) + for column in display_columns: + column["sortable"] = False + return { + "foreign_key_tables": await self.foreign_key_tables( + database, table, pk_values + ), + "display_columns": display_columns, + "display_rows": display_rows, + "custom_table_templates": [ + f"_table-{to_css_class(database)}-{to_css_class(table)}.html", + f"_table-row-{to_css_class(database)}-{to_css_class(table)}.html", + "_table.html", + ], + "metadata": (self.ds.metadata("databases") or {}) + .get(database, {}) + .get("tables", {}) + .get(table, {}), + } + + data = { + "database": database, + "table": table, + "rows": rows, + "columns": columns, + "primary_keys": pks, + "primary_key_values": pk_values, + "units": self.ds.table_metadata(database, table).get("units", {}), + } + + if "foreign_key_tables" in (request.args.get("_extras") or "").split(","): + data["foreign_key_tables"] = await self.foreign_key_tables( + database, table, pk_values + ) + + return ( + data, + template_data, + ( + f"row-{to_css_class(database)}-{to_css_class(table)}.html", + "row.html", + ), + ) + + async def foreign_key_tables(self, database, table, pk_values): + if len(pk_values) != 1: + return [] + db = self.ds.databases[database] + all_foreign_keys = await db.get_all_foreign_keys() + foreign_keys 
= all_foreign_keys[table]["incoming"] + if len(foreign_keys) == 0: + return [] + + sql = "select " + ", ".join( + [ + "(select count(*) from {table} where {column}=:id)".format( + table=escape_sqlite(fk["other_table"]), + column=escape_sqlite(fk["other_column"]), + ) + for fk in foreign_keys + ] + ) + try: + rows = list(await db.execute(sql, {"id": pk_values[0]})) + except QueryInterrupted: + # Almost certainly hit the timeout + return [] + + foreign_table_counts = dict( + zip( + [(fk["other_table"], fk["other_column"]) for fk in foreign_keys], + list(rows[0]), + ) + ) + foreign_key_tables = [] + for fk in foreign_keys: + count = ( + foreign_table_counts.get((fk["other_table"], fk["other_column"])) or 0 + ) + key = fk["other_column"] + if key.startswith("_"): + key += "__exact" + link = "{}?{}={}".format( + self.ds.urls.table(database, fk["other_table"]), + key, + ",".join(pk_values), + ) + foreign_key_tables.append({**fk, **{"count": count, "link": link}}) + return foreign_key_tables diff --git a/datasette/views/table.py b/datasette/views/table.py index dc85165e..37fb2ebb 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -1,4 +1,3 @@ -import urllib import itertools import json @@ -9,7 +8,6 @@ from datasette.database import QueryInterrupted from datasette.utils import ( await_me_maybe, CustomRow, - MultiParams, append_querystring, compound_keys_after_sql, format_bytes, @@ -21,7 +19,6 @@ from datasette.utils import ( is_url, path_from_row_pks, path_with_added_args, - path_with_format, path_with_removed_args, path_with_replaced_args, to_css_class, @@ -68,7 +65,9 @@ class Row: return json.dumps(d, default=repr, indent=2) -class RowTableShared(DataView): +class TableView(DataView): + name = "table" + async def sortable_columns_for_table(self, database, table, use_rowid): db = self.ds.databases[database] table_metadata = self.ds.table_metadata(database, table) @@ -89,193 +88,6 @@ class RowTableShared(DataView): expandables.append((fk, label_column)) return expandables - async def display_columns_and_rows( - self, database, table, description, rows, link_column=False, truncate_cells=0 - ): - """Returns columns, rows for specified table - including fancy foreign key treatment""" - db = self.ds.databases[database] - table_metadata = self.ds.table_metadata(database, table) - column_descriptions = table_metadata.get("columns") or {} - column_details = {col.name: col for col in await db.table_column_details(table)} - sortable_columns = await self.sortable_columns_for_table(database, table, True) - pks = await db.primary_keys(table) - pks_for_display = pks - if not pks_for_display: - pks_for_display = ["rowid"] - - columns = [] - for r in description: - if r[0] == "rowid" and "rowid" not in column_details: - type_ = "integer" - notnull = 0 - else: - type_ = column_details[r[0]].type - notnull = column_details[r[0]].notnull - columns.append( - { - "name": r[0], - "sortable": r[0] in sortable_columns, - "is_pk": r[0] in pks_for_display, - "type": type_, - "notnull": notnull, - "description": column_descriptions.get(r[0]), - } - ) - - column_to_foreign_key_table = { - fk["column"]: fk["other_table"] - for fk in await db.foreign_keys_for_table(table) - } - - cell_rows = [] - base_url = self.ds.setting("base_url") - for row in rows: - cells = [] - # Unless we are a view, the first column is a link - either to the rowid - # or to the simple or compound primary key - if link_column: - is_special_link_column = len(pks) != 1 - pk_path = path_from_row_pks(row, pks, not pks, False) - 
cells.append( - { - "column": pks[0] if len(pks) == 1 else "Link", - "value_type": "pk", - "is_special_link_column": is_special_link_column, - "raw": pk_path, - "value": markupsafe.Markup( - '<a href="{table_path}/{flat_pks_quoted}">{flat_pks}</a>'.format( - base_url=base_url, - table_path=self.ds.urls.table(database, table), - flat_pks=str(markupsafe.escape(pk_path)), - flat_pks_quoted=path_from_row_pks(row, pks, not pks), - ) - ), - } - ) - - for value, column_dict in zip(row, columns): - column = column_dict["name"] - if link_column and len(pks) == 1 and column == pks[0]: - # If there's a simple primary key, don't repeat the value as it's - # already shown in the link column. - continue - - # First let the plugins have a go - # pylint: disable=no-member - plugin_display_value = None - for candidate in pm.hook.render_cell( - value=value, - column=column, - table=table, - database=database, - datasette=self.ds, - ): - candidate = await await_me_maybe(candidate) - if candidate is not None: - plugin_display_value = candidate - break - if plugin_display_value: - display_value = plugin_display_value - elif isinstance(value, bytes): - formatted = format_bytes(len(value)) - display_value = markupsafe.Markup( - '<a class="blob-download" href="{}"{}><Binary: {:,} byte{}></a>'.format( - self.ds.urls.row_blob( - database, - table, - path_from_row_pks(row, pks, not pks), - column, - ), - ' title="{}"'.format(formatted) - if "bytes" not in formatted - else "", - len(value), - "" if len(value) == 1 else "s", - ) - ) - elif isinstance(value, dict): - # It's an expanded foreign key - display link to other row - label = value["label"] - value = value["value"] - # The table we link to depends on the column - other_table = column_to_foreign_key_table[column] - link_template = ( - LINK_WITH_LABEL if (label != value) else LINK_WITH_VALUE - ) - display_value = markupsafe.Markup( - link_template.format( - database=database, - base_url=base_url, - table=tilde_encode(other_table), - link_id=tilde_encode(str(value)), - id=str(markupsafe.escape(value)), - label=str(markupsafe.escape(label)) or "-", - ) - ) - elif value in ("", None): - display_value = markupsafe.Markup(" ") - elif is_url(str(value).strip()): - display_value = markupsafe.Markup( - '<a href="{url}">{url}</a>'.format( - url=markupsafe.escape(value.strip()) - ) - ) - elif column in table_metadata.get("units", {}) and value != "": - # Interpret units using pint - value = value * ureg(table_metadata["units"][column]) - # Pint uses floating point which sometimes introduces errors in the compact - # representation, which we have to round off to avoid ugliness. In the vast - # majority of cases this rounding will be inconsequential. I hope. - value = round(value.to_compact(), 6) - display_value = markupsafe.Markup( - f"{value:~P}".replace(" ", " ") - ) - else: - display_value = str(value) - if truncate_cells and len(display_value) > truncate_cells: - display_value = display_value[:truncate_cells] + "\u2026" - - cells.append( - { - "column": column, - "value": display_value, - "raw": value, - "value_type": "none" - if value is None - else str(type(value).__name__), - } - ) - cell_rows.append(Row(cells)) - - if link_column: - # Add the link column header. - # If it's a simple primary key, we have to remove and re-add that column name at - # the beginning of the header row. 
- first_column = None - if len(pks) == 1: - columns = [col for col in columns if col["name"] != pks[0]] - first_column = { - "name": pks[0], - "sortable": len(pks) == 1, - "is_pk": True, - "type": column_details[pks[0]].type, - "notnull": column_details[pks[0]].notnull, - } - else: - first_column = { - "name": "Link", - "sortable": False, - "is_pk": False, - "type": "", - "notnull": 0, - } - columns = [first_column] + columns - return columns, cell_rows - - -class TableView(RowTableShared): - name = "table" - async def post(self, request): database_route = tilde_decode(request.url_vars["database"]) try: @@ -807,13 +619,17 @@ class TableView(RowTableShared): async def extra_template(): nonlocal sort - display_columns, display_rows = await self.display_columns_and_rows( + display_columns, display_rows = await display_columns_and_rows( + self.ds, database, table, results.description, rows, link_column=not is_view, truncate_cells=self.ds.setting("truncate_cells_html"), + sortable_columns=await self.sortable_columns_for_table( + database, table, use_rowid=True + ), ) metadata = ( (self.ds.metadata("databases") or {}) @@ -948,132 +764,187 @@ async def _sql_params_pks(db, table, pk_values): return sql, params, pks -class RowView(RowTableShared): - name = "row" +async def display_columns_and_rows( + datasette, + database, + table, + description, + rows, + link_column=False, + truncate_cells=0, + sortable_columns=None, +): + """Returns columns, rows for specified table - including fancy foreign key treatment""" + sortable_columns = sortable_columns or set() + db = datasette.databases[database] + table_metadata = datasette.table_metadata(database, table) + column_descriptions = table_metadata.get("columns") or {} + column_details = {col.name: col for col in await db.table_column_details(table)} + pks = await db.primary_keys(table) + pks_for_display = pks + if not pks_for_display: + pks_for_display = ["rowid"] - async def data(self, request, default_labels=False): - database_route = tilde_decode(request.url_vars["database"]) - table = tilde_decode(request.url_vars["table"]) - try: - db = self.ds.get_database(route=database_route) - except KeyError: - raise NotFound("Database not found: {}".format(database_route)) - database = db.name - await self.ds.ensure_permissions( - request.actor, - [ - ("view-table", (database, table)), - ("view-database", database), - "view-instance", - ], - ) - pk_values = urlsafe_components(request.url_vars["pks"]) - try: - db = self.ds.get_database(route=database_route) - except KeyError: - raise NotFound("Database not found: {}".format(database_route)) - database = db.name - sql, params, pks = await _sql_params_pks(db, table, pk_values) - results = await db.execute(sql, params, truncate=True) - columns = [r[0] for r in results.description] - rows = list(results.rows) - if not rows: - raise NotFound(f"Record not found: {pk_values}") - - async def template_data(): - display_columns, display_rows = await self.display_columns_and_rows( - database, - table, - results.description, - rows, - link_column=False, - truncate_cells=0, - ) - for column in display_columns: - column["sortable"] = False - return { - "foreign_key_tables": await self.foreign_key_tables( - database, table, pk_values - ), - "display_columns": display_columns, - "display_rows": display_rows, - "custom_table_templates": [ - f"_table-{to_css_class(database)}-{to_css_class(table)}.html", - f"_table-row-{to_css_class(database)}-{to_css_class(table)}.html", - "_table.html", - ], - "metadata": 
(self.ds.metadata("databases") or {}) - .get(database, {}) - .get("tables", {}) - .get(table, {}), + columns = [] + for r in description: + if r[0] == "rowid" and "rowid" not in column_details: + type_ = "integer" + notnull = 0 + else: + type_ = column_details[r[0]].type + notnull = column_details[r[0]].notnull + columns.append( + { + "name": r[0], + "sortable": r[0] in sortable_columns, + "is_pk": r[0] in pks_for_display, + "type": type_, + "notnull": notnull, + "description": column_descriptions.get(r[0]), } - - data = { - "database": database, - "table": table, - "rows": rows, - "columns": columns, - "primary_keys": pks, - "primary_key_values": pk_values, - "units": self.ds.table_metadata(database, table).get("units", {}), - } - - if "foreign_key_tables" in (request.args.get("_extras") or "").split(","): - data["foreign_key_tables"] = await self.foreign_key_tables( - database, table, pk_values - ) - - return ( - data, - template_data, - ( - f"row-{to_css_class(database)}-{to_css_class(table)}.html", - "row.html", - ), ) - async def foreign_key_tables(self, database, table, pk_values): - if len(pk_values) != 1: - return [] - db = self.ds.databases[database] - all_foreign_keys = await db.get_all_foreign_keys() - foreign_keys = all_foreign_keys[table]["incoming"] - if len(foreign_keys) == 0: - return [] + column_to_foreign_key_table = { + fk["column"]: fk["other_table"] for fk in await db.foreign_keys_for_table(table) + } - sql = "select " + ", ".join( - [ - "(select count(*) from {table} where {column}=:id)".format( - table=escape_sqlite(fk["other_table"]), - column=escape_sqlite(fk["other_column"]), + cell_rows = [] + base_url = datasette.setting("base_url") + for row in rows: + cells = [] + # Unless we are a view, the first column is a link - either to the rowid + # or to the simple or compound primary key + if link_column: + is_special_link_column = len(pks) != 1 + pk_path = path_from_row_pks(row, pks, not pks, False) + cells.append( + { + "column": pks[0] if len(pks) == 1 else "Link", + "value_type": "pk", + "is_special_link_column": is_special_link_column, + "raw": pk_path, + "value": markupsafe.Markup( + '<a href="{table_path}/{flat_pks_quoted}">{flat_pks}</a>'.format( + base_url=base_url, + table_path=datasette.urls.table(database, table), + flat_pks=str(markupsafe.escape(pk_path)), + flat_pks_quoted=path_from_row_pks(row, pks, not pks), + ) + ), + } + ) + + for value, column_dict in zip(row, columns): + column = column_dict["name"] + if link_column and len(pks) == 1 and column == pks[0]: + # If there's a simple primary key, don't repeat the value as it's + # already shown in the link column. 
+ continue + + # First let the plugins have a go + # pylint: disable=no-member + plugin_display_value = None + for candidate in pm.hook.render_cell( + value=value, + column=column, + table=table, + database=database, + datasette=datasette, + ): + candidate = await await_me_maybe(candidate) + if candidate is not None: + plugin_display_value = candidate + break + if plugin_display_value: + display_value = plugin_display_value + elif isinstance(value, bytes): + formatted = format_bytes(len(value)) + display_value = markupsafe.Markup( + '<a class="blob-download" href="{}"{}><Binary: {:,} byte{}></a>'.format( + datasette.urls.row_blob( + database, + table, + path_from_row_pks(row, pks, not pks), + column, + ), + ' title="{}"'.format(formatted) + if "bytes" not in formatted + else "", + len(value), + "" if len(value) == 1 else "s", + ) ) - for fk in foreign_keys - ] - ) - try: - rows = list(await db.execute(sql, {"id": pk_values[0]})) - except QueryInterrupted: - # Almost certainly hit the timeout - return [] + elif isinstance(value, dict): + # It's an expanded foreign key - display link to other row + label = value["label"] + value = value["value"] + # The table we link to depends on the column + other_table = column_to_foreign_key_table[column] + link_template = LINK_WITH_LABEL if (label != value) else LINK_WITH_VALUE + display_value = markupsafe.Markup( + link_template.format( + database=database, + base_url=base_url, + table=tilde_encode(other_table), + link_id=tilde_encode(str(value)), + id=str(markupsafe.escape(value)), + label=str(markupsafe.escape(label)) or "-", + ) + ) + elif value in ("", None): + display_value = markupsafe.Markup(" ") + elif is_url(str(value).strip()): + display_value = markupsafe.Markup( + '<a href="{url}">{url}</a>'.format( + url=markupsafe.escape(value.strip()) + ) + ) + elif column in table_metadata.get("units", {}) and value != "": + # Interpret units using pint + value = value * ureg(table_metadata["units"][column]) + # Pint uses floating point which sometimes introduces errors in the compact + # representation, which we have to round off to avoid ugliness. In the vast + # majority of cases this rounding will be inconsequential. I hope. + value = round(value.to_compact(), 6) + display_value = markupsafe.Markup(f"{value:~P}".replace(" ", " ")) + else: + display_value = str(value) + if truncate_cells and len(display_value) > truncate_cells: + display_value = display_value[:truncate_cells] + "\u2026" - foreign_table_counts = dict( - zip( - [(fk["other_table"], fk["other_column"]) for fk in foreign_keys], - list(rows[0]), + cells.append( + { + "column": column, + "value": display_value, + "raw": value, + "value_type": "none" + if value is None + else str(type(value).__name__), + } ) - ) - foreign_key_tables = [] - for fk in foreign_keys: - count = ( - foreign_table_counts.get((fk["other_table"], fk["other_column"])) or 0 - ) - key = fk["other_column"] - if key.startswith("_"): - key += "__exact" - link = "{}?{}={}".format( - self.ds.urls.table(database, fk["other_table"]), - key, - ",".join(pk_values), - ) - foreign_key_tables.append({**fk, **{"count": count, "link": link}}) - return foreign_key_tables + cell_rows.append(Row(cells)) + + if link_column: + # Add the link column header. + # If it's a simple primary key, we have to remove and re-add that column name at + # the beginning of the header row. 
+ first_column = None + if len(pks) == 1: + columns = [col for col in columns if col["name"] != pks[0]] + first_column = { + "name": pks[0], + "sortable": len(pks) == 1, + "is_pk": True, + "type": column_details[pks[0]].type, + "notnull": column_details[pks[0]].notnull, + } + else: + first_column = { + "name": "Link", + "sortable": False, + "is_pk": False, + "type": "", + "notnull": 0, + } + columns = [first_column] + columns + return columns, cell_rows From c101f0efeec4f6e49298a542c5e2b59236cfa0ff Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 26 Apr 2022 15:34:29 -0700 Subject: [PATCH 0210/1103] datasette-total-page-time example of asgi_wrapper --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 4560ec9a..3c9ae2e2 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -801,7 +801,7 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att return wrap_with_databases_header -Examples: `datasette-cors <https://datasette.io/plugins/datasette-cors>`__, `datasette-pyinstrument <https://datasette.io/plugins/datasette-pyinstrument>`__ +Examples: `datasette-cors <https://datasette.io/plugins/datasette-cors>`__, `datasette-pyinstrument <https://datasette.io/plugins/datasette-pyinstrument>`__, `datasette-total-page-time <https://datasette.io/plugins/datasette-total-page-time>`__ .. _plugin_hook_startup: From 8a0c38f0b89543e652a968a90d480859cb102510 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 26 Apr 2022 13:56:27 -0700 Subject: [PATCH 0211/1103] Rename database->database_name and table-> table_name, refs #1715 --- datasette/views/table.py | 143 +++++++++++++++++++++------------------ 1 file changed, 76 insertions(+), 67 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 37fb2ebb..d66adb82 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -68,22 +68,22 @@ class Row: class TableView(DataView): name = "table" - async def sortable_columns_for_table(self, database, table, use_rowid): - db = self.ds.databases[database] - table_metadata = self.ds.table_metadata(database, table) + async def sortable_columns_for_table(self, database_name, table_name, use_rowid): + db = self.ds.databases[database_name] + table_metadata = self.ds.table_metadata(database_name, table_name) if "sortable_columns" in table_metadata: sortable_columns = set(table_metadata["sortable_columns"]) else: - sortable_columns = set(await db.table_columns(table)) + sortable_columns = set(await db.table_columns(table_name)) if use_rowid: sortable_columns.add("rowid") return sortable_columns - async def expandable_columns(self, database, table): + async def expandable_columns(self, database_name, table_name): # Returns list of (fk_dict, label_column-or-None) pairs for that table expandables = [] - db = self.ds.databases[database] - for fk in await db.foreign_keys_for_table(table): + db = self.ds.databases[database_name] + for fk in await db.foreign_keys_for_table(table_name): label_column = await db.label_column_for_table(fk["other_table"]) expandables.append((fk, label_column)) return expandables @@ -94,17 +94,19 @@ class TableView(DataView): db = self.ds.get_database(route=database_route) except KeyError: raise NotFound("Database not found: {}".format(database_route)) - database = db.name - table = tilde_decode(request.url_vars["table"]) + database_name = db.name + table_name = 
tilde_decode(request.url_vars["table"]) # Handle POST to a canned query - canned_query = await self.ds.get_canned_query(database, table, request.actor) + canned_query = await self.ds.get_canned_query( + database_name, table_name, request.actor + ) assert canned_query, "You may only POST to a canned query" return await QueryView(self.ds).data( request, canned_query["sql"], metadata=canned_query, editable=False, - canned_query=table, + canned_query=table_name, named_parameters=canned_query.get("params"), write=bool(canned_query.get("write")), ) @@ -150,45 +152,47 @@ class TableView(DataView): _size=None, ): database_route = tilde_decode(request.url_vars["database"]) - table = tilde_decode(request.url_vars["table"]) + table_name = tilde_decode(request.url_vars["table"]) try: db = self.ds.get_database(route=database_route) except KeyError: raise NotFound("Database not found: {}".format(database_route)) - database = db.name + database_name = db.name # If this is a canned query, not a table, then dispatch to QueryView instead - canned_query = await self.ds.get_canned_query(database, table, request.actor) + canned_query = await self.ds.get_canned_query( + database_name, table_name, request.actor + ) if canned_query: return await QueryView(self.ds).data( request, canned_query["sql"], metadata=canned_query, editable=False, - canned_query=table, + canned_query=table_name, named_parameters=canned_query.get("params"), write=bool(canned_query.get("write")), ) - is_view = bool(await db.get_view_definition(table)) - table_exists = bool(await db.table_exists(table)) + is_view = bool(await db.get_view_definition(table_name)) + table_exists = bool(await db.table_exists(table_name)) # If table or view not found, return 404 if not is_view and not table_exists: - raise NotFound(f"Table not found: {table}") + raise NotFound(f"Table not found: {table_name}") # Ensure user has permission to view this table await self.ds.ensure_permissions( request.actor, [ - ("view-table", (database, table)), - ("view-database", database), + ("view-table", (database_name, table_name)), + ("view-database", database_name), "view-instance", ], ) private = not await self.ds.permission_allowed( - None, "view-table", (database, table), default=True + None, "view-table", (database_name, table_name), default=True ) # Handle ?_filter_column and redirect, if present @@ -216,8 +220,8 @@ class TableView(DataView): ) # Introspect columns and primary keys for table - pks = await db.primary_keys(table) - table_columns = await db.table_columns(table) + pks = await db.primary_keys(table_name) + table_columns = await db.table_columns(table_name) # Take ?_col= and ?_nocol= into account specified_columns = await self.columns_to_select(table_columns, pks, request) @@ -248,7 +252,7 @@ class TableView(DataView): nocount = True nofacet = True - table_metadata = self.ds.table_metadata(database, table) + table_metadata = self.ds.table_metadata(database_name, table_name) units = table_metadata.get("units", {}) # Arguments that start with _ and don't contain a __ are @@ -262,7 +266,7 @@ class TableView(DataView): # Build where clauses from query string arguments filters = Filters(sorted(filter_args), units, ureg) - where_clauses, params = filters.build_where_clauses(table) + where_clauses, params = filters.build_where_clauses(table_name) # Execute filters_from_request plugin hooks - including the default # ones that live in datasette/filters.py @@ -271,8 +275,8 @@ class TableView(DataView): for hook in pm.hook.filters_from_request( request=request, - 
table=table, - database=database, + table=table_name, + database=database_name, datasette=self.ds, ): filter_arguments = await await_me_maybe(hook) @@ -284,7 +288,7 @@ class TableView(DataView): # Deal with custom sort orders sortable_columns = await self.sortable_columns_for_table( - database, table, use_rowid + database_name, table_name, use_rowid ) sort = request.args.get("_sort") sort_desc = request.args.get("_sort_desc") @@ -309,7 +313,7 @@ class TableView(DataView): order_by = f"{escape_sqlite(sort_desc)} desc" from_sql = "from {table_name} {where}".format( - table_name=escape_sqlite(table), + table_name=escape_sqlite(table_name), where=("where {} ".format(" and ".join(where_clauses))) if where_clauses else "", @@ -422,7 +426,7 @@ class TableView(DataView): sql_no_order_no_limit = ( "select {select_all_columns} from {table_name} {where}".format( select_all_columns=select_all_columns, - table_name=escape_sqlite(table), + table_name=escape_sqlite(table_name), where=where_clause, ) ) @@ -430,7 +434,7 @@ class TableView(DataView): # This is the SQL that populates the main table on the page sql = "select {select_specified_columns} from {table_name} {where}{order_by} limit {page_size}{offset}".format( select_specified_columns=select_specified_columns, - table_name=escape_sqlite(table), + table_name=escape_sqlite(table_name), where=where_clause, order_by=order_by, page_size=page_size + 1, @@ -448,13 +452,13 @@ class TableView(DataView): if ( not db.is_mutable and self.ds.inspect_data - and count_sql == f"select count(*) from {table} " + and count_sql == f"select count(*) from {table_name} " ): # We can use a previously cached table row count try: - filtered_table_rows_count = self.ds.inspect_data[database]["tables"][ - table - ]["count"] + filtered_table_rows_count = self.ds.inspect_data[database_name][ + "tables" + ][table_name]["count"] except KeyError: pass @@ -484,10 +488,10 @@ class TableView(DataView): klass( self.ds, request, - database, + database_name, sql=sql_no_order_no_limit, params=params, - table=table, + table=table_name, metadata=table_metadata, row_count=filtered_table_rows_count, ) @@ -527,7 +531,7 @@ class TableView(DataView): # Expand labeled columns if requested expanded_columns = [] - expandable_columns = await self.expandable_columns(database, table) + expandable_columns = await self.expandable_columns(database_name, table_name) columns_to_expand = None try: all_labels = value_as_boolean(request.args.get("_labels", "")) @@ -554,7 +558,9 @@ class TableView(DataView): values = [row[column_index] for row in rows] # Expand them expanded_labels.update( - await self.ds.expand_foreign_keys(database, table, column, values) + await self.ds.expand_foreign_keys( + database_name, table_name, column, values + ) ) if expanded_labels: # Rewrite the rows @@ -621,21 +627,21 @@ class TableView(DataView): display_columns, display_rows = await display_columns_and_rows( self.ds, - database, - table, + database_name, + table_name, results.description, rows, link_column=not is_view, truncate_cells=self.ds.setting("truncate_cells_html"), sortable_columns=await self.sortable_columns_for_table( - database, table, use_rowid=True + database_name, table_name, use_rowid=True ), ) metadata = ( (self.ds.metadata("databases") or {}) - .get(database, {}) + .get(database_name, {}) .get("tables", {}) - .get(table, {}) + .get(table_name, {}) ) self.ds.update_with_inherited_metadata(metadata) @@ -661,8 +667,8 @@ class TableView(DataView): links = [] for hook in pm.hook.table_actions( datasette=self.ds, - 
table=table, - database=database, + table=table_name, + database=database_name, actor=request.actor, request=request, ): @@ -703,13 +709,13 @@ class TableView(DataView): "sort_desc": sort_desc, "disable_sort": is_view, "custom_table_templates": [ - f"_table-{to_css_class(database)}-{to_css_class(table)}.html", - f"_table-table-{to_css_class(database)}-{to_css_class(table)}.html", + f"_table-{to_css_class(database_name)}-{to_css_class(table_name)}.html", + f"_table-table-{to_css_class(database_name)}-{to_css_class(table_name)}.html", "_table.html", ], "metadata": metadata, - "view_definition": await db.get_view_definition(table), - "table_definition": await db.get_table_definition(table), + "view_definition": await db.get_view_definition(table_name), + "table_definition": await db.get_table_definition(table_name), "datasette_allow_facet": "true" if self.ds.setting("allow_facet") else "false", @@ -719,8 +725,8 @@ class TableView(DataView): return ( { - "database": database, - "table": table, + "database": database_name, + "table": table_name, "is_view": is_view, "human_description_en": human_description_en, "rows": rows[:page_size], @@ -738,12 +744,12 @@ class TableView(DataView): "next_url": next_url, "private": private, "allow_execute_sql": await self.ds.permission_allowed( - request.actor, "execute-sql", database, default=True + request.actor, "execute-sql", database_name, default=True ), }, extra_template, ( - f"table-{to_css_class(database)}-{to_css_class(table)}.html", + f"table-{to_css_class(database_name)}-{to_css_class(table_name)}.html", "table.html", ), ) @@ -766,8 +772,8 @@ async def _sql_params_pks(db, table, pk_values): async def display_columns_and_rows( datasette, - database, - table, + database_name, + table_name, description, rows, link_column=False, @@ -776,11 +782,13 @@ async def display_columns_and_rows( ): """Returns columns, rows for specified table - including fancy foreign key treatment""" sortable_columns = sortable_columns or set() - db = datasette.databases[database] - table_metadata = datasette.table_metadata(database, table) + db = datasette.databases[database_name] + table_metadata = datasette.table_metadata(database_name, table_name) column_descriptions = table_metadata.get("columns") or {} - column_details = {col.name: col for col in await db.table_column_details(table)} - pks = await db.primary_keys(table) + column_details = { + col.name: col for col in await db.table_column_details(table_name) + } + pks = await db.primary_keys(table_name) pks_for_display = pks if not pks_for_display: pks_for_display = ["rowid"] @@ -805,7 +813,8 @@ async def display_columns_and_rows( ) column_to_foreign_key_table = { - fk["column"]: fk["other_table"] for fk in await db.foreign_keys_for_table(table) + fk["column"]: fk["other_table"] + for fk in await db.foreign_keys_for_table(table_name) } cell_rows = [] @@ -826,7 +835,7 @@ async def display_columns_and_rows( "value": markupsafe.Markup( '<a href="{table_path}/{flat_pks_quoted}">{flat_pks}</a>'.format( base_url=base_url, - table_path=datasette.urls.table(database, table), + table_path=datasette.urls.table(database_name, table_name), flat_pks=str(markupsafe.escape(pk_path)), flat_pks_quoted=path_from_row_pks(row, pks, not pks), ) @@ -847,8 +856,8 @@ async def display_columns_and_rows( for candidate in pm.hook.render_cell( value=value, column=column, - table=table, - database=database, + table=table_name, + database=database_name, datasette=datasette, ): candidate = await await_me_maybe(candidate) @@ -862,8 +871,8 @@ async def 
display_columns_and_rows( display_value = markupsafe.Markup( '<a class="blob-download" href="{}"{}><Binary: {:,} byte{}></a>'.format( datasette.urls.row_blob( - database, - table, + database_name, + table_name, path_from_row_pks(row, pks, not pks), column, ), @@ -883,7 +892,7 @@ async def display_columns_and_rows( link_template = LINK_WITH_LABEL if (label != value) else LINK_WITH_VALUE display_value = markupsafe.Markup( link_template.format( - database=database, + database=database_name, base_url=base_url, table=tilde_encode(other_table), link_id=tilde_encode(str(value)), From 942411ef946e9a34a2094944d3423cddad27efd3 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 26 Apr 2022 15:48:56 -0700 Subject: [PATCH 0212/1103] Execute some TableView queries in parallel Use ?_noparallel=1 to opt out (undocumented, useful for benchmark comparisons) Refs #1723, #1715 --- datasette/views/table.py | 91 +++++++++++++++++++++++++++++----------- 1 file changed, 66 insertions(+), 25 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index d66adb82..23289b29 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -1,3 +1,4 @@ +import asyncio import itertools import json @@ -5,6 +6,7 @@ import markupsafe from datasette.plugins import pm from datasette.database import QueryInterrupted +from datasette import tracer from datasette.utils import ( await_me_maybe, CustomRow, @@ -150,6 +152,16 @@ class TableView(DataView): default_labels=False, _next=None, _size=None, + ): + with tracer.trace_child_tasks(): + return await self._data_traced(request, default_labels, _next, _size) + + async def _data_traced( + self, + request, + default_labels=False, + _next=None, + _size=None, ): database_route = tilde_decode(request.url_vars["database"]) table_name = tilde_decode(request.url_vars["table"]) @@ -159,6 +171,20 @@ class TableView(DataView): raise NotFound("Database not found: {}".format(database_route)) database_name = db.name + # For performance profiling purposes, ?_noparallel=1 turns off asyncio.gather + async def _gather_parallel(*args): + return await asyncio.gather(*args) + + async def _gather_sequential(*args): + results = [] + for fn in args: + results.append(await fn) + return results + + gather = ( + _gather_sequential if request.args.get("_noparallel") else _gather_parallel + ) + # If this is a canned query, not a table, then dispatch to QueryView instead canned_query = await self.ds.get_canned_query( database_name, table_name, request.actor @@ -174,8 +200,12 @@ class TableView(DataView): write=bool(canned_query.get("write")), ) - is_view = bool(await db.get_view_definition(table_name)) - table_exists = bool(await db.table_exists(table_name)) + is_view, table_exists = map( + bool, + await gather( + db.get_view_definition(table_name), db.table_exists(table_name) + ), + ) # If table or view not found, return 404 if not is_view and not table_exists: @@ -497,33 +527,44 @@ class TableView(DataView): ) ) - if not nofacet: - for facet in facet_instances: - ( + async def execute_facets(): + if not nofacet: + # Run them in parallel + facet_awaitables = [facet.facet_results() for facet in facet_instances] + facet_awaitable_results = await gather(*facet_awaitables) + for ( instance_facet_results, instance_facets_timed_out, - ) = await facet.facet_results() - for facet_info in instance_facet_results: - base_key = facet_info["name"] - key = base_key - i = 1 - while key in facet_results: - i += 1 - key = f"{base_key}_{i}" - facet_results[key] = 
facet_info - facets_timed_out.extend(instance_facets_timed_out) + ) in facet_awaitable_results: + for facet_info in instance_facet_results: + base_key = facet_info["name"] + key = base_key + i = 1 + while key in facet_results: + i += 1 + key = f"{base_key}_{i}" + facet_results[key] = facet_info + facets_timed_out.extend(instance_facets_timed_out) - # Calculate suggested facets suggested_facets = [] - if ( - self.ds.setting("suggest_facets") - and self.ds.setting("allow_facet") - and not _next - and not nofacet - and not nosuggest - ): - for facet in facet_instances: - suggested_facets.extend(await facet.suggest()) + + async def execute_suggested_facets(): + # Calculate suggested facets + if ( + self.ds.setting("suggest_facets") + and self.ds.setting("allow_facet") + and not _next + and not nofacet + and not nosuggest + ): + # Run them in parallel + facet_suggest_awaitables = [ + facet.suggest() for facet in facet_instances + ] + for suggest_result in await gather(*facet_suggest_awaitables): + suggested_facets.extend(suggest_result) + + await gather(execute_facets(), execute_suggested_facets()) # Figure out columns and rows for the query columns = [r[0] for r in results.description] From 94a3171b01fde5c52697aeeff052e3ad4bab5391 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 28 Apr 2022 13:29:11 -0700 Subject: [PATCH 0213/1103] .plugin_config() can return None --- docs/internals.rst | 4 ++++ docs/writing_plugins.rst | 2 ++ 2 files changed, 6 insertions(+) diff --git a/docs/internals.rst b/docs/internals.rst index aad608dc..18822d47 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -288,6 +288,10 @@ All databases are listed, irrespective of user permissions. This means that the This method lets you read plugin configuration values that were set in ``metadata.json``. See :ref:`writing_plugins_configuration` for full details of how this method should be used. +The return value will be the value from the configuration file - usually a dictionary. + +If the plugin is not configured the return value will be ``None``. + .. _datasette_render_template: await .render_template(template, context=None, request=None) diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index 89f7f5eb..9aee70f6 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -182,6 +182,8 @@ When you are writing plugins, you can access plugin configuration like this usin This will return the ``{"latitude_column": "lat", "longitude_column": "lng"}`` in the above example. +If there is no configuration for that plugin, the method will return ``None``. + If it cannot find the requested configuration at the table layer, it will fall back to the database layer and then the root layer. 
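Taken together, these notes mean plugin code should not assume ``plugin_config()`` returns a dictionary. A minimal sketch of the defensive pattern (illustrative only, not code from this patch; the hook and plugin name are placeholders):

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def extra_template_vars(datasette):
        # plugin_config() returns None when the plugin has no configuration,
        # so fall back to an empty dict before reading individual keys.
        config = datasette.plugin_config("datasette-cluster-map") or {}
        return {
            "latitude_column": config.get("latitude_column", "lat"),
            "longitude_column": config.get("longitude_column", "lng"),
        }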
For example, a user may have set the plugin configuration option like so:: { From 4afc1afc721ac0d14f58b0f8339c1bf431d5313c Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 2 May 2022 12:13:11 -0700 Subject: [PATCH 0214/1103] Depend on click-default-group-wheel>=1.2.2 Refs #1733 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 7f0562fd..fcb43aa1 100644 --- a/setup.py +++ b/setup.py @@ -44,7 +44,7 @@ setup( install_requires=[ "asgiref>=3.2.10,<3.6.0", "click>=7.1.1,<8.2.0", - "click-default-group~=1.2.2", + "click-default-group-wheel>=1.2.2", "Jinja2>=2.10.3,<3.1.0", "hupper~=1.9", "httpx>=0.20", From 7e03394734307a5761e4c98d902b6a8cab188562 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 2 May 2022 12:20:14 -0700 Subject: [PATCH 0215/1103] Optional uvicorn import for Pyodide, refs #1733 --- datasette/app.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index d269372c..a5330458 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -26,7 +26,6 @@ from itsdangerous import URLSafeSerializer from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader from jinja2.environment import Template from jinja2.exceptions import TemplateNotFound -import uvicorn from .views.base import DatasetteError, ureg from .views.database import DatabaseDownload, DatabaseView @@ -806,6 +805,15 @@ class Datasette: datasette_version = {"version": __version__} if self.version_note: datasette_version["note"] = self.version_note + + try: + # Optional import to avoid breaking Pyodide + # https://github.com/simonw/datasette/issues/1733#issuecomment-1115268245 + import uvicorn + + uvicorn_version = uvicorn.__version__ + except ImportError: + uvicorn_version = None info = { "python": { "version": ".".join(map(str, sys.version_info[:3])), @@ -813,7 +821,7 @@ class Datasette: }, "datasette": datasette_version, "asgi": "3.0", - "uvicorn": uvicorn.__version__, + "uvicorn": uvicorn_version, "sqlite": { "version": sqlite_version, "fts_versions": fts_versions, From 687907aa2b1bde4de6ae7155b0e2a949ca015ca9 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 2 May 2022 12:39:06 -0700 Subject: [PATCH 0216/1103] Remove python-baseconv dependency, refs #1733, closes #1734 --- datasette/actor_auth_cookie.py | 2 +- datasette/utils/baseconv.py | 59 ++++++++++++++++++++++++++++++++++ docs/authentication.rst | 4 +-- setup.py | 1 - tests/test_auth.py | 2 +- 5 files changed, 63 insertions(+), 5 deletions(-) create mode 100644 datasette/utils/baseconv.py diff --git a/datasette/actor_auth_cookie.py b/datasette/actor_auth_cookie.py index 15ecd331..368213af 100644 --- a/datasette/actor_auth_cookie.py +++ b/datasette/actor_auth_cookie.py @@ -1,6 +1,6 @@ from datasette import hookimpl from itsdangerous import BadSignature -import baseconv +from datasette.utils import baseconv import time diff --git a/datasette/utils/baseconv.py b/datasette/utils/baseconv.py new file mode 100644 index 00000000..27e4fb00 --- /dev/null +++ b/datasette/utils/baseconv.py @@ -0,0 +1,59 @@ +""" +Convert numbers from base 10 integers to base X strings and back again. 
+ +Sample usage: + +>>> base20 = BaseConverter('0123456789abcdefghij') +>>> base20.from_decimal(1234) +'31e' +>>> base20.to_decimal('31e') +1234 + +Originally shared here: https://www.djangosnippets.org/snippets/1431/ +""" + + +class BaseConverter(object): + decimal_digits = "0123456789" + + def __init__(self, digits): + self.digits = digits + + def from_decimal(self, i): + return self.convert(i, self.decimal_digits, self.digits) + + def to_decimal(self, s): + return int(self.convert(s, self.digits, self.decimal_digits)) + + def convert(number, fromdigits, todigits): + # Based on http://code.activestate.com/recipes/111286/ + if str(number)[0] == "-": + number = str(number)[1:] + neg = 1 + else: + neg = 0 + + # make an integer out of the number + x = 0 + for digit in str(number): + x = x * len(fromdigits) + fromdigits.index(digit) + + # create the result in base 'len(todigits)' + if x == 0: + res = todigits[0] + else: + res = "" + while x > 0: + digit = x % len(todigits) + res = todigits[digit] + res + x = int(x / len(todigits)) + if neg: + res = "-" + res + return res + + convert = staticmethod(convert) + + +bin = BaseConverter("01") +hexconv = BaseConverter("0123456789ABCDEF") +base62 = BaseConverter("ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyz") diff --git a/docs/authentication.rst b/docs/authentication.rst index 24960733..685dab15 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -401,12 +401,12 @@ Including an expiry time ``ds_actor`` cookies can optionally include a signed expiry timestamp, after which the cookies will no longer be valid. Authentication plugins may chose to use this mechanism to limit the lifetime of the cookie. For example, if a plugin implements single-sign-on against another source it may decide to set short-lived cookies so that if the user is removed from the SSO system their existing Datasette cookies will stop working shortly afterwards. -To include an expiry, add a ``"e"`` key to the cookie value containing a `base62-encoded integer <https://pypi.org/project/python-baseconv/>`__ representing the timestamp when the cookie should expire. For example, here's how to set a cookie that expires after 24 hours: +To include an expiry, add a ``"e"`` key to the cookie value containing a base62-encoded integer representing the timestamp when the cookie should expire. For example, here's how to set a cookie that expires after 24 hours: .. 
code-block:: python import time - import baseconv + from datasette.utils import baseconv expires_at = int(time.time()) + (24 * 60 * 60) diff --git a/setup.py b/setup.py index fcb43aa1..ca449f02 100644 --- a/setup.py +++ b/setup.py @@ -57,7 +57,6 @@ setup( "PyYAML>=5.3,<7.0", "mergedeep>=1.1.1,<1.4.0", "itsdangerous>=1.1,<3.0", - "python-baseconv==1.2.2", ], entry_points=""" [console_scripts] diff --git a/tests/test_auth.py b/tests/test_auth.py index 974f89ea..4ef35a76 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,5 +1,5 @@ from .fixtures import app_client -import baseconv +from datasette.utils import baseconv import pytest import time From a29c1277896b6a7905ef5441c42a37bc15f67599 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 2 May 2022 12:44:09 -0700 Subject: [PATCH 0217/1103] Rename to_decimal/from_decimal to decode/encode, refs #1734 --- datasette/utils/baseconv.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/datasette/utils/baseconv.py b/datasette/utils/baseconv.py index 27e4fb00..c4b64908 100644 --- a/datasette/utils/baseconv.py +++ b/datasette/utils/baseconv.py @@ -19,10 +19,10 @@ class BaseConverter(object): def __init__(self, digits): self.digits = digits - def from_decimal(self, i): + def encode(self, i): return self.convert(i, self.decimal_digits, self.digits) - def to_decimal(self, s): + def decode(self, s): return int(self.convert(s, self.digits, self.decimal_digits)) def convert(number, fromdigits, todigits): From 3f00a29141bdea5be747f6d1c93871ccdb792167 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 2 May 2022 13:15:27 -0700 Subject: [PATCH 0218/1103] Clean up compatibility with Pyodide (#1736) * Optional uvicorn import for Pyodide, refs #1733 * --setting num_sql_threads 0 to disable threading, refs #1735 --- datasette/app.py | 11 ++++++++--- datasette/database.py | 19 +++++++++++++++++++ docs/settings.rst | 2 ++ tests/test_internals_datasette.py | 14 +++++++++++++- 4 files changed, 42 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index a5330458..b7b84371 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -288,9 +288,12 @@ class Datasette: self._settings = dict(DEFAULT_SETTINGS, **(settings or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note - self.executor = futures.ThreadPoolExecutor( - max_workers=self.setting("num_sql_threads") - ) + if self.setting("num_sql_threads") == 0: + self.executor = None + else: + self.executor = futures.ThreadPoolExecutor( + max_workers=self.setting("num_sql_threads") + ) self.max_returned_rows = self.setting("max_returned_rows") self.sql_time_limit_ms = self.setting("sql_time_limit_ms") self.page_size = self.setting("default_page_size") @@ -862,6 +865,8 @@ class Datasette: ] def _threads(self): + if self.setting("num_sql_threads") == 0: + return {"num_threads": 0, "threads": []} threads = list(threading.enumerate()) d = { "num_threads": len(threads), diff --git a/datasette/database.py b/datasette/database.py index ba594a8c..44d32667 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -45,6 +45,9 @@ class Database: self._cached_table_counts = None self._write_thread = None self._write_queue = None + # These are used when in non-threaded mode: + self._read_connection = None + self._write_connection = None if not self.is_mutable and not self.is_memory: p = Path(path) self.hash = inspect_hash(p) @@ -134,6 +137,14 @@ class Database: 
return results async def execute_write_fn(self, fn, block=True): + if self.ds.executor is None: + # non-threaded mode + if self._write_connection is None: + self._write_connection = self.connect(write=True) + self.ds._prepare_connection(self._write_connection, self.name) + return fn(self._write_connection) + + # threaded mode task_id = uuid.uuid5(uuid.NAMESPACE_DNS, "datasette.io") if self._write_queue is None: self._write_queue = queue.Queue() @@ -177,6 +188,14 @@ class Database: task.reply_queue.sync_q.put(result) async def execute_fn(self, fn): + if self.ds.executor is None: + # non-threaded mode + if self._read_connection is None: + self._read_connection = self.connect() + self.ds._prepare_connection(self._read_connection, self.name) + return fn(self._read_connection) + + # threaded mode def in_thread(): conn = getattr(connections, self.name, None) if not conn: diff --git a/docs/settings.rst b/docs/settings.rst index 60c4b36d..8437fb04 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -107,6 +107,8 @@ Maximum number of threads in the thread pool Datasette uses to execute SQLite qu datasette mydatabase.db --setting num_sql_threads 10 +Setting this to 0 turns off threaded SQL queries entirely - useful for environments that do not support threading such as `Pyodide <https://pyodide.org/>`__. + .. _setting_allow_facet: allow_facet diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index cc200a2d..1dc14cab 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -1,7 +1,7 @@ """ Tests for the datasette.app.Datasette class """ -from datasette.app import Datasette +from datasette.app import Datasette, Database from itsdangerous import BadSignature from .fixtures import app_client import pytest @@ -63,3 +63,15 @@ async def test_datasette_constructor(): "hash": None, } ] + + +@pytest.mark.asyncio +async def test_num_sql_threads_zero(): + ds = Datasette([], memory=True, settings={"num_sql_threads": 0}) + db = ds.add_database(Database(ds, memory_name="test_num_sql_threads_zero")) + await db.execute_write("create table t(id integer primary key)") + await db.execute_write("insert into t (id) values (1)") + response = await ds.client.get("/-/threads.json") + assert response.json() == {"num_threads": 0, "threads": []} + response2 = await ds.client.get("/test_num_sql_threads_zero/t.json?_shape=array") + assert response2.json() == [{"id": 1}] From 943aa2e1f7341cb51e60332cde46bde650c64217 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 2 May 2022 14:38:34 -0700 Subject: [PATCH 0219/1103] Release 0.62a0 Refs #1683, #1701, #1712, #1717, #1718, #1733 --- datasette/version.py | 2 +- docs/changelog.rst | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 02451a1e..cf18c441 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.61.1" +__version__ = "0.62a0" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 03cf62b6..74814fcb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,20 @@ Changelog ========= +.. _v0_62a0: + +0.62a0 (2022-05-02) +------------------- + +- Datasette now runs some SQL queries in parallel. This has limited impact on performance, see `this research issue <https://github.com/simonw/datasette/issues/1727>`__ for details. +- Datasette should now be compatible with Pyodide. 
(:issue:`1733`) +- ``datasette publish cloudrun`` has a new ``--timeout`` option which can be used to increase the time limit applied by the Google Cloud build environment. Thanks, Tim Sherratt. (`#1717 <https://github.com/simonw/datasette/pull/1717>`__) +- Spaces in database names are now encoded as ``+`` rather than ``~20``. (:issue:`1701`) +- ``<Binary: 2427344 bytes>`` is now displayed as ``<Binary: 2,427,344 bytes>`` and is accompanied by tooltip showing "2.3MB". (:issue:`1712`) +- Don't show the facet option in the cog menu if faceting is not allowed. (:issue:`1683`) +- Code examples in the documentation are now all formatted using Black. (:issue:`1718`) +- ``Request.fake()`` method is now documented, see :ref:`internals_request`. + .. _v0_61_1: 0.61.1 (2022-03-23) From 847d6b1aac38c3e776e8c600eed07ba4c9ac9942 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 2 May 2022 16:32:24 -0700 Subject: [PATCH 0220/1103] Test wheel against Pyodide, refs #1737, #1733 --- .github/workflows/test-pyodide.yml | 28 ++++++++++++++++++ test-in-pyodide-with-shot-scraper.sh | 43 ++++++++++++++++++++++++++++ 2 files changed, 71 insertions(+) create mode 100644 .github/workflows/test-pyodide.yml create mode 100755 test-in-pyodide-with-shot-scraper.sh diff --git a/.github/workflows/test-pyodide.yml b/.github/workflows/test-pyodide.yml new file mode 100644 index 00000000..3715d055 --- /dev/null +++ b/.github/workflows/test-pyodide.yml @@ -0,0 +1,28 @@ +name: Test in Pyodide with shot-scraper + +on: + workflow_dispatch: + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python 3.10 + uses: actions/setup-python@v3 + with: + python-version: "3.10" + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Cache Playwright browsers + uses: actions/cache@v2 + with: + path: ~/.cache/ms-playwright/ + key: ${{ runner.os }}-browsers + - name: Install Playwright dependencies + run: | + pip install shot-scraper + shot-scraper install + - name: Run test + run: | + ./test-in-pyodide-with-shot-scraper.sh diff --git a/test-in-pyodide-with-shot-scraper.sh b/test-in-pyodide-with-shot-scraper.sh new file mode 100755 index 00000000..0f29c0e0 --- /dev/null +++ b/test-in-pyodide-with-shot-scraper.sh @@ -0,0 +1,43 @@ +#!/bin/bash + +# Build the wheel +python3 -m build + +# Find name of wheel +wheel=$(basename $(ls dist/*.whl)) +# strip off the dist/ + + +# Create a blank index page +echo ' +<script src="https://cdn.jsdelivr.net/pyodide/v0.20.0/full/pyodide.js"></script> +' > dist/index.html + +# Run a server for that dist/ folder +cd dist +python3 -m http.server 8529 & +cd .. 
+ +shot-scraper javascript http://localhost:8529/ " +async () => { + let pyodide = await loadPyodide(); + await pyodide.loadPackage(['micropip', 'ssl', 'setuptools']); + let output = await pyodide.runPythonAsync(\` + import micropip + await micropip.install('h11==0.12.0') + await micropip.install('http://localhost:8529/$wheel') + import ssl + import setuptools + from datasette.app import Datasette + ds = Datasette(memory=True, settings={'num_sql_threads': 0}) + (await ds.client.get('/_memory.json?sql=select+55+as+itworks&_shape=array')).text + \`); + if (JSON.parse(output)[0].itworks != 55) { + throw 'Got ' + output + ', expected itworks: 55'; + } + return 'Test passed!'; +} +" + +# Shut down the server +pkill -f 'http.server 8529' From c0cbcf2aba0d8393ba464acc515803ebf2eeda12 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 2 May 2022 16:36:58 -0700 Subject: [PATCH 0221/1103] Tweaks to test scripts, refs #1737 --- .github/workflows/test-pyodide.yml | 2 +- test-in-pyodide-with-shot-scraper.sh | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test-pyodide.yml b/.github/workflows/test-pyodide.yml index 3715d055..beb6a5fb 100644 --- a/.github/workflows/test-pyodide.yml +++ b/.github/workflows/test-pyodide.yml @@ -21,7 +21,7 @@ jobs: key: ${{ runner.os }}-browsers - name: Install Playwright dependencies run: | - pip install shot-scraper + pip install shot-scraper build shot-scraper install - name: Run test run: | diff --git a/test-in-pyodide-with-shot-scraper.sh b/test-in-pyodide-with-shot-scraper.sh index 0f29c0e0..e5df7398 100755 --- a/test-in-pyodide-with-shot-scraper.sh +++ b/test-in-pyodide-with-shot-scraper.sh @@ -1,12 +1,12 @@ #!/bin/bash +set -e +# So the script fails if there are any errors # Build the wheel python3 -m build -# Find name of wheel +# Find name of wheel, strip off the dist/ wheel=$(basename $(ls dist/*.whl)) -# strip off the dist/ - # Create a blank index page echo ' From d60f163528f466b1127b2935c3b6869c34fd6545 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 2 May 2022 16:40:49 -0700 Subject: [PATCH 0222/1103] Run on push and PR, closes #1737 --- .github/workflows/test-pyodide.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/test-pyodide.yml b/.github/workflows/test-pyodide.yml index beb6a5fb..1b75aade 100644 --- a/.github/workflows/test-pyodide.yml +++ b/.github/workflows/test-pyodide.yml @@ -1,6 +1,8 @@ name: Test in Pyodide with shot-scraper on: + push: + pull_request: workflow_dispatch: jobs: From 280ff372ab30df244f6c54f6f3002da57334b3d7 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 3 May 2022 07:59:18 -0700 Subject: [PATCH 0223/1103] ETag support for .db downloads, closes #1739 --- datasette/utils/testing.py | 20 ++++++++++++++++++-- datasette/views/database.py | 7 +++++++ tests/test_html.py | 10 +++++++++- 3 files changed, 34 insertions(+), 3 deletions(-) diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index 94750b1f..640c94e6 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -55,10 +55,21 @@ class TestClient: @async_to_sync async def get( - self, path, follow_redirects=False, redirect_count=0, method="GET", cookies=None + self, + path, + follow_redirects=False, + redirect_count=0, + method="GET", + cookies=None, + if_none_match=None, ): return await self._request( - path, follow_redirects, redirect_count, method, cookies + path=path, + 
follow_redirects=follow_redirects, + redirect_count=redirect_count, + method=method, + cookies=cookies, + if_none_match=if_none_match, ) @async_to_sync @@ -110,6 +121,7 @@ class TestClient: headers=None, post_body=None, content_type=None, + if_none_match=None, ): return await self._request( path, @@ -120,6 +132,7 @@ class TestClient: headers=headers, post_body=post_body, content_type=content_type, + if_none_match=if_none_match, ) async def _request( @@ -132,10 +145,13 @@ class TestClient: headers=None, post_body=None, content_type=None, + if_none_match=None, ): headers = headers or {} if content_type: headers["content-type"] = content_type + if if_none_match: + headers["if-none-match"] = if_none_match httpx_response = await self.ds.client.request( method, path, diff --git a/datasette/views/database.py b/datasette/views/database.py index 9a8aca32..bc08ba05 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -183,6 +183,13 @@ class DatabaseDownload(DataView): headers = {} if self.ds.cors: add_cors_headers(headers) + if db.hash: + etag = '"{}"'.format(db.hash) + headers["Etag"] = etag + # Has user seen this already? + if_none_match = request.headers.get("if-none-match") + if if_none_match and if_none_match == etag: + return Response("", status=304) headers["Transfer-Encoding"] = "chunked" return AsgiFileDownload( filepath, diff --git a/tests/test_html.py b/tests/test_html.py index 42f1a3ee..409fec68 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -401,7 +401,7 @@ def test_database_download_for_immutable(): assert len(soup.findAll("a", {"href": re.compile(r"\.db$")})) # Check we can actually download it download_response = client.get("/fixtures.db") - assert 200 == download_response.status + assert download_response.status == 200 # Check the content-length header exists assert "content-length" in download_response.headers content_length = download_response.headers["content-length"] @@ -413,6 +413,14 @@ def test_database_download_for_immutable(): == 'attachment; filename="fixtures.db"' ) assert download_response.headers["transfer-encoding"] == "chunked" + # ETag header should be present and match db.hash + assert "etag" in download_response.headers + etag = download_response.headers["etag"] + assert etag == '"{}"'.format(client.ds.databases["fixtures"].hash) + # Try a second download with If-None-Match: current-etag + download_response2 = client.get("/fixtures.db", if_none_match=etag) + assert download_response2.body == b"" + assert download_response2.status == 304 def test_database_download_disallowed_for_mutable(app_client): From a5acfff4bd364d30ce8878e19f9839890371ef14 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 16 May 2022 17:06:40 -0700 Subject: [PATCH 0224/1103] Empty Datasette([]) list is no longer required --- docs/testing_plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 1bbaaac1..41046bfb 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -15,7 +15,7 @@ If you use the template described in :ref:`writing_plugins_cookiecutter` your pl @pytest.mark.asyncio async def test_plugin_is_installed(): - datasette = Datasette([], memory=True) + datasette = Datasette(memory=True) response = await datasette.client.get("/-/plugins.json") assert response.status_code == 200 installed_plugins = {p["name"] for p in response.json()} From 3508bf7875f8d62b2725222f3b07747974d54b97 Mon Sep 17 00:00:00 2001 From: Simon Willison 
<swillison@gmail.com> Date: Tue, 17 May 2022 12:40:05 -0700 Subject: [PATCH 0225/1103] --nolock mode to ignore locked files, closes #1744 --- datasette/app.py | 2 ++ datasette/cli.py | 7 +++++++ datasette/database.py | 2 ++ docs/cli-reference.rst | 1 + docs/getting_started.rst | 4 +++- 5 files changed, 15 insertions(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index b7b84371..f43700d4 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -213,6 +213,7 @@ class Datasette: config_dir=None, pdb=False, crossdb=False, + nolock=False, ): assert config_dir is None or isinstance( config_dir, Path @@ -238,6 +239,7 @@ class Datasette: self.databases = collections.OrderedDict() self._refresh_schemas_lock = asyncio.Lock() self.crossdb = crossdb + self.nolock = nolock if memory or crossdb or not self.files: self.add_database(Database(self, is_memory=True), name="_memory") # memory_name is a random string so that each Datasette instance gets its own diff --git a/datasette/cli.py b/datasette/cli.py index 3c6e1b2c..8781747c 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -452,6 +452,11 @@ def uninstall(packages, yes): is_flag=True, help="Enable cross-database joins using the /_memory database", ) +@click.option( + "--nolock", + is_flag=True, + help="Ignore locking, open locked files in read-only mode", +) @click.option( "--ssl-keyfile", help="SSL key file", @@ -486,6 +491,7 @@ def serve( open_browser, create, crossdb, + nolock, ssl_keyfile, ssl_certfile, return_instance=False, @@ -545,6 +551,7 @@ def serve( version_note=version_note, pdb=pdb, crossdb=crossdb, + nolock=nolock, ) # if files is a single directory, use that as config_dir= diff --git a/datasette/database.py b/datasette/database.py index 44d32667..fa558045 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -89,6 +89,8 @@ class Database: # mode=ro or immutable=1? if self.is_mutable: qs = "?mode=ro" + if self.ds.nolock: + qs += "&nolock=1" else: qs = "?immutable=1" assert not (write and not self.is_mutable) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 2a6fbfc8..1c1aff15 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -115,6 +115,7 @@ datasette serve --help --create Create database files if they do not exist --crossdb Enable cross-database joins using the /_memory database + --nolock Ignore locking, open locked files in read-only mode --ssl-keyfile TEXT SSL key file --ssl-certfile TEXT SSL certificate file --help Show this message and exit. diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 3e357afb..502a9e5a 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -56,7 +56,9 @@ like so: :: - datasette ~/Library/Application\ Support/Google/Chrome/Default/History + datasette ~/Library/Application\ Support/Google/Chrome/Default/History --nolock + +The `--nolock` option ignores any file locks. This is safe as Datasette will open the file in read-only mode. 
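For context on what this flag does under the hood: the ``Database.connect()`` change above appends ``nolock=1`` to the SQLite URI query string for mutable files. A rough sketch of the resulting connection, where the path is hypothetical and this is not the exact code from the patch:

.. code-block:: python

    import sqlite3

    path = "/path/to/locked-file.db"  # hypothetical path to a file another app holds locked
    qs = "?mode=ro&nolock=1"  # open read-only and skip SQLite's file locking
    conn = sqlite3.connect("file:{}{}".format(path, qs), uri=True)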
Now visiting http://localhost:8001/History/downloads will show you a web interface to browse your downloads data: From 5555bc8aef043f75d2200f66de90c54aeeaa08c3 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 17 May 2022 12:43:44 -0700 Subject: [PATCH 0226/1103] How to run cog, closes #1745 --- docs/contributing.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/contributing.rst b/docs/contributing.rst index c193ba49..bddceafe 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -211,6 +211,17 @@ For added productivity, you can use use `sphinx-autobuild <https://pypi.org/proj Now browse to ``http://localhost:8000/`` to view the documentation. Any edits you make should be instantly reflected in your browser. +.. _contributing_documentation_cog: + +Running Cog +~~~~~~~~~~~ + +Some pages of documentation (in particular the :ref:`cli_reference`) are automatically updated using `Cog <https://github.com/nedbat/cog>`__. + +To update these pages, run the following command:: + + cog -r docs/*.rst + .. _contributing_continuous_deployment: Continuously deployed demo instances From b393e164dc9e962702546d6f1ad9c857b5788dc0 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 17 May 2022 12:45:28 -0700 Subject: [PATCH 0227/1103] ReST fix --- docs/getting_started.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 502a9e5a..af3a1385 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -58,7 +58,7 @@ like so: datasette ~/Library/Application\ Support/Google/Chrome/Default/History --nolock -The `--nolock` option ignores any file locks. This is safe as Datasette will open the file in read-only mode. +The ``--nolock`` option ignores any file locks. This is safe as Datasette will open the file in read-only mode. 
Now visiting http://localhost:8001/History/downloads will show you a web interface to browse your downloads data: From 7d1e004ff679b3fb4dca36d1d751a1ad16688fe6 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 17 May 2022 12:59:28 -0700 Subject: [PATCH 0228/1103] Fix test I broke in #1744 --- tests/test_cli.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_cli.py b/tests/test_cli.py index dca65f26..d0f6e26c 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -150,6 +150,7 @@ def test_metadata_yaml(): help_settings=False, pdb=False, crossdb=False, + nolock=False, open_browser=False, create=False, ssl_keyfile=None, From 0e2f6f1f82f4445a63f1251470a7778a34f5c8b9 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 18 May 2022 17:37:46 -0700 Subject: [PATCH 0229/1103] datasette-copyable is an example of register_output_renderer --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 3c9ae2e2..c0d88964 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -557,7 +557,7 @@ And here is an example ``can_render`` function which returns ``True`` only if th "atom_updated", }.issubset(columns) -Examples: `datasette-atom <https://datasette.io/plugins/datasette-atom>`_, `datasette-ics <https://datasette.io/plugins/datasette-ics>`_, `datasette-geojson <https://datasette.io/plugins/datasette-geojson>`__ +Examples: `datasette-atom <https://datasette.io/plugins/datasette-atom>`_, `datasette-ics <https://datasette.io/plugins/datasette-ics>`_, `datasette-geojson <https://datasette.io/plugins/datasette-geojson>`__, `datasette-copyable <https://datasette.io/plugins/datasette-copyable>`__ .. _plugin_register_routes: From 18a6e05887abf1ac946a6e0d36ce662dfd8aeff1 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 20 May 2022 12:05:33 -0700 Subject: [PATCH 0230/1103] Added "follow a tutorial" to getting started docs Closes #1747 --- docs/getting_started.rst | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/getting_started.rst b/docs/getting_started.rst index af3a1385..00b753a9 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -1,6 +1,8 @@ Getting started =============== +.. _getting_started_demo: + Play with a live demo --------------------- @@ -9,6 +11,16 @@ The best way to experience Datasette for the first time is with a demo: * `global-power-plants.datasettes.com <https://global-power-plants.datasettes.com/global-power-plants/global-power-plants>`__ provides a searchable database of power plants around the world, using data from the `World Resources Institude <https://www.wri.org/publication/global-power-plant-database>`__ rendered using the `datasette-cluster-map <https://github.com/simonw/datasette-cluster-map>`__ plugin. * `fivethirtyeight.datasettes.com <https://fivethirtyeight.datasettes.com/fivethirtyeight>`__ shows Datasette running against over 400 datasets imported from the `FiveThirtyEight GitHub repository <https://github.com/fivethirtyeight/data>`__. +.. _getting_started_tutorial: + +Follow a tutorial +----------------- + +Datasette has several `tutorials <https://datasette.io/tutorials>`__ to help you get started with the tool. Try one of the following: + +- `Exploring a database with Datasette <https://datasette.io/tutorials/explore>`__ shows how to use the Datasette web interface to explore a new database. 
+- `Learn SQL with Datasette <https://datasette.io/tutorials/learn-sql>`__ introduces SQL, and shows how to use that query language to ask questions of your data. + .. _getting_started_glitch: Try Datasette without installing anything using Glitch From 1465fea4798599eccfe7e8f012bd8d9adfac3039 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 20 May 2022 12:11:08 -0700 Subject: [PATCH 0231/1103] sphinx-copybutton for docs, closes #1748 --- docs/conf.py | 2 +- setup.py | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index d114bc52..351cb1b1 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -31,7 +31,7 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ["sphinx.ext.extlinks", "sphinx.ext.autodoc"] +extensions = ["sphinx.ext.extlinks", "sphinx.ext.autodoc", "sphinx_copybutton"] extlinks = { "issue": ("https://github.com/simonw/datasette/issues/%s", "#"), diff --git a/setup.py b/setup.py index ca449f02..aad05840 100644 --- a/setup.py +++ b/setup.py @@ -64,7 +64,13 @@ setup( """, setup_requires=["pytest-runner"], extras_require={ - "docs": ["sphinx_rtd_theme", "sphinx-autobuild", "codespell", "blacken-docs"], + "docs": [ + "sphinx_rtd_theme", + "sphinx-autobuild", + "codespell", + "blacken-docs", + "sphinx-copybutton", + ], "test": [ "pytest>=5.2.2,<7.2.0", "pytest-xdist>=2.2.1,<2.6", From 1d33fd03b3c211e0f48a8f3bde83880af89e4e69 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 20 May 2022 13:34:51 -0700 Subject: [PATCH 0232/1103] Switch docs theme to Furo, refs #1746 --- docs/_static/css/custom.css | 7 ++-- .../layout.html => _static/js/custom.js} | 34 ------------------- docs/_templates/base.html | 6 ++++ docs/_templates/sidebar/brand.html | 16 +++++++++ docs/_templates/sidebar/navigation.html | 11 ++++++ docs/conf.py | 24 +++---------- docs/installation.rst | 1 + docs/plugin_hooks.rst | 1 + setup.py | 2 +- 9 files changed, 45 insertions(+), 57 deletions(-) rename docs/{_templates/layout.html => _static/js/custom.js} (55%) create mode 100644 docs/_templates/base.html create mode 100644 docs/_templates/sidebar/brand.html create mode 100644 docs/_templates/sidebar/navigation.html diff --git a/docs/_static/css/custom.css b/docs/_static/css/custom.css index 4dabb725..0a6f8799 100644 --- a/docs/_static/css/custom.css +++ b/docs/_static/css/custom.css @@ -1,7 +1,8 @@ a.external { overflow-wrap: anywhere; } - -div .wy-side-nav-search > div.version { - color: rgba(0,0,0,0.75); +body[data-theme="dark"] .sidebar-logo-container { + background-color: white; + padding: 5px; + opacity: 0.6; } diff --git a/docs/_templates/layout.html b/docs/_static/js/custom.js similarity index 55% rename from docs/_templates/layout.html rename to docs/_static/js/custom.js index 785cdc7c..efca33ed 100644 --- a/docs/_templates/layout.html +++ b/docs/_static/js/custom.js @@ -1,35 +1,3 @@ -{%- extends "!layout.html" %} - -{% block htmltitle %} -{{ super() }} -<script defer data-domain="docs.datasette.io" src="https://plausible.io/js/plausible.js"></script> -{% endblock %} - -{% block sidebartitle %} - -<a href="https://datasette.io/"> - <img src="{{ pathto('_static/' + logo, 1) }}" class="logo" alt="{{ _('Logo') }}"/> -</a> - -{% if theme_display_version %} - {%- set nav_version = version %} - {% if READTHEDOCS and current_version %} - {%- set nav_version = current_version %} - {% endif %} - {% if 
nav_version %} - <div class="version"> - {{ nav_version }} - </div> - {% endif %} -{% endif %} - -{% include "searchbox.html" %} - -{% endblock %} - -{% block footer %} -{{ super() }} -<script> jQuery(function ($) { // Show banner linking to /stable/ if this is a /latest/ page if (!/\/latest\//.test(location.pathname)) { @@ -57,5 +25,3 @@ jQuery(function ($) { } }); }); -</script> -{% endblock %} diff --git a/docs/_templates/base.html b/docs/_templates/base.html new file mode 100644 index 00000000..969de5ab --- /dev/null +++ b/docs/_templates/base.html @@ -0,0 +1,6 @@ +{%- extends "!base.html" %} + +{% block site_meta %} +{{ super() }} +<script defer data-domain="docs.datasette.io" src="https://plausible.io/js/plausible.js"></script> +{% endblock %} diff --git a/docs/_templates/sidebar/brand.html b/docs/_templates/sidebar/brand.html new file mode 100644 index 00000000..8be9e8ee --- /dev/null +++ b/docs/_templates/sidebar/brand.html @@ -0,0 +1,16 @@ +<div class="sidebar-brand centered"> + {% block brand_content %} + <div class="sidebar-logo-container"> + <a href="https://datasette.io/"><img class="sidebar-logo" src="{{ logo_url }}" alt="Datasette"></a> + </div> + {%- set nav_version = version %} + {% if READTHEDOCS and current_version %} + {%- set nav_version = current_version %} + {% endif %} + {% if nav_version %} + <div class="version"> + {{ nav_version }} + </div> + {% endif %} + {% endblock brand_content %} +</div> diff --git a/docs/_templates/sidebar/navigation.html b/docs/_templates/sidebar/navigation.html new file mode 100644 index 00000000..c460a17e --- /dev/null +++ b/docs/_templates/sidebar/navigation.html @@ -0,0 +1,11 @@ +<div class="sidebar-tree"> + <ul> + <li class="toctree-l1"><a class="reference internal" href="{{ pathto(master_doc) }}">Contents</a></li> + </ul> + {{ toctree( + collapse=True, + titles_only=False, + maxdepth=3, + includehidden=True, +) }} +</div> \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py index 351cb1b1..25d2acfe 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -90,18 +90,15 @@ todo_include_todos = False # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = "sphinx_rtd_theme" +html_theme = "furo" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "logo_only": True, - "style_nav_header_background": "white", - "prev_next_buttons_location": "both", + "sidebar_hide_name": True, } - # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". @@ -112,20 +109,9 @@ html_logo = "datasette-logo.svg" html_css_files = [ "css/custom.css", ] - - -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. 
-# -# This is required for the alabaster theme -# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars -html_sidebars = { - "**": [ - "relations.html", # needs 'show_related': True theme option to display - "searchbox.html", - ] -} - +html_js_files = [ + "js/custom.js" +] # -- Options for HTMLHelp output ------------------------------------------ diff --git a/docs/installation.rst b/docs/installation.rst index e8bef9cd..a4757736 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -13,6 +13,7 @@ If you want to start making contributions to the Datasette project by installing .. contents:: :local: + :class: this-will-duplicate-information-and-it-is-still-useful-here .. _installation_basic: diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index c0d88964..7d10fe37 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -20,6 +20,7 @@ For example, you can implement the ``render_cell`` plugin hook like this even th .. contents:: List of plugin hooks :local: + :class: this-will-duplicate-information-and-it-is-still-useful-here .. _plugin_hook_prepare_connection: diff --git a/setup.py b/setup.py index aad05840..d3fcdbd1 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,7 @@ setup( setup_requires=["pytest-runner"], extras_require={ "docs": [ - "sphinx_rtd_theme", + "furo==2022.4.7", "sphinx-autobuild", "codespell", "blacken-docs", From 4446075334ea7231beb56b630bc7ec363afc2d08 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 20 May 2022 13:44:23 -0700 Subject: [PATCH 0233/1103] Append warning to the write element, refs #1746 --- docs/_static/js/custom.js | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/docs/_static/js/custom.js b/docs/_static/js/custom.js index efca33ed..91c3e306 100644 --- a/docs/_static/js/custom.js +++ b/docs/_static/js/custom.js @@ -17,11 +17,7 @@ jQuery(function ($) { </div>` ); warning.find("a").attr("href", stableUrl); - var body = $("div.body"); - if (!body.length) { - body = $("div.document"); - } - body.prepend(warning); + $("article[role=main]").prepend(warning); } }); }); From b010af7bb85856aeb44f69e7e980f617c1fc0db1 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 20 May 2022 15:23:09 -0700 Subject: [PATCH 0234/1103] Updated copyright years in documentation footer --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 25d2acfe..7ffeedd0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -51,7 +51,7 @@ master_doc = "index" # General information about the project. project = "Datasette" -copyright = "2017-2021, Simon Willison" +copyright = "2017-2022, Simon Willison" author = "Simon Willison" # Disable -- turning into – From adedd85b68ec66e03b97fb62ff4da8987734436e Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 28 May 2022 18:42:31 -0700 Subject: [PATCH 0235/1103] Clarify that request.headers names are converted to lowercase --- docs/internals.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/internals.rst b/docs/internals.rst index 18822d47..da135282 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -26,7 +26,7 @@ The request object is passed to various plugin hooks. It represents an incoming The request scheme - usually ``https`` or ``http``. ``.headers`` - dictionary (str -> str) - A dictionary of incoming HTTP request headers. + A dictionary of incoming HTTP request headers. 
Header names have been converted to lowercase. ``.cookies`` - dictionary (str -> str) A dictionary of incoming cookies From 8dd816bc76937f1e37f86acce10dc2cb4fa31e52 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 30 May 2022 15:42:38 -0700 Subject: [PATCH 0236/1103] Applied Black --- docs/conf.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 7ffeedd0..4ef6b768 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -109,9 +109,7 @@ html_logo = "datasette-logo.svg" html_css_files = [ "css/custom.css", ] -html_js_files = [ - "js/custom.js" -] +html_js_files = ["js/custom.js"] # -- Options for HTMLHelp output ------------------------------------------ From 2e9751672d4fe329b3c359d5b7b1992283185820 Mon Sep 17 00:00:00 2001 From: Naveen <172697+naveensrinivasan@users.noreply.github.com> Date: Tue, 31 May 2022 14:28:40 -0500 Subject: [PATCH 0237/1103] chore: Set permissions for GitHub actions (#1740) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Restrict the GitHub token permissions only to the required ones; this way, even if the attackers will succeed in compromising your workflow, they won’t be able to do much. - Included permissions for the action. https://github.com/ossf/scorecard/blob/main/docs/checks.md#token-permissions https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions https://docs.github.com/en/actions/using-jobs/assigning-permissions-to-jobs [Keeping your GitHub Actions and workflows secure Part 1: Preventing pwn requests](https://securitylab.github.com/research/github-actions-preventing-pwn-requests/) Signed-off-by: naveen <172697+naveensrinivasan@users.noreply.github.com> --- .github/workflows/deploy-latest.yml | 3 +++ .github/workflows/prettier.yml | 3 +++ .github/workflows/publish.yml | 3 +++ .github/workflows/push_docker_tag.yml | 3 +++ .github/workflows/spellcheck.yml | 3 +++ .github/workflows/test-coverage.yml | 3 +++ .github/workflows/test-pyodide.yml | 3 +++ .github/workflows/test.yml | 3 +++ .github/workflows/tmate-mac.yml | 3 +++ .github/workflows/tmate.yml | 3 +++ 10 files changed, 30 insertions(+) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index a61f6629..2b94a7f1 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -5,6 +5,9 @@ on: branches: - main +permissions: + contents: read + jobs: deploy: runs-on: ubuntu-latest diff --git a/.github/workflows/prettier.yml b/.github/workflows/prettier.yml index 9dfe7ee0..ded41040 100644 --- a/.github/workflows/prettier.yml +++ b/.github/workflows/prettier.yml @@ -2,6 +2,9 @@ name: Check JavaScript for conformance with Prettier on: [push] +permissions: + contents: read + jobs: prettier: runs-on: ubuntu-latest diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 3e4f8146..9ef09d2e 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -4,6 +4,9 @@ on: release: types: [created] +permissions: + contents: read + jobs: test: runs-on: ubuntu-latest diff --git a/.github/workflows/push_docker_tag.yml b/.github/workflows/push_docker_tag.yml index 9a3969f0..afe8d6b2 100644 --- a/.github/workflows/push_docker_tag.yml +++ b/.github/workflows/push_docker_tag.yml @@ -6,6 +6,9 @@ on: version_tag: description: Tag to build and push +permissions: + contents: read + jobs: deploy_docker: runs-on: ubuntu-latest diff --git 
a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml index 2e24d3eb..a2621ecc 100644 --- a/.github/workflows/spellcheck.yml +++ b/.github/workflows/spellcheck.yml @@ -2,6 +2,9 @@ name: Check spelling in documentation on: [push, pull_request] +permissions: + contents: read + jobs: spellcheck: runs-on: ubuntu-latest diff --git a/.github/workflows/test-coverage.yml b/.github/workflows/test-coverage.yml index 1d1cf332..bd720664 100644 --- a/.github/workflows/test-coverage.yml +++ b/.github/workflows/test-coverage.yml @@ -7,6 +7,9 @@ on: pull_request: branches: - main +permissions: + contents: read + jobs: test: runs-on: ubuntu-latest diff --git a/.github/workflows/test-pyodide.yml b/.github/workflows/test-pyodide.yml index 1b75aade..bc9593a8 100644 --- a/.github/workflows/test-pyodide.yml +++ b/.github/workflows/test-pyodide.yml @@ -5,6 +5,9 @@ on: pull_request: workflow_dispatch: +permissions: + contents: read + jobs: test: runs-on: ubuntu-latest diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8d916e49..90b6555e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -2,6 +2,9 @@ name: Test on: [push, pull_request] +permissions: + contents: read + jobs: test: runs-on: ubuntu-latest diff --git a/.github/workflows/tmate-mac.yml b/.github/workflows/tmate-mac.yml index 46be117e..fcee0f21 100644 --- a/.github/workflows/tmate-mac.yml +++ b/.github/workflows/tmate-mac.yml @@ -3,6 +3,9 @@ name: tmate session mac on: workflow_dispatch: +permissions: + contents: read + jobs: build: runs-on: macos-latest diff --git a/.github/workflows/tmate.yml b/.github/workflows/tmate.yml index 02e7bd33..9792245d 100644 --- a/.github/workflows/tmate.yml +++ b/.github/workflows/tmate.yml @@ -3,6 +3,9 @@ name: tmate session on: workflow_dispatch: +permissions: + contents: read + jobs: build: runs-on: ubuntu-latest From e780b2f5d662ef3579d801d33567440055d4e84d Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 20 Jun 2022 10:54:23 -0700 Subject: [PATCH 0238/1103] Trying out one-sentence-per-line As suggested here: https://sive.rs/1s Markdown and reStructuredText will display this as if it is a single paragraph, even though the sentences themselves are separated by newlines. This could result in more useful diffs. Trying it out on this page first. --- docs/facets.rst | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/docs/facets.rst b/docs/facets.rst index 0228aa84..2a2eb039 100644 --- a/docs/facets.rst +++ b/docs/facets.rst @@ -3,7 +3,9 @@ Facets ====== -Datasette facets can be used to add a faceted browse interface to any database table. With facets, tables are displayed along with a summary showing the most common values in specified columns. These values can be selected to further filter the table. +Datasette facets can be used to add a faceted browse interface to any database table. +With facets, tables are displayed along with a summary showing the most common values in specified columns. +These values can be selected to further filter the table. .. image:: facets.png @@ -12,11 +14,13 @@ Facets can be specified in two ways: using query string parameters, or in ``meta Facets in query strings ----------------------- -To turn on faceting for specific columns on a Datasette table view, add one or more ``_facet=COLUMN`` parameters to the URL. 
For example, if you want to turn on facets for the ``city_id`` and ``state`` columns, construct a URL that looks like this:: +To turn on faceting for specific columns on a Datasette table view, add one or more ``_facet=COLUMN`` parameters to the URL. +For example, if you want to turn on facets for the ``city_id`` and ``state`` columns, construct a URL that looks like this:: /dbname/tablename?_facet=state&_facet=city_id -This works for both the HTML interface and the ``.json`` view. When enabled, facets will cause a ``facet_results`` block to be added to the JSON output, looking something like this: +This works for both the HTML interface and the ``.json`` view. +When enabled, facets will cause a ``facet_results`` block to be added to the JSON output, looking something like this: .. code-block:: json @@ -86,7 +90,8 @@ This works for both the HTML interface and the ``.json`` view. When enabled, fac If Datasette detects that a column is a foreign key, the ``"label"`` property will be automatically derived from the detected label column on the referenced table. -The default number of facet results returned is 30, controlled by the :ref:`setting_default_facet_size` setting. You can increase this on an individual page by adding ``?_facet_size=100`` to the query string, up to a maximum of :ref:`setting_max_returned_rows` (which defaults to 1000). +The default number of facet results returned is 30, controlled by the :ref:`setting_default_facet_size` setting. +You can increase this on an individual page by adding ``?_facet_size=100`` to the query string, up to a maximum of :ref:`setting_max_returned_rows` (which defaults to 1000). .. _facets_metadata: @@ -137,12 +142,14 @@ For the currently filtered data are there any columns which, if applied as a fac * Will return less unique options than the total number of filtered rows * And the query used to evaluate this criteria can be completed in under 50ms -That last point is particularly important: Datasette runs a query for every column that is displayed on a page, which could get expensive - so to avoid slow load times it sets a time limit of just 50ms for each of those queries. This means suggested facets are unlikely to appear for tables with millions of records in them. +That last point is particularly important: Datasette runs a query for every column that is displayed on a page, which could get expensive - so to avoid slow load times it sets a time limit of just 50ms for each of those queries. +This means suggested facets are unlikely to appear for tables with millions of records in them. Speeding up facets with indexes ------------------------------- -The performance of facets can be greatly improved by adding indexes on the columns you wish to facet by. Adding indexes can be performed using the ``sqlite3`` command-line utility. Here's how to add an index on the ``state`` column in a table called ``Food_Trucks``:: +The performance of facets can be greatly improved by adding indexes on the columns you wish to facet by. +Adding indexes can be performed using the ``sqlite3`` command-line utility. Here's how to add an index on the ``state`` column in a table called ``Food_Trucks``:: $ sqlite3 mydatabase.db SQLite version 3.19.3 2017-06-27 16:48:08 @@ -169,6 +176,7 @@ Example here: `latest.datasette.io/fixtures/facetable?_facet_array=tags <https:/ Facet by date ------------- -If Datasette finds any columns that contain dates in the first 100 values, it will offer a faceting interface against the dates of those values. 
This works especially well against timestamp values such as ``2019-03-01 12:44:00``. +If Datasette finds any columns that contain dates in the first 100 values, it will offer a faceting interface against the dates of those values. +This works especially well against timestamp values such as ``2019-03-01 12:44:00``. Example here: `latest.datasette.io/fixtures/facetable?_facet_date=created <https://latest.datasette.io/fixtures/facetable?_facet_date=created>`__ From 00e59ec461dc0150772b999c7cc15fcb9b507d58 Mon Sep 17 00:00:00 2001 From: "M. Nasimul Haque" <nasim.haque@gmail.com> Date: Mon, 20 Jun 2022 19:05:44 +0100 Subject: [PATCH 0239/1103] Extract facet pieces of table.html into included templates Thanks, @nsmgr8 --- datasette/templates/_facet_results.html | 28 ++++++++++++++++++ datasette/templates/_suggested_facets.html | 3 ++ datasette/templates/table.html | 33 ++-------------------- 3 files changed, 33 insertions(+), 31 deletions(-) create mode 100644 datasette/templates/_facet_results.html create mode 100644 datasette/templates/_suggested_facets.html diff --git a/datasette/templates/_facet_results.html b/datasette/templates/_facet_results.html new file mode 100644 index 00000000..d0cbcf77 --- /dev/null +++ b/datasette/templates/_facet_results.html @@ -0,0 +1,28 @@ +<div class="facet-results"> + {% for facet_info in sorted_facet_results %} + <div class="facet-info facet-{{ database|to_css_class }}-{{ table|to_css_class }}-{{ facet_info.name|to_css_class }}" id="facet-{{ facet_info.name|to_css_class }}" data-column="{{ facet_info.name }}"> + <p class="facet-info-name"> + <strong>{{ facet_info.name }}{% if facet_info.type != "column" %} ({{ facet_info.type }}){% endif %} + <span class="facet-info-total">{% if facet_info.truncated %}>{% endif %}{{ facet_info.results|length }}</span> + </strong> + {% if facet_info.hideable %} + <a href="{{ facet_info.toggle_url }}" class="cross">✖</a> + {% endif %} + </p> + <ul class="tight-bullets"> + {% for facet_value in facet_info.results %} + {% if not facet_value.selected %} + <li><a href="{{ facet_value.toggle_url }}">{{ (facet_value.label | string()) or "-" }}</a> {{ "{:,}".format(facet_value.count) }}</li> + {% else %} + <li>{{ facet_value.label or "-" }} · {{ "{:,}".format(facet_value.count) }} <a href="{{ facet_value.toggle_url }}" class="cross">✖</a></li> + {% endif %} + {% endfor %} + {% if facet_info.truncated %} + <li class="facet-truncated">{% if request.args._facet_size != "max" -%} + <a href="{{ path_with_replaced_args(request, {"_facet_size": "max"}) }}">…</a>{% else -%}…{% endif %} + </li> + {% endif %} + </ul> + </div> + {% endfor %} +</div> diff --git a/datasette/templates/_suggested_facets.html b/datasette/templates/_suggested_facets.html new file mode 100644 index 00000000..ec98fb36 --- /dev/null +++ b/datasette/templates/_suggested_facets.html @@ -0,0 +1,3 @@ +<p class="suggested-facets"> + Suggested facets: {% for facet in suggested_facets %}<a href="{{ facet.toggle_url }}#facet-{{ facet.name|to_css_class }}">{{ facet.name }}</a>{% if facet.type %} ({{ facet.type }}){% endif %}{% if not loop.last %}, {% endif %}{% endfor %} +</p> diff --git a/datasette/templates/table.html b/datasette/templates/table.html index a9e88330..a86398ea 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -142,9 +142,7 @@ <p class="export-links">This data as {% for name, url in renderers.items() %}<a href="{{ url }}">{{ name }}</a>{{ ", " if not loop.last }}{% endfor %}{% if display_rows %}, <a href="{{ url_csv }}">CSV</a> 
(<a href="#export">advanced</a>){% endif %}</p> {% if suggested_facets %} - <p class="suggested-facets"> - Suggested facets: {% for facet in suggested_facets %}<a href="{{ facet.toggle_url }}#facet-{{ facet.name|to_css_class }}">{{ facet.name }}</a>{% if facet.type %} ({{ facet.type }}){% endif %}{% if not loop.last %}, {% endif %}{% endfor %} - </p> + {% include "_suggested_facets.html" %} {% endif %} {% if facets_timed_out %} @@ -152,34 +150,7 @@ {% endif %} {% if facet_results %} - <div class="facet-results"> - {% for facet_info in sorted_facet_results %} - <div class="facet-info facet-{{ database|to_css_class }}-{{ table|to_css_class }}-{{ facet_info.name|to_css_class }}" id="facet-{{ facet_info.name|to_css_class }}" data-column="{{ facet_info.name }}"> - <p class="facet-info-name"> - <strong>{{ facet_info.name }}{% if facet_info.type != "column" %} ({{ facet_info.type }}){% endif %} - <span class="facet-info-total">{% if facet_info.truncated %}>{% endif %}{{ facet_info.results|length }}</span> - </strong> - {% if facet_info.hideable %} - <a href="{{ facet_info.toggle_url }}" class="cross">✖</a> - {% endif %} - </p> - <ul class="tight-bullets"> - {% for facet_value in facet_info.results %} - {% if not facet_value.selected %} - <li><a href="{{ facet_value.toggle_url }}">{{ (facet_value.label | string()) or "-" }}</a> {{ "{:,}".format(facet_value.count) }}</li> - {% else %} - <li>{{ facet_value.label or "-" }} · {{ "{:,}".format(facet_value.count) }} <a href="{{ facet_value.toggle_url }}" class="cross">✖</a></li> - {% endif %} - {% endfor %} - {% if facet_info.truncated %} - <li class="facet-truncated">{% if request.args._facet_size != "max" -%} - <a href="{{ path_with_replaced_args(request, {"_facet_size": "max"}) }}">…</a>{% else -%}…{% endif %} - </li> - {% endif %} - </ul> - </div> - {% endfor %} - </div> + {% include "_facet_results.html" %} {% endif %} {% include custom_table_templates %} From 9f1eb0d4eac483b953392157bd9fd6cc4df37de7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Jun 2022 10:40:24 -0700 Subject: [PATCH 0240/1103] Bump black from 22.1.0 to 22.6.0 (#1763) Bumps [black](https://github.com/psf/black) from 22.1.0 to 22.6.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/22.1.0...22.6.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index d3fcdbd1..29cb77bf 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ setup( "pytest-xdist>=2.2.1,<2.6", "pytest-asyncio>=0.17,<0.19", "beautifulsoup4>=4.8.1,<4.12.0", - "black==22.1.0", + "black==22.6.0", "blacken-docs==1.12.1", "pytest-timeout>=1.4.2,<2.2", "trustme>=0.7,<0.10", From 6373bb341457e5becfd5b67792ac2c8b9ed7c384 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 7 Jul 2022 09:30:49 -0700 Subject: [PATCH 0241/1103] Expose current SQLite row to render_cell hook, closes #1300 --- datasette/hookspecs.py | 2 +- datasette/views/database.py | 1 + datasette/views/table.py | 1 + docs/plugin_hooks.rst | 9 ++++++--- tests/plugins/my_plugin.py | 3 ++- tests/test_plugins.py | 5 +++-- 6 files changed, 14 insertions(+), 7 deletions(-) diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 8f4fecab..c84db0a3 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -60,7 +60,7 @@ def publish_subcommand(publish): @hookspec -def render_cell(value, column, table, database, datasette): +def render_cell(row, value, column, table, database, datasette): """Customize rendering of HTML table cell values""" diff --git a/datasette/views/database.py b/datasette/views/database.py index bc08ba05..42058752 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -375,6 +375,7 @@ class QueryView(DataView): # pylint: disable=no-member plugin_display_value = None for candidate in pm.hook.render_cell( + row=row, value=value, column=column, table=None, diff --git a/datasette/views/table.py b/datasette/views/table.py index 23289b29..cd4be823 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -895,6 +895,7 @@ async def display_columns_and_rows( # pylint: disable=no-member plugin_display_value = None for candidate in pm.hook.render_cell( + row=row, value=value, column=column, table=table_name, diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 7d10fe37..f5c3ee83 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -373,12 +373,15 @@ Examples: `datasette-publish-fly <https://datasette.io/plugins/datasette-publish .. _plugin_hook_render_cell: -render_cell(value, column, table, database, datasette) ------------------------------------------------------- +render_cell(row, value, column, table, database, datasette) +----------------------------------------------------------- Lets you customize the display of values within table cells in the HTML table view. 
-``value`` - string, integer or None +``row`` - ``sqlite.Row`` + The SQLite row object that the value being rendered is part of + +``value`` - string, integer, float, bytes or None The value that was loaded from the database ``column`` - string diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 1c9b0575..53613b7d 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -98,12 +98,13 @@ def extra_body_script( @hookimpl -def render_cell(value, column, table, database, datasette): +def render_cell(row, value, column, table, database, datasette): async def inner(): # Render some debug output in cell with value RENDER_CELL_DEMO if value == "RENDER_CELL_DEMO": return json.dumps( { + "row": dict(row), "column": column, "table": table, "database": database, diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 15bde962..4a7ad7c6 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -181,12 +181,13 @@ def test_hook_render_cell_demo(app_client): response = app_client.get("/fixtures/simple_primary_key?id=4") soup = Soup(response.body, "html.parser") td = soup.find("td", {"class": "col-content"}) - assert { + assert json.loads(td.string) == { + "row": {"id": "4", "content": "RENDER_CELL_DEMO"}, "column": "content", "table": "simple_primary_key", "database": "fixtures", "config": {"depth": "table", "special": "this-is-simple_primary_key"}, - } == json.loads(td.string) + } @pytest.mark.parametrize( From 035dc5e7b95142d4a700819a8cc4ff64aefe4efe Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 9 Jul 2022 10:25:37 -0700 Subject: [PATCH 0242/1103] More than 90 plugins now --- docs/writing_plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index 9aee70f6..01ee8c90 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -5,7 +5,7 @@ Writing plugins You can write one-off plugins that apply to just one Datasette instance, or you can write plugins which can be installed using ``pip`` and can be shipped to the Python Package Index (`PyPI <https://pypi.org/>`__) for other people to install. -Want to start by looking at an example? The `Datasette plugins directory <https://datasette.io/plugins>`__ lists more than 50 open source plugins with code you can explore. The :ref:`plugin hooks <plugin_hooks>` page includes links to example plugins for each of the documented hooks. +Want to start by looking at an example? The `Datasette plugins directory <https://datasette.io/plugins>`__ lists more than 90 open source plugins with code you can explore. The :ref:`plugin hooks <plugin_hooks>` page includes links to example plugins for each of the documented hooks. .. 
_writing_plugins_one_off: From 5d76c1f81b2d978f48b85c70d041a2142cf8ee26 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 14 Jul 2022 15:03:33 -0700 Subject: [PATCH 0243/1103] Discord badge Refs https://github.com/simonw/datasette.io/issues/112 --- README.md | 1 + docs/index.rst | 2 ++ 2 files changed, 3 insertions(+) diff --git a/README.md b/README.md index 557d9290..c57ee604 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,7 @@ [![Documentation Status](https://readthedocs.org/projects/datasette/badge/?version=latest)](https://docs.datasette.io/en/latest/?badge=latest) [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/simonw/datasette/blob/main/LICENSE) [![docker: datasette](https://img.shields.io/badge/docker-datasette-blue)](https://hub.docker.com/r/datasetteproject/datasette) +[![discord](https://img.shields.io/discord/823971286308356157?label=Discord)](https://discord.gg/ktd74dm5mw) *An open source multi-tool for exploring and publishing data* diff --git a/docs/index.rst b/docs/index.rst index a2888822..62ed70f8 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -16,6 +16,8 @@ datasette| :target: https://github.com/simonw/datasette/blob/main/LICENSE .. |docker: datasette| image:: https://img.shields.io/badge/docker-datasette-blue :target: https://hub.docker.com/r/datasetteproject/datasette +.. |discord| image:: https://img.shields.io/discord/823971286308356157?label=Discord + :target: https://discord.gg/ktd74dm5mw *An open source multi-tool for exploring and publishing data* From c133545fe9c7ac2d509e55bf4bf6164bfbe892ad Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 14 Jul 2022 15:04:38 -0700 Subject: [PATCH 0244/1103] Make discord badge lowercase Refs https://github.com/simonw/datasette.io/issues/112 --- README.md | 2 +- docs/index.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index c57ee604..032180aa 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ [![Documentation Status](https://readthedocs.org/projects/datasette/badge/?version=latest)](https://docs.datasette.io/en/latest/?badge=latest) [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/simonw/datasette/blob/main/LICENSE) [![docker: datasette](https://img.shields.io/badge/docker-datasette-blue)](https://hub.docker.com/r/datasetteproject/datasette) -[![discord](https://img.shields.io/discord/823971286308356157?label=Discord)](https://discord.gg/ktd74dm5mw) +[![discord](https://img.shields.io/discord/823971286308356157?label=discord)](https://discord.gg/ktd74dm5mw) *An open source multi-tool for exploring and publishing data* diff --git a/docs/index.rst b/docs/index.rst index 62ed70f8..051898b1 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -16,7 +16,7 @@ datasette| :target: https://github.com/simonw/datasette/blob/main/LICENSE .. |docker: datasette| image:: https://img.shields.io/badge/docker-datasette-blue :target: https://hub.docker.com/r/datasetteproject/datasette -.. |discord| image:: https://img.shields.io/discord/823971286308356157?label=Discord +.. 
|discord| image:: https://img.shields.io/discord/823971286308356157?label=discord :target: https://discord.gg/ktd74dm5mw *An open source multi-tool for exploring and publishing data* From 950cc7677f65aa2543067b3bbfc2b6acb98b62c8 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 14 Jul 2022 15:18:28 -0700 Subject: [PATCH 0245/1103] Fix missing Discord image Refs https://github.com/simonw/datasette.io/issues/112 --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 051898b1..efe196b3 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,7 +2,7 @@ Datasette ========= |PyPI| |Changelog| |Python 3.x| |Tests| |License| |docker: -datasette| +datasette| |discord| .. |PyPI| image:: https://img.shields.io/pypi/v/datasette.svg :target: https://pypi.org/project/datasette/ From 8188f55efc0fcca1be692b0d0c875f2d1ee99f17 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 17 Jul 2022 15:24:16 -0700 Subject: [PATCH 0246/1103] Rename handle_500 to handle_exception, refs #1770 --- datasette/app.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f43700d4..43e60dbc 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1275,7 +1275,7 @@ class DatasetteRouter: except NotFound as exception: return await self.handle_404(request, send, exception) except Exception as exception: - return await self.handle_500(request, send, exception) + return await self.handle_exception(request, send, exception) async def handle_404(self, request, send, exception=None): # If path contains % encoding, redirect to tilde encoding @@ -1354,7 +1354,7 @@ class DatasetteRouter: view_name="page", ) except NotFoundExplicit as e: - await self.handle_500(request, send, e) + await self.handle_exception(request, send, e) return # Pull content-type out into separate parameter content_type = "text/html; charset=utf-8" @@ -1369,9 +1369,9 @@ class DatasetteRouter: content_type=content_type, ) else: - await self.handle_500(request, send, exception or NotFound("404")) + await self.handle_exception(request, send, exception or NotFound("404")) - async def handle_500(self, request, send, exception): + async def handle_exception(self, request, send, exception): if self.ds.pdb: import pdb From c09c53f3455a7b9574cf7695478f2b87d20897db Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 17 Jul 2022 16:24:39 -0700 Subject: [PATCH 0247/1103] New handle_exception plugin hook, refs #1770 Also refs: - https://github.com/simonw/datasette-sentry/issues/1 - https://github.com/simonw/datasette-show-errors/issues/2 --- datasette/app.py | 97 +++++++++-------------------------- datasette/forbidden.py | 20 ++++++++ datasette/handle_exception.py | 74 ++++++++++++++++++++++++++ datasette/hookspecs.py | 5 ++ datasette/plugins.py | 2 + docs/plugin_hooks.rst | 78 ++++++++++++++++++++-------- tests/fixtures.py | 1 + tests/plugins/my_plugin_2.py | 18 +++++++ tests/test_permissions.py | 1 + tests/test_plugins.py | 14 +++++ 10 files changed, 215 insertions(+), 95 deletions(-) create mode 100644 datasette/forbidden.py create mode 100644 datasette/handle_exception.py diff --git a/datasette/app.py b/datasette/app.py index 43e60dbc..edd05bb3 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -16,7 +16,6 @@ import re import secrets import sys import threading -import traceback import urllib.parse from concurrent import futures from pathlib import Path @@ -27,7 
+26,7 @@ from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader from jinja2.environment import Template from jinja2.exceptions import TemplateNotFound -from .views.base import DatasetteError, ureg +from .views.base import ureg from .views.database import DatabaseDownload, DatabaseView from .views.index import IndexView from .views.special import ( @@ -49,7 +48,6 @@ from .utils import ( PrefixedUrlString, SPATIALITE_FUNCTIONS, StartupError, - add_cors_headers, async_call_with_supported_arguments, await_me_maybe, call_with_supported_arguments, @@ -87,11 +85,6 @@ from .tracer import AsgiTracer from .plugins import pm, DEFAULT_PLUGINS, get_plugins from .version import __version__ -try: - import rich -except ImportError: - rich = None - app_root = Path(__file__).parent.parent # https://github.com/simonw/datasette/issues/283#issuecomment-781591015 @@ -1274,6 +1267,16 @@ class DatasetteRouter: return except NotFound as exception: return await self.handle_404(request, send, exception) + except Forbidden as exception: + # Try the forbidden() plugin hook + for custom_response in pm.hook.forbidden( + datasette=self.ds, request=request, message=exception.args[0] + ): + custom_response = await await_me_maybe(custom_response) + assert ( + custom_response + ), "Default forbidden() hook should have been called" + return await custom_response.asgi_send(send) except Exception as exception: return await self.handle_exception(request, send, exception) @@ -1372,72 +1375,20 @@ class DatasetteRouter: await self.handle_exception(request, send, exception or NotFound("404")) async def handle_exception(self, request, send, exception): - if self.ds.pdb: - import pdb + responses = [] + for hook in pm.hook.handle_exception( + datasette=self.ds, + request=request, + exception=exception, + ): + response = await await_me_maybe(hook) + if response is not None: + responses.append(response) - pdb.post_mortem(exception.__traceback__) - - if rich is not None: - rich.get_console().print_exception(show_locals=True) - - title = None - if isinstance(exception, Forbidden): - status = 403 - info = {} - message = exception.args[0] - # Try the forbidden() plugin hook - for custom_response in pm.hook.forbidden( - datasette=self.ds, request=request, message=message - ): - custom_response = await await_me_maybe(custom_response) - if custom_response is not None: - await custom_response.asgi_send(send) - return - elif isinstance(exception, Base400): - status = exception.status - info = {} - message = exception.args[0] - elif isinstance(exception, DatasetteError): - status = exception.status - info = exception.error_dict - message = exception.message - if exception.message_is_html: - message = Markup(message) - title = exception.title - else: - status = 500 - info = {} - message = str(exception) - traceback.print_exc() - templates = [f"{status}.html", "error.html"] - info.update( - { - "ok": False, - "error": message, - "status": status, - "title": title, - } - ) - headers = {} - if self.ds.cors: - add_cors_headers(headers) - if request.path.split("?")[0].endswith(".json"): - await asgi_send_json(send, info, status=status, headers=headers) - else: - template = self.ds.jinja_env.select_template(templates) - await asgi_send_html( - send, - await template.render_async( - dict( - info, - urls=self.ds.urls, - app_css_hash=self.ds.app_css_hash(), - menu_links=lambda: [], - ) - ), - status=status, - headers=headers, - ) + assert responses, "Default exception handler should have returned something" + # Even if there are 
multiple responses use just the first one + response = responses[0] + await response.asgi_send(send) _cleaner_task_str_re = re.compile(r"\S*site-packages/") diff --git a/datasette/forbidden.py b/datasette/forbidden.py new file mode 100644 index 00000000..156a44d4 --- /dev/null +++ b/datasette/forbidden.py @@ -0,0 +1,20 @@ +from os import stat +from datasette import hookimpl, Response + + +@hookimpl(trylast=True) +def forbidden(datasette, request, message): + async def inner(): + return Response.html( + await datasette.render_template( + "error.html", + { + "title": "Forbidden", + "error": message, + }, + request=request, + ), + status=403, + ) + + return inner diff --git a/datasette/handle_exception.py b/datasette/handle_exception.py new file mode 100644 index 00000000..8b7e83e3 --- /dev/null +++ b/datasette/handle_exception.py @@ -0,0 +1,74 @@ +from datasette import hookimpl, Response +from .utils import await_me_maybe, add_cors_headers +from .utils.asgi import ( + Base400, + Forbidden, +) +from .views.base import DatasetteError +from markupsafe import Markup +import pdb +import traceback +from .plugins import pm + +try: + import rich +except ImportError: + rich = None + + +@hookimpl(trylast=True) +def handle_exception(datasette, request, exception): + async def inner(): + if datasette.pdb: + pdb.post_mortem(exception.__traceback__) + + if rich is not None: + rich.get_console().print_exception(show_locals=True) + + title = None + if isinstance(exception, Base400): + status = exception.status + info = {} + message = exception.args[0] + elif isinstance(exception, DatasetteError): + status = exception.status + info = exception.error_dict + message = exception.message + if exception.message_is_html: + message = Markup(message) + title = exception.title + else: + status = 500 + info = {} + message = str(exception) + traceback.print_exc() + templates = [f"{status}.html", "error.html"] + info.update( + { + "ok": False, + "error": message, + "status": status, + "title": title, + } + ) + headers = {} + if datasette.cors: + add_cors_headers(headers) + if request.path.split("?")[0].endswith(".json"): + return Response.json(info, status=status, headers=headers) + else: + template = datasette.jinja_env.select_template(templates) + return Response.html( + await template.render_async( + dict( + info, + urls=datasette.urls, + app_css_hash=datasette.app_css_hash(), + menu_links=lambda: [], + ) + ), + status=status, + headers=headers, + ) + + return inner diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index c84db0a3..a5fb536f 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -138,3 +138,8 @@ def database_actions(datasette, actor, database, request): @hookspec def skip_csrf(datasette, scope): """Mechanism for skipping CSRF checks for certain requests""" + + +@hookspec +def handle_exception(datasette, request, exception): + """Handle an uncaught exception. 
Can return a Response or None.""" diff --git a/datasette/plugins.py b/datasette/plugins.py index 76b46a47..fef0c8e9 100644 --- a/datasette/plugins.py +++ b/datasette/plugins.py @@ -15,6 +15,8 @@ DEFAULT_PLUGINS = ( "datasette.default_magic_parameters", "datasette.blob_renderer", "datasette.default_menu_links", + "datasette.handle_exception", + "datasette.forbidden", ) pm = pluggy.PluginManager("datasette") diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index f5c3ee83..6020a941 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -107,8 +107,8 @@ Extra template variables that should be made available in the rendered template ``view_name`` - string The name of the view being displayed. (``index``, ``database``, ``table``, and ``row`` are the most important ones.) -``request`` - object or None - The current HTTP :ref:`internals_request`. This can be ``None`` if the request object is not available. +``request`` - :ref:`internals_request` or None + The current HTTP request. This can be ``None`` if the request object is not available. ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` @@ -504,7 +504,7 @@ When a request is received, the ``"render"`` callback function is called with ze The table or view, if one is being rendered. ``request`` - :ref:`internals_request` - The incoming HTTP request. + The current HTTP request. ``view_name`` - string The name of the current view being called. ``index``, ``database``, ``table``, and ``row`` are the most important ones. @@ -599,8 +599,8 @@ The optional view function arguments are as follows: ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. -``request`` - Request object - The current HTTP :ref:`internals_request`. +``request`` - :ref:`internals_request` + The current HTTP request. ``scope`` - dictionary The incoming ASGI scope dictionary. @@ -947,8 +947,8 @@ actor_from_request(datasette, request) ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. -``request`` - object - The current HTTP :ref:`internals_request`. +``request`` - :ref:`internals_request` + The current HTTP request. This is part of Datasette's :ref:`authentication and permissions system <authentication>`. The function should attempt to authenticate an actor (either a user or an API actor of some sort) based on information in the request. @@ -1010,8 +1010,8 @@ Example: `datasette-auth-tokens <https://datasette.io/plugins/datasette-auth-tok filters_from_request(request, database, table, datasette) --------------------------------------------------------- -``request`` - object - The current HTTP :ref:`internals_request`. +``request`` - :ref:`internals_request` + The current HTTP request. ``database`` - string The name of the database. @@ -1178,8 +1178,8 @@ forbidden(datasette, request, message) ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. -``request`` - object - The current HTTP :ref:`internals_request`. +``request`` - :ref:`internals_request` + The current HTTP request. ``message`` - string A message hinting at why the request was forbidden. 
@@ -1206,21 +1206,55 @@ The function can alternatively return an awaitable function if it needs to make .. code-block:: python - from datasette import hookimpl - from datasette.utils.asgi import Response + from datasette import hookimpl, Response @hookimpl def forbidden(datasette): async def inner(): return Response.html( - await datasette.render_template( - "forbidden.html" - ) + await datasette.render_template("render_message.html", request=request) ) return inner +.. _plugin_hook_handle_exception: + +handle_exception(datasette, request, exception) +----------------------------------------------- + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + +``request`` - :ref:`internals_request` + The current HTTP request. + +``exception`` - ``Exception`` + The exception that was raised. + +This hook is called any time an unexpected exception is raised. You can use it to record the exception. + +If your handler returns a ``Response`` object it will be returned to the client in place of the default Datasette error page. + +The handler can return a response directly, or it can return return an awaitable function that returns a response. + +This example logs an error to `Sentry <https://sentry.io/>`__ and then renders a custom error page: + +.. code-block:: python + + from datasette import hookimpl, Response + import sentry_sdk + + + @hookimpl + def handle_exception(datasette, exception): + sentry_sdk.capture_exception(exception) + async def inner(): + return Response.html( + await datasette.render_template("custom_error.html", request=request) + ) + return inner + .. _plugin_hook_menu_links: menu_links(datasette, actor, request) @@ -1232,8 +1266,8 @@ menu_links(datasette, actor, request) ``actor`` - dictionary or None The currently authenticated :ref:`actor <authentication_actor>`. -``request`` - object or None - The current HTTP :ref:`internals_request`. This can be ``None`` if the request object is not available. +``request`` - :ref:`internals_request` + The current HTTP request. This can be ``None`` if the request object is not available. This hook allows additional items to be included in the menu displayed by Datasette's top right menu icon. @@ -1281,8 +1315,8 @@ table_actions(datasette, actor, database, table, request) ``table`` - string The name of the table. -``request`` - object - The current HTTP :ref:`internals_request`. This can be ``None`` if the request object is not available. +``request`` - :ref:`internals_request` + The current HTTP request. This can be ``None`` if the request object is not available. This hook allows table actions to be displayed in a menu accessed via an action icon at the top of the table page. It should return a list of ``{"href": "...", "label": "..."}`` menu items. @@ -1325,8 +1359,8 @@ database_actions(datasette, actor, database, request) ``database`` - string The name of the database. -``request`` - object - The current HTTP :ref:`internals_request`. +``request`` - :ref:`internals_request` + The current HTTP request. This hook is similar to :ref:`plugin_hook_table_actions` but populates an actions menu on the database page. 
diff --git a/tests/fixtures.py b/tests/fixtures.py index e0e4ec7b..c145ac78 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -68,6 +68,7 @@ EXPECTED_PLUGINS = [ "canned_queries", "extra_js_urls", "extra_template_vars", + "handle_exception", "menu_links", "permission_allowed", "register_routes", diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index f5ce36b3..4df02343 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -185,3 +185,21 @@ def register_routes(datasette): # Also serves to demonstrate over-ride of default paths: (r"/(?P<db_name>[^/]+)/(?P<table_and_format>[^/]+?$)", new_table), ] + + +@hookimpl +def handle_exception(datasette, request, exception): + datasette._exception_hook_fired = (request, exception) + if request.args.get("_custom_error"): + return Response.text("_custom_error") + elif request.args.get("_custom_error_async"): + + async def inner(): + return Response.text("_custom_error_async") + + return inner + + +@hookimpl(specname="register_routes") +def register_triger_error(): + return ((r"/trigger-error", lambda: 1 / 0),) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index f4169dbe..2a519e76 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -332,6 +332,7 @@ def test_permissions_debug(app_client): assert checks == [ {"action": "permissions-debug", "result": True, "used_default": False}, {"action": "view-instance", "result": None, "used_default": True}, + {"action": "debug-menu", "result": False, "used_default": True}, {"action": "permissions-debug", "result": False, "used_default": True}, {"action": "view-instance", "result": None, "used_default": True}, ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 4a7ad7c6..948a40b8 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -824,6 +824,20 @@ def test_hook_forbidden(restore_working_directory): assert "view-database" == client.ds._last_forbidden_message +def test_hook_handle_exception(app_client): + app_client.get("/trigger-error?x=123") + assert hasattr(app_client.ds, "_exception_hook_fired") + request, exception = app_client.ds._exception_hook_fired + assert request.url == "http://localhost/trigger-error?x=123" + assert isinstance(exception, ZeroDivisionError) + + +@pytest.mark.parametrize("param", ("_custom_error", "_custom_error_async")) +def test_hook_handle_exception_custom_response(app_client, param): + response = app_client.get("/trigger-error?{}=1".format(param)) + assert response.text == param + + def test_hook_menu_links(app_client): def get_menu_links(html): soup = Soup(html, "html.parser") From 58fd1e33ec7ac5ed85431d5c86d60600cd5280fb Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 17 Jul 2022 16:30:58 -0700 Subject: [PATCH 0248/1103] Hint that you can render templates for these hooks, refs #1770 --- docs/plugin_hooks.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 6020a941..b4869606 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1176,7 +1176,7 @@ forbidden(datasette, request, message) -------------------------------------- ``datasette`` - :ref:`internals_datasette` - You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. 
+ You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to render templates or execute SQL queries. ``request`` - :ref:`internals_request` The current HTTP request. @@ -1224,7 +1224,7 @@ handle_exception(datasette, request, exception) ----------------------------------------------- ``datasette`` - :ref:`internals_datasette` - You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to render templates or execute SQL queries. ``request`` - :ref:`internals_request` The current HTTP request. From e543a095cc4c1ca895b082cfd1263ca25203a7c0 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 17 Jul 2022 17:57:41 -0700 Subject: [PATCH 0249/1103] Updated default plugins in docs, refs #1770 --- docs/plugins.rst | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/docs/plugins.rst b/docs/plugins.rst index f2ed02f7..29078054 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -172,6 +172,24 @@ If you run ``datasette plugins --all`` it will include default plugins that ship "filters_from_request" ] }, + { + "name": "datasette.forbidden", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "forbidden" + ] + }, + { + "name": "datasette.handle_exception", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "handle_exception" + ] + }, { "name": "datasette.publish.cloudrun", "static": false, From 6d5e1955470424cf4faf5d35788d328ebdd6d463 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 17 Jul 2022 17:59:20 -0700 Subject: [PATCH 0250/1103] Release 0.62a1 Refs #1300, #1739, #1744, #1746, #1748, #1759, #1770 --- datasette/version.py | 2 +- docs/changelog.rst | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index cf18c441..86f4cf7e 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.62a0" +__version__ = "0.62a1" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 74814fcb..3f105811 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,20 @@ Changelog ========= +.. _v0_62a1: + +0.62a1 (2022-07-17) +------------------- + +- New plugin hook: :ref:`handle_exception() <plugin_hook_handle_exception>`, for custom handling of exceptions caught by Datasette. (:issue:`1770`) +- The :ref:`render_cell() <plugin_hook_render_cell>` plugin hook is now also passed a ``row`` argument, representing the ``sqlite3.Row`` object that is being rendered. (:issue:`1300`) +- New ``--nolock`` option for ignoring file locks when opening read-only databases. (:issue:`1744`) +- Documentation now uses the `Furo <https://github.com/pradyunsg/furo>`__ Sphinx theme. (:issue:`1746`) +- Datasette now has a `Discord community <https://discord.gg/ktd74dm5mw>`__. +- Database file downloads now implement conditional GET using ETags. (:issue:`1739`) +- Examples in the documentation now include a copy-to-clipboard button. (:issue:`1748`) +- HTML for facet results and suggested results has been extracted out into new templates ``_facet_results.html`` and ``_suggested_facets.html``. Thanks, M. Nasimul Haque. (`#1759 <https://github.com/simonw/datasette/pull/1759>`__) + .. 
_v0_62a0: 0.62a0 (2022-05-02) From ed1ebc0f1d4153e3e0934f2af19f82e5fdf137d3 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 17 Jul 2022 18:03:33 -0700 Subject: [PATCH 0251/1103] Run blacken-docs, refs #1770 --- docs/plugin_hooks.rst | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index b4869606..aec1df56 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1213,7 +1213,9 @@ The function can alternatively return an awaitable function if it needs to make def forbidden(datasette): async def inner(): return Response.html( - await datasette.render_template("render_message.html", request=request) + await datasette.render_template( + "render_message.html", request=request + ) ) return inner @@ -1249,10 +1251,14 @@ This example logs an error to `Sentry <https://sentry.io/>`__ and then renders a @hookimpl def handle_exception(datasette, exception): sentry_sdk.capture_exception(exception) + async def inner(): return Response.html( - await datasette.render_template("custom_error.html", request=request) + await datasette.render_template( + "custom_error.html", request=request + ) ) + return inner .. _plugin_hook_menu_links: From ea6161f8475d9fa41c4879049511c58f692cce04 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 17 Jul 2022 18:06:26 -0700 Subject: [PATCH 0252/1103] Bump furo from 2022.4.7 to 2022.6.21 (#1760) Bumps [furo](https://github.com/pradyunsg/furo) from 2022.4.7 to 2022.6.21. - [Release notes](https://github.com/pradyunsg/furo/releases) - [Changelog](https://github.com/pradyunsg/furo/blob/main/docs/changelog.md) - [Commits](https://github.com/pradyunsg/furo/compare/2022.04.07...2022.06.21) --- updated-dependencies: - dependency-name: furo dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 29cb77bf..558b5c87 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,7 @@ setup( setup_requires=["pytest-runner"], extras_require={ "docs": [ - "furo==2022.4.7", + "furo==2022.6.21", "sphinx-autobuild", "codespell", "blacken-docs", From 22354c48ce4d514d7a1b321e5651c7f1340e3f5e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 17 Jul 2022 18:06:37 -0700 Subject: [PATCH 0253/1103] Update pytest-asyncio requirement from <0.19,>=0.17 to >=0.17,<0.20 (#1769) Updates the requirements on [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) to permit the latest version. - [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) - [Changelog](https://github.com/pytest-dev/pytest-asyncio/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.17.0...v0.19.0) --- updated-dependencies: - dependency-name: pytest-asyncio dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 558b5c87..a1c51d0b 100644 --- a/setup.py +++ b/setup.py @@ -74,7 +74,7 @@ setup( "test": [ "pytest>=5.2.2,<7.2.0", "pytest-xdist>=2.2.1,<2.6", - "pytest-asyncio>=0.17,<0.19", + "pytest-asyncio>=0.17,<0.20", "beautifulsoup4>=4.8.1,<4.12.0", "black==22.6.0", "blacken-docs==1.12.1", From 01369176b0a8943ab45292ffc6f9c929b80a00e8 Mon Sep 17 00:00:00 2001 From: Chris Amico <eyeseast@gmail.com> Date: Sun, 17 Jul 2022 21:12:45 -0400 Subject: [PATCH 0254/1103] Keep track of datasette.config_dir (#1766) Thanks, @eyeseast - closes #1764 --- datasette/app.py | 1 + tests/test_config_dir.py | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/datasette/app.py b/datasette/app.py index edd05bb3..1a9afc10 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -211,6 +211,7 @@ class Datasette: assert config_dir is None or isinstance( config_dir, Path ), "config_dir= should be a pathlib.Path" + self.config_dir = config_dir self.pdb = pdb self._secret = secret or secrets.token_hex(32) self.files = tuple(files or []) + tuple(immutables or []) diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index 015c6ace..fe927c42 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -1,4 +1,5 @@ import json +import pathlib import pytest from datasette.app import Datasette @@ -150,3 +151,11 @@ def test_metadata_yaml(tmp_path_factory, filename): response = client.get("/-/metadata.json") assert 200 == response.status assert {"title": "Title from metadata"} == response.json + + +def test_store_config_dir(config_dir_client): + ds = config_dir_client.ds + + assert hasattr(ds, "config_dir") + assert ds.config_dir is not None + assert isinstance(ds.config_dir, pathlib.Path) From 7af67b54b7d9bca43e948510fc62f6db2b748fa8 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 18 Jul 2022 14:31:09 -0700 Subject: [PATCH 0255/1103] How to register temporary plugins in tests, closes #903 --- docs/testing_plugins.rst | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 41046bfb..d02003a9 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -219,3 +219,39 @@ Here's a test for that plugin that mocks the HTTPX outbound request: assert ( outbound_request.url == "https://www.example.com/" ) + +.. _testing_plugins_register_in_test: + +Registering a plugin for the duration of a test +----------------------------------------------- + +When writing tests for plugins you may find it useful to register a test plugin just for the duration of a single test. You can do this using ``pm.register()`` and ``pm.unregister()`` like this: + +.. 
code-block:: python + + from datasette import hookimpl + from datasette.app import Datasette + from datasette.plugins import pm + import pytest + + + @pytest.mark.asyncio + async def test_using_test_plugin(): + class TestPlugin: + __name__ = "TestPlugin" + + # Use hookimpl and method names to register hooks + @hookimpl + def register_routes(self): + return [ + (r"^/error$", lambda: 1/0), + ] + + pm.register(TestPlugin(), name="undo") + try: + # The test implementation goes here + datasette = Datasette() + response = await datasette.client.get("/error") + assert response.status_code == 500 + finally: + pm.unregister(name="undo") From bca2d95d0228f80a108e13408f8e72b2c06c2c7b Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 2 Aug 2022 16:38:02 -0700 Subject: [PATCH 0256/1103] Configure readthedocs/readthedocs-preview --- .github/workflows/documentation-links.yml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .github/workflows/documentation-links.yml diff --git a/.github/workflows/documentation-links.yml b/.github/workflows/documentation-links.yml new file mode 100644 index 00000000..e7062a46 --- /dev/null +++ b/.github/workflows/documentation-links.yml @@ -0,0 +1,16 @@ +name: Read the Docs Pull Request Preview +on: + pull_request_target: + types: + - opened + +permissions: + pull-requests: write + +jobs: + documentation-links: + runs-on: ubuntu-latest + steps: + - uses: readthedocs/readthedocs-preview@main + with: + project-slug: "datasette" From 8cfc72336878dd846d149658e99cc598e835b661 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 9 Aug 2022 11:21:53 -0700 Subject: [PATCH 0257/1103] Ran blacken-docs --- docs/testing_plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index d02003a9..992b4b0e 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -244,7 +244,7 @@ When writing tests for plugins you may find it useful to register a test plugin @hookimpl def register_routes(self): return [ - (r"^/error$", lambda: 1/0), + (r"^/error$", lambda: 1 / 0), ] pm.register(TestPlugin(), name="undo") From 05d9c682689a0f1d23cbb502e027364ab3363910 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 14 Aug 2022 08:16:53 -0700 Subject: [PATCH 0258/1103] Promote Discord more in the README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 032180aa..7ebbca57 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover * Comprehensive documentation: https://docs.datasette.io/ * Examples: https://datasette.io/examples * Live demo of current main: https://latest.datasette.io/ -* Support questions, feedback? Join our [GitHub Discussions forum](https://github.com/simonw/datasette/discussions) +* Questions, feedback or want to talk about the project? Join our [Discord](https://discord.gg/ktd74dm5mw) Want to stay up-to-date with the project? Subscribe to the [Datasette newsletter](https://datasette.substack.com/) for tips, tricks and news on what's new in the Datasette ecosystem. 
From db00c00f6397287749331e8042fe998ee7f3b919 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 14 Aug 2022 08:19:30 -0700 Subject: [PATCH 0259/1103] Promote Datasette Lite in the README, refs #1781 --- README.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 7ebbca57..1af20129 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover * Latest [Datasette News](https://datasette.io/news) * Comprehensive documentation: https://docs.datasette.io/ * Examples: https://datasette.io/examples -* Live demo of current main: https://latest.datasette.io/ +* Live demo of current `main` branch: https://latest.datasette.io/ * Questions, feedback or want to talk about the project? Join our [Discord](https://discord.gg/ktd74dm5mw) Want to stay up-to-date with the project? Subscribe to the [Datasette newsletter](https://datasette.substack.com/) for tips, tricks and news on what's new in the Datasette ecosystem. @@ -85,3 +85,7 @@ Or: This will create a docker image containing both the datasette application and the specified SQLite database files. It will then deploy that image to Heroku or Cloud Run and give you a URL to access the resulting website and API. See [Publishing data](https://docs.datasette.io/en/stable/publish.html) in the documentation for more details. + +## Datasette Lite + +[Datasette Lite](https://lite.datasette.io/) is Datasette packaged using WebAssembly so that it runs entirely in your browser, no Python web application server required. Read more about that in the [Datasette Lite documentation](https://github.com/simonw/datasette-lite/blob/main/README.md). From 8eb699de7becdefc6d72555d9fb17c9f06235dc4 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 14 Aug 2022 08:24:39 -0700 Subject: [PATCH 0260/1103] Datasette Lite in Getting Started docs, closes #1781 --- docs/getting_started.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 00b753a9..571540cf 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -21,6 +21,17 @@ Datasette has several `tutorials <https://datasette.io/tutorials>`__ to help you - `Exploring a database with Datasette <https://datasette.io/tutorials/explore>`__ shows how to use the Datasette web interface to explore a new database. - `Learn SQL with Datasette <https://datasette.io/tutorials/learn-sql>`__ introduces SQL, and shows how to use that query language to ask questions of your data. +.. _getting_started_datasette_lite: + +Datasette in your browser with Datasette Lite +--------------------------------------------- + +`Datasette Lite <https://lite.datasette.io/>`__ is Datasette packaged using WebAssembly so that it runs entirely in your browser, no Python web application server required. + +You can pass a URL to a CSV, SQLite or raw SQL file directly to Datasette Lite to explore that data in your browser. + +This `example link <https://lite.datasette.io/?url=https%3A%2F%2Fraw.githubusercontent.com%2FNUKnightLab%2Fsql-mysteries%2Fmaster%2Fsql-murder-mystery.db#/sql-murder-mystery>`__ opens Datasette Lite and loads the SQL Murder Mystery example database from `Northwestern University Knight Lab <https://github.com/NUKnightLab/sql-mysteries>`__. + .. 
_getting_started_glitch: Try Datasette without installing anything using Glitch From df4fd2d7ddca8956d8a51c72ce007b8c75227f32 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 14 Aug 2022 08:44:02 -0700 Subject: [PATCH 0261/1103] _sort= works even if sort column not selected, closes #1773 --- datasette/views/table.py | 22 +++++++++++++++++++++- tests/test_table_api.py | 2 ++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index cd4be823..94d2673b 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -630,7 +630,27 @@ class TableView(DataView): next_value = path_from_row_pks(rows[-2], pks, use_rowid) # If there's a sort or sort_desc, add that value as a prefix if (sort or sort_desc) and not is_view: - prefix = rows[-2][sort or sort_desc] + try: + prefix = rows[-2][sort or sort_desc] + except IndexError: + # sort/sort_desc column missing from SELECT - look up value by PK instead + prefix_where_clause = " and ".join( + "[{}] = :pk{}".format(pk, i) for i, pk in enumerate(pks) + ) + prefix_lookup_sql = "select [{}] from [{}] where {}".format( + sort or sort_desc, table_name, prefix_where_clause + ) + prefix = ( + await db.execute( + prefix_lookup_sql, + { + **{ + "pk{}".format(i): rows[-2][pk] + for i, pk in enumerate(pks) + } + }, + ) + ).single_value() if isinstance(prefix, dict) and "value" in prefix: prefix = prefix["value"] if prefix is None: diff --git a/tests/test_table_api.py b/tests/test_table_api.py index 9db383c3..e56a72b5 100644 --- a/tests/test_table_api.py +++ b/tests/test_table_api.py @@ -288,6 +288,8 @@ def test_paginate_compound_keys_with_extra_filters(app_client): ), # text column contains '$null' - ensure it doesn't confuse pagination: ("_sort=text", lambda row: row["text"], "sorted by text"), + # Still works if sort column removed using _col= + ("_sort=text&_col=content", lambda row: row["text"], "sorted by text"), ], ) def test_sortable(app_client, query_string, sort_key, human_description_en): From 668415df9f6334bd255c22ab02018bed5bc14edd Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 14 Aug 2022 08:47:17 -0700 Subject: [PATCH 0262/1103] Upgrade Docker baes to 3.10.6-slim-bullseye - refs #1768 --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 42f5529b..ee7ed957 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9.7-slim-bullseye as build +FROM python:3.10.6-slim-bullseye as build # Version of Datasette to install, e.g. 0.55 # docker build . -t datasette --build-arg VERSION=0.55 From 080d4b3e065d78faf977c6ded6ead31aae24e2ae Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 14 Aug 2022 08:49:14 -0700 Subject: [PATCH 0263/1103] Switch to python:3.10.6-slim-bullseye for datasette publish - refs #1768 --- datasette/utils/__init__.py | 2 +- demos/apache-proxy/Dockerfile | 2 +- docs/publish.rst | 2 +- tests/test_package.py | 2 +- tests/test_publish_cloudrun.py | 4 ++-- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 77768112..d148cc2c 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -390,7 +390,7 @@ def make_dockerfile( "SQLITE_EXTENSIONS" ] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so" return """ -FROM python:3.8 +FROM python:3.10.6-slim-bullseye COPY . 
/app WORKDIR /app {apt_get_extras} diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile index 6c921963..70b33bec 100644 --- a/demos/apache-proxy/Dockerfile +++ b/demos/apache-proxy/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9.7-slim-bullseye +FROM python:3.10.6-slim-bullseye RUN apt-get update && \ apt-get install -y apache2 supervisor && \ diff --git a/docs/publish.rst b/docs/publish.rst index 166f2883..9c7c99cc 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -144,7 +144,7 @@ Here's example output for the package command:: $ datasette package parlgov.db --extra-options="--setting sql_time_limit_ms 2500" Sending build context to Docker daemon 4.459MB - Step 1/7 : FROM python:3 + Step 1/7 : FROM python:3.10.6-slim-bullseye ---> 79e1dc9af1c1 Step 2/7 : COPY . /app ---> Using cache diff --git a/tests/test_package.py b/tests/test_package.py index 02ed1775..ac15e61e 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -12,7 +12,7 @@ class CaptureDockerfile: EXPECTED_DOCKERFILE = """ -FROM python:3.8 +FROM python:3.10.6-slim-bullseye COPY . /app WORKDIR /app diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index 3427f4f7..60079ab3 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -223,7 +223,7 @@ def test_publish_cloudrun_plugin_secrets( ) expected = textwrap.dedent( r""" - FROM python:3.8 + FROM python:3.10.6-slim-bullseye COPY . /app WORKDIR /app @@ -290,7 +290,7 @@ def test_publish_cloudrun_apt_get_install( ) expected = textwrap.dedent( r""" - FROM python:3.8 + FROM python:3.10.6-slim-bullseye COPY . /app WORKDIR /app From 1563c22a8c65e6cff5194aa07df54d0ab8d4eecb Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 14 Aug 2022 09:13:12 -0700 Subject: [PATCH 0264/1103] Don't duplicate _sort_desc, refs #1738 --- datasette/views/table.py | 2 +- tests/test_table_html.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 94d2673b..49c30c9c 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -710,7 +710,7 @@ class TableView(DataView): for key in request.args: if ( key.startswith("_") - and key not in ("_sort", "_search", "_next") + and key not in ("_sort", "_sort_desc", "_search", "_next") and "__" not in key ): for value in request.args.getlist(key): diff --git a/tests/test_table_html.py b/tests/test_table_html.py index d3cb3e17..f3808ea3 100644 --- a/tests/test_table_html.py +++ b/tests/test_table_html.py @@ -828,6 +828,7 @@ def test_other_hidden_form_fields(app_client, path, expected_hidden): [ ("/fixtures/searchable?_search=terry", []), ("/fixtures/searchable?_sort=text2", []), + ("/fixtures/searchable?_sort_desc=text2", []), ("/fixtures/searchable?_sort=text2&_where=1", [("_where", "1")]), ], ) From c1396bf86033a7bd99fa0c0431f585475391a11a Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 14 Aug 2022 09:34:31 -0700 Subject: [PATCH 0265/1103] Don't allow canned write queries on immutable DBs, closes #1728 --- datasette/templates/query.html | 6 ++++- datasette/views/database.py | 4 ++++ tests/test_canned_queries.py | 40 ++++++++++++++++++++++++++++++++++ 3 files changed, 49 insertions(+), 1 deletion(-) diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 8c920527..cee779fc 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -28,6 +28,10 @@ {% block content %} +{% if canned_write and 
db_is_immutable %} + <p class="message-error">This query cannot be executed because the database is immutable.</p> +{% endif %} + <h1 style="padding-left: 10px; border-left: 10px solid #{{ database_color(database) }}">{{ metadata.title or database }}{% if canned_query and not metadata.title %}: {{ canned_query }}{% endif %}{% if private %} 🔒{% endif %}</h1> {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} @@ -61,7 +65,7 @@ <p> {% if not hide_sql %}<button id="sql-format" type="button" hidden>Format SQL</button>{% endif %} {% if canned_write %}<input type="hidden" name="csrftoken" value="{{ csrftoken() }}">{% endif %} - <input type="submit" value="Run SQL"> + <input type="submit" value="Run SQL"{% if canned_write and db_is_immutable %} disabled{% endif %}> {{ show_hide_hidden }} {% if canned_query and edit_sql_url %}<a href="{{ edit_sql_url }}" class="canned-query-edit-sql">Edit SQL</a>{% endif %} </p> diff --git a/datasette/views/database.py b/datasette/views/database.py index 42058752..77632b9d 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -273,6 +273,9 @@ class QueryView(DataView): # Execute query - as write or as read if write: if request.method == "POST": + # If database is immutable, return an error + if not db.is_mutable: + raise Forbidden("Database is immutable") body = await request.post_body() body = body.decode("utf-8").strip() if body.startswith("{") and body.endswith("}"): @@ -326,6 +329,7 @@ class QueryView(DataView): async def extra_template(): return { "request": request, + "db_is_immutable": not db.is_mutable, "path_with_added_args": path_with_added_args, "path_with_removed_args": path_with_removed_args, "named_parameter_values": named_parameter_values, diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index 5abffdcc..976aa0db 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -53,6 +53,26 @@ def canned_write_client(tmpdir): yield client +@pytest.fixture +def canned_write_immutable_client(): + with make_app_client( + is_immutable=True, + metadata={ + "databases": { + "fixtures": { + "queries": { + "add": { + "sql": "insert into sortable (text) values (:text)", + "write": True, + }, + } + } + } + }, + ) as client: + yield client + + def test_canned_query_with_named_parameter(app_client): response = app_client.get("/fixtures/neighborhood_search.json?text=town") assert [ @@ -373,3 +393,23 @@ def test_canned_write_custom_template(canned_write_client): response.headers["link"] == 'http://localhost/data/update_name.json; rel="alternate"; type="application/json+datasette"' ) + + +def test_canned_write_query_disabled_for_immutable_database( + canned_write_immutable_client, +): + response = canned_write_immutable_client.get("/fixtures/add") + assert response.status == 200 + assert ( + "This query cannot be executed because the database is immutable." 
+ in response.text + ) + assert '<input type="submit" value="Run SQL" disabled>' in response.text + # Submitting form should get a forbidden error + response = canned_write_immutable_client.post( + "/fixtures/add", + {"text": "text"}, + csrftoken_from=True, + ) + assert response.status == 403 + assert "Database is immutable" in response.text From 82167105ee699c850cc106ea927de1ad09276cfe Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 14 Aug 2022 10:07:30 -0700 Subject: [PATCH 0266/1103] --min-instances and --max-instances Cloud Run publish options, closes #1779 --- datasette/publish/cloudrun.py | 26 +++++++++++++++++--- docs/cli-reference.rst | 2 ++ tests/test_publish_cloudrun.py | 43 ++++++++++++++++++++++++---------- 3 files changed, 56 insertions(+), 15 deletions(-) diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 50b2b2fd..77274eb0 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -52,6 +52,16 @@ def publish_subcommand(publish): multiple=True, help="Additional packages to apt-get install", ) + @click.option( + "--max-instances", + type=int, + help="Maximum Cloud Run instances", + ) + @click.option( + "--min-instances", + type=int, + help="Minimum Cloud Run instances", + ) def cloudrun( files, metadata, @@ -79,6 +89,8 @@ def publish_subcommand(publish): cpu, timeout, apt_get_extras, + max_instances, + min_instances, ): "Publish databases to Datasette running on Cloud Run" fail_if_publish_binary_not_installed( @@ -168,12 +180,20 @@ def publish_subcommand(publish): ), shell=True, ) + extra_deploy_options = [] + for option, value in ( + ("--memory", memory), + ("--cpu", cpu), + ("--max-instances", max_instances), + ("--min-instances", min_instances), + ): + if value: + extra_deploy_options.append("{} {}".format(option, value)) check_call( - "gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}{}".format( + "gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}".format( image_id, service, - " --memory {}".format(memory) if memory else "", - " --cpu {}".format(cpu) if cpu else "", + " " + " ".join(extra_deploy_options) if extra_deploy_options else "", ), shell=True, ) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 1c1aff15..415af13c 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -251,6 +251,8 @@ datasette publish cloudrun --help --cpu [1|2|4] Number of vCPUs to allocate in Cloud Run --timeout INTEGER Build timeout in seconds --apt-get-install TEXT Additional packages to apt-get install + --max-instances INTEGER Maximum Cloud Run instances + --min-instances INTEGER Minimum Cloud Run instances --help Show this message and exit. 
diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index 60079ab3..e64534d2 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -105,19 +105,36 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which, tmp_path_factory): @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @pytest.mark.parametrize( - "memory,cpu,timeout,expected_gcloud_args", + "memory,cpu,timeout,min_instances,max_instances,expected_gcloud_args", [ - ["1Gi", None, None, "--memory 1Gi"], - ["2G", None, None, "--memory 2G"], - ["256Mi", None, None, "--memory 256Mi"], - ["4", None, None, None], - ["GB", None, None, None], - [None, 1, None, "--cpu 1"], - [None, 2, None, "--cpu 2"], - [None, 3, None, None], - [None, 4, None, "--cpu 4"], - ["2G", 4, None, "--memory 2G --cpu 4"], - [None, None, 1800, "--timeout 1800"], + ["1Gi", None, None, None, None, "--memory 1Gi"], + ["2G", None, None, None, None, "--memory 2G"], + ["256Mi", None, None, None, None, "--memory 256Mi"], + [ + "4", + None, + None, + None, + None, + None, + ], + [ + "GB", + None, + None, + None, + None, + None, + ], + [None, 1, None, None, None, "--cpu 1"], + [None, 2, None, None, None, "--cpu 2"], + [None, 3, None, None, None, None], + [None, 4, None, None, None, "--cpu 4"], + ["2G", 4, None, None, None, "--memory 2G --cpu 4"], + [None, None, 1800, None, None, "--timeout 1800"], + [None, None, None, 2, None, "--min-instances 2"], + [None, None, None, 2, 4, "--min-instances 2 --max-instances 4"], + [None, 2, None, None, 4, "--cpu 2 --max-instances 4"], ], ) def test_publish_cloudrun_memory_cpu( @@ -127,6 +144,8 @@ def test_publish_cloudrun_memory_cpu( memory, cpu, timeout, + min_instances, + max_instances, expected_gcloud_args, tmp_path_factory, ): From 5e6c5c9e3191a80f17a91c5205d9d69efdebb73f Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 14 Aug 2022 10:18:47 -0700 Subject: [PATCH 0267/1103] Document datasette.config_dir, refs #1766 --- docs/internals.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/internals.rst b/docs/internals.rst index da135282..20797e98 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -260,6 +260,7 @@ Constructor parameters include: - ``files=[...]`` - a list of database files to open - ``immutables=[...]`` - a list of database files to open in immutable mode - ``metadata={...}`` - a dictionary of :ref:`metadata` +- ``config_dir=...`` - the :ref:`configuration directory <config_dir>` to use, stored in ``datasette.config_dir`` .. _datasette_databases: From 815162cf029fab9f1c9308c1d6ecdba7ee369ebe Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 14 Aug 2022 10:32:42 -0700 Subject: [PATCH 0268/1103] Release 0.62 Refs #903, #1300, #1683, #1701, #1712, #1717, #1718, #1728, #1733, #1738, #1739, #1744, #1746, #1748, #1759, #1766, #1768, #1770, #1773, #1779 Closes #1782 --- datasette/version.py | 2 +- docs/changelog.rst | 53 ++++++++++++++++++++++++++++++-------------- 2 files changed, 37 insertions(+), 18 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index 86f4cf7e..0453346c 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.62a1" +__version__ = "0.62" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 3f105811..1225c63f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,33 +4,52 @@ Changelog ========= -.. 
_v0_62a1: +.. _v0_62: -0.62a1 (2022-07-17) +0.62 (2022-08-14) ------------------- +Datasette can now run entirely in your browser using WebAssembly. Try out `Datasette Lite <https://lite.datasette.io/>`__, take a look `at the code <https://github.com/simonw/datasette-lite>`__ or read more about it in `Datasette Lite: a server-side Python web application running in a browser <https://simonwillison.net/2022/May/4/datasette-lite/>`__. + +Datasette now has a `Discord community <https://discord.gg/ktd74dm5mw>`__ for questions and discussions about Datasette and its ecosystem of projects. + +Features +~~~~~~~~ + +- Datasette is now compatible with `Pyodide <https://pyodide.org/>`__. This is the enabling technology behind `Datasette Lite <https://lite.datasette.io/>`__. (:issue:`1733`) +- Database file downloads now implement conditional GET using ETags. (:issue:`1739`) +- HTML for facet results and suggested results has been extracted out into new templates ``_facet_results.html`` and ``_suggested_facets.html``. Thanks, M. Nasimul Haque. (`#1759 <https://github.com/simonw/datasette/pull/1759>`__) +- Datasette now runs some SQL queries in parallel. This has limited impact on performance, see `this research issue <https://github.com/simonw/datasette/issues/1727>`__ for details. +- New ``--nolock`` option for ignoring file locks when opening read-only databases. (:issue:`1744`) +- Spaces in the database names in URLs are now encoded as ``+`` rather than ``~20``. (:issue:`1701`) +- ``<Binary: 2427344 bytes>`` is now displayed as ``<Binary: 2,427,344 bytes>`` and is accompanied by tooltip showing "2.3MB". (:issue:`1712`) +- The base Docker image used by ``datasette publish cloudrun``, ``datasette package`` and the `official Datasette image <https://hub.docker.com/datasetteproject/datasette>`__ has been upgraded to ``3.10.6-slim-bullseye``. (:issue:`1768`) +- Canned writable queries against immutable databases now show a warning message. (:issue:`1728`) +- ``datasette publish cloudrun`` has a new ``--timeout`` option which can be used to increase the time limit applied by the Google Cloud build environment. Thanks, Tim Sherratt. (`#1717 <https://github.com/simonw/datasette/pull/1717>`__) +- ``datasette publish cloudrun`` has new ``--min-instances`` and ``--max-instances`` options. (:issue:`1779`) + +Plugin hooks +~~~~~~~~~~~~ + - New plugin hook: :ref:`handle_exception() <plugin_hook_handle_exception>`, for custom handling of exceptions caught by Datasette. (:issue:`1770`) - The :ref:`render_cell() <plugin_hook_render_cell>` plugin hook is now also passed a ``row`` argument, representing the ``sqlite3.Row`` object that is being rendered. (:issue:`1300`) -- New ``--nolock`` option for ignoring file locks when opening read-only databases. (:issue:`1744`) -- Documentation now uses the `Furo <https://github.com/pradyunsg/furo>`__ Sphinx theme. (:issue:`1746`) -- Datasette now has a `Discord community <https://discord.gg/ktd74dm5mw>`__. -- Database file downloads now implement conditional GET using ETags. (:issue:`1739`) -- Examples in the documentation now include a copy-to-clipboard button. (:issue:`1748`) -- HTML for facet results and suggested results has been extracted out into new templates ``_facet_results.html`` and ``_suggested_facets.html``. Thanks, M. Nasimul Haque. (`#1759 <https://github.com/simonw/datasette/pull/1759>`__) +- The :ref:`configuration directory <config_dir>` is now stored in ``datasette.config_dir``, making it available to plugins. Thanks, Chris Amico. 
(`#1766 <https://github.com/simonw/datasette/pull/1766>`__) -.. _v0_62a0: +Bug fixes +~~~~~~~~~ -0.62a0 (2022-05-02) -------------------- - -- Datasette now runs some SQL queries in parallel. This has limited impact on performance, see `this research issue <https://github.com/simonw/datasette/issues/1727>`__ for details. -- Datasette should now be compatible with Pyodide. (:issue:`1733`) -- ``datasette publish cloudrun`` has a new ``--timeout`` option which can be used to increase the time limit applied by the Google Cloud build environment. Thanks, Tim Sherratt. (`#1717 <https://github.com/simonw/datasette/pull/1717>`__) -- Spaces in database names are now encoded as ``+`` rather than ``~20``. (:issue:`1701`) -- ``<Binary: 2427344 bytes>`` is now displayed as ``<Binary: 2,427,344 bytes>`` and is accompanied by tooltip showing "2.3MB". (:issue:`1712`) - Don't show the facet option in the cog menu if faceting is not allowed. (:issue:`1683`) +- ``?_sort`` and ``?_sort_desc`` now work if the column that is being sorted has been excluded from the query using ``?_col=`` or ``?_nocol=``. (:issue:`1773`) +- Fixed bug where ``?_sort_desc`` was duplicated in the URL every time the Apply button was clicked. (:issue:`1738`) + +Documentation +~~~~~~~~~~~~~ + +- Examples in the documentation now include a copy-to-clipboard button. (:issue:`1748`) +- Documentation now uses the `Furo <https://github.com/pradyunsg/furo>`__ Sphinx theme. (:issue:`1746`) - Code examples in the documentation are now all formatted using Black. (:issue:`1718`) - ``Request.fake()`` method is now documented, see :ref:`internals_request`. +- New documentation for plugin authors: :ref:`testing_plugins_register_in_test`. (:issue:`903`) .. _v0_61_1: From a107e3a028923c1ab3911c0f880011283f93f368 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 14 Aug 2022 16:07:46 -0700 Subject: [PATCH 0269/1103] datasette-sentry is an example of handle_exception --- docs/plugin_hooks.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index aec1df56..c6f35d06 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1261,6 +1261,8 @@ This example logs an error to `Sentry <https://sentry.io/>`__ and then renders a return inner +Example: `datasette-sentry <https://datasette.io/plugins/datasette-sentry>`_ + .. _plugin_hook_menu_links: menu_links(datasette, actor, request) From 481eb96d85291cdfa5767a83884a1525dfc382d8 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 15 Aug 2022 13:17:28 -0700 Subject: [PATCH 0270/1103] https://datasette.io/tutorials/clean-data tutorial Refs #1783 --- docs/getting_started.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 571540cf..a9eaa404 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -20,6 +20,7 @@ Datasette has several `tutorials <https://datasette.io/tutorials>`__ to help you - `Exploring a database with Datasette <https://datasette.io/tutorials/explore>`__ shows how to use the Datasette web interface to explore a new database. - `Learn SQL with Datasette <https://datasette.io/tutorials/learn-sql>`__ introduces SQL, and shows how to use that query language to ask questions of your data. 
+- `Cleaning data with sqlite-utils and Datasette <https://datasette.io/tutorials/clean-data>`__ guides you through using `sqlite-utils <https://sqlite-utils.datasette.io/>`__ to turn a CSV file into a database that you can explore using Datasette. .. _getting_started_datasette_lite: From a3e6f1b16757fb2d39e7ddba4e09eda2362508bf Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 18 Aug 2022 09:06:02 -0700 Subject: [PATCH 0271/1103] Increase height of non-JS textarea to fit query Closes #1786 --- datasette/templates/query.html | 3 ++- tests/test_html.py | 6 ++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/datasette/templates/query.html b/datasette/templates/query.html index cee779fc..a35e3afe 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -45,7 +45,8 @@ {% endif %} {% if not hide_sql %} {% if editable and allow_execute_sql %} - <p><textarea id="sql-editor" name="sql">{% if query and query.sql %}{{ query.sql }}{% else %}select * from {{ tables[0].name|escape_sqlite }}{% endif %}</textarea></p> + <p><textarea id="sql-editor" name="sql"{% if query and query.sql %} style="height: {{ query.sql.split("\n")|length + 2 }}em"{% endif %} + >{% if query and query.sql %}{{ query.sql }}{% else %}select * from {{ tables[0].name|escape_sqlite }}{% endif %}</textarea></p> {% else %} <pre id="sql-query">{% if query %}{{ query.sql }}{% endif %}</pre> {% endif %} diff --git a/tests/test_html.py b/tests/test_html.py index 409fec68..be21bd84 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -695,10 +695,8 @@ def test_query_error(app_client): response = app_client.get("/fixtures?sql=select+*+from+notatable") html = response.text assert '<p class="message-error">no such table: notatable</p>' in html - assert ( - '<textarea id="sql-editor" name="sql">select * from notatable</textarea>' - in html - ) + assert '<textarea id="sql-editor" name="sql" style="height: 3em' in html + assert ">select * from notatable</textarea>" in html assert "0 results" not in html From 09a41662e70b788469157bb58ed9ca4acdf2f904 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 18 Aug 2022 09:10:48 -0700 Subject: [PATCH 0272/1103] Fix typo --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index c6f35d06..30bd75b7 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -874,7 +874,7 @@ canned_queries(datasette, database, actor) ``actor`` - dictionary or None The currently authenticated :ref:`actor <authentication_actor>`. -Ues this hook to return a dictionary of additional :ref:`canned query <canned_queries>` definitions for the specified database. The return value should be the same shape as the JSON described in the :ref:`canned query <canned_queries>` documentation. +Use this hook to return a dictionary of additional :ref:`canned query <canned_queries>` definitions for the specified database. The return value should be the same shape as the JSON described in the :ref:`canned query <canned_queries>` documentation. .. 
code-block:: python From 6c0ba7c00c2ae3ecbb5309efa59079cea1c850b3 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 18 Aug 2022 14:52:04 -0700 Subject: [PATCH 0273/1103] Improved CLI reference documentation, refs #1787 --- datasette/cli.py | 2 +- docs/changelog.rst | 2 +- docs/cli-reference.rst | 325 ++++++++++++++++++++++++++++++--------- docs/getting_started.rst | 50 ------ docs/index.rst | 2 +- docs/publish.rst | 2 + 6 files changed, 259 insertions(+), 124 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 8781747c..f2a03d53 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -282,7 +282,7 @@ def package( port, **extra_metadata, ): - """Package specified SQLite files into a new datasette Docker container""" + """Package SQLite files into a Datasette Docker container""" if not shutil.which("docker"): click.secho( ' The package command requires "docker" to be installed and configured ', diff --git a/docs/changelog.rst b/docs/changelog.rst index 1225c63f..f9dcc980 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -621,7 +621,7 @@ See also `Datasette 0.49: The annotated release notes <https://simonwillison.net - Datasette now has `a GitHub discussions forum <https://github.com/simonw/datasette/discussions>`__ for conversations about the project that go beyond just bug reports and issues. - Datasette can now be installed on macOS using Homebrew! Run ``brew install simonw/datasette/datasette``. See :ref:`installation_homebrew`. (:issue:`335`) - Two new commands: ``datasette install name-of-plugin`` and ``datasette uninstall name-of-plugin``. These are equivalent to ``pip install`` and ``pip uninstall`` but automatically run in the same virtual environment as Datasette, so users don't have to figure out where that virtual environment is - useful for installations created using Homebrew or ``pipx``. See :ref:`plugins_installing`. (:issue:`925`) -- A new command-line option, ``datasette --get``, accepts a path to a URL within the Datasette instance. It will run that request through Datasette (without starting a web server) and print out the response. See :ref:`getting_started_datasette_get` for an example. (:issue:`926`) +- A new command-line option, ``datasette --get``, accepts a path to a URL within the Datasette instance. It will run that request through Datasette (without starting a web server) and print out the response. See :ref:`cli_datasette_get` for an example. (:issue:`926`) .. _v0_46: diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 415af13c..a1e56774 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -4,44 +4,34 @@ CLI reference =============== -This page lists the ``--help`` for every ``datasette`` CLI command. +The ``datasette`` CLI tool provides a number of commands. + +Running ``datasette`` without specifying a command runs the default command, ``datasette serve``. See :ref:`cli_help_serve___help` for the full list of options for that command. .. 
[[[cog from datasette import cli from click.testing import CliRunner import textwrap - commands = [ - ["--help"], - ["serve", "--help"], - ["serve", "--help-settings"], - ["plugins", "--help"], - ["publish", "--help"], - ["publish", "cloudrun", "--help"], - ["publish", "heroku", "--help"], - ["package", "--help"], - ["inspect", "--help"], - ["install", "--help"], - ["uninstall", "--help"], - ] - cog.out("\n") - for command in commands: - title = "datasette " + " ".join(command) - ref = "_cli_help_" + ("_".join(command).replace("-", "_")) - cog.out(".. {}:\n\n".format(ref)) - cog.out(title + "\n") - cog.out(("=" * len(title)) + "\n\n") + def help(args): + title = "datasette " + " ".join(args) cog.out("::\n\n") - result = CliRunner().invoke(cli.cli, command) + result = CliRunner().invoke(cli.cli, args) output = result.output.replace("Usage: cli ", "Usage: datasette ") cog.out(textwrap.indent(output, ' ')) cog.out("\n\n") .. ]]] +.. [[[end]]] .. _cli_help___help: datasette --help ================ +Running ``datasette --help`` shows a list of all of the available commands. + +.. [[[cog + help(["--help"]) +.. ]]] :: Usage: datasette [OPTIONS] COMMAND [ARGS]... @@ -59,17 +49,34 @@ datasette --help serve* Serve up specified SQLite database files with a web UI inspect Generate JSON summary of provided database files install Install plugins and packages from PyPI into the same... - package Package specified SQLite files into a new datasette Docker... + package Package SQLite files into a Datasette Docker container plugins List currently installed plugins publish Publish specified SQLite database files to the internet along... uninstall Uninstall plugins and Python packages from the Datasette... +.. [[[end]]] + +Additional commands added by plugins that use the :ref:`plugin_hook_register_commands` hook will be listed here as well. + .. _cli_help_serve___help: -datasette serve --help -====================== +datasette serve +=============== +This command starts the Datasette web application running on your machine:: + + datasette serve mydatabase.db + +Or since this is the default command you can run this instead:: + + datasette mydatabase.db + +Once started you can access it at ``http://localhost:8001`` + +.. [[[cog + help(["serve", "--help"]) +.. ]]] :: Usage: datasette serve [OPTIONS] [FILES]... @@ -121,11 +128,75 @@ datasette serve --help --help Show this message and exit. +.. [[[end]]] + + +.. _cli_datasette_get: + +datasette --get +--------------- + +The ``--get`` option to ``datasette serve`` (or just ``datasette``) specifies the path to a page within Datasette and causes Datasette to output the content from that path without starting the web server. + +This means that all of Datasette's functionality can be accessed directly from the command-line. + +For example:: + + $ datasette --get '/-/versions.json' | jq . 
+ { + "python": { + "version": "3.8.5", + "full": "3.8.5 (default, Jul 21 2020, 10:48:26) \n[Clang 11.0.3 (clang-1103.0.32.62)]" + }, + "datasette": { + "version": "0.46+15.g222a84a.dirty" + }, + "asgi": "3.0", + "uvicorn": "0.11.8", + "sqlite": { + "version": "3.32.3", + "fts_versions": [ + "FTS5", + "FTS4", + "FTS3" + ], + "extensions": { + "json1": null + }, + "compile_options": [ + "COMPILER=clang-11.0.3", + "ENABLE_COLUMN_METADATA", + "ENABLE_FTS3", + "ENABLE_FTS3_PARENTHESIS", + "ENABLE_FTS4", + "ENABLE_FTS5", + "ENABLE_GEOPOLY", + "ENABLE_JSON1", + "ENABLE_PREUPDATE_HOOK", + "ENABLE_RTREE", + "ENABLE_SESSION", + "MAX_VARIABLE_NUMBER=250000", + "THREADSAFE=1" + ] + } + } + +The exit code will be 0 if the request succeeds and 1 if the request produced an HTTP status code other than 200 - e.g. a 404 or 500 error. + +This lets you use ``datasette --get /`` to run tests against a Datasette application in a continuous integration environment such as GitHub Actions. + .. _cli_help_serve___help_settings: datasette serve --help-settings -=============================== +------------------------------- +This command outputs all of the available Datasette :ref:`settings <settings>`. + +These can be passed to ``datasette serve`` using ``datasette serve --setting name value``. + +.. [[[cog + help(["--help-settings"]) +.. ]]] :: Settings: @@ -170,11 +241,18 @@ datasette serve --help-settings +.. [[[end]]] + .. _cli_help_plugins___help: -datasette plugins --help -======================== +datasette plugins +================= +Output JSON showing all currently installed plugins, their versions, whether they include static files or templates and which :ref:`plugin_hooks` they use. + +.. [[[cog + help(["plugins", "--help"]) +.. ]]] :: Usage: datasette plugins [OPTIONS] @@ -187,11 +265,110 @@ datasette plugins --help --help Show this message and exit. +.. [[[end]]] + +Example output: + +.. code-block:: json + + [ + { + "name": "datasette-geojson", + "static": false, + "templates": false, + "version": "0.3.1", + "hooks": [ + "register_output_renderer" + ] + }, + { + "name": "datasette-geojson-map", + "static": true, + "templates": false, + "version": "0.4.0", + "hooks": [ + "extra_body_script", + "extra_css_urls", + "extra_js_urls" + ] + }, + { + "name": "datasette-leaflet", + "static": true, + "templates": false, + "version": "0.2.2", + "hooks": [ + "extra_body_script", + "extra_template_vars" + ] + } + ] + + +.. _cli_help_install___help: + +datasette install +================= + +Install new Datasette plugins. This command works like ``pip install`` but ensures that your plugins will be installed into the same environment as Datasette. + +This command:: + + datasette install datasette-cluster-map + +Would install the `datasette-cluster-map <https://datasette.io/plugins/datasette-cluster-map>`__ plugin. + +.. [[[cog + help(["install", "--help"]) +.. ]]] +:: + + Usage: datasette install [OPTIONS] PACKAGES... + + Install plugins and packages from PyPI into the same environment as Datasette + + Options: + -U, --upgrade Upgrade packages to latest version + --help Show this message and exit. + + +.. [[[end]]] + +.. _cli_help_uninstall___help: + +datasette uninstall +=================== + +Uninstall one or more plugins. + +.. [[[cog + help(["uninstall", "--help"]) +.. ]]] +:: + + Usage: datasette uninstall [OPTIONS] PACKAGES... + + Uninstall plugins and Python packages from the Datasette environment + + Options: + -y, --yes Don't ask for confirmation + --help Show this message and exit. + + +.. 
[[[end]]] + .. _cli_help_publish___help: -datasette publish --help -======================== +datasette publish +================= +Shows a list of available deployment targets for :ref:`publishing data <publishing>` with Datasette. + +Additional deployment targets can be added by plugins that use the :ref:`plugin_hook_publish_subcommand` hook. + +.. [[[cog + help(["publish", "--help"]) +.. ]]] :: Usage: datasette publish [OPTIONS] COMMAND [ARGS]... @@ -207,11 +384,19 @@ datasette publish --help heroku Publish databases to Datasette running on Heroku +.. [[[end]]] + + .. _cli_help_publish_cloudrun___help: -datasette publish cloudrun --help -================================= +datasette publish cloudrun +========================== +See :ref:`publish_cloud_run`. + +.. [[[cog + help(["publish", "cloudrun", "--help"]) +.. ]]] :: Usage: datasette publish cloudrun [OPTIONS] [FILES]... @@ -256,11 +441,19 @@ datasette publish cloudrun --help --help Show this message and exit. +.. [[[end]]] + + .. _cli_help_publish_heroku___help: -datasette publish heroku --help -=============================== +datasette publish heroku +======================== +See :ref:`publish_heroku`. + +.. [[[cog + help(["publish", "heroku", "--help"]) +.. ]]] :: Usage: datasette publish heroku [OPTIONS] [FILES]... @@ -297,16 +490,23 @@ datasette publish heroku --help --help Show this message and exit. +.. [[[end]]] + .. _cli_help_package___help: -datasette package --help -======================== +datasette package +================= +Package SQLite files into a Datasette Docker container, see :ref:`cli_package`. + +.. [[[cog + help(["package", "--help"]) +.. ]]] :: Usage: datasette package [OPTIONS] FILES... - Package specified SQLite files into a new datasette Docker container + Package SQLite files into a Datasette Docker container Options: -t, --tag TEXT Name for the resulting Docker container, can @@ -335,11 +535,26 @@ datasette package --help --help Show this message and exit. +.. [[[end]]] + + .. _cli_help_inspect___help: -datasette inspect --help -======================== +datasette inspect +================= +Outputs JSON representing introspected data about one or more SQLite database files. + +If you are opening an immutable database, you can pass this file to the ``--inspect-data`` option to improve Datasette's performance by allowing it to skip running row counts against the database when it first starts running:: + + datasette inspect mydatabase.db > inspect-data.json + datasette serve -i mydatabase.db --inspect-file inspect-data.json + +This performance optimization is used automatically by some of the ``datasette publish`` commands. You are unlikely to need to apply this optimization manually. + +.. [[[cog + help(["inspect", "--help"]) +.. ]]] :: Usage: datasette inspect [OPTIONS] [FILES]... @@ -355,36 +570,4 @@ datasette inspect --help --help Show this message and exit. -.. _cli_help_install___help: - -datasette install --help -======================== - -:: - - Usage: datasette install [OPTIONS] PACKAGES... - - Install plugins and packages from PyPI into the same environment as Datasette - - Options: - -U, --upgrade Upgrade packages to latest version - --help Show this message and exit. - - -.. _cli_help_uninstall___help: - -datasette uninstall --help -========================== - -:: - - Usage: datasette uninstall [OPTIONS] PACKAGES... - - Uninstall plugins and Python packages from the Datasette environment - - Options: - -y, --yes Don't ask for confirmation - --help Show this message and exit. - - .. 
[[[end]]] diff --git a/docs/getting_started.rst b/docs/getting_started.rst index a9eaa404..6515ef8d 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -138,53 +138,3 @@ JSON in a more convenient format: } ] } - -.. _getting_started_datasette_get: - -datasette --get ---------------- - -The ``--get`` option can specify the path to a page within Datasette and cause Datasette to output the content from that path without starting the web server. This means that all of Datasette's functionality can be accessed directly from the command-line. For example:: - - $ datasette --get '/-/versions.json' | jq . - { - "python": { - "version": "3.8.5", - "full": "3.8.5 (default, Jul 21 2020, 10:48:26) \n[Clang 11.0.3 (clang-1103.0.32.62)]" - }, - "datasette": { - "version": "0.46+15.g222a84a.dirty" - }, - "asgi": "3.0", - "uvicorn": "0.11.8", - "sqlite": { - "version": "3.32.3", - "fts_versions": [ - "FTS5", - "FTS4", - "FTS3" - ], - "extensions": { - "json1": null - }, - "compile_options": [ - "COMPILER=clang-11.0.3", - "ENABLE_COLUMN_METADATA", - "ENABLE_FTS3", - "ENABLE_FTS3_PARENTHESIS", - "ENABLE_FTS4", - "ENABLE_FTS5", - "ENABLE_GEOPOLY", - "ENABLE_JSON1", - "ENABLE_PREUPDATE_HOOK", - "ENABLE_RTREE", - "ENABLE_SESSION", - "MAX_VARIABLE_NUMBER=250000", - "THREADSAFE=1" - ] - } - } - -The exit code will be 0 if the request succeeds and 1 if the request produced an HTTP status code other than 200 - e.g. a 404 or 500 error. This means you can use ``datasette --get /`` to run tests against a Datasette application in a continuous integration environment such as GitHub Actions. - -Running ``datasette`` without specifying a command runs the default command, ``datasette serve``. See :ref:`cli_help_serve___help` for the full list of options for that command. diff --git a/docs/index.rst b/docs/index.rst index efe196b3..5a9cc7ed 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -40,6 +40,7 @@ Contents getting_started installation ecosystem + cli-reference pages publish deploying @@ -61,6 +62,5 @@ Contents plugin_hooks testing_plugins internals - cli-reference contributing changelog diff --git a/docs/publish.rst b/docs/publish.rst index 9c7c99cc..dd8566ed 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -56,6 +56,8 @@ Cloud Run provides a URL on the ``.run.app`` domain, but you can also point your See :ref:`cli_help_publish_cloudrun___help` for the full list of options for this command. +.. 
_publish_heroku: + Publishing to Heroku -------------------- From aff3df03d4fe0806ce432d1818f6643cdb2a854e Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 18 Aug 2022 14:55:08 -0700 Subject: [PATCH 0274/1103] Ignore ro which stands for read only Refs #1787 where it caused tests to break --- docs/codespell-ignore-words.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/codespell-ignore-words.txt b/docs/codespell-ignore-words.txt index a625cde5..d6744d05 100644 --- a/docs/codespell-ignore-words.txt +++ b/docs/codespell-ignore-words.txt @@ -1 +1 @@ -AddWordsToIgnoreHere +ro From 0d9d33955b503c88a2c712144d97f094baa5d46d Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 18 Aug 2022 16:06:12 -0700 Subject: [PATCH 0275/1103] Clarify you can publish multiple files, closes #1788 --- docs/publish.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/publish.rst b/docs/publish.rst index dd8566ed..d817ed31 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -31,7 +31,7 @@ Publishing to Google Cloud Run You will first need to install and configure the Google Cloud CLI tools by following `these instructions <https://cloud.google.com/sdk/>`__. -You can then publish a database to Google Cloud Run using the following command:: +You can then publish one or more SQLite database files to Google Cloud Run using the following command:: datasette publish cloudrun mydatabase.db --service=my-database @@ -63,7 +63,7 @@ Publishing to Heroku To publish your data using `Heroku <https://www.heroku.com/>`__, first create an account there and install and configure the `Heroku CLI tool <https://devcenter.heroku.com/articles/heroku-cli>`_. -You can publish a database to Heroku using the following command:: +You can publish one or more databases to Heroku using the following command:: datasette publish heroku mydatabase.db @@ -138,7 +138,7 @@ If a plugin has any :ref:`plugins_configuration_secret` you can use the ``--plug datasette package ================= -If you have docker installed (e.g. using `Docker for Mac <https://www.docker.com/docker-mac>`_) you can use the ``datasette package`` command to create a new Docker image in your local repository containing the datasette app bundled together with your selected SQLite databases:: +If you have docker installed (e.g. using `Docker for Mac <https://www.docker.com/docker-mac>`_) you can use the ``datasette package`` command to create a new Docker image in your local repository containing the datasette app bundled together with one or more SQLite databases:: datasette package mydatabase.db From 663ac431fe7202c85967568d82b2034f92b9aa43 Mon Sep 17 00:00:00 2001 From: Manuel Kaufmann <humitos@gmail.com> Date: Sat, 20 Aug 2022 02:04:16 +0200 Subject: [PATCH 0276/1103] Use Read the Docs action v1 (#1778) Read the Docs repository was renamed from `readthedocs/readthedocs-preview` to `readthedocs/actions/`. 
Now, the `preview` action is under `readthedocs/actions/preview` and is tagged as `v1` --- .github/workflows/documentation-links.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/documentation-links.yml b/.github/workflows/documentation-links.yml index e7062a46..a54bd83a 100644 --- a/.github/workflows/documentation-links.yml +++ b/.github/workflows/documentation-links.yml @@ -11,6 +11,6 @@ jobs: documentation-links: runs-on: ubuntu-latest steps: - - uses: readthedocs/readthedocs-preview@main + - uses: readthedocs/actions/preview@v1 with: project-slug: "datasette" From 1d64c9a8dac45b9a3452acf8e76dfadea2b0bc49 Mon Sep 17 00:00:00 2001 From: Alex Garcia <alexsebastian.garcia@gmail.com> Date: Tue, 23 Aug 2022 11:34:30 -0700 Subject: [PATCH 0277/1103] Add new entrypoint option to --load-extensions. (#1789) Thanks, @asg017 --- .gitignore | 6 ++++ datasette/app.py | 8 ++++- datasette/cli.py | 4 ++- datasette/utils/__init__.py | 11 ++++++ tests/ext.c | 48 ++++++++++++++++++++++++++ tests/test_load_extensions.py | 65 +++++++++++++++++++++++++++++++++++ 6 files changed, 140 insertions(+), 2 deletions(-) create mode 100644 tests/ext.c create mode 100644 tests/test_load_extensions.py diff --git a/.gitignore b/.gitignore index 066009f0..277ff653 100644 --- a/.gitignore +++ b/.gitignore @@ -118,3 +118,9 @@ ENV/ .DS_Store node_modules .*.swp + +# In case someone compiled tests/ext.c for test_load_extensions, don't +# include it in source control. +tests/*.dylib +tests/*.so +tests/*.dll \ No newline at end of file diff --git a/datasette/app.py b/datasette/app.py index 1a9afc10..bb9232c9 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -559,7 +559,13 @@ class Datasette: if self.sqlite_extensions: conn.enable_load_extension(True) for extension in self.sqlite_extensions: - conn.execute("SELECT load_extension(?)", [extension]) + # "extension" is either a string path to the extension + # or a 2-item tuple that specifies which entrypoint to load. + if isinstance(extension, tuple): + path, entrypoint = extension + conn.execute("SELECT load_extension(?, ?)", [path, entrypoint]) + else: + conn.execute("SELECT load_extension(?)", [extension]) if self.setting("cache_size_kb"): conn.execute(f"PRAGMA cache_size=-{self.setting('cache_size_kb')}") # pylint: disable=no-member diff --git a/datasette/cli.py b/datasette/cli.py index f2a03d53..6eb42712 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -21,6 +21,7 @@ from .app import ( pm, ) from .utils import ( + LoadExtension, StartupError, check_connection, find_spatialite, @@ -128,9 +129,10 @@ def sqlite_extensions(fn): return click.option( "sqlite_extensions", "--load-extension", + type=LoadExtension(), envvar="SQLITE_EXTENSIONS", multiple=True, - help="Path to a SQLite extension to load", + help="Path to a SQLite extension to load, and optional entrypoint", )(fn) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index d148cc2c..0fc87d51 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -833,6 +833,17 @@ class StaticMount(click.ParamType): self.fail(f"{value} is not a valid directory path", param, ctx) return path, dirpath +# The --load-extension parameter can optionally include a specific entrypoint. +# This is done by appending ":entrypoint_name" after supplying the path to the extension +class LoadExtension(click.ParamType): + name = "path:entrypoint?" 
+ + def convert(self, value, param, ctx): + if ":" not in value: + return value + path, entrypoint = value.split(":", 1) + return path, entrypoint + def format_bytes(bytes): current = float(bytes) diff --git a/tests/ext.c b/tests/ext.c new file mode 100644 index 00000000..5fe970d9 --- /dev/null +++ b/tests/ext.c @@ -0,0 +1,48 @@ +/* +** This file implements a SQLite extension with multiple entrypoints. +** +** The default entrypoint, sqlite3_ext_init, has a single function "a". +** The 1st alternate entrypoint, sqlite3_ext_b_init, has a single function "b". +** The 2nd alternate entrypoint, sqlite3_ext_c_init, has a single function "c". +** +** Compiling instructions: +** https://www.sqlite.org/loadext.html#compiling_a_loadable_extension +** +*/ + +#include "sqlite3ext.h" + +SQLITE_EXTENSION_INIT1 + +// SQL function that returns back the value supplied during sqlite3_create_function() +static void func(sqlite3_context *context, int argc, sqlite3_value **argv) { + sqlite3_result_text(context, (char *) sqlite3_user_data(context), -1, SQLITE_STATIC); +} + + +// The default entrypoint, since it matches the "ext.dylib"/"ext.so" name +#ifdef _WIN32 +__declspec(dllexport) +#endif +int sqlite3_ext_init(sqlite3 *db, char **pzErrMsg, const sqlite3_api_routines *pApi) { + SQLITE_EXTENSION_INIT2(pApi); + return sqlite3_create_function(db, "a", 0, 0, "a", func, 0, 0); +} + +// Alternate entrypoint #1 +#ifdef _WIN32 +__declspec(dllexport) +#endif +int sqlite3_ext_b_init(sqlite3 *db, char **pzErrMsg, const sqlite3_api_routines *pApi) { + SQLITE_EXTENSION_INIT2(pApi); + return sqlite3_create_function(db, "b", 0, 0, "b", func, 0, 0); +} + +// Alternate entrypoint #2 +#ifdef _WIN32 +__declspec(dllexport) +#endif +int sqlite3_ext_c_init(sqlite3 *db, char **pzErrMsg, const sqlite3_api_routines *pApi) { + SQLITE_EXTENSION_INIT2(pApi); + return sqlite3_create_function(db, "c", 0, 0, "c", func, 0, 0); +} diff --git a/tests/test_load_extensions.py b/tests/test_load_extensions.py new file mode 100644 index 00000000..360bc8f3 --- /dev/null +++ b/tests/test_load_extensions.py @@ -0,0 +1,65 @@ +from datasette.app import Datasette +import pytest +from pathlib import Path + +# not necessarily a full path - the full compiled path looks like "ext.dylib" +# or another suffix, but sqlite will, under the hood, decide which file +# extension to use based on the operating system (apple=dylib, windows=dll etc) +# this resolves to "./ext", which is enough for SQLite to calculate the rest +COMPILED_EXTENSION_PATH = str(Path(__file__).parent / "ext") + +# See if ext.c has been compiled, based off the different possible suffixes. +def has_compiled_ext(): + for ext in ["dylib", "so", "dll"]: + path = Path(__file__).parent / f"ext.{ext}" + if path.is_file(): + return True + return False + + +@pytest.mark.asyncio +@pytest.mark.skipif(not has_compiled_ext(), reason="Requires compiled ext.c") +async def test_load_extension_default_entrypoint(): + + # The default entrypoint only loads a() and NOT b() or c(), so those + # should fail. 
+ ds = Datasette(sqlite_extensions=[COMPILED_EXTENSION_PATH]) + + response = await ds.client.get("/_memory.json?sql=select+a()") + assert response.status_code == 200 + assert response.json()["rows"][0][0] == "a" + + response = await ds.client.get("/_memory.json?sql=select+b()") + assert response.status_code == 400 + assert response.json()["error"] == "no such function: b" + + response = await ds.client.get("/_memory.json?sql=select+c()") + assert response.status_code == 400 + assert response.json()["error"] == "no such function: c" + + +@pytest.mark.asyncio +@pytest.mark.skipif(not has_compiled_ext(), reason="Requires compiled ext.c") +async def test_load_extension_multiple_entrypoints(): + + # Load in the default entrypoint and the other 2 custom entrypoints, now + # all a(), b(), and c() should run successfully. + ds = Datasette( + sqlite_extensions=[ + COMPILED_EXTENSION_PATH, + (COMPILED_EXTENSION_PATH, "sqlite3_ext_b_init"), + (COMPILED_EXTENSION_PATH, "sqlite3_ext_c_init"), + ] + ) + + response = await ds.client.get("/_memory.json?sql=select+a()") + assert response.status_code == 200 + assert response.json()["rows"][0][0] == "a" + + response = await ds.client.get("/_memory.json?sql=select+b()") + assert response.status_code == 200 + assert response.json()["rows"][0][0] == "b" + + response = await ds.client.get("/_memory.json?sql=select+c()") + assert response.status_code == 200 + assert response.json()["rows"][0][0] == "c" From fd1086c6867f3e3582b1eca456e4ea95f6cecf8b Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 23 Aug 2022 11:35:41 -0700 Subject: [PATCH 0278/1103] Applied Black, refs #1789 --- datasette/app.py | 4 ++-- datasette/utils/__init__.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index bb9232c9..f2a6763a 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -559,8 +559,8 @@ class Datasette: if self.sqlite_extensions: conn.enable_load_extension(True) for extension in self.sqlite_extensions: - # "extension" is either a string path to the extension - # or a 2-item tuple that specifies which entrypoint to load. + # "extension" is either a string path to the extension + # or a 2-item tuple that specifies which entrypoint to load. if isinstance(extension, tuple): path, entrypoint = extension conn.execute("SELECT load_extension(?, ?)", [path, entrypoint]) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 0fc87d51..bbaa0510 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -833,6 +833,7 @@ class StaticMount(click.ParamType): self.fail(f"{value} is not a valid directory path", param, ctx) return path, dirpath + # The --load-extension parameter can optionally include a specific entrypoint. 
# This is done by appending ":entrypoint_name" after supplying the path to the extension class LoadExtension(click.ParamType): From 456dc155d491a009942ace71a4e1827cddc6b93d Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 23 Aug 2022 11:40:36 -0700 Subject: [PATCH 0279/1103] Ran cog, refs #1789 --- docs/cli-reference.rst | 95 +++++++++++++++++++++++------------------- 1 file changed, 51 insertions(+), 44 deletions(-) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index a1e56774..f8419d58 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -84,48 +84,53 @@ Once started you can access it at ``http://localhost:8001`` Serve up specified SQLite database files with a web UI Options: - -i, --immutable PATH Database files to open in immutable mode - -h, --host TEXT Host for server. Defaults to 127.0.0.1 which means - only connections from the local machine will be - allowed. Use 0.0.0.0 to listen to all IPs and allow - access from other machines. - -p, --port INTEGER RANGE Port for server, defaults to 8001. Use -p 0 to - automatically assign an available port. - [0<=x<=65535] - --uds TEXT Bind to a Unix domain socket - --reload Automatically reload if code or metadata change - detected - useful for development - --cors Enable CORS by serving Access-Control-Allow-Origin: - * - --load-extension TEXT Path to a SQLite extension to load - --inspect-file TEXT Path to JSON file created using "datasette inspect" - -m, --metadata FILENAME Path to JSON/YAML file containing license/source - metadata - --template-dir DIRECTORY Path to directory containing custom templates - --plugins-dir DIRECTORY Path to directory containing custom plugins - --static MOUNT:DIRECTORY Serve static files from this directory at /MOUNT/... - --memory Make /_memory database available - --config CONFIG Deprecated: set config option using - configname:value. Use --setting instead. - --setting SETTING... Setting, see - docs.datasette.io/en/stable/settings.html - --secret TEXT Secret used for signing secure values, such as - signed cookies - --root Output URL that sets a cookie authenticating the - root user - --get TEXT Run an HTTP GET request against this path, print - results and exit - --version-note TEXT Additional note to show on /-/versions - --help-settings Show available settings - --pdb Launch debugger on any errors - -o, --open Open Datasette in your web browser - --create Create database files if they do not exist - --crossdb Enable cross-database joins using the /_memory - database - --nolock Ignore locking, open locked files in read-only mode - --ssl-keyfile TEXT SSL key file - --ssl-certfile TEXT SSL certificate file - --help Show this message and exit. + -i, --immutable PATH Database files to open in immutable mode + -h, --host TEXT Host for server. Defaults to 127.0.0.1 which + means only connections from the local machine + will be allowed. Use 0.0.0.0 to listen to all + IPs and allow access from other machines. + -p, --port INTEGER RANGE Port for server, defaults to 8001. Use -p 0 to + automatically assign an available port. + [0<=x<=65535] + --uds TEXT Bind to a Unix domain socket + --reload Automatically reload if code or metadata + change detected - useful for development + --cors Enable CORS by serving Access-Control-Allow- + Origin: * + --load-extension PATH:ENTRYPOINT? 
+ Path to a SQLite extension to load, and + optional entrypoint + --inspect-file TEXT Path to JSON file created using "datasette + inspect" + -m, --metadata FILENAME Path to JSON/YAML file containing + license/source metadata + --template-dir DIRECTORY Path to directory containing custom templates + --plugins-dir DIRECTORY Path to directory containing custom plugins + --static MOUNT:DIRECTORY Serve static files from this directory at + /MOUNT/... + --memory Make /_memory database available + --config CONFIG Deprecated: set config option using + configname:value. Use --setting instead. + --setting SETTING... Setting, see + docs.datasette.io/en/stable/settings.html + --secret TEXT Secret used for signing secure values, such as + signed cookies + --root Output URL that sets a cookie authenticating + the root user + --get TEXT Run an HTTP GET request against this path, + print results and exit + --version-note TEXT Additional note to show on /-/versions + --help-settings Show available settings + --pdb Launch debugger on any errors + -o, --open Open Datasette in your web browser + --create Create database files if they do not exist + --crossdb Enable cross-database joins using the /_memory + database + --nolock Ignore locking, open locked files in read-only + mode + --ssl-keyfile TEXT SSL key file + --ssl-certfile TEXT SSL certificate file + --help Show this message and exit. .. [[[end]]] @@ -566,8 +571,10 @@ This performance optimization is used automatically by some of the ``datasette p Options: --inspect-file TEXT - --load-extension TEXT Path to a SQLite extension to load - --help Show this message and exit. + --load-extension PATH:ENTRYPOINT? + Path to a SQLite extension to load, and + optional entrypoint + --help Show this message and exit. .. [[[end]]] From ba35105eee2d3ba620e4f230028a02b2e2571df2 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 23 Aug 2022 17:11:45 -0700 Subject: [PATCH 0280/1103] Test `--load-extension` in GitHub Actions (#1792) * Run the --load-extension test, refs #1789 * Ran cog, refs #1789 --- .github/workflows/test.yml | 3 +++ tests/test_api.py | 2 +- tests/test_html.py | 4 ++-- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 90b6555e..e38d5ee9 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -24,6 +24,9 @@ jobs: key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} restore-keys: | ${{ runner.os }}-pip- + - name: Build extension for --load-extension test + run: |- + (cd tests && gcc ext.c -fPIC -shared -o ext.so) - name: Install dependencies run: | pip install -e '.[test]' diff --git a/tests/test_api.py b/tests/test_api.py index 253c1718..f6db2f9d 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -36,7 +36,7 @@ def test_homepage(app_client): # 4 hidden FTS tables + no_primary_key (hidden in metadata) assert d["hidden_tables_count"] == 6 # 201 in no_primary_key, plus 6 in other hidden tables: - assert d["hidden_table_rows_sum"] == 207 + assert d["hidden_table_rows_sum"] == 207, response.json assert d["views_count"] == 4 diff --git a/tests/test_html.py b/tests/test_html.py index be21bd84..d6e969ad 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -115,7 +115,7 @@ def test_database_page(app_client): assert fragment in response.text # And views - views_ul = soup.find("h2", text="Views").find_next_sibling("ul") + views_ul = soup.find("h2", string="Views").find_next_sibling("ul") assert views_ul is not None assert [ 
("/fixtures/paginated_view", "paginated_view"), @@ -128,7 +128,7 @@ def test_database_page(app_client): ] == sorted([(a["href"], a.text) for a in views_ul.find_all("a")]) # And a list of canned queries - queries_ul = soup.find("h2", text="Queries").find_next_sibling("ul") + queries_ul = soup.find("h2", string="Queries").find_next_sibling("ul") assert queries_ul is not None assert [ ("/fixtures/from_async_hook", "from_async_hook"), From 51030df1869b3b574dd3584d1563415776b9cd4e Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 5 Sep 2022 11:35:40 -0700 Subject: [PATCH 0281/1103] Don't use upper bound dependencies any more See https://iscinumpy.dev/post/bound-version-constraints/ for the rationale behind this change. Closes #1800 --- setup.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/setup.py b/setup.py index a1c51d0b..b2e50b38 100644 --- a/setup.py +++ b/setup.py @@ -42,21 +42,21 @@ setup( include_package_data=True, python_requires=">=3.7", install_requires=[ - "asgiref>=3.2.10,<3.6.0", - "click>=7.1.1,<8.2.0", + "asgiref>=3.2.10", + "click>=7.1.1", "click-default-group-wheel>=1.2.2", - "Jinja2>=2.10.3,<3.1.0", - "hupper~=1.9", + "Jinja2>=2.10.3", + "hupper>=1.9", "httpx>=0.20", - "pint~=0.9", - "pluggy>=1.0,<1.1", - "uvicorn~=0.11", - "aiofiles>=0.4,<0.9", - "janus>=0.6.2,<1.1", + "pint>=0.9", + "pluggy>=1.0", + "uvicorn>=0.11", + "aiofiles>=0.4", + "janus>=0.6.2", "asgi-csrf>=0.9", - "PyYAML>=5.3,<7.0", - "mergedeep>=1.1.1,<1.4.0", - "itsdangerous>=1.1,<3.0", + "PyYAML>=5.3", + "mergedeep>=1.1.1", + "itsdangerous>=1.1", ], entry_points=""" [console_scripts] @@ -72,14 +72,14 @@ setup( "sphinx-copybutton", ], "test": [ - "pytest>=5.2.2,<7.2.0", - "pytest-xdist>=2.2.1,<2.6", - "pytest-asyncio>=0.17,<0.20", - "beautifulsoup4>=4.8.1,<4.12.0", + "pytest>=5.2.2", + "pytest-xdist>=2.2.1", + "pytest-asyncio>=0.17", + "beautifulsoup4>=4.8.1", "black==22.6.0", "blacken-docs==1.12.1", - "pytest-timeout>=1.4.2,<2.2", - "trustme>=0.7,<0.10", + "pytest-timeout>=1.4.2", + "trustme>=0.7", "cogapp>=3.3.0", ], "rich": ["rich"], From 294ecd45f7801971dbeef383d0c5456ee95ab839 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Sep 2022 11:51:51 -0700 Subject: [PATCH 0282/1103] Bump black from 22.6.0 to 22.8.0 (#1797) Bumps [black](https://github.com/psf/black) from 22.6.0 to 22.8.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/22.6.0...22.8.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] <support@github.com> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index b2e50b38..92fa60d0 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ setup( "pytest-xdist>=2.2.1", "pytest-asyncio>=0.17", "beautifulsoup4>=4.8.1", - "black==22.6.0", + "black==22.8.0", "blacken-docs==1.12.1", "pytest-timeout>=1.4.2", "trustme>=0.7", From b91e17280c05bbb9cf97432081bdcea8665879f9 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 5 Sep 2022 16:50:53 -0700 Subject: [PATCH 0283/1103] Run tests in serial, refs #1802 --- .github/workflows/test.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e38d5ee9..9c8c48ef 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -33,8 +33,7 @@ jobs: pip freeze - name: Run tests run: | - pytest -n auto -m "not serial" - pytest -m "serial" + pytest - name: Check if cog needs to be run run: | cog --check docs/*.rst From b2b901e8c4b939e50ee1117ffcd2881ed8a8e3bf Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 5 Sep 2022 17:05:23 -0700 Subject: [PATCH 0284/1103] Skip SpatiaLite test if no conn.enable_load_extension() Ran into this problem while working on #1802 --- tests/test_spatialite.py | 2 ++ tests/utils.py | 8 ++++++++ 2 files changed, 10 insertions(+) diff --git a/tests/test_spatialite.py b/tests/test_spatialite.py index 8b98c5d6..c07a30e8 100644 --- a/tests/test_spatialite.py +++ b/tests/test_spatialite.py @@ -1,5 +1,6 @@ from datasette.app import Datasette from datasette.utils import find_spatialite, SpatialiteNotFound, SPATIALITE_FUNCTIONS +from .utils import has_load_extension import pytest @@ -13,6 +14,7 @@ def has_spatialite(): @pytest.mark.asyncio @pytest.mark.skipif(not has_spatialite(), reason="Requires SpatiaLite") +@pytest.mark.skipif(not has_load_extension(), reason="Requires enable_load_extension") async def test_spatialite_version_info(): ds = Datasette(sqlite_extensions=["spatialite"]) response = await ds.client.get("/-/versions.json") diff --git a/tests/utils.py b/tests/utils.py index 972300db..191ead9b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,3 +1,6 @@ +from datasette.utils.sqlite import sqlite3 + + def assert_footer_links(soup): footer_links = soup.find("footer").findAll("a") assert 4 == len(footer_links) @@ -22,3 +25,8 @@ def inner_html(soup): # This includes the parent tag - so remove that inner_html = html.split(">", 1)[1].rsplit("<", 1)[0] return inner_html.strip() + + +def has_load_extension(): + conn = sqlite3.connect(":memory:") + return hasattr(conn, "enable_load_extension") From 1c29b925d300d1ee17047504473f2517767aa05b Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 5 Sep 2022 17:10:52 -0700 Subject: [PATCH 0285/1103] Run tests in serial again Because this didn't fix the issue I'm seeing in #1802 Revert "Run tests in serial, refs #1802" This reverts commit b91e17280c05bbb9cf97432081bdcea8665879f9. 
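The -m "serial" / -m "not serial" split that the revert below restores depends
on a "serial" marker on individual tests. The marker declaration itself is not
part of these patches; a minimal sketch of the usual pattern:

    # conftest.py - register the marker so pytest does not warn about it
    def pytest_configure(config):
        config.addinivalue_line(
            "markers", "serial: tests that must run outside pytest-xdist workers"
        )


    # in a test module
    import pytest


    @pytest.mark.serial
    def test_uses_shared_global_state():
        assert 1 + 1 == 2

Unmarked tests run in parallel under -n auto; marked ones run afterwards in a
single process.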
--- .github/workflows/test.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9c8c48ef..e38d5ee9 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -33,7 +33,8 @@ jobs: pip freeze - name: Run tests run: | - pytest + pytest -n auto -m "not serial" + pytest -m "serial" - name: Check if cog needs to be run run: | cog --check docs/*.rst From 64288d827f7ff97f825e10f714da3f781ecf9345 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 5 Sep 2022 17:40:19 -0700 Subject: [PATCH 0286/1103] Workaround for test failure: RuntimeError: There is no current event loop (#1803) * Remove ensure_eventloop hack * Hack to recover from intermittent RuntimeError calling asyncio.Lock() --- datasette/app.py | 10 +++++++++- tests/test_cli.py | 27 ++++++++++----------------- 2 files changed, 19 insertions(+), 18 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f2a6763a..c6bbdaf0 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -231,7 +231,15 @@ class Datasette: self.inspect_data = inspect_data self.immutables = set(immutables or []) self.databases = collections.OrderedDict() - self._refresh_schemas_lock = asyncio.Lock() + try: + self._refresh_schemas_lock = asyncio.Lock() + except RuntimeError as rex: + # Workaround for intermittent test failure, see: + # https://github.com/simonw/datasette/issues/1802 + if "There is no current event loop in thread" in str(rex): + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + self._refresh_schemas_lock = asyncio.Lock() self.crossdb = crossdb self.nolock = nolock if memory or crossdb or not self.files: diff --git a/tests/test_cli.py b/tests/test_cli.py index d0f6e26c..f0d28037 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -22,13 +22,6 @@ from unittest import mock import urllib -@pytest.fixture -def ensure_eventloop(): - # Workaround for "Event loop is closed" error - if asyncio.get_event_loop().is_closed(): - asyncio.set_event_loop(asyncio.new_event_loop()) - - def test_inspect_cli(app_client): runner = CliRunner() result = runner.invoke(cli, ["inspect", "fixtures.db"]) @@ -72,7 +65,7 @@ def test_serve_with_inspect_file_prepopulates_table_counts_cache(): ), ) def test_spatialite_error_if_attempt_to_open_spatialite( - ensure_eventloop, spatialite_paths, should_suggest_load_extension + spatialite_paths, should_suggest_load_extension ): with mock.patch("datasette.utils.SPATIALITE_PATHS", spatialite_paths): runner = CliRunner() @@ -199,14 +192,14 @@ def test_version(): @pytest.mark.parametrize("invalid_port", ["-1", "0.5", "dog", "65536"]) -def test_serve_invalid_ports(ensure_eventloop, invalid_port): +def test_serve_invalid_ports(invalid_port): runner = CliRunner(mix_stderr=False) result = runner.invoke(cli, ["--port", invalid_port]) assert result.exit_code == 2 assert "Invalid value for '-p'" in result.stderr -def test_setting(ensure_eventloop): +def test_setting(): runner = CliRunner() result = runner.invoke( cli, ["--setting", "default_page_size", "5", "--get", "/-/settings.json"] @@ -215,14 +208,14 @@ def test_setting(ensure_eventloop): assert json.loads(result.output)["default_page_size"] == 5 -def test_setting_type_validation(ensure_eventloop): +def test_setting_type_validation(): runner = CliRunner(mix_stderr=False) result = runner.invoke(cli, ["--setting", "default_page_size", "dog"]) assert result.exit_code == 2 assert '"default_page_size" should be an integer' in result.stderr -def 
test_config_deprecated(ensure_eventloop): +def test_config_deprecated(): # The --config option should show a deprecation message runner = CliRunner(mix_stderr=False) result = runner.invoke( @@ -233,14 +226,14 @@ def test_config_deprecated(ensure_eventloop): assert "will be deprecated in" in result.stderr -def test_sql_errors_logged_to_stderr(ensure_eventloop): +def test_sql_errors_logged_to_stderr(): runner = CliRunner(mix_stderr=False) result = runner.invoke(cli, ["--get", "/_memory.json?sql=select+blah"]) assert result.exit_code == 1 assert "sql = 'select blah', params = {}: no such column: blah\n" in result.stderr -def test_serve_create(ensure_eventloop, tmpdir): +def test_serve_create(tmpdir): runner = CliRunner() db_path = tmpdir / "does_not_exist_yet.db" assert not db_path.exists() @@ -258,7 +251,7 @@ def test_serve_create(ensure_eventloop, tmpdir): assert db_path.exists() -def test_serve_duplicate_database_names(ensure_eventloop, tmpdir): +def test_serve_duplicate_database_names(tmpdir): "'datasette db.db nested/db.db' should attach two databases, /db and /db_2" runner = CliRunner() db_1_path = str(tmpdir / "db.db") @@ -273,7 +266,7 @@ def test_serve_duplicate_database_names(ensure_eventloop, tmpdir): assert {db["name"] for db in databases} == {"db", "db_2"} -def test_serve_deduplicate_same_database_path(ensure_eventloop, tmpdir): +def test_serve_deduplicate_same_database_path(tmpdir): "'datasette db.db db.db' should only attach one database, /db" runner = CliRunner() db_path = str(tmpdir / "db.db") @@ -287,7 +280,7 @@ def test_serve_deduplicate_same_database_path(ensure_eventloop, tmpdir): @pytest.mark.parametrize( "filename", ["test-database (1).sqlite", "database (1).sqlite"] ) -def test_weird_database_names(ensure_eventloop, tmpdir, filename): +def test_weird_database_names(tmpdir, filename): # https://github.com/simonw/datasette/issues/1181 runner = CliRunner() db_path = str(tmpdir / filename) From c9d1943aede436fa3413fd49bc56335cbda4ad07 Mon Sep 17 00:00:00 2001 From: Daniel Rech <dr@netsyno.com> Date: Tue, 6 Sep 2022 02:45:41 +0200 Subject: [PATCH 0287/1103] Fix word break in facets by adding ul.tight-bullets li word-break: break-all (#1794) Thanks, @dmr --- datasette/static/app.css | 1 + 1 file changed, 1 insertion(+) diff --git a/datasette/static/app.css b/datasette/static/app.css index af3e14d5..712b9925 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -260,6 +260,7 @@ ul.bullets li { ul.tight-bullets li { list-style-type: disc; margin-bottom: 0; + word-break: break-all; } a.not-underlined { text-decoration: none; From d80775a48d20917633792fdc9525f075d3bc2c7a Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 5 Sep 2022 17:44:44 -0700 Subject: [PATCH 0288/1103] Raise error if it's not about loops, refs #1802 --- datasette/app.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/datasette/app.py b/datasette/app.py index c6bbdaf0..aeb81687 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -240,6 +240,8 @@ class Datasette: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) self._refresh_schemas_lock = asyncio.Lock() + else: + raise self.crossdb = crossdb self.nolock = nolock if memory or crossdb or not self.files: From 8430c3bc7dd22b173c1a8c6cd7180e3b31240cd1 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 6 Sep 2022 08:59:19 -0700 Subject: [PATCH 0289/1103] table facet_size in metadata, refs #1804 --- datasette/facets.py | 14 +++++++++++--- tests/test_facets.py | 17 
+++++++++++++++++ 2 files changed, 28 insertions(+), 3 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index b15a758c..e70d42df 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -102,11 +102,19 @@ class Facet: def get_facet_size(self): facet_size = self.ds.setting("default_facet_size") max_returned_rows = self.ds.setting("max_returned_rows") + table_facet_size = None + if self.table: + tables_metadata = self.ds.metadata("tables", database=self.database) or {} + table_metadata = tables_metadata.get(self.table) or {} + if table_metadata: + table_facet_size = table_metadata.get("facet_size") custom_facet_size = self.request.args.get("_facet_size") - if custom_facet_size == "max": - facet_size = max_returned_rows - elif custom_facet_size and custom_facet_size.isdigit(): + if custom_facet_size and custom_facet_size.isdigit(): facet_size = int(custom_facet_size) + elif table_facet_size: + facet_size = table_facet_size + if facet_size == "max": + facet_size = max_returned_rows return min(facet_size, max_returned_rows) async def suggest(self): diff --git a/tests/test_facets.py b/tests/test_facets.py index c28dc43c..cbee23b0 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -581,6 +581,23 @@ async def test_facet_size(): ) data5 = response5.json() assert len(data5["facet_results"]["city"]["results"]) == 20 + # Now try messing with facet_size in the table metadata + ds._metadata_local = { + "databases": { + "test_facet_size": {"tables": {"neighbourhoods": {"facet_size": 6}}} + } + } + response6 = await ds.client.get("/test_facet_size/neighbourhoods.json?_facet=city") + data6 = response6.json() + assert len(data6["facet_results"]["city"]["results"]) == 6 + # Setting it to max bumps it up to 50 again + ds._metadata_local["databases"]["test_facet_size"]["tables"]["neighbourhoods"][ + "facet_size" + ] = "max" + data7 = ( + await ds.client.get("/test_facet_size/neighbourhoods.json?_facet=city") + ).json() + assert len(data7["facet_results"]["city"]["results"]) == 20 def test_other_types_of_facet_in_metadata(): From 303c6c733d95a6133558ec1b468f5bea5827d0d2 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 6 Sep 2022 11:05:00 -0700 Subject: [PATCH 0290/1103] Fix for incorrectly handled _facet_size=max, refs #1804 --- datasette/facets.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index e70d42df..7fb0c68b 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -109,12 +109,19 @@ class Facet: if table_metadata: table_facet_size = table_metadata.get("facet_size") custom_facet_size = self.request.args.get("_facet_size") - if custom_facet_size and custom_facet_size.isdigit(): - facet_size = int(custom_facet_size) - elif table_facet_size: - facet_size = table_facet_size - if facet_size == "max": - facet_size = max_returned_rows + if custom_facet_size: + if custom_facet_size == "max": + facet_size = max_returned_rows + elif custom_facet_size.isdigit(): + facet_size = int(custom_facet_size) + else: + # Invalid value, ignore it + custom_facet_size = None + if table_facet_size and not custom_facet_size: + if table_facet_size == "max": + facet_size = max_returned_rows + else: + facet_size = table_facet_size return min(facet_size, max_returned_rows) async def suggest(self): From 0a7815d2038255a0834c955066a2a16c01f707b2 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 6 Sep 2022 11:06:49 -0700 Subject: [PATCH 0291/1103] 
Documentation for facet_size in metadata, closes #1804 --- docs/facets.rst | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/facets.rst b/docs/facets.rst index 2a2eb039..6c9d99bd 100644 --- a/docs/facets.rst +++ b/docs/facets.rst @@ -129,6 +129,22 @@ You can specify :ref:`array <facet_by_json_array>` or :ref:`date <facet_by_date> ] } +You can change the default facet size (the number of results shown for each facet) for a table using ``facet_size``: + +.. code-block:: json + + { + "databases": { + "sf-trees": { + "tables": { + "Street_Tree_List": { + "facets": ["qLegalStatus"], + "facet_size": 10 + } + } + } + } + } Suggested facets ---------------- From d0476897e10249bb4867473722270d02491c2c1f Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 6 Sep 2022 11:24:30 -0700 Subject: [PATCH 0292/1103] Fixed Sphinx warning about language = None --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 4ef6b768..8965974a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -71,7 +71,7 @@ release = "" # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. From ff9c87197dde8b09f9787ee878804cb6842ea5dc Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 6 Sep 2022 11:26:21 -0700 Subject: [PATCH 0293/1103] Fixed Sphinx warnings on cli-reference page --- docs/cli-reference.rst | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index f8419d58..4a8465cb 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -14,7 +14,7 @@ Running ``datasette`` without specifying a command runs the default command, ``d import textwrap def help(args): title = "datasette " + " ".join(args) - cog.out("::\n\n") + cog.out("\n::\n\n") result = CliRunner().invoke(cli.cli, args) output = result.output.replace("Usage: cli ", "Usage: datasette ") cog.out(textwrap.indent(output, ' ')) @@ -32,6 +32,7 @@ Running ``datasette --help`` shows a list of all of the available commands. .. [[[cog help(["--help"]) .. ]]] + :: Usage: datasette [OPTIONS] COMMAND [ARGS]... @@ -77,6 +78,7 @@ Once started you can access it at ``http://localhost:8001`` .. [[[cog help(["serve", "--help"]) .. ]]] + :: Usage: datasette serve [OPTIONS] [FILES]... @@ -202,6 +204,7 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam .. [[[cog help(["--help-settings"]) .. ]]] + :: Settings: @@ -258,6 +261,7 @@ Output JSON showing all currently installed plugins, their versions, whether the .. [[[cog help(["plugins", "--help"]) .. ]]] + :: Usage: datasette plugins [OPTIONS] @@ -326,6 +330,7 @@ Would install the `datasette-cluster-map <https://datasette.io/plugins/datasette .. [[[cog help(["install", "--help"]) .. ]]] + :: Usage: datasette install [OPTIONS] PACKAGES... @@ -349,6 +354,7 @@ Uninstall one or more plugins. .. [[[cog help(["uninstall", "--help"]) .. ]]] + :: Usage: datasette uninstall [OPTIONS] PACKAGES... @@ -374,6 +380,7 @@ Additional deployment targets can be added by plugins that use the :ref:`plugin_ .. [[[cog help(["publish", "--help"]) .. ]]] + :: Usage: datasette publish [OPTIONS] COMMAND [ARGS]... @@ -402,6 +409,7 @@ See :ref:`publish_cloud_run`. .. 
[[[cog help(["publish", "cloudrun", "--help"]) .. ]]] + :: Usage: datasette publish cloudrun [OPTIONS] [FILES]... @@ -459,6 +467,7 @@ See :ref:`publish_heroku`. .. [[[cog help(["publish", "heroku", "--help"]) .. ]]] + :: Usage: datasette publish heroku [OPTIONS] [FILES]... @@ -507,6 +516,7 @@ Package SQLite files into a Datasette Docker container, see :ref:`cli_package`. .. [[[cog help(["package", "--help"]) .. ]]] + :: Usage: datasette package [OPTIONS] FILES... @@ -560,6 +570,7 @@ This performance optimization is used automatically by some of the ``datasette p .. [[[cog help(["inspect", "--help"]) .. ]]] + :: Usage: datasette inspect [OPTIONS] [FILES]... From d0737e4de51ce178e556fc011ccb8cc46bbb6359 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 6 Sep 2022 16:50:43 -0700 Subject: [PATCH 0294/1103] truncate_cells_html now affects URLs too, refs #1805 --- datasette/utils/__init__.py | 10 ++++++++++ datasette/views/database.py | 11 ++++++++--- datasette/views/table.py | 8 ++++++-- tests/fixtures.py | 9 +++++---- tests/test_api.py | 2 +- tests/test_table_api.py | 11 +++++++---- tests/test_table_html.py | 11 +++++++++++ tests/test_utils.py | 20 ++++++++++++++++++++ 8 files changed, 68 insertions(+), 14 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index bbaa0510..2bdea673 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1167,3 +1167,13 @@ def resolve_routes(routes, path): if match is not None: return match, view return None, None + + +def truncate_url(url, length): + if (not length) or (len(url) <= length): + return url + bits = url.rsplit(".", 1) + if len(bits) == 2 and 1 <= len(bits[1]) <= 4 and "/" not in bits[1]: + rest, ext = bits + return rest[: length - 1 - len(ext)] + "…." 
+ ext + return url[: length - 1] + "…" diff --git a/datasette/views/database.py b/datasette/views/database.py index 77632b9d..fc344245 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -20,6 +20,7 @@ from datasette.utils import ( path_with_format, path_with_removed_args, sqlite3, + truncate_url, InvalidSql, ) from datasette.utils.asgi import AsgiFileDownload, NotFound, Response, Forbidden @@ -371,6 +372,7 @@ class QueryView(DataView): async def extra_template(): display_rows = [] + truncate_cells = self.ds.setting("truncate_cells_html") for row in results.rows if results else []: display_row = [] for column, value in zip(results.columns, row): @@ -396,9 +398,12 @@ class QueryView(DataView): if value in ("", None): display_value = Markup(" ") elif is_url(str(display_value).strip()): - display_value = Markup( - '<a href="{url}">{url}</a>'.format( - url=escape(value.strip()) + display_value = markupsafe.Markup( + '<a href="{url}">{truncated_url}</a>'.format( + url=markupsafe.escape(value.strip()), + truncated_url=markupsafe.escape( + truncate_url(value.strip(), truncate_cells) + ), ) ) elif isinstance(display_value, bytes): diff --git a/datasette/views/table.py b/datasette/views/table.py index 49c30c9c..60c092f9 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -24,6 +24,7 @@ from datasette.utils import ( path_with_removed_args, path_with_replaced_args, to_css_class, + truncate_url, urlsafe_components, value_as_boolean, ) @@ -966,8 +967,11 @@ async def display_columns_and_rows( display_value = markupsafe.Markup(" ") elif is_url(str(value).strip()): display_value = markupsafe.Markup( - '<a href="{url}">{url}</a>'.format( - url=markupsafe.escape(value.strip()) + '<a href="{url}">{truncated_url}</a>'.format( + url=markupsafe.escape(value.strip()), + truncated_url=markupsafe.escape( + truncate_url(value.strip(), truncate_cells) + ), ) ) elif column in table_metadata.get("units", {}) and value != "": diff --git a/tests/fixtures.py b/tests/fixtures.py index c145ac78..82d8452e 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -598,23 +598,24 @@ CREATE TABLE roadside_attractions ( pk integer primary key, name text, address text, + url text, latitude real, longitude real ); INSERT INTO roadside_attractions VALUES ( - 1, "The Mystery Spot", "465 Mystery Spot Road, Santa Cruz, CA 95065", + 1, "The Mystery Spot", "465 Mystery Spot Road, Santa Cruz, CA 95065", "https://www.mysteryspot.com/", 37.0167, -122.0024 ); INSERT INTO roadside_attractions VALUES ( - 2, "Winchester Mystery House", "525 South Winchester Boulevard, San Jose, CA 95128", + 2, "Winchester Mystery House", "525 South Winchester Boulevard, San Jose, CA 95128", "https://winchestermysteryhouse.com/", 37.3184, -121.9511 ); INSERT INTO roadside_attractions VALUES ( - 3, "Burlingame Museum of PEZ Memorabilia", "214 California Drive, Burlingame, CA 94010", + 3, "Burlingame Museum of PEZ Memorabilia", "214 California Drive, Burlingame, CA 94010", null, 37.5793, -122.3442 ); INSERT INTO roadside_attractions VALUES ( - 4, "Bigfoot Discovery Museum", "5497 Highway 9, Felton, CA 95018", + 4, "Bigfoot Discovery Museum", "5497 Highway 9, Felton, CA 95018", "https://www.bigfootdiscoveryproject.com/", 37.0414, -122.0725 ); diff --git a/tests/test_api.py b/tests/test_api.py index f6db2f9d..7a2bf91f 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -339,7 +339,7 @@ def test_database_page(app_client): }, { "name": "roadside_attractions", - "columns": ["pk", "name", "address", "latitude", 
"longitude"], + "columns": ["pk", "name", "address", "url", "latitude", "longitude"], "primary_keys": ["pk"], "count": 4, "hidden": False, diff --git a/tests/test_table_api.py b/tests/test_table_api.py index e56a72b5..0db04434 100644 --- a/tests/test_table_api.py +++ b/tests/test_table_api.py @@ -615,11 +615,12 @@ def test_table_through(app_client): response = app_client.get( '/fixtures/roadside_attractions.json?_through={"table":"roadside_attraction_characteristics","column":"characteristic_id","value":"1"}' ) - assert [ + assert response.json["rows"] == [ [ 3, "Burlingame Museum of PEZ Memorabilia", "214 California Drive, Burlingame, CA 94010", + None, 37.5793, -122.3442, ], @@ -627,13 +628,15 @@ def test_table_through(app_client): 4, "Bigfoot Discovery Museum", "5497 Highway 9, Felton, CA 95018", + "https://www.bigfootdiscoveryproject.com/", 37.0414, -122.0725, ], - ] == response.json["rows"] + ] + assert ( - 'where roadside_attraction_characteristics.characteristic_id = "1"' - == response.json["human_description_en"] + response.json["human_description_en"] + == 'where roadside_attraction_characteristics.characteristic_id = "1"' ) diff --git a/tests/test_table_html.py b/tests/test_table_html.py index f3808ea3..8e37468f 100644 --- a/tests/test_table_html.py +++ b/tests/test_table_html.py @@ -69,6 +69,17 @@ def test_table_cell_truncation(): td.string for td in table.findAll("td", {"class": "col-neighborhood-b352a7"}) ] + # URLs should be truncated too + response2 = client.get("/fixtures/roadside_attractions") + assert response2.status == 200 + table = Soup(response2.body, "html.parser").find("table") + tds = table.findAll("td", {"class": "col-url"}) + assert [str(td) for td in tds] == [ + '<td class="col-url type-str"><a href="https://www.mysteryspot.com/">http…</a></td>', + '<td class="col-url type-str"><a href="https://winchestermysteryhouse.com/">http…</a></td>', + '<td class="col-url type-none">\xa0</td>', + '<td class="col-url type-str"><a href="https://www.bigfootdiscoveryproject.com/">http…</a></td>', + ] def test_add_filter_redirects(app_client): diff --git a/tests/test_utils.py b/tests/test_utils.py index df788767..d71a612d 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -626,3 +626,23 @@ def test_tilde_encoding(original, expected): assert actual == expected # And test round-trip assert original == utils.tilde_decode(actual) + + +@pytest.mark.parametrize( + "url,length,expected", + ( + ("https://example.com/", 5, "http…"), + ("https://example.com/foo/bar", 15, "https://exampl…"), + ("https://example.com/foo/bar/baz.jpg", 30, "https://example.com/foo/ba….jpg"), + # Extensions longer than 4 characters are not treated specially: + ("https://example.com/foo/bar/baz.jpeg2", 30, "https://example.com/foo/bar/b…"), + ( + "https://example.com/foo/bar/baz.jpeg2", + None, + "https://example.com/foo/bar/baz.jpeg2", + ), + ), +) +def test_truncate_url(url, length, expected): + actual = utils.truncate_url(url, length) + assert actual == expected From 5aa359b86907d11b3ee601510775a85a90224da8 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 6 Sep 2022 16:58:30 -0700 Subject: [PATCH 0295/1103] Apply cell truncation on query page too, refs #1805 --- datasette/views/database.py | 7 ++++++- tests/test_html.py | 19 +++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index fc344245..affbc540 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ 
-428,7 +428,12 @@ class QueryView(DataView): "" if len(value) == 1 else "s", ) ) - + else: + display_value = str(value) + if truncate_cells and len(display_value) > truncate_cells: + display_value = ( + display_value[:truncate_cells] + "\u2026" + ) display_row.append(display_value) display_rows.append(display_row) diff --git a/tests/test_html.py b/tests/test_html.py index d6e969ad..bf915247 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -186,6 +186,25 @@ def test_row_page_does_not_truncate(): ] +def test_query_page_truncates(): + with make_app_client(settings={"truncate_cells_html": 5}) as client: + response = client.get( + "/fixtures?" + + urllib.parse.urlencode( + { + "sql": "select 'this is longer than 5' as a, 'https://example.com/' as b" + } + ) + ) + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + tds = table.findAll("td") + assert [str(td) for td in tds] == [ + '<td class="col-a">this …</td>', + '<td class="col-b"><a href="https://example.com/">http…</a></td>', + ] + + @pytest.mark.parametrize( "path,expected_classes", [ From bf8d84af5422606597be893cedd375020cb2b369 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 6 Sep 2022 20:34:59 -0700 Subject: [PATCH 0296/1103] word-wrap: anywhere on links in cells, refs #1805 --- datasette/static/app.css | 1 + 1 file changed, 1 insertion(+) diff --git a/datasette/static/app.css b/datasette/static/app.css index 712b9925..08b724f6 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -446,6 +446,7 @@ th { } table a:link { text-decoration: none; + word-wrap: anywhere; } .rows-and-columns td:before { display: block; From fb7e70d5e72a951efe4b29ad999d8915c032d021 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 9 Sep 2022 09:19:20 -0700 Subject: [PATCH 0297/1103] Database(is_mutable=) now defaults to True, closes #1808 Refs https://github.com/simonw/datasette-upload-dbs/issues/6 --- datasette/database.py | 3 +-- docs/internals.rst | 9 +++++---- tests/test_internals_database.py | 1 + tests/test_internals_datasette.py | 2 +- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index fa558045..44467370 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -28,7 +28,7 @@ AttachedDatabase = namedtuple("AttachedDatabase", ("seq", "name", "file")) class Database: def __init__( - self, ds, path=None, is_mutable=False, is_memory=False, memory_name=None + self, ds, path=None, is_mutable=True, is_memory=False, memory_name=None ): self.name = None self.route = None @@ -39,7 +39,6 @@ class Database: self.memory_name = memory_name if memory_name is not None: self.is_memory = True - self.is_mutable = True self.hash = None self.cached_size = None self._cached_table_counts = None diff --git a/docs/internals.rst b/docs/internals.rst index 20797e98..adeec1d8 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -426,12 +426,13 @@ The ``db`` parameter should be an instance of the ``datasette.database.Database` Database( datasette, path="path/to/my-new-database.db", - is_mutable=True, ) ) This will add a mutable database and serve it at ``/my-new-database``. +Use ``is_mutable=False`` to add an immutable database. + ``.add_database()`` returns the Database instance, with its name set as the ``database.name`` attribute. Any time you are working with a newly added database you should use the return value of ``.add_database()``, for example: .. 
code-block:: python @@ -671,8 +672,8 @@ Instances of the ``Database`` class can be used to execute queries against attac .. _database_constructor: -Database(ds, path=None, is_mutable=False, is_memory=False, memory_name=None) ----------------------------------------------------------------------------- +Database(ds, path=None, is_mutable=True, is_memory=False, memory_name=None) +--------------------------------------------------------------------------- The ``Database()`` constructor can be used by plugins, in conjunction with :ref:`datasette_add_database`, to create and register new databases. @@ -685,7 +686,7 @@ The arguments are as follows: Path to a SQLite database file on disk. ``is_mutable`` - boolean - Set this to ``True`` if it is possible that updates will be made to that database - otherwise Datasette will open it in immutable mode and any changes could cause undesired behavior. + Set this to ``False`` to cause Datasette to open the file in immutable mode. ``is_memory`` - boolean Use this to create non-shared memory connections. diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 551f67e1..9e81c1d6 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -499,6 +499,7 @@ def test_mtime_ns_is_none_for_memory(app_client): def test_is_mutable(app_client): + assert Database(app_client.ds, is_memory=True).is_mutable is True assert Database(app_client.ds, is_memory=True, is_mutable=True).is_mutable is True assert Database(app_client.ds, is_memory=True, is_mutable=False).is_mutable is False diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index 1dc14cab..249920fe 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -58,7 +58,7 @@ async def test_datasette_constructor(): "route": "_memory", "path": None, "size": 0, - "is_mutable": False, + "is_mutable": True, "is_memory": True, "hash": None, } From 610425460b519e9c16d386cb81aa081c9d730ef0 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sat, 10 Sep 2022 14:24:26 -0700 Subject: [PATCH 0298/1103] Add --nolock to the README Chrome demo Refs #1744 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1af20129..af95b85e 100644 --- a/README.md +++ b/README.md @@ -48,7 +48,7 @@ This will start a web server on port 8001 - visit http://localhost:8001/ to acce Use Chrome on OS X? 
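With is_mutable= defaulting to True, code that registers extra databases only
needs the flag when opting out. A sketch with illustrative paths:

    from datasette.app import Datasette
    from datasette.database import Database

    datasette = Datasette([])

    # Mutable is now the default - no is_mutable=True required.
    writable = datasette.add_database(
        Database(datasette, path="path/to/my-new-database.db")
    )

    # Opt out explicitly for files that must be served read-only.
    readonly = datasette.add_database(
        Database(datasette, path="path/to/archive.db", is_mutable=False)
    )

    print(writable.name, readonly.name)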
You can run datasette against your browser history like so: - datasette ~/Library/Application\ Support/Google/Chrome/Default/History + datasette ~/Library/Application\ Support/Google/Chrome/Default/History --nolock Now visiting http://localhost:8001/History/downloads will show you a web interface to browse your downloads data: From b40872f5e5ae5dad331c58f75451e2d206565196 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 14 Sep 2022 14:31:54 -0700 Subject: [PATCH 0299/1103] prepare_jinja2_environment(datasette) argument, refs #1809 --- datasette/app.py | 2 +- datasette/hookspecs.py | 2 +- docs/plugin_hooks.rst | 9 +++++++-- tests/plugins/my_plugin.py | 3 ++- tests/test_plugins.py | 5 +++-- 5 files changed, 14 insertions(+), 7 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index aeb81687..db686670 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -345,7 +345,7 @@ class Datasette: self.jinja_env.filters["escape_sqlite"] = escape_sqlite self.jinja_env.filters["to_css_class"] = to_css_class # pylint: disable=no-member - pm.hook.prepare_jinja2_environment(env=self.jinja_env) + pm.hook.prepare_jinja2_environment(env=self.jinja_env, datasette=self) self._register_renderers() self._permission_checks = collections.deque(maxlen=200) diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index a5fb536f..34e19664 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -26,7 +26,7 @@ def prepare_connection(conn, database, datasette): @hookspec -def prepare_jinja2_environment(env): +def prepare_jinja2_environment(env, datasette): """Modify Jinja2 template environment e.g. register custom template tags""" diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 30bd75b7..62ec5c90 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -61,12 +61,15 @@ Examples: `datasette-jellyfish <https://datasette.io/plugins/datasette-jellyfish .. _plugin_hook_prepare_jinja2_environment: -prepare_jinja2_environment(env) -------------------------------- +prepare_jinja2_environment(env, datasette) +------------------------------------------ ``env`` - jinja2 Environment The template environment that is being prepared +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` + This hook is called with the Jinja2 environment that is used to evaluate Datasette HTML templates. You can use it to do things like `register custom template filters <http://jinja.pocoo.org/docs/2.10/api/#custom-filters>`_, for @@ -85,6 +88,8 @@ You can now use this filter in your custom templates like so:: Table name: {{ table|uppercase }} +Examples: `datasette-edit-templates <https://datasette.io/plugins/datasette-edit-templates>`_ + .. 
_plugin_hook_extra_template_vars: extra_template_vars(template, database, table, columns, view_name, request, datasette) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 53613b7d..d49a7a34 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -142,8 +142,9 @@ def extra_template_vars( @hookimpl -def prepare_jinja2_environment(env): +def prepare_jinja2_environment(env, datasette): env.filters["format_numeric"] = lambda s: f"{float(s):,.0f}" + env.filters["to_hello"] = lambda s: datasette._HELLO @hookimpl diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 948a40b8..590d88f6 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -545,11 +545,12 @@ def test_hook_register_output_renderer_can_render(app_client): @pytest.mark.asyncio async def test_hook_prepare_jinja2_environment(app_client): + app_client.ds._HELLO = "HI" template = app_client.ds.jinja_env.from_string( - "Hello there, {{ a|format_numeric }}", {"a": 3412341} + "Hello there, {{ a|format_numeric }}, {{ a|to_hello }}", {"a": 3412341} ) rendered = await app_client.ds.render_template(template) - assert "Hello there, 3,412,341" == rendered + assert "Hello there, 3,412,341, HI" == rendered def test_hook_publish_subcommand(): From 2ebcffe2226ece2a5a86722790d486a480338632 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Sep 2022 12:50:52 -0700 Subject: [PATCH 0300/1103] Bump furo from 2022.6.21 to 2022.9.15 (#1812) Bumps [furo](https://github.com/pradyunsg/furo) from 2022.6.21 to 2022.9.15. - [Release notes](https://github.com/pradyunsg/furo/releases) - [Changelog](https://github.com/pradyunsg/furo/blob/main/docs/changelog.md) - [Commits](https://github.com/pradyunsg/furo/compare/2022.06.21...2022.09.15) --- updated-dependencies: - dependency-name: furo dependency-type: direct:development update-type: version-update:semver-minor ... 
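Pulling the pieces above together, a plugin using the new ``datasette`` argument
might look like this sketch - the plugin name, filter and configuration key are
all illustrative:

    from datasette import hookimpl


    @hookimpl
    def prepare_jinja2_environment(env, datasette):
        config = datasette.plugin_config("my-filters-plugin") or {}
        prefix = config.get("prefix", ">> ")

        def shout(value):
            return prefix + str(value).upper()

        env.filters["shout"] = shout

Templates can then use ``{{ table|shout }}``, with the prefix controlled through
the plugin's configuration in metadata.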
Signed-off-by: dependabot[bot] <support@github.com> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 92fa60d0..afcba1f0 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,7 @@ setup( setup_requires=["pytest-runner"], extras_require={ "docs": [ - "furo==2022.6.21", + "furo==2022.9.15", "sphinx-autobuild", "codespell", "blacken-docs", From ddc999ad1296e8c69cffede3e367dda059b8adad Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 16 Sep 2022 20:38:15 -0700 Subject: [PATCH 0301/1103] Async support for prepare_jinja2_environment, closes #1809 --- datasette/app.py | 22 ++++++++++++++--- datasette/utils/testing.py | 1 + docs/plugin_hooks.rst | 2 ++ docs/testing_plugins.rst | 30 ++++++++++++++++++++++++ tests/fixtures.py | 1 + tests/plugins/my_plugin.py | 10 ++++++-- tests/plugins/my_plugin_2.py | 6 +++++ tests/test_internals_datasette_client.py | 6 +++-- tests/test_plugins.py | 6 +++-- tests/test_routes.py | 1 + 10 files changed, 76 insertions(+), 9 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index db686670..ea3e7b43 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -208,6 +208,7 @@ class Datasette: crossdb=False, nolock=False, ): + self._startup_invoked = False assert config_dir is None or isinstance( config_dir, Path ), "config_dir= should be a pathlib.Path" @@ -344,9 +345,6 @@ class Datasette: self.jinja_env.filters["quote_plus"] = urllib.parse.quote_plus self.jinja_env.filters["escape_sqlite"] = escape_sqlite self.jinja_env.filters["to_css_class"] = to_css_class - # pylint: disable=no-member - pm.hook.prepare_jinja2_environment(env=self.jinja_env, datasette=self) - self._register_renderers() self._permission_checks = collections.deque(maxlen=200) self._root_token = secrets.token_hex(32) @@ -389,8 +387,16 @@ class Datasette: return Urls(self) async def invoke_startup(self): + # This must be called for Datasette to be in a usable state + if self._startup_invoked: + return + for hook in pm.hook.prepare_jinja2_environment( + env=self.jinja_env, datasette=self + ): + await await_me_maybe(hook) for hook in pm.hook.startup(datasette=self): await await_me_maybe(hook) + self._startup_invoked = True def sign(self, value, namespace="default"): return URLSafeSerializer(self._secret, namespace).dumps(value) @@ -933,6 +939,8 @@ class Datasette: async def render_template( self, templates, context=None, request=None, view_name=None ): + if not self._startup_invoked: + raise Exception("render_template() called before await ds.invoke_startup()") context = context or {} if isinstance(templates, Template): template = templates @@ -1495,34 +1503,42 @@ class DatasetteClient: return path async def get(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.get(self._fix(path), **kwargs) async def options(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.options(self._fix(path), **kwargs) async def head(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.head(self._fix(path), **kwargs) async def post(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.post(self._fix(path), 
**kwargs) async def put(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.put(self._fix(path), **kwargs) async def patch(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.patch(self._fix(path), **kwargs) async def delete(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.delete(self._fix(path), **kwargs) async def request(self, method, path, **kwargs): + await self.ds.invoke_startup() avoid_path_rewrites = kwargs.pop("avoid_path_rewrites", None) async with httpx.AsyncClient(app=self.app) as client: return await client.request( diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index 640c94e6..b28fc575 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -147,6 +147,7 @@ class TestClient: content_type=None, if_none_match=None, ): + await self.ds.invoke_startup() headers = headers or {} if content_type: headers["content-type"] = content_type diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 62ec5c90..f208e727 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -88,6 +88,8 @@ You can now use this filter in your custom templates like so:: Table name: {{ table|uppercase }} +This function can return an awaitable function if it needs to run any async code. + Examples: `datasette-edit-templates <https://datasette.io/plugins/datasette-edit-templates>`_ .. _plugin_hook_extra_template_vars: diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 992b4b0e..41f50e56 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -52,6 +52,36 @@ Then run the tests using pytest like so:: pytest +.. _testing_plugins_datasette_test_instance: + +Setting up a Datasette test instance +------------------------------------ + +The above example shows the easiest way to start writing tests against a Datasette instance: + +.. code-block:: python + + from datasette.app import Datasette + import pytest + + + @pytest.mark.asyncio + async def test_plugin_is_installed(): + datasette = Datasette(memory=True) + response = await datasette.client.get("/-/plugins.json") + assert response.status_code == 200 + +Creating a ``Datasette()`` instance like this as useful shortcut in tests, but there is one detail you need to be aware of. It's important to ensure that the async method ``.invoke_startup()`` is called on that instance. You can do that like this: + +.. code-block:: python + + datasette = Datasette(memory=True) + await datasette.invoke_startup() + +This method registers any :ref:`plugin_hook_startup` or :ref:`plugin_hook_prepare_jinja2_environment` plugins that might themselves need to make async calls. + +If you are using ``await datasette.client.get()`` and similar methods then you don't need to worry about this - those method calls ensure that ``.invoke_startup()`` has been called for you. + .. 
_testing_plugins_pdb: Using pdb for errors thrown inside Datasette diff --git a/tests/fixtures.py b/tests/fixtures.py index 82d8452e..5a875cd2 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -71,6 +71,7 @@ EXPECTED_PLUGINS = [ "handle_exception", "menu_links", "permission_allowed", + "prepare_jinja2_environment", "register_routes", "render_cell", "startup", diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index d49a7a34..1a41de38 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -143,8 +143,14 @@ def extra_template_vars( @hookimpl def prepare_jinja2_environment(env, datasette): - env.filters["format_numeric"] = lambda s: f"{float(s):,.0f}" - env.filters["to_hello"] = lambda s: datasette._HELLO + async def select_times_three(s): + db = datasette.get_database() + return (await db.execute("select 3 * ?", [int(s)])).first()[0] + + async def inner(): + env.filters["select_times_three"] = select_times_three + + return inner @hookimpl diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index 4df02343..cee80703 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -126,6 +126,12 @@ def permission_allowed(datasette, actor, action): return inner +@hookimpl +def prepare_jinja2_environment(env, datasette): + env.filters["format_numeric"] = lambda s: f"{float(s):,.0f}" + env.filters["to_hello"] = lambda s: datasette._HELLO + + @hookimpl def startup(datasette): async def inner(): diff --git a/tests/test_internals_datasette_client.py b/tests/test_internals_datasette_client.py index 8c5b5bd3..497bf475 100644 --- a/tests/test_internals_datasette_client.py +++ b/tests/test_internals_datasette_client.py @@ -1,10 +1,12 @@ from .fixtures import app_client import httpx import pytest +import pytest_asyncio -@pytest.fixture -def datasette(app_client): +@pytest_asyncio.fixture +async def datasette(app_client): + await app_client.ds.invoke_startup() return app_client.ds diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 590d88f6..0ae3abf3 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -546,11 +546,13 @@ def test_hook_register_output_renderer_can_render(app_client): @pytest.mark.asyncio async def test_hook_prepare_jinja2_environment(app_client): app_client.ds._HELLO = "HI" + await app_client.ds.invoke_startup() template = app_client.ds.jinja_env.from_string( - "Hello there, {{ a|format_numeric }}, {{ a|to_hello }}", {"a": 3412341} + "Hello there, {{ a|format_numeric }}, {{ a|to_hello }}, {{ b|select_times_three }}", + {"a": 3412341, "b": 5}, ) rendered = await app_client.ds.render_template(template) - assert "Hello there, 3,412,341, HI" == rendered + assert "Hello there, 3,412,341, HI, 15" == rendered def test_hook_publish_subcommand(): diff --git a/tests/test_routes.py b/tests/test_routes.py index 5ae55d21..d467abe1 100644 --- a/tests/test_routes.py +++ b/tests/test_routes.py @@ -59,6 +59,7 @@ def test_routes(routes, path, expected_class, expected_matches): @pytest_asyncio.fixture async def ds_with_route(): ds = Datasette() + await ds.invoke_startup() ds.remove_database("_memory") db = Database(ds, is_memory=True, memory_name="route-name-db") ds.add_database(db, name="original-name", route="custom-route-name") From df851c117db031dec50dd4ef1ca34745920ac77a Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 19 Sep 2022 16:46:39 -0700 Subject: [PATCH 0302/1103] Validate settings.json keys on startup, closes #1816 Refs #1814 --- datasette/app.py | 4 ++++ 
tests/test_config_dir.py | 20 ++++++++++++++++++-- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index ea3e7b43..8873ce28 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -292,6 +292,10 @@ class Datasette: raise StartupError("config.json should be renamed to settings.json") if config_dir and (config_dir / "settings.json").exists() and not settings: settings = json.loads((config_dir / "settings.json").read_text()) + # Validate those settings + for key in settings: + if key not in DEFAULT_SETTINGS: + raise StartupError("Invalid setting '{key}' in settings.json") self._settings = dict(DEFAULT_SETTINGS, **(settings or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index fe927c42..e365515b 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -5,6 +5,7 @@ import pytest from datasette.app import Datasette from datasette.cli import cli from datasette.utils.sqlite import sqlite3 +from datasette.utils import StartupError from .fixtures import TestClient as _TestClient from click.testing import CliRunner @@ -27,9 +28,8 @@ body { margin-top: 3em} @pytest.fixture(scope="session") -def config_dir_client(tmp_path_factory): +def config_dir(tmp_path_factory): config_dir = tmp_path_factory.mktemp("config-dir") - plugins_dir = config_dir / "plugins" plugins_dir.mkdir() (plugins_dir / "hooray.py").write_text(PLUGIN, "utf-8") @@ -77,7 +77,23 @@ def config_dir_client(tmp_path_factory): ), "utf-8", ) + return config_dir + +def test_invalid_settings(config_dir): + previous = (config_dir / "settings.json").read_text("utf-8") + (config_dir / "settings.json").write_text( + json.dumps({"invalid": "invalid-setting"}), "utf-8" + ) + try: + with pytest.raises(StartupError): + ds = Datasette([], config_dir=config_dir) + finally: + (config_dir / "settings.json").write_text(previous, "utf-8") + + +@pytest.fixture(scope="session") +def config_dir_client(config_dir): ds = Datasette([], config_dir=config_dir) yield _TestClient(ds) From cb1e093fd361b758120aefc1a444df02462389a3 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 19 Sep 2022 18:15:40 -0700 Subject: [PATCH 0303/1103] Fixed error message, closes #1816 --- datasette/app.py | 4 +++- tests/test_config_dir.py | 3 ++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 8873ce28..03d1dacc 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -295,7 +295,9 @@ class Datasette: # Validate those settings for key in settings: if key not in DEFAULT_SETTINGS: - raise StartupError("Invalid setting '{key}' in settings.json") + raise StartupError( + "Invalid setting '{}' in settings.json".format(key) + ) self._settings = dict(DEFAULT_SETTINGS, **(settings or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index e365515b..f5ecf0d6 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -86,8 +86,9 @@ def test_invalid_settings(config_dir): json.dumps({"invalid": "invalid-setting"}), "utf-8" ) try: - with pytest.raises(StartupError): + with pytest.raises(StartupError) as ex: ds = Datasette([], config_dir=config_dir) + assert ex.value.args[0] == "Invalid setting 'invalid' in settings.json" finally: (config_dir / "settings.json").write_text(previous, "utf-8") 
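The validation added in these two patches is easy to see end to end with a
throwaway configuration directory (the directory and setting values here are
arbitrary):

    import json
    import tempfile
    from pathlib import Path

    from datasette.app import Datasette
    from datasette.utils import StartupError

    config_dir = Path(tempfile.mkdtemp())

    # A known key such as default_page_size is accepted as before...
    (config_dir / "settings.json").write_text(json.dumps({"default_page_size": 50}))
    Datasette([], config_dir=config_dir)

    # ...but an unknown key now fails at construction time.
    (config_dir / "settings.json").write_text(json.dumps({"not_a_real_setting": 1}))
    try:
        Datasette([], config_dir=config_dir)
    except StartupError as ex:
        print(ex)  # Invalid setting 'not_a_real_setting' in settings.json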
From 212137a90b4291db9605e039f198564dae59c5d0 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 26 Sep 2022 14:14:25 -0700 Subject: [PATCH 0304/1103] Release 0.63a0 Refs #1786, #1787, #1789, #1794, #1800, #1804, #1805, #1808, #1809, #1816 --- datasette/version.py | 2 +- docs/changelog.rst | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 0453346c..e5ad585f 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.62" +__version__ = "0.63a0" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index f9dcc980..bd93f4cb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,23 @@ Changelog ========= +.. _v0_63a0: + +0.63a0 (2022-09-26) +------------------- + +- The :ref:`plugin_hook_prepare_jinja2_environment` plugin hook now accepts an optional ``datasette`` argument. Hook implementations can also now return an ``async`` function which will be awaited automatically. (:issue:`1809`) +- ``--load-extension`` option now supports entrypoints. Thanks, Alex Garcia. (`#1789 <https://github.com/simonw/datasette/pull/1789>`__) +- New tutorial: `Cleaning data with sqlite-utils and Datasette <https://datasette.io/tutorials/clean-data>`__. +- Facet size can now be set per-table with the new ``facet_size`` table metadata option. (:issue:`1804`) +- ``truncate_cells_html`` setting now also affects long URLs in columns. (:issue:`1805`) +- ``Database(is_mutable=)`` now defaults to ``True``. (:issue:`1808`) +- Non-JavaScript textarea now increases height to fit the SQL query. (:issue:`1786`) +- More detailed command descriptions on the :ref:`CLI reference <cli_reference>` page. (:issue:`1787`) +- Datasette no longer enforces upper bounds on its depenedencies. (:issue:`1800`) +- Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. (`#1794 <https://github.com/simonw/datasette/pull/1794>`__) +- The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. (:issue:`1816`) + .. 
_v0_62: 0.62 (2022-08-14) From 5f9f567acbc58c9fcd88af440e68034510fb5d2b Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Mon, 26 Sep 2022 16:06:01 -0700 Subject: [PATCH 0305/1103] Show SQL query when reporting time limit error, closes #1819 --- datasette/database.py | 5 ++++- datasette/views/base.py | 21 +++++++++++++-------- tests/test_api.py | 12 +++++++++++- tests/test_html.py | 10 +++++++--- 4 files changed, 35 insertions(+), 13 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 44467370..46094bd7 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -476,7 +476,10 @@ class WriteTask: class QueryInterrupted(Exception): - pass + def __init__(self, e, sql, params): + self.e = e + self.sql = sql + self.params = params class MultipleValues(Exception): diff --git a/datasette/views/base.py b/datasette/views/base.py index 221e1882..67aa3a42 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -1,10 +1,12 @@ import asyncio import csv import hashlib -import re import sys +import textwrap import time import urllib +from markupsafe import escape + import pint @@ -24,11 +26,9 @@ from datasette.utils import ( path_with_removed_args, path_with_format, sqlite3, - HASH_LENGTH, ) from datasette.utils.asgi import ( AsgiStream, - Forbidden, NotFound, Response, BadRequest, @@ -371,13 +371,18 @@ class DataView(BaseView): ) = response_or_template_contexts else: data, extra_template_data, templates = response_or_template_contexts - except QueryInterrupted: + except QueryInterrupted as ex: raise DatasetteError( - """ - SQL query took too long. The time limit is controlled by the + textwrap.dedent( + """ + <p>SQL query took too long. The time limit is controlled by the <a href="https://docs.datasette.io/en/stable/settings.html#sql-time-limit-ms">sql_time_limit_ms</a> - configuration option. - """, + configuration option.</p> + <pre>{}</pre> + """.format( + escape(ex.sql) + ) + ).strip(), title="SQL Interrupted", status=400, message_is_html=True, diff --git a/tests/test_api.py b/tests/test_api.py index 7a2bf91f..ad74d16e 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -656,7 +656,17 @@ def test_custom_sql(app_client): def test_sql_time_limit(app_client_shorter_time_limit): response = app_client_shorter_time_limit.get("/fixtures.json?sql=select+sleep(0.5)") assert 400 == response.status - assert "SQL Interrupted" == response.json["title"] + assert response.json == { + "ok": False, + "error": ( + "<p>SQL query took too long. 
The time limit is controlled by the\n" + '<a href="https://docs.datasette.io/en/stable/settings.html#sql-time-limit-ms">sql_time_limit_ms</a>\n' + "configuration option.</p>\n" + "<pre>select sleep(0.5)</pre>" + ), + "status": 400, + "title": "SQL Interrupted", + } def test_custom_sql_time_limit(app_client): diff --git a/tests/test_html.py b/tests/test_html.py index bf915247..a99b0b6c 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -168,10 +168,14 @@ def test_disallowed_custom_sql_pragma(app_client): def test_sql_time_limit(app_client_shorter_time_limit): response = app_client_shorter_time_limit.get("/fixtures?sql=select+sleep(0.5)") assert 400 == response.status - expected_html_fragment = """ + expected_html_fragments = [ + """ <a href="https://docs.datasette.io/en/stable/settings.html#sql-time-limit-ms">sql_time_limit_ms</a> - """.strip() - assert expected_html_fragment in response.text + """.strip(), + "<pre>select sleep(0.5)</pre>", + ] + for expected_html_fragment in expected_html_fragments: + assert expected_html_fragment in response.text def test_row_page_does_not_truncate(): From 7fb4ea4e39a15e1f7d3202949794d98af1cfa272 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 27 Sep 2022 21:06:40 -0700 Subject: [PATCH 0306/1103] Update note about render_cell signature, refs #1826 --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index f208e727..c9cab8ab 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -9,7 +9,7 @@ Each plugin can implement one or more hooks using the ``@hookimpl`` decorator ag When you implement a plugin hook you can accept any or all of the parameters that are documented as being passed to that hook. -For example, you can implement the ``render_cell`` plugin hook like this even though the full documented hook signature is ``render_cell(value, column, table, database, datasette)``: +For example, you can implement the ``render_cell`` plugin hook like this even though the full documented hook signature is ``render_cell(row, value, column, table, database, datasette)``: .. code-block:: python From 984b1df12cf19a6731889fc0665bb5f622e07b7c Mon Sep 17 00:00:00 2001 From: Adam Simpson <adam@adamsimpson.net> Date: Wed, 28 Sep 2022 00:21:36 -0400 Subject: [PATCH 0307/1103] Add documentation for serving via OpenRC (#1825) * Add documentation for serving via OpenRC --- docs/deploying.rst | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/docs/deploying.rst b/docs/deploying.rst index d4ad8836..c8552758 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -74,18 +74,30 @@ Once the service has started you can confirm that Datasette is running on port 8 curl 127.0.0.1:8000/-/versions.json # Should output JSON showing the installed version -Datasette will not be accessible from outside the server because it is listening on ``127.0.0.1``. You can expose it by instead listening on ``0.0.0.0``, but a better way is to set up a proxy such as ``nginx``. +Datasette will not be accessible from outside the server because it is listening on ``127.0.0.1``. You can expose it by instead listening on ``0.0.0.0``, but a better way is to set up a proxy such as ``nginx`` - see :ref:`deploying_proxy`. -Ubuntu offer `a tutorial on installing nginx <https://ubuntu.com/tutorials/install-and-configure-nginx#1-overview>`__. 
Once it is installed you can add configuration to proxy traffic through to Datasette that looks like this:: +.. _deploying_openrc: - server { - server_name mysubdomain.myhost.net; +Running Datasette using OpenRC +=============================== +OpenRC is the service manager on non-systemd Linux distributions like `Alpine Linux <https://www.alpinelinux.org/>`__ and `Gentoo <https://www.gentoo.org/>`__. - location / { - proxy_pass http://127.0.0.1:8000/; - proxy_set_header Host $host; - } - } +Create an init script at ``/etc/init.d/datasette`` with the following contents: + +.. code-block:: sh + + #!/sbin/openrc-run + + name="datasette" + command="datasette" + command_args="serve -h 0.0.0.0 /path/to/db.db" + command_background=true + pidfile="/run/${RC_SVCNAME}.pid" + +You then need to configure the service to run at boot and start it:: + + rc-update add datasette + rc-service datasette start .. _deploying_buildpacks: From 34defdc10aa293294ca01cfab70780755447e1d7 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Wed, 28 Sep 2022 17:39:36 -0700 Subject: [PATCH 0308/1103] Browse the plugins directory --- docs/writing_plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index 01ee8c90..a3fc88ec 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -234,7 +234,7 @@ To avoid accidentally conflicting with a database file that may be loaded into D - ``/-/upload-excel`` -Try to avoid registering URLs that clash with other plugins that your users might have installed. There is no central repository of reserved URL paths (yet) but you can review existing plugins by browsing the `datasette-plugin topic <https://github.com/topics/datasette-plugin>`__ on GitHub. +Try to avoid registering URLs that clash with other plugins that your users might have installed. There is no central repository of reserved URL paths (yet) but you can review existing plugins by browsing the `plugins directory <https://datasette.io/plugins>`. If your plugin includes functionality that relates to a specific database you could also register a URL route like this: From c92c4318e9892101f75fa158410c0a12c1d80b6e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 30 Sep 2022 10:55:40 -0700 Subject: [PATCH 0309/1103] Bump furo from 2022.9.15 to 2022.9.29 (#1827) Bumps [furo](https://github.com/pradyunsg/furo) from 2022.9.15 to 2022.9.29. - [Release notes](https://github.com/pradyunsg/furo/releases) - [Changelog](https://github.com/pradyunsg/furo/blob/main/docs/changelog.md) - [Commits](https://github.com/pradyunsg/furo/compare/2022.09.15...2022.09.29) --- updated-dependencies: - dependency-name: furo dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] <support@github.com> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index afcba1f0..fe258adb 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,7 @@ setup( setup_requires=["pytest-runner"], extras_require={ "docs": [ - "furo==2022.9.15", + "furo==2022.9.29", "sphinx-autobuild", "codespell", "blacken-docs", From 883e326dd6ef95f854f7750ef2d4b0e17082fa96 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Sun, 2 Oct 2022 14:26:16 -0700 Subject: [PATCH 0310/1103] Drop word-wrap: anywhere, refs #1828, #1805 --- datasette/static/app.css | 1 - 1 file changed, 1 deletion(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index 08b724f6..712b9925 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -446,7 +446,6 @@ th { } table a:link { text-decoration: none; - word-wrap: anywhere; } .rows-and-columns td:before { display: block; From 4218c9cd742b79b1e3cb80878e42b7e39d16ded2 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 4 Oct 2022 11:45:36 -0700 Subject: [PATCH 0311/1103] reST markup fix --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index c9cab8ab..832a76b0 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -268,7 +268,7 @@ you have one: def extra_js_urls(): return ["/-/static-plugins/your-plugin/app.js"] -Note that `your-plugin` here should be the hyphenated plugin name - the name that is displayed in the list on the `/-/plugins` debug page. +Note that ``your-plugin`` here should be the hyphenated plugin name - the name that is displayed in the list on the ``/-/plugins`` debug page. If your code uses `JavaScript modules <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules>`__ you should include the ``"module": True`` key. See :ref:`customization_css_and_javascript` for more details. From b6ba117b7978b58b40e3c3c2b723b92c3010ed53 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 4 Oct 2022 18:25:52 -0700 Subject: [PATCH 0312/1103] Clarify request or None for two hooks --- docs/plugin_hooks.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 832a76b0..b61f953a 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1281,7 +1281,7 @@ menu_links(datasette, actor, request) ``actor`` - dictionary or None The currently authenticated :ref:`actor <authentication_actor>`. -``request`` - :ref:`internals_request` +``request`` - :ref:`internals_request` or None The current HTTP request. This can be ``None`` if the request object is not available. This hook allows additional items to be included in the menu displayed by Datasette's top right menu icon. @@ -1330,7 +1330,7 @@ table_actions(datasette, actor, database, table, request) ``table`` - string The name of the table. -``request`` - :ref:`internals_request` +``request`` - :ref:`internals_request` or None The current HTTP request. This can be ``None`` if the request object is not available. This hook allows table actions to be displayed in a menu accessed via an action icon at the top of the table page. It should return a list of ``{"href": "...", "label": "..."}`` menu items. 
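Since the change above documents that ``request`` may be ``None`` for ``menu_links()`` and ``table_actions()``, plugin implementations should guard for that case rather than assuming request attributes exist. A minimal sketch of a ``menu_links()`` implementation written that way (the menu label and target path are illustrative, not part of any existing plugin):

    from datasette import hookimpl


    @hookimpl
    def menu_links(datasette, actor, request):
        # The hook can be called without an HTTP request, so request may be None
        if actor is None:
            return []
        if request is not None and request.path == "/-/upload-excel":
            # Already on the target page, no need to show the link
            return []
        return [
            {"href": datasette.urls.path("/-/upload-excel"), "label": "Upload Excel"}
        ]
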
From bbf33a763537a1d913180b22bd3b5fe4a5e5b252 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 4 Oct 2022 21:32:11 -0700 Subject: [PATCH 0313/1103] Test for bool(results), closes #1832 --- tests/test_internals_database.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 9e81c1d6..4e33beed 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -30,6 +30,14 @@ async def test_results_first(db): assert isinstance(row, sqlite3.Row) +@pytest.mark.asyncio +@pytest.mark.parametrize("expected", (True, False)) +async def test_results_bool(db, expected): + where = "" if expected else "where pk = 0" + results = await db.execute("select * from facetable {}".format(where)) + assert bool(results) is expected + + @pytest.mark.parametrize( "query,expected", [ From eff112498ecc499323c26612d707908831446d25 Mon Sep 17 00:00:00 2001 From: Forest Gregg <fgregg@users.noreply.github.com> Date: Thu, 6 Oct 2022 16:06:06 -0400 Subject: [PATCH 0314/1103] Useuse inspect data for hash and file size on startup Thanks, @fgregg Closes #1834 --- datasette/database.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 46094bd7..d75bd70c 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -48,9 +48,13 @@ class Database: self._read_connection = None self._write_connection = None if not self.is_mutable and not self.is_memory: - p = Path(path) - self.hash = inspect_hash(p) - self.cached_size = p.stat().st_size + if self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self.hash = self.ds.inspect_data[self.name]["hash"] + self.cached_size = self.ds.inspect_data[self.name]["size"] + else: + p = Path(path) + self.hash = inspect_hash(p) + self.cached_size = p.stat().st_size @property def cached_table_counts(self): From b7fec7f9020b79c1fe60cc5a2def86b50eeb5af9 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Fri, 7 Oct 2022 16:03:09 -0700 Subject: [PATCH 0315/1103] .sqlite/.sqlite3 extensions for config directory mode Closes #1646 --- datasette/app.py | 5 ++++- docs/settings.rst | 2 +- tests/test_config_dir.py | 11 +++++------ 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 03d1dacc..32a911c2 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -217,7 +217,10 @@ class Datasette: self._secret = secret or secrets.token_hex(32) self.files = tuple(files or []) + tuple(immutables or []) if config_dir: - self.files += tuple([str(p) for p in config_dir.glob("*.db")]) + db_files = [] + for ext in ("db", "sqlite", "sqlite3"): + db_files.extend(config_dir.glob("*.{}".format(ext))) + self.files += tuple(str(f) for f in db_files) if ( config_dir and (config_dir / "inspect-data.json").exists() diff --git a/docs/settings.rst b/docs/settings.rst index 8437fb04..a6d50543 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -46,7 +46,7 @@ Datasette will detect the files in that directory and automatically configure it The files that can be included in this directory are as follows. All are optional. 
-* ``*.db`` - SQLite database files that will be served by Datasette +* ``*.db`` (or ``*.sqlite3`` or ``*.sqlite``) - SQLite database files that will be served by Datasette * ``metadata.json`` - :ref:`metadata` for those databases - ``metadata.yaml`` or ``metadata.yml`` can be used as well * ``inspect-data.json`` - the result of running ``datasette inspect *.db --inspect-file=inspect-data.json`` from the configuration directory - any database files listed here will be treated as immutable, so they should not be changed while Datasette is running * ``settings.json`` - settings that would normally be passed using ``--setting`` - here they should be stored as a JSON object of key/value pairs diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index f5ecf0d6..c2af3836 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -49,7 +49,7 @@ def config_dir(tmp_path_factory): (config_dir / "metadata.json").write_text(json.dumps(METADATA), "utf-8") (config_dir / "settings.json").write_text(json.dumps(SETTINGS), "utf-8") - for dbname in ("demo.db", "immutable.db"): + for dbname in ("demo.db", "immutable.db", "j.sqlite3", "k.sqlite"): db = sqlite3.connect(str(config_dir / dbname)) db.executescript( """ @@ -151,12 +151,11 @@ def test_databases(config_dir_client): response = config_dir_client.get("/-/databases.json") assert 200 == response.status databases = response.json - assert 2 == len(databases) + assert 4 == len(databases) databases.sort(key=lambda d: d["name"]) - assert "demo" == databases[0]["name"] - assert databases[0]["is_mutable"] - assert "immutable" == databases[1]["name"] - assert not databases[1]["is_mutable"] + for db, expected_name in zip(databases, ("demo", "immutable", "j", "k")): + assert expected_name == db["name"] + assert db["is_mutable"] == (expected_name != "immutable") @pytest.mark.parametrize("filename", ("metadata.yml", "metadata.yaml")) From 1a5e5f2aa951e5bd731067a49819efba68fbe8ef Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Thu, 13 Oct 2022 14:42:52 -0700 Subject: [PATCH 0316/1103] Refactor breadcrumbs to respect permissions, refs #1831 --- datasette/app.py | 40 ++++++++++++++++++++++ datasette/templates/_crumbs.html | 15 ++++++++ datasette/templates/base.html | 4 +-- datasette/templates/database.html | 9 ----- datasette/templates/error.html | 7 ---- datasette/templates/logout.html | 7 ---- datasette/templates/permissions_debug.html | 7 ---- datasette/templates/query.html | 8 ++--- datasette/templates/row.html | 9 ++--- datasette/templates/show_json.html | 7 ---- datasette/templates/table.html | 8 ++--- tests/test_permissions.py | 1 + tests/test_plugins.py | 2 +- 13 files changed, 65 insertions(+), 59 deletions(-) create mode 100644 datasette/templates/_crumbs.html diff --git a/datasette/app.py b/datasette/app.py index 32a911c2..5fa4955c 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -631,6 +631,44 @@ class Datasette: else: return [] + async def _crumb_items(self, request, table=None, database=None): + crumbs = [] + # Top-level link + if await self.permission_allowed( + actor=request.actor, action="view-instance", default=True + ): + crumbs.append({"href": self.urls.instance(), "label": "home"}) + # Database link + if database: + if await self.permission_allowed( + actor=request.actor, + action="view-database", + resource=database, + default=True, + ): + crumbs.append( + { + "href": self.urls.database(database), + "label": database, + } + ) + # Table link + if table: + assert database, "table= requires 
database=" + if await self.permission_allowed( + actor=request.actor, + action="view-table", + resource=(database, table), + default=True, + ): + crumbs.append( + { + "href": self.urls.table(database, table), + "label": table, + } + ) + return crumbs + async def permission_allowed(self, actor, action, resource=None, default=False): """Check permissions using the permissions_allowed plugin hook""" result = None @@ -1009,6 +1047,8 @@ class Datasette: template_context = { **context, **{ + "request": request, + "crumb_items": self._crumb_items, "urls": self.urls, "actor": request.actor if request else None, "menu_links": menu_links, diff --git a/datasette/templates/_crumbs.html b/datasette/templates/_crumbs.html new file mode 100644 index 00000000..bd1ff0da --- /dev/null +++ b/datasette/templates/_crumbs.html @@ -0,0 +1,15 @@ +{% macro nav(request, database=None, table=None) -%} +{% if crumb_items is defined %} + {% set items=crumb_items(request=request, database=database, table=table) %} + {% if items %} + <p class="crumbs"> + {% for item in items %} + <a href="{{ item.href }}">{{ item.label }}</a> + {% if not loop.last %} + / + {% endif %} + {% endfor %} + </p> + {% endif %} +{% endif %} +{%- endmacro %} diff --git a/datasette/templates/base.html b/datasette/templates/base.html index c3a71acb..87c939ac 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -1,4 +1,4 @@ -<!DOCTYPE html> +{% import "_crumbs.html" as crumbs with context %}<!DOCTYPE html> <html> <head> <title>{% block title %}{% endblock %} @@ -17,7 +17,7 @@

    " + anon_response = padlock_client.get(path) + assert expected_anon == anon_response.status + if allow and anon_response.status == 200: + # Should be no padlock + assert fragment not in anon_response.text + auth_response = padlock_client.get( + path, + cookies={"ds_actor": padlock_client.actor_cookie({"id": "root"})}, + ) + assert expected_auth == auth_response.status + # Check for the padlock + if allow and expected_anon == 403 and expected_auth == 200: + assert fragment in auth_response.text + del padlock_client.ds._metadata_local["allow"] @pytest.mark.parametrize( @@ -467,6 +487,10 @@ def test_permissions_cascade(cascade_app_client, path, permissions, expected_sta path, cookies={"ds_actor": cascade_app_client.actor_cookie(actor)}, ) - assert expected_status == response.status + assert ( + response.status == expected_status + ), "path: {}, permissions: {}, expected_status: {}, status: {}".format( + path, permissions, expected_status, response.status + ) finally: - cascade_app_client.ds._metadata_local = previous_metadata + cascade_app_client.ds._local_metadata = previous_metadata diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 02cac132..e0a7bc76 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -823,8 +823,14 @@ def test_hook_forbidden(restore_working_directory): assert 403 == response.status response2 = client.get("/data2") assert 302 == response2.status - assert "/login?message=view-database" == response2.headers["Location"] - assert "view-database" == client.ds._last_forbidden_message + assert ( + response2.headers["Location"] + == "/login?message=You do not have permission to view this database" + ) + assert ( + client.ds._last_forbidden_message + == "You do not have permission to view this database" + ) def test_hook_handle_exception(app_client): From 5be86d48b2e31565faca208fc4aeb0ddfaca71f9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 23 Oct 2022 19:42:30 -0700 Subject: [PATCH 0321/1103] Fix display of padlocks on database page, closes #1848 --- datasette/views/database.py | 21 +++++++++++++++------ tests/test_permissions.py | 35 ++++++++++++++++++++++++++++++++++- 2 files changed, 49 insertions(+), 7 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index bd9e4a7c..8e08c3b1 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -68,8 +68,11 @@ class DatabaseView(DataView): for view_name in await db.view_names(): view_visible, view_private = await self.ds.check_visibility( request.actor, - "view-table", - (database, view_name), + permissions=[ + ("view-table", (database, view_name)), + ("view-database", database), + "view-instance", + ], ) if view_visible: views.append( @@ -83,8 +86,11 @@ class DatabaseView(DataView): for table in table_counts: table_visible, table_private = await self.ds.check_visibility( request.actor, - "view-table", - (database, table), + permissions=[ + ("view-table", (database, table)), + ("view-database", database), + "view-instance", + ], ) if not table_visible: continue @@ -109,8 +115,11 @@ class DatabaseView(DataView): ).values(): query_visible, query_private = await self.ds.check_visibility( request.actor, - "view-query", - (database, query["name"]), + permissions=[ + ("view-query", (database, query["name"])), + ("view-database", database), + "view-instance", + ], ) if query_visible: canned_queries.append(dict(query, private=query_private)) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 2d48431a..8812d0f7 100644 --- 
a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -493,4 +493,37 @@ def test_permissions_cascade(cascade_app_client, path, permissions, expected_sta path, permissions, expected_status, response.status ) finally: - cascade_app_client.ds._local_metadata = previous_metadata + cascade_app_client.ds._metadata_local = previous_metadata + + +def test_padlocks_on_database_page(cascade_app_client): + metadata = { + "databases": { + "fixtures": { + "allow": {"id": "test"}, + "tables": { + "123_starts_with_digits": {"allow": True}, + "simple_view": {"allow": True}, + }, + "queries": {"query_two": {"allow": True, "sql": "select 2"}}, + } + } + } + previous_metadata = cascade_app_client.ds._metadata_local + try: + cascade_app_client.ds._metadata_local = metadata + response = cascade_app_client.get( + "/fixtures", + cookies={"ds_actor": cascade_app_client.actor_cookie({"id": "test"})}, + ) + # Tables + assert ">123_starts_with_digits" in response.text + assert ">Table With Space In Name 🔒" in response.text + # Queries + assert ">from_async_hook 🔒" in response.text + assert ">query_two" in response.text + # Views + assert ">paginated_view 🔒" in response.text + assert ">simple_view" in response.text + finally: + cascade_app_client.ds._metadata_local = previous_metadata From 602c0888ce633000cfae42be00de474ef681bda7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 23 Oct 2022 20:07:09 -0700 Subject: [PATCH 0322/1103] Release 0.63a1 Refs #1646, #1819, #1825, #1829, #1831, #1832, #1834, #1844, #1848 --- datasette/version.py | 2 +- docs/changelog.rst | 16 +++++++++++++++- docs/internals.rst | 2 +- docs/performance.rst | 2 ++ 4 files changed, 19 insertions(+), 3 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index e5ad585f..eb36da45 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.63a0" +__version__ = "0.63a1" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index f5cf03e8..dd4c20b7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,20 @@ Changelog ========= +.. _v0_63a1: + +0.63a1 (2022-10-23) +------------------- + +- SQL query is now re-displayed when terminated with a time limit error. (:issue:`1819`) +- New documentation on :ref:`deploying_openrc` - thanks, Adam Simpson. (`#1825 `__) +- The :ref:`inspect data ` mechanism is now used to speed up server startup - thanks, Forest Gregg. (:issue:`1834`) +- In :ref:`config_dir` databases with filenames ending in ``.sqlite`` or ``.sqlite3`` are now automatically added to the Datasette instance. (:issue:`1646`) +- Breadcrumb navigation display now respects the current user's permissions. (:issue:`1831`) +- Screenshots in the documentation are now maintained using `shot-scraper `__, as described in `Automating screenshots for the Datasette documentation using shot-scraper `__. (:issue:`1844`) +- The :ref:`datasette.check_visibility() ` method now accepts an optional ``permissions=`` list, allowing it to take multiple permissions into account at once when deciding if something should be shown as public or private. This has been used to correctly display padlock icons in more places in the Datasette interface. (:issue:`1829`) + + .. _v0_63a0: 0.63a0 (2022-09-26) @@ -91,7 +105,7 @@ Datasette also now requires Python 3.7 or higher. - Python 3.6 is no longer supported. (:issue:`1577`) - Tests now run against Python 3.11-dev. 
(:issue:`1621`) - New :ref:`datasette.ensure_permissions(actor, permissions) ` internal method for checking multiple permissions at once. (:issue:`1675`) -- New :ref:`datasette.check_visibility(actor, action, resource=None) ` internal method for checking if a user can see a resource that would otherwise be invisible to unauthenticated users. (:issue:`1678`) +- New :ref:`datasette.check_visibility(actor, action, resource=None) ` internal method for checking if a user can see a resource that would otherwise be invisible to unauthenticated users. (:issue:`1678`) - Table and row HTML pages now include a ```` element and return a ``Link: URL; rel="alternate"; type="application/json+datasette"`` HTTP header pointing to the JSON version of those pages. (:issue:`1533`) - ``Access-Control-Expose-Headers: Link`` is now added to the CORS headers, allowing remote JavaScript to access that header. - Canned queries are now shown at the top of the database page, directly below the SQL editor. Previously they were shown at the bottom, below the list of tables. (:issue:`1612`) diff --git a/docs/internals.rst b/docs/internals.rst index 92f4efee..c3892a7c 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -364,7 +364,7 @@ This is useful when you need to check multiple permissions at once. For example, ], ) -.. _datasette_check_visibilty: +.. _datasette_check_visibility: await .check_visibility(actor, action=None, resource=None, permissions=None) ---------------------------------------------------------------------------- diff --git a/docs/performance.rst b/docs/performance.rst index 89bbf5ae..4427757c 100644 --- a/docs/performance.rst +++ b/docs/performance.rst @@ -24,6 +24,8 @@ To open a file in immutable mode pass it to the datasette command using the ``-i When you open a file in immutable mode like this Datasette will also calculate and cache the row counts for each table in that database when it first starts up, further improving performance. +.. _performance_inspect: + Using "datasette inspect" ------------------------- From a0dd5fa02fb1e6d5477b962a2062f1a4be3354a5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 23 Oct 2022 20:14:49 -0700 Subject: [PATCH 0323/1103] Fixed typo in release notes --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index dd4c20b7..2255dcce 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -31,7 +31,7 @@ Changelog - ``Database(is_mutable=)`` now defaults to ``True``. (:issue:`1808`) - Non-JavaScript textarea now increases height to fit the SQL query. (:issue:`1786`) - More detailed command descriptions on the :ref:`CLI reference ` page. (:issue:`1787`) -- Datasette no longer enforces upper bounds on its depenedencies. (:issue:`1800`) +- Datasette no longer enforces upper bounds on its dependencies. (:issue:`1800`) - Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. (`#1794 `__) - The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. 
(:issue:`1816`) From 83adf55b2da83fd9a227f7e4c8506d72def72294 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 23 Oct 2022 20:28:15 -0700 Subject: [PATCH 0324/1103] Deploy one-dot-zero branch preview --- .github/workflows/deploy-latest.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 2b94a7f1..43a843ed 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -3,7 +3,8 @@ name: Deploy latest.datasette.io on: push: branches: - - main + - main + - 1.0-dev permissions: contents: read @@ -68,6 +69,8 @@ jobs: gcloud config set project datasette-222320 export SUFFIX="-${GITHUB_REF#refs/heads/}" export SUFFIX=${SUFFIX#-main} + # Replace 1.0 with one-dot-zero in SUFFIX + export SUFFIX=${SUFFIX//1.0/one-dot-zero} datasette publish cloudrun fixtures.db fixtures2.db extra_database.db \ -m fixtures.json \ --plugins-dir=plugins \ From e135da8efe8fccecf9a137a941cc1f1db0db583a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 07:13:43 -0700 Subject: [PATCH 0325/1103] Python 3.11 in CI --- .github/workflows/publish.yml | 16 ++++++++-------- .github/workflows/test.yml | 8 ++++---- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 9ef09d2e..fa608055 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -12,14 +12,14 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip @@ -37,12 +37,12 @@ jobs: runs-on: ubuntu-latest needs: [test] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: '3.10' - - uses: actions/cache@v2 + python-version: '3.11' + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e38d5ee9..886f649a 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -10,14 +10,14 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11-dev"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip From 02ae1a002918eb91f794e912c32742559da34cf5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 11:59:03 -0700 Subject: [PATCH 0326/1103] Upgrade Docker images to Python 3.11, closes #1853 --- Dockerfile | 2 +- datasette/utils/__init__.py | 2 +- demos/apache-proxy/Dockerfile | 2 +- docs/publish.rst | 2 +- tests/test_package.py | 2 +- tests/test_publish_cloudrun.py | 4 ++-- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index 
ee7ed957..9a8f06cf 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.6-slim-bullseye as build +FROM python:3.11.0-slim-bullseye as build # Version of Datasette to install, e.g. 0.55 # docker build . -t datasette --build-arg VERSION=0.55 diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 2bdea673..803ba96d 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -390,7 +390,7 @@ def make_dockerfile( "SQLITE_EXTENSIONS" ] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so" return """ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app {apt_get_extras} diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile index 70b33bec..9a8448da 100644 --- a/demos/apache-proxy/Dockerfile +++ b/demos/apache-proxy/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye RUN apt-get update && \ apt-get install -y apache2 supervisor && \ diff --git a/docs/publish.rst b/docs/publish.rst index d817ed31..4ba94792 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -146,7 +146,7 @@ Here's example output for the package command:: $ datasette package parlgov.db --extra-options="--setting sql_time_limit_ms 2500" Sending build context to Docker daemon 4.459MB - Step 1/7 : FROM python:3.10.6-slim-bullseye + Step 1/7 : FROM python:3.11.0-slim-bullseye ---> 79e1dc9af1c1 Step 2/7 : COPY . /app ---> Using cache diff --git a/tests/test_package.py b/tests/test_package.py index ac15e61e..f05f3ece 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -12,7 +12,7 @@ class CaptureDockerfile: EXPECTED_DOCKERFILE = """ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index e64534d2..158a090e 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -242,7 +242,7 @@ def test_publish_cloudrun_plugin_secrets( ) expected = textwrap.dedent( r""" - FROM python:3.10.6-slim-bullseye + FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app @@ -309,7 +309,7 @@ def test_publish_cloudrun_apt_get_install( ) expected = textwrap.dedent( r""" - FROM python:3.10.6-slim-bullseye + FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app From 9676b2deb07cff20247ba91dad3e84a4ab0b00d1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 11:59:03 -0700 Subject: [PATCH 0327/1103] Upgrade Docker images to Python 3.11, closes #1853 --- Dockerfile | 2 +- datasette/utils/__init__.py | 2 +- demos/apache-proxy/Dockerfile | 2 +- docs/publish.rst | 2 +- tests/test_package.py | 2 +- tests/test_publish_cloudrun.py | 4 ++-- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index ee7ed957..9a8f06cf 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.6-slim-bullseye as build +FROM python:3.11.0-slim-bullseye as build # Version of Datasette to install, e.g. 0.55 # docker build . -t datasette --build-arg VERSION=0.55 diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 2bdea673..803ba96d 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -390,7 +390,7 @@ def make_dockerfile( "SQLITE_EXTENSIONS" ] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so" return """ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye COPY . 
/app WORKDIR /app {apt_get_extras} diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile index 70b33bec..9a8448da 100644 --- a/demos/apache-proxy/Dockerfile +++ b/demos/apache-proxy/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye RUN apt-get update && \ apt-get install -y apache2 supervisor && \ diff --git a/docs/publish.rst b/docs/publish.rst index d817ed31..4ba94792 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -146,7 +146,7 @@ Here's example output for the package command:: $ datasette package parlgov.db --extra-options="--setting sql_time_limit_ms 2500" Sending build context to Docker daemon 4.459MB - Step 1/7 : FROM python:3.10.6-slim-bullseye + Step 1/7 : FROM python:3.11.0-slim-bullseye ---> 79e1dc9af1c1 Step 2/7 : COPY . /app ---> Using cache diff --git a/tests/test_package.py b/tests/test_package.py index ac15e61e..f05f3ece 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -12,7 +12,7 @@ class CaptureDockerfile: EXPECTED_DOCKERFILE = """ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index e64534d2..158a090e 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -242,7 +242,7 @@ def test_publish_cloudrun_plugin_secrets( ) expected = textwrap.dedent( r""" - FROM python:3.10.6-slim-bullseye + FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app @@ -309,7 +309,7 @@ def test_publish_cloudrun_apt_get_install( ) expected = textwrap.dedent( r""" - FROM python:3.10.6-slim-bullseye + FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app From 613ad05c095f92653221db267ef53d54d00cdfbb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 12:16:48 -0700 Subject: [PATCH 0328/1103] Don't need pysqlite3-binary any more, refs #1853 --- .github/workflows/deploy-latest.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 2b94a7f1..e423b8fa 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -13,12 +13,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out datasette - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: "3.10" - - uses: actions/cache@v2 + python-version: "3.11" + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip @@ -74,7 +74,6 @@ jobs: --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \ - --install=pysqlite3-binary \ --service "datasette-latest$SUFFIX" - name: Deploy to docs as well (only for main) if: ${{ github.ref == 'refs/heads/main' }} From c7dd76c26257ded5bcdfd0570e12412531b8b88f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 12:42:21 -0700 Subject: [PATCH 0329/1103] Poll until servers start, refs #1854 --- tests/conftest.py | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 215853b3..f4638a14 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,4 @@ +import httpx import os import pathlib import pytest @@ -110,8 +111,13 @@ def ds_localhost_http_server(): # Avoid FileNotFoundError: [Errno 2] No such file or directory: 
cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + # Loop until port 8041 serves traffic + while True: + try: + httpx.get("http://localhost:8041/") + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc @@ -146,8 +152,12 @@ def ds_localhost_https_server(tmp_path_factory): stderr=subprocess.STDOUT, cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + while True: + try: + httpx.get("https://localhost:8042/", verify=client_cert) + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc, client_cert @@ -168,8 +178,15 @@ def ds_unix_domain_socket_server(tmp_path_factory): stderr=subprocess.STDOUT, cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + # Poll until available + transport = httpx.HTTPTransport(uds=uds) + client = httpx.Client(transport=transport) + while True: + try: + client.get("http://localhost/_memory.json") + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc, uds From 6d085af28c63c28ecda388fc0552c91f756be0c6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 07:13:43 -0700 Subject: [PATCH 0330/1103] Python 3.11 in CI --- .github/workflows/publish.yml | 16 ++++++++-------- .github/workflows/test.yml | 8 ++++---- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 9ef09d2e..fa608055 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -12,14 +12,14 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip @@ -37,12 +37,12 @@ jobs: runs-on: ubuntu-latest needs: [test] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: '3.10' - - uses: actions/cache@v2 + python-version: '3.11' + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e38d5ee9..886f649a 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -10,14 +10,14 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11-dev"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip From 05b479224fa57af3ab2d03769edd5081dad62a19 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 12:16:48 -0700 Subject: [PATCH 0331/1103] 
Don't need pysqlite3-binary any more, refs #1853 --- .github/workflows/deploy-latest.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 43a843ed..5598dc12 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -14,12 +14,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out datasette - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: "3.10" - - uses: actions/cache@v2 + python-version: "3.11" + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip @@ -77,7 +77,6 @@ jobs: --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \ - --install=pysqlite3-binary \ --service "datasette-latest$SUFFIX" - name: Deploy to docs as well (only for main) if: ${{ github.ref == 'refs/heads/main' }} From f9ae92b37796f7f559d57b1ee9718aa4d43547e8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 12:42:21 -0700 Subject: [PATCH 0332/1103] Poll until servers start, refs #1854 --- tests/conftest.py | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 215853b3..f4638a14 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,4 @@ +import httpx import os import pathlib import pytest @@ -110,8 +111,13 @@ def ds_localhost_http_server(): # Avoid FileNotFoundError: [Errno 2] No such file or directory: cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + # Loop until port 8041 serves traffic + while True: + try: + httpx.get("http://localhost:8041/") + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc @@ -146,8 +152,12 @@ def ds_localhost_https_server(tmp_path_factory): stderr=subprocess.STDOUT, cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + while True: + try: + httpx.get("https://localhost:8042/", verify=client_cert) + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc, client_cert @@ -168,8 +178,15 @@ def ds_unix_domain_socket_server(tmp_path_factory): stderr=subprocess.STDOUT, cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + # Poll until available + transport = httpx.HTTPTransport(uds=uds) + client = httpx.Client(transport=transport) + while True: + try: + client.get("http://localhost/_memory.json") + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc, uds From 42f8b402e6aa56af4bbe921e346af8df42acd50f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 17:07:58 -0700 Subject: [PATCH 0333/1103] Initial prototype of create API token page, refs #1852 --- datasette/app.py | 5 ++ datasette/templates/create_token.html | 83 +++++++++++++++++++++++++++ datasette/views/special.py | 54 +++++++++++++++++ 3 files changed, 142 insertions(+) create mode 100644 datasette/templates/create_token.html diff --git a/datasette/app.py b/datasette/app.py index 9df16558..cab9d142 100644 --- a/datasette/app.py +++ 
b/datasette/app.py @@ -33,6 +33,7 @@ from .views.special import ( JsonDataView, PatternPortfolioView, AuthTokenView, + CreateTokenView, LogoutView, AllowDebugView, PermissionsDebugView, @@ -1212,6 +1213,10 @@ class Datasette: AuthTokenView.as_view(self), r"/-/auth-token$", ) + add_route( + CreateTokenView.as_view(self), + r"/-/create-token$", + ) add_route( LogoutView.as_view(self), r"/-/logout$", diff --git a/datasette/templates/create_token.html b/datasette/templates/create_token.html new file mode 100644 index 00000000..a94881ed --- /dev/null +++ b/datasette/templates/create_token.html @@ -0,0 +1,83 @@ +{% extends "base.html" %} + +{% block title %}Create an API token{% endblock %} + +{% block content %} + +

+<h1>Create an API token</h1>
+
+<p>This token will allow API access with the same abilities as your current user.</p>
+
+{% if errors %}
+  {% for error in errors %}
+    <p class="message-error">{{ error }}</p>
+  {% endfor %}
+{% endif %}
+
+<form action="{{ urls.path('-/create-token') }}" method="post">
+  <div>
+    <div class="select-wrapper" style="width: unset">
+      <select name="expire_type">
+        <option value="">Token never expires</option>
+        <option value="minutes">Token expires after X minutes</option>
+        <option value="hours">Token expires after X hours</option>
+        <option value="days">Token expires after X days</option>
+      </select>
+    </div>
+    <input type="text" name="expire_duration" style="width: 10%">
+    <input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
+    <input type="submit" value="Create token">
+  </div>
+</form>
+
+{% if token %}
+  <div>
+    <h2>Your API token</h2>
+    <div>
+      <input type="text" class="copyable" style="width: 40%" value="{{ token }}">
+    </div>
+    <details style="margin-top: 1em">
+      <summary>Token details</summary>
+      <pre>{{ token_bits|tojson }}</pre>
+    </details>
+  </div>
    + {% endif %} + + + +{% endblock %} diff --git a/datasette/views/special.py b/datasette/views/special.py index dd834528..f2e69412 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -3,6 +3,7 @@ from datasette.utils.asgi import Response, Forbidden from datasette.utils import actor_matches_allow, add_cors_headers from .base import BaseView import secrets +import time class JsonDataView(BaseView): @@ -163,3 +164,56 @@ class MessagesDebugView(BaseView): else: datasette.add_message(request, message, getattr(datasette, message_type)) return Response.redirect(self.ds.urls.instance()) + + +class CreateTokenView(BaseView): + name = "create_token" + has_json_alternate = False + + async def get(self, request): + if not request.actor: + raise Forbidden("You must be logged in to create a token") + return await self.render( + ["create_token.html"], + request, + {"actor": request.actor}, + ) + + async def post(self, request): + if not request.actor: + raise Forbidden("You must be logged in to create a token") + post = await request.post_vars() + expires = None + errors = [] + if post.get("expire_type"): + duration = post.get("expire_duration") + if not duration or not duration.isdigit() or not int(duration) > 0: + errors.append("Invalid expire duration") + else: + unit = post["expire_type"] + if unit == "minutes": + expires = int(duration) * 60 + elif unit == "hours": + expires = int(duration) * 60 * 60 + elif unit == "days": + expires = int(duration) * 60 * 60 * 24 + else: + errors.append("Invalid expire duration unit") + token_bits = None + token = None + if not errors: + token_bits = { + "a": request.actor, + "e": (int(time.time()) + expires) if expires else None, + } + token = self.ds.sign(token_bits, "token") + return await self.render( + ["create_token.html"], + request, + { + "actor": request.actor, + "errors": errors, + "token": token, + "token_bits": token_bits, + }, + ) From 68ccb7578b5d3bf68b86fb2f5cf8753098dfe075 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 18:40:07 -0700 Subject: [PATCH 0334/1103] dstoke_ prefix for tokens Refs https://github.com/simonw/datasette/issues/1852#issuecomment-1291290451 --- datasette/views/special.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/views/special.py b/datasette/views/special.py index f2e69412..d3f202f4 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -206,7 +206,7 @@ class CreateTokenView(BaseView): "a": request.actor, "e": (int(time.time()) + expires) if expires else None, } - token = self.ds.sign(token_bits, "token") + token = "dstok_{}".format(self.ds.sign(token_bits, "token")) return await self.render( ["create_token.html"], request, From 7ab091e8ef8d3af1e23b5a81ffad2bd8c96cc47c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 19:04:05 -0700 Subject: [PATCH 0335/1103] Tests and docs for /-/create-token, refs #1852 --- datasette/views/special.py | 14 +++++--- docs/authentication.rst | 15 +++++++++ tests/test_auth.py | 68 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 93 insertions(+), 4 deletions(-) diff --git a/datasette/views/special.py b/datasette/views/special.py index d3f202f4..7f70eb1f 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -170,9 +170,16 @@ class CreateTokenView(BaseView): name = "create_token" has_json_alternate = False - async def get(self, request): + def check_permission(self, request): if not request.actor: raise Forbidden("You must be logged in to create a 
token") + if not request.actor.get("id"): + raise Forbidden( + "You must be logged in as an actor with an ID to create a token" + ) + + async def get(self, request): + self.check_permission(request) return await self.render( ["create_token.html"], request, @@ -180,8 +187,7 @@ class CreateTokenView(BaseView): ) async def post(self, request): - if not request.actor: - raise Forbidden("You must be logged in to create a token") + self.check_permission(request) post = await request.post_vars() expires = None errors = [] @@ -203,7 +209,7 @@ class CreateTokenView(BaseView): token = None if not errors: token_bits = { - "a": request.actor, + "a": request.actor["id"], "e": (int(time.time()) + expires) if expires else None, } token = "dstok_{}".format(self.ds.sign(token_bits, "token")) diff --git a/docs/authentication.rst b/docs/authentication.rst index 685dab15..fc903fbb 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -333,6 +333,21 @@ To limit this ability for just one specific database, use this: } } +.. _CreateTokenView: + +API Tokens +========== + +Datasette includes a default mechanism for generating API tokens that can be used to authenticate requests. + +Authenticated users can create new API tokens using a form on the ``/-/create-token`` page. + +Created tokens can then be passed in the ``Authorization: Bearer token_here`` header of HTTP requests to Datasette. + +A token created by a user will include that user's ``"id"`` in the token payload, so any permissions granted to that user based on their ID will be made available to the token as well. + +Coming soon: a mechanism for creating tokens that can only perform a subset of the actions available to the user who created them. + .. _permissions_plugins: Checking permissions in plugins diff --git a/tests/test_auth.py b/tests/test_auth.py index 4ef35a76..3aaab50d 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -110,3 +110,71 @@ def test_no_logout_button_in_navigation_if_no_ds_actor_cookie(app_client, path): response = app_client.get(path + "?_bot=1") assert "bot" in response.text assert '
    ' not in response.text + + +@pytest.mark.parametrize( + "post_data,errors,expected_duration", + ( + ({"expire_type": ""}, [], None), + ({"expire_type": "x"}, ["Invalid expire duration"], None), + ({"expire_type": "minutes"}, ["Invalid expire duration"], None), + ( + {"expire_type": "minutes", "expire_duration": "x"}, + ["Invalid expire duration"], + None, + ), + ( + {"expire_type": "minutes", "expire_duration": "-1"}, + ["Invalid expire duration"], + None, + ), + ( + {"expire_type": "minutes", "expire_duration": "0"}, + ["Invalid expire duration"], + None, + ), + ( + {"expire_type": "minutes", "expire_duration": "10"}, + [], + 600, + ), + ( + {"expire_type": "hours", "expire_duration": "10"}, + [], + 10 * 60 * 60, + ), + ( + {"expire_type": "days", "expire_duration": "3"}, + [], + 60 * 60 * 24 * 3, + ), + ), +) +def test_auth_create_token(app_client, post_data, errors, expected_duration): + assert app_client.get("/-/create-token").status == 403 + ds_actor = app_client.actor_cookie({"id": "test"}) + response = app_client.get("/-/create-token", cookies={"ds_actor": ds_actor}) + assert response.status == 200 + assert ">Create an API token<" in response.text + # Now try actually creating one + response2 = app_client.post( + "/-/create-token", + post_data, + csrftoken_from=True, + cookies={"ds_actor": ds_actor}, + ) + assert response2.status == 200 + if errors: + for error in errors: + assert '

    <p class="message-error">{}</p>
    '.format(error) in response2.text + else: + # Extract token from page + token = response2.text.split('value="dstok_')[1].split('"')[0] + details = app_client.ds.unsign(token, "token") + assert details.keys() == {"a", "e"} + assert details["a"] == "test" + if expected_duration is None: + assert details["e"] is None + else: + about_right = int(time.time()) + expected_duration + assert about_right - 2 < details["e"] < about_right + 2 From b29e487bc3fde6418bf45bda7cfed2e081ff03fb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 19:18:41 -0700 Subject: [PATCH 0336/1103] actor_from_request for dstok_ tokens, refs #1852 --- datasette/default_permissions.py | 25 +++++++++++++++++++++++++ datasette/utils/testing.py | 2 ++ tests/test_auth.py | 32 ++++++++++++++++++++++++++++++++ 3 files changed, 59 insertions(+) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index b58d8d1b..4d836ddc 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -1,5 +1,7 @@ from datasette import hookimpl from datasette.utils import actor_matches_allow +import itsdangerous +import time @hookimpl(tryfirst=True) @@ -45,3 +47,26 @@ def permission_allowed(datasette, actor, action, resource): return actor_matches_allow(actor, database_allow_sql) return inner + + +@hookimpl +def actor_from_request(datasette, request): + prefix = "dstok_" + authorization = request.headers.get("authorization") + if not authorization: + return None + if not authorization.startswith("Bearer "): + return None + token = authorization[len("Bearer ") :] + if not token.startswith(prefix): + return None + token = token[len(prefix) :] + try: + decoded = datasette.unsign(token, namespace="token") + except itsdangerous.BadSignature: + return None + expires_at = decoded.get("e") + if expires_at is not None: + if expires_at < time.time(): + return None + return {"id": decoded["a"], "dstok": True} diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index b28fc575..4f76a799 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -62,6 +62,7 @@ class TestClient: method="GET", cookies=None, if_none_match=None, + headers=None, ): return await self._request( path=path, @@ -70,6 +71,7 @@ class TestClient: method=method, cookies=cookies, if_none_match=if_none_match, + headers=headers, ) @async_to_sync diff --git a/tests/test_auth.py b/tests/test_auth.py index 3aaab50d..be21d6a5 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -178,3 +178,35 @@ def test_auth_create_token(app_client, post_data, errors, expected_duration): else: about_right = int(time.time()) + expected_duration assert about_right - 2 < details["e"] < about_right + 2 + + +@pytest.mark.parametrize( + "scenario,should_work", + ( + ("no_token", False), + ("invalid_token", False), + ("expired_token", False), + ("valid_unlimited_token", True), + ("valid_expiring_token", True), + ), +) +def test_auth_with_dstok_token(app_client, scenario, should_work): + token = None + if scenario == "valid_unlimited_token": + token = app_client.ds.sign({"a": "test"}, "token") + elif scenario == "valid_expiring_token": + token = app_client.ds.sign({"a": "test", "e": int(time.time()) + 1000}, "token") + elif scenario == "expired_token": + token = app_client.ds.sign({"a": "test", "e": int(time.time()) - 1000}, "token") + elif scenario == "invalid_token": + token = "invalid" + if token: + token = "dstok_{}".format(token) + headers = {} + if token: + headers["Authorization"] = "Bearer 
{}".format(token) + response = app_client.get("/-/actor.json", headers=headers) + if should_work: + assert response.json == {"actor": {"id": "test", "dstok": True}} + else: + assert response.json == {"actor": None} From 0f013ff497df62e1dd2075777b9817555646010e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 19:43:55 -0700 Subject: [PATCH 0337/1103] Mechanism to prevent tokens creating tokens, closes #1857 --- datasette/default_permissions.py | 2 +- datasette/views/special.py | 4 ++++ docs/authentication.rst | 2 ++ tests/test_auth.py | 11 ++++++++++- 4 files changed, 17 insertions(+), 2 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 4d836ddc..d908af7a 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -69,4 +69,4 @@ def actor_from_request(datasette, request): if expires_at is not None: if expires_at < time.time(): return None - return {"id": decoded["a"], "dstok": True} + return {"id": decoded["a"], "token": "dstok"} diff --git a/datasette/views/special.py b/datasette/views/special.py index 7f70eb1f..91130353 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -177,6 +177,10 @@ class CreateTokenView(BaseView): raise Forbidden( "You must be logged in as an actor with an ID to create a token" ) + if request.actor.get("token"): + raise Forbidden( + "Token authentication cannot be used to create additional tokens" + ) async def get(self, request): self.check_permission(request) diff --git a/docs/authentication.rst b/docs/authentication.rst index fc903fbb..cbecd296 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -348,6 +348,8 @@ A token created by a user will include that user's ``"id"`` in the token payload Coming soon: a mechanism for creating tokens that can only perform a subset of the actions available to the user who created them. +This page cannot be accessed by actors with a ``"token": "some-value"`` property. This is to prevent API tokens from being used to automatically create more tokens. Datasette plugins that implement their own form of API token authentication should follow this convention. + .. 
_permissions_plugins: Checking permissions in plugins diff --git a/tests/test_auth.py b/tests/test_auth.py index be21d6a5..397d51d7 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -180,6 +180,15 @@ def test_auth_create_token(app_client, post_data, errors, expected_duration): assert about_right - 2 < details["e"] < about_right + 2 +def test_auth_create_token_not_allowed_for_tokens(app_client): + ds_tok = app_client.ds.sign({"a": "test", "token": "dstok"}, "token") + response = app_client.get( + "/-/create-token", + headers={"Authorization": "Bearer dstok_{}".format(ds_tok)}, + ) + assert response.status == 403 + + @pytest.mark.parametrize( "scenario,should_work", ( @@ -207,6 +216,6 @@ def test_auth_with_dstok_token(app_client, scenario, should_work): headers["Authorization"] = "Bearer {}".format(token) response = app_client.get("/-/actor.json", headers=headers) if should_work: - assert response.json == {"actor": {"id": "test", "dstok": True}} + assert response.json == {"actor": {"id": "test", "token": "dstok"}} else: assert response.json == {"actor": None} From c23fa850e7f21977e367e3467656055216978e8a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 19:55:47 -0700 Subject: [PATCH 0338/1103] allow_signed_tokens setting, closes #1856 --- datasette/app.py | 5 +++++ datasette/default_permissions.py | 2 ++ datasette/views/special.py | 2 ++ docs/authentication.rst | 2 ++ docs/cli-reference.rst | 2 ++ docs/plugins.rst | 1 + docs/settings.rst | 13 +++++++++++++ tests/test_auth.py | 26 +++++++++++++++++++++----- 8 files changed, 48 insertions(+), 5 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index cab9d142..c868f8d3 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -124,6 +124,11 @@ SETTINGS = ( True, "Allow users to download the original SQLite database files", ), + Setting( + "allow_signed_tokens", + True, + "Allow users to create and use signed API tokens", + ), Setting("suggest_facets", True, "Calculate and display suggested facets"), Setting( "default_cache_ttl", diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index d908af7a..49ca8851 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -52,6 +52,8 @@ def permission_allowed(datasette, actor, action, resource): @hookimpl def actor_from_request(datasette, request): prefix = "dstok_" + if not datasette.setting("allow_signed_tokens"): + return None authorization = request.headers.get("authorization") if not authorization: return None diff --git a/datasette/views/special.py b/datasette/views/special.py index 91130353..89015958 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -171,6 +171,8 @@ class CreateTokenView(BaseView): has_json_alternate = False def check_permission(self, request): + if not self.ds.setting("allow_signed_tokens"): + raise Forbidden("Signed tokens are not enabled for this Datasette instance") if not request.actor: raise Forbidden("You must be logged in to create a token") if not request.actor.get("id"): diff --git a/docs/authentication.rst b/docs/authentication.rst index cbecd296..50304ec5 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -350,6 +350,8 @@ Coming soon: a mechanism for creating tokens that can only perform a subset of t This page cannot be accessed by actors with a ``"token": "some-value"`` property. This is to prevent API tokens from being used to automatically create more tokens. 
Datasette plugins that implement their own form of API token authentication should follow this convention. +You can disable this feature using the :ref:`allow_signed_tokens ` setting. + .. _permissions_plugins: Checking permissions in plugins diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 4a8465cb..fd5e2404 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -226,6 +226,8 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam ?_facet= parameter (default=True) allow_download Allow users to download the original SQLite database files (default=True) + allow_signed_tokens Allow users to create and use signed API tokens + (default=True) suggest_facets Calculate and display suggested facets (default=True) default_cache_ttl Default HTTP cache TTL (used in Cache-Control: diff --git a/docs/plugins.rst b/docs/plugins.rst index 29078054..9efef32f 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -151,6 +151,7 @@ If you run ``datasette plugins --all`` it will include default plugins that ship "templates": false, "version": null, "hooks": [ + "actor_from_request", "permission_allowed" ] }, diff --git a/docs/settings.rst b/docs/settings.rst index a6d50543..be640b21 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -169,6 +169,19 @@ Should users be able to download the original SQLite database using a link on th datasette mydatabase.db --setting allow_download off +.. _setting_allow_signed_tokens: + +allow_signed_tokens +~~~~~~~~~~~~~~~~~~~ + +Should users be able to create signed API tokens to access Datasette? + +This is turned on by default. Use the following to turn it off:: + + datasette mydatabase.db --setting allow_signed_tokens off + +Turning this setting off will disable the ``/-/create-token`` page, :ref:`described here `. It will also cause any incoming ``Authorization: Bearer dstok_...`` API tokens to be ignored. + .. 
_setting_default_cache_ttl: default_cache_ttl diff --git a/tests/test_auth.py b/tests/test_auth.py index 397d51d7..a79dafd8 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -189,9 +189,20 @@ def test_auth_create_token_not_allowed_for_tokens(app_client): assert response.status == 403 +def test_auth_create_token_not_allowed_if_allow_signed_tokens_off(app_client): + app_client.ds._settings["allow_signed_tokens"] = False + try: + ds_actor = app_client.actor_cookie({"id": "test"}) + response = app_client.get("/-/create-token", cookies={"ds_actor": ds_actor}) + assert response.status == 403 + finally: + app_client.ds._settings["allow_signed_tokens"] = True + + @pytest.mark.parametrize( "scenario,should_work", ( + ("allow_signed_tokens_off", False), ("no_token", False), ("invalid_token", False), ("expired_token", False), @@ -201,7 +212,7 @@ def test_auth_create_token_not_allowed_for_tokens(app_client): ) def test_auth_with_dstok_token(app_client, scenario, should_work): token = None - if scenario == "valid_unlimited_token": + if scenario in ("valid_unlimited_token", "allow_signed_tokens_off"): token = app_client.ds.sign({"a": "test"}, "token") elif scenario == "valid_expiring_token": token = app_client.ds.sign({"a": "test", "e": int(time.time()) + 1000}, "token") @@ -211,11 +222,16 @@ def test_auth_with_dstok_token(app_client, scenario, should_work): token = "invalid" if token: token = "dstok_{}".format(token) + if scenario == "allow_signed_tokens_off": + app_client.ds._settings["allow_signed_tokens"] = False headers = {} if token: headers["Authorization"] = "Bearer {}".format(token) response = app_client.get("/-/actor.json", headers=headers) - if should_work: - assert response.json == {"actor": {"id": "test", "token": "dstok"}} - else: - assert response.json == {"actor": None} + try: + if should_work: + assert response.json == {"actor": {"id": "test", "token": "dstok"}} + else: + assert response.json == {"actor": None} + finally: + app_client.ds._settings["allow_signed_tokens"] = True From c36a74ece1e475291af326d493d8db9ff3afdd30 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 21:04:39 -0700 Subject: [PATCH 0339/1103] Try shutting down executor in tests to free up thread local SQLite connections, refs #1843 --- tests/fixtures.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/fixtures.py b/tests/fixtures.py index 13a3dffa..d1afd2f3 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -166,6 +166,7 @@ def make_app_client( # Close the connection to avoid "too many open files" errors conn.close() os.remove(filepath) + ds.executor.shutdown() @pytest.fixture(scope="session") From c556fad65d8a45ce85027678796a12ac9107d9ed Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 21:25:47 -0700 Subject: [PATCH 0340/1103] Try to address too many files error again, refs #1843 --- tests/fixtures.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index d1afd2f3..92a10da6 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -131,10 +131,14 @@ def make_app_client( for sql, params in TABLE_PARAMETERIZED_SQL: with conn: conn.execute(sql, params) + # Close the connection to avoid "too many open files" errors + conn.close() if extra_databases is not None: for extra_filename, extra_sql in extra_databases.items(): extra_filepath = os.path.join(tmpdir, extra_filename) - sqlite3.connect(extra_filepath).executescript(extra_sql) + c2 = sqlite3.connect(extra_filepath) + 
c2.executescript(extra_sql) + c2.close() # Insert at start to help test /-/databases ordering: files.insert(0, extra_filepath) os.chdir(os.path.dirname(filepath)) @@ -163,10 +167,7 @@ def make_app_client( crossdb=crossdb, ) yield TestClient(ds) - # Close the connection to avoid "too many open files" errors - conn.close() os.remove(filepath) - ds.executor.shutdown() @pytest.fixture(scope="session") From c7956eed7777c62653b4d508570c5d77cfead7d9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 21:26:12 -0700 Subject: [PATCH 0341/1103] datasette create-token command, refs #1859 --- datasette/default_permissions.py | 38 ++++++++++++++++++++++++++++ docs/authentication.rst | 23 +++++++++++++++++ docs/cli-reference.rst | 43 ++++++++++++++++++++++++++------ docs/plugins.rst | 3 ++- tests/test_api.py | 1 + tests/test_auth.py | 28 +++++++++++++++++++++ tests/test_plugins.py | 2 ++ 7 files changed, 130 insertions(+), 8 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 49ca8851..12499c16 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -1,6 +1,8 @@ from datasette import hookimpl from datasette.utils import actor_matches_allow +import click import itsdangerous +import json import time @@ -72,3 +74,39 @@ def actor_from_request(datasette, request): if expires_at < time.time(): return None return {"id": decoded["a"], "token": "dstok"} + + +@hookimpl +def register_commands(cli): + from datasette.app import Datasette + + @cli.command() + @click.argument("id") + @click.option( + "--secret", + help="Secret used for signing the API tokens", + envvar="DATASETTE_SECRET", + required=True, + ) + @click.option( + "-e", + "--expires-after", + help="Token should expire after this many seconds", + type=int, + ) + @click.option( + "--debug", + help="Show decoded token", + is_flag=True, + ) + def create_token(id, secret, expires_after, debug): + "Create a signed API token for the specified actor ID" + ds = Datasette(secret=secret) + bits = {"a": id, "token": "dstok"} + if expires_after: + bits["e"] = int(time.time()) + expires_after + token = ds.sign(bits, namespace="token") + click.echo("dstok_{}".format(token)) + if debug: + click.echo("\nDecoded:\n") + click.echo(json.dumps(ds.unsign(token, namespace="token"), indent=2)) diff --git a/docs/authentication.rst b/docs/authentication.rst index 50304ec5..0835e17c 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -352,6 +352,29 @@ This page cannot be accessed by actors with a ``"token": "some-value"`` property You can disable this feature using the :ref:`allow_signed_tokens ` setting. +.. _authentication_cli_create_token: + +datasette create-token +---------------------- + +You can also create tokens on the command line using the ``datasette create-token`` command. + +This command takes one required argument - the ID of the actor to be associated with the created token. + +You can specify an ``--expires-after`` option in seconds. If omitted, the token will never expire. + +The command will sign the token using the ``DATASETTE_SECRET`` environment variable, if available. You can also pass the secret using the ``--secret`` option. + +This means you can run the command locally to create tokens for use with a deployed Datasette instance, provided you know that instance's secret. 
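As a sketch of that workflow only - the hostname, secret and actor ID below are placeholders, and ``httpx`` is just one of many ways to send the ``Authorization: Bearer`` header::

    import subprocess
    import httpx

    # Mint a token locally using the deployed instance's secret (placeholder value)
    token = subprocess.check_output(
        ["datasette", "create-token", "root", "--secret", "my-secret-goes-here"],
        text=True,
    ).strip()

    # Use it against the deployed instance (hypothetical URL)
    response = httpx.get(
        "https://example.com/-/actor.json",
        headers={"Authorization": "Bearer {}".format(token)},
    )
    print(response.json())  # expected: {"actor": {"id": "root", "token": "dstok"}}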
+ +To create a token for the ``root`` actor that will expire in one hour:: + + datasette create-token root --expires-after 3600 + +To create a secret that never expires using a specific secret:: + + datasette create-token root --secret my-secret-goes-here + .. _permissions_plugins: Checking permissions in plugins diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index fd5e2404..b40c6b2c 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -47,13 +47,14 @@ Running ``datasette --help`` shows a list of all of the available commands. --help Show this message and exit. Commands: - serve* Serve up specified SQLite database files with a web UI - inspect Generate JSON summary of provided database files - install Install plugins and packages from PyPI into the same... - package Package SQLite files into a Datasette Docker container - plugins List currently installed plugins - publish Publish specified SQLite database files to the internet along... - uninstall Uninstall plugins and Python packages from the Datasette... + serve* Serve up specified SQLite database files with a web UI + create-token Create a signed API token for the specified actor ID + inspect Generate JSON summary of provided database files + install Install plugins and packages from PyPI into the same... + package Package SQLite files into a Datasette Docker container + plugins List currently installed plugins + publish Publish specified SQLite database files to the internet... + uninstall Uninstall plugins and Python packages from the Datasette... .. [[[end]]] @@ -591,3 +592,31 @@ This performance optimization is used automatically by some of the ``datasette p .. [[[end]]] + + +.. _cli_help_create_token___help: + +datasette create-token +====================== + +Create a signed API token, see :ref:`authentication_cli_create_token`. + +.. [[[cog + help(["create-token", "--help"]) +.. ]]] + +:: + + Usage: datasette create-token [OPTIONS] ID + + Create a signed API token for the specified actor ID + + Options: + --secret TEXT Secret used for signing the API tokens + [required] + -e, --expires-after INTEGER Token should expire after this many seconds + --debug Show decoded token + --help Show this message and exit. + + +.. 
[[[end]]] diff --git a/docs/plugins.rst b/docs/plugins.rst index 9efef32f..3ae42293 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -152,7 +152,8 @@ If you run ``datasette plugins --all`` it will include default plugins that ship "version": null, "hooks": [ "actor_from_request", - "permission_allowed" + "permission_allowed", + "register_commands" ] }, { diff --git a/tests/test_api.py b/tests/test_api.py index ad74d16e..f7cbe950 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -806,6 +806,7 @@ def test_settings_json(app_client): "max_returned_rows": 100, "sql_time_limit_ms": 200, "allow_download": True, + "allow_signed_tokens": True, "allow_facet": True, "suggest_facets": True, "default_cache_ttl": 5, diff --git a/tests/test_auth.py b/tests/test_auth.py index a79dafd8..f2d82107 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,5 +1,7 @@ from .fixtures import app_client +from click.testing import CliRunner from datasette.utils import baseconv +from datasette.cli import cli import pytest import time @@ -235,3 +237,29 @@ def test_auth_with_dstok_token(app_client, scenario, should_work): assert response.json == {"actor": None} finally: app_client.ds._settings["allow_signed_tokens"] = True + + +@pytest.mark.parametrize("expires", (None, 1000, -1000)) +def test_cli_create_token(app_client, expires): + secret = app_client.ds._secret + runner = CliRunner(mix_stderr=False) + args = ["create-token", "--secret", secret, "test"] + if expires: + args += ["--expires-after", str(expires)] + result = runner.invoke(cli, args) + assert result.exit_code == 0 + token = result.output.strip() + assert token.startswith("dstok_") + details = app_client.ds.unsign(token[len("dstok_") :], "token") + expected_keys = {"a", "token"} + if expires: + expected_keys.add("e") + assert details.keys() == expected_keys + assert details["a"] == "test" + response = app_client.get( + "/-/actor.json", headers={"Authorization": "Bearer {}".format(token)} + ) + if expires is None or expires > 0: + assert response.json == {"actor": {"id": "test", "token": "dstok"}} + else: + assert response.json == {"actor": None} diff --git a/tests/test_plugins.py b/tests/test_plugins.py index e0a7bc76..de3fde8e 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -971,6 +971,7 @@ def test_hook_register_commands(): "plugins", "publish", "uninstall", + "create-token", } # Now install a plugin @@ -1001,6 +1002,7 @@ def test_hook_register_commands(): "uninstall", "verify", "unverify", + "create-token", } pm.unregister(name="verify") importlib.reload(cli) From df7bf0b2fc262f0b025b3cdd283ff8ce60653175 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:13:31 -0700 Subject: [PATCH 0342/1103] Fix bug with breadcrumbs and request=None, closes #1849 --- datasette/app.py | 9 ++++++--- tests/test_internals_datasette.py | 9 +++++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 9df16558..246269f3 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -633,15 +633,18 @@ class Datasette: async def _crumb_items(self, request, table=None, database=None): crumbs = [] + actor = None + if request: + actor = request.actor # Top-level link if await self.permission_allowed( - actor=request.actor, action="view-instance", default=True + actor=actor, action="view-instance", default=True ): crumbs.append({"href": self.urls.instance(), "label": "home"}) # Database link if database: if await self.permission_allowed( - actor=request.actor, + actor=actor, 
action="view-database", resource=database, default=True, @@ -656,7 +659,7 @@ class Datasette: if table: assert database, "table= requires database=" if await self.permission_allowed( - actor=request.actor, + actor=actor, action="view-table", resource=(database, table), default=True, diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index c82cafb3..1b4732af 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -125,3 +125,12 @@ async def test_datasette_ensure_permissions_check_visibility( visible, private = await ds.check_visibility(actor, permissions=permissions) assert visible == should_allow assert private == expected_private + + +@pytest.mark.asyncio +async def test_datasette_render_template_no_request(): + # https://github.com/simonw/datasette/issues/1849 + ds = Datasette([], memory=True) + await ds.invoke_startup() + rendered = await ds.render_template("error.html") + assert "Error " in rendered From 55a709c480a1e7401b4ff6208f37a2cf7c682183 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:34:33 -0700 Subject: [PATCH 0343/1103] Allow leading comments on SQL queries, refs #1860 --- datasette/utils/__init__.py | 27 +++++++++++++++++++++------ tests/test_utils.py | 7 +++++++ 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 803ba96d..977a66d6 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -205,13 +205,28 @@ class InvalidSql(Exception): pass +# Allow SQL to start with a /* */ or -- comment +comment_re = ( + # Start of string, then any amount of whitespace + r"^(\s*" + + + # Comment that starts with -- and ends at a newline + r"(?:\-\-.*?\n\s*)" + + + # Comment that starts with /* and ends with */ + r"|(?:/\*[\s\S]*?\*/)" + + + # Whitespace + r")*\s*" +) + allowed_sql_res = [ - re.compile(r"^select\b"), - re.compile(r"^explain\s+select\b"), - re.compile(r"^explain\s+query\s+plan\s+select\b"), - re.compile(r"^with\b"), - re.compile(r"^explain\s+with\b"), - re.compile(r"^explain\s+query\s+plan\s+with\b"), + re.compile(comment_re + r"select\b"), + re.compile(comment_re + r"explain\s+select\b"), + re.compile(comment_re + r"explain\s+query\s+plan\s+select\b"), + re.compile(comment_re + r"with\b"), + re.compile(comment_re + r"explain\s+with\b"), + re.compile(comment_re + r"explain\s+query\s+plan\s+with\b"), ] allowed_pragmas = ( "database_list", diff --git a/tests/test_utils.py b/tests/test_utils.py index d71a612d..e89f1e6b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -141,6 +141,7 @@ def test_custom_json_encoder(obj, expected): "update blah set some_column='# Hello there\n\n* This is a list\n* of items\n--\n[And a link](https://github.com/simonw/datasette-render-markdown).'\nas demo_markdown", "PRAGMA case_sensitive_like = true", "SELECT * FROM pragma_not_on_allow_list('idx52')", + "/* This comment is not valid. 
select 1", ], ) def test_validate_sql_select_bad(bad_sql): @@ -166,6 +167,12 @@ def test_validate_sql_select_bad(bad_sql): "explain query plan WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;", "SELECT * FROM pragma_index_info('idx52')", "select * from pragma_table_xinfo('table')", + # Various types of comment + "-- comment\nselect 1", + "-- one line\n -- two line\nselect 1", + " /* comment */\nselect 1", + " /* comment */select 1", + "/* comment */\n -- another\n /* one more */ select 1", ], ) def test_validate_sql_select_good(good_sql): From 55f860c304aea813cb7ed740cc5625560a0722a0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:13:31 -0700 Subject: [PATCH 0344/1103] Fix bug with breadcrumbs and request=None, closes #1849 --- datasette/app.py | 9 ++++++--- tests/test_internals_datasette.py | 9 +++++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index c868f8d3..596ff44d 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -639,15 +639,18 @@ class Datasette: async def _crumb_items(self, request, table=None, database=None): crumbs = [] + actor = None + if request: + actor = request.actor # Top-level link if await self.permission_allowed( - actor=request.actor, action="view-instance", default=True + actor=actor, action="view-instance", default=True ): crumbs.append({"href": self.urls.instance(), "label": "home"}) # Database link if database: if await self.permission_allowed( - actor=request.actor, + actor=actor, action="view-database", resource=database, default=True, @@ -662,7 +665,7 @@ class Datasette: if table: assert database, "table= requires database=" if await self.permission_allowed( - actor=request.actor, + actor=actor, action="view-table", resource=(database, table), default=True, diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index c82cafb3..1b4732af 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -125,3 +125,12 @@ async def test_datasette_ensure_permissions_check_visibility( visible, private = await ds.check_visibility(actor, permissions=permissions) assert visible == should_allow assert private == expected_private + + +@pytest.mark.asyncio +async def test_datasette_render_template_no_request(): + # https://github.com/simonw/datasette/issues/1849 + ds = Datasette([], memory=True) + await ds.invoke_startup() + rendered = await ds.render_template("error.html") + assert "Error " in rendered From af5d5d0243631562ad83f2c318bff31a077feb5d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:34:33 -0700 Subject: [PATCH 0345/1103] Allow leading comments on SQL queries, refs #1860 --- datasette/utils/__init__.py | 27 +++++++++++++++++++++------ tests/test_utils.py | 7 +++++++ 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 803ba96d..977a66d6 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -205,13 +205,28 @@ class InvalidSql(Exception): pass +# Allow SQL to start with a /* */ or -- comment +comment_re = ( + # Start of string, then any amount of whitespace + r"^(\s*" + + + # Comment that starts with -- and ends at a newline + r"(?:\-\-.*?\n\s*)" + + + # Comment that starts with /* and ends with */ + r"|(?:/\*[\s\S]*?\*/)" + + + # Whitespace + r")*\s*" +) + allowed_sql_res = [ - re.compile(r"^select\b"), - re.compile(r"^explain\s+select\b"), - 
re.compile(r"^explain\s+query\s+plan\s+select\b"), - re.compile(r"^with\b"), - re.compile(r"^explain\s+with\b"), - re.compile(r"^explain\s+query\s+plan\s+with\b"), + re.compile(comment_re + r"select\b"), + re.compile(comment_re + r"explain\s+select\b"), + re.compile(comment_re + r"explain\s+query\s+plan\s+select\b"), + re.compile(comment_re + r"with\b"), + re.compile(comment_re + r"explain\s+with\b"), + re.compile(comment_re + r"explain\s+query\s+plan\s+with\b"), ] allowed_pragmas = ( "database_list", diff --git a/tests/test_utils.py b/tests/test_utils.py index d71a612d..e89f1e6b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -141,6 +141,7 @@ def test_custom_json_encoder(obj, expected): "update blah set some_column='# Hello there\n\n* This is a list\n* of items\n--\n[And a link](https://github.com/simonw/datasette-render-markdown).'\nas demo_markdown", "PRAGMA case_sensitive_like = true", "SELECT * FROM pragma_not_on_allow_list('idx52')", + "/* This comment is not valid. select 1", ], ) def test_validate_sql_select_bad(bad_sql): @@ -166,6 +167,12 @@ def test_validate_sql_select_bad(bad_sql): "explain query plan WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;", "SELECT * FROM pragma_index_info('idx52')", "select * from pragma_table_xinfo('table')", + # Various types of comment + "-- comment\nselect 1", + "-- one line\n -- two line\nselect 1", + " /* comment */\nselect 1", + " /* comment */select 1", + "/* comment */\n -- another\n /* one more */ select 1", ], ) def test_validate_sql_select_good(good_sql): From 382a87158337540f991c6dc887080f7b37c7c26e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:13:31 -0700 Subject: [PATCH 0346/1103] max_signed_tokens_ttl setting, closes #1858 Also redesigned token format to include creation time and optional duration. 
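A minimal sketch of the redesigned payload, using the field names from this patch ("a" for the actor ID, "t" for the creation timestamp, "d" for an optional duration in seconds); the expiry check mirrors the logic added to ``actor_from_request`` below and is an illustration rather than the full implementation::

    import time

    # Payload that gets signed into a dstok_ token (field names from this patch)
    token_bits = {
        "a": "root",            # actor ID
        "t": int(time.time()),  # creation timestamp
        "d": 3600,              # optional duration in seconds; omit for no expiry
    }

    def token_expired(bits, max_signed_tokens_ttl=0):
        # max_signed_tokens_ttl caps the duration, or supplies one for
        # tokens that were created without an expiry
        duration = bits.get("d")
        if max_signed_tokens_ttl and (
            duration is None or duration > max_signed_tokens_ttl
        ):
            duration = max_signed_tokens_ttl
        if not duration:
            return False
        return time.time() - bits["t"] > duration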
--- datasette/app.py | 5 ++++ datasette/default_permissions.py | 33 +++++++++++++++++---- datasette/views/special.py | 20 ++++++++----- docs/settings.rst | 15 ++++++++++ tests/test_api.py | 1 + tests/test_auth.py | 50 ++++++++++++++++++++++++-------- 6 files changed, 99 insertions(+), 25 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 596ff44d..894d7f0f 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -129,6 +129,11 @@ SETTINGS = ( True, "Allow users to create and use signed API tokens", ), + Setting( + "max_signed_tokens_ttl", + 0, + "Maximum allowed expiry time for signed API tokens", + ), Setting("suggest_facets", True, "Calculate and display suggested facets"), Setting( "default_cache_ttl", diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 12499c16..c502dd70 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -56,6 +56,7 @@ def actor_from_request(datasette, request): prefix = "dstok_" if not datasette.setting("allow_signed_tokens"): return None + max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl") authorization = request.headers.get("authorization") if not authorization: return None @@ -69,11 +70,31 @@ def actor_from_request(datasette, request): decoded = datasette.unsign(token, namespace="token") except itsdangerous.BadSignature: return None - expires_at = decoded.get("e") - if expires_at is not None: - if expires_at < time.time(): + if "t" not in decoded: + # Missing timestamp + return None + created = decoded["t"] + if not isinstance(created, int): + # Invalid timestamp + return None + duration = decoded.get("d") + if duration is not None and not isinstance(duration, int): + # Invalid duration + return None + if (duration is None and max_signed_tokens_ttl) or ( + duration is not None + and max_signed_tokens_ttl + and duration > max_signed_tokens_ttl + ): + duration = max_signed_tokens_ttl + if duration: + if time.time() - created > duration: + # Expired return None - return {"id": decoded["a"], "token": "dstok"} + actor = {"id": decoded["a"], "token": "dstok"} + if duration: + actor["token_expires"] = created + duration + return actor @hookimpl @@ -102,9 +123,9 @@ def register_commands(cli): def create_token(id, secret, expires_after, debug): "Create a signed API token for the specified actor ID" ds = Datasette(secret=secret) - bits = {"a": id, "token": "dstok"} + bits = {"a": id, "token": "dstok", "t": int(time.time())} if expires_after: - bits["e"] = int(time.time()) + expires_after + bits["d"] = expires_after token = ds.sign(bits, namespace="token") click.echo("dstok_{}".format(token)) if debug: diff --git a/datasette/views/special.py b/datasette/views/special.py index 89015958..b754a2f0 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -195,20 +195,24 @@ class CreateTokenView(BaseView): async def post(self, request): self.check_permission(request) post = await request.post_vars() - expires = None errors = [] + duration = None if post.get("expire_type"): - duration = post.get("expire_duration") - if not duration or not duration.isdigit() or not int(duration) > 0: + duration_string = post.get("expire_duration") + if ( + not duration_string + or not duration_string.isdigit() + or not int(duration_string) > 0 + ): errors.append("Invalid expire duration") else: unit = post["expire_type"] if unit == "minutes": - expires = int(duration) * 60 + duration = int(duration_string) * 60 elif unit == "hours": - expires = int(duration) * 60 * 60 + duration 
= int(duration_string) * 60 * 60 elif unit == "days": - expires = int(duration) * 60 * 60 * 24 + duration = int(duration_string) * 60 * 60 * 24 else: errors.append("Invalid expire duration unit") token_bits = None @@ -216,8 +220,10 @@ class CreateTokenView(BaseView): if not errors: token_bits = { "a": request.actor["id"], - "e": (int(time.time()) + expires) if expires else None, + "t": int(time.time()), } + if duration: + token_bits["d"] = duration token = "dstok_{}".format(self.ds.sign(token_bits, "token")) return await self.render( ["create_token.html"], diff --git a/docs/settings.rst b/docs/settings.rst index be640b21..a990c78c 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -182,6 +182,21 @@ This is turned on by default. Use the following to turn it off:: Turning this setting off will disable the ``/-/create-token`` page, :ref:`described here `. It will also cause any incoming ``Authorization: Bearer dstok_...`` API tokens to be ignored. +.. _setting_max_signed_tokens_ttl: + +max_signed_tokens_ttl +~~~~~~~~~~~~~~~~~~~~~ + +Maximum allowed expiry time for signed API tokens created by users. + +Defaults to ``0`` which means no limit - tokens can be created that will never expire. + +Set this to a value in seconds to limit the maximum expiry time. For example, to set that limit to 24 hours you would use:: + + datasette mydatabase.db --setting max_signed_tokens_ttl 86400 + +This setting is enforced when incoming tokens are processed. + .. _setting_default_cache_ttl: default_cache_ttl diff --git a/tests/test_api.py b/tests/test_api.py index f7cbe950..fc171421 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -807,6 +807,7 @@ def test_settings_json(app_client): "sql_time_limit_ms": 200, "allow_download": True, "allow_signed_tokens": True, + "max_signed_tokens_ttl": 0, "allow_facet": True, "suggest_facets": True, "default_cache_ttl": 5, diff --git a/tests/test_auth.py b/tests/test_auth.py index f2d82107..fa1b2e46 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -173,13 +173,19 @@ def test_auth_create_token(app_client, post_data, errors, expected_duration): # Extract token from page token = response2.text.split('value="dstok_')[1].split('"')[0] details = app_client.ds.unsign(token, "token") - assert details.keys() == {"a", "e"} + assert details.keys() == {"a", "t", "d"} or details.keys() == {"a", "t"} assert details["a"] == "test" if expected_duration is None: - assert details["e"] is None + assert "d" not in details else: - about_right = int(time.time()) + expected_duration - assert about_right - 2 < details["e"] < about_right + 2 + assert details["d"] == expected_duration + # And test that token + response3 = app_client.get( + "/-/actor.json", + headers={"Authorization": "Bearer {}".format("dstok_{}".format(token))}, + ) + assert response3.status == 200 + assert response3.json["actor"]["id"] == "test" def test_auth_create_token_not_allowed_for_tokens(app_client): @@ -206,6 +212,7 @@ def test_auth_create_token_not_allowed_if_allow_signed_tokens_off(app_client): ( ("allow_signed_tokens_off", False), ("no_token", False), + ("no_timestamp", False), ("invalid_token", False), ("expired_token", False), ("valid_unlimited_token", True), @@ -214,12 +221,15 @@ def test_auth_create_token_not_allowed_if_allow_signed_tokens_off(app_client): ) def test_auth_with_dstok_token(app_client, scenario, should_work): token = None + _time = int(time.time()) if scenario in ("valid_unlimited_token", "allow_signed_tokens_off"): - token = app_client.ds.sign({"a": "test"}, "token") + token = 
app_client.ds.sign({"a": "test", "t": _time}, "token") elif scenario == "valid_expiring_token": - token = app_client.ds.sign({"a": "test", "e": int(time.time()) + 1000}, "token") + token = app_client.ds.sign({"a": "test", "t": _time - 50, "d": 1000}, "token") elif scenario == "expired_token": - token = app_client.ds.sign({"a": "test", "e": int(time.time()) - 1000}, "token") + token = app_client.ds.sign({"a": "test", "t": _time - 2000, "d": 1000}, "token") + elif scenario == "no_timestamp": + token = app_client.ds.sign({"a": "test"}, "token") elif scenario == "invalid_token": token = "invalid" if token: @@ -232,7 +242,16 @@ def test_auth_with_dstok_token(app_client, scenario, should_work): response = app_client.get("/-/actor.json", headers=headers) try: if should_work: - assert response.json == {"actor": {"id": "test", "token": "dstok"}} + assert response.json.keys() == {"actor"} + actor = response.json["actor"] + expected_keys = {"id", "token"} + if scenario != "valid_unlimited_token": + expected_keys.add("token_expires") + assert actor.keys() == expected_keys + assert actor["id"] == "test" + assert actor["token"] == "dstok" + if scenario != "valid_unlimited_token": + assert isinstance(actor["token_expires"], int) else: assert response.json == {"actor": None} finally: @@ -251,15 +270,22 @@ def test_cli_create_token(app_client, expires): token = result.output.strip() assert token.startswith("dstok_") details = app_client.ds.unsign(token[len("dstok_") :], "token") - expected_keys = {"a", "token"} + expected_keys = {"a", "token", "t"} if expires: - expected_keys.add("e") + expected_keys.add("d") assert details.keys() == expected_keys assert details["a"] == "test" response = app_client.get( "/-/actor.json", headers={"Authorization": "Bearer {}".format(token)} ) if expires is None or expires > 0: - assert response.json == {"actor": {"id": "test", "token": "dstok"}} + expected_actor = { + "id": "test", + "token": "dstok", + } + if expires and expires > 0: + expected_actor["token_expires"] = details["t"] + expires + assert response.json == {"actor": expected_actor} else: - assert response.json == {"actor": None} + expected_actor = None + assert response.json == {"actor": expected_actor} From 51c436fed29205721dcf17fa31d7e7090d34ebb8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 20:57:02 -0700 Subject: [PATCH 0347/1103] First draft of insert row write API, refs #1851 --- datasette/default_permissions.py | 2 +- datasette/views/table.py | 76 +++++++++++++++++++++++++++----- docs/authentication.rst | 12 +++++ docs/cli-reference.rst | 2 + docs/json_api.rst | 38 ++++++++++++++++ 5 files changed, 119 insertions(+), 11 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index c502dd70..87684e2a 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -9,7 +9,7 @@ import time @hookimpl(tryfirst=True) def permission_allowed(datasette, actor, action, resource): async def inner(): - if action in ("permissions-debug", "debug-menu"): + if action in ("permissions-debug", "debug-menu", "insert-row"): if actor and actor.get("id") == "root": return True elif action == "view-instance": diff --git a/datasette/views/table.py b/datasette/views/table.py index f73b0957..74d1c532 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -28,7 +28,7 @@ from datasette.utils import ( urlsafe_components, value_as_boolean, ) -from datasette.utils.asgi import BadRequest, Forbidden, NotFound +from datasette.utils.asgi 
import BadRequest, Forbidden, NotFound, Response from datasette.filters import Filters from .base import DataView, DatasetteError, ureg from .database import QueryView @@ -103,15 +103,71 @@ class TableView(DataView): canned_query = await self.ds.get_canned_query( database_name, table_name, request.actor ) - assert canned_query, "You may only POST to a canned query" - return await QueryView(self.ds).data( - request, - canned_query["sql"], - metadata=canned_query, - editable=False, - canned_query=table_name, - named_parameters=canned_query.get("params"), - write=bool(canned_query.get("write")), + if canned_query: + return await QueryView(self.ds).data( + request, + canned_query["sql"], + metadata=canned_query, + editable=False, + canned_query=table_name, + named_parameters=canned_query.get("params"), + write=bool(canned_query.get("write")), + ) + else: + # Handle POST to a table + return await self.table_post(request, database_name, table_name) + + async def table_post(self, request, database_name, table_name): + # Table must exist (may handle table creation in the future) + db = self.ds.get_database(database_name) + if not await db.table_exists(table_name): + raise NotFound("Table not found: {}".format(table_name)) + # Must have insert-row permission + if not await self.ds.permission_allowed( + request.actor, "insert-row", resource=(database_name, table_name) + ): + raise Forbidden("Permission denied") + if request.headers.get("content-type") != "application/json": + # TODO: handle form-encoded data + raise BadRequest("Must send JSON data") + data = json.loads(await request.post_body()) + if "row" not in data: + raise BadRequest('Must send "row" data') + row = data["row"] + if not isinstance(row, dict): + raise BadRequest("row must be a dictionary") + # Verify all columns exist + columns = await db.table_columns(table_name) + pks = await db.primary_keys(table_name) + for key in row: + if key not in columns: + raise BadRequest("Column not found: {}".format(key)) + if key in pks: + raise BadRequest( + "Cannot insert into primary key column: {}".format(key) + ) + # Perform the insert + sql = "INSERT INTO [{table}] ({columns}) VALUES ({values})".format( + table=escape_sqlite(table_name), + columns=", ".join(escape_sqlite(c) for c in row), + values=", ".join("?" for c in row), + ) + cursor = await db.execute_write(sql, list(row.values())) + # Return the new row + rowid = cursor.lastrowid + new_row = ( + await db.execute( + "SELECT * FROM [{table}] WHERE rowid = ?".format( + table=escape_sqlite(table_name) + ), + [rowid], + ) + ).first() + return Response.json( + { + "row": dict(new_row), + }, + status=201, ) async def columns_to_select(self, table_columns, pks, request): diff --git a/docs/authentication.rst b/docs/authentication.rst index 0835e17c..233a50d2 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -547,6 +547,18 @@ Actor is allowed to view (and execute) a :ref:`canned query ` pa Default *allow*. +.. _permissions_insert_row: + +insert-row +---------- + +Actor is allowed to insert rows into a table. + +``resource`` - tuple: (string, string) + The name of the database, then the name of the table + +Default *deny*. + .. 
_permissions_execute_sql: execute-sql diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index b40c6b2c..56156568 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -229,6 +229,8 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam database files (default=True) allow_signed_tokens Allow users to create and use signed API tokens (default=True) + max_signed_tokens_ttl Maximum allowed expiry time for signed API tokens + (default=0) suggest_facets Calculate and display suggested facets (default=True) default_cache_ttl Default HTTP cache TTL (used in Cache-Control: diff --git a/docs/json_api.rst b/docs/json_api.rst index d3fdb1e4..b339a738 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -455,3 +455,41 @@ You can find this near the top of the source code of those pages, looking like t The JSON URL is also made available in a ``Link`` HTTP header for the page:: Link: https://latest.datasette.io/fixtures/sortable.json; rel="alternate"; type="application/json+datasette" + +.. _json_api_write: + +The JSON write API +------------------ + +Datasette provides a write API for JSON data. This is a POST-only API that requires an authenticated API token, see :ref:`CreateTokenView`. + +.. _json_api_write_insert_row: + +Inserting a single row +~~~~~~~~~~~~~~~~~~~~~~ + +This requires the :ref:`permissions_insert_row` permission. + +:: + + POST // + Content-Type: application/json + Authorization: Bearer dstok_ + { + "row": { + "column1": "value1", + "column2": "value2" + } + } + +If successful, this will return a ``201`` status code and the newly inserted row, for example: + +.. code-block:: json + + { + "row": { + "id": 1, + "column1": "value1", + "column2": "value2" + } + } From f6ca86987ba9d7d48eccf2cfe0bfc94942003844 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 06:56:11 -0700 Subject: [PATCH 0348/1103] Delete mirror-master-and-main.yml Closes #1865 --- .github/workflows/mirror-master-and-main.yml | 21 -------------------- 1 file changed, 21 deletions(-) delete mode 100644 .github/workflows/mirror-master-and-main.yml diff --git a/.github/workflows/mirror-master-and-main.yml b/.github/workflows/mirror-master-and-main.yml deleted file mode 100644 index 8418df40..00000000 --- a/.github/workflows/mirror-master-and-main.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Mirror "master" and "main" branches -on: - push: - branches: - - master - - main - -jobs: - mirror: - runs-on: ubuntu-latest - steps: - - name: Mirror to "master" - uses: zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 - with: - target-branch: master - force: false - - name: Mirror to "main" - uses: zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 - with: - target-branch: main - force: false From 5f6be3c48b661f74198b8fc85361d3ad6657880e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 11:47:41 -0700 Subject: [PATCH 0349/1103] Better comment handling in SQL regex, refs #1860 --- datasette/utils/__init__.py | 9 +++++---- tests/test_utils.py | 1 + 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 977a66d6..5acfb8b4 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -208,16 +208,16 @@ class InvalidSql(Exception): # Allow SQL to start with a /* */ or -- comment comment_re = ( # Start of string, then any amount of whitespace - r"^(\s*" + r"^\s*(" + # Comment that starts with -- and ends at a newline r"(?:\-\-.*?\n\s*)" + - 
# Comment that starts with /* and ends with */ - r"|(?:/\*[\s\S]*?\*/)" + # Comment that starts with /* and ends with */ - but does not have */ in it + r"|(?:\/\*((?!\*\/)[\s\S])*\*\/)" + # Whitespace - r")*\s*" + r"\s*)*\s*" ) allowed_sql_res = [ @@ -228,6 +228,7 @@ allowed_sql_res = [ re.compile(comment_re + r"explain\s+with\b"), re.compile(comment_re + r"explain\s+query\s+plan\s+with\b"), ] + allowed_pragmas = ( "database_list", "foreign_key_list", diff --git a/tests/test_utils.py b/tests/test_utils.py index e89f1e6b..c1589107 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -142,6 +142,7 @@ def test_custom_json_encoder(obj, expected): "PRAGMA case_sensitive_like = true", "SELECT * FROM pragma_not_on_allow_list('idx52')", "/* This comment is not valid. select 1", + "/**/\nupdate foo set bar = 1\n/* test */ select 1", ], ) def test_validate_sql_select_bad(bad_sql): From d2ca13b699d441a201c55cb72ff96919d3cd22bf Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 11:50:54 -0700 Subject: [PATCH 0350/1103] Add test for /* multi line */ comment, refs #1860 --- tests/test_utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_utils.py b/tests/test_utils.py index c1589107..8b64f865 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -174,6 +174,7 @@ def test_validate_sql_select_bad(bad_sql): " /* comment */\nselect 1", " /* comment */select 1", "/* comment */\n -- another\n /* one more */ select 1", + "/* This comment \n has multiple lines */\nselect 1", ], ) def test_validate_sql_select_good(good_sql): From 918f3561208ee58c44773d30e21bace7d7c7cf3b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 06:56:11 -0700 Subject: [PATCH 0351/1103] Delete mirror-master-and-main.yml Closes #1865 --- .github/workflows/mirror-master-and-main.yml | 21 -------------------- 1 file changed, 21 deletions(-) delete mode 100644 .github/workflows/mirror-master-and-main.yml diff --git a/.github/workflows/mirror-master-and-main.yml b/.github/workflows/mirror-master-and-main.yml deleted file mode 100644 index 8418df40..00000000 --- a/.github/workflows/mirror-master-and-main.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Mirror "master" and "main" branches -on: - push: - branches: - - master - - main - -jobs: - mirror: - runs-on: ubuntu-latest - steps: - - name: Mirror to "master" - uses: zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 - with: - target-branch: master - force: false - - name: Mirror to "main" - uses: zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 - with: - target-branch: main - force: false From b597bb6b3e7c4b449654bbfa5b01ceff3eb3cb33 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 11:47:41 -0700 Subject: [PATCH 0352/1103] Better comment handling in SQL regex, refs #1860 --- datasette/utils/__init__.py | 9 +++++---- tests/test_utils.py | 1 + 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 977a66d6..5acfb8b4 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -208,16 +208,16 @@ class InvalidSql(Exception): # Allow SQL to start with a /* */ or -- comment comment_re = ( # Start of string, then any amount of whitespace - r"^(\s*" + r"^\s*(" + # Comment that starts with -- and ends at a newline r"(?:\-\-.*?\n\s*)" + - # Comment that starts with /* and ends with */ - r"|(?:/\*[\s\S]*?\*/)" + # Comment that starts with /* and ends with */ - but does not have */ in it + 
r"|(?:\/\*((?!\*\/)[\s\S])*\*\/)" + # Whitespace - r")*\s*" + r"\s*)*\s*" ) allowed_sql_res = [ @@ -228,6 +228,7 @@ allowed_sql_res = [ re.compile(comment_re + r"explain\s+with\b"), re.compile(comment_re + r"explain\s+query\s+plan\s+with\b"), ] + allowed_pragmas = ( "database_list", "foreign_key_list", diff --git a/tests/test_utils.py b/tests/test_utils.py index e89f1e6b..c1589107 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -142,6 +142,7 @@ def test_custom_json_encoder(obj, expected): "PRAGMA case_sensitive_like = true", "SELECT * FROM pragma_not_on_allow_list('idx52')", "/* This comment is not valid. select 1", + "/**/\nupdate foo set bar = 1\n/* test */ select 1", ], ) def test_validate_sql_select_bad(bad_sql): From 6958e21b5c2012adf5655d2512cb4106490d10f2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 11:50:54 -0700 Subject: [PATCH 0353/1103] Add test for /* multi line */ comment, refs #1860 --- tests/test_utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_utils.py b/tests/test_utils.py index c1589107..8b64f865 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -174,6 +174,7 @@ def test_validate_sql_select_bad(bad_sql): " /* comment */\nselect 1", " /* comment */select 1", "/* comment */\n -- another\n /* one more */ select 1", + "/* This comment \n has multiple lines */\nselect 1", ], ) def test_validate_sql_select_good(good_sql): From a51608090b5ee37593078f71d18b33767ef3af79 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 12:06:18 -0700 Subject: [PATCH 0354/1103] Slight tweak to insert row API design, refs #1851 https://github.com/simonw/datasette/issues/1851#issuecomment-1292997608 --- datasette/views/table.py | 10 +++++----- docs/json_api.rst | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 74d1c532..056b7b04 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -131,11 +131,11 @@ class TableView(DataView): # TODO: handle form-encoded data raise BadRequest("Must send JSON data") data = json.loads(await request.post_body()) - if "row" not in data: - raise BadRequest('Must send "row" data') - row = data["row"] + if "insert" not in data: + raise BadRequest('Must send a "insert" key containing a dictionary') + row = data["insert"] if not isinstance(row, dict): - raise BadRequest("row must be a dictionary") + raise BadRequest("insert must be a dictionary") # Verify all columns exist columns = await db.table_columns(table_name) pks = await db.primary_keys(table_name) @@ -165,7 +165,7 @@ class TableView(DataView): ).first() return Response.json( { - "row": dict(new_row), + "inserted_row": dict(new_row), }, status=201, ) diff --git a/docs/json_api.rst b/docs/json_api.rst index b339a738..2ed8a354 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -476,7 +476,7 @@ This requires the :ref:`permissions_insert_row` permission. Content-Type: application/json Authorization: Bearer dstok_ { - "row": { + "insert": { "column1": "value1", "column2": "value2" } @@ -487,7 +487,7 @@ If successful, this will return a ``201`` status code and the newly inserted row .. 
code-block:: json { - "row": { + "inserted_row": { "id": 1, "column1": "value1", "column2": "value2" From a2a5dff709c6f1676ac30b5e734c2763002562cf Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 12:08:26 -0700 Subject: [PATCH 0355/1103] Missing tests for insert row API, refs #1851 --- tests/test_api_write.py | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 tests/test_api_write.py diff --git a/tests/test_api_write.py b/tests/test_api_write.py new file mode 100644 index 00000000..86c221d0 --- /dev/null +++ b/tests/test_api_write.py @@ -0,0 +1,38 @@ +from datasette.app import Datasette +from datasette.utils import sqlite3 +import pytest +import time + + +@pytest.fixture +def ds_write(tmp_path_factory): + db_directory = tmp_path_factory.mktemp("dbs") + db_path = str(db_directory / "data.db") + db = sqlite3.connect(str(db_path)) + db.execute("vacuum") + db.execute("create table docs (id integer primary key, title text, score float)") + ds = Datasette([db_path]) + yield ds + db.close() + + +@pytest.mark.asyncio +async def test_write_row(ds_write): + token = "dstok_{}".format( + ds_write.sign( + {"a": "root", "token": "dstok", "t": int(time.time())}, namespace="token" + ) + ) + response = await ds_write.client.post( + "/data/docs", + json={"insert": {"title": "Test", "score": 1.0}}, + headers={ + "Authorization": "Bearer {}".format(token), + "Content-Type": "application/json", + }, + ) + expected_row = {"id": 1, "title": "Test", "score": 1.0} + assert response.status_code == 201 + assert response.json()["inserted_row"] == expected_row + rows = (await ds_write.get_database("data").execute("select * from docs")).rows + assert dict(rows[0]) == expected_row From 6e788b49edf4f842c0817f006eb9d865778eea5e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 13:17:18 -0700 Subject: [PATCH 0356/1103] New URL design /db/table/-/insert, refs #1851 --- datasette/app.py | 6 +++- datasette/views/table.py | 69 +++++++++++++++++++++++++++++++++++++++- docs/json_api.rst | 18 ++++++----- tests/test_api_write.py | 6 ++-- 4 files changed, 86 insertions(+), 13 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 894d7f0f..8bc5fe36 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -39,7 +39,7 @@ from .views.special import ( PermissionsDebugView, MessagesDebugView, ) -from .views.table import TableView +from .views.table import TableView, TableInsertView from .views.row import RowView from .renderer import json_renderer from .url_builder import Urls @@ -1262,6 +1262,10 @@ class Datasette: RowView.as_view(self), r"/(?P[^\/\.]+)/(?P
    [^/]+?)/(?P[^/]+?)(\.(?P\w+))?$", ) + add_route( + TableInsertView.as_view(self), + r"/(?P[^\/\.]+)/(?P
    [^\/\.]+)/-/insert$", + ) return [ # Compile any strings to regular expressions ((re.compile(pattern) if isinstance(pattern, str) else pattern), view) diff --git a/datasette/views/table.py b/datasette/views/table.py index 056b7b04..be3d4f93 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -30,7 +30,7 @@ from datasette.utils import ( ) from datasette.utils.asgi import BadRequest, Forbidden, NotFound, Response from datasette.filters import Filters -from .base import DataView, DatasetteError, ureg +from .base import BaseView, DataView, DatasetteError, ureg from .database import QueryView LINK_WITH_LABEL = ( @@ -1077,3 +1077,70 @@ async def display_columns_and_rows( } columns = [first_column] + columns return columns, cell_rows + + +class TableInsertView(BaseView): + name = "table-insert" + + def __init__(self, datasette): + self.ds = datasette + + async def post(self, request): + database_route = tilde_decode(request.url_vars["database"]) + try: + db = self.ds.get_database(route=database_route) + except KeyError: + raise NotFound("Database not found: {}".format(database_route)) + database_name = db.name + table_name = tilde_decode(request.url_vars["table"]) + # Table must exist (may handle table creation in the future) + db = self.ds.get_database(database_name) + if not await db.table_exists(table_name): + raise NotFound("Table not found: {}".format(table_name)) + # Must have insert-row permission + if not await self.ds.permission_allowed( + request.actor, "insert-row", resource=(database_name, table_name) + ): + raise Forbidden("Permission denied") + if request.headers.get("content-type") != "application/json": + # TODO: handle form-encoded data + raise BadRequest("Must send JSON data") + data = json.loads(await request.post_body()) + if "row" not in data: + raise BadRequest('Must send a "row" key containing a dictionary') + row = data["row"] + if not isinstance(row, dict): + raise BadRequest("row must be a dictionary") + # Verify all columns exist + columns = await db.table_columns(table_name) + pks = await db.primary_keys(table_name) + for key in row: + if key not in columns: + raise BadRequest("Column not found: {}".format(key)) + if key in pks: + raise BadRequest( + "Cannot insert into primary key column: {}".format(key) + ) + # Perform the insert + sql = "INSERT INTO [{table}] ({columns}) VALUES ({values})".format( + table=escape_sqlite(table_name), + columns=", ".join(escape_sqlite(c) for c in row), + values=", ".join("?" for c in row), + ) + cursor = await db.execute_write(sql, list(row.values())) + # Return the new row + rowid = cursor.lastrowid + new_row = ( + await db.execute( + "SELECT * FROM [{table}] WHERE rowid = ?".format( + table=escape_sqlite(table_name) + ), + [rowid], + ) + ).first() + return Response.json( + { + "inserted": [dict(new_row)], + }, + status=201, + ) diff --git a/docs/json_api.rst b/docs/json_api.rst index 2ed8a354..4a7961f2 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -463,7 +463,7 @@ The JSON write API Datasette provides a write API for JSON data. This is a POST-only API that requires an authenticated API token, see :ref:`CreateTokenView`. -.. _json_api_write_insert_row: +.. _TableInsertView: Inserting a single row ~~~~~~~~~~~~~~~~~~~~~~ @@ -472,11 +472,11 @@ This requires the :ref:`permissions_insert_row` permission. :: - POST //
    + POST //
    /-/insert Content-Type: application/json Authorization: Bearer dstok_ { - "insert": { + "row": { "column1": "value1", "column2": "value2" } @@ -487,9 +487,11 @@ If successful, this will return a ``201`` status code and the newly inserted row .. code-block:: json { - "inserted_row": { - "id": 1, - "column1": "value1", - "column2": "value2" - } + "inserted": [ + { + "id": 1, + "column1": "value1", + "column2": "value2" + } + ] } diff --git a/tests/test_api_write.py b/tests/test_api_write.py index 86c221d0..e8222e43 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -24,8 +24,8 @@ async def test_write_row(ds_write): ) ) response = await ds_write.client.post( - "/data/docs", - json={"insert": {"title": "Test", "score": 1.0}}, + "/data/docs/-/insert", + json={"row": {"title": "Test", "score": 1.0}}, headers={ "Authorization": "Bearer {}".format(token), "Content-Type": "application/json", @@ -33,6 +33,6 @@ async def test_write_row(ds_write): ) expected_row = {"id": 1, "title": "Test", "score": 1.0} assert response.status_code == 201 - assert response.json()["inserted_row"] == expected_row + assert response.json()["inserted"] == [expected_row] rows = (await ds_write.get_database("data").execute("select * from docs")).rows assert dict(rows[0]) == expected_row From b912d92b651c4f0b5137da924d135654511f0fe0 Mon Sep 17 00:00:00 2001 From: Forest Gregg Date: Thu, 27 Oct 2022 16:51:20 -0400 Subject: [PATCH 0357/1103] Make hash and size a lazy property (#1837) * use inspect data for hash and file size * make hash and cached_size lazy properties * move hash property near size --- datasette/database.py | 36 ++++++++++++++++++++++++------------ 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index d75bd70c..af1df0a8 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -39,7 +39,7 @@ class Database: self.memory_name = memory_name if memory_name is not None: self.is_memory = True - self.hash = None + self.cached_hash = None self.cached_size = None self._cached_table_counts = None self._write_thread = None @@ -47,14 +47,6 @@ class Database: # These are used when in non-threaded mode: self._read_connection = None self._write_connection = None - if not self.is_mutable and not self.is_memory: - if self.ds.inspect_data and self.ds.inspect_data.get(self.name): - self.hash = self.ds.inspect_data[self.name]["hash"] - self.cached_size = self.ds.inspect_data[self.name]["size"] - else: - p = Path(path) - self.hash = inspect_hash(p) - self.cached_size = p.stat().st_size @property def cached_table_counts(self): @@ -266,14 +258,34 @@ class Database: results = await self.execute_fn(sql_operation_in_thread) return results + @property + def hash(self): + if self.cached_hash is not None: + return self.cached_hash + elif self.is_mutable or self.is_memory: + return None + elif self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self.cached_hash = self.ds.inspect_data[self.name]["hash"] + return self.cached_hash + else: + p = Path(self.path) + self.cached_hash = inspect_hash(p) + return self.cached_hash + @property def size(self): - if self.is_memory: - return 0 if self.cached_size is not None: return self.cached_size - else: + elif self.is_memory: + return 0 + elif self.is_mutable: return Path(self.path).stat().st_size + elif self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self.cached_size = self.ds.inspect_data[self.name]["size"] + return self.cached_size + else: + self.cached_size = 
Path(self.path).stat().st_size + return self.cached_size async def table_counts(self, limit=10): if not self.is_mutable and self.cached_table_counts is not None: From 2c36e45447494cd7505440943367e29ec57c8e72 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 27 Oct 2022 13:51:45 -0700 Subject: [PATCH 0358/1103] Bump black from 22.8.0 to 22.10.0 (#1839) Bumps [black](https://github.com/psf/black) from 22.8.0 to 22.10.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/22.8.0...22.10.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index fe258adb..625557ae 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ setup( "pytest-xdist>=2.2.1", "pytest-asyncio>=0.17", "beautifulsoup4>=4.8.1", - "black==22.8.0", + "black==22.10.0", "blacken-docs==1.12.1", "pytest-timeout>=1.4.2", "trustme>=0.7", From e5e0459a0b60608cb5e9ff83f6b41f59e6cafdfd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 13:58:00 -0700 Subject: [PATCH 0359/1103] Release notes for 0.63, refs #1869 --- docs/changelog.rst | 44 +++++++++++++++++++++++++------------------- 1 file changed, 25 insertions(+), 19 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2255dcce..01957e4f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,36 +4,42 @@ Changelog ========= -.. _v0_63a1: +.. _v0_63: -0.63a1 (2022-10-23) -------------------- +0.63 (2022-10-27) +----------------- +Features +~~~~~~~~ + +- Now tested against Python 3.11. Docker containers used by ``datasette publish`` and ``datasette package`` both now use that version of Python. (:issue:`1853`) +- ``--load-extension`` option now supports entrypoints. Thanks, Alex Garcia. (`#1789 `__) +- Facet size can now be set per-table with the new ``facet_size`` table metadata option. (:issue:`1804`) +- The :ref:`setting_truncate_cells_html` setting now also affects long URLs in columns. (:issue:`1805`) +- The non-JavaScript SQL editor textarea now increases height to fit the SQL query. (:issue:`1786`) +- Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. (`#1794 `__) +- The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. (:issue:`1816`) +- SQL queries can now include leading SQL comments, using ``/* ... */`` or ``-- ...`` syntax. Thanks, Charles Nepote. (:issue:`1860`) - SQL query is now re-displayed when terminated with a time limit error. (:issue:`1819`) -- New documentation on :ref:`deploying_openrc` - thanks, Adam Simpson. (`#1825 `__) - The :ref:`inspect data ` mechanism is now used to speed up server startup - thanks, Forest Gregg. (:issue:`1834`) - In :ref:`config_dir` databases with filenames ending in ``.sqlite`` or ``.sqlite3`` are now automatically added to the Datasette instance. (:issue:`1646`) - Breadcrumb navigation display now respects the current user's permissions. (:issue:`1831`) -- Screenshots in the documentation are now maintained using `shot-scraper `__, as described in `Automating screenshots for the Datasette documentation using shot-scraper `__. 
(:issue:`1844`) -- The :ref:`datasette.check_visibility() ` method now accepts an optional ``permissions=`` list, allowing it to take multiple permissions into account at once when deciding if something should be shown as public or private. This has been used to correctly display padlock icons in more places in the Datasette interface. (:issue:`1829`) - -.. _v0_63a0: - -0.63a0 (2022-09-26) -------------------- +Plugin hooks and internals +~~~~~~~~~~~~~~~~~~~~~~~~~~ - The :ref:`plugin_hook_prepare_jinja2_environment` plugin hook now accepts an optional ``datasette`` argument. Hook implementations can also now return an ``async`` function which will be awaited automatically. (:issue:`1809`) -- ``--load-extension`` option now supports entrypoints. Thanks, Alex Garcia. (`#1789 `__) -- New tutorial: `Cleaning data with sqlite-utils and Datasette `__. -- Facet size can now be set per-table with the new ``facet_size`` table metadata option. (:issue:`1804`) -- ``truncate_cells_html`` setting now also affects long URLs in columns. (:issue:`1805`) - ``Database(is_mutable=)`` now defaults to ``True``. (:issue:`1808`) -- Non-JavaScript textarea now increases height to fit the SQL query. (:issue:`1786`) -- More detailed command descriptions on the :ref:`CLI reference ` page. (:issue:`1787`) +- The :ref:`datasette.check_visibility() ` method now accepts an optional ``permissions=`` list, allowing it to take multiple permissions into account at once when deciding if something should be shown as public or private. This has been used to correctly display padlock icons in more places in the Datasette interface. (:issue:`1829`) - Datasette no longer enforces upper bounds on its dependencies. (:issue:`1800`) -- Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. (`#1794 `__) -- The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. (:issue:`1816`) + +Documentation +~~~~~~~~~~~~~ + +- New tutorial: `Cleaning data with sqlite-utils and Datasette `__. +- Screenshots in the documentation are now maintained using `shot-scraper `__, as described in `Automating screenshots for the Datasette documentation using shot-scraper `__. (:issue:`1844`) +- More detailed command descriptions on the :ref:`CLI reference ` page. (:issue:`1787`) +- New documentation on :ref:`deploying_openrc` - thanks, Adam Simpson. (`#1825 `__) .. _v0_62: From bf00b0b59b6692bdec597ac9db4e0b497c5a47b4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 15:11:26 -0700 Subject: [PATCH 0360/1103] Release 0.63 Refs #1646, #1786, #1787, #1789, #1794, #1800, #1804, #1805, #1808, #1809, #1816, #1819, #1825, #1829, #1831, #1834, #1844, #1853, #1860 Closes #1869 --- datasette/version.py | 2 +- docs/changelog.rst | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index eb36da45..ac012640 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.63a1" +__version__ = "0.63" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 01957e4f..f573afb3 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -9,6 +9,8 @@ Changelog 0.63 (2022-10-27) ----------------- +See `Datasette 0.63: The annotated release notes `__ for more background on the changes in this release. 
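The 0.63 release being cut here includes the leading-SQL-comment support (#1860) implemented by the ``comment_re`` rework earlier in this series. A self-contained sketch of how that rebuilt pattern behaves, reusing the pattern and the test strings from those patches (illustrative only, not part of this patch; ``allowed_select`` is just a local name for the compiled whitelist entry)::

    import re

    # comment_re as rebuilt for #1860: any run of leading "--" comments or
    # "/* ... */" comments (with no stray "*/" inside), plus whitespace
    comment_re = (
        r"^\s*("
        r"(?:\-\-.*?\n\s*)"
        r"|(?:\/\*((?!\*\/)[\s\S])*\*\/)"
        r"\s*)*\s*"
    )
    allowed_select = re.compile(comment_re + r"select\b")

    assert allowed_select.match("/* This comment \n has multiple lines */\nselect 1")
    assert allowed_select.match("/* comment */\n -- another\n /* one more */ select 1")
    # A comment cannot be used to smuggle in a write statement:
    assert not allowed_select.match("/**/\nupdate foo set bar = 1\n/* test */ select 1")
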
+ Features ~~~~~~~~ From 2ea60e12d90b7cec03ebab728854d3ec4d553f54 Mon Sep 17 00:00:00 2001 From: Forest Gregg Date: Thu, 27 Oct 2022 16:51:20 -0400 Subject: [PATCH 0361/1103] Make hash and size a lazy property (#1837) * use inspect data for hash and file size * make hash and cached_size lazy properties * move hash property near size --- datasette/database.py | 36 ++++++++++++++++++++++++------------ 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index d75bd70c..af1df0a8 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -39,7 +39,7 @@ class Database: self.memory_name = memory_name if memory_name is not None: self.is_memory = True - self.hash = None + self.cached_hash = None self.cached_size = None self._cached_table_counts = None self._write_thread = None @@ -47,14 +47,6 @@ class Database: # These are used when in non-threaded mode: self._read_connection = None self._write_connection = None - if not self.is_mutable and not self.is_memory: - if self.ds.inspect_data and self.ds.inspect_data.get(self.name): - self.hash = self.ds.inspect_data[self.name]["hash"] - self.cached_size = self.ds.inspect_data[self.name]["size"] - else: - p = Path(path) - self.hash = inspect_hash(p) - self.cached_size = p.stat().st_size @property def cached_table_counts(self): @@ -266,14 +258,34 @@ class Database: results = await self.execute_fn(sql_operation_in_thread) return results + @property + def hash(self): + if self.cached_hash is not None: + return self.cached_hash + elif self.is_mutable or self.is_memory: + return None + elif self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self.cached_hash = self.ds.inspect_data[self.name]["hash"] + return self.cached_hash + else: + p = Path(self.path) + self.cached_hash = inspect_hash(p) + return self.cached_hash + @property def size(self): - if self.is_memory: - return 0 if self.cached_size is not None: return self.cached_size - else: + elif self.is_memory: + return 0 + elif self.is_mutable: return Path(self.path).stat().st_size + elif self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self.cached_size = self.ds.inspect_data[self.name]["size"] + return self.cached_size + else: + self.cached_size = Path(self.path).stat().st_size + return self.cached_size async def table_counts(self, limit=10): if not self.is_mutable and self.cached_table_counts is not None: From 641bc4453b5ef1dff0b2fc7dfad0b692be7aa61c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 27 Oct 2022 13:51:45 -0700 Subject: [PATCH 0362/1103] Bump black from 22.8.0 to 22.10.0 (#1839) Bumps [black](https://github.com/psf/black) from 22.8.0 to 22.10.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/22.8.0...22.10.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index fe258adb..625557ae 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ setup( "pytest-xdist>=2.2.1", "pytest-asyncio>=0.17", "beautifulsoup4>=4.8.1", - "black==22.8.0", + "black==22.10.0", "blacken-docs==1.12.1", "pytest-timeout>=1.4.2", "trustme>=0.7", From 26af9b9c4a6c62ee15870caa1c7bc455165d3b11 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 13:58:00 -0700 Subject: [PATCH 0363/1103] Release notes for 0.63, refs #1869 --- docs/changelog.rst | 44 +++++++++++++++++++++++++------------------- 1 file changed, 25 insertions(+), 19 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2255dcce..01957e4f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,36 +4,42 @@ Changelog ========= -.. _v0_63a1: +.. _v0_63: -0.63a1 (2022-10-23) -------------------- +0.63 (2022-10-27) +----------------- +Features +~~~~~~~~ + +- Now tested against Python 3.11. Docker containers used by ``datasette publish`` and ``datasette package`` both now use that version of Python. (:issue:`1853`) +- ``--load-extension`` option now supports entrypoints. Thanks, Alex Garcia. (`#1789 `__) +- Facet size can now be set per-table with the new ``facet_size`` table metadata option. (:issue:`1804`) +- The :ref:`setting_truncate_cells_html` setting now also affects long URLs in columns. (:issue:`1805`) +- The non-JavaScript SQL editor textarea now increases height to fit the SQL query. (:issue:`1786`) +- Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. (`#1794 `__) +- The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. (:issue:`1816`) +- SQL queries can now include leading SQL comments, using ``/* ... */`` or ``-- ...`` syntax. Thanks, Charles Nepote. (:issue:`1860`) - SQL query is now re-displayed when terminated with a time limit error. (:issue:`1819`) -- New documentation on :ref:`deploying_openrc` - thanks, Adam Simpson. (`#1825 `__) - The :ref:`inspect data ` mechanism is now used to speed up server startup - thanks, Forest Gregg. (:issue:`1834`) - In :ref:`config_dir` databases with filenames ending in ``.sqlite`` or ``.sqlite3`` are now automatically added to the Datasette instance. (:issue:`1646`) - Breadcrumb navigation display now respects the current user's permissions. (:issue:`1831`) -- Screenshots in the documentation are now maintained using `shot-scraper `__, as described in `Automating screenshots for the Datasette documentation using shot-scraper `__. (:issue:`1844`) -- The :ref:`datasette.check_visibility() ` method now accepts an optional ``permissions=`` list, allowing it to take multiple permissions into account at once when deciding if something should be shown as public or private. This has been used to correctly display padlock icons in more places in the Datasette interface. (:issue:`1829`) - -.. _v0_63a0: - -0.63a0 (2022-09-26) -------------------- +Plugin hooks and internals +~~~~~~~~~~~~~~~~~~~~~~~~~~ - The :ref:`plugin_hook_prepare_jinja2_environment` plugin hook now accepts an optional ``datasette`` argument. Hook implementations can also now return an ``async`` function which will be awaited automatically. (:issue:`1809`) -- ``--load-extension`` option now supports entrypoints. Thanks, Alex Garcia. 
(`#1789 `__) -- New tutorial: `Cleaning data with sqlite-utils and Datasette `__. -- Facet size can now be set per-table with the new ``facet_size`` table metadata option. (:issue:`1804`) -- ``truncate_cells_html`` setting now also affects long URLs in columns. (:issue:`1805`) - ``Database(is_mutable=)`` now defaults to ``True``. (:issue:`1808`) -- Non-JavaScript textarea now increases height to fit the SQL query. (:issue:`1786`) -- More detailed command descriptions on the :ref:`CLI reference ` page. (:issue:`1787`) +- The :ref:`datasette.check_visibility() ` method now accepts an optional ``permissions=`` list, allowing it to take multiple permissions into account at once when deciding if something should be shown as public or private. This has been used to correctly display padlock icons in more places in the Datasette interface. (:issue:`1829`) - Datasette no longer enforces upper bounds on its dependencies. (:issue:`1800`) -- Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. (`#1794 `__) -- The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. (:issue:`1816`) + +Documentation +~~~~~~~~~~~~~ + +- New tutorial: `Cleaning data with sqlite-utils and Datasette `__. +- Screenshots in the documentation are now maintained using `shot-scraper `__, as described in `Automating screenshots for the Datasette documentation using shot-scraper `__. (:issue:`1844`) +- More detailed command descriptions on the :ref:`CLI reference ` page. (:issue:`1787`) +- New documentation on :ref:`deploying_openrc` - thanks, Adam Simpson. (`#1825 `__) .. _v0_62: From 61171f01549549e5fb25c72b13280d941d96dbf1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 15:11:26 -0700 Subject: [PATCH 0364/1103] Release 0.63 Refs #1646, #1786, #1787, #1789, #1794, #1800, #1804, #1805, #1808, #1809, #1816, #1819, #1825, #1829, #1831, #1834, #1844, #1853, #1860 Closes #1869 --- datasette/version.py | 2 +- docs/changelog.rst | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index eb36da45..ac012640 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.63a1" +__version__ = "0.63" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 01957e4f..f573afb3 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -9,6 +9,8 @@ Changelog 0.63 (2022-10-27) ----------------- +See `Datasette 0.63: The annotated release notes `__ for more background on the changes in this release. + Features ~~~~~~~~ From c9b5f5d598e7f85cd3e1ce020351a27da334408b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 17:58:36 -0700 Subject: [PATCH 0365/1103] Depend on sqlite-utils>=3.30 Decided to use the most recent version in case I decide later to use the flatten() utility function. 
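The sqlite-utils calls this new dependency is needed for appear later in this series: the insert API wraps a write connection in ``sqlite_utils.Database`` and uses ``insert()`` / ``insert_all()`` with the ``ignore`` and ``replace`` options, while ``flatten()`` is only mentioned above as a possible future use. A minimal, self-contained sketch of those calls (illustrative only, not part of this patch; the ``docs`` table name is just for illustration)::

    import sqlite3
    import sqlite_utils
    from sqlite_utils.utils import flatten

    conn = sqlite3.connect(":memory:")
    db = sqlite_utils.Database(conn)

    # insert_all() creates the table from the first row's keys if needed
    db["docs"].insert_all(
        [{"id": 1, "title": "One"}, {"id": 2, "title": "Two"}], pk="id"
    )

    # ignore=True skips rows whose primary key already exists;
    # replace=True would overwrite them instead
    db["docs"].insert({"id": 1, "title": "One again"}, pk="id", ignore=True)

    # flatten() turns nested dictionaries into flat, column-friendly keys
    assert flatten({"a": {"b": 1}}) == {"a_b": 1}
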
Refs #1850 --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 625557ae..99e2a4ad 100644 --- a/setup.py +++ b/setup.py @@ -57,6 +57,7 @@ setup( "PyYAML>=5.3", "mergedeep>=1.1.1", "itsdangerous>=1.1", + "sqlite-utils>=3.30", ], entry_points=""" [console_scripts] From c35859ae3df163406f1a1895ccf9803e933b2d8e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 29 Oct 2022 23:03:45 -0700 Subject: [PATCH 0366/1103] API for bulk inserts, closes #1866 --- datasette/app.py | 5 ++ datasette/views/table.py | 136 +++++++++++++++++++++---------- docs/cli-reference.rst | 2 + docs/json_api.rst | 48 ++++++++++- docs/settings.rst | 11 +++ tests/test_api.py | 1 + tests/test_api_write.py | 168 +++++++++++++++++++++++++++++++++++++-- 7 files changed, 320 insertions(+), 51 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 8bc5fe36..f80d3792 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -99,6 +99,11 @@ SETTINGS = ( 1000, "Maximum rows that can be returned from a table or custom query", ), + Setting( + "max_insert_rows", + 100, + "Maximum rows that can be inserted at a time using the bulk insert API", + ), Setting( "num_sql_threads", 3, diff --git a/datasette/views/table.py b/datasette/views/table.py index be3d4f93..fd203036 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -30,6 +30,7 @@ from datasette.utils import ( ) from datasette.utils.asgi import BadRequest, Forbidden, NotFound, Response from datasette.filters import Filters +import sqlite_utils from .base import BaseView, DataView, DatasetteError, ureg from .database import QueryView @@ -1085,62 +1086,109 @@ class TableInsertView(BaseView): def __init__(self, datasette): self.ds = datasette + async def _validate_data(self, request, db, table_name): + errors = [] + + def _errors(errors): + return None, errors, {} + + if request.headers.get("content-type") != "application/json": + # TODO: handle form-encoded data + return _errors(["Invalid content-type, must be application/json"]) + body = await request.post_body() + try: + data = json.loads(body) + except json.JSONDecodeError as e: + return _errors(["Invalid JSON: {}".format(e)]) + if not isinstance(data, dict): + return _errors(["JSON must be a dictionary"]) + keys = data.keys() + # keys must contain "row" or "rows" + if "row" not in keys and "rows" not in keys: + return _errors(['JSON must have one or other of "row" or "rows"']) + rows = [] + if "row" in keys: + if "rows" in keys: + return _errors(['Cannot use "row" and "rows" at the same time']) + row = data["row"] + if not isinstance(row, dict): + return _errors(['"row" must be a dictionary']) + rows = [row] + data["return_rows"] = True + else: + rows = data["rows"] + if not isinstance(rows, list): + return _errors(['"rows" must be a list']) + for row in rows: + if not isinstance(row, dict): + return _errors(['"rows" must be a list of dictionaries']) + # Does this exceed max_insert_rows? 
+ max_insert_rows = self.ds.setting("max_insert_rows") + if len(rows) > max_insert_rows: + return _errors( + ["Too many rows, maximum allowed is {}".format(max_insert_rows)] + ) + # Validate columns of each row + columns = await db.table_columns(table_name) + # TODO: There are cases where pks are OK, if not using auto-incrementing pk + pks = await db.primary_keys(table_name) + allowed_columns = set(columns) - set(pks) + for i, row in enumerate(rows): + invalid_columns = set(row.keys()) - allowed_columns + if invalid_columns: + errors.append( + "Row {} has invalid columns: {}".format( + i, ", ".join(sorted(invalid_columns)) + ) + ) + if errors: + return _errors(errors) + extra = {key: data[key] for key in data if key not in ("rows", "row")} + return rows, errors, extra + async def post(self, request): + def _error(messages, status=400): + return Response.json({"ok": False, "errors": messages}, status=status) + database_route = tilde_decode(request.url_vars["database"]) try: db = self.ds.get_database(route=database_route) except KeyError: - raise NotFound("Database not found: {}".format(database_route)) + return _error(["Database not found: {}".format(database_route)], 404) database_name = db.name table_name = tilde_decode(request.url_vars["table"]) + # Table must exist (may handle table creation in the future) db = self.ds.get_database(database_name) if not await db.table_exists(table_name): - raise NotFound("Table not found: {}".format(table_name)) + return _error(["Table not found: {}".format(table_name)], 404) # Must have insert-row permission if not await self.ds.permission_allowed( request.actor, "insert-row", resource=(database_name, table_name) ): - raise Forbidden("Permission denied") - if request.headers.get("content-type") != "application/json": - # TODO: handle form-encoded data - raise BadRequest("Must send JSON data") - data = json.loads(await request.post_body()) - if "row" not in data: - raise BadRequest('Must send a "row" key containing a dictionary') - row = data["row"] - if not isinstance(row, dict): - raise BadRequest("row must be a dictionary") - # Verify all columns exist - columns = await db.table_columns(table_name) - pks = await db.primary_keys(table_name) - for key in row: - if key not in columns: - raise BadRequest("Column not found: {}".format(key)) - if key in pks: - raise BadRequest( - "Cannot insert into primary key column: {}".format(key) + return _error(["Permission denied"], 403) + rows, errors, extra = await self._validate_data(request, db, table_name) + if errors: + return _error(errors, 400) + + should_return = bool(extra.get("return_rows", False)) + # Insert rows + def insert_rows(conn): + table = sqlite_utils.Database(conn)[table_name] + if should_return: + rowids = [] + for row in rows: + rowids.append(table.insert(row).last_rowid) + return list( + table.rows_where( + "rowid in ({})".format(",".join("?" for _ in rowids)), rowids + ) ) - # Perform the insert - sql = "INSERT INTO [{table}] ({columns}) VALUES ({values})".format( - table=escape_sqlite(table_name), - columns=", ".join(escape_sqlite(c) for c in row), - values=", ".join("?" 
for c in row), - ) - cursor = await db.execute_write(sql, list(row.values())) - # Return the new row - rowid = cursor.lastrowid - new_row = ( - await db.execute( - "SELECT * FROM [{table}] WHERE rowid = ?".format( - table=escape_sqlite(table_name) - ), - [rowid], - ) - ).first() - return Response.json( - { - "inserted": [dict(new_row)], - }, - status=201, - ) + else: + table.insert_all(rows) + + rows = await db.execute_write_fn(insert_rows) + result = {"ok": True} + if should_return: + result["inserted"] = rows + return Response.json(result, status=201) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 56156568..649a3dcd 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -213,6 +213,8 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam (default=100) max_returned_rows Maximum rows that can be returned from a table or custom query (default=1000) + max_insert_rows Maximum rows that can be inserted at a time using + the bulk insert API (default=1000) num_sql_threads Number of threads in the thread pool for executing SQLite queries (default=3) sql_time_limit_ms Time limit for a SQL query in milliseconds diff --git a/docs/json_api.rst b/docs/json_api.rst index 4a7961f2..01558c23 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -465,11 +465,13 @@ Datasette provides a write API for JSON data. This is a POST-only API that requi .. _TableInsertView: -Inserting a single row -~~~~~~~~~~~~~~~~~~~~~~ +Inserting rows +~~~~~~~~~~~~~~ This requires the :ref:`permissions_insert_row` permission. +A single row can be inserted using the ``"row"`` key: + :: POST //
    /-/insert @@ -495,3 +497,45 @@ If successful, this will return a ``201`` status code and the newly inserted row } ] } + +To insert multiple rows at a time, use the same API method but send a list of dictionaries as the ``"rows"`` key: + +:: + + POST //
    /-/insert + Content-Type: application/json + Authorization: Bearer dstok_ + { + "rows": [ + { + "column1": "value1", + "column2": "value2" + }, + { + "column1": "value3", + "column2": "value4" + } + ] + } + +If successful, this will return a ``201`` status code and an empty ``{}`` response body. + +To return the newly inserted rows, add the ``"return_rows": true`` key to the request body: + +.. code-block:: json + + { + "rows": [ + { + "column1": "value1", + "column2": "value2" + }, + { + "column1": "value3", + "column2": "value4" + } + ], + "return_rows": true + } + +This will return the same ``"inserted"`` key as the single row example above. There is a small performance penalty for using this option. diff --git a/docs/settings.rst b/docs/settings.rst index a990c78c..b86b18bd 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -96,6 +96,17 @@ You can increase or decrease this limit like so:: datasette mydatabase.db --setting max_returned_rows 2000 +.. _setting_max_insert_rows: + +max_insert_rows +~~~~~~~~~~~~~~~ + +Maximum rows that can be inserted at a time using the bulk insert API, see :ref:`TableInsertView`. Defaults to 100. + +You can increase or decrease this limit like so:: + + datasette mydatabase.db --setting max_insert_rows 1000 + .. _setting_num_sql_threads: num_sql_threads diff --git a/tests/test_api.py b/tests/test_api.py index fc171421..ebd675b9 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -804,6 +804,7 @@ def test_settings_json(app_client): "facet_suggest_time_limit_ms": 50, "facet_time_limit_ms": 200, "max_returned_rows": 100, + "max_insert_rows": 100, "sql_time_limit_ms": 200, "allow_download": True, "allow_signed_tokens": True, diff --git a/tests/test_api_write.py b/tests/test_api_write.py index e8222e43..4a5a58aa 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -18,11 +18,7 @@ def ds_write(tmp_path_factory): @pytest.mark.asyncio async def test_write_row(ds_write): - token = "dstok_{}".format( - ds_write.sign( - {"a": "root", "token": "dstok", "t": int(time.time())}, namespace="token" - ) - ) + token = write_token(ds_write) response = await ds_write.client.post( "/data/docs/-/insert", json={"row": {"title": "Test", "score": 1.0}}, @@ -36,3 +32,165 @@ async def test_write_row(ds_write): assert response.json()["inserted"] == [expected_row] rows = (await ds_write.get_database("data").execute("select * from docs")).rows assert dict(rows[0]) == expected_row + + +@pytest.mark.asyncio +@pytest.mark.parametrize("return_rows", (True, False)) +async def test_write_rows(ds_write, return_rows): + token = write_token(ds_write) + data = {"rows": [{"title": "Test {}".format(i), "score": 1.0} for i in range(20)]} + if return_rows: + data["return_rows"] = True + response = await ds_write.client.post( + "/data/docs/-/insert", + json=data, + headers={ + "Authorization": "Bearer {}".format(token), + "Content-Type": "application/json", + }, + ) + assert response.status_code == 201 + actual_rows = [ + dict(r) + for r in ( + await ds_write.get_database("data").execute("select * from docs") + ).rows + ] + assert len(actual_rows) == 20 + assert actual_rows == [ + {"id": i + 1, "title": "Test {}".format(i), "score": 1.0} for i in range(20) + ] + assert response.json()["ok"] is True + if return_rows: + assert response.json()["inserted"] == actual_rows + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "path,input,special_case,expected_status,expected_errors", + ( + ( + "/data2/docs/-/insert", + {}, + None, + 404, + ["Database not found: data2"], + 
), + ( + "/data/docs2/-/insert", + {}, + None, + 404, + ["Table not found: docs2"], + ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"} for i in range(10)]}, + "bad_token", + 403, + ["Permission denied"], + ), + ( + "/data/docs/-/insert", + {}, + "invalid_json", + 400, + [ + "Invalid JSON: Expecting property name enclosed in double quotes: line 1 column 2 (char 1)" + ], + ), + ( + "/data/docs/-/insert", + {}, + "invalid_content_type", + 400, + ["Invalid content-type, must be application/json"], + ), + ( + "/data/docs/-/insert", + [], + None, + 400, + ["JSON must be a dictionary"], + ), + ( + "/data/docs/-/insert", + {"row": "blah"}, + None, + 400, + ['"row" must be a dictionary'], + ), + ( + "/data/docs/-/insert", + {"blah": "blah"}, + None, + 400, + ['JSON must have one or other of "row" or "rows"'], + ), + ( + "/data/docs/-/insert", + {"rows": "blah"}, + None, + 400, + ['"rows" must be a list'], + ), + ( + "/data/docs/-/insert", + {"rows": ["blah"]}, + None, + 400, + ['"rows" must be a list of dictionaries'], + ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"} for i in range(101)]}, + None, + 400, + ["Too many rows, maximum allowed is 100"], + ), + # Validate columns of each row + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test", "bad": 1, "worse": 2} for i in range(2)]}, + None, + 400, + [ + "Row 0 has invalid columns: bad, worse", + "Row 1 has invalid columns: bad, worse", + ], + ), + ), +) +async def test_write_row_errors( + ds_write, path, input, special_case, expected_status, expected_errors +): + token = write_token(ds_write) + if special_case == "bad_token": + token += "bad" + kwargs = dict( + json=input, + headers={ + "Authorization": "Bearer {}".format(token), + "Content-Type": "text/plain" + if special_case == "invalid_content_type" + else "application/json", + }, + ) + if special_case == "invalid_json": + del kwargs["json"] + kwargs["content"] = "{bad json" + response = await ds_write.client.post( + path, + **kwargs, + ) + assert response.status_code == expected_status + assert response.json()["ok"] is False + assert response.json()["errors"] == expected_errors + + +def write_token(ds): + return "dstok_{}".format( + ds.sign( + {"a": "root", "token": "dstok", "t": int(time.time())}, namespace="token" + ) + ) From f6bf2d8045cc239fe34357342bff1440561c8909 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 29 Oct 2022 23:20:11 -0700 Subject: [PATCH 0367/1103] Initial prototype of API explorer at /-/api, refs #1871 --- datasette/app.py | 5 ++ datasette/templates/api_explorer.html | 73 +++++++++++++++++++++++++++ datasette/views/special.py | 8 +++ tests/test_docs.py | 2 +- 4 files changed, 87 insertions(+), 1 deletion(-) create mode 100644 datasette/templates/api_explorer.html diff --git a/datasette/app.py b/datasette/app.py index f80d3792..c3d802a4 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -33,6 +33,7 @@ from .views.special import ( JsonDataView, PatternPortfolioView, AuthTokenView, + ApiExplorerView, CreateTokenView, LogoutView, AllowDebugView, @@ -1235,6 +1236,10 @@ class Datasette: CreateTokenView.as_view(self), r"/-/create-token$", ) + add_route( + ApiExplorerView.as_view(self), + r"/-/api$", + ) add_route( LogoutView.as_view(self), r"/-/logout$", diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html new file mode 100644 index 00000000..034bee60 --- /dev/null +++ b/datasette/templates/api_explorer.html @@ -0,0 +1,73 @@ +{% extends "base.html" %} + +{% block title %}API Explorer{% endblock %} 
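For reference, a condensed sketch of how the tests above exercise the write API that this explorer page is a UI for — the signed token and the ``/-/insert`` call are taken directly from ``tests/test_api_write.py``; the ``data.db`` database, ``docs`` table and root actor are test fixtures rather than a general requirement::

    import time
    from datasette.app import Datasette

    async def insert_example():
        # assumes a data.db file with a "docs" table already exists
        ds = Datasette(["data.db"])
        token = "dstok_{}".format(
            ds.sign(
                {"a": "root", "token": "dstok", "t": int(time.time())},
                namespace="token",
            )
        )
        response = await ds.client.post(
            "/data/docs/-/insert",
            json={"row": {"title": "Test", "score": 1.0}},
            headers={
                "Authorization": "Bearer {}".format(token),
                "Content-Type": "application/json",
            },
        )
        assert response.status_code == 201
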
+ +{% block content %} + +

    API Explorer

    + +

    Use this tool to try out the Datasette write API.

    + +{% if errors %} + {% for error in errors %} +

    {{ error }}

    + {% endfor %} +{% endif %} + + +
    + + +
    +
    + + +
    +
    + +
    +

    + + + + +{% endblock %} diff --git a/datasette/views/special.py b/datasette/views/special.py index b754a2f0..9922a621 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -235,3 +235,11 @@ class CreateTokenView(BaseView): "token_bits": token_bits, }, ) + + +class ApiExplorerView(BaseView): + name = "api_explorer" + has_json_alternate = False + + async def get(self, request): + return await self.render(["api_explorer.html"], request) diff --git a/tests/test_docs.py b/tests/test_docs.py index cd5a6c13..e9b813fe 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -62,7 +62,7 @@ def documented_views(): if first_word.endswith("View"): view_labels.add(first_word) # We deliberately don't document these: - view_labels.update(("PatternPortfolioView", "AuthTokenView")) + view_labels.update(("PatternPortfolioView", "AuthTokenView", "ApiExplorerView")) return view_labels From 9eb9ffae3ddd4e8ff0b713bf6fd6a0afed3368d7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 30 Oct 2022 13:09:55 -0700 Subject: [PATCH 0368/1103] Drop API token requirement from API explorer, refs #1871 --- datasette/default_permissions.py | 9 +++++++++ datasette/templates/api_explorer.html | 13 ++++--------- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 87684e2a..151ba2b5 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -131,3 +131,12 @@ def register_commands(cli): if debug: click.echo("\nDecoded:\n") click.echo(json.dumps(ds.unsign(token, namespace="token"), indent=2)) + + +@hookimpl +def skip_csrf(scope): + # Skip CSRF check for requests with content-type: application/json + if scope["type"] == "http": + headers = scope.get("headers") or {} + if dict(headers).get(b"content-type") == b"application/json": + return True diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index 034bee60..01b182d8 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -15,16 +15,13 @@ {% endif %}
    -
    - - -
    - +
    -
    - +
    + +

    @@ -46,7 +43,6 @@ form.addEventListener("submit", (ev) => { var formData = new FormData(form); var json = formData.get('json'); var path = formData.get('path'); - var token = formData.get('token'); // Validate JSON try { var data = JSON.parse(json); @@ -60,7 +56,6 @@ form.addEventListener("submit", (ev) => { body: json, headers: { 'Content-Type': 'application/json', - 'Authorization': `Bearer ${token}` } }).then(r => r.json()).then(r => { alert(JSON.stringify(r, null, 2)); From fedbfcc36873366143195d8fe124e1859bf88346 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 30 Oct 2022 14:49:07 -0700 Subject: [PATCH 0369/1103] Neater display of output and errors in API explorer, refs #1871 --- datasette/templates/api_explorer.html | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index 01b182d8..38fdb7bc 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -26,6 +26,12 @@

    + + """.format( escape(ex.sql) ) diff --git a/tests/test_api.py b/tests/test_api.py index ad74d16e..4027a7a5 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -662,7 +662,11 @@ def test_sql_time_limit(app_client_shorter_time_limit): "

    SQL query took too long. The time limit is controlled by the\n" 'sql_time_limit_ms\n' "configuration option.

    \n" - "
    select sleep(0.5)
    " + '\n' + "" ), "status": 400, "title": "SQL Interrupted", diff --git a/tests/test_html.py b/tests/test_html.py index 4b394199..7cfe9d90 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -172,7 +172,7 @@ def test_sql_time_limit(app_client_shorter_time_limit): """ sql_time_limit_ms """.strip(), - "
    select sleep(0.5)
    ", + '', ] for expected_html_fragment in expected_html_fragments: assert expected_html_fragment in response.text From 93a02281dad2f23da84210f6ae9c63777ad8af5e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Nov 2022 10:22:26 -0700 Subject: [PATCH 0374/1103] Show interrupted query in resizing textarea, closes #1876 --- datasette/views/base.py | 6 +++++- tests/test_api.py | 6 +++++- tests/test_html.py | 2 +- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index 67aa3a42..6b01fdd2 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -378,7 +378,11 @@ class DataView(BaseView):

    SQL query took too long. The time limit is controlled by the sql_time_limit_ms configuration option.

    -
    {}
    + + """.format( escape(ex.sql) ) diff --git a/tests/test_api.py b/tests/test_api.py index ebd675b9..de0223e2 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -662,7 +662,11 @@ def test_sql_time_limit(app_client_shorter_time_limit): "

    SQL query took too long. The time limit is controlled by the\n" 'sql_time_limit_ms\n' "configuration option.

    \n" - "
    select sleep(0.5)
    " + '\n' + "" ), "status": 400, "title": "SQL Interrupted", diff --git a/tests/test_html.py b/tests/test_html.py index 4b394199..7cfe9d90 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -172,7 +172,7 @@ def test_sql_time_limit(app_client_shorter_time_limit): """ sql_time_limit_ms """.strip(), - "
    select sleep(0.5)
    ", + '', ] for expected_html_fragment in expected_html_fragments: assert expected_html_fragment in response.text From 9bec7c38eb93cde5afb16df9bdd96aea2a5b0459 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Nov 2022 11:07:59 -0700 Subject: [PATCH 0375/1103] ignore and replace options for bulk inserts, refs #1873 Also removed the rule that you cannot include primary keys in the rows you insert. And added validation that catches invalid parameters in the incoming JSON. And renamed "inserted" to "rows" in the returned JSON for return_rows: true --- datasette/views/table.py | 41 ++++++++++++++------ docs/json_api.rst | 4 +- tests/test_api_write.py | 83 ++++++++++++++++++++++++++++++++++++++-- 3 files changed, 111 insertions(+), 17 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 1e3d566e..7692a4e3 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -1107,6 +1107,7 @@ class TableInsertView(BaseView): if not isinstance(data, dict): return _errors(["JSON must be a dictionary"]) keys = data.keys() + # keys must contain "row" or "rows" if "row" not in keys and "rows" not in keys: return _errors(['JSON must have one or other of "row" or "rows"']) @@ -1126,19 +1127,31 @@ class TableInsertView(BaseView): for row in rows: if not isinstance(row, dict): return _errors(['"rows" must be a list of dictionaries']) + # Does this exceed max_insert_rows? max_insert_rows = self.ds.setting("max_insert_rows") if len(rows) > max_insert_rows: return _errors( ["Too many rows, maximum allowed is {}".format(max_insert_rows)] ) + + # Validate other parameters + extras = { + key: value for key, value in data.items() if key not in ("row", "rows") + } + valid_extras = {"return_rows", "ignore", "replace"} + invalid_extras = extras.keys() - valid_extras + if invalid_extras: + return _errors( + ['Invalid parameter: "{}"'.format('", "'.join(sorted(invalid_extras)))] + ) + if extras.get("ignore") and extras.get("replace"): + return _errors(['Cannot use "ignore" and "replace" at the same time']) + # Validate columns of each row - columns = await db.table_columns(table_name) - # TODO: There are cases where pks are OK, if not using auto-incrementing pk - pks = await db.primary_keys(table_name) - allowed_columns = set(columns) - set(pks) + columns = set(await db.table_columns(table_name)) for i, row in enumerate(rows): - invalid_columns = set(row.keys()) - allowed_columns + invalid_columns = set(row.keys()) - columns if invalid_columns: errors.append( "Row {} has invalid columns: {}".format( @@ -1147,8 +1160,7 @@ class TableInsertView(BaseView): ) if errors: return _errors(errors) - extra = {key: data[key] for key in data if key not in ("rows", "row")} - return rows, errors, extra + return rows, errors, extras async def post(self, request): database_route = tilde_decode(request.url_vars["database"]) @@ -1168,18 +1180,23 @@ class TableInsertView(BaseView): request.actor, "insert-row", resource=(database_name, table_name) ): return _error(["Permission denied"], 403) - rows, errors, extra = await self._validate_data(request, db, table_name) + rows, errors, extras = await self._validate_data(request, db, table_name) if errors: return _error(errors, 400) - should_return = bool(extra.get("return_rows", False)) + ignore = extras.get("ignore") + replace = extras.get("replace") + + should_return = bool(extras.get("return_rows", False)) # Insert rows def insert_rows(conn): table = sqlite_utils.Database(conn)[table_name] if should_return: rowids = [] for row in rows: - 
rowids.append(table.insert(row).last_rowid) + rowids.append( + table.insert(row, ignore=ignore, replace=replace).last_rowid + ) return list( table.rows_where( "rowid in ({})".format(",".join("?" for _ in rowids)), @@ -1187,12 +1204,12 @@ class TableInsertView(BaseView): ) ) else: - table.insert_all(rows) + table.insert_all(rows, ignore=ignore, replace=replace) rows = await db.execute_write_fn(insert_rows) result = {"ok": True} if should_return: - result["inserted"] = rows + result["rows"] = rows return Response.json(result, status=201) diff --git a/docs/json_api.rst b/docs/json_api.rst index da4500ab..34c13211 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -489,7 +489,7 @@ If successful, this will return a ``201`` status code and the newly inserted row .. code-block:: json { - "inserted": [ + "rows": [ { "id": 1, "column1": "value1", @@ -538,7 +538,7 @@ To return the newly inserted rows, add the ``"return_rows": true`` key to the re "return_rows": true } -This will return the same ``"inserted"`` key as the single row example above. There is a small performance penalty for using this option. +This will return the same ``"rows"`` key as the single row example above. There is a small performance penalty for using this option. .. _RowDeleteView: diff --git a/tests/test_api_write.py b/tests/test_api_write.py index 1cfba104..d0b0f324 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -37,7 +37,7 @@ async def test_write_row(ds_write): ) expected_row = {"id": 1, "title": "Test", "score": 1.0} assert response.status_code == 201 - assert response.json()["inserted"] == [expected_row] + assert response.json()["rows"] == [expected_row] rows = (await ds_write.get_database("data").execute("select * from docs")).rows assert dict(rows[0]) == expected_row @@ -70,7 +70,7 @@ async def test_write_rows(ds_write, return_rows): ] assert response.json()["ok"] is True if return_rows: - assert response.json()["inserted"] == actual_rows + assert response.json()["rows"] == actual_rows @pytest.mark.asyncio @@ -156,6 +156,27 @@ async def test_write_rows(ds_write, return_rows): 400, ["Too many rows, maximum allowed is 100"], ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"}], "ignore": True, "replace": True}, + None, + 400, + ['Cannot use "ignore" and "replace" at the same time'], + ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"}], "invalid_param": True}, + None, + 400, + ['Invalid parameter: "invalid_param"'], + ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"}], "one": True, "two": True}, + None, + 400, + ['Invalid parameter: "one", "two"'], + ), # Validate columns of each row ( "/data/docs/-/insert", @@ -196,6 +217,62 @@ async def test_write_row_errors( assert response.json()["errors"] == expected_errors +@pytest.mark.asyncio +@pytest.mark.parametrize( + "ignore,replace,expected_rows", + ( + ( + True, + False, + [ + {"id": 1, "title": "Exists", "score": None}, + ], + ), + ( + False, + True, + [ + {"id": 1, "title": "One", "score": None}, + ], + ), + ), +) +@pytest.mark.parametrize("should_return", (True, False)) +async def test_insert_ignore_replace( + ds_write, ignore, replace, expected_rows, should_return +): + await ds_write.get_database("data").execute_write( + "insert into docs (id, title) values (1, 'Exists')" + ) + token = write_token(ds_write) + data = {"rows": [{"id": 1, "title": "One"}]} + if ignore: + data["ignore"] = True + if replace: + data["replace"] = True + if should_return: + data["return_rows"] = True + response = await 
ds_write.client.post( + "/data/docs/-/insert", + json=data, + headers={ + "Authorization": "Bearer {}".format(token), + "Content-Type": "application/json", + }, + ) + assert response.status_code == 201 + actual_rows = [ + dict(r) + for r in ( + await ds_write.get_database("data").execute("select * from docs") + ).rows + ] + assert actual_rows == expected_rows + assert response.json()["ok"] is True + if should_return: + assert response.json()["rows"] == expected_rows + + @pytest.mark.asyncio @pytest.mark.parametrize("scenario", ("no_token", "no_perm", "bad_table", "has_perm")) async def test_delete_row(ds_write, scenario): @@ -217,7 +294,7 @@ async def test_delete_row(ds_write, scenario): }, ) assert insert_response.status_code == 201 - pk = insert_response.json()["inserted"][0]["id"] + pk = insert_response.json()["rows"][0]["id"] path = "/data/{}/{}/-/delete".format( "docs" if scenario != "bad_table" else "bad_table", pk From 497290beaf32e6b779f9683ef15f1c5bc142a41a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Nov 2022 12:59:17 -0700 Subject: [PATCH 0376/1103] Handle database errors in /-/insert, refs #1866, #1873 Also improved API explorer to show HTTP status of response, refs #1871 --- datasette/templates/api_explorer.html | 14 +++++++++----- datasette/views/table.py | 5 ++++- tests/test_api_write.py | 11 +++++++++++ 3 files changed, 24 insertions(+), 6 deletions(-) diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index 38fdb7bc..93bacde3 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -27,7 +27,8 @@ @@ -64,12 +65,15 @@ form.addEventListener("submit", (ev) => { headers: { 'Content-Type': 'application/json', } - }).then(r => r.json()).then(r => { + }).then(r => { + document.getElementById('response-status').textContent = r.status; + return r.json(); + }).then(data => { var errorList = output.querySelector('.errors'); - if (r.errors) { + if (data.errors) { errorList.style.display = 'block'; errorList.innerHTML = ''; - r.errors.forEach(error => { + data.errors.forEach(error => { var li = document.createElement('li'); li.textContent = error; errorList.appendChild(li); @@ -77,7 +81,7 @@ form.addEventListener("submit", (ev) => { } else { errorList.style.display = 'none'; } - output.querySelector('pre').innerText = JSON.stringify(r, null, 2); + output.querySelector('pre').innerText = JSON.stringify(data, null, 2); output.style.display = 'block'; }).catch(err => { alert("Error: " + err); diff --git a/datasette/views/table.py b/datasette/views/table.py index 7692a4e3..61227206 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -1206,7 +1206,10 @@ class TableInsertView(BaseView): else: table.insert_all(rows, ignore=ignore, replace=replace) - rows = await db.execute_write_fn(insert_rows) + try: + rows = await db.execute_write_fn(insert_rows) + except Exception as e: + return _error([str(e)]) result = {"ok": True} if should_return: result["rows"] = rows diff --git a/tests/test_api_write.py b/tests/test_api_write.py index d0b0f324..0b567f48 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -156,6 +156,13 @@ async def test_write_rows(ds_write, return_rows): 400, ["Too many rows, maximum allowed is 100"], ), + ( + "/data/docs/-/insert", + {"rows": [{"id": 1, "title": "Test"}]}, + "duplicate_id", + 400, + ["UNIQUE constraint failed: docs.id"], + ), ( "/data/docs/-/insert", {"rows": [{"title": "Test"}], "ignore": True, "replace": True}, @@ -194,6 +201,10 @@ 
async def test_write_row_errors( ds_write, path, input, special_case, expected_status, expected_errors ): token = write_token(ds_write) + if special_case == "duplicate_id": + await ds_write.get_database("data").execute_write( + "insert into docs (id) values (1)" + ) if special_case == "bad_token": token += "bad" kwargs = dict( From 0b166befc0096fca30d71e19608a928d59c331a4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Nov 2022 17:31:22 -0700 Subject: [PATCH 0377/1103] API explorer can now do GET, has JSON syntax highlighting Refs #1871 --- .../static/json-format-highlight-1.0.1.js | 43 +++++++++++ datasette/templates/api_explorer.html | 77 +++++++++++++++---- 2 files changed, 103 insertions(+), 17 deletions(-) create mode 100644 datasette/static/json-format-highlight-1.0.1.js diff --git a/datasette/static/json-format-highlight-1.0.1.js b/datasette/static/json-format-highlight-1.0.1.js new file mode 100644 index 00000000..e87c76e1 --- /dev/null +++ b/datasette/static/json-format-highlight-1.0.1.js @@ -0,0 +1,43 @@ +/* +https://github.com/luyilin/json-format-highlight +From https://unpkg.com/json-format-highlight@1.0.1/dist/json-format-highlight.js +MIT Licensed +*/ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : + typeof define === 'function' && define.amd ? define(factory) : + (global.jsonFormatHighlight = factory()); +}(this, (function () { 'use strict'; + +var defaultColors = { + keyColor: 'dimgray', + numberColor: 'lightskyblue', + stringColor: 'lightcoral', + trueColor: 'lightseagreen', + falseColor: '#f66578', + nullColor: 'cornflowerblue' +}; + +function index (json, colorOptions) { + if ( colorOptions === void 0 ) colorOptions = {}; + + if (!json) { return; } + if (typeof json !== 'string') { + json = JSON.stringify(json, null, 2); + } + var colors = Object.assign({}, defaultColors, colorOptions); + json = json.replace(/&/g, '&').replace(//g, '>'); + return json.replace(/("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+]?\d+)?)/g, function (match) { + var color = colors.numberColor; + if (/^"/.test(match)) { + color = /:$/.test(match) ? colors.keyColor : colors.stringColor; + } else { + color = /true/.test(match) ? colors.trueColor : /false/.test(match) ? colors.falseColor : /null/.test(match) ? colors.nullColor : color; + } + return ("" + match + ""); + }); +} + +return index; + +}))); diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index 93bacde3..de5337e3 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -2,6 +2,10 @@ {% block title %}API Explorer{% endblock %} +{% block extra_head %} + +{% endblock %} + {% block content %}

    API Explorer

    @@ -14,17 +18,30 @@ {% endfor %} {% endif %} -
    -
    - - -
    -
    - - -
    -

    - +
    + GET
    +
    + + + +
    + +
    +
    + POST
    +
    + + +
    +
    + + +
    +

    + +
    {% else %} - {% if not canned_write and not error %} + {% if not canned_query_write and not error %}

    0 results

    {% endif %} {% endif %} diff --git a/datasette/views/database.py b/datasette/views/database.py index 0770a380..658c35e6 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -1,4 +1,3 @@ -from asyncinject import Registry from dataclasses import dataclass, field from typing import Callable from urllib.parse import parse_qsl, urlencode @@ -33,7 +32,7 @@ from datasette.utils import ( from datasette.utils.asgi import AsgiFileDownload, NotFound, Response, Forbidden from datasette.plugins import pm -from .base import BaseView, DatasetteError, DataView, View, _error, stream_csv +from .base import BaseView, DatasetteError, View, _error, stream_csv class DatabaseView(View): @@ -57,7 +56,7 @@ class DatabaseView(View): sql = (request.args.get("sql") or "").strip() if sql: - return await query_view(request, datasette) + return await QueryView()(request, datasette) if format_ not in ("html", "json"): raise NotFound("Invalid format: {}".format(format_)) @@ -65,10 +64,6 @@ class DatabaseView(View): metadata = (datasette.metadata("databases") or {}).get(database, {}) datasette.update_with_inherited_metadata(metadata) - table_counts = await db.table_counts(5) - hidden_table_names = set(await db.hidden_table_names()) - all_foreign_keys = await db.get_all_foreign_keys() - sql_views = [] for view_name in await db.view_names(): view_visible, view_private = await datasette.check_visibility( @@ -196,8 +191,13 @@ class QueryContext: # urls: dict = field( # metadata={"help": "Object containing URL helpers like `database()`"} # ) - canned_write: bool = field( - metadata={"help": "Boolean indicating if this canned query allows writes"} + canned_query_write: bool = field( + metadata={ + "help": "Boolean indicating if this is a canned query that allows writes" + } + ) + metadata: dict = field( + metadata={"help": "Metadata about the database or the canned query"} ) db_is_immutable: bool = field( metadata={"help": "Boolean indicating if this database is immutable"} @@ -232,7 +232,6 @@ class QueryContext: show_hide_hidden: str = field( metadata={"help": "Hidden input field for the _show_sql parameter"} ) - metadata: dict = field(metadata={"help": "Metadata about the query/database"}) database_color: Callable = field( metadata={"help": "Function that returns a color for a given database name"} ) @@ -242,6 +241,12 @@ class QueryContext: alternate_url_json: str = field( metadata={"help": "URL for alternate JSON version of this page"} ) + # TODO: refactor this to somewhere else, probably ds.render_template() + select_templates: list = field( + metadata={ + "help": "List of templates that were considered for rendering this page" + } + ) async def get_tables(datasette, request, db): @@ -320,287 +325,105 @@ async def database_download(request, datasette): ) -async def query_view( - request, - datasette, - # canned_query=None, - # _size=None, - # named_parameters=None, - # write=False, -): - db = await datasette.resolve_database(request) - database = db.name - # Flattened because of ?sql=&name1=value1&name2=value2 feature - params = {key: request.args.get(key) for key in request.args} - sql = None - if "sql" in params: - sql = params.pop("sql") - if "_shape" in params: - params.pop("_shape") +class QueryView(View): + async def post(self, request, datasette): + from datasette.app import TableNotFound - # extras come from original request.args to avoid being flattened - extras = request.args.getlist("_extra") + db = await datasette.resolve_database(request) - # TODO: Behave differently for canned query 
here: - await datasette.ensure_permissions(request.actor, [("execute-sql", database)]) - - _, private = await datasette.check_visibility( - request.actor, - permissions=[ - ("view-database", database), - "view-instance", - ], - ) - - extra_args = {} - if params.get("_timelimit"): - extra_args["custom_time_limit"] = int(params["_timelimit"]) - - format_ = request.url_vars.get("format") or "html" - query_error = None - try: - validate_sql_select(sql) - results = await datasette.execute( - database, sql, params, truncate=True, **extra_args - ) - columns = results.columns - rows = results.rows - except QueryInterrupted as ex: - raise DatasetteError( - textwrap.dedent( - """ -

    SQL query took too long. The time limit is controlled by the - sql_time_limit_ms - configuration option.

    - - - """.format( - markupsafe.escape(ex.sql) - ) - ).strip(), - title="SQL Interrupted", - status=400, - message_is_html=True, - ) - except sqlite3.DatabaseError as ex: - query_error = str(ex) - results = None - rows = [] - columns = [] - except (sqlite3.OperationalError, InvalidSql) as ex: - raise DatasetteError(str(ex), title="Invalid SQL", status=400) - except sqlite3.OperationalError as ex: - raise DatasetteError(str(ex)) - except DatasetteError: - raise - - # Handle formats from plugins - if format_ == "csv": - - async def fetch_data_for_csv(request, _next=None): - results = await db.execute(sql, params, truncate=True) - data = {"rows": results.rows, "columns": results.columns} - return data, None, None - - return await stream_csv(datasette, fetch_data_for_csv, request, db.name) - elif format_ in datasette.renderers.keys(): - # Dispatch request to the correct output format renderer - # (CSV is not handled here due to streaming) - result = call_with_supported_arguments( - datasette.renderers[format_][0], - datasette=datasette, - columns=columns, - rows=rows, - sql=sql, - query_name=None, - database=database, - table=None, - request=request, - view_name="table", - truncated=results.truncated if results else False, - error=query_error, - # These will be deprecated in Datasette 1.0: - args=request.args, - data={"rows": rows, "columns": columns}, - ) - if asyncio.iscoroutine(result): - result = await result - if result is None: - raise NotFound("No data") - if isinstance(result, dict): - r = Response( - body=result.get("body"), - status=result.get("status_code") or 200, - content_type=result.get("content_type", "text/plain"), - headers=result.get("headers"), + # We must be a canned query + table_found = False + try: + await datasette.resolve_table(request) + table_found = True + except TableNotFound as table_not_found: + canned_query = await datasette.get_canned_query( + table_not_found.database_name, table_not_found.table, request.actor ) - elif isinstance(result, Response): - r = result - # if status_code is not None: - # # Over-ride the status code - # r.status = status_code - else: - assert False, f"{result} should be dict or Response" - elif format_ == "html": - headers = {} - templates = [f"query-{to_css_class(database)}.html", "query.html"] - template = datasette.jinja_env.select_template(templates) - alternate_url_json = datasette.absolute_url( - request, - datasette.urls.path(path_with_format(request=request, format="json")), - ) - data = {} - headers.update( - { - "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( - alternate_url_json - ) - } - ) - metadata = (datasette.metadata("databases") or {}).get(database, {}) - datasette.update_with_inherited_metadata(metadata) + if canned_query is None: + raise + if table_found: + # That should not have happened + raise DatasetteError("Unexpected table found on POST", status=404) - renderers = {} - for key, (_, can_render) in datasette.renderers.items(): - it_can_render = call_with_supported_arguments( - can_render, - datasette=datasette, - columns=data.get("columns") or [], - rows=data.get("rows") or [], - sql=data.get("query", {}).get("sql", None), - query_name=data.get("query_name"), - database=database, - table=data.get("table"), - request=request, - view_name="database", + # If database is immutable, return an error + if not db.is_mutable: + raise Forbidden("Database is immutable") + + # Process the POST + body = await request.post_body() + body = body.decode("utf-8").strip() + if body.startswith("{") and 
body.endswith("}"): + params = json.loads(body) + # But we want key=value strings + for key, value in params.items(): + params[key] = str(value) + else: + params = dict(parse_qsl(body, keep_blank_values=True)) + # Should we return JSON? + should_return_json = ( + request.headers.get("accept") == "application/json" + or request.args.get("_json") + or params.get("_json") + ) + params_for_query = MagicParameters(params, request, datasette) + ok = None + redirect_url = None + try: + cursor = await db.execute_write(canned_query["sql"], params_for_query) + message = canned_query.get( + "on_success_message" + ) or "Query executed, {} row{} affected".format( + cursor.rowcount, "" if cursor.rowcount == 1 else "s" + ) + message_type = datasette.INFO + redirect_url = canned_query.get("on_success_redirect") + ok = True + except Exception as ex: + message = canned_query.get("on_error_message") or str(ex) + message_type = datasette.ERROR + redirect_url = canned_query.get("on_error_redirect") + ok = False + if should_return_json: + return Response.json( + { + "ok": ok, + "message": message, + "redirect": redirect_url, + } ) - it_can_render = await await_me_maybe(it_can_render) - if it_can_render: - renderers[key] = datasette.urls.path( - path_with_format(request=request, format=key) - ) - - allow_execute_sql = await datasette.permission_allowed( - request.actor, "execute-sql", database - ) - - show_hide_hidden = "" - if metadata.get("hide_sql"): - if bool(params.get("_show_sql")): - show_hide_link = path_with_removed_args(request, {"_show_sql"}) - show_hide_text = "hide" - show_hide_hidden = '' - else: - show_hide_link = path_with_added_args(request, {"_show_sql": 1}) - show_hide_text = "show" else: - if bool(params.get("_hide_sql")): - show_hide_link = path_with_removed_args(request, {"_hide_sql"}) - show_hide_text = "show" - show_hide_hidden = '' - else: - show_hide_link = path_with_added_args(request, {"_hide_sql": 1}) - show_hide_text = "hide" - hide_sql = show_hide_text == "show" + datasette.add_message(request, message, message_type) + return Response.redirect(redirect_url or request.path) - # Extract any :named parameters - named_parameters = await derive_named_parameters( - datasette.get_database(database), sql - ) - named_parameter_values = { - named_parameter: params.get(named_parameter) or "" - for named_parameter in named_parameters - if not named_parameter.startswith("_") - } + async def get(self, request, datasette): + from datasette.app import TableNotFound - # Set to blank string if missing from params - for named_parameter in named_parameters: - if named_parameter not in params and not named_parameter.startswith("_"): - params[named_parameter] = "" - - r = Response.html( - await datasette.render_template( - template, - QueryContext( - database=database, - query={ - "sql": sql, - "params": params, - }, - canned_query=None, - private=private, - canned_write=False, - db_is_immutable=not db.is_mutable, - error=query_error, - hide_sql=hide_sql, - show_hide_link=datasette.urls.path(show_hide_link), - show_hide_text=show_hide_text, - editable=True, # TODO - allow_execute_sql=allow_execute_sql, - tables=await get_tables(datasette, request, db), - named_parameter_values=named_parameter_values, - edit_sql_url="todo", - display_rows=await display_rows( - datasette, database, request, rows, columns - ), - table_columns=await _table_columns(datasette, database) - if allow_execute_sql - else {}, - columns=columns, - renderers=renderers, - url_csv=datasette.urls.path( - path_with_format( - 
request=request, format="csv", extra_qs={"_size": "max"} - ) - ), - show_hide_hidden=markupsafe.Markup(show_hide_hidden), - metadata=metadata, - database_color=lambda _: "#ff0000", - alternate_url_json=alternate_url_json, - ), - request=request, - view_name="database", - ), - headers=headers, - ) - else: - assert False, "Invalid format: {}".format(format_) - if datasette.cors: - add_cors_headers(r.headers) - return r - - -class QueryView(DataView): - async def data( - self, - request, - sql, - editable=True, - canned_query=None, - metadata=None, - _size=None, - named_parameters=None, - write=False, - default_labels=None, - ): - db = await self.ds.resolve_database(request) + db = await datasette.resolve_database(request) database = db.name - params = {key: request.args.get(key) for key in request.args} - if "sql" in params: - params.pop("sql") - if "_shape" in params: - params.pop("_shape") + + # Are we a canned query? + canned_query = None + canned_query_write = False + if "table" in request.url_vars: + try: + await datasette.resolve_table(request) + except TableNotFound as table_not_found: + # Was this actually a canned query? + canned_query = await datasette.get_canned_query( + table_not_found.database_name, table_not_found.table, request.actor + ) + if canned_query is None: + raise + canned_query_write = bool(canned_query.get("write")) private = False if canned_query: # Respect canned query permissions - visible, private = await self.ds.check_visibility( + visible, private = await datasette.check_visibility( request.actor, permissions=[ - ("view-query", (database, canned_query)), + ("view-query", (database, canned_query["name"])), ("view-database", database), "view-instance", ], @@ -609,18 +432,32 @@ class QueryView(DataView): raise Forbidden("You do not have permission to view this query") else: - await self.ds.ensure_permissions(request.actor, [("execute-sql", database)]) + await datasette.ensure_permissions( + request.actor, [("execute-sql", database)] + ) + + # Flattened because of ?sql=&name1=value1&name2=value2 feature + params = {key: request.args.get(key) for key in request.args} + sql = None + + if canned_query: + sql = canned_query["sql"] + elif "sql" in params: + sql = params.pop("sql") # Extract any :named parameters - named_parameters = named_parameters or await derive_named_parameters( - self.ds.get_database(database), sql - ) + named_parameters = [] + if canned_query and canned_query.get("params"): + named_parameters = canned_query["params"] + if not named_parameters: + named_parameters = await derive_named_parameters( + datasette.get_database(database), sql + ) named_parameter_values = { named_parameter: params.get(named_parameter) or "" for named_parameter in named_parameters if not named_parameter.startswith("_") } - # Set to blank string if missing from params for named_parameter in named_parameters: if named_parameter not in params and not named_parameter.startswith("_"): @@ -629,212 +466,159 @@ class QueryView(DataView): extra_args = {} if params.get("_timelimit"): extra_args["custom_time_limit"] = int(params["_timelimit"]) - if _size: - extra_args["page_size"] = _size - templates = [f"query-{to_css_class(database)}.html", "query.html"] - if canned_query: - templates.insert( - 0, - f"query-{to_css_class(database)}-{to_css_class(canned_query)}.html", - ) + format_ = request.url_vars.get("format") or "html" query_error = None + results = None + rows = [] + columns = [] - # Execute query - as write or as read - if write: - if request.method == "POST": - # If database 
is immutable, return an error - if not db.is_mutable: - raise Forbidden("Database is immutable") - body = await request.post_body() - body = body.decode("utf-8").strip() - if body.startswith("{") and body.endswith("}"): - params = json.loads(body) - # But we want key=value strings - for key, value in params.items(): - params[key] = str(value) - else: - params = dict(parse_qsl(body, keep_blank_values=True)) - # Should we return JSON? - should_return_json = ( - request.headers.get("accept") == "application/json" - or request.args.get("_json") - or params.get("_json") - ) - if canned_query: - params_for_query = MagicParameters(params, request, self.ds) - else: - params_for_query = params - ok = None - try: - cursor = await self.ds.databases[database].execute_write( - sql, params_for_query - ) - message = metadata.get( - "on_success_message" - ) or "Query executed, {} row{} affected".format( - cursor.rowcount, "" if cursor.rowcount == 1 else "s" - ) - message_type = self.ds.INFO - redirect_url = metadata.get("on_success_redirect") - ok = True - except Exception as e: - message = metadata.get("on_error_message") or str(e) - message_type = self.ds.ERROR - redirect_url = metadata.get("on_error_redirect") - ok = False - if should_return_json: - return Response.json( - { - "ok": ok, - "message": message, - "redirect": redirect_url, - } - ) - else: - self.ds.add_message(request, message, message_type) - return self.redirect(request, redirect_url or request.path) - else: + params_for_query = params - async def extra_template(): - return { - "request": request, - "db_is_immutable": not db.is_mutable, - "path_with_added_args": path_with_added_args, - "path_with_removed_args": path_with_removed_args, - "named_parameter_values": named_parameter_values, - "canned_query": canned_query, - "success_message": request.args.get("_success") or "", - "canned_write": True, - } - - return ( - { - "database": database, - "rows": [], - "truncated": False, - "columns": [], - "query": {"sql": sql, "params": params}, - "private": private, - }, - extra_template, - templates, - ) - else: # Not a write - if canned_query: - params_for_query = MagicParameters(params, request, self.ds) - else: - params_for_query = params + if not canned_query_write: try: - results = await self.ds.execute( + if not canned_query: + # For regular queries we only allow SELECT, plus other rules + validate_sql_select(sql) + else: + # Canned queries can run magic parameters + params_for_query = MagicParameters(params, request, datasette) + results = await datasette.execute( database, sql, params_for_query, truncate=True, **extra_args ) - columns = [r[0] for r in results.description] - except sqlite3.DatabaseError as e: - query_error = e + columns = results.columns + rows = results.rows + except QueryInterrupted as ex: + raise DatasetteError( + textwrap.dedent( + """ +

    SQL query took too long. The time limit is controlled by the + sql_time_limit_ms + configuration option.

    + + + """.format( + markupsafe.escape(ex.sql) + ) + ).strip(), + title="SQL Interrupted", + status=400, + message_is_html=True, + ) + except sqlite3.DatabaseError as ex: + query_error = str(ex) results = None + rows = [] columns = [] + except (sqlite3.OperationalError, InvalidSql) as ex: + raise DatasetteError(str(ex), title="Invalid SQL", status=400) + except sqlite3.OperationalError as ex: + raise DatasetteError(str(ex)) + except DatasetteError: + raise - allow_execute_sql = await self.ds.permission_allowed( - request.actor, "execute-sql", database - ) + # Handle formats from plugins + if format_ == "csv": - async def extra_template(): - display_rows = [] - truncate_cells = self.ds.setting("truncate_cells_html") - for row in results.rows if results else []: - display_row = [] - for column, value in zip(results.columns, row): - display_value = value - # Let the plugins have a go - # pylint: disable=no-member - plugin_display_value = None - for candidate in pm.hook.render_cell( - row=row, - value=value, - column=column, - table=None, - database=database, - datasette=self.ds, - request=request, - ): - candidate = await await_me_maybe(candidate) - if candidate is not None: - plugin_display_value = candidate - break - if plugin_display_value is not None: - display_value = plugin_display_value - else: - if value in ("", None): - display_value = markupsafe.Markup(" ") - elif is_url(str(display_value).strip()): - display_value = markupsafe.Markup( - '{truncated_url}'.format( - url=markupsafe.escape(value.strip()), - truncated_url=markupsafe.escape( - truncate_url(value.strip(), truncate_cells) - ), - ) - ) - elif isinstance(display_value, bytes): - blob_url = path_with_format( - request=request, - format="blob", - extra_qs={ - "_blob_column": column, - "_blob_hash": hashlib.sha256( - display_value - ).hexdigest(), - }, - ) - formatted = format_bytes(len(value)) - display_value = markupsafe.Markup( - '<Binary: {:,} byte{}>'.format( - blob_url, - ' title="{}"'.format(formatted) - if "bytes" not in formatted - else "", - len(value), - "" if len(value) == 1 else "s", - ) - ) - else: - display_value = str(value) - if truncate_cells and len(display_value) > truncate_cells: - display_value = ( - display_value[:truncate_cells] + "\u2026" - ) - display_row.append(display_value) - display_rows.append(display_row) + async def fetch_data_for_csv(request, _next=None): + results = await db.execute(sql, params, truncate=True) + data = {"rows": results.rows, "columns": results.columns} + return data, None, None - # Show 'Edit SQL' button only if: - # - User is allowed to execute SQL - # - SQL is an approved SELECT statement - # - No magic parameters, so no :_ in the SQL string - edit_sql_url = None - is_validated_sql = False - try: - validate_sql_select(sql) - is_validated_sql = True - except InvalidSql: - pass - if allow_execute_sql and is_validated_sql and ":_" not in sql: - edit_sql_url = ( - self.ds.urls.database(database) - + "?" 
- + urlencode( - { - **{ - "sql": sql, - }, - **named_parameter_values, - } - ) + return await stream_csv(datasette, fetch_data_for_csv, request, db.name) + elif format_ in datasette.renderers.keys(): + # Dispatch request to the correct output format renderer + # (CSV is not handled here due to streaming) + result = call_with_supported_arguments( + datasette.renderers[format_][0], + datasette=datasette, + columns=columns, + rows=rows, + sql=sql, + query_name=canned_query["name"] if canned_query else None, + database=database, + table=None, + request=request, + view_name="table", + truncated=results.truncated if results else False, + error=query_error, + # These will be deprecated in Datasette 1.0: + args=request.args, + data={"rows": rows, "columns": columns}, + ) + if asyncio.iscoroutine(result): + result = await result + if result is None: + raise NotFound("No data") + if isinstance(result, dict): + r = Response( + body=result.get("body"), + status=result.get("status_code") or 200, + content_type=result.get("content_type", "text/plain"), + headers=result.get("headers"), + ) + elif isinstance(result, Response): + r = result + # if status_code is not None: + # # Over-ride the status code + # r.status = status_code + else: + assert False, f"{result} should be dict or Response" + elif format_ == "html": + headers = {} + templates = [f"query-{to_css_class(database)}.html", "query.html"] + if canned_query: + templates.insert( + 0, + f"query-{to_css_class(database)}-{to_css_class(canned_query['name'])}.html", ) + template = datasette.jinja_env.select_template(templates) + alternate_url_json = datasette.absolute_url( + request, + datasette.urls.path(path_with_format(request=request, format="json")), + ) + data = {} + headers.update( + { + "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( + alternate_url_json + ) + } + ) + metadata = (datasette.metadata("databases") or {}).get(database, {}) + datasette.update_with_inherited_metadata(metadata) + + renderers = {} + for key, (_, can_render) in datasette.renderers.items(): + it_can_render = call_with_supported_arguments( + can_render, + datasette=datasette, + columns=data.get("columns") or [], + rows=data.get("rows") or [], + sql=data.get("query", {}).get("sql", None), + query_name=data.get("query_name"), + database=database, + table=data.get("table"), + request=request, + view_name="database", + ) + it_can_render = await await_me_maybe(it_can_render) + if it_can_render: + renderers[key] = datasette.urls.path( + path_with_format(request=request, format=key) + ) + + allow_execute_sql = await datasette.permission_allowed( + request.actor, "execute-sql", database + ) + show_hide_hidden = "" - if metadata.get("hide_sql"): + if canned_query and canned_query.get("hide_sql"): if bool(params.get("_show_sql")): show_hide_link = path_with_removed_args(request, {"_show_sql"}) show_hide_text = "hide" @@ -855,42 +639,86 @@ class QueryView(DataView): show_hide_link = path_with_added_args(request, {"_hide_sql": 1}) show_hide_text = "hide" hide_sql = show_hide_text == "show" - return { - "display_rows": display_rows, - "custom_sql": True, - "named_parameter_values": named_parameter_values, - "editable": editable, - "canned_query": canned_query, - "edit_sql_url": edit_sql_url, - "metadata": metadata, - "settings": self.ds.settings_dict(), - "request": request, - "show_hide_link": self.ds.urls.path(show_hide_link), - "show_hide_text": show_hide_text, - "show_hide_hidden": markupsafe.Markup(show_hide_hidden), - "hide_sql": hide_sql, - 
"table_columns": await _table_columns(self.ds, database) - if allow_execute_sql - else {}, - } - return ( - { - "ok": not query_error, - "database": database, - "query_name": canned_query, - "rows": results.rows if results else [], - "truncated": results.truncated if results else False, - "columns": columns, - "query": {"sql": sql, "params": params}, - "error": str(query_error) if query_error else None, - "private": private, - "allow_execute_sql": allow_execute_sql, - }, - extra_template, - templates, - 400 if query_error else 200, - ) + # Show 'Edit SQL' button only if: + # - User is allowed to execute SQL + # - SQL is an approved SELECT statement + # - No magic parameters, so no :_ in the SQL string + edit_sql_url = None + is_validated_sql = False + try: + validate_sql_select(sql) + is_validated_sql = True + except InvalidSql: + pass + if allow_execute_sql and is_validated_sql and ":_" not in sql: + edit_sql_url = ( + datasette.urls.database(database) + + "?" + + urlencode( + { + **{ + "sql": sql, + }, + **named_parameter_values, + } + ) + ) + + r = Response.html( + await datasette.render_template( + template, + QueryContext( + database=database, + query={ + "sql": sql, + "params": params, + }, + canned_query=canned_query["name"] if canned_query else None, + private=private, + canned_query_write=canned_query_write, + db_is_immutable=not db.is_mutable, + error=query_error, + hide_sql=hide_sql, + show_hide_link=datasette.urls.path(show_hide_link), + show_hide_text=show_hide_text, + editable=not canned_query, + allow_execute_sql=allow_execute_sql, + tables=await get_tables(datasette, request, db), + named_parameter_values=named_parameter_values, + edit_sql_url=edit_sql_url, + display_rows=await display_rows( + datasette, database, request, rows, columns + ), + table_columns=await _table_columns(datasette, database) + if allow_execute_sql + else {}, + columns=columns, + renderers=renderers, + url_csv=datasette.urls.path( + path_with_format( + request=request, format="csv", extra_qs={"_size": "max"} + ) + ), + show_hide_hidden=markupsafe.Markup(show_hide_hidden), + metadata=canned_query or metadata, + database_color=lambda _: "#ff0000", + alternate_url_json=alternate_url_json, + select_templates=[ + f"{'*' if template_name == template.name else ''}{template_name}" + for template_name in templates + ], + ), + request=request, + view_name="database", + ), + headers=headers, + ) + else: + assert False, "Invalid format: {}".format(format_) + if datasette.cors: + add_cors_headers(r.headers) + return r class MagicParameters(dict): diff --git a/datasette/views/table.py b/datasette/views/table.py index 77acfd95..28264e92 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -9,7 +9,6 @@ import markupsafe from datasette.plugins import pm from datasette.database import QueryInterrupted from datasette import tracer -from datasette.renderer import json_renderer from datasette.utils import ( add_cors_headers, await_me_maybe, @@ -21,7 +20,6 @@ from datasette.utils import ( tilde_encode, escape_sqlite, filters_should_redirect, - format_bytes, is_url, path_from_row_pks, path_with_added_args, @@ -38,7 +36,7 @@ from datasette.utils import ( from datasette.utils.asgi import BadRequest, Forbidden, NotFound, Response from datasette.filters import Filters import sqlite_utils -from .base import BaseView, DataView, DatasetteError, ureg, _error, stream_csv +from .base import BaseView, DatasetteError, ureg, _error, stream_csv from .database import QueryView LINK_WITH_LABEL = ( @@ -698,57 +696,6 @@ 
async def table_view(datasette, request): return response -class CannedQueryView(DataView): - def __init__(self, datasette): - self.ds = datasette - - async def post(self, request): - from datasette.app import TableNotFound - - try: - await self.ds.resolve_table(request) - except TableNotFound as e: - # Was this actually a canned query? - canned_query = await self.ds.get_canned_query( - e.database_name, e.table, request.actor - ) - if canned_query: - # Handle POST to a canned query - return await QueryView(self.ds).data( - request, - canned_query["sql"], - metadata=canned_query, - editable=False, - canned_query=e.table, - named_parameters=canned_query.get("params"), - write=bool(canned_query.get("write")), - ) - - return Response.text("Method not allowed", status=405) - - async def data(self, request, **kwargs): - from datasette.app import TableNotFound - - try: - await self.ds.resolve_table(request) - except TableNotFound as not_found: - canned_query = await self.ds.get_canned_query( - not_found.database_name, not_found.table, request.actor - ) - if canned_query: - return await QueryView(self.ds).data( - request, - canned_query["sql"], - metadata=canned_query, - editable=False, - canned_query=not_found.table, - named_parameters=canned_query.get("params"), - write=bool(canned_query.get("write")), - ) - else: - raise - - async def table_view_traced(datasette, request): from datasette.app import TableNotFound @@ -761,10 +708,7 @@ async def table_view_traced(datasette, request): ) # If this is a canned query, not a table, then dispatch to QueryView instead if canned_query: - if request.method == "POST": - return await CannedQueryView(datasette).post(request) - else: - return await CannedQueryView(datasette).get(request) + return await QueryView()(request, datasette) else: raise diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index d6a88733..e9ad3239 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -95,12 +95,12 @@ def test_insert(canned_write_client): csrftoken_from=True, cookies={"foo": "bar"}, ) - assert 302 == response.status - assert "/data/add_name?success" == response.headers["Location"] messages = canned_write_client.ds.unsign( response.cookies["ds_messages"], "messages" ) - assert [["Query executed, 1 row affected", 1]] == messages + assert messages == [["Query executed, 1 row affected", 1]] + assert response.status == 302 + assert response.headers["Location"] == "/data/add_name?success" @pytest.mark.parametrize( @@ -382,11 +382,11 @@ def test_magic_parameters_cannot_be_used_in_arbitrary_queries(magic_parameters_c def test_canned_write_custom_template(canned_write_client): response = canned_write_client.get("/data/update_name") assert response.status == 200 + assert "!!!CUSTOM_UPDATE_NAME_TEMPLATE!!!" in response.text assert ( "" in response.text ) - assert "!!!CUSTOM_UPDATE_NAME_TEMPLATE!!!" in response.text # And test for link rel=alternate while we're here: assert ( '' From 8920d425f4d417cfd998b61016c5ff3530cd34e1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 10:20:58 -0700 Subject: [PATCH 0619/1103] 1.0a3 release notes, smaller changes section - refs #2135 --- docs/changelog.rst | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index ee48d075..b4416f94 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,25 @@ Changelog ========= +.. 
_v1_0_a3: + +1.0a3 (2023-08-09) +------------------ + +This alpha release previews the updated design for Datasette's default JSON API. + +Smaller changes +~~~~~~~~~~~~~~~ + +- Datasette documentation now shows YAML examples for :ref:`metadata` by default, with a tab interface for switching to JSON. (:issue:`1153`) +- :ref:`plugin_register_output_renderer` plugins now have access to ``error`` and ``truncated`` arguments, allowing them to display error messages and take into account truncated results. (:issue:`2130`) +- ``render_cell()`` plugin hook now also supports an optional ``request`` argument. (:issue:`2007`) +- New ``Justfile`` to support development workflows for Datasette using `Just `__. +- ``datasette.render_template()`` can now accepts a ``datasette.views.Context`` subclass as an alternative to a dictionary. (:issue:`2127`) +- ``datasette install -e path`` option for editable installations, useful while developing plugins. (:issue:`2106`) +- When started with the ``--cors`` option Datasette now serves an ``Access-Control-Max-Age: 3600`` header, ensuring CORS OPTIONS requests are repeated no more than once an hour. (:issue:`2079`) +- Fixed a bug where the ``_internal`` database could display ``None`` instead of ``null`` for in-memory databases. (:issue:`1970`) + .. _v0_64_2: 0.64.2 (2023-03-08) From e34d09c6ec16ff5e7717e112afdad67f7c05a62a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 12:01:59 -0700 Subject: [PATCH 0620/1103] Don't include columns in query JSON, refs #2136 --- datasette/renderer.py | 8 +++++++- datasette/views/database.py | 2 +- tests/test_api.py | 1 - tests/test_cli_serve_get.py | 11 ++++++----- 4 files changed, 14 insertions(+), 8 deletions(-) diff --git a/datasette/renderer.py b/datasette/renderer.py index 0bd74e81..224031a7 100644 --- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -27,7 +27,7 @@ def convert_specific_columns_to_json(rows, columns, json_cols): return new_rows -def json_renderer(args, data, error, truncated=None): +def json_renderer(request, args, data, error, truncated=None): """Render a response as JSON""" status_code = 200 @@ -106,6 +106,12 @@ def json_renderer(args, data, error, truncated=None): "status": 400, "title": None, } + + # Don't include "columns" in output + # https://github.com/simonw/datasette/issues/2136 + if isinstance(data, dict) and "columns" not in request.args.getlist("_extra"): + data.pop("columns", None) + # Handle _nl option for _shape=array nl = args.get("_nl", "") if nl and shape == "array": diff --git a/datasette/views/database.py b/datasette/views/database.py index 658c35e6..cf76f3c2 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -548,7 +548,7 @@ class QueryView(View): error=query_error, # These will be deprecated in Datasette 1.0: args=request.args, - data={"rows": rows, "columns": columns}, + data={"ok": True, "rows": rows, "columns": columns}, ) if asyncio.iscoroutine(result): result = await result diff --git a/tests/test_api.py b/tests/test_api.py index 28415a0b..f96f571e 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -649,7 +649,6 @@ async def test_custom_sql(ds_client): {"content": "RENDER_CELL_DEMO"}, {"content": "RENDER_CELL_ASYNC"}, ], - "columns": ["content"], "ok": True, "truncated": False, } diff --git a/tests/test_cli_serve_get.py b/tests/test_cli_serve_get.py index 2e0390bb..dc7fc1e2 100644 --- a/tests/test_cli_serve_get.py +++ b/tests/test_cli_serve_get.py @@ -34,11 +34,12 @@ def test_serve_with_get(tmp_path_factory): 
"/_memory.json?sql=select+sqlite_version()", ], ) - assert 0 == result.exit_code, result.output - assert { - "truncated": False, - "columns": ["sqlite_version()"], - }.items() <= json.loads(result.output).items() + assert result.exit_code == 0, result.output + data = json.loads(result.output) + # Should have a single row with a single column + assert len(data["rows"]) == 1 + assert list(data["rows"][0].keys()) == ["sqlite_version()"] + assert set(data.keys()) == {"rows", "ok", "truncated"} # The plugin should have created hello.txt assert (plugins_dir / "hello.txt").read_text() == "hello" From 856ca68d94708c6e94673cb6bc28bf3e3ca17845 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 12:04:40 -0700 Subject: [PATCH 0621/1103] Update default JSON representation docs, refs #2135 --- docs/json_api.rst | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/docs/json_api.rst b/docs/json_api.rst index c273c2a8..16b997eb 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -9,10 +9,10 @@ through the Datasette user interface can also be accessed as JSON via the API. To access the API for a page, either click on the ``.json`` link on that page or edit the URL and add a ``.json`` extension to it. -.. _json_api_shapes: +.. _json_api_default: -Different shapes ----------------- +Default representation +---------------------- The default JSON representation of data from a SQLite table or custom query looks like this: @@ -21,7 +21,6 @@ looks like this: { "ok": true, - "next": null, "rows": [ { "id": 3, @@ -39,13 +38,22 @@ looks like this: "id": 1, "name": "San Francisco" } - ] + ], + "truncated": false } -The ``rows`` key is a list of objects, each one representing a row. ``next`` indicates if -there is another page, and ``ok`` is always ``true`` if an error did not occur. +``"ok"`` is always ``true`` if an error did not occur. -If ``next`` is present then the next page in the pagination set can be retrieved using ``?_next=VALUE``. +The ``"rows"`` key is a list of objects, each one representing a row. + +The ``"truncated"`` key lets you know if the query was truncated. This can happen if a SQL query returns more than 1,000 results (or the :ref:`setting_max_returned_rows` setting). + +For table pages, an additional key ``"next"`` may be present. This indicates that the next page in the pagination set can be retrieved using ``?_next=VALUE``. + +.. _json_api_shapes: + +Different shapes +---------------- The ``_shape`` parameter can be used to access alternative formats for the ``rows`` key which may be more convenient for your application. There are three From 90cb9ca58d910f49e8f117bbdd94df6f0855cf99 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 12:11:16 -0700 Subject: [PATCH 0622/1103] JSON changes in release notes, refs #2135 --- docs/changelog.rst | 35 ++++++++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b4416f94..4c70855b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -9,7 +9,40 @@ Changelog 1.0a3 (2023-08-09) ------------------ -This alpha release previews the updated design for Datasette's default JSON API. +This alpha release previews the updated design for Datasette's default JSON API. (:issue:`782`) + +The new :ref:`default JSON representation ` for both table pages (``/dbname/table.json``) and arbitrary SQL queries (``/dbname.json?sql=...``) is now shaped like this: + +.. 
code-block:: json + + { + "ok": true, + "rows": [ + { + "id": 3, + "name": "Detroit" + }, + { + "id": 2, + "name": "Los Angeles" + }, + { + "id": 4, + "name": "Memnonia" + }, + { + "id": 1, + "name": "San Francisco" + } + ], + "truncated": false + } + +Tables will include an additional ``"next"`` key for pagination, which can be passed to ``?_next=`` to fetch the next page of results. + +The various ``?_shape=`` options continue to work as before - see :ref:`json_api_shapes` for details. + +A new ``?_extra=`` mechanism is available for tables, but has not yet been stabilized or documented. Details on that are available in :issue:`262`. Smaller changes ~~~~~~~~~~~~~~~ From 19ab4552e212c9845a59461cc73e82d5ae8c278a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 12:13:11 -0700 Subject: [PATCH 0623/1103] Release 1.0a3 Closes #2135 Refs #262, #782, #1153, #1970, #2007, #2079, #2106, #2127, #2130 --- datasette/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 3b81ab21..61dee464 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "1.0a2" +__version__ = "1.0a3" __version_info__ = tuple(__version__.split(".")) From 4a42476bb7ce4c5ed941f944115dedd9bce34656 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 15:04:16 -0700 Subject: [PATCH 0624/1103] datasette plugins --requirements, closes #2133 --- datasette/cli.py | 12 ++++++++++-- docs/cli-reference.rst | 1 + docs/plugins.rst | 32 ++++++++++++++++++++++++++++---- tests/test_cli.py | 3 +++ 4 files changed, 42 insertions(+), 6 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 32266888..21fd25d6 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -223,15 +223,23 @@ pm.hook.publish_subcommand(publish=publish) @cli.command() @click.option("--all", help="Include built-in default plugins", is_flag=True) +@click.option( + "--requirements", help="Output requirements.txt of installed plugins", is_flag=True +) @click.option( "--plugins-dir", type=click.Path(exists=True, file_okay=False, dir_okay=True), help="Path to directory containing custom plugins", ) -def plugins(all, plugins_dir): +def plugins(all, requirements, plugins_dir): """List currently installed plugins""" app = Datasette([], plugins_dir=plugins_dir) - click.echo(json.dumps(app._plugins(all=all), indent=4)) + if requirements: + for plugin in app._plugins(): + if plugin["version"]: + click.echo("{}=={}".format(plugin["name"], plugin["version"])) + else: + click.echo(json.dumps(app._plugins(all=all), indent=4)) @cli.command() diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 2177fc9e..7a96d311 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -282,6 +282,7 @@ Output JSON showing all currently installed plugins, their versions, whether the Options: --all Include built-in default plugins + --requirements Output requirements.txt of installed plugins --plugins-dir DIRECTORY Path to directory containing custom plugins --help Show this message and exit. diff --git a/docs/plugins.rst b/docs/plugins.rst index 979f94dd..19bfdd0c 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -90,7 +90,12 @@ You can see a list of installed plugins by navigating to the ``/-/plugins`` page You can also use the ``datasette plugins`` command:: - $ datasette plugins + datasette plugins + +Which outputs: + +.. 
code-block:: json + [ { "name": "datasette_json_html", @@ -107,7 +112,8 @@ You can also use the ``datasette plugins`` command:: cog.out("\n") result = CliRunner().invoke(cli.cli, ["plugins", "--all"]) # cog.out() with text containing newlines was unindenting for some reason - cog.outl("If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette::\n") + cog.outl("If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette:\n") + cog.outl(".. code-block:: json\n") plugins = [p for p in json.loads(result.output) if p["name"].startswith("datasette.")] indented = textwrap.indent(json.dumps(plugins, indent=4), " ") for line in indented.split("\n"): @@ -115,7 +121,9 @@ You can also use the ``datasette plugins`` command:: cog.out("\n\n") .. ]]] -If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette:: +If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette: + +.. code-block:: json [ { @@ -236,6 +244,22 @@ If you run ``datasette plugins --all`` it will include default plugins that ship You can add the ``--plugins-dir=`` option to include any plugins found in that directory. +Add ``--requirements`` to output a list of installed plugins that can then be installed in another Datasette instance using ``datasette install -r requirements.txt``:: + + datasette plugins --requirements + +The output will look something like this:: + + datasette-codespaces==0.1.1 + datasette-graphql==2.2 + datasette-json-html==1.0.1 + datasette-pretty-json==0.2.2 + datasette-x-forwarded-host==0.1 + +To write that to a ``requirements.txt`` file, run this:: + + datasette plugins --requirements > requirements.txt + .. _plugins_configuration: Plugin configuration @@ -390,7 +414,7 @@ Any values embedded in ``metadata.yaml`` will be visible to anyone who views the If you are publishing your data using the :ref:`datasette publish ` family of commands, you can use the ``--plugin-secret`` option to set these secrets at publish time. 
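Taken together, those two commands give a simple way to mirror one instance's plugin set on another machine. A rough sketch of that round trip, assuming Datasette is installed in both environments, looks like this::

    # On the machine whose plugins you want to copy
    datasette plugins --requirements > requirements.txt

    # On the new machine, or in a fresh virtual environment
    datasette install -r requirements.txt

The ``--plugin-secret`` option mentioned above is the publish-time counterpart for configuration values that should not be embedded directly in ``metadata.yaml``.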
For example, using Heroku you might run the following command:: - $ datasette publish heroku my_database.db \ + datasette publish heroku my_database.db \ --name my-heroku-app-demo \ --install=datasette-auth-github \ --plugin-secret datasette-auth-github client_id your_client_id \ diff --git a/tests/test_cli.py b/tests/test_cli.py index 75724f61..056e2821 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -108,6 +108,9 @@ def test_plugins_cli(app_client): assert set(names).issuperset({p["name"] for p in EXPECTED_PLUGINS}) # And the following too: assert set(names).issuperset(DEFAULT_PLUGINS) + # --requirements should be empty because there are no installed non-plugins-dir plugins + result3 = runner.invoke(cli, ["plugins", "--requirements"]) + assert result3.output == "" def test_metadata_yaml(): From a3593c901580ea50854c3e0774b0ba0126e8a76f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 17:32:07 -0700 Subject: [PATCH 0625/1103] on_success_message_sql, closes #2138 --- datasette/views/database.py | 29 ++++++++++++++++---- docs/sql_queries.rst | 21 ++++++++++---- tests/test_canned_queries.py | 53 +++++++++++++++++++++++++++++++----- 3 files changed, 85 insertions(+), 18 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index cf76f3c2..79b3f88d 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -360,6 +360,10 @@ class QueryView(View): params[key] = str(value) else: params = dict(parse_qsl(body, keep_blank_values=True)) + + # Don't ever send csrftoken as a SQL parameter + params.pop("csrftoken", None) + # Should we return JSON? should_return_json = ( request.headers.get("accept") == "application/json" @@ -371,12 +375,27 @@ class QueryView(View): redirect_url = None try: cursor = await db.execute_write(canned_query["sql"], params_for_query) - message = canned_query.get( - "on_success_message" - ) or "Query executed, {} row{} affected".format( - cursor.rowcount, "" if cursor.rowcount == 1 else "s" - ) + # success message can come from on_success_message or on_success_message_sql + message = None message_type = datasette.INFO + on_success_message_sql = canned_query.get("on_success_message_sql") + if on_success_message_sql: + try: + message_result = ( + await db.execute(on_success_message_sql, params_for_query) + ).first() + if message_result: + message = message_result[0] + except Exception as ex: + message = "Error running on_success_message_sql: {}".format(ex) + message_type = datasette.ERROR + if not message: + message = canned_query.get( + "on_success_message" + ) or "Query executed, {} row{} affected".format( + cursor.rowcount, "" if cursor.rowcount == 1 else "s" + ) + redirect_url = canned_query.get("on_success_redirect") ok = True except Exception as ex: diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index 3c2cb228..1ae07e1f 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -392,6 +392,7 @@ This configuration will create a page at ``/mydatabase/add_name`` displaying a f You can customize how Datasette represents success and errors using the following optional properties: - ``on_success_message`` - the message shown when a query is successful +- ``on_success_message_sql`` - alternative to ``on_success_message``: a SQL query that should be executed to generate the message - ``on_success_redirect`` - the path or URL the user is redirected to on success - ``on_error_message`` - the message shown when a query throws an error - ``on_error_redirect`` - the path or URL the user is 
redirected to on error @@ -405,11 +406,12 @@ For example: "queries": { "add_name": { "sql": "INSERT INTO names (name) VALUES (:name)", + "params": ["name"], "write": True, - "on_success_message": "Name inserted", + "on_success_message_sql": "select 'Name inserted: ' || :name", "on_success_redirect": "/mydatabase/names", "on_error_message": "Name insert failed", - "on_error_redirect": "/mydatabase" + "on_error_redirect": "/mydatabase", } } } @@ -426,8 +428,10 @@ For example: queries: add_name: sql: INSERT INTO names (name) VALUES (:name) + params: + - name write: true - on_success_message: Name inserted + on_success_message_sql: 'select ''Name inserted: '' || :name' on_success_redirect: /mydatabase/names on_error_message: Name insert failed on_error_redirect: /mydatabase @@ -443,8 +447,11 @@ For example: "queries": { "add_name": { "sql": "INSERT INTO names (name) VALUES (:name)", + "params": [ + "name" + ], "write": true, - "on_success_message": "Name inserted", + "on_success_message_sql": "select 'Name inserted: ' || :name", "on_success_redirect": "/mydatabase/names", "on_error_message": "Name insert failed", "on_error_redirect": "/mydatabase" @@ -455,10 +462,12 @@ For example: } .. [[[end]]] -You can use ``"params"`` to explicitly list the named parameters that should be displayed as form fields - otherwise they will be automatically detected. +You can use ``"params"`` to explicitly list the named parameters that should be displayed as form fields - otherwise they will be automatically detected. ``"params"`` is not necessary in the above example, since without it ``"name"`` would be automatically detected from the query. You can pre-populate form fields when the page first loads using a query string, e.g. ``/mydatabase/add_name?name=Prepopulated``. The user will have to submit the form to execute the query. +If you specify a query in ``"on_success_message_sql"``, that query will be executed after the main query. The first column of the first row return by that query will be displayed as a success message. Named parameters from the main query will be made available to the success message query as well. + .. _canned_queries_magic_parameters: Magic parameters @@ -589,7 +598,7 @@ The JSON response will look like this: "redirect": "/data/add_name" } -The ``"message"`` and ``"redirect"`` values here will take into account ``on_success_message``, ``on_success_redirect``, ``on_error_message`` and ``on_error_redirect``, if they have been set. +The ``"message"`` and ``"redirect"`` values here will take into account ``on_success_message``, ``on_success_message_sql``, ``on_success_redirect``, ``on_error_message`` and ``on_error_redirect``, if they have been set. .. 
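A quick way to exercise this JSON interface from the command line is a sketch along these lines, assuming a local Datasette instance on port 8001 with the ``add_name`` query from the example above configured in ``mydatabase``, and reachable without authentication or a CSRF token::

    # Form-encoded body; the endpoint also accepts a JSON object body
    curl -s -X POST \
      -H 'Accept: application/json' \
      -d 'name=Simon' \
      'http://127.0.0.1:8001/mydatabase/add_name'

With the ``on_success_message_sql`` and ``on_success_redirect`` values from the earlier example, a successful call returns something like ``{"ok": true, "message": "Name inserted: Simon", "redirect": "/mydatabase/names"}``; on failure ``"ok"`` is ``false`` and ``"message"`` contains the configured ``on_error_message`` or the underlying error.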
_pagination: diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index e9ad3239..5256c24c 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -31,9 +31,15 @@ def canned_write_client(tmpdir): }, "add_name_specify_id": { "sql": "insert into names (rowid, name) values (:rowid, :name)", + "on_success_message_sql": "select 'Name added: ' || :name || ' with rowid ' || :rowid", "write": True, "on_error_redirect": "/data/add_name_specify_id?error", }, + "add_name_specify_id_with_error_in_on_success_message_sql": { + "sql": "insert into names (rowid, name) values (:rowid, :name)", + "on_success_message_sql": "select this is bad SQL", + "write": True, + }, "delete_name": { "sql": "delete from names where rowid = :rowid", "write": True, @@ -179,6 +185,34 @@ def test_insert_error(canned_write_client): ) +def test_on_success_message_sql(canned_write_client): + response = canned_write_client.post( + "/data/add_name_specify_id", + {"rowid": 5, "name": "Should be OK"}, + csrftoken_from=True, + ) + assert response.status == 302 + assert response.headers["Location"] == "/data/add_name_specify_id" + messages = canned_write_client.ds.unsign( + response.cookies["ds_messages"], "messages" + ) + assert messages == [["Name added: Should be OK with rowid 5", 1]] + + +def test_error_in_on_success_message_sql(canned_write_client): + response = canned_write_client.post( + "/data/add_name_specify_id_with_error_in_on_success_message_sql", + {"rowid": 1, "name": "Should fail"}, + csrftoken_from=True, + ) + messages = canned_write_client.ds.unsign( + response.cookies["ds_messages"], "messages" + ) + assert messages == [ + ["Error running on_success_message_sql: no such column: bad", 3] + ] + + def test_custom_params(canned_write_client): response = canned_write_client.get("/data/update_name?extra=foo") assert '' in response.text @@ -232,21 +266,22 @@ def test_canned_query_permissions_on_database_page(canned_write_client): query_names = { q["name"] for q in canned_write_client.get("/data.json").json["queries"] } - assert { + assert query_names == { + "add_name_specify_id_with_error_in_on_success_message_sql", + "from_hook", + "update_name", + "add_name_specify_id", + "from_async_hook", "canned_read", "add_name", - "add_name_specify_id", - "update_name", - "from_async_hook", - "from_hook", - } == query_names + } # With auth shows four response = canned_write_client.get( "/data.json", cookies={"ds_actor": canned_write_client.actor_cookie({"id": "root"})}, ) - assert 200 == response.status + assert response.status == 200 query_names_and_private = sorted( [ {"name": q["name"], "private": q["private"]} @@ -257,6 +292,10 @@ def test_canned_query_permissions_on_database_page(canned_write_client): assert query_names_and_private == [ {"name": "add_name", "private": False}, {"name": "add_name_specify_id", "private": False}, + { + "name": "add_name_specify_id_with_error_in_on_success_message_sql", + "private": False, + }, {"name": "canned_read", "private": False}, {"name": "delete_name", "private": True}, {"name": "from_async_hook", "private": False}, From 33251d04e78d575cca62bb59069bb43a7d924746 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 17:56:27 -0700 Subject: [PATCH 0626/1103] Canned query write counters demo, refs #2134 --- .github/workflows/deploy-latest.yml | 30 +++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index ed60376c..4746aa07 100644 --- 
a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -57,6 +57,36 @@ jobs: db.route = "alternative-route" ' > plugins/alternative_route.py cp fixtures.db fixtures2.db + - name: And the counters writable canned query demo + run: | + cat > plugins/counters.py < Date: Thu, 10 Aug 2023 22:16:19 -0700 Subject: [PATCH 0627/1103] Fixed display of database color Closes #2139, closes #2119 --- datasette/database.py | 7 +++++++ datasette/templates/database.html | 2 +- datasette/templates/query.html | 2 +- datasette/templates/row.html | 2 +- datasette/templates/table.html | 2 +- datasette/views/base.py | 4 ---- datasette/views/database.py | 8 +++----- datasette/views/index.py | 4 +--- datasette/views/row.py | 4 +++- datasette/views/table.py | 2 +- tests/test_html.py | 20 ++++++++++++++++++++ 11 files changed, 39 insertions(+), 18 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index d8043c24..af39ac9e 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -1,6 +1,7 @@ import asyncio from collections import namedtuple from pathlib import Path +import hashlib import janus import queue import sys @@ -62,6 +63,12 @@ class Database: } return self._cached_table_counts + @property + def color(self): + if self.hash: + return self.hash[:6] + return hashlib.md5(self.name.encode("utf8")).hexdigest()[:6] + def suggest_name(self): if self.path: return Path(self.path).stem diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 7acf0369..3d4dae07 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -10,7 +10,7 @@ {% block body_class %}db db-{{ database|to_css_class }}{% endblock %} {% block content %} -