From d444b6aad568e3743199b44d4ae978f5a9ce36a4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 20 Aug 2024 09:34:53 -0700 Subject: [PATCH 001/251] Fix for spacing on index page, closes #2399 --- datasette/templates/index.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/templates/index.html b/datasette/templates/index.html index a3595a39..03349279 100644 --- a/datasette/templates/index.html +++ b/datasette/templates/index.html @@ -21,7 +21,7 @@ {% for database in databases %}

{{ database.name }}{% if database.private %} 🔒{% endif %}

- {% if database.show_table_row_counts %}{{ "{:,}".format(database.table_rows_sum) }} rows in {% endif %}{{ database.tables_count }} table{% if database.tables_count != 1 %}s{% endif %}{% if database.tables_count and database.hidden_tables_count %}, {% endif -%} + {% if database.show_table_row_counts %}{{ "{:,}".format(database.table_rows_sum) }} rows in {% endif %}{{ database.tables_count }} table{% if database.tables_count != 1 %}s{% endif %}{% if database.hidden_tables_count %}, {% endif -%} {% if database.hidden_tables_count -%} {% if database.show_table_row_counts %}{{ "{:,}".format(database.hidden_table_rows_sum) }} rows in {% endif %}{{ database.hidden_tables_count }} hidden table{% if database.hidden_tables_count != 1 %}s{% endif -%} {% endif -%} From 39dfc7d7d77b901d7fef5481e91465fa48b88799 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 20 Aug 2024 19:03:33 -0700 Subject: [PATCH 002/251] Removed units functionality and Pint dependency Closes #2400, unblocks #2320 --- datasette/app.py | 1 - datasette/filters.py | 24 +------- datasette/utils/__init__.py | 1 - datasette/views/base.py | 4 -- datasette/views/row.py | 1 - datasette/views/table.py | 13 +---- docs/metadata.rst | 94 -------------------------------- setup.py | 1 - tests/fixtures.py | 11 ---- tests/plugins/my_plugin.py | 13 +++-- tests/test_api.py | 12 ---- tests/test_internals_database.py | 1 - tests/test_plugins.py | 4 +- tests/test_table_api.py | 16 ------ 14 files changed, 14 insertions(+), 182 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 1c730a73..d7d20016 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -37,7 +37,6 @@ from jinja2.exceptions import TemplateNotFound from .events import Event from .views import Context -from .views.base import ureg from .views.database import database_download, DatabaseView, TableCreateView, QueryView from .views.index import IndexView from .views.special import ( diff --git a/datasette/filters.py b/datasette/filters.py index 585d4865..67d4170b 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -368,12 +368,8 @@ class Filters: ) _filters_by_key = {f.key: f for f in _filters} - def __init__(self, pairs, units=None, ureg=None): - if units is None: - units = {} + def __init__(self, pairs): self.pairs = pairs - self.units = units - self.ureg = ureg def lookups(self): """Yields (lookup, display, no_argument) pairs""" @@ -413,20 +409,6 @@ class Filters: def has_selections(self): return bool(self.pairs) - def convert_unit(self, column, value): - """If the user has provided a unit in the query, convert it into the column unit, if present.""" - if column not in self.units: - return value - - # Try to interpret the value as a unit - value = self.ureg(value) - if isinstance(value, numbers.Number): - # It's just a bare number, assume it's the column unit - return value - - column_unit = self.ureg(self.units[column]) - return value.to(column_unit).magnitude - def build_where_clauses(self, table): sql_bits = [] params = {} @@ -434,9 +416,7 @@ class Filters: for column, lookup, value in self.selections(): filter = self._filters_by_key.get(lookup, None) if filter: - sql_bit, param = filter.where_clause( - table, column, self.convert_unit(column, value), i - ) + sql_bit, param = filter.where_clause(table, column, value, i) sql_bits.append(sql_bit) if param is not None: if not isinstance(param, list): diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 073d6e86..7d248ee5 100644 --- a/datasette/utils/__init__.py +++ 
b/datasette/utils/__init__.py @@ -1368,7 +1368,6 @@ _table_config_keys = ( "fts_table", "fts_pk", "searchmode", - "units", ) diff --git a/datasette/views/base.py b/datasette/views/base.py index 2e78b0a5..aee06b01 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -8,8 +8,6 @@ import urllib from markupsafe import escape -import pint - from datasette.database import QueryInterrupted from datasette.utils.asgi import Request from datasette.utils import ( @@ -32,8 +30,6 @@ from datasette.utils.asgi import ( BadRequest, ) -ureg = pint.UnitRegistry() - class DatasetteError(Exception): def __init__( diff --git a/datasette/views/row.py b/datasette/views/row.py index 6180446f..d802994e 100644 --- a/datasette/views/row.py +++ b/datasette/views/row.py @@ -103,7 +103,6 @@ class RowView(DataView): "columns": columns, "primary_keys": resolved.pks, "primary_key_values": pk_values, - "units": (await self.ds.table_config(database, table)).get("units", {}), } if "foreign_key_tables" in (request.args.get("_extras") or "").split(","): diff --git a/datasette/views/table.py b/datasette/views/table.py index ba0dd4f3..d71efeb0 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -43,7 +43,7 @@ from datasette.utils import ( from datasette.utils.asgi import BadRequest, Forbidden, NotFound, Response from datasette.filters import Filters import sqlite_utils -from .base import BaseView, DatasetteError, ureg, _error, stream_csv +from .base import BaseView, DatasetteError, _error, stream_csv from .database import QueryView LINK_WITH_LABEL = ( @@ -292,14 +292,6 @@ async def display_columns_and_rows( ), ) ) - elif column in table_config.get("units", {}) and value != "": - # Interpret units using pint - value = value * ureg(table_config["units"][column]) - # Pint uses floating point which sometimes introduces errors in the compact - # representation, which we have to round off to avoid ugliness. In the vast - # majority of cases this rounding will be inconsequential. I hope. - value = round(value.to_compact(), 6) - display_value = markupsafe.Markup(f"{value:~P}".replace(" ", " ")) else: display_value = str(value) if truncate_cells and len(display_value) > truncate_cells: @@ -1017,7 +1009,6 @@ async def table_view_data( nofacet = True table_metadata = await datasette.table_config(database_name, table_name) - units = table_metadata.get("units", {}) # Arguments that start with _ and don't contain a __ are # special - things like ?_search= - and should not be @@ -1029,7 +1020,7 @@ async def table_view_data( filter_args.append((key, v)) # Build where clauses from query string arguments - filters = Filters(sorted(filter_args), units, ureg) + filters = Filters(sorted(filter_args)) where_clauses, params = filters.build_where_clauses(table_name) # Execute filters_from_request plugin hooks - including the default diff --git a/docs/metadata.rst b/docs/metadata.rst index f3ca68ac..a3fa4040 100644 --- a/docs/metadata.rst +++ b/docs/metadata.rst @@ -205,100 +205,6 @@ These will be displayed at the top of the table page, and will also show in the You can see an example of how these look at `latest.datasette.io/fixtures/roadside_attractions `__. -Specifying units for a column ------------------------------ - -Datasette supports attaching units to a column, which will be used when displaying -values from that column. SI prefixes will be used where appropriate. - -Column units are configured in the metadata like so: - -.. 
[[[cog - metadata_example(cog, { - "databases": { - "database1": { - "tables": { - "example_table": { - "units": { - "column1": "metres", - "column2": "Hz" - } - } - } - } - } - }) -.. ]]] - -.. tab:: metadata.yaml - - .. code-block:: yaml - - databases: - database1: - tables: - example_table: - units: - column1: metres - column2: Hz - - -.. tab:: metadata.json - - .. code-block:: json - - { - "databases": { - "database1": { - "tables": { - "example_table": { - "units": { - "column1": "metres", - "column2": "Hz" - } - } - } - } - } - } -.. [[[end]]] - - -Units are interpreted using Pint_, and you can see the full list of available units in -Pint's `unit registry`_. You can also add `custom units`_ to the metadata, which will be -registered with Pint: - -.. [[[cog - metadata_example(cog, { - "custom_units": [ - "decibel = [] = dB" - ] - }) -.. ]]] - -.. tab:: metadata.yaml - - .. code-block:: yaml - - custom_units: - - decibel = [] = dB - - -.. tab:: metadata.json - - .. code-block:: json - - { - "custom_units": [ - "decibel = [] = dB" - ] - } -.. [[[end]]] - -.. _Pint: https://pint.readthedocs.io/ -.. _unit registry: https://github.com/hgrecco/pint/blob/master/pint/default_en.txt -.. _custom units: http://pint.readthedocs.io/en/latest/defining.html - .. _metadata_default_sort: Setting a default sort order diff --git a/setup.py b/setup.py index 22ec7963..47d796a3 100644 --- a/setup.py +++ b/setup.py @@ -50,7 +50,6 @@ setup( "httpx>=0.20", 'importlib_resources>=1.3.1; python_version < "3.9"', 'importlib_metadata>=4.6; python_version < "3.10"', - "pint>=0.9", "pluggy>=1.0", "uvicorn>=0.11", "aiofiles>=0.4", diff --git a/tests/fixtures.py b/tests/fixtures.py index af6b610b..0539b7c8 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -379,7 +379,6 @@ METADATA = { ], }, "no_primary_key": {"sortable_columns": [], "hidden": True}, - "units": {"units": {"distance": "m", "frequency": "Hz"}}, "primary_key_multiple_columns_explicit_label": { "label_column": "content2" }, @@ -507,16 +506,6 @@ CREATE TABLE "custom_foreign_key_label" ( FOREIGN KEY ("foreign_key_with_custom_label") REFERENCES [primary_key_multiple_columns_explicit_label](id) ); -CREATE TABLE units ( - pk integer primary key, - distance int, - frequency int -); - -INSERT INTO units VALUES (1, 1, 100); -INSERT INTO units VALUES (2, 5000, 2500); -INSERT INTO units VALUES (3, 100000, 75000); - CREATE TABLE tags ( tag TEXT PRIMARY KEY ); diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 4ca4f989..e87353ea 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -5,18 +5,21 @@ from datasette import tracer from datasette.utils import path_with_added_args from datasette.utils.asgi import asgi_send_json, Response import base64 -import pint import json -import urllib - -ureg = pint.UnitRegistry() +import urllib.parse @hookimpl def prepare_connection(conn, database, datasette): def convert_units(amount, from_, to_): """select convert_units(100, 'm', 'ft');""" - return (amount * ureg(from_)).to(to_).to_tuple()[0] + # Convert meters to feet + if from_ == "m" and to_ == "ft": + return amount * 3.28084 + # Convert feet to meters + if from_ == "ft" and to_ == "m": + return amount / 3.28084 + assert False, "Unsupported conversion" conn.create_function("convert_units", 3, convert_units) diff --git a/tests/test_api.py b/tests/test_api.py index 8a3fcc92..91f07563 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -528,16 +528,6 @@ async def test_database_page(ds_client): }, "private": False, }, - { - 
"name": "units", - "columns": ["pk", "distance", "frequency"], - "primary_keys": ["pk"], - "count": 3, - "hidden": False, - "fts_table": None, - "foreign_keys": {"incoming": [], "outgoing": []}, - "private": False, - }, { "name": "no_primary_key", "columns": ["content", "a", "b", "c"], @@ -1133,7 +1123,6 @@ async def test_config_json(config, expected): ], }, "no_primary_key": {"sortable_columns": [], "hidden": True}, - "units": {"units": {"distance": "m", "frequency": "Hz"}}, "primary_key_multiple_columns_explicit_label": { "label_column": "content2" }, @@ -1168,7 +1157,6 @@ async def test_config_json(config, expected): "text", ] }, - "units": {"units": {"distance": "m", "frequency": "Hz"}}, # These one get redacted: "no_primary_key": "***", "primary_key_multiple_columns_explicit_label": "***", diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index bc3c8fcf..0020668a 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -422,7 +422,6 @@ async def test_table_names(db): "table/with/slashes.csv", "complex_foreign_keys", "custom_foreign_key_label", - "units", "tags", "searchable", "searchable_tags", diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 5fad03ad..aa8f1578 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -424,8 +424,8 @@ def view_names_client(tmp_path_factory): ( ("/", "index"), ("/fixtures", "database"), - ("/fixtures/units", "table"), - ("/fixtures/units/1", "row"), + ("/fixtures/facetable", "table"), + ("/fixtures/facetable/1", "row"), ("/-/versions", "json_data"), ("/fixtures/-/query?sql=select+1", "database"), ), diff --git a/tests/test_table_api.py b/tests/test_table_api.py index 11542cb0..615b36eb 100644 --- a/tests/test_table_api.py +++ b/tests/test_table_api.py @@ -720,22 +720,6 @@ async def test_view(ds_client): ] -@pytest.mark.xfail -@pytest.mark.asyncio -async def test_unit_filters(ds_client): - response = await ds_client.get( - "/fixtures/units.json?_shape=arrays&distance__lt=75km&frequency__gt=1kHz" - ) - assert response.status_code == 200 - data = response.json() - - assert data["units"]["distance"] == "m" - assert data["units"]["frequency"] == "Hz" - - assert len(data["rows"]) == 1 - assert data["rows"][0][0] == 2 - - def test_page_size_matching_max_returned_rows( app_client_returned_rows_matches_page_size, ): From 4efcc29d02d594106e8e9f5206aa5a740b45eccb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 20 Aug 2024 19:15:36 -0700 Subject: [PATCH 003/251] Test against Python "3.13-dev" Refs: - #2320 --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 3ac8756d..0e217ac3 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13-dev"] steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} From 1f3fb5f96b3f6e773b8c1b8ec5d8f7516c6860b0 Mon Sep 17 00:00:00 2001 From: Tiago Ilieve Date: Wed, 21 Aug 2024 00:02:35 -0300 Subject: [PATCH 004/251] debugger: load 'ipdb' if present * debugger: load 'ipdb' if present Transparently chooses between the IPython-enhanced 'ipdb' or the standard 'pdb'. 
* datasette install ipdb --------- Co-authored-by: Simon Willison --- datasette/handle_exception.py | 6 +++++- docs/contributing.rst | 8 ++++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/datasette/handle_exception.py b/datasette/handle_exception.py index 1a0ac979..96398a4c 100644 --- a/datasette/handle_exception.py +++ b/datasette/handle_exception.py @@ -5,9 +5,13 @@ from .utils.asgi import ( ) from .views.base import DatasetteError from markupsafe import Markup -import pdb import traceback +try: + import ipdb as pdb +except ImportError: + import pdb + try: import rich except ImportError: diff --git a/docs/contributing.rst b/docs/contributing.rst index 45330a83..c1268321 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -111,10 +111,14 @@ Debugging Any errors that occur while Datasette is running while display a stack trace on the console. -You can tell Datasette to open an interactive ``pdb`` debugger session if an error occurs using the ``--pdb`` option:: +You can tell Datasette to open an interactive ``pdb`` (or ``ipdb``, if present) debugger session if an error occurs using the ``--pdb`` option:: datasette --pdb fixtures.db +For `ipdb `__, first run this:: + + datasette install ipdb + .. _contributing_formatting: Code formatting @@ -349,4 +353,4 @@ Datasette bundles `CodeMirror `__ for the SQL editing i -p @rollup/plugin-node-resolve \ -p @rollup/plugin-terser -* Update the version reference in the ``codemirror.html`` template. \ No newline at end of file +* Update the version reference in the ``codemirror.html`` template. From 9028d7f80527b696330805ef7921de14ab40b129 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 21 Aug 2024 09:53:52 -0700 Subject: [PATCH 005/251] Support nested JSON in metadata.json, closes #2403 --- datasette/app.py | 5 ++++- tests/test_internals_datasette.py | 7 +++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index d7d20016..3a53afa5 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -450,7 +450,10 @@ class Datasette: for key in self._metadata_local or {}: if key == "databases": continue - await self.set_instance_metadata(key, self._metadata_local[key]) + value = self._metadata_local[key] + if not isinstance(value, str): + value = json.dumps(value) + await self.set_instance_metadata(key, value) # step 2: database-level metadata for dbname, db in self._metadata_local.get("databases", {}).items(): diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index 135a9099..fc4e42cb 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -183,13 +183,16 @@ async def test_apply_metadata_json(): "legislators": { "tables": {"offices": {"summary": "office address or sumtin"}}, "queries": { - "millenntial_represetatives": { + "millennial_representatives": { "summary": "Social media accounts for current legislators" } }, } - } + }, + "weird_instance_value": {"nested": [1, 2, 3]}, }, ) await ds.invoke_startup() assert (await ds.client.get("/")).status_code == 200 + value = (await ds.get_instance_metadata()).get("weird_instance_value") + assert value == '{"nested": [1, 2, 3]}' From 34a6b2ac844a0784fae1f36e0243336a48413594 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 21 Aug 2024 10:58:17 -0700 Subject: [PATCH 006/251] Fixed bug with ?_trace=1 and large responses, closes #2404 --- datasette/tracer.py | 4 +++- datasette/utils/testing.py | 3 +++ tests/test_tracer.py | 17 +++++++++++++++++ 3 files 
changed, 23 insertions(+), 1 deletion(-) diff --git a/datasette/tracer.py b/datasette/tracer.py index fc7338b0..29dd4556 100644 --- a/datasette/tracer.py +++ b/datasette/tracer.py @@ -90,6 +90,7 @@ class AsgiTracer: async def wrapped_send(message): nonlocal accumulated_body, size_limit_exceeded, response_headers + if message["type"] == "http.response.start": response_headers = message["headers"] await send(message) @@ -102,11 +103,12 @@ class AsgiTracer: # Accumulate body until the end or until size is exceeded accumulated_body += message["body"] if len(accumulated_body) > self.max_body_bytes: + # Send what we have accumulated so far await send( { "type": "http.response.body", "body": accumulated_body, - "more_body": True, + "more_body": bool(message.get("more_body")), } ) size_limit_exceeded = True diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index d4990784..1606da05 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -62,10 +62,13 @@ class TestClient: follow_redirects=False, redirect_count=0, method="GET", + params=None, cookies=None, if_none_match=None, headers=None, ): + if params: + path += "?" + urlencode(params, doseq=True) return await self._request( path=path, follow_redirects=follow_redirects, diff --git a/tests/test_tracer.py b/tests/test_tracer.py index ceadee50..1a4074b0 100644 --- a/tests/test_tracer.py +++ b/tests/test_tracer.py @@ -53,6 +53,23 @@ def test_trace(trace_debug): assert all(isinstance(trace["count"], int) for trace in execute_manys) +def test_trace_silently_fails_for_large_page(): + # Max HTML size is 256KB + with make_app_client(settings={"trace_debug": True}) as client: + # Small response should have trace + small_response = client.get("/fixtures/simple_primary_key.json?_trace=1") + assert small_response.status == 200 + assert "_trace" in small_response.json + + # Big response should not + big_response = client.get( + "/fixtures/-/query.json", + params={"_trace": 1, "sql": "select zeroblob(1024 * 256)"}, + ) + assert big_response.status == 200 + assert "_trace" not in big_response.json + + def test_trace_parallel_queries(): with make_app_client(settings={"trace_debug": True}) as client: response = client.get("/parallel-queries?_trace=1") From 8a63cdccc7744e6c6969edf47f7c519bf4c25fa6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 21 Aug 2024 12:19:18 -0700 Subject: [PATCH 007/251] Tracer now catches errors, closes #2405 --- datasette/database.py | 3 +++ datasette/tracer.py | 31 +++++++++++++++++++------------ tests/test_tracer.py | 14 ++++++++++++++ 3 files changed, 36 insertions(+), 12 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 8d51befd..c761dad7 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -644,6 +644,9 @@ class QueryInterrupted(Exception): self.sql = sql self.params = params + def __str__(self): + return "QueryInterrupted: {}".format(self.e) + class MultipleValues(Exception): pass diff --git a/datasette/tracer.py b/datasette/tracer.py index 29dd4556..9e66613b 100644 --- a/datasette/tracer.py +++ b/datasette/tracer.py @@ -32,7 +32,7 @@ def trace_child_tasks(): @contextmanager -def trace(type, **kwargs): +def trace(trace_type, **kwargs): assert not TRACE_RESERVED_KEYS.intersection( kwargs.keys() ), f".trace() keyword parameters cannot include {TRACE_RESERVED_KEYS}" @@ -45,17 +45,24 @@ def trace(type, **kwargs): yield kwargs return start = time.perf_counter() - yield kwargs - end = time.perf_counter() - trace_info = { - "type": type, - "start": 
start, - "end": end, - "duration_ms": (end - start) * 1000, - "traceback": traceback.format_list(traceback.extract_stack(limit=6)[:-3]), - } - trace_info.update(kwargs) - tracer.append(trace_info) + captured_error = None + try: + yield kwargs + except Exception as ex: + captured_error = ex + raise + finally: + end = time.perf_counter() + trace_info = { + "type": trace_type, + "start": start, + "end": end, + "duration_ms": (end - start) * 1000, + "traceback": traceback.format_list(traceback.extract_stack(limit=6)[:-3]), + "error": str(captured_error) if captured_error else None, + } + trace_info.update(kwargs) + tracer.append(trace_info) @contextmanager diff --git a/tests/test_tracer.py b/tests/test_tracer.py index 1a4074b0..1e0d7001 100644 --- a/tests/test_tracer.py +++ b/tests/test_tracer.py @@ -70,6 +70,20 @@ def test_trace_silently_fails_for_large_page(): assert "_trace" not in big_response.json +def test_trace_query_errors(): + with make_app_client(settings={"trace_debug": True}) as client: + response = client.get( + "/fixtures/-/query.json", + params={"_trace": 1, "sql": "select * from non_existent_table"}, + ) + assert response.status == 400 + + data = response.json + assert "_trace" in data + trace_info = data["_trace"] + assert trace_info["traces"][-1]["error"] == "no such table: non_existent_table" + + def test_trace_parallel_queries(): with make_app_client(settings={"trace_debug": True}) as client: response = client.get("/parallel-queries?_trace=1") From f28ff8e4f0eb89bb67a6d8336e4a3e2655f3b983 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 21 Aug 2024 13:36:42 -0700 Subject: [PATCH 008/251] Consider just 1000 rows for suggest facet, closes #2406 --- datasette/facets.py | 46 +++++++++++++++++++++++++++----------------- tests/test_facets.py | 38 +++++++++++++++++++++++++++++++++++- 2 files changed, 65 insertions(+), 19 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index ccd85461..f49575d9 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -65,6 +65,8 @@ def register_facet_classes(): class Facet: type = None + # How many rows to consider when suggesting facets: + suggest_consider = 1000 def __init__( self, @@ -145,17 +147,6 @@ class Facet: ) ).columns - async def get_row_count(self): - if self.row_count is None: - self.row_count = ( - await self.ds.execute( - self.database, - f"select count(*) from ({self.sql})", - self.params, - ) - ).rows[0][0] - return self.row_count - class ColumnFacet(Facet): type = "column" @@ -170,13 +161,16 @@ class ColumnFacet(Facet): if column in already_enabled: continue suggested_facet_sql = """ - select {column} as value, count(*) as n from ( - {sql} - ) where value is not null + with limited as (select * from ({sql}) limit {suggest_consider}) + select {column} as value, count(*) as n from limited + where value is not null group by value limit {limit} """.format( - column=escape_sqlite(column), sql=self.sql, limit=facet_size + 1 + column=escape_sqlite(column), + sql=self.sql, + limit=facet_size + 1, + suggest_consider=self.suggest_consider, ) distinct_values = None try: @@ -211,6 +205,17 @@ class ColumnFacet(Facet): continue return suggested_facets + async def get_row_count(self): + if self.row_count is None: + self.row_count = ( + await self.ds.execute( + self.database, + f"select count(*) from (select * from ({self.sql}) limit {self.suggest_consider})", + self.params, + ) + ).rows[0][0] + return self.row_count + async def facet_results(self): facet_results = [] facets_timed_out = [] @@ -313,11 +318,14 @@ 
class ArrayFacet(Facet): continue # Is every value in this column either null or a JSON array? suggested_facet_sql = """ + with limited as (select * from ({sql}) limit {suggest_consider}) select distinct json_type({column}) - from ({sql}) + from limited where {column} is not null and {column} != '' """.format( - column=escape_sqlite(column), sql=self.sql + column=escape_sqlite(column), + sql=self.sql, + suggest_consider=self.suggest_consider, ) try: results = await self.ds.execute( @@ -402,7 +410,9 @@ class ArrayFacet(Facet): order by count(*) desc, value limit {limit} """.format( - col=escape_sqlite(column), sql=self.sql, limit=facet_size + 1 + col=escape_sqlite(column), + sql=self.sql, + limit=facet_size + 1, ) try: facet_rows_results = await self.ds.execute( diff --git a/tests/test_facets.py b/tests/test_facets.py index 023efcf0..a2b505ec 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -1,6 +1,6 @@ from datasette.app import Datasette from datasette.database import Database -from datasette.facets import ColumnFacet, ArrayFacet, DateFacet +from datasette.facets import Facet, ColumnFacet, ArrayFacet, DateFacet from datasette.utils.asgi import Request from datasette.utils import detect_json1 from .fixtures import make_app_client @@ -662,3 +662,39 @@ async def test_facet_against_in_memory_database(): assert response1.status_code == 200 response2 = await ds.client.get("/mem/t?_facet=name&_facet=name2") assert response2.status_code == 200 + + +@pytest.mark.asyncio +async def test_facet_only_considers_first_x_rows(): + # This test works by manually fiddling with Facet.suggest_consider + ds = Datasette() + original_suggest_consider = Facet.suggest_consider + try: + Facet.suggest_consider = 40 + db = ds.add_memory_database("test_facet_only_x_rows") + await db.execute_write("create table t (id integer primary key, col text)") + # First 50 rows make it look like col and col_json should be faceted + to_insert = [{"col": "one" if i % 2 else "two"} for i in range(50)] + await db.execute_write_many("insert into t (col) values (:col)", to_insert) + # Next 50 break that assumption + to_insert2 = [{"col": f"x{i}"} for i in range(50)] + await db.execute_write_many("insert into t (col) values (:col)", to_insert2) + response = await ds.client.get( + "/test_facet_only_x_rows/t.json?_extra=suggested_facets" + ) + data = response.json() + assert data["suggested_facets"] == [ + { + "name": "col", + "toggle_url": "http://localhost/test_facet_only_x_rows/t.json?_extra=suggested_facets&_facet=col", + } + ] + # But if we set suggest_consider to 100 they are not suggested + Facet.suggest_consider = 100 + response2 = await ds.client.get( + "/test_facet_only_x_rows/t.json?_extra=suggested_facets" + ) + data2 = response2.json() + assert data2["suggested_facets"] == [] + finally: + Facet.suggest_consider = original_suggest_consider From bc46066f9d96550286ac7694f18f44c8169a83a7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 21 Aug 2024 14:38:11 -0700 Subject: [PATCH 009/251] Fix huge performance bug in DateFacet, refs #2407 --- datasette/facets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index f49575d9..dd149424 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -480,8 +480,8 @@ class DateFacet(Facet): # Does this column contain any dates in the first 100 rows? 
suggested_facet_sql = """ select date({column}) from ( - {sql} - ) where {column} glob "????-??-*" limit 100; + select * from ({sql}) limit 100 + ) where {column} glob "????-??-*" """.format( column=escape_sqlite(column), sql=self.sql ) From dc1d15247647c350ac73ad520229467180b8433c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 21 Aug 2024 14:58:29 -0700 Subject: [PATCH 010/251] Stop counting at 10,000 rows when listing tables, refs #2398 --- datasette/database.py | 5 ++++- datasette/templates/database.html | 2 +- datasette/views/database.py | 3 ++- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index c761dad7..da0ab1de 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -29,6 +29,9 @@ AttachedDatabase = namedtuple("AttachedDatabase", ("seq", "name", "file")) class Database: + # For table counts stop at this many rows: + count_limit = 10000 + def __init__( self, ds, @@ -376,7 +379,7 @@ class Database: try: table_count = ( await self.execute( - f"select count(*) from [{table}]", + f"select count(*) from (select * from [{table}] limit {self.count_limit + 1})", custom_time_limit=limit, ) ).rows[0][0] diff --git a/datasette/templates/database.html b/datasette/templates/database.html index f921bc2d..c6f3da99 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -60,7 +60,7 @@

{{ table.name }}{% if table.private %} 🔒{% endif %}{% if table.hidden %} (hidden){% endif %}

{% for column in table.columns %}{{ column }}{% if not loop.last %}, {% endif %}{% endfor %}

-

{% if table.count is none %}Many rows{% else %}{{ "{:,}".format(table.count) }} row{% if table.count == 1 %}{% else %}s{% endif %}{% endif %}

+

{% if table.count is none %}Many rows{% elif table.count == count_limit + 1 %}>{{ "{:,}".format(count_limit) }} rows{% else %}{{ "{:,}".format(table.count) }} row{% if table.count == 1 %}{% else %}s{% endif %}{% endif %}
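A hedged aside on how to read the ``count_limit + 1`` comparison above, sketched in Python from the datasette/database.py change earlier in this patch (``mytable`` is a placeholder table name, not something from the patch):

    count_limit = 10000
    # Counting through a LIMIT-ed subquery keeps count(*) cheap on huge tables.
    # If the result comes back as exactly count_limit + 1, the real total is
    # unknown but larger than count_limit, so the template renders ">10,000 rows".
    sql = f"select count(*) from (select * from [mytable] limit {count_limit + 1})"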

{% endif %} {% endfor %} diff --git a/datasette/views/database.py b/datasette/views/database.py index 9ab061a1..61fe15e4 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -159,6 +159,7 @@ class DatabaseView(View): "show_hidden": request.args.get("_show_hidden"), "editable": True, "metadata": metadata, + "count_limit": db.count_limit, "allow_download": datasette.setting("allow_download") and not db.is_mutable and not db.is_memory, @@ -272,7 +273,7 @@ class QueryContext: async def get_tables(datasette, request, db): tables = [] database = db.name - table_counts = await db.table_counts(5) + table_counts = await db.table_counts(100) hidden_table_names = set(await db.hidden_table_names()) all_foreign_keys = await db.get_all_foreign_keys() From 9ecce07b083824f56bd96966a1f63e18d44489b1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 21 Aug 2024 19:09:25 -0700 Subject: [PATCH 011/251] count all rows button on table page, refs #2408 --- datasette/templates/table.html | 33 ++++++++++++++++++++++++++++++++- datasette/url_builder.py | 6 ++++++ datasette/views/table.py | 11 ++++++++++- 3 files changed, 48 insertions(+), 2 deletions(-) diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 35e0b9c1..187f0143 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -40,7 +40,10 @@ {% endif %} {% if count or human_description_en %} -

{% if count or count == 0 %}{{ "{:,}".format(count) }} row{% if count == 1 %}{% else %}s{% endif %}{% endif %} +

+ {% if count == count_limit + 1 %}>{{ "{:,}".format(count_limit) }} rows + {% if allow_execute_sql and query.sql %} count all rows{% endif %} + {% elif count or count == 0 %}{{ "{:,}".format(count) }} row{% if count == 1 %}{% else %}s{% endif %}{% endif %} {% if human_description_en %}{{ human_description_en }}{% endif %}

{% endif %} @@ -172,4 +175,32 @@
{{ view_definition }}
{% endif %} +{% if allow_execute_sql and query.sql %} + +{% endif %} + {% endblock %} diff --git a/datasette/url_builder.py b/datasette/url_builder.py index 9c6bbde0..16b3d42b 100644 --- a/datasette/url_builder.py +++ b/datasette/url_builder.py @@ -31,6 +31,12 @@ class Urls: db = self.ds.get_database(database) return self.path(tilde_encode(db.route), format=format) + def database_query(self, database, sql, format=None): + path = f"{self.database(database)}/-/query?" + urllib.parse.urlencode( + {"sql": sql} + ) + return self.path(path, format=format) + def table(self, database, table, format=None): path = f"{self.database(database)}/{tilde_encode(table)}" if format is not None: diff --git a/datasette/views/table.py b/datasette/views/table.py index d71efeb0..ea044b36 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -929,6 +929,7 @@ async def table_view_traced(datasette, request): database=resolved.db.name, table=resolved.table, ), + count_limit=resolved.db.count_limit, ), request=request, view_name="table", @@ -1280,6 +1281,9 @@ async def table_view_data( if extra_extras: extras.update(extra_extras) + async def extra_count_sql(): + return count_sql + async def extra_count(): "Total count of rows matching these filters" # Calculate the total count for this query @@ -1299,8 +1303,11 @@ async def table_view_data( # Otherwise run a select count(*) ... if count_sql and count is None and not nocount: + count_sql_limited = ( + f"select count(*) from (select * {from_sql} limit 10001)" + ) try: - count_rows = list(await db.execute(count_sql, from_sql_params)) + count_rows = list(await db.execute(count_sql_limited, from_sql_params)) count = count_rows[0][0] except QueryInterrupted: pass @@ -1615,6 +1622,7 @@ async def table_view_data( "facet_results", "facets_timed_out", "count", + "count_sql", "human_description_en", "next_url", "metadata", @@ -1647,6 +1655,7 @@ async def table_view_data( registry = Registry( extra_count, + extra_count_sql, extra_facet_results, extra_facets_timed_out, extra_suggested_facets, From dc288056b81a3635bdb02a6d0121887db2720e5e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 21 Aug 2024 19:56:02 -0700 Subject: [PATCH 012/251] Better handling of errors for count all button, refs #2408 --- datasette/templates/table.html | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 187f0143..7246ff5d 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -42,7 +42,7 @@ {% if count or human_description_en %}

{% if count == count_limit + 1 %}>{{ "{:,}".format(count_limit) }} rows - {% if allow_execute_sql and query.sql %} count all rows{% endif %} + {% if allow_execute_sql and query.sql %} count all{% endif %} {% elif count or count == 0 %}{{ "{:,}".format(count) }} row{% if count == 1 %}{% else %}s{% endif %}{% endif %} {% if human_description_en %}{{ human_description_en }}{% endif %}
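As a rough sketch of what the "count all" link ends up doing, here is an approximate Python equivalent of the JavaScript further down in this patch, written against ``datasette.client`` and a hypothetical ``fixtures`` database (the helper name and the count(*) wrapper query are illustrative assumptions, not code from the patch):

    async def count_all_rows(datasette, sql):
        # Hit the /<database>/-/query.json endpoint with a count(*) wrapper,
        # the same style of URL the new urls.database_query() helper builds.
        response = await datasette.client.get(
            "/fixtures/-/query.json",
            params={"sql": f"select count(*) from ({sql})"},
        )
        data = response.json()
        if response.status_code != 200:
            # Surface the same error message the JavaScript displays in red
            raise ValueError(data.get("title") or data.get("error"))
        return data["rows"][0]["count(*)"]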

@@ -180,7 +180,7 @@ document.addEventListener('DOMContentLoaded', function() { const countLink = document.querySelector('a.count-sql'); if (countLink) { - countLink.addEventListener('click', function(ev) { + countLink.addEventListener('click', async function(ev) { ev.preventDefault(); // Replace countLink with span with same style attribute const span = document.createElement('span'); @@ -189,14 +189,23 @@ document.addEventListener('DOMContentLoaded', function() { countLink.replaceWith(span); countLink.setAttribute('disabled', 'disabled'); let url = countLink.href.replace(/(\?|$)/, '.json$1'); - fetch(url) - .then(response => response.json()) - .then(data => { - const count = data['rows'][0]['count(*)']; - const formattedCount = count.toLocaleString(); - span.closest('h3').textContent = formattedCount + ' rows'; - }) - .catch(error => countLink.textContent = 'error'); + try { + const response = await fetch(url); + console.log({response}); + const data = await response.json(); + console.log({data}); + if (!response.ok) { + console.log('throw error'); + throw new Error(data.title || data.error); + } + const count = data['rows'][0]['count(*)']; + const formattedCount = count.toLocaleString(); + span.closest('h3').textContent = formattedCount + ' rows'; + } catch (error) { + console.log('Update', span, 'with error message', error); + span.textContent = error.message; + span.style.color = 'red'; + } }); } }); From 92c4d41ca605e0837a2711ee52fde9cf1eea74d0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 1 Sep 2024 17:20:41 -0700 Subject: [PATCH 013/251] results.dicts() method, closes #2414 --- datasette/database.py | 3 +++ datasette/views/row.py | 3 +-- datasette/views/table.py | 2 +- docs/internals.rst | 3 +++ tests/test_api_write.py | 23 +++++++++-------------- tests/test_internals_database.py | 11 +++++++++++ 6 files changed, 28 insertions(+), 17 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index da0ab1de..a2e899bc 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -677,6 +677,9 @@ class Results: else: raise MultipleValues + def dicts(self): + return [dict(row) for row in self.rows] + def __iter__(self): return iter(self.rows) diff --git a/datasette/views/row.py b/datasette/views/row.py index d802994e..f374fd94 100644 --- a/datasette/views/row.py +++ b/datasette/views/row.py @@ -277,8 +277,7 @@ class RowUpdateView(BaseView): results = await resolved.db.execute( resolved.sql, resolved.params, truncate=True ) - rows = list(results.rows) - result["row"] = dict(rows[0]) + result["row"] = results.dicts()[0] await self.ds.track_event( UpdateRowEvent( diff --git a/datasette/views/table.py b/datasette/views/table.py index ea044b36..82dab613 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -558,7 +558,7 @@ class TableInsertView(BaseView): ), args, ) - result["rows"] = [dict(r) for r in fetched_rows.rows] + result["rows"] = fetched_rows.dicts() else: result["rows"] = rows # We track the number of rows requested, but do not attempt to show which were actually diff --git a/docs/internals.rst b/docs/internals.rst index 4289c815..facbc224 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -1093,6 +1093,9 @@ The ``Results`` object also has the following properties and methods: ``.rows`` - list of ``sqlite3.Row`` This property provides direct access to the list of rows returned by the database. You can access specific rows by index using ``results.rows[0]``. 
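A minimal usage sketch for the ``dicts()`` method documented just below, using the ``roadside_attractions`` fixtures table that the new test queries (the surrounding async helper function is illustrative):

    async def attractions_as_dicts(datasette):
        db = datasette.get_database("fixtures")
        results = await db.execute("select pk, name from roadside_attractions")
        # Plain Python dictionaries instead of sqlite3.Row objects:
        return results.dicts()  # [{"pk": 1, "name": "The Mystery Spot"}, ...]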
+``.dicts()`` - list of ``dict`` + This method returns a list of Python dictionaries, one for each row. + ``.first()`` - row or None Returns the first row in the results, or ``None`` if no rows were returned. diff --git a/tests/test_api_write.py b/tests/test_api_write.py index 9c2b9b45..04e61261 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -58,8 +58,8 @@ async def test_insert_row(ds_write, content_type): assert response.status_code == 201 assert response.json()["ok"] is True assert response.json()["rows"] == [expected_row] - rows = (await ds_write.get_database("data").execute("select * from docs")).rows - assert dict(rows[0]) == expected_row + rows = (await ds_write.get_database("data").execute("select * from docs")).dicts() + assert rows[0] == expected_row # Analytics event event = last_event(ds_write) assert event.name == "insert-rows" @@ -118,12 +118,9 @@ async def test_insert_rows(ds_write, return_rows): assert not event.ignore assert not event.replace - actual_rows = [ - dict(r) - for r in ( - await ds_write.get_database("data").execute("select * from docs") - ).rows - ] + actual_rows = ( + await ds_write.get_database("data").execute("select * from docs") + ).dicts() assert len(actual_rows) == 20 assert actual_rows == [ {"id": i + 1, "title": "Test {}".format(i), "score": 1.0, "age": 5} @@ -469,12 +466,10 @@ async def test_insert_ignore_replace( assert event.ignore == ignore assert event.replace == replace - actual_rows = [ - dict(r) - for r in ( - await ds_write.get_database("data").execute("select * from docs") - ).rows - ] + actual_rows = ( + await ds_write.get_database("data").execute("select * from docs") + ).dicts() + assert actual_rows == expected_rows assert response.json()["ok"] is True if should_return: diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 0020668a..edfc6bc7 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -40,6 +40,17 @@ async def test_results_bool(db, expected): assert bool(results) is expected +@pytest.mark.asyncio +async def test_results_dicts(db): + results = await db.execute("select pk, name from roadside_attractions") + assert results.dicts() == [ + {"pk": 1, "name": "The Mystery Spot"}, + {"pk": 2, "name": "Winchester Mystery House"}, + {"pk": 3, "name": "Burlingame Museum of PEZ Memorabilia"}, + {"pk": 4, "name": "Bigfoot Discovery Museum"}, + ] + + @pytest.mark.parametrize( "query,expected", [ From 2170269258d1de38f4e518aa3e55e6b3ed202841 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 3 Sep 2024 08:37:26 -0700 Subject: [PATCH 014/251] New .core CSS class for inputs and buttons * Initial .core input/button classes, refs #2415 * Docs for the new .core CSS class, refs #2415 * Applied .core class everywhere that needs it, closes #2415 --- datasette/static/app.css | 33 +++++++++++++++------- datasette/templates/allow_debug.html | 2 +- datasette/templates/api_explorer.html | 4 +-- datasette/templates/create_token.html | 2 +- datasette/templates/database.html | 2 +- datasette/templates/logout.html | 2 +- datasette/templates/messages_debug.html | 2 +- datasette/templates/permissions_debug.html | 2 +- datasette/templates/query.html | 2 +- datasette/templates/table.html | 4 +-- docs/custom_templates.rst | 9 ++++++ docs/writing_plugins.rst | 3 +- tests/test_permissions.py | 2 +- 13 files changed, 46 insertions(+), 23 deletions(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index 562d6adb..f975f0ad 100644 --- 
a/datasette/static/app.css +++ b/datasette/static/app.css @@ -528,8 +528,11 @@ label.sort_by_desc { pre#sql-query { margin-bottom: 1em; } -form input[type=text], -form input[type=search] { + +.core input[type=text], +input.core[type=text], +.core input[type=search], +input.core[type=search] { border: 1px solid #ccc; border-radius: 3px; width: 60%; @@ -540,17 +543,25 @@ form input[type=search] { } /* Stop Webkit from styling search boxes in an inconsistent way */ /* https://css-tricks.com/webkit-html5-search-inputs/ comments */ -input[type=search] { +.core input[type=search], +input.core[type=search] { -webkit-appearance: textfield; } -input[type="search"]::-webkit-search-decoration, -input[type="search"]::-webkit-search-cancel-button, -input[type="search"]::-webkit-search-results-button, -input[type="search"]::-webkit-search-results-decoration { +.core input[type="search"]::-webkit-search-decoration, +input.core[type="search"]::-webkit-search-decoration, +.core input[type="search"]::-webkit-search-cancel-button, +input.core[type="search"]::-webkit-search-cancel-button, +.core input[type="search"]::-webkit-search-results-button, +input.core[type="search"]::-webkit-search-results-button, +.core input[type="search"]::-webkit-search-results-decoration, +input.core[type="search"]::-webkit-search-results-decoration { display: none; } -form input[type=submit], form button[type=button] { +.core input[type=submit], +.core button[type=button], +input.core[type=submit], +button.core[type=button] { font-weight: 400; cursor: pointer; text-align: center; @@ -563,14 +574,16 @@ form input[type=submit], form button[type=button] { border-radius: .25rem; } -form input[type=submit] { +.core input[type=submit], +input.core[type=submit] { color: #fff; background: linear-gradient(180deg, #007bff 0%, #4E79C7 100%); border-color: #007bff; -webkit-appearance: button; } -form button[type=button] { +.core button[type=button], +button.core[type=button] { color: #007bff; background-color: #fff; border-color: #007bff; diff --git a/datasette/templates/allow_debug.html b/datasette/templates/allow_debug.html index 04181531..610417d2 100644 --- a/datasette/templates/allow_debug.html +++ b/datasette/templates/allow_debug.html @@ -35,7 +35,7 @@ p.message-warning {

Use this tool to try out different actor and allow combinations. See Defining permissions with "allow" blocks for documentation.

-
+

diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index 109fb1e9..dc393c20 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -19,7 +19,7 @@

GET - +
@@ -29,7 +29,7 @@
POST - +
diff --git a/datasette/templates/create_token.html b/datasette/templates/create_token.html index 2be98d38..409fb8a9 100644 --- a/datasette/templates/create_token.html +++ b/datasette/templates/create_token.html @@ -39,7 +39,7 @@ {% endfor %} {% endif %} - +

diff --git a/datasette/templates/logout.html b/datasette/templates/logout.html index 4c4a7d11..c8fc642a 100644 --- a/datasette/templates/logout.html +++ b/datasette/templates/logout.html @@ -8,7 +8,7 @@

You are logged in as {{ display_actor(actor) }}

- +
diff --git a/datasette/templates/messages_debug.html b/datasette/templates/messages_debug.html index e0ab9a40..2940cd69 100644 --- a/datasette/templates/messages_debug.html +++ b/datasette/templates/messages_debug.html @@ -8,7 +8,7 @@

Set a message:

- +
diff --git a/datasette/templates/permissions_debug.html b/datasette/templates/permissions_debug.html index 5a5c1aa6..83891181 100644 --- a/datasette/templates/permissions_debug.html +++ b/datasette/templates/permissions_debug.html @@ -47,7 +47,7 @@ textarea {

This tool lets you simulate an actor and a permission check for that actor.

- +

diff --git a/datasette/templates/query.html b/datasette/templates/query.html index f7c8d0a3..a6e9a3aa 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -36,7 +36,7 @@ {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} - +

Custom SQL query{% if display_rows %} returning {% if truncated %}more than {% endif %}{{ "{:,}".format(display_rows|length) }} row{% if display_rows|length == 1 %}{% else %}s{% endif %}{% endif %}{% if not query_error %} ({{ show_hide_text }}) {% endif %}

diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 7246ff5d..c9e0e87b 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -48,7 +48,7 @@ {% endif %} - + {% if supports_search %}
{% endif %} @@ -152,7 +152,7 @@ object {% endif %}

- +

CSV options: diff --git a/docs/custom_templates.rst b/docs/custom_templates.rst index 534d8b33..8cc40f0f 100644 --- a/docs/custom_templates.rst +++ b/docs/custom_templates.rst @@ -83,6 +83,15 @@ database column they are representing, for example: +.. _customization_css: + +Writing custom CSS +~~~~~~~~~~~~~~~~~~ + +Custom templates need to take Datasette's default CSS into account. The pattern portfolio at ``/-/patterns`` (`example here `__) is a useful reference for understanding the available CSS classes. + +The ``core`` class is particularly useful - you can apply this directly to a ```` or ``