From e1c80efff8f4b0a53619546bb03e6dfd6cb42a32 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 16 Feb 2024 14:43:36 -0800 Subject: [PATCH 001/348] Note about activating alpha documentation versions on ReadTheDocs --- docs/contributing.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/contributing.rst b/docs/contributing.rst index ef022a4d..b678e637 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -254,6 +254,7 @@ Datasette releases are performed using tags. When a new release is published on * Re-point the "latest" tag on Docker Hub to the new image * Build a wheel bundle of the underlying Python source code * Push that new wheel up to PyPI: https://pypi.org/project/datasette/ +* If the release is an alpha, navigate to https://readthedocs.org/projects/datasette/versions/ and search for the tag name in the "Activate a version" filter, then mark that version as "active" to ensure it will appear on the public ReadTheDocs documentation site. To deploy new releases you will need to have push access to the main Datasette GitHub repository. From 5e0e440f2c8a0771b761b02801456e55e95e2a04 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 17 Feb 2024 20:28:15 -0800 Subject: [PATCH 002/348] database.execute_write_fn(transaction=True) parameter, closes #2277 --- datasette/database.py | 31 +++++++++++++++++++++++-------- docs/internals.rst | 15 ++++++++++----- tests/test_internals_database.py | 27 +++++++++++++++++++++++++++ 3 files changed, 60 insertions(+), 13 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 707d8f85..d34aac73 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -179,17 +179,25 @@ class Database: # Threaded mode - send to write thread return await self._send_to_write_thread(fn, isolated_connection=True) - async def execute_write_fn(self, fn, block=True): + async def execute_write_fn(self, fn, block=True, transaction=True): if self.ds.executor is None: # non-threaded mode if self._write_connection is None: self._write_connection = self.connect(write=True) self.ds._prepare_connection(self._write_connection, self.name) - return fn(self._write_connection) + if transaction: + with self._write_connection: + return fn(self._write_connection) + else: + return fn(self._write_connection) else: - return await self._send_to_write_thread(fn, block) + return await self._send_to_write_thread( + fn, block=block, transaction=transaction + ) - async def _send_to_write_thread(self, fn, block=True, isolated_connection=False): + async def _send_to_write_thread( + self, fn, block=True, isolated_connection=False, transaction=True + ): if self._write_queue is None: self._write_queue = queue.Queue() if self._write_thread is None: @@ -202,7 +210,9 @@ class Database: self._write_thread.start() task_id = uuid.uuid5(uuid.NAMESPACE_DNS, "datasette.io") reply_queue = janus.Queue() - self._write_queue.put(WriteTask(fn, task_id, reply_queue, isolated_connection)) + self._write_queue.put( + WriteTask(fn, task_id, reply_queue, isolated_connection, transaction) + ) if block: result = await reply_queue.async_q.get() if isinstance(result, Exception): @@ -244,7 +254,11 @@ class Database: pass else: try: - result = task.fn(conn) + if task.transaction: + with conn: + result = task.fn(conn) + else: + result = task.fn(conn) except Exception as e: sys.stderr.write("{}\n".format(e)) sys.stderr.flush() @@ -554,13 +568,14 @@ class Database: class WriteTask: - __slots__ = ("fn", "task_id", "reply_queue", "isolated_connection") + __slots__ = ("fn", 
"task_id", "reply_queue", "isolated_connection", "transaction") - def __init__(self, fn, task_id, reply_queue, isolated_connection): + def __init__(self, fn, task_id, reply_queue, isolated_connection, transaction): self.fn = fn self.task_id = task_id self.reply_queue = reply_queue self.isolated_connection = isolated_connection + self.transaction = transaction class QueryInterrupted(Exception): diff --git a/docs/internals.rst b/docs/internals.rst index bd7a70b5..6ca62423 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -1010,7 +1010,9 @@ You can pass additional SQL parameters as a tuple or dictionary. The method will block until the operation is completed, and the return value will be the return from calling ``conn.execute(...)`` using the underlying ``sqlite3`` Python library. -If you pass ``block=False`` this behaviour changes to "fire and forget" - queries will be added to the write queue and executed in a separate thread while your code can continue to do other things. The method will return a UUID representing the queued task. +If you pass ``block=False`` this behavior changes to "fire and forget" - queries will be added to the write queue and executed in a separate thread while your code can continue to do other things. The method will return a UUID representing the queued task. + +Each call to ``execute_write()`` will be executed inside a transaction. .. _database_execute_write_script: @@ -1019,6 +1021,8 @@ await db.execute_write_script(sql, block=True) Like ``execute_write()`` but can be used to send multiple SQL statements in a single string separated by semicolons, using the ``sqlite3`` `conn.executescript() `__ method. +Each call to ``execute_write_script()`` will be executed inside a transaction. + .. _database_execute_write_many: await db.execute_write_many(sql, params_seq, block=True) @@ -1033,10 +1037,12 @@ Like ``execute_write()`` but uses the ``sqlite3`` `conn.executemany() 5") - conn.commit() return conn.execute( "select count(*) from some_table" ).fetchone()[0] @@ -1069,7 +1074,7 @@ The value returned from ``await database.execute_write_fn(...)`` will be the ret If your function raises an exception that exception will be propagated up to the ``await`` line. -If you see ``OperationalError: database table is locked`` errors you should check that you remembered to explicitly call ``conn.commit()`` in your write function. +By default your function will be executed inside a transaction. You can pass ``transaction=False`` to disable this behavior, though if you do that you should be careful to manually apply transactions - ideally using the ``with conn:`` pattern, or you may see ``OperationalError: database table is locked`` errors. If you specify ``block=False`` the method becomes fire-and-forget, queueing your function to be executed and then allowing your code after the call to ``.execute_write_fn()`` to continue running while the underlying thread waits for an opportunity to run your function. A UUID representing the queued task will be returned. Any exceptions in your code will be silently swallowed. 
diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index dd68a6cb..57e75046 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -66,6 +66,33 @@ async def test_execute_fn(db): assert 2 == await db.execute_fn(get_1_plus_1) +@pytest.mark.asyncio +async def test_execute_fn_transaction_false(): + datasette = Datasette(memory=True) + db = datasette.add_memory_database("test_execute_fn_transaction_false") + + def run(conn): + try: + with conn: + conn.execute("create table foo (id integer primary key)") + conn.execute("insert into foo (id) values (44)") + # Table should exist + assert ( + conn.execute( + 'select count(*) from sqlite_master where name = "foo"' + ).fetchone()[0] + == 1 + ) + assert conn.execute("select id from foo").fetchall()[0][0] == 44 + raise ValueError("Cancel commit") + except ValueError: + pass + # Row should NOT exist + assert conn.execute("select count(*) from foo").fetchone()[0] == 0 + + await db.execute_write_fn(run, transaction=False) + + @pytest.mark.parametrize( "tables,exists", ( From 10f9ba1a0050724ba47a089861606bef58a4087f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 17 Feb 2024 20:51:19 -0800 Subject: [PATCH 003/348] Take advantage of execute_write_fn(transaction=True) A bunch of places no longer need to do manual transaction handling thanks to this change. Refs #2277 --- datasette/database.py | 9 +++------ datasette/utils/internal_db.py | 27 +++++++++++++-------------- tests/test_internals_database.py | 10 ++++------ 3 files changed, 20 insertions(+), 26 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index d34aac73..4e590d3a 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -123,8 +123,7 @@ class Database: async def execute_write(self, sql, params=None, block=True): def _inner(conn): - with conn: - return conn.execute(sql, params or []) + return conn.execute(sql, params or []) with trace("sql", database=self.name, sql=sql.strip(), params=params): results = await self.execute_write_fn(_inner, block=block) @@ -132,8 +131,7 @@ class Database: async def execute_write_script(self, sql, block=True): def _inner(conn): - with conn: - return conn.executescript(sql) + return conn.executescript(sql) with trace("sql", database=self.name, sql=sql.strip(), executescript=True): results = await self.execute_write_fn(_inner, block=block) @@ -149,8 +147,7 @@ class Database: count += 1 yield param - with conn: - return conn.executemany(sql, count_params(params_seq)), count + return conn.executemany(sql, count_params(params_seq)), count with trace( "sql", database=self.name, sql=sql.strip(), executemany=True diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py index dd0d3a9d..dbfcceb4 100644 --- a/datasette/utils/internal_db.py +++ b/datasette/utils/internal_db.py @@ -69,20 +69,19 @@ async def populate_schema_tables(internal_db, db): database_name = db.name def delete_everything(conn): - with conn: - conn.execute( - "DELETE FROM catalog_tables WHERE database_name = ?", [database_name] - ) - conn.execute( - "DELETE FROM catalog_columns WHERE database_name = ?", [database_name] - ) - conn.execute( - "DELETE FROM catalog_foreign_keys WHERE database_name = ?", - [database_name], - ) - conn.execute( - "DELETE FROM catalog_indexes WHERE database_name = ?", [database_name] - ) + conn.execute( + "DELETE FROM catalog_tables WHERE database_name = ?", [database_name] + ) + conn.execute( + "DELETE FROM catalog_columns WHERE database_name = ?", 
[database_name] + ) + conn.execute( + "DELETE FROM catalog_foreign_keys WHERE database_name = ?", + [database_name], + ) + conn.execute( + "DELETE FROM catalog_indexes WHERE database_name = ?", [database_name] + ) await internal_db.execute_write_fn(delete_everything) diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 57e75046..1c155cf3 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -501,9 +501,8 @@ async def test_execute_write_has_correctly_prepared_connection(db): @pytest.mark.asyncio async def test_execute_write_fn_block_false(db): def write_fn(conn): - with conn: - conn.execute("delete from roadside_attractions where pk = 1;") - row = conn.execute("select count(*) from roadside_attractions").fetchone() + conn.execute("delete from roadside_attractions where pk = 1;") + row = conn.execute("select count(*) from roadside_attractions").fetchone() return row[0] task_id = await db.execute_write_fn(write_fn, block=False) @@ -513,9 +512,8 @@ async def test_execute_write_fn_block_false(db): @pytest.mark.asyncio async def test_execute_write_fn_block_true(db): def write_fn(conn): - with conn: - conn.execute("delete from roadside_attractions where pk = 1;") - row = conn.execute("select count(*) from roadside_attractions").fetchone() + conn.execute("delete from roadside_attractions where pk = 1;") + row = conn.execute("select count(*) from roadside_attractions").fetchone() return row[0] new_count = await db.execute_write_fn(write_fn) From a4fa1ef3bd6a6117118b5cdd64aca2308c21604b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 17 Feb 2024 20:56:15 -0800 Subject: [PATCH 004/348] Release 1.0a10 Refs #2277 --- datasette/version.py | 2 +- docs/changelog.rst | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index f5e07ac8..809c434f 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "1.0a9" +__version__ = "1.0a10" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index e567f422..92f198af 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,17 @@ Changelog ========= +.. _v1_0_a10: + +1.0a10 (2024-02-17) +------------------- + +The only changes in this alpha correspond to the way Datasette handles database transactions. (:issue:`2277`) + +- The :ref:`database.execute_write_fn() ` method has a new ``transaction=True`` parameter. This defaults to ``True`` which means all functions executed using this method are now automatically wrapped in a transaction - previously the functions needed to roll transaction handling on their own, and many did not. +- Pass ``transaction=False`` to ``execute_write_fn()`` if you want to manually handle transactions in your function. +- Several internal Datasette features, including parts of the :ref:`JSON write API `, had been failing to wrap their operations in a transaction. This has been fixed by the new ``transaction=True`` default. + .. 
_v1_0_a9: 1.0a9 (2024-02-16) From 81629dbeffb5cee9086bc956ce3a9ab7d051f4d1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 17 Feb 2024 21:03:41 -0800 Subject: [PATCH 005/348] Upgrade GitHub Actions, including PyPI publishing --- .github/workflows/publish.yml | 60 ++++++++++++++--------------------- .github/workflows/test.yml | 13 +++----- 2 files changed, 27 insertions(+), 46 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 64a03a77..55fc0eb9 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -12,20 +12,15 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v3 - name: Configure pip caching - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} - restore-keys: | - ${{ runner.os }}-pip- + cache: pip + cache-dependency-path: setup.py - name: Install dependencies run: | pip install -e '.[test]' @@ -36,47 +31,38 @@ jobs: deploy: runs-on: ubuntu-latest needs: [test] + environment: release + permissions: + id-token: write steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: '3.11' - - uses: actions/cache@v3 - name: Configure pip caching - with: - path: ~/.cache/pip - key: ${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }} - restore-keys: | - ${{ runner.os }}-publish-pip- + python-version: '3.12' + cache: pip + cache-dependency-path: setup.py - name: Install dependencies run: | - pip install setuptools wheel twine - - name: Publish - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} + pip install setuptools wheel build + - name: Build run: | - python setup.py sdist bdist_wheel - twine upload dist/* + python -m build + - name: Publish + uses: pypa/gh-action-pypi-publish@release/v1 deploy_static_docs: runs-on: ubuntu-latest needs: [deploy] if: "!github.event.release.prerelease" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: '3.9' - - uses: actions/cache@v2 - name: Configure pip caching - with: - path: ~/.cache/pip - key: ${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }} - restore-keys: | - ${{ runner.os }}-publish-pip- + cache: pip + cache-dependency-path: setup.py - name: Install dependencies run: | python -m pip install -e .[docs] @@ -105,7 +91,7 @@ jobs: needs: [deploy] if: "!github.event.release.prerelease" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Build and push to Docker Hub env: DOCKER_USER: ${{ secrets.DOCKER_USER }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 656b0b1c..3ac8756d 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -12,19 +12,14 @@ jobs: matrix: python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version 
}} allow-prereleases: true - - uses: actions/cache@v3 - name: Configure pip caching - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} - restore-keys: | - ${{ runner.os }}-pip- + cache: pip + cache-dependency-path: setup.py - name: Build extension for --load-extension test run: |- (cd tests && gcc ext.c -fPIC -shared -o ext.so) From 3856a8cb244f1338d2c4bceb76a510022d88ade5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 19 Feb 2024 12:51:14 -0800 Subject: [PATCH 006/348] Consistent Permission denied:, refs #2279 --- datasette/views/database.py | 6 +++--- tests/test_api_write.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index 2a8b40cc..56fc6f8c 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -860,7 +860,7 @@ class TableCreateView(BaseView): if not await self.ds.permission_allowed( request.actor, "update-row", resource=database_name ): - return _error(["Permission denied - need update-row"], 403) + return _error(["Permission denied: need update-row"], 403) table_name = data.get("table") if not table_name: @@ -884,7 +884,7 @@ class TableCreateView(BaseView): if not await self.ds.permission_allowed( request.actor, "insert-row", resource=database_name ): - return _error(["Permission denied - need insert-row"], 403) + return _error(["Permission denied: need insert-row"], 403) alter = False if rows or row: @@ -897,7 +897,7 @@ class TableCreateView(BaseView): if not await self.ds.permission_allowed( request.actor, "alter-table", resource=database_name ): - return _error(["Permission denied - need alter-table"], 403) + return _error(["Permission denied: need alter-table"], 403) alter = True if columns: diff --git a/tests/test_api_write.py b/tests/test_api_write.py index 0eb915ba..634f5ee9 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -1316,7 +1316,7 @@ async def test_create_table( ["create-table"], {"table": "t", "rows": [{"name": "c"}]}, 403, - ["Permission denied - need insert-row"], + ["Permission denied: need insert-row"], ), # This should work: ( @@ -1330,7 +1330,7 @@ async def test_create_table( ["create-table", "insert-row"], {"table": "t", "rows": [{"id": 1}], "pk": "id", "replace": True}, 403, - ["Permission denied - need update-row"], + ["Permission denied: need update-row"], ), ), ) @@ -1567,7 +1567,7 @@ async def test_create_using_alter_against_existing_table( assert response2.status_code == 403 assert response2.json() == { "ok": False, - "errors": ["Permission denied - need alter-table"], + "errors": ["Permission denied: need alter-table"], } else: assert response2.status_code == 201 From b36a2d8f4b566a3a4902cdaa7a549241e0e8c881 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 19 Feb 2024 12:55:51 -0800 Subject: [PATCH 007/348] Require update-row to use insert replace, closes #2279 --- datasette/views/table.py | 5 +++++ docs/json_api.rst | 4 ++-- tests/test_api_write.py | 8 ++++++++ 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 1c187692..6d0d9885 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -485,6 +485,11 @@ class TableInsertView(BaseView): if upsert and (ignore or replace): return _error(["Upsert does not support ignore or replace"], 400) + if replace and not await self.ds.permission_allowed( + request.actor, "update-row", resource=(database_name, table_name) + ): + return 
_error(['Permission denied: need update-row to use "replace"'], 403) + initial_schema = None if alter: # Must have alter-table permission diff --git a/docs/json_api.rst b/docs/json_api.rst index c401d97e..366f74b2 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -616,7 +616,7 @@ Pass ``"ignore": true`` to ignore these errors and insert the other rows: "ignore": true } -Or you can pass ``"replace": true`` to replace any rows with conflicting primary keys with the new values. +Or you can pass ``"replace": true`` to replace any rows with conflicting primary keys with the new values. This requires the :ref:`permissions_update_row` permission. Pass ``"alter: true`` to automatically add any missing columns to the table. This requires the :ref:`permissions_alter_table` permission. @@ -854,7 +854,7 @@ The JSON here describes the table that will be created: * ``pks`` can be used instead of ``pk`` to create a compound primary key. It should be a JSON list of column names to use in that primary key. * ``ignore`` can be set to ``true`` to ignore existing rows by primary key if the table already exists. -* ``replace`` can be set to ``true`` to replace existing rows by primary key if the table already exists. +* ``replace`` can be set to ``true`` to replace existing rows by primary key if the table already exists. This requires the :ref:`permissions_update_row` permission. * ``alter`` can be set to ``true`` if you want to automatically add any missing columns to the table. This requires the :ref:`permissions_alter_table` permission. If the table is successfully created this will return a ``201`` status code and the following response: diff --git a/tests/test_api_write.py b/tests/test_api_write.py index 634f5ee9..6a7ddeb6 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -221,6 +221,14 @@ async def test_insert_rows(ds_write, return_rows): 400, ['Cannot use "ignore" and "replace" at the same time'], ), + ( + # Replace is not allowed if you don't have update-row + "/data/docs/-/insert", + {"rows": [{"title": "Test"}], "replace": True}, + "insert-but-not-update", + 403, + ['Permission denied: need update-row to use "replace"'], + ), ( "/data/docs/-/insert", {"rows": [{"title": "Test"}], "invalid_param": True}, From 392ca2e24cc93a3918d07718f40524857d626d14 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 19 Feb 2024 13:40:48 -0800 Subject: [PATCH 008/348] Improvements to table column cog menu display, closes #2263 - Repositions if menu would cause a horizontal scrollbar - Arrow tip on menu now attempts to align with cog icon on column --- datasette/static/table.js | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/datasette/static/table.js b/datasette/static/table.js index 778457c5..0c54a472 100644 --- a/datasette/static/table.js +++ b/datasette/static/table.js @@ -217,6 +217,17 @@ const initDatasetteTable = function (manager) { menuList.appendChild(menuItem); }); + // Measure width of menu and adjust position if too far right + const menuWidth = menu.offsetWidth; + const windowWidth = window.innerWidth; + if (menuLeft + menuWidth > windowWidth) { + menu.style.left = windowWidth - menuWidth - 20 + "px"; + } + // Align menu .hook arrow with the column cog icon + const hook = menu.querySelector('.hook'); + const icon = th.querySelector('.dropdown-menu-icon'); + const iconRect = icon.getBoundingClientRect(); + hook.style.left = (iconRect.left - menuLeft + 1) + 'px'; } var svg = document.createElement("div"); From 27409a78929b4baa017cce2cc0ca636603ed6d37 Mon Sep 
17 00:00:00 2001 From: Simon Willison Date: Mon, 19 Feb 2024 14:01:55 -0800 Subject: [PATCH 009/348] Fix for hook position in wide column names, refs #2263 --- datasette/static/table.js | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/datasette/static/table.js b/datasette/static/table.js index 0c54a472..4f81b2e5 100644 --- a/datasette/static/table.js +++ b/datasette/static/table.js @@ -227,7 +227,15 @@ const initDatasetteTable = function (manager) { const hook = menu.querySelector('.hook'); const icon = th.querySelector('.dropdown-menu-icon'); const iconRect = icon.getBoundingClientRect(); - hook.style.left = (iconRect.left - menuLeft + 1) + 'px'; + const hookLeft = (iconRect.left - menuLeft + 1) + 'px'; + hook.style.left = hookLeft; + // Move the whole menu right if the hook is too far right + const menuRect = menu.getBoundingClientRect(); + if (iconRect.right > menuRect.right) { + menu.style.left = (iconRect.right - menuWidth) + 'px'; + // And move hook tip as well + hook.style.left = (menuWidth - 13) + 'px'; + } } var svg = document.createElement("div"); From 26300738e3c6e7ad515bd513063f57249a05000a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 19 Feb 2024 14:17:37 -0800 Subject: [PATCH 010/348] Fixes for permissions debug page, closes #2278 --- datasette/templates/permissions_debug.html | 10 +++++----- datasette/views/special.py | 17 +++++++++-------- tests/test_permissions.py | 8 ++++++++ 3 files changed, 22 insertions(+), 13 deletions(-) diff --git a/datasette/templates/permissions_debug.html b/datasette/templates/permissions_debug.html index 36a12acc..5a5c1aa6 100644 --- a/datasette/templates/permissions_debug.html +++ b/datasette/templates/permissions_debug.html @@ -57,7 +57,7 @@ textarea {

@@ -71,19 +71,19 @@ textarea { +{% endif %} + {% endblock %} diff --git a/datasette/url_builder.py b/datasette/url_builder.py index 9c6bbde0..16b3d42b 100644 --- a/datasette/url_builder.py +++ b/datasette/url_builder.py @@ -31,6 +31,12 @@ class Urls: db = self.ds.get_database(database) return self.path(tilde_encode(db.route), format=format) + def database_query(self, database, sql, format=None): + path = f"{self.database(database)}/-/query?" + urllib.parse.urlencode( + {"sql": sql} + ) + return self.path(path, format=format) + def table(self, database, table, format=None): path = f"{self.database(database)}/{tilde_encode(table)}" if format is not None: diff --git a/datasette/views/table.py b/datasette/views/table.py index d71efeb0..ea044b36 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -929,6 +929,7 @@ async def table_view_traced(datasette, request): database=resolved.db.name, table=resolved.table, ), + count_limit=resolved.db.count_limit, ), request=request, view_name="table", @@ -1280,6 +1281,9 @@ async def table_view_data( if extra_extras: extras.update(extra_extras) + async def extra_count_sql(): + return count_sql + async def extra_count(): "Total count of rows matching these filters" # Calculate the total count for this query @@ -1299,8 +1303,11 @@ async def table_view_data( # Otherwise run a select count(*) ... if count_sql and count is None and not nocount: + count_sql_limited = ( + f"select count(*) from (select * {from_sql} limit 10001)" + ) try: - count_rows = list(await db.execute(count_sql, from_sql_params)) + count_rows = list(await db.execute(count_sql_limited, from_sql_params)) count = count_rows[0][0] except QueryInterrupted: pass @@ -1615,6 +1622,7 @@ async def table_view_data( "facet_results", "facets_timed_out", "count", + "count_sql", "human_description_en", "next_url", "metadata", @@ -1647,6 +1655,7 @@ async def table_view_data( registry = Registry( extra_count, + extra_count_sql, extra_facet_results, extra_facets_timed_out, extra_suggested_facets, From dc288056b81a3635bdb02a6d0121887db2720e5e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 21 Aug 2024 19:56:02 -0700 Subject: [PATCH 109/348] Better handling of errors for count all button, refs #2408 --- datasette/templates/table.html | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 187f0143..7246ff5d 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -42,7 +42,7 @@ {% if count or human_description_en %}

{% if count == count_limit + 1 %}>{{ "{:,}".format(count_limit) }} rows - {% if allow_execute_sql and query.sql %} count all rows{% endif %} + {% if allow_execute_sql and query.sql %} count all{% endif %} {% elif count or count == 0 %}{{ "{:,}".format(count) }} row{% if count == 1 %}{% else %}s{% endif %}{% endif %} {% if human_description_en %}{{ human_description_en }}{% endif %}

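The ``count == count_limit + 1`` comparison above works because the table view now counts at most ``count_limit + 1`` rows, using the capped ``select count(*) from (select * ... limit ...)`` query added to ``table_view_data()`` earlier in this series. A rough sketch of that pattern against a ``Database`` object (the ``db`` variable, table name and 10,000 limit are assumptions for illustration):

    async def capped_count(db, table, limit=10_000):
        # Counting at most limit + 1 rows keeps the query cheap on very large
        # tables; a result of limit + 1 means "more than limit rows".
        sql = f"select count(*) from (select * from [{table}] limit {limit + 1})"
        result = await db.execute(sql)
        count = result.single_value()
        if count == limit + 1:
            return f">{limit:,} rows"
        return f"{count:,} rows"
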
@@ -180,7 +180,7 @@ document.addEventListener('DOMContentLoaded', function() { const countLink = document.querySelector('a.count-sql'); if (countLink) { - countLink.addEventListener('click', function(ev) { + countLink.addEventListener('click', async function(ev) { ev.preventDefault(); // Replace countLink with span with same style attribute const span = document.createElement('span'); @@ -189,14 +189,23 @@ document.addEventListener('DOMContentLoaded', function() { countLink.replaceWith(span); countLink.setAttribute('disabled', 'disabled'); let url = countLink.href.replace(/(\?|$)/, '.json$1'); - fetch(url) - .then(response => response.json()) - .then(data => { - const count = data['rows'][0]['count(*)']; - const formattedCount = count.toLocaleString(); - span.closest('h3').textContent = formattedCount + ' rows'; - }) - .catch(error => countLink.textContent = 'error'); + try { + const response = await fetch(url); + console.log({response}); + const data = await response.json(); + console.log({data}); + if (!response.ok) { + console.log('throw error'); + throw new Error(data.title || data.error); + } + const count = data['rows'][0]['count(*)']; + const formattedCount = count.toLocaleString(); + span.closest('h3').textContent = formattedCount + ' rows'; + } catch (error) { + console.log('Update', span, 'with error message', error); + span.textContent = error.message; + span.style.color = 'red'; + } }); } }); From 92c4d41ca605e0837a2711ee52fde9cf1eea74d0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 1 Sep 2024 17:20:41 -0700 Subject: [PATCH 110/348] results.dicts() method, closes #2414 --- datasette/database.py | 3 +++ datasette/views/row.py | 3 +-- datasette/views/table.py | 2 +- docs/internals.rst | 3 +++ tests/test_api_write.py | 23 +++++++++-------------- tests/test_internals_database.py | 11 +++++++++++ 6 files changed, 28 insertions(+), 17 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index da0ab1de..a2e899bc 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -677,6 +677,9 @@ class Results: else: raise MultipleValues + def dicts(self): + return [dict(row) for row in self.rows] + def __iter__(self): return iter(self.rows) diff --git a/datasette/views/row.py b/datasette/views/row.py index d802994e..f374fd94 100644 --- a/datasette/views/row.py +++ b/datasette/views/row.py @@ -277,8 +277,7 @@ class RowUpdateView(BaseView): results = await resolved.db.execute( resolved.sql, resolved.params, truncate=True ) - rows = list(results.rows) - result["row"] = dict(rows[0]) + result["row"] = results.dicts()[0] await self.ds.track_event( UpdateRowEvent( diff --git a/datasette/views/table.py b/datasette/views/table.py index ea044b36..82dab613 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -558,7 +558,7 @@ class TableInsertView(BaseView): ), args, ) - result["rows"] = [dict(r) for r in fetched_rows.rows] + result["rows"] = fetched_rows.dicts() else: result["rows"] = rows # We track the number of rows requested, but do not attempt to show which were actually diff --git a/docs/internals.rst b/docs/internals.rst index 4289c815..facbc224 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -1093,6 +1093,9 @@ The ``Results`` object also has the following properties and methods: ``.rows`` - list of ``sqlite3.Row`` This property provides direct access to the list of rows returned by the database. You can access specific rows by index using ``results.rows[0]``. 
+``.dicts()`` - list of ``dict`` + This method returns a list of Python dictionaries, one for each row. + ``.first()`` - row or None Returns the first row in the results, or ``None`` if no rows were returned. diff --git a/tests/test_api_write.py b/tests/test_api_write.py index 9c2b9b45..04e61261 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -58,8 +58,8 @@ async def test_insert_row(ds_write, content_type): assert response.status_code == 201 assert response.json()["ok"] is True assert response.json()["rows"] == [expected_row] - rows = (await ds_write.get_database("data").execute("select * from docs")).rows - assert dict(rows[0]) == expected_row + rows = (await ds_write.get_database("data").execute("select * from docs")).dicts() + assert rows[0] == expected_row # Analytics event event = last_event(ds_write) assert event.name == "insert-rows" @@ -118,12 +118,9 @@ async def test_insert_rows(ds_write, return_rows): assert not event.ignore assert not event.replace - actual_rows = [ - dict(r) - for r in ( - await ds_write.get_database("data").execute("select * from docs") - ).rows - ] + actual_rows = ( + await ds_write.get_database("data").execute("select * from docs") + ).dicts() assert len(actual_rows) == 20 assert actual_rows == [ {"id": i + 1, "title": "Test {}".format(i), "score": 1.0, "age": 5} @@ -469,12 +466,10 @@ async def test_insert_ignore_replace( assert event.ignore == ignore assert event.replace == replace - actual_rows = [ - dict(r) - for r in ( - await ds_write.get_database("data").execute("select * from docs") - ).rows - ] + actual_rows = ( + await ds_write.get_database("data").execute("select * from docs") + ).dicts() + assert actual_rows == expected_rows assert response.json()["ok"] is True if should_return: diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 0020668a..edfc6bc7 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -40,6 +40,17 @@ async def test_results_bool(db, expected): assert bool(results) is expected +@pytest.mark.asyncio +async def test_results_dicts(db): + results = await db.execute("select pk, name from roadside_attractions") + assert results.dicts() == [ + {"pk": 1, "name": "The Mystery Spot"}, + {"pk": 2, "name": "Winchester Mystery House"}, + {"pk": 3, "name": "Burlingame Museum of PEZ Memorabilia"}, + {"pk": 4, "name": "Bigfoot Discovery Museum"}, + ] + + @pytest.mark.parametrize( "query,expected", [ From 2170269258d1de38f4e518aa3e55e6b3ed202841 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 3 Sep 2024 08:37:26 -0700 Subject: [PATCH 111/348] New .core CSS class for inputs and buttons * Initial .core input/button classes, refs #2415 * Docs for the new .core CSS class, refs #2415 * Applied .core class everywhere that needs it, closes #2415 --- datasette/static/app.css | 33 +++++++++++++++------- datasette/templates/allow_debug.html | 2 +- datasette/templates/api_explorer.html | 4 +-- datasette/templates/create_token.html | 2 +- datasette/templates/database.html | 2 +- datasette/templates/logout.html | 2 +- datasette/templates/messages_debug.html | 2 +- datasette/templates/permissions_debug.html | 2 +- datasette/templates/query.html | 2 +- datasette/templates/table.html | 4 +-- docs/custom_templates.rst | 9 ++++++ docs/writing_plugins.rst | 3 +- tests/test_permissions.py | 2 +- 13 files changed, 46 insertions(+), 23 deletions(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index 562d6adb..f975f0ad 100644 --- 
a/datasette/static/app.css +++ b/datasette/static/app.css @@ -528,8 +528,11 @@ label.sort_by_desc { pre#sql-query { margin-bottom: 1em; } -form input[type=text], -form input[type=search] { + +.core input[type=text], +input.core[type=text], +.core input[type=search], +input.core[type=search] { border: 1px solid #ccc; border-radius: 3px; width: 60%; @@ -540,17 +543,25 @@ form input[type=search] { } /* Stop Webkit from styling search boxes in an inconsistent way */ /* https://css-tricks.com/webkit-html5-search-inputs/ comments */ -input[type=search] { +.core input[type=search], +input.core[type=search] { -webkit-appearance: textfield; } -input[type="search"]::-webkit-search-decoration, -input[type="search"]::-webkit-search-cancel-button, -input[type="search"]::-webkit-search-results-button, -input[type="search"]::-webkit-search-results-decoration { +.core input[type="search"]::-webkit-search-decoration, +input.core[type="search"]::-webkit-search-decoration, +.core input[type="search"]::-webkit-search-cancel-button, +input.core[type="search"]::-webkit-search-cancel-button, +.core input[type="search"]::-webkit-search-results-button, +input.core[type="search"]::-webkit-search-results-button, +.core input[type="search"]::-webkit-search-results-decoration, +input.core[type="search"]::-webkit-search-results-decoration { display: none; } -form input[type=submit], form button[type=button] { +.core input[type=submit], +.core button[type=button], +input.core[type=submit], +button.core[type=button] { font-weight: 400; cursor: pointer; text-align: center; @@ -563,14 +574,16 @@ form input[type=submit], form button[type=button] { border-radius: .25rem; } -form input[type=submit] { +.core input[type=submit], +input.core[type=submit] { color: #fff; background: linear-gradient(180deg, #007bff 0%, #4E79C7 100%); border-color: #007bff; -webkit-appearance: button; } -form button[type=button] { +.core button[type=button], +button.core[type=button] { color: #007bff; background-color: #fff; border-color: #007bff; diff --git a/datasette/templates/allow_debug.html b/datasette/templates/allow_debug.html index 04181531..610417d2 100644 --- a/datasette/templates/allow_debug.html +++ b/datasette/templates/allow_debug.html @@ -35,7 +35,7 @@ p.message-warning {

Use this tool to try out different actor and allow combinations. See Defining permissions with "allow" blocks for documentation.

-
+

diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index 109fb1e9..dc393c20 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -19,7 +19,7 @@

GET - +
@@ -29,7 +29,7 @@
POST - +
diff --git a/datasette/templates/create_token.html b/datasette/templates/create_token.html index 2be98d38..409fb8a9 100644 --- a/datasette/templates/create_token.html +++ b/datasette/templates/create_token.html @@ -39,7 +39,7 @@ {% endfor %} {% endif %} - +

diff --git a/datasette/templates/logout.html b/datasette/templates/logout.html index 4c4a7d11..c8fc642a 100644 --- a/datasette/templates/logout.html +++ b/datasette/templates/logout.html @@ -8,7 +8,7 @@

You are logged in as {{ display_actor(actor) }}

- +
diff --git a/datasette/templates/messages_debug.html b/datasette/templates/messages_debug.html index e0ab9a40..2940cd69 100644 --- a/datasette/templates/messages_debug.html +++ b/datasette/templates/messages_debug.html @@ -8,7 +8,7 @@

Set a message:

- +
diff --git a/datasette/templates/permissions_debug.html b/datasette/templates/permissions_debug.html index 5a5c1aa6..83891181 100644 --- a/datasette/templates/permissions_debug.html +++ b/datasette/templates/permissions_debug.html @@ -47,7 +47,7 @@ textarea {

This tool lets you simulate an actor and a permission check for that actor.

- +

diff --git a/datasette/templates/query.html b/datasette/templates/query.html index f7c8d0a3..a6e9a3aa 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -36,7 +36,7 @@ {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} - +

Custom SQL query{% if display_rows %} returning {% if truncated %}more than {% endif %}{{ "{:,}".format(display_rows|length) }} row{% if display_rows|length == 1 %}{% else %}s{% endif %}{% endif %}{% if not query_error %} ({{ show_hide_text }}) {% endif %}

diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 7246ff5d..c9e0e87b 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -48,7 +48,7 @@ {% endif %} - + {% if supports_search %}
{% endif %} @@ -152,7 +152,7 @@ object {% endif %}

- +

CSV options: diff --git a/docs/custom_templates.rst b/docs/custom_templates.rst index 534d8b33..8cc40f0f 100644 --- a/docs/custom_templates.rst +++ b/docs/custom_templates.rst @@ -83,6 +83,15 @@ database column they are representing, for example: +.. _customization_css: + +Writing custom CSS +~~~~~~~~~~~~~~~~~~ + +Custom templates need to take Datasette's default CSS into account. The pattern portfolio at ``/-/patterns`` (`example here `__) is a useful reference for understanding the available CSS classes. + +The ``core`` class is particularly useful - you can apply this directly to a ```` or ``