From 86e4b3d33685a31668bd4ecc56e403e419940fe6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 Apr 2022 08:10:55 -0700 Subject: [PATCH 001/891] 1-0-alpha branch with preview of TableView ?_extra=, refs #1729 The tests don't pass yet but this will let me deploy a preview using the mechanism from ##1442 --- .github/workflows/deploy-latest.yml | 3 ++- datasette/renderer.py | 35 +++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 1 deletion(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index a61f6629..3ee4cfc7 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -3,7 +3,8 @@ name: Deploy latest.datasette.io on: push: branches: - - main + - main + - 1-0-alpha jobs: deploy: diff --git a/datasette/renderer.py b/datasette/renderer.py index 45089498..8e225608 100644 --- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -43,6 +43,41 @@ def json_renderer(args, data, view_name): if "rows" in data and not value_as_boolean(args.get("_json_infinity", "0")): data["rows"] = [remove_infinites(row) for row in data["rows"]] + # Start building the default JSON here + columns = data["columns"] + next_url = data.get("next_url") + output = { + "rows": [dict(zip(columns, row)) for row in data["rows"]], + "next": data["next"], + "next_url": next_url, + } + + extras = set(args.getlist("_extra")) + + extras_map = { + # _extra= : data[field] + "count": "filtered_table_rows_count", + "facet_results": "facet_results", + "suggested_facets": "suggested_facets", + "columns": "columns", + "primary_keys": "primary_keys", + "query_ms": "query_ms", + "query": "query", + } + for extra_key, data_key in extras_map.items(): + if extra_key in extras: + output[extra_key] = data[data_key] + + body = json.dumps(output, cls=CustomJSONEncoder) + content_type = "application/json; charset=utf-8" + headers = {} + if next_url: + headers["link"] = f'<{next_url}>; rel="next"' + return Response( + body, 
status=status_code, headers=headers, content_type=content_type + ) + + # Deal with the _shape option shape = args.get("_shape", "arrays") # if there's an error, ignore the shape entirely From 4afc1afc721ac0d14f58b0f8339c1bf431d5313c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 2 May 2022 12:13:11 -0700 Subject: [PATCH 002/891] Depend on click-default-group-wheel>=1.2.2 Refs #1733 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 7f0562fd..fcb43aa1 100644 --- a/setup.py +++ b/setup.py @@ -44,7 +44,7 @@ setup( install_requires=[ "asgiref>=3.2.10,<3.6.0", "click>=7.1.1,<8.2.0", - "click-default-group~=1.2.2", + "click-default-group-wheel>=1.2.2", "Jinja2>=2.10.3,<3.1.0", "hupper~=1.9", "httpx>=0.20", From 7e03394734307a5761e4c98d902b6a8cab188562 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 2 May 2022 12:20:14 -0700 Subject: [PATCH 003/891] Optional uvicorn import for Pyodide, refs #1733 --- datasette/app.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index d269372c..a5330458 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -26,7 +26,6 @@ from itsdangerous import URLSafeSerializer from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader from jinja2.environment import Template from jinja2.exceptions import TemplateNotFound -import uvicorn from .views.base import DatasetteError, ureg from .views.database import DatabaseDownload, DatabaseView @@ -806,6 +805,15 @@ class Datasette: datasette_version = {"version": __version__} if self.version_note: datasette_version["note"] = self.version_note + + try: + # Optional import to avoid breaking Pyodide + # https://github.com/simonw/datasette/issues/1733#issuecomment-1115268245 + import uvicorn + + uvicorn_version = uvicorn.__version__ + except ImportError: + uvicorn_version = None info = { "python": { "version": ".".join(map(str, sys.version_info[:3])), 
@@ -813,7 +821,7 @@ class Datasette: }, "datasette": datasette_version, "asgi": "3.0", - "uvicorn": uvicorn.__version__, + "uvicorn": uvicorn_version, "sqlite": { "version": sqlite_version, "fts_versions": fts_versions, From 687907aa2b1bde4de6ae7155b0e2a949ca015ca9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 2 May 2022 12:39:06 -0700 Subject: [PATCH 004/891] Remove python-baseconv dependency, refs #1733, closes #1734 --- datasette/actor_auth_cookie.py | 2 +- datasette/utils/baseconv.py | 59 ++++++++++++++++++++++++++++++++++ docs/authentication.rst | 4 +-- setup.py | 1 - tests/test_auth.py | 2 +- 5 files changed, 63 insertions(+), 5 deletions(-) create mode 100644 datasette/utils/baseconv.py diff --git a/datasette/actor_auth_cookie.py b/datasette/actor_auth_cookie.py index 15ecd331..368213af 100644 --- a/datasette/actor_auth_cookie.py +++ b/datasette/actor_auth_cookie.py @@ -1,6 +1,6 @@ from datasette import hookimpl from itsdangerous import BadSignature -import baseconv +from datasette.utils import baseconv import time diff --git a/datasette/utils/baseconv.py b/datasette/utils/baseconv.py new file mode 100644 index 00000000..27e4fb00 --- /dev/null +++ b/datasette/utils/baseconv.py @@ -0,0 +1,59 @@ +""" +Convert numbers from base 10 integers to base X strings and back again. 
+ +Sample usage: + +>>> base20 = BaseConverter('0123456789abcdefghij') +>>> base20.from_decimal(1234) +'31e' +>>> base20.to_decimal('31e') +1234 + +Originally shared here: https://www.djangosnippets.org/snippets/1431/ +""" + + +class BaseConverter(object): + decimal_digits = "0123456789" + + def __init__(self, digits): + self.digits = digits + + def from_decimal(self, i): + return self.convert(i, self.decimal_digits, self.digits) + + def to_decimal(self, s): + return int(self.convert(s, self.digits, self.decimal_digits)) + + def convert(number, fromdigits, todigits): + # Based on http://code.activestate.com/recipes/111286/ + if str(number)[0] == "-": + number = str(number)[1:] + neg = 1 + else: + neg = 0 + + # make an integer out of the number + x = 0 + for digit in str(number): + x = x * len(fromdigits) + fromdigits.index(digit) + + # create the result in base 'len(todigits)' + if x == 0: + res = todigits[0] + else: + res = "" + while x > 0: + digit = x % len(todigits) + res = todigits[digit] + res + x = int(x / len(todigits)) + if neg: + res = "-" + res + return res + + convert = staticmethod(convert) + + +bin = BaseConverter("01") +hexconv = BaseConverter("0123456789ABCDEF") +base62 = BaseConverter("ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyz") diff --git a/docs/authentication.rst b/docs/authentication.rst index 24960733..685dab15 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -401,12 +401,12 @@ Including an expiry time ``ds_actor`` cookies can optionally include a signed expiry timestamp, after which the cookies will no longer be valid. Authentication plugins may chose to use this mechanism to limit the lifetime of the cookie. For example, if a plugin implements single-sign-on against another source it may decide to set short-lived cookies so that if the user is removed from the SSO system their existing Datasette cookies will stop working shortly afterwards. 
-To include an expiry, add a ``"e"`` key to the cookie value containing a `base62-encoded integer `__ representing the timestamp when the cookie should expire. For example, here's how to set a cookie that expires after 24 hours: +To include an expiry, add a ``"e"`` key to the cookie value containing a base62-encoded integer representing the timestamp when the cookie should expire. For example, here's how to set a cookie that expires after 24 hours: .. code-block:: python import time - import baseconv + from datasette.utils import baseconv expires_at = int(time.time()) + (24 * 60 * 60) diff --git a/setup.py b/setup.py index fcb43aa1..ca449f02 100644 --- a/setup.py +++ b/setup.py @@ -57,7 +57,6 @@ setup( "PyYAML>=5.3,<7.0", "mergedeep>=1.1.1,<1.4.0", "itsdangerous>=1.1,<3.0", - "python-baseconv==1.2.2", ], entry_points=""" [console_scripts] diff --git a/tests/test_auth.py b/tests/test_auth.py index 974f89ea..4ef35a76 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,5 +1,5 @@ from .fixtures import app_client -import baseconv +from datasette.utils import baseconv import pytest import time From a29c1277896b6a7905ef5441c42a37bc15f67599 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 2 May 2022 12:44:09 -0700 Subject: [PATCH 005/891] Rename to_decimal/from_decimal to decode/encode, refs #1734 --- datasette/utils/baseconv.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/datasette/utils/baseconv.py b/datasette/utils/baseconv.py index 27e4fb00..c4b64908 100644 --- a/datasette/utils/baseconv.py +++ b/datasette/utils/baseconv.py @@ -19,10 +19,10 @@ class BaseConverter(object): def __init__(self, digits): self.digits = digits - def from_decimal(self, i): + def encode(self, i): return self.convert(i, self.decimal_digits, self.digits) - def to_decimal(self, s): + def decode(self, s): return int(self.convert(s, self.digits, self.decimal_digits)) def convert(number, fromdigits, todigits): From 3f00a29141bdea5be747f6d1c93871ccdb792167 
Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 2 May 2022 13:15:27 -0700 Subject: [PATCH 006/891] Clean up compatibility with Pyodide (#1736) * Optional uvicorn import for Pyodide, refs #1733 * --setting num_sql_threads 0 to disable threading, refs #1735 --- datasette/app.py | 11 ++++++++--- datasette/database.py | 19 +++++++++++++++++++ docs/settings.rst | 2 ++ tests/test_internals_datasette.py | 14 +++++++++++++- 4 files changed, 42 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index a5330458..b7b84371 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -288,9 +288,12 @@ class Datasette: self._settings = dict(DEFAULT_SETTINGS, **(settings or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note - self.executor = futures.ThreadPoolExecutor( - max_workers=self.setting("num_sql_threads") - ) + if self.setting("num_sql_threads") == 0: + self.executor = None + else: + self.executor = futures.ThreadPoolExecutor( + max_workers=self.setting("num_sql_threads") + ) self.max_returned_rows = self.setting("max_returned_rows") self.sql_time_limit_ms = self.setting("sql_time_limit_ms") self.page_size = self.setting("default_page_size") @@ -862,6 +865,8 @@ class Datasette: ] def _threads(self): + if self.setting("num_sql_threads") == 0: + return {"num_threads": 0, "threads": []} threads = list(threading.enumerate()) d = { "num_threads": len(threads), diff --git a/datasette/database.py b/datasette/database.py index ba594a8c..44d32667 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -45,6 +45,9 @@ class Database: self._cached_table_counts = None self._write_thread = None self._write_queue = None + # These are used when in non-threaded mode: + self._read_connection = None + self._write_connection = None if not self.is_mutable and not self.is_memory: p = Path(path) self.hash = inspect_hash(p) @@ -134,6 +137,14 @@ class Database: return results async def 
execute_write_fn(self, fn, block=True): + if self.ds.executor is None: + # non-threaded mode + if self._write_connection is None: + self._write_connection = self.connect(write=True) + self.ds._prepare_connection(self._write_connection, self.name) + return fn(self._write_connection) + + # threaded mode task_id = uuid.uuid5(uuid.NAMESPACE_DNS, "datasette.io") if self._write_queue is None: self._write_queue = queue.Queue() @@ -177,6 +188,14 @@ class Database: task.reply_queue.sync_q.put(result) async def execute_fn(self, fn): + if self.ds.executor is None: + # non-threaded mode + if self._read_connection is None: + self._read_connection = self.connect() + self.ds._prepare_connection(self._read_connection, self.name) + return fn(self._read_connection) + + # threaded mode def in_thread(): conn = getattr(connections, self.name, None) if not conn: diff --git a/docs/settings.rst b/docs/settings.rst index 60c4b36d..8437fb04 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -107,6 +107,8 @@ Maximum number of threads in the thread pool Datasette uses to execute SQLite qu datasette mydatabase.db --setting num_sql_threads 10 +Setting this to 0 turns off threaded SQL queries entirely - useful for environments that do not support threading such as `Pyodide `__. + .. 
_setting_allow_facet: allow_facet diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index cc200a2d..1dc14cab 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -1,7 +1,7 @@ """ Tests for the datasette.app.Datasette class """ -from datasette.app import Datasette +from datasette.app import Datasette, Database from itsdangerous import BadSignature from .fixtures import app_client import pytest @@ -63,3 +63,15 @@ async def test_datasette_constructor(): "hash": None, } ] + + +@pytest.mark.asyncio +async def test_num_sql_threads_zero(): + ds = Datasette([], memory=True, settings={"num_sql_threads": 0}) + db = ds.add_database(Database(ds, memory_name="test_num_sql_threads_zero")) + await db.execute_write("create table t(id integer primary key)") + await db.execute_write("insert into t (id) values (1)") + response = await ds.client.get("/-/threads.json") + assert response.json() == {"num_threads": 0, "threads": []} + response2 = await ds.client.get("/test_num_sql_threads_zero/t.json?_shape=array") + assert response2.json() == [{"id": 1}] From 943aa2e1f7341cb51e60332cde46bde650c64217 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 2 May 2022 14:38:34 -0700 Subject: [PATCH 007/891] Release 0.62a0 Refs #1683, #1701, #1712, #1717, #1718, #1733 --- datasette/version.py | 2 +- docs/changelog.rst | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 02451a1e..cf18c441 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.61.1" +__version__ = "0.62a0" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 03cf62b6..74814fcb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,20 @@ Changelog ========= +.. _v0_62a0: + +0.62a0 (2022-05-02) +------------------- + +- Datasette now runs some SQL queries in parallel. 
This has limited impact on performance, see `this research issue `__ for details. +- Datasette should now be compatible with Pyodide. (:issue:`1733`) +- ``datasette publish cloudrun`` has a new ``--timeout`` option which can be used to increase the time limit applied by the Google Cloud build environment. Thanks, Tim Sherratt. (`#1717 `__) +- Spaces in database names are now encoded as ``+`` rather than ``~20``. (:issue:`1701`) +- ```` is now displayed as ```` and is accompanied by tooltip showing "2.3MB". (:issue:`1712`) +- Don't show the facet option in the cog menu if faceting is not allowed. (:issue:`1683`) +- Code examples in the documentation are now all formatted using Black. (:issue:`1718`) +- ``Request.fake()`` method is now documented, see :ref:`internals_request`. + .. _v0_61_1: 0.61.1 (2022-03-23) From 847d6b1aac38c3e776e8c600eed07ba4c9ac9942 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 2 May 2022 16:32:24 -0700 Subject: [PATCH 008/891] Test wheel against Pyodide, refs #1737, #1733 --- .github/workflows/test-pyodide.yml | 28 ++++++++++++++++++ test-in-pyodide-with-shot-scraper.sh | 43 ++++++++++++++++++++++++++++ 2 files changed, 71 insertions(+) create mode 100644 .github/workflows/test-pyodide.yml create mode 100755 test-in-pyodide-with-shot-scraper.sh diff --git a/.github/workflows/test-pyodide.yml b/.github/workflows/test-pyodide.yml new file mode 100644 index 00000000..3715d055 --- /dev/null +++ b/.github/workflows/test-pyodide.yml @@ -0,0 +1,28 @@ +name: Test in Pyodide with shot-scraper + +on: + workflow_dispatch: + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python 3.10 + uses: actions/setup-python@v3 + with: + python-version: "3.10" + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Cache Playwright browsers + uses: actions/cache@v2 + with: + path: ~/.cache/ms-playwright/ + key: ${{ runner.os }}-browsers + - name: Install Playwright dependencies + run: | + pip install 
shot-scraper + shot-scraper install + - name: Run test + run: | + ./test-in-pyodide-with-shot-scraper.sh diff --git a/test-in-pyodide-with-shot-scraper.sh b/test-in-pyodide-with-shot-scraper.sh new file mode 100755 index 00000000..0f29c0e0 --- /dev/null +++ b/test-in-pyodide-with-shot-scraper.sh @@ -0,0 +1,43 @@ +#!/bin/bash + +# Build the wheel +python3 -m build + +# Find name of wheel +wheel=$(basename $(ls dist/*.whl)) +# strip off the dist/ + + +# Create a blank index page +echo ' + +' > dist/index.html + +# Run a server for that dist/ folder +cd dist +python3 -m http.server 8529 & +cd .. + +shot-scraper javascript http://localhost:8529/ " +async () => { + let pyodide = await loadPyodide(); + await pyodide.loadPackage(['micropip', 'ssl', 'setuptools']); + let output = await pyodide.runPythonAsync(\` + import micropip + await micropip.install('h11==0.12.0') + await micropip.install('http://localhost:8529/$wheel') + import ssl + import setuptools + from datasette.app import Datasette + ds = Datasette(memory=True, settings={'num_sql_threads': 0}) + (await ds.client.get('/_memory.json?sql=select+55+as+itworks&_shape=array')).text + \`); + if (JSON.parse(output)[0].itworks != 55) { + throw 'Got ' + output + ', expected itworks: 55'; + } + return 'Test passed!'; +} +" + +# Shut down the server +pkill -f 'http.server 8529' From c0cbcf2aba0d8393ba464acc515803ebf2eeda12 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 2 May 2022 16:36:58 -0700 Subject: [PATCH 009/891] Tweaks to test scripts, refs #1737 --- .github/workflows/test-pyodide.yml | 2 +- test-in-pyodide-with-shot-scraper.sh | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test-pyodide.yml b/.github/workflows/test-pyodide.yml index 3715d055..beb6a5fb 100644 --- a/.github/workflows/test-pyodide.yml +++ b/.github/workflows/test-pyodide.yml @@ -21,7 +21,7 @@ jobs: key: ${{ runner.os }}-browsers - name: Install Playwright dependencies run: | - pip install 
shot-scraper + pip install shot-scraper build shot-scraper install - name: Run test run: | diff --git a/test-in-pyodide-with-shot-scraper.sh b/test-in-pyodide-with-shot-scraper.sh index 0f29c0e0..e5df7398 100755 --- a/test-in-pyodide-with-shot-scraper.sh +++ b/test-in-pyodide-with-shot-scraper.sh @@ -1,12 +1,12 @@ #!/bin/bash +set -e +# So the script fails if there are any errors # Build the wheel python3 -m build -# Find name of wheel +# Find name of wheel, strip off the dist/ wheel=$(basename $(ls dist/*.whl)) -# strip off the dist/ - # Create a blank index page echo ' From d60f163528f466b1127b2935c3b6869c34fd6545 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 2 May 2022 16:40:49 -0700 Subject: [PATCH 010/891] Run on push and PR, closes #1737 --- .github/workflows/test-pyodide.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/test-pyodide.yml b/.github/workflows/test-pyodide.yml index beb6a5fb..1b75aade 100644 --- a/.github/workflows/test-pyodide.yml +++ b/.github/workflows/test-pyodide.yml @@ -1,6 +1,8 @@ name: Test in Pyodide with shot-scraper on: + push: + pull_request: workflow_dispatch: jobs: From 280ff372ab30df244f6c54f6f3002da57334b3d7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 3 May 2022 07:59:18 -0700 Subject: [PATCH 011/891] ETag support for .db downloads, closes #1739 --- datasette/utils/testing.py | 20 ++++++++++++++++++-- datasette/views/database.py | 7 +++++++ tests/test_html.py | 10 +++++++++- 3 files changed, 34 insertions(+), 3 deletions(-) diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index 94750b1f..640c94e6 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -55,10 +55,21 @@ class TestClient: @async_to_sync async def get( - self, path, follow_redirects=False, redirect_count=0, method="GET", cookies=None + self, + path, + follow_redirects=False, + redirect_count=0, + method="GET", + cookies=None, + if_none_match=None, ): return await self._request( - 
path, follow_redirects, redirect_count, method, cookies + path=path, + follow_redirects=follow_redirects, + redirect_count=redirect_count, + method=method, + cookies=cookies, + if_none_match=if_none_match, ) @async_to_sync @@ -110,6 +121,7 @@ class TestClient: headers=None, post_body=None, content_type=None, + if_none_match=None, ): return await self._request( path, @@ -120,6 +132,7 @@ class TestClient: headers=headers, post_body=post_body, content_type=content_type, + if_none_match=if_none_match, ) async def _request( @@ -132,10 +145,13 @@ class TestClient: headers=None, post_body=None, content_type=None, + if_none_match=None, ): headers = headers or {} if content_type: headers["content-type"] = content_type + if if_none_match: + headers["if-none-match"] = if_none_match httpx_response = await self.ds.client.request( method, path, diff --git a/datasette/views/database.py b/datasette/views/database.py index 9a8aca32..bc08ba05 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -183,6 +183,13 @@ class DatabaseDownload(DataView): headers = {} if self.ds.cors: add_cors_headers(headers) + if db.hash: + etag = '"{}"'.format(db.hash) + headers["Etag"] = etag + # Has user seen this already? 
+ if_none_match = request.headers.get("if-none-match") + if if_none_match and if_none_match == etag: + return Response("", status=304) headers["Transfer-Encoding"] = "chunked" return AsgiFileDownload( filepath, diff --git a/tests/test_html.py b/tests/test_html.py index 42f1a3ee..409fec68 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -401,7 +401,7 @@ def test_database_download_for_immutable(): assert len(soup.findAll("a", {"href": re.compile(r"\.db$")})) # Check we can actually download it download_response = client.get("/fixtures.db") - assert 200 == download_response.status + assert download_response.status == 200 # Check the content-length header exists assert "content-length" in download_response.headers content_length = download_response.headers["content-length"] @@ -413,6 +413,14 @@ def test_database_download_for_immutable(): == 'attachment; filename="fixtures.db"' ) assert download_response.headers["transfer-encoding"] == "chunked" + # ETag header should be present and match db.hash + assert "etag" in download_response.headers + etag = download_response.headers["etag"] + assert etag == '"{}"'.format(client.ds.databases["fixtures"].hash) + # Try a second download with If-None-Match: current-etag + download_response2 = client.get("/fixtures.db", if_none_match=etag) + assert download_response2.body == b"" + assert download_response2.status == 304 def test_database_download_disallowed_for_mutable(app_client): From a5acfff4bd364d30ce8878e19f9839890371ef14 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 16 May 2022 17:06:40 -0700 Subject: [PATCH 012/891] Empty Datasette([]) list is no longer required --- docs/testing_plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 1bbaaac1..41046bfb 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -15,7 +15,7 @@ If you use the template described in :ref:`writing_plugins_cookiecutter` your pl 
@pytest.mark.asyncio async def test_plugin_is_installed(): - datasette = Datasette([], memory=True) + datasette = Datasette(memory=True) response = await datasette.client.get("/-/plugins.json") assert response.status_code == 200 installed_plugins = {p["name"] for p in response.json()} From 3508bf7875f8d62b2725222f3b07747974d54b97 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 17 May 2022 12:40:05 -0700 Subject: [PATCH 013/891] --nolock mode to ignore locked files, closes #1744 --- datasette/app.py | 2 ++ datasette/cli.py | 7 +++++++ datasette/database.py | 2 ++ docs/cli-reference.rst | 1 + docs/getting_started.rst | 4 +++- 5 files changed, 15 insertions(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index b7b84371..f43700d4 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -213,6 +213,7 @@ class Datasette: config_dir=None, pdb=False, crossdb=False, + nolock=False, ): assert config_dir is None or isinstance( config_dir, Path @@ -238,6 +239,7 @@ class Datasette: self.databases = collections.OrderedDict() self._refresh_schemas_lock = asyncio.Lock() self.crossdb = crossdb + self.nolock = nolock if memory or crossdb or not self.files: self.add_database(Database(self, is_memory=True), name="_memory") # memory_name is a random string so that each Datasette instance gets its own diff --git a/datasette/cli.py b/datasette/cli.py index 3c6e1b2c..8781747c 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -452,6 +452,11 @@ def uninstall(packages, yes): is_flag=True, help="Enable cross-database joins using the /_memory database", ) +@click.option( + "--nolock", + is_flag=True, + help="Ignore locking, open locked files in read-only mode", +) @click.option( "--ssl-keyfile", help="SSL key file", @@ -486,6 +491,7 @@ def serve( open_browser, create, crossdb, + nolock, ssl_keyfile, ssl_certfile, return_instance=False, @@ -545,6 +551,7 @@ def serve( version_note=version_note, pdb=pdb, crossdb=crossdb, + nolock=nolock, ) # if files is a single 
directory, use that as config_dir= diff --git a/datasette/database.py b/datasette/database.py index 44d32667..fa558045 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -89,6 +89,8 @@ class Database: # mode=ro or immutable=1? if self.is_mutable: qs = "?mode=ro" + if self.ds.nolock: + qs += "&nolock=1" else: qs = "?immutable=1" assert not (write and not self.is_mutable) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 2a6fbfc8..1c1aff15 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -115,6 +115,7 @@ datasette serve --help --create Create database files if they do not exist --crossdb Enable cross-database joins using the /_memory database + --nolock Ignore locking, open locked files in read-only mode --ssl-keyfile TEXT SSL key file --ssl-certfile TEXT SSL certificate file --help Show this message and exit. diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 3e357afb..502a9e5a 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -56,7 +56,9 @@ like so: :: - datasette ~/Library/Application\ Support/Google/Chrome/Default/History + datasette ~/Library/Application\ Support/Google/Chrome/Default/History --nolock + +The `--nolock` option ignores any file locks. This is safe as Datasette will open the file in read-only mode. Now visiting http://localhost:8001/History/downloads will show you a web interface to browse your downloads data: From 5555bc8aef043f75d2200f66de90c54aeeaa08c3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 17 May 2022 12:43:44 -0700 Subject: [PATCH 014/891] How to run cog, closes #1745 --- docs/contributing.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/contributing.rst b/docs/contributing.rst index c193ba49..bddceafe 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -211,6 +211,17 @@ For added productivity, you can use use `sphinx-autobuild `__. 
+ +To update these pages, run the following command:: + + cog -r docs/*.rst + .. _contributing_continuous_deployment: Continuously deployed demo instances From b393e164dc9e962702546d6f1ad9c857b5788dc0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 17 May 2022 12:45:28 -0700 Subject: [PATCH 015/891] ReST fix --- docs/getting_started.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 502a9e5a..af3a1385 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -58,7 +58,7 @@ like so: datasette ~/Library/Application\ Support/Google/Chrome/Default/History --nolock -The `--nolock` option ignores any file locks. This is safe as Datasette will open the file in read-only mode. +The ``--nolock`` option ignores any file locks. This is safe as Datasette will open the file in read-only mode. Now visiting http://localhost:8001/History/downloads will show you a web interface to browse your downloads data: From 7d1e004ff679b3fb4dca36d1d751a1ad16688fe6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 17 May 2022 12:59:28 -0700 Subject: [PATCH 016/891] Fix test I broke in #1744 --- tests/test_cli.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_cli.py b/tests/test_cli.py index dca65f26..d0f6e26c 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -150,6 +150,7 @@ def test_metadata_yaml(): help_settings=False, pdb=False, crossdb=False, + nolock=False, open_browser=False, create=False, ssl_keyfile=None, From 0e2f6f1f82f4445a63f1251470a7778a34f5c8b9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 18 May 2022 17:37:46 -0700 Subject: [PATCH 017/891] datasette-copyable is an example of register_output_renderer --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 3c9ae2e2..c0d88964 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -557,7 +557,7 @@ 
And here is an example ``can_render`` function which returns ``True`` only if th "atom_updated", }.issubset(columns) -Examples: `datasette-atom `_, `datasette-ics `_, `datasette-geojson `__ +Examples: `datasette-atom `_, `datasette-ics `_, `datasette-geojson `__, `datasette-copyable `__ .. _plugin_register_routes: From 18a6e05887abf1ac946a6e0d36ce662dfd8aeff1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 20 May 2022 12:05:33 -0700 Subject: [PATCH 018/891] Added "follow a tutorial" to getting started docs Closes #1747 --- docs/getting_started.rst | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/getting_started.rst b/docs/getting_started.rst index af3a1385..00b753a9 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -1,6 +1,8 @@ Getting started =============== +.. _getting_started_demo: + Play with a live demo --------------------- @@ -9,6 +11,16 @@ The best way to experience Datasette for the first time is with a demo: * `global-power-plants.datasettes.com `__ provides a searchable database of power plants around the world, using data from the `World Resources Institude `__ rendered using the `datasette-cluster-map `__ plugin. * `fivethirtyeight.datasettes.com `__ shows Datasette running against over 400 datasets imported from the `FiveThirtyEight GitHub repository `__. +.. _getting_started_tutorial: + +Follow a tutorial +----------------- + +Datasette has several `tutorials `__ to help you get started with the tool. Try one of the following: + +- `Exploring a database with Datasette `__ shows how to use the Datasette web interface to explore a new database. +- `Learn SQL with Datasette `__ introduces SQL, and shows how to use that query language to ask questions of your data. + .. 
_getting_started_glitch: Try Datasette without installing anything using Glitch From 1465fea4798599eccfe7e8f012bd8d9adfac3039 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 20 May 2022 12:11:08 -0700 Subject: [PATCH 019/891] sphinx-copybutton for docs, closes #1748 --- docs/conf.py | 2 +- setup.py | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index d114bc52..351cb1b1 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -31,7 +31,7 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ["sphinx.ext.extlinks", "sphinx.ext.autodoc"] +extensions = ["sphinx.ext.extlinks", "sphinx.ext.autodoc", "sphinx_copybutton"] extlinks = { "issue": ("https://github.com/simonw/datasette/issues/%s", "#"), diff --git a/setup.py b/setup.py index ca449f02..aad05840 100644 --- a/setup.py +++ b/setup.py @@ -64,7 +64,13 @@ setup( """, setup_requires=["pytest-runner"], extras_require={ - "docs": ["sphinx_rtd_theme", "sphinx-autobuild", "codespell", "blacken-docs"], + "docs": [ + "sphinx_rtd_theme", + "sphinx-autobuild", + "codespell", + "blacken-docs", + "sphinx-copybutton", + ], "test": [ "pytest>=5.2.2,<7.2.0", "pytest-xdist>=2.2.1,<2.6", From 1d33fd03b3c211e0f48a8f3bde83880af89e4e69 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 20 May 2022 13:34:51 -0700 Subject: [PATCH 020/891] Switch docs theme to Furo, refs #1746 --- docs/_static/css/custom.css | 7 ++-- .../layout.html => _static/js/custom.js} | 34 ------------------- docs/_templates/base.html | 6 ++++ docs/_templates/sidebar/brand.html | 16 +++++++++ docs/_templates/sidebar/navigation.html | 11 ++++++ docs/conf.py | 24 +++---------- docs/installation.rst | 1 + docs/plugin_hooks.rst | 1 + setup.py | 2 +- 9 files changed, 45 insertions(+), 57 deletions(-) rename docs/{_templates/layout.html => _static/js/custom.js} (55%) create mode 100644 
docs/_templates/base.html create mode 100644 docs/_templates/sidebar/brand.html create mode 100644 docs/_templates/sidebar/navigation.html diff --git a/docs/_static/css/custom.css b/docs/_static/css/custom.css index 4dabb725..0a6f8799 100644 --- a/docs/_static/css/custom.css +++ b/docs/_static/css/custom.css @@ -1,7 +1,8 @@ a.external { overflow-wrap: anywhere; } - -div .wy-side-nav-search > div.version { - color: rgba(0,0,0,0.75); +body[data-theme="dark"] .sidebar-logo-container { + background-color: white; + padding: 5px; + opacity: 0.6; } diff --git a/docs/_templates/layout.html b/docs/_static/js/custom.js similarity index 55% rename from docs/_templates/layout.html rename to docs/_static/js/custom.js index 785cdc7c..efca33ed 100644 --- a/docs/_templates/layout.html +++ b/docs/_static/js/custom.js @@ -1,35 +1,3 @@ -{%- extends "!layout.html" %} - -{% block htmltitle %} -{{ super() }} - -{% endblock %} - -{% block sidebartitle %} - - - - - -{% if theme_display_version %} - {%- set nav_version = version %} - {% if READTHEDOCS and current_version %} - {%- set nav_version = current_version %} - {% endif %} - {% if nav_version %} -
- {{ nav_version }} -
- {% endif %} -{% endif %} - -{% include "searchbox.html" %} - -{% endblock %} - -{% block footer %} -{{ super() }} - -{% endblock %} diff --git a/docs/_templates/base.html b/docs/_templates/base.html new file mode 100644 index 00000000..969de5ab --- /dev/null +++ b/docs/_templates/base.html @@ -0,0 +1,6 @@ +{%- extends "!base.html" %} + +{% block site_meta %} +{{ super() }} + +{% endblock %} diff --git a/docs/_templates/sidebar/brand.html b/docs/_templates/sidebar/brand.html new file mode 100644 index 00000000..8be9e8ee --- /dev/null +++ b/docs/_templates/sidebar/brand.html @@ -0,0 +1,16 @@ + diff --git a/docs/_templates/sidebar/navigation.html b/docs/_templates/sidebar/navigation.html new file mode 100644 index 00000000..c460a17e --- /dev/null +++ b/docs/_templates/sidebar/navigation.html @@ -0,0 +1,11 @@ + \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py index 351cb1b1..25d2acfe 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -90,18 +90,15 @@ todo_include_todos = False # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = "sphinx_rtd_theme" +html_theme = "furo" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "logo_only": True, - "style_nav_header_background": "white", - "prev_next_buttons_location": "both", + "sidebar_hide_name": True, } - # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". @@ -112,20 +109,9 @@ html_logo = "datasette-logo.svg" html_css_files = [ "css/custom.css", ] - - -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. 
-# -# This is required for the alabaster theme -# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars -html_sidebars = { - "**": [ - "relations.html", # needs 'show_related': True theme option to display - "searchbox.html", - ] -} - +html_js_files = [ + "js/custom.js" +] # -- Options for HTMLHelp output ------------------------------------------ diff --git a/docs/installation.rst b/docs/installation.rst index e8bef9cd..a4757736 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -13,6 +13,7 @@ If you want to start making contributions to the Datasette project by installing .. contents:: :local: + :class: this-will-duplicate-information-and-it-is-still-useful-here .. _installation_basic: diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index c0d88964..7d10fe37 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -20,6 +20,7 @@ For example, you can implement the ``render_cell`` plugin hook like this even th .. contents:: List of plugin hooks :local: + :class: this-will-duplicate-information-and-it-is-still-useful-here .. 
_plugin_hook_prepare_connection: diff --git a/setup.py b/setup.py index aad05840..d3fcdbd1 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,7 @@ setup( setup_requires=["pytest-runner"], extras_require={ "docs": [ - "sphinx_rtd_theme", + "furo==2022.4.7", "sphinx-autobuild", "codespell", "blacken-docs", From 4446075334ea7231beb56b630bc7ec363afc2d08 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 20 May 2022 13:44:23 -0700 Subject: [PATCH 021/891] Append warning to the write element, refs #1746 --- docs/_static/js/custom.js | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/docs/_static/js/custom.js b/docs/_static/js/custom.js index efca33ed..91c3e306 100644 --- a/docs/_static/js/custom.js +++ b/docs/_static/js/custom.js @@ -17,11 +17,7 @@ jQuery(function ($) { ` ); warning.find("a").attr("href", stableUrl); - var body = $("div.body"); - if (!body.length) { - body = $("div.document"); - } - body.prepend(warning); + $("article[role=main]").prepend(warning); } }); }); From b010af7bb85856aeb44f69e7e980f617c1fc0db1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 20 May 2022 15:23:09 -0700 Subject: [PATCH 022/891] Updated copyright years in documentation footer --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 25d2acfe..7ffeedd0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -51,7 +51,7 @@ master_doc = "index" # General information about the project. 
project = "Datasette" -copyright = "2017-2021, Simon Willison" +copyright = "2017-2022, Simon Willison" author = "Simon Willison" # Disable -- turning into – From adedd85b68ec66e03b97fb62ff4da8987734436e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 28 May 2022 18:42:31 -0700 Subject: [PATCH 023/891] Clarify that request.headers names are converted to lowercase --- docs/internals.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/internals.rst b/docs/internals.rst index 18822d47..da135282 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -26,7 +26,7 @@ The request object is passed to various plugin hooks. It represents an incoming The request scheme - usually ``https`` or ``http``. ``.headers`` - dictionary (str -> str) - A dictionary of incoming HTTP request headers. + A dictionary of incoming HTTP request headers. Header names have been converted to lowercase. ``.cookies`` - dictionary (str -> str) A dictionary of incoming cookies From 8dd816bc76937f1e37f86acce10dc2cb4fa31e52 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 May 2022 15:42:38 -0700 Subject: [PATCH 024/891] Applied Black --- docs/conf.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 7ffeedd0..4ef6b768 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -109,9 +109,7 @@ html_logo = "datasette-logo.svg" html_css_files = [ "css/custom.css", ] -html_js_files = [ - "js/custom.js" -] +html_js_files = ["js/custom.js"] # -- Options for HTMLHelp output ------------------------------------------ From 2e9751672d4fe329b3c359d5b7b1992283185820 Mon Sep 17 00:00:00 2001 From: Naveen <172697+naveensrinivasan@users.noreply.github.com> Date: Tue, 31 May 2022 14:28:40 -0500 Subject: [PATCH 025/891] chore: Set permissions for GitHub actions (#1740) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Restrict the GitHub token permissions only to the required ones; this way, 
even if the attackers will succeed in compromising your workflow, they won’t be able to do much. - Included permissions for the action. https://github.com/ossf/scorecard/blob/main/docs/checks.md#token-permissions https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions https://docs.github.com/en/actions/using-jobs/assigning-permissions-to-jobs [Keeping your GitHub Actions and workflows secure Part 1: Preventing pwn requests](https://securitylab.github.com/research/github-actions-preventing-pwn-requests/) Signed-off-by: naveen <172697+naveensrinivasan@users.noreply.github.com> --- .github/workflows/deploy-latest.yml | 3 +++ .github/workflows/prettier.yml | 3 +++ .github/workflows/publish.yml | 3 +++ .github/workflows/push_docker_tag.yml | 3 +++ .github/workflows/spellcheck.yml | 3 +++ .github/workflows/test-coverage.yml | 3 +++ .github/workflows/test-pyodide.yml | 3 +++ .github/workflows/test.yml | 3 +++ .github/workflows/tmate-mac.yml | 3 +++ .github/workflows/tmate.yml | 3 +++ 10 files changed, 30 insertions(+) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index a61f6629..2b94a7f1 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -5,6 +5,9 @@ on: branches: - main +permissions: + contents: read + jobs: deploy: runs-on: ubuntu-latest diff --git a/.github/workflows/prettier.yml b/.github/workflows/prettier.yml index 9dfe7ee0..ded41040 100644 --- a/.github/workflows/prettier.yml +++ b/.github/workflows/prettier.yml @@ -2,6 +2,9 @@ name: Check JavaScript for conformance with Prettier on: [push] +permissions: + contents: read + jobs: prettier: runs-on: ubuntu-latest diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 3e4f8146..9ef09d2e 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -4,6 +4,9 @@ on: release: types: [created] +permissions: + contents: read + jobs: test: runs-on: 
ubuntu-latest diff --git a/.github/workflows/push_docker_tag.yml b/.github/workflows/push_docker_tag.yml index 9a3969f0..afe8d6b2 100644 --- a/.github/workflows/push_docker_tag.yml +++ b/.github/workflows/push_docker_tag.yml @@ -6,6 +6,9 @@ on: version_tag: description: Tag to build and push +permissions: + contents: read + jobs: deploy_docker: runs-on: ubuntu-latest diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml index 2e24d3eb..a2621ecc 100644 --- a/.github/workflows/spellcheck.yml +++ b/.github/workflows/spellcheck.yml @@ -2,6 +2,9 @@ name: Check spelling in documentation on: [push, pull_request] +permissions: + contents: read + jobs: spellcheck: runs-on: ubuntu-latest diff --git a/.github/workflows/test-coverage.yml b/.github/workflows/test-coverage.yml index 1d1cf332..bd720664 100644 --- a/.github/workflows/test-coverage.yml +++ b/.github/workflows/test-coverage.yml @@ -7,6 +7,9 @@ on: pull_request: branches: - main +permissions: + contents: read + jobs: test: runs-on: ubuntu-latest diff --git a/.github/workflows/test-pyodide.yml b/.github/workflows/test-pyodide.yml index 1b75aade..bc9593a8 100644 --- a/.github/workflows/test-pyodide.yml +++ b/.github/workflows/test-pyodide.yml @@ -5,6 +5,9 @@ on: pull_request: workflow_dispatch: +permissions: + contents: read + jobs: test: runs-on: ubuntu-latest diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8d916e49..90b6555e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -2,6 +2,9 @@ name: Test on: [push, pull_request] +permissions: + contents: read + jobs: test: runs-on: ubuntu-latest diff --git a/.github/workflows/tmate-mac.yml b/.github/workflows/tmate-mac.yml index 46be117e..fcee0f21 100644 --- a/.github/workflows/tmate-mac.yml +++ b/.github/workflows/tmate-mac.yml @@ -3,6 +3,9 @@ name: tmate session mac on: workflow_dispatch: +permissions: + contents: read + jobs: build: runs-on: macos-latest diff --git 
a/.github/workflows/tmate.yml b/.github/workflows/tmate.yml index 02e7bd33..9792245d 100644 --- a/.github/workflows/tmate.yml +++ b/.github/workflows/tmate.yml @@ -3,6 +3,9 @@ name: tmate session on: workflow_dispatch: +permissions: + contents: read + jobs: build: runs-on: ubuntu-latest From e780b2f5d662ef3579d801d33567440055d4e84d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 20 Jun 2022 10:54:23 -0700 Subject: [PATCH 026/891] Trying out one-sentence-per-line As suggested here: https://sive.rs/1s Markdown and reStructuredText will display this as if it is a single paragraph, even though the sentences themselves are separated by newlines. This could result in more useful diffs. Trying it out on this page first. --- docs/facets.rst | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/docs/facets.rst b/docs/facets.rst index 0228aa84..2a2eb039 100644 --- a/docs/facets.rst +++ b/docs/facets.rst @@ -3,7 +3,9 @@ Facets ====== -Datasette facets can be used to add a faceted browse interface to any database table. With facets, tables are displayed along with a summary showing the most common values in specified columns. These values can be selected to further filter the table. +Datasette facets can be used to add a faceted browse interface to any database table. +With facets, tables are displayed along with a summary showing the most common values in specified columns. +These values can be selected to further filter the table. .. image:: facets.png @@ -12,11 +14,13 @@ Facets can be specified in two ways: using query string parameters, or in ``meta Facets in query strings ----------------------- -To turn on faceting for specific columns on a Datasette table view, add one or more ``_facet=COLUMN`` parameters to the URL. 
For example, if you want to turn on facets for the ``city_id`` and ``state`` columns, construct a URL that looks like this:: +To turn on faceting for specific columns on a Datasette table view, add one or more ``_facet=COLUMN`` parameters to the URL. +For example, if you want to turn on facets for the ``city_id`` and ``state`` columns, construct a URL that looks like this:: /dbname/tablename?_facet=state&_facet=city_id -This works for both the HTML interface and the ``.json`` view. When enabled, facets will cause a ``facet_results`` block to be added to the JSON output, looking something like this: +This works for both the HTML interface and the ``.json`` view. +When enabled, facets will cause a ``facet_results`` block to be added to the JSON output, looking something like this: .. code-block:: json @@ -86,7 +90,8 @@ This works for both the HTML interface and the ``.json`` view. When enabled, fac If Datasette detects that a column is a foreign key, the ``"label"`` property will be automatically derived from the detected label column on the referenced table. -The default number of facet results returned is 30, controlled by the :ref:`setting_default_facet_size` setting. You can increase this on an individual page by adding ``?_facet_size=100`` to the query string, up to a maximum of :ref:`setting_max_returned_rows` (which defaults to 1000). +The default number of facet results returned is 30, controlled by the :ref:`setting_default_facet_size` setting. +You can increase this on an individual page by adding ``?_facet_size=100`` to the query string, up to a maximum of :ref:`setting_max_returned_rows` (which defaults to 1000). .. 
_facets_metadata: @@ -137,12 +142,14 @@ For the currently filtered data are there any columns which, if applied as a fac * Will return less unique options than the total number of filtered rows * And the query used to evaluate this criteria can be completed in under 50ms -That last point is particularly important: Datasette runs a query for every column that is displayed on a page, which could get expensive - so to avoid slow load times it sets a time limit of just 50ms for each of those queries. This means suggested facets are unlikely to appear for tables with millions of records in them. +That last point is particularly important: Datasette runs a query for every column that is displayed on a page, which could get expensive - so to avoid slow load times it sets a time limit of just 50ms for each of those queries. +This means suggested facets are unlikely to appear for tables with millions of records in them. Speeding up facets with indexes ------------------------------- -The performance of facets can be greatly improved by adding indexes on the columns you wish to facet by. Adding indexes can be performed using the ``sqlite3`` command-line utility. Here's how to add an index on the ``state`` column in a table called ``Food_Trucks``:: +The performance of facets can be greatly improved by adding indexes on the columns you wish to facet by. +Adding indexes can be performed using the ``sqlite3`` command-line utility. Here's how to add an index on the ``state`` column in a table called ``Food_Trucks``:: $ sqlite3 mydatabase.db SQLite version 3.19.3 2017-06-27 16:48:08 @@ -169,6 +176,7 @@ Example here: `latest.datasette.io/fixtures/facetable?_facet_array=tags `__ From 00e59ec461dc0150772b999c7cc15fcb9b507d58 Mon Sep 17 00:00:00 2001 From: "M. 
Nasimul Haque" Date: Mon, 20 Jun 2022 19:05:44 +0100 Subject: [PATCH 027/891] Extract facet pieces of table.html into included templates Thanks, @nsmgr8 --- datasette/templates/_facet_results.html | 28 ++++++++++++++++++ datasette/templates/_suggested_facets.html | 3 ++ datasette/templates/table.html | 33 ++-------------------- 3 files changed, 33 insertions(+), 31 deletions(-) create mode 100644 datasette/templates/_facet_results.html create mode 100644 datasette/templates/_suggested_facets.html diff --git a/datasette/templates/_facet_results.html b/datasette/templates/_facet_results.html new file mode 100644 index 00000000..d0cbcf77 --- /dev/null +++ b/datasette/templates/_facet_results.html @@ -0,0 +1,28 @@ +
+ {% for facet_info in sorted_facet_results %} +
+

+ {{ facet_info.name }}{% if facet_info.type != "column" %} ({{ facet_info.type }}){% endif %} + {% if facet_info.truncated %}>{% endif %}{{ facet_info.results|length }} + + {% if facet_info.hideable %} + + {% endif %} +

+
    + {% for facet_value in facet_info.results %} + {% if not facet_value.selected %} +
  • {{ (facet_value.label | string()) or "-" }} {{ "{:,}".format(facet_value.count) }}
  • + {% else %} +
  • {{ facet_value.label or "-" }} · {{ "{:,}".format(facet_value.count) }}
  • + {% endif %} + {% endfor %} + {% if facet_info.truncated %} +
  • {% if request.args._facet_size != "max" -%} + {% else -%}…{% endif %} +
  • + {% endif %} +
+
+ {% endfor %} +
diff --git a/datasette/templates/_suggested_facets.html b/datasette/templates/_suggested_facets.html new file mode 100644 index 00000000..ec98fb36 --- /dev/null +++ b/datasette/templates/_suggested_facets.html @@ -0,0 +1,3 @@ +

+ Suggested facets: {% for facet in suggested_facets %}{{ facet.name }}{% if facet.type %} ({{ facet.type }}){% endif %}{% if not loop.last %}, {% endif %}{% endfor %} +

diff --git a/datasette/templates/table.html b/datasette/templates/table.html index a9e88330..a86398ea 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -142,9 +142,7 @@ {% if suggested_facets %} -

- Suggested facets: {% for facet in suggested_facets %}{{ facet.name }}{% if facet.type %} ({{ facet.type }}){% endif %}{% if not loop.last %}, {% endif %}{% endfor %} -

+ {% include "_suggested_facets.html" %} {% endif %} {% if facets_timed_out %} @@ -152,34 +150,7 @@ {% endif %} {% if facet_results %} -
- {% for facet_info in sorted_facet_results %} -
-

- {{ facet_info.name }}{% if facet_info.type != "column" %} ({{ facet_info.type }}){% endif %} - {% if facet_info.truncated %}>{% endif %}{{ facet_info.results|length }} - - {% if facet_info.hideable %} - - {% endif %} -

-
    - {% for facet_value in facet_info.results %} - {% if not facet_value.selected %} -
  • {{ (facet_value.label | string()) or "-" }} {{ "{:,}".format(facet_value.count) }}
  • - {% else %} -
  • {{ facet_value.label or "-" }} · {{ "{:,}".format(facet_value.count) }}
  • - {% endif %} - {% endfor %} - {% if facet_info.truncated %} -
  • {% if request.args._facet_size != "max" -%} - {% else -%}…{% endif %} -
  • - {% endif %} -
-
- {% endfor %} -
+ {% include "_facet_results.html" %} {% endif %} {% include custom_table_templates %} From 9f1eb0d4eac483b953392157bd9fd6cc4df37de7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Jun 2022 10:40:24 -0700 Subject: [PATCH 028/891] Bump black from 22.1.0 to 22.6.0 (#1763) Bumps [black](https://github.com/psf/black) from 22.1.0 to 22.6.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/22.1.0...22.6.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index d3fcdbd1..29cb77bf 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ setup( "pytest-xdist>=2.2.1,<2.6", "pytest-asyncio>=0.17,<0.19", "beautifulsoup4>=4.8.1,<4.12.0", - "black==22.1.0", + "black==22.6.0", "blacken-docs==1.12.1", "pytest-timeout>=1.4.2,<2.2", "trustme>=0.7,<0.10", From 6373bb341457e5becfd5b67792ac2c8b9ed7c384 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 7 Jul 2022 09:30:49 -0700 Subject: [PATCH 029/891] Expose current SQLite row to render_cell hook, closes #1300 --- datasette/hookspecs.py | 2 +- datasette/views/database.py | 1 + datasette/views/table.py | 1 + docs/plugin_hooks.rst | 9 ++++++--- tests/plugins/my_plugin.py | 3 ++- tests/test_plugins.py | 5 +++-- 6 files changed, 14 insertions(+), 7 deletions(-) diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 8f4fecab..c84db0a3 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -60,7 +60,7 @@ def publish_subcommand(publish): @hookspec -def render_cell(value, column, table, database, datasette): +def render_cell(row, value, 
column, table, database, datasette): """Customize rendering of HTML table cell values""" diff --git a/datasette/views/database.py b/datasette/views/database.py index bc08ba05..42058752 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -375,6 +375,7 @@ class QueryView(DataView): # pylint: disable=no-member plugin_display_value = None for candidate in pm.hook.render_cell( + row=row, value=value, column=column, table=None, diff --git a/datasette/views/table.py b/datasette/views/table.py index 23289b29..cd4be823 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -895,6 +895,7 @@ async def display_columns_and_rows( # pylint: disable=no-member plugin_display_value = None for candidate in pm.hook.render_cell( + row=row, value=value, column=column, table=table_name, diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 7d10fe37..f5c3ee83 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -373,12 +373,15 @@ Examples: `datasette-publish-fly Date: Sat, 9 Jul 2022 10:25:37 -0700 Subject: [PATCH 030/891] More than 90 plugins now --- docs/writing_plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index 9aee70f6..01ee8c90 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -5,7 +5,7 @@ Writing plugins You can write one-off plugins that apply to just one Datasette instance, or you can write plugins which can be installed using ``pip`` and can be shipped to the Python Package Index (`PyPI `__) for other people to install. -Want to start by looking at an example? The `Datasette plugins directory `__ lists more than 50 open source plugins with code you can explore. The :ref:`plugin hooks ` page includes links to example plugins for each of the documented hooks. +Want to start by looking at an example? The `Datasette plugins directory `__ lists more than 90 open source plugins with code you can explore. 
The :ref:`plugin hooks ` page includes links to example plugins for each of the documented hooks. .. _writing_plugins_one_off: From 5d76c1f81b2d978f48b85c70d041a2142cf8ee26 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 14 Jul 2022 15:03:33 -0700 Subject: [PATCH 031/891] Discord badge Refs https://github.com/simonw/datasette.io/issues/112 --- README.md | 1 + docs/index.rst | 2 ++ 2 files changed, 3 insertions(+) diff --git a/README.md b/README.md index 557d9290..c57ee604 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,7 @@ [![Documentation Status](https://readthedocs.org/projects/datasette/badge/?version=latest)](https://docs.datasette.io/en/latest/?badge=latest) [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/simonw/datasette/blob/main/LICENSE) [![docker: datasette](https://img.shields.io/badge/docker-datasette-blue)](https://hub.docker.com/r/datasetteproject/datasette) +[![discord](https://img.shields.io/discord/823971286308356157?label=Discord)](https://discord.gg/ktd74dm5mw) *An open source multi-tool for exploring and publishing data* diff --git a/docs/index.rst b/docs/index.rst index a2888822..62ed70f8 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -16,6 +16,8 @@ datasette| :target: https://github.com/simonw/datasette/blob/main/LICENSE .. |docker: datasette| image:: https://img.shields.io/badge/docker-datasette-blue :target: https://hub.docker.com/r/datasetteproject/datasette +.. 
|discord| image:: https://img.shields.io/discord/823971286308356157?label=Discord + :target: https://discord.gg/ktd74dm5mw *An open source multi-tool for exploring and publishing data* From c133545fe9c7ac2d509e55bf4bf6164bfbe892ad Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 14 Jul 2022 15:04:38 -0700 Subject: [PATCH 032/891] Make discord badge lowercase Refs https://github.com/simonw/datasette.io/issues/112 --- README.md | 2 +- docs/index.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index c57ee604..032180aa 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ [![Documentation Status](https://readthedocs.org/projects/datasette/badge/?version=latest)](https://docs.datasette.io/en/latest/?badge=latest) [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/simonw/datasette/blob/main/LICENSE) [![docker: datasette](https://img.shields.io/badge/docker-datasette-blue)](https://hub.docker.com/r/datasetteproject/datasette) -[![discord](https://img.shields.io/discord/823971286308356157?label=Discord)](https://discord.gg/ktd74dm5mw) +[![discord](https://img.shields.io/discord/823971286308356157?label=discord)](https://discord.gg/ktd74dm5mw) *An open source multi-tool for exploring and publishing data* diff --git a/docs/index.rst b/docs/index.rst index 62ed70f8..051898b1 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -16,7 +16,7 @@ datasette| :target: https://github.com/simonw/datasette/blob/main/LICENSE .. |docker: datasette| image:: https://img.shields.io/badge/docker-datasette-blue :target: https://hub.docker.com/r/datasetteproject/datasette -.. |discord| image:: https://img.shields.io/discord/823971286308356157?label=Discord +.. 
|discord| image:: https://img.shields.io/discord/823971286308356157?label=discord :target: https://discord.gg/ktd74dm5mw *An open source multi-tool for exploring and publishing data* From 950cc7677f65aa2543067b3bbfc2b6acb98b62c8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 14 Jul 2022 15:18:28 -0700 Subject: [PATCH 033/891] Fix missing Discord image Refs https://github.com/simonw/datasette.io/issues/112 --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 051898b1..efe196b3 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,7 +2,7 @@ Datasette ========= |PyPI| |Changelog| |Python 3.x| |Tests| |License| |docker: -datasette| +datasette| |discord| .. |PyPI| image:: https://img.shields.io/pypi/v/datasette.svg :target: https://pypi.org/project/datasette/ From 8188f55efc0fcca1be692b0d0c875f2d1ee99f17 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 17 Jul 2022 15:24:16 -0700 Subject: [PATCH 034/891] Rename handle_500 to handle_exception, refs #1770 --- datasette/app.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f43700d4..43e60dbc 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1275,7 +1275,7 @@ class DatasetteRouter: except NotFound as exception: return await self.handle_404(request, send, exception) except Exception as exception: - return await self.handle_500(request, send, exception) + return await self.handle_exception(request, send, exception) async def handle_404(self, request, send, exception=None): # If path contains % encoding, redirect to tilde encoding @@ -1354,7 +1354,7 @@ class DatasetteRouter: view_name="page", ) except NotFoundExplicit as e: - await self.handle_500(request, send, e) + await self.handle_exception(request, send, e) return # Pull content-type out into separate parameter content_type = "text/html; charset=utf-8" @@ -1369,9 +1369,9 @@ class DatasetteRouter: 
content_type=content_type, ) else: - await self.handle_500(request, send, exception or NotFound("404")) + await self.handle_exception(request, send, exception or NotFound("404")) - async def handle_500(self, request, send, exception): + async def handle_exception(self, request, send, exception): if self.ds.pdb: import pdb From c09c53f3455a7b9574cf7695478f2b87d20897db Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 17 Jul 2022 16:24:39 -0700 Subject: [PATCH 035/891] New handle_exception plugin hook, refs #1770 Also refs: - https://github.com/simonw/datasette-sentry/issues/1 - https://github.com/simonw/datasette-show-errors/issues/2 --- datasette/app.py | 97 +++++++++-------------------------- datasette/forbidden.py | 20 ++++++++ datasette/handle_exception.py | 74 ++++++++++++++++++++++++++ datasette/hookspecs.py | 5 ++ datasette/plugins.py | 2 + docs/plugin_hooks.rst | 78 ++++++++++++++++++++-------- tests/fixtures.py | 1 + tests/plugins/my_plugin_2.py | 18 +++++++ tests/test_permissions.py | 1 + tests/test_plugins.py | 14 +++++ 10 files changed, 215 insertions(+), 95 deletions(-) create mode 100644 datasette/forbidden.py create mode 100644 datasette/handle_exception.py diff --git a/datasette/app.py b/datasette/app.py index 43e60dbc..edd05bb3 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -16,7 +16,6 @@ import re import secrets import sys import threading -import traceback import urllib.parse from concurrent import futures from pathlib import Path @@ -27,7 +26,7 @@ from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader from jinja2.environment import Template from jinja2.exceptions import TemplateNotFound -from .views.base import DatasetteError, ureg +from .views.base import ureg from .views.database import DatabaseDownload, DatabaseView from .views.index import IndexView from .views.special import ( @@ -49,7 +48,6 @@ from .utils import ( PrefixedUrlString, SPATIALITE_FUNCTIONS, StartupError, - add_cors_headers, 
async_call_with_supported_arguments, await_me_maybe, call_with_supported_arguments, @@ -87,11 +85,6 @@ from .tracer import AsgiTracer from .plugins import pm, DEFAULT_PLUGINS, get_plugins from .version import __version__ -try: - import rich -except ImportError: - rich = None - app_root = Path(__file__).parent.parent # https://github.com/simonw/datasette/issues/283#issuecomment-781591015 @@ -1274,6 +1267,16 @@ class DatasetteRouter: return except NotFound as exception: return await self.handle_404(request, send, exception) + except Forbidden as exception: + # Try the forbidden() plugin hook + for custom_response in pm.hook.forbidden( + datasette=self.ds, request=request, message=exception.args[0] + ): + custom_response = await await_me_maybe(custom_response) + assert ( + custom_response + ), "Default forbidden() hook should have been called" + return await custom_response.asgi_send(send) except Exception as exception: return await self.handle_exception(request, send, exception) @@ -1372,72 +1375,20 @@ class DatasetteRouter: await self.handle_exception(request, send, exception or NotFound("404")) async def handle_exception(self, request, send, exception): - if self.ds.pdb: - import pdb + responses = [] + for hook in pm.hook.handle_exception( + datasette=self.ds, + request=request, + exception=exception, + ): + response = await await_me_maybe(hook) + if response is not None: + responses.append(response) - pdb.post_mortem(exception.__traceback__) - - if rich is not None: - rich.get_console().print_exception(show_locals=True) - - title = None - if isinstance(exception, Forbidden): - status = 403 - info = {} - message = exception.args[0] - # Try the forbidden() plugin hook - for custom_response in pm.hook.forbidden( - datasette=self.ds, request=request, message=message - ): - custom_response = await await_me_maybe(custom_response) - if custom_response is not None: - await custom_response.asgi_send(send) - return - elif isinstance(exception, Base400): - status = 
exception.status - info = {} - message = exception.args[0] - elif isinstance(exception, DatasetteError): - status = exception.status - info = exception.error_dict - message = exception.message - if exception.message_is_html: - message = Markup(message) - title = exception.title - else: - status = 500 - info = {} - message = str(exception) - traceback.print_exc() - templates = [f"{status}.html", "error.html"] - info.update( - { - "ok": False, - "error": message, - "status": status, - "title": title, - } - ) - headers = {} - if self.ds.cors: - add_cors_headers(headers) - if request.path.split("?")[0].endswith(".json"): - await asgi_send_json(send, info, status=status, headers=headers) - else: - template = self.ds.jinja_env.select_template(templates) - await asgi_send_html( - send, - await template.render_async( - dict( - info, - urls=self.ds.urls, - app_css_hash=self.ds.app_css_hash(), - menu_links=lambda: [], - ) - ), - status=status, - headers=headers, - ) + assert responses, "Default exception handler should have returned something" + # Even if there are multiple responses use just the first one + response = responses[0] + await response.asgi_send(send) _cleaner_task_str_re = re.compile(r"\S*site-packages/") diff --git a/datasette/forbidden.py b/datasette/forbidden.py new file mode 100644 index 00000000..156a44d4 --- /dev/null +++ b/datasette/forbidden.py @@ -0,0 +1,20 @@ +from os import stat +from datasette import hookimpl, Response + + +@hookimpl(trylast=True) +def forbidden(datasette, request, message): + async def inner(): + return Response.html( + await datasette.render_template( + "error.html", + { + "title": "Forbidden", + "error": message, + }, + request=request, + ), + status=403, + ) + + return inner diff --git a/datasette/handle_exception.py b/datasette/handle_exception.py new file mode 100644 index 00000000..8b7e83e3 --- /dev/null +++ b/datasette/handle_exception.py @@ -0,0 +1,74 @@ +from datasette import hookimpl, Response +from .utils import 
await_me_maybe, add_cors_headers +from .utils.asgi import ( + Base400, + Forbidden, +) +from .views.base import DatasetteError +from markupsafe import Markup +import pdb +import traceback +from .plugins import pm + +try: + import rich +except ImportError: + rich = None + + +@hookimpl(trylast=True) +def handle_exception(datasette, request, exception): + async def inner(): + if datasette.pdb: + pdb.post_mortem(exception.__traceback__) + + if rich is not None: + rich.get_console().print_exception(show_locals=True) + + title = None + if isinstance(exception, Base400): + status = exception.status + info = {} + message = exception.args[0] + elif isinstance(exception, DatasetteError): + status = exception.status + info = exception.error_dict + message = exception.message + if exception.message_is_html: + message = Markup(message) + title = exception.title + else: + status = 500 + info = {} + message = str(exception) + traceback.print_exc() + templates = [f"{status}.html", "error.html"] + info.update( + { + "ok": False, + "error": message, + "status": status, + "title": title, + } + ) + headers = {} + if datasette.cors: + add_cors_headers(headers) + if request.path.split("?")[0].endswith(".json"): + return Response.json(info, status=status, headers=headers) + else: + template = datasette.jinja_env.select_template(templates) + return Response.html( + await template.render_async( + dict( + info, + urls=datasette.urls, + app_css_hash=datasette.app_css_hash(), + menu_links=lambda: [], + ) + ), + status=status, + headers=headers, + ) + + return inner diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index c84db0a3..a5fb536f 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -138,3 +138,8 @@ def database_actions(datasette, actor, database, request): @hookspec def skip_csrf(datasette, scope): """Mechanism for skipping CSRF checks for certain requests""" + + +@hookspec +def handle_exception(datasette, request, exception): + """Handle an uncaught 
exception. Can return a Response or None.""" diff --git a/datasette/plugins.py b/datasette/plugins.py index 76b46a47..fef0c8e9 100644 --- a/datasette/plugins.py +++ b/datasette/plugins.py @@ -15,6 +15,8 @@ DEFAULT_PLUGINS = ( "datasette.default_magic_parameters", "datasette.blob_renderer", "datasette.default_menu_links", + "datasette.handle_exception", + "datasette.forbidden", ) pm = pluggy.PluginManager("datasette") diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index f5c3ee83..6020a941 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -107,8 +107,8 @@ Extra template variables that should be made available in the rendered template ``view_name`` - string The name of the view being displayed. (``index``, ``database``, ``table``, and ``row`` are the most important ones.) -``request`` - object or None - The current HTTP :ref:`internals_request`. This can be ``None`` if the request object is not available. +``request`` - :ref:`internals_request` or None + The current HTTP request. This can be ``None`` if the request object is not available. ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` @@ -504,7 +504,7 @@ When a request is received, the ``"render"`` callback function is called with ze The table or view, if one is being rendered. ``request`` - :ref:`internals_request` - The incoming HTTP request. + The current HTTP request. ``view_name`` - string The name of the current view being called. ``index``, ``database``, ``table``, and ``row`` are the most important ones. @@ -599,8 +599,8 @@ The optional view function arguments are as follows: ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. -``request`` - Request object - The current HTTP :ref:`internals_request`. 
+``request`` - :ref:`internals_request` + The current HTTP request. ``scope`` - dictionary The incoming ASGI scope dictionary. @@ -947,8 +947,8 @@ actor_from_request(datasette, request) ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. -``request`` - object - The current HTTP :ref:`internals_request`. +``request`` - :ref:`internals_request` + The current HTTP request. This is part of Datasette's :ref:`authentication and permissions system `. The function should attempt to authenticate an actor (either a user or an API actor of some sort) based on information in the request. @@ -1010,8 +1010,8 @@ Example: `datasette-auth-tokens `__ and then renders a custom error page: + +.. code-block:: python + + from datasette import hookimpl, Response + import sentry_sdk + + + @hookimpl + def handle_exception(datasette, exception): + sentry_sdk.capture_exception(exception) + async def inner(): + return Response.html( + await datasette.render_template("custom_error.html", request=request) + ) + return inner + .. _plugin_hook_menu_links: menu_links(datasette, actor, request) @@ -1232,8 +1266,8 @@ menu_links(datasette, actor, request) ``actor`` - dictionary or None The currently authenticated :ref:`actor `. -``request`` - object or None - The current HTTP :ref:`internals_request`. This can be ``None`` if the request object is not available. +``request`` - :ref:`internals_request` + The current HTTP request. This can be ``None`` if the request object is not available. This hook allows additional items to be included in the menu displayed by Datasette's top right menu icon. @@ -1281,8 +1315,8 @@ table_actions(datasette, actor, database, table, request) ``table`` - string The name of the table. -``request`` - object - The current HTTP :ref:`internals_request`. This can be ``None`` if the request object is not available. 
+``request`` - :ref:`internals_request` + The current HTTP request. This can be ``None`` if the request object is not available. This hook allows table actions to be displayed in a menu accessed via an action icon at the top of the table page. It should return a list of ``{"href": "...", "label": "..."}`` menu items. @@ -1325,8 +1359,8 @@ database_actions(datasette, actor, database, request) ``database`` - string The name of the database. -``request`` - object - The current HTTP :ref:`internals_request`. +``request`` - :ref:`internals_request` + The current HTTP request. This hook is similar to :ref:`plugin_hook_table_actions` but populates an actions menu on the database page. diff --git a/tests/fixtures.py b/tests/fixtures.py index e0e4ec7b..c145ac78 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -68,6 +68,7 @@ EXPECTED_PLUGINS = [ "canned_queries", "extra_js_urls", "extra_template_vars", + "handle_exception", "menu_links", "permission_allowed", "register_routes", diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index f5ce36b3..4df02343 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -185,3 +185,21 @@ def register_routes(datasette): # Also serves to demonstrate over-ride of default paths: (r"/(?P[^/]+)/(?P[^/]+?$)", new_table), ] + + +@hookimpl +def handle_exception(datasette, request, exception): + datasette._exception_hook_fired = (request, exception) + if request.args.get("_custom_error"): + return Response.text("_custom_error") + elif request.args.get("_custom_error_async"): + + async def inner(): + return Response.text("_custom_error_async") + + return inner + + +@hookimpl(specname="register_routes") +def register_triger_error(): + return ((r"/trigger-error", lambda: 1 / 0),) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index f4169dbe..2a519e76 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -332,6 +332,7 @@ def 
test_permissions_debug(app_client): assert checks == [ {"action": "permissions-debug", "result": True, "used_default": False}, {"action": "view-instance", "result": None, "used_default": True}, + {"action": "debug-menu", "result": False, "used_default": True}, {"action": "permissions-debug", "result": False, "used_default": True}, {"action": "view-instance", "result": None, "used_default": True}, ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 4a7ad7c6..948a40b8 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -824,6 +824,20 @@ def test_hook_forbidden(restore_working_directory): assert "view-database" == client.ds._last_forbidden_message +def test_hook_handle_exception(app_client): + app_client.get("/trigger-error?x=123") + assert hasattr(app_client.ds, "_exception_hook_fired") + request, exception = app_client.ds._exception_hook_fired + assert request.url == "http://localhost/trigger-error?x=123" + assert isinstance(exception, ZeroDivisionError) + + +@pytest.mark.parametrize("param", ("_custom_error", "_custom_error_async")) +def test_hook_handle_exception_custom_response(app_client, param): + response = app_client.get("/trigger-error?{}=1".format(param)) + assert response.text == param + + def test_hook_menu_links(app_client): def get_menu_links(html): soup = Soup(html, "html.parser") From 58fd1e33ec7ac5ed85431d5c86d60600cd5280fb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 17 Jul 2022 16:30:58 -0700 Subject: [PATCH 036/891] Hint that you can render templates for these hooks, refs #1770 --- docs/plugin_hooks.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 6020a941..b4869606 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1176,7 +1176,7 @@ forbidden(datasette, request, message) -------------------------------------- ``datasette`` - :ref:`internals_datasette` - You can use this to access plugin configuration options via 
``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to render templates or execute SQL queries. ``request`` - :ref:`internals_request` The current HTTP request. @@ -1224,7 +1224,7 @@ handle_exception(datasette, request, exception) ----------------------------------------------- ``datasette`` - :ref:`internals_datasette` - You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to render templates or execute SQL queries. ``request`` - :ref:`internals_request` The current HTTP request. From e543a095cc4c1ca895b082cfd1263ca25203a7c0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 17 Jul 2022 17:57:41 -0700 Subject: [PATCH 037/891] Updated default plugins in docs, refs #1770 --- docs/plugins.rst | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/docs/plugins.rst b/docs/plugins.rst index f2ed02f7..29078054 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -172,6 +172,24 @@ If you run ``datasette plugins --all`` it will include default plugins that ship "filters_from_request" ] }, + { + "name": "datasette.forbidden", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "forbidden" + ] + }, + { + "name": "datasette.handle_exception", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "handle_exception" + ] + }, { "name": "datasette.publish.cloudrun", "static": false, From 6d5e1955470424cf4faf5d35788d328ebdd6d463 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 17 Jul 2022 17:59:20 -0700 Subject: [PATCH 038/891] Release 0.62a1 Refs #1300, #1739, #1744, #1746, #1748, #1759, #1770 --- datasette/version.py | 2 +- docs/changelog.rst | 14 ++++++++++++++ 2 files changed, 15 
insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index cf18c441..86f4cf7e 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.62a0" +__version__ = "0.62a1" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 74814fcb..3f105811 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,20 @@ Changelog ========= +.. _v0_62a1: + +0.62a1 (2022-07-17) +------------------- + +- New plugin hook: :ref:`handle_exception() `, for custom handling of exceptions caught by Datasette. (:issue:`1770`) +- The :ref:`render_cell() ` plugin hook is now also passed a ``row`` argument, representing the ``sqlite3.Row`` object that is being rendered. (:issue:`1300`) +- New ``--nolock`` option for ignoring file locks when opening read-only databases. (:issue:`1744`) +- Documentation now uses the `Furo `__ Sphinx theme. (:issue:`1746`) +- Datasette now has a `Discord community `__. +- Database file downloads now implement conditional GET using ETags. (:issue:`1739`) +- Examples in the documentation now include a copy-to-clipboard button. (:issue:`1748`) +- HTML for facet results and suggested results has been extracted out into new templates ``_facet_results.html`` and ``_suggested_facets.html``. Thanks, M. Nasimul Haque. (`#1759 `__) + .. 
_v0_62a0: 0.62a0 (2022-05-02) From ed1ebc0f1d4153e3e0934f2af19f82e5fdf137d3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 17 Jul 2022 18:03:33 -0700 Subject: [PATCH 039/891] Run blacken-docs, refs #1770 --- docs/plugin_hooks.rst | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index b4869606..aec1df56 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1213,7 +1213,9 @@ The function can alternatively return an awaitable function if it needs to make def forbidden(datasette): async def inner(): return Response.html( - await datasette.render_template("render_message.html", request=request) + await datasette.render_template( + "render_message.html", request=request + ) ) return inner @@ -1249,10 +1251,14 @@ This example logs an error to `Sentry `__ and then renders a @hookimpl def handle_exception(datasette, exception): sentry_sdk.capture_exception(exception) + async def inner(): return Response.html( - await datasette.render_template("custom_error.html", request=request) + await datasette.render_template( + "custom_error.html", request=request + ) ) + return inner .. _plugin_hook_menu_links: From ea6161f8475d9fa41c4879049511c58f692cce04 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 17 Jul 2022 18:06:26 -0700 Subject: [PATCH 040/891] Bump furo from 2022.4.7 to 2022.6.21 (#1760) Bumps [furo](https://github.com/pradyunsg/furo) from 2022.4.7 to 2022.6.21. - [Release notes](https://github.com/pradyunsg/furo/releases) - [Changelog](https://github.com/pradyunsg/furo/blob/main/docs/changelog.md) - [Commits](https://github.com/pradyunsg/furo/compare/2022.04.07...2022.06.21) --- updated-dependencies: - dependency-name: furo dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 29cb77bf..558b5c87 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,7 @@ setup( setup_requires=["pytest-runner"], extras_require={ "docs": [ - "furo==2022.4.7", + "furo==2022.6.21", "sphinx-autobuild", "codespell", "blacken-docs", From 22354c48ce4d514d7a1b321e5651c7f1340e3f5e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 17 Jul 2022 18:06:37 -0700 Subject: [PATCH 041/891] Update pytest-asyncio requirement from <0.19,>=0.17 to >=0.17,<0.20 (#1769) Updates the requirements on [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) to permit the latest version. - [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) - [Changelog](https://github.com/pytest-dev/pytest-asyncio/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.17.0...v0.19.0) --- updated-dependencies: - dependency-name: pytest-asyncio dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 558b5c87..a1c51d0b 100644 --- a/setup.py +++ b/setup.py @@ -74,7 +74,7 @@ setup( "test": [ "pytest>=5.2.2,<7.2.0", "pytest-xdist>=2.2.1,<2.6", - "pytest-asyncio>=0.17,<0.19", + "pytest-asyncio>=0.17,<0.20", "beautifulsoup4>=4.8.1,<4.12.0", "black==22.6.0", "blacken-docs==1.12.1", From 01369176b0a8943ab45292ffc6f9c929b80a00e8 Mon Sep 17 00:00:00 2001 From: Chris Amico Date: Sun, 17 Jul 2022 21:12:45 -0400 Subject: [PATCH 042/891] Keep track of datasette.config_dir (#1766) Thanks, @eyeseast - closes #1764 --- datasette/app.py | 1 + tests/test_config_dir.py | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/datasette/app.py b/datasette/app.py index edd05bb3..1a9afc10 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -211,6 +211,7 @@ class Datasette: assert config_dir is None or isinstance( config_dir, Path ), "config_dir= should be a pathlib.Path" + self.config_dir = config_dir self.pdb = pdb self._secret = secret or secrets.token_hex(32) self.files = tuple(files or []) + tuple(immutables or []) diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index 015c6ace..fe927c42 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -1,4 +1,5 @@ import json +import pathlib import pytest from datasette.app import Datasette @@ -150,3 +151,11 @@ def test_metadata_yaml(tmp_path_factory, filename): response = client.get("/-/metadata.json") assert 200 == response.status assert {"title": "Title from metadata"} == response.json + + +def test_store_config_dir(config_dir_client): + ds = config_dir_client.ds + + assert hasattr(ds, "config_dir") + assert ds.config_dir is not None + assert isinstance(ds.config_dir, pathlib.Path) From 7af67b54b7d9bca43e948510fc62f6db2b748fa8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: 
Mon, 18 Jul 2022 14:31:09 -0700 Subject: [PATCH 043/891] How to register temporary plugins in tests, closes #903 --- docs/testing_plugins.rst | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 41046bfb..d02003a9 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -219,3 +219,39 @@ Here's a test for that plugin that mocks the HTTPX outbound request: assert ( outbound_request.url == "https://www.example.com/" ) + +.. _testing_plugins_register_in_test: + +Registering a plugin for the duration of a test +----------------------------------------------- + +When writing tests for plugins you may find it useful to register a test plugin just for the duration of a single test. You can do this using ``pm.register()`` and ``pm.unregister()`` like this: + +.. code-block:: python + + from datasette import hookimpl + from datasette.app import Datasette + from datasette.plugins import pm + import pytest + + + @pytest.mark.asyncio + async def test_using_test_plugin(): + class TestPlugin: + __name__ = "TestPlugin" + + # Use hookimpl and method names to register hooks + @hookimpl + def register_routes(self): + return [ + (r"^/error$", lambda: 1/0), + ] + + pm.register(TestPlugin(), name="undo") + try: + # The test implementation goes here + datasette = Datasette() + response = await datasette.client.get("/error") + assert response.status_code == 500 + finally: + pm.unregister(name="undo") From bca2d95d0228f80a108e13408f8e72b2c06c2c7b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 2 Aug 2022 16:38:02 -0700 Subject: [PATCH 044/891] Configure readthedocs/readthedocs-preview --- .github/workflows/documentation-links.yml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .github/workflows/documentation-links.yml diff --git a/.github/workflows/documentation-links.yml b/.github/workflows/documentation-links.yml new file mode 100644 index 
00000000..e7062a46 --- /dev/null +++ b/.github/workflows/documentation-links.yml @@ -0,0 +1,16 @@ +name: Read the Docs Pull Request Preview +on: + pull_request_target: + types: + - opened + +permissions: + pull-requests: write + +jobs: + documentation-links: + runs-on: ubuntu-latest + steps: + - uses: readthedocs/readthedocs-preview@main + with: + project-slug: "datasette" From 8cfc72336878dd846d149658e99cc598e835b661 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Aug 2022 11:21:53 -0700 Subject: [PATCH 045/891] Ran blacken-docs --- docs/testing_plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index d02003a9..992b4b0e 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -244,7 +244,7 @@ When writing tests for plugins you may find it useful to register a test plugin @hookimpl def register_routes(self): return [ - (r"^/error$", lambda: 1/0), + (r"^/error$", lambda: 1 / 0), ] pm.register(TestPlugin(), name="undo") From 05d9c682689a0f1d23cbb502e027364ab3363910 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 14 Aug 2022 08:16:53 -0700 Subject: [PATCH 046/891] Promote Discord more in the README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 032180aa..7ebbca57 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover * Comprehensive documentation: https://docs.datasette.io/ * Examples: https://datasette.io/examples * Live demo of current main: https://latest.datasette.io/ -* Support questions, feedback? Join our [GitHub Discussions forum](https://github.com/simonw/datasette/discussions) +* Questions, feedback or want to talk about the project? Join our [Discord](https://discord.gg/ktd74dm5mw) Want to stay up-to-date with the project? 
Subscribe to the [Datasette newsletter](https://datasette.substack.com/) for tips, tricks and news on what's new in the Datasette ecosystem. From db00c00f6397287749331e8042fe998ee7f3b919 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 14 Aug 2022 08:19:30 -0700 Subject: [PATCH 047/891] Promote Datasette Lite in the README, refs #1781 --- README.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 7ebbca57..1af20129 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover * Latest [Datasette News](https://datasette.io/news) * Comprehensive documentation: https://docs.datasette.io/ * Examples: https://datasette.io/examples -* Live demo of current main: https://latest.datasette.io/ +* Live demo of current `main` branch: https://latest.datasette.io/ * Questions, feedback or want to talk about the project? Join our [Discord](https://discord.gg/ktd74dm5mw) Want to stay up-to-date with the project? Subscribe to the [Datasette newsletter](https://datasette.substack.com/) for tips, tricks and news on what's new in the Datasette ecosystem. @@ -85,3 +85,7 @@ Or: This will create a docker image containing both the datasette application and the specified SQLite database files. It will then deploy that image to Heroku or Cloud Run and give you a URL to access the resulting website and API. See [Publishing data](https://docs.datasette.io/en/stable/publish.html) in the documentation for more details. + +## Datasette Lite + +[Datasette Lite](https://lite.datasette.io/) is Datasette packaged using WebAssembly so that it runs entirely in your browser, no Python web application server required. Read more about that in the [Datasette Lite documentation](https://github.com/simonw/datasette-lite/blob/main/README.md). 
From 8eb699de7becdefc6d72555d9fb17c9f06235dc4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 14 Aug 2022 08:24:39 -0700 Subject: [PATCH 048/891] Datasette Lite in Getting Started docs, closes #1781 --- docs/getting_started.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 00b753a9..571540cf 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -21,6 +21,17 @@ Datasette has several `tutorials `__ to help you - `Exploring a database with Datasette `__ shows how to use the Datasette web interface to explore a new database. - `Learn SQL with Datasette `__ introduces SQL, and shows how to use that query language to ask questions of your data. +.. _getting_started_datasette_lite: + +Datasette in your browser with Datasette Lite +--------------------------------------------- + +`Datasette Lite `__ is Datasette packaged using WebAssembly so that it runs entirely in your browser, no Python web application server required. + +You can pass a URL to a CSV, SQLite or raw SQL file directly to Datasette Lite to explore that data in your browser. + +This `example link `__ opens Datasette Lite and loads the SQL Murder Mystery example database from `Northwestern University Knight Lab `__. + .. 
_getting_started_glitch: Try Datasette without installing anything using Glitch From df4fd2d7ddca8956d8a51c72ce007b8c75227f32 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 14 Aug 2022 08:44:02 -0700 Subject: [PATCH 049/891] _sort= works even if sort column not selected, closes #1773 --- datasette/views/table.py | 22 +++++++++++++++++++++- tests/test_table_api.py | 2 ++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index cd4be823..94d2673b 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -630,7 +630,27 @@ class TableView(DataView): next_value = path_from_row_pks(rows[-2], pks, use_rowid) # If there's a sort or sort_desc, add that value as a prefix if (sort or sort_desc) and not is_view: - prefix = rows[-2][sort or sort_desc] + try: + prefix = rows[-2][sort or sort_desc] + except IndexError: + # sort/sort_desc column missing from SELECT - look up value by PK instead + prefix_where_clause = " and ".join( + "[{}] = :pk{}".format(pk, i) for i, pk in enumerate(pks) + ) + prefix_lookup_sql = "select [{}] from [{}] where {}".format( + sort or sort_desc, table_name, prefix_where_clause + ) + prefix = ( + await db.execute( + prefix_lookup_sql, + { + **{ + "pk{}".format(i): rows[-2][pk] + for i, pk in enumerate(pks) + } + }, + ) + ).single_value() if isinstance(prefix, dict) and "value" in prefix: prefix = prefix["value"] if prefix is None: diff --git a/tests/test_table_api.py b/tests/test_table_api.py index 9db383c3..e56a72b5 100644 --- a/tests/test_table_api.py +++ b/tests/test_table_api.py @@ -288,6 +288,8 @@ def test_paginate_compound_keys_with_extra_filters(app_client): ), # text column contains '$null' - ensure it doesn't confuse pagination: ("_sort=text", lambda row: row["text"], "sorted by text"), + # Still works if sort column removed using _col= + ("_sort=text&_col=content", lambda row: row["text"], "sorted by text"), ], ) def test_sortable(app_client, query_string, 
sort_key, human_description_en): From 668415df9f6334bd255c22ab02018bed5bc14edd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 14 Aug 2022 08:47:17 -0700 Subject: [PATCH 050/891] Upgrade Docker baes to 3.10.6-slim-bullseye - refs #1768 --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 42f5529b..ee7ed957 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9.7-slim-bullseye as build +FROM python:3.10.6-slim-bullseye as build # Version of Datasette to install, e.g. 0.55 # docker build . -t datasette --build-arg VERSION=0.55 From 080d4b3e065d78faf977c6ded6ead31aae24e2ae Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 14 Aug 2022 08:49:14 -0700 Subject: [PATCH 051/891] Switch to python:3.10.6-slim-bullseye for datasette publish - refs #1768 --- datasette/utils/__init__.py | 2 +- demos/apache-proxy/Dockerfile | 2 +- docs/publish.rst | 2 +- tests/test_package.py | 2 +- tests/test_publish_cloudrun.py | 4 ++-- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 77768112..d148cc2c 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -390,7 +390,7 @@ def make_dockerfile( "SQLITE_EXTENSIONS" ] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so" return """ -FROM python:3.8 +FROM python:3.10.6-slim-bullseye COPY . 
/app WORKDIR /app {apt_get_extras} diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile index 6c921963..70b33bec 100644 --- a/demos/apache-proxy/Dockerfile +++ b/demos/apache-proxy/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9.7-slim-bullseye +FROM python:3.10.6-slim-bullseye RUN apt-get update && \ apt-get install -y apache2 supervisor && \ diff --git a/docs/publish.rst b/docs/publish.rst index 166f2883..9c7c99cc 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -144,7 +144,7 @@ Here's example output for the package command:: $ datasette package parlgov.db --extra-options="--setting sql_time_limit_ms 2500" Sending build context to Docker daemon 4.459MB - Step 1/7 : FROM python:3 + Step 1/7 : FROM python:3.10.6-slim-bullseye ---> 79e1dc9af1c1 Step 2/7 : COPY . /app ---> Using cache diff --git a/tests/test_package.py b/tests/test_package.py index 02ed1775..ac15e61e 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -12,7 +12,7 @@ class CaptureDockerfile: EXPECTED_DOCKERFILE = """ -FROM python:3.8 +FROM python:3.10.6-slim-bullseye COPY . /app WORKDIR /app diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index 3427f4f7..60079ab3 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -223,7 +223,7 @@ def test_publish_cloudrun_plugin_secrets( ) expected = textwrap.dedent( r""" - FROM python:3.8 + FROM python:3.10.6-slim-bullseye COPY . /app WORKDIR /app @@ -290,7 +290,7 @@ def test_publish_cloudrun_apt_get_install( ) expected = textwrap.dedent( r""" - FROM python:3.8 + FROM python:3.10.6-slim-bullseye COPY . 
/app WORKDIR /app From 1563c22a8c65e6cff5194aa07df54d0ab8d4eecb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 14 Aug 2022 09:13:12 -0700 Subject: [PATCH 052/891] Don't duplicate _sort_desc, refs #1738 --- datasette/views/table.py | 2 +- tests/test_table_html.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 94d2673b..49c30c9c 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -710,7 +710,7 @@ class TableView(DataView): for key in request.args: if ( key.startswith("_") - and key not in ("_sort", "_search", "_next") + and key not in ("_sort", "_sort_desc", "_search", "_next") and "__" not in key ): for value in request.args.getlist(key): diff --git a/tests/test_table_html.py b/tests/test_table_html.py index d3cb3e17..f3808ea3 100644 --- a/tests/test_table_html.py +++ b/tests/test_table_html.py @@ -828,6 +828,7 @@ def test_other_hidden_form_fields(app_client, path, expected_hidden): [ ("/fixtures/searchable?_search=terry", []), ("/fixtures/searchable?_sort=text2", []), + ("/fixtures/searchable?_sort_desc=text2", []), ("/fixtures/searchable?_sort=text2&_where=1", [("_where", "1")]), ], ) From c1396bf86033a7bd99fa0c0431f585475391a11a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 14 Aug 2022 09:34:31 -0700 Subject: [PATCH 053/891] Don't allow canned write queries on immutable DBs, closes #1728 --- datasette/templates/query.html | 6 ++++- datasette/views/database.py | 4 ++++ tests/test_canned_queries.py | 40 ++++++++++++++++++++++++++++++++++ 3 files changed, 49 insertions(+), 1 deletion(-) diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 8c920527..cee779fc 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -28,6 +28,10 @@ {% block content %} +{% if canned_write and db_is_immutable %} +

This query cannot be executed because the database is immutable.

+{% endif %} +

{{ metadata.title or database }}{% if canned_query and not metadata.title %}: {{ canned_query }}{% endif %}{% if private %} 🔒{% endif %}

{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} @@ -61,7 +65,7 @@

{% if not hide_sql %}{% endif %} {% if canned_write %}{% endif %} - + {{ show_hide_hidden }} {% if canned_query and edit_sql_url %}Edit SQL{% endif %}

diff --git a/datasette/views/database.py b/datasette/views/database.py index 42058752..77632b9d 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -273,6 +273,9 @@ class QueryView(DataView): # Execute query - as write or as read if write: if request.method == "POST": + # If database is immutable, return an error + if not db.is_mutable: + raise Forbidden("Database is immutable") body = await request.post_body() body = body.decode("utf-8").strip() if body.startswith("{") and body.endswith("}"): @@ -326,6 +329,7 @@ class QueryView(DataView): async def extra_template(): return { "request": request, + "db_is_immutable": not db.is_mutable, "path_with_added_args": path_with_added_args, "path_with_removed_args": path_with_removed_args, "named_parameter_values": named_parameter_values, diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index 5abffdcc..976aa0db 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -53,6 +53,26 @@ def canned_write_client(tmpdir): yield client +@pytest.fixture +def canned_write_immutable_client(): + with make_app_client( + is_immutable=True, + metadata={ + "databases": { + "fixtures": { + "queries": { + "add": { + "sql": "insert into sortable (text) values (:text)", + "write": True, + }, + } + } + } + }, + ) as client: + yield client + + def test_canned_query_with_named_parameter(app_client): response = app_client.get("/fixtures/neighborhood_search.json?text=town") assert [ @@ -373,3 +393,23 @@ def test_canned_write_custom_template(canned_write_client): response.headers["link"] == 'http://localhost/data/update_name.json; rel="alternate"; type="application/json+datasette"' ) + + +def test_canned_write_query_disabled_for_immutable_database( + canned_write_immutable_client, +): + response = canned_write_immutable_client.get("/fixtures/add") + assert response.status == 200 + assert ( + "This query cannot be executed because the database is immutable." 
+ in response.text + ) + assert '' in response.text + # Submitting form should get a forbidden error + response = canned_write_immutable_client.post( + "/fixtures/add", + {"text": "text"}, + csrftoken_from=True, + ) + assert response.status == 403 + assert "Database is immutable" in response.text From 82167105ee699c850cc106ea927de1ad09276cfe Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 14 Aug 2022 10:07:30 -0700 Subject: [PATCH 054/891] --min-instances and --max-instances Cloud Run publish options, closes #1779 --- datasette/publish/cloudrun.py | 26 +++++++++++++++++--- docs/cli-reference.rst | 2 ++ tests/test_publish_cloudrun.py | 43 ++++++++++++++++++++++++---------- 3 files changed, 56 insertions(+), 15 deletions(-) diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 50b2b2fd..77274eb0 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -52,6 +52,16 @@ def publish_subcommand(publish): multiple=True, help="Additional packages to apt-get install", ) + @click.option( + "--max-instances", + type=int, + help="Maximum Cloud Run instances", + ) + @click.option( + "--min-instances", + type=int, + help="Minimum Cloud Run instances", + ) def cloudrun( files, metadata, @@ -79,6 +89,8 @@ def publish_subcommand(publish): cpu, timeout, apt_get_extras, + max_instances, + min_instances, ): "Publish databases to Datasette running on Cloud Run" fail_if_publish_binary_not_installed( @@ -168,12 +180,20 @@ def publish_subcommand(publish): ), shell=True, ) + extra_deploy_options = [] + for option, value in ( + ("--memory", memory), + ("--cpu", cpu), + ("--max-instances", max_instances), + ("--min-instances", min_instances), + ): + if value: + extra_deploy_options.append("{} {}".format(option, value)) check_call( - "gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}{}".format( + "gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}".format( image_id, service, - " 
--memory {}".format(memory) if memory else "", - " --cpu {}".format(cpu) if cpu else "", + " " + " ".join(extra_deploy_options) if extra_deploy_options else "", ), shell=True, ) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 1c1aff15..415af13c 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -251,6 +251,8 @@ datasette publish cloudrun --help --cpu [1|2|4] Number of vCPUs to allocate in Cloud Run --timeout INTEGER Build timeout in seconds --apt-get-install TEXT Additional packages to apt-get install + --max-instances INTEGER Maximum Cloud Run instances + --min-instances INTEGER Minimum Cloud Run instances --help Show this message and exit. diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index 60079ab3..e64534d2 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -105,19 +105,36 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which, tmp_path_factory): @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @pytest.mark.parametrize( - "memory,cpu,timeout,expected_gcloud_args", + "memory,cpu,timeout,min_instances,max_instances,expected_gcloud_args", [ - ["1Gi", None, None, "--memory 1Gi"], - ["2G", None, None, "--memory 2G"], - ["256Mi", None, None, "--memory 256Mi"], - ["4", None, None, None], - ["GB", None, None, None], - [None, 1, None, "--cpu 1"], - [None, 2, None, "--cpu 2"], - [None, 3, None, None], - [None, 4, None, "--cpu 4"], - ["2G", 4, None, "--memory 2G --cpu 4"], - [None, None, 1800, "--timeout 1800"], + ["1Gi", None, None, None, None, "--memory 1Gi"], + ["2G", None, None, None, None, "--memory 2G"], + ["256Mi", None, None, None, None, "--memory 256Mi"], + [ + "4", + None, + None, + None, + None, + None, + ], + [ + "GB", + None, + None, + None, + None, + None, + ], + [None, 1, None, None, None, "--cpu 1"], + [None, 2, None, None, None, "--cpu 2"], + [None, 3, None, None, None, None], + [None, 4, 
None, None, None, "--cpu 4"], + ["2G", 4, None, None, None, "--memory 2G --cpu 4"], + [None, None, 1800, None, None, "--timeout 1800"], + [None, None, None, 2, None, "--min-instances 2"], + [None, None, None, 2, 4, "--min-instances 2 --max-instances 4"], + [None, 2, None, None, 4, "--cpu 2 --max-instances 4"], ], ) def test_publish_cloudrun_memory_cpu( @@ -127,6 +144,8 @@ def test_publish_cloudrun_memory_cpu( memory, cpu, timeout, + min_instances, + max_instances, expected_gcloud_args, tmp_path_factory, ): From 5e6c5c9e3191a80f17a91c5205d9d69efdebb73f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 14 Aug 2022 10:18:47 -0700 Subject: [PATCH 055/891] Document datasette.config_dir, refs #1766 --- docs/internals.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/internals.rst b/docs/internals.rst index da135282..20797e98 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -260,6 +260,7 @@ Constructor parameters include: - ``files=[...]`` - a list of database files to open - ``immutables=[...]`` - a list of database files to open in immutable mode - ``metadata={...}`` - a dictionary of :ref:`metadata` +- ``config_dir=...`` - the :ref:`configuration directory ` to use, stored in ``datasette.config_dir`` .. 
_datasette_databases: From 815162cf029fab9f1c9308c1d6ecdba7ee369ebe Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 14 Aug 2022 10:32:42 -0700 Subject: [PATCH 056/891] Release 0.62 Refs #903, #1300, #1683, #1701, #1712, #1717, #1718, #1728, #1733, #1738, #1739, #1744, #1746, #1748, #1759, #1766, #1768, #1770, #1773, #1779 Closes #1782 --- datasette/version.py | 2 +- docs/changelog.rst | 53 ++++++++++++++++++++++++++++++-------------- 2 files changed, 37 insertions(+), 18 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index 86f4cf7e..0453346c 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.62a1" +__version__ = "0.62" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 3f105811..1225c63f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,33 +4,52 @@ Changelog ========= -.. _v0_62a1: +.. _v0_62: -0.62a1 (2022-07-17) +0.62 (2022-08-14) ------------------- +Datasette can now run entirely in your browser using WebAssembly. Try out `Datasette Lite `__, take a look `at the code `__ or read more about it in `Datasette Lite: a server-side Python web application running in a browser `__. + +Datasette now has a `Discord community `__ for questions and discussions about Datasette and its ecosystem of projects. + +Features +~~~~~~~~ + +- Datasette is now compatible with `Pyodide `__. This is the enabling technology behind `Datasette Lite `__. (:issue:`1733`) +- Database file downloads now implement conditional GET using ETags. (:issue:`1739`) +- HTML for facet results and suggested results has been extracted out into new templates ``_facet_results.html`` and ``_suggested_facets.html``. Thanks, M. Nasimul Haque. (`#1759 `__) +- Datasette now runs some SQL queries in parallel. This has limited impact on performance, see `this research issue `__ for details. 
+- New ``--nolock`` option for ignoring file locks when opening read-only databases. (:issue:`1744`) +- Spaces in the database names in URLs are now encoded as ``+`` rather than ``~20``. (:issue:`1701`) +- ```` is now displayed as ```` and is accompanied by tooltip showing "2.3MB". (:issue:`1712`) +- The base Docker image used by ``datasette publish cloudrun``, ``datasette package`` and the `official Datasette image `__ has been upgraded to ``3.10.6-slim-bullseye``. (:issue:`1768`) +- Canned writable queries against immutable databases now show a warning message. (:issue:`1728`) +- ``datasette publish cloudrun`` has a new ``--timeout`` option which can be used to increase the time limit applied by the Google Cloud build environment. Thanks, Tim Sherratt. (`#1717 `__) +- ``datasette publish cloudrun`` has new ``--min-instances`` and ``--max-instances`` options. (:issue:`1779`) + +Plugin hooks +~~~~~~~~~~~~ + - New plugin hook: :ref:`handle_exception() `, for custom handling of exceptions caught by Datasette. (:issue:`1770`) - The :ref:`render_cell() ` plugin hook is now also passed a ``row`` argument, representing the ``sqlite3.Row`` object that is being rendered. (:issue:`1300`) -- New ``--nolock`` option for ignoring file locks when opening read-only databases. (:issue:`1744`) -- Documentation now uses the `Furo `__ Sphinx theme. (:issue:`1746`) -- Datasette now has a `Discord community `__. -- Database file downloads now implement conditional GET using ETags. (:issue:`1739`) -- Examples in the documentation now include a copy-to-clipboard button. (:issue:`1748`) -- HTML for facet results and suggested results has been extracted out into new templates ``_facet_results.html`` and ``_suggested_facets.html``. Thanks, M. Nasimul Haque. (`#1759 `__) +- The :ref:`configuration directory ` is now stored in ``datasette.config_dir``, making it available to plugins. Thanks, Chris Amico. (`#1766 `__) -.. 
_v0_62a0: +Bug fixes +~~~~~~~~~ -0.62a0 (2022-05-02) -------------------- - -- Datasette now runs some SQL queries in parallel. This has limited impact on performance, see `this research issue `__ for details. -- Datasette should now be compatible with Pyodide. (:issue:`1733`) -- ``datasette publish cloudrun`` has a new ``--timeout`` option which can be used to increase the time limit applied by the Google Cloud build environment. Thanks, Tim Sherratt. (`#1717 `__) -- Spaces in database names are now encoded as ``+`` rather than ``~20``. (:issue:`1701`) -- ```` is now displayed as ```` and is accompanied by tooltip showing "2.3MB". (:issue:`1712`) - Don't show the facet option in the cog menu if faceting is not allowed. (:issue:`1683`) +- ``?_sort`` and ``?_sort_desc`` now work if the column that is being sorted has been excluded from the query using ``?_col=`` or ``?_nocol=``. (:issue:`1773`) +- Fixed bug where ``?_sort_desc`` was duplicated in the URL every time the Apply button was clicked. (:issue:`1738`) + +Documentation +~~~~~~~~~~~~~ + +- Examples in the documentation now include a copy-to-clipboard button. (:issue:`1748`) +- Documentation now uses the `Furo `__ Sphinx theme. (:issue:`1746`) - Code examples in the documentation are now all formatted using Black. (:issue:`1718`) - ``Request.fake()`` method is now documented, see :ref:`internals_request`. +- New documentation for plugin authors: :ref:`testing_plugins_register_in_test`. (:issue:`903`) .. 
_v0_61_1: From a107e3a028923c1ab3911c0f880011283f93f368 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 14 Aug 2022 16:07:46 -0700 Subject: [PATCH 057/891] datasette-sentry is an example of handle_exception --- docs/plugin_hooks.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index aec1df56..c6f35d06 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1261,6 +1261,8 @@ This example logs an error to `Sentry `__ and then renders a return inner +Example: `datasette-sentry `_ + .. _plugin_hook_menu_links: menu_links(datasette, actor, request) From 481eb96d85291cdfa5767a83884a1525dfc382d8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 15 Aug 2022 13:17:28 -0700 Subject: [PATCH 058/891] https://datasette.io/tutorials/clean-data tutorial Refs #1783 --- docs/getting_started.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 571540cf..a9eaa404 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -20,6 +20,7 @@ Datasette has several `tutorials `__ to help you - `Exploring a database with Datasette `__ shows how to use the Datasette web interface to explore a new database. - `Learn SQL with Datasette `__ introduces SQL, and shows how to use that query language to ask questions of your data. +- `Cleaning data with sqlite-utils and Datasette `__ guides you through using `sqlite-utils `__ to turn a CSV file into a database that you can explore using Datasette. .. 
_getting_started_datasette_lite: From a3e6f1b16757fb2d39e7ddba4e09eda2362508bf Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Aug 2022 09:06:02 -0700 Subject: [PATCH 059/891] Increase height of non-JS textarea to fit query Closes #1786 --- datasette/templates/query.html | 3 ++- tests/test_html.py | 6 ++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/datasette/templates/query.html b/datasette/templates/query.html index cee779fc..a35e3afe 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -45,7 +45,8 @@ {% endif %} {% if not hide_sql %} {% if editable and allow_execute_sql %} -

+

{% else %}
{% if query %}{{ query.sql }}{% endif %}
{% endif %} diff --git a/tests/test_html.py b/tests/test_html.py index 409fec68..be21bd84 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -695,10 +695,8 @@ def test_query_error(app_client): response = app_client.get("/fixtures?sql=select+*+from+notatable") html = response.text assert '

no such table: notatable

' in html - assert ( - '' - in html - ) + assert '" in html assert "0 results" not in html From 09a41662e70b788469157bb58ed9ca4acdf2f904 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Aug 2022 09:10:48 -0700 Subject: [PATCH 060/891] Fix typo --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index c6f35d06..30bd75b7 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -874,7 +874,7 @@ canned_queries(datasette, database, actor) ``actor`` - dictionary or None The currently authenticated :ref:`actor `. -Ues this hook to return a dictionary of additional :ref:`canned query ` definitions for the specified database. The return value should be the same shape as the JSON described in the :ref:`canned query ` documentation. +Use this hook to return a dictionary of additional :ref:`canned query ` definitions for the specified database. The return value should be the same shape as the JSON described in the :ref:`canned query ` documentation. .. 
code-block:: python From 6c0ba7c00c2ae3ecbb5309efa59079cea1c850b3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Aug 2022 14:52:04 -0700 Subject: [PATCH 061/891] Improved CLI reference documentation, refs #1787 --- datasette/cli.py | 2 +- docs/changelog.rst | 2 +- docs/cli-reference.rst | 325 ++++++++++++++++++++++++++++++--------- docs/getting_started.rst | 50 ------ docs/index.rst | 2 +- docs/publish.rst | 2 + 6 files changed, 259 insertions(+), 124 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 8781747c..f2a03d53 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -282,7 +282,7 @@ def package( port, **extra_metadata, ): - """Package specified SQLite files into a new datasette Docker container""" + """Package SQLite files into a Datasette Docker container""" if not shutil.which("docker"): click.secho( ' The package command requires "docker" to be installed and configured ', diff --git a/docs/changelog.rst b/docs/changelog.rst index 1225c63f..f9dcc980 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -621,7 +621,7 @@ See also `Datasette 0.49: The annotated release notes `__ for conversations about the project that go beyond just bug reports and issues. - Datasette can now be installed on macOS using Homebrew! Run ``brew install simonw/datasette/datasette``. See :ref:`installation_homebrew`. (:issue:`335`) - Two new commands: ``datasette install name-of-plugin`` and ``datasette uninstall name-of-plugin``. These are equivalent to ``pip install`` and ``pip uninstall`` but automatically run in the same virtual environment as Datasette, so users don't have to figure out where that virtual environment is - useful for installations created using Homebrew or ``pipx``. See :ref:`plugins_installing`. (:issue:`925`) -- A new command-line option, ``datasette --get``, accepts a path to a URL within the Datasette instance. It will run that request through Datasette (without starting a web server) and print out the response. 
See :ref:`getting_started_datasette_get` for an example. (:issue:`926`) +- A new command-line option, ``datasette --get``, accepts a path to a URL within the Datasette instance. It will run that request through Datasette (without starting a web server) and print out the response. See :ref:`cli_datasette_get` for an example. (:issue:`926`) .. _v0_46: diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 415af13c..a1e56774 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -4,44 +4,34 @@ CLI reference =============== -This page lists the ``--help`` for every ``datasette`` CLI command. +The ``datasette`` CLI tool provides a number of commands. + +Running ``datasette`` without specifying a command runs the default command, ``datasette serve``. See :ref:`cli_help_serve___help` for the full list of options for that command. .. [[[cog from datasette import cli from click.testing import CliRunner import textwrap - commands = [ - ["--help"], - ["serve", "--help"], - ["serve", "--help-settings"], - ["plugins", "--help"], - ["publish", "--help"], - ["publish", "cloudrun", "--help"], - ["publish", "heroku", "--help"], - ["package", "--help"], - ["inspect", "--help"], - ["install", "--help"], - ["uninstall", "--help"], - ] - cog.out("\n") - for command in commands: - title = "datasette " + " ".join(command) - ref = "_cli_help_" + ("_".join(command).replace("-", "_")) - cog.out(".. {}:\n\n".format(ref)) - cog.out(title + "\n") - cog.out(("=" * len(title)) + "\n\n") + def help(args): + title = "datasette " + " ".join(args) cog.out("::\n\n") - result = CliRunner().invoke(cli.cli, command) + result = CliRunner().invoke(cli.cli, args) output = result.output.replace("Usage: cli ", "Usage: datasette ") cog.out(textwrap.indent(output, ' ')) cog.out("\n\n") .. ]]] +.. [[[end]]] .. _cli_help___help: datasette --help ================ +Running ``datasette --help`` shows a list of all of the available commands. + +.. [[[cog + help(["--help"]) +.. 
]]] :: Usage: datasette [OPTIONS] COMMAND [ARGS]... @@ -59,17 +49,34 @@ datasette --help serve* Serve up specified SQLite database files with a web UI inspect Generate JSON summary of provided database files install Install plugins and packages from PyPI into the same... - package Package specified SQLite files into a new datasette Docker... + package Package SQLite files into a Datasette Docker container plugins List currently installed plugins publish Publish specified SQLite database files to the internet along... uninstall Uninstall plugins and Python packages from the Datasette... +.. [[[end]]] + +Additional commands added by plugins that use the :ref:`plugin_hook_register_commands` hook will be listed here as well. + .. _cli_help_serve___help: -datasette serve --help -====================== +datasette serve +=============== +This command starts the Datasette web application running on your machine:: + + datasette serve mydatabase.db + +Or since this is the default command you can run this instead:: + + datasette mydatabase.db + +Once started you can access it at ``http://localhost:8001`` + +.. [[[cog + help(["serve", "--help"]) +.. ]]] :: Usage: datasette serve [OPTIONS] [FILES]... @@ -121,11 +128,75 @@ datasette serve --help --help Show this message and exit. +.. [[[end]]] + + +.. _cli_datasette_get: + +datasette --get +--------------- + +The ``--get`` option to ``datasette serve`` (or just ``datasette``) specifies the path to a page within Datasette and causes Datasette to output the content from that path without starting the web server. + +This means that all of Datasette's functionality can be accessed directly from the command-line. + +For example:: + + $ datasette --get '/-/versions.json' | jq . 
+ { + "python": { + "version": "3.8.5", + "full": "3.8.5 (default, Jul 21 2020, 10:48:26) \n[Clang 11.0.3 (clang-1103.0.32.62)]" + }, + "datasette": { + "version": "0.46+15.g222a84a.dirty" + }, + "asgi": "3.0", + "uvicorn": "0.11.8", + "sqlite": { + "version": "3.32.3", + "fts_versions": [ + "FTS5", + "FTS4", + "FTS3" + ], + "extensions": { + "json1": null + }, + "compile_options": [ + "COMPILER=clang-11.0.3", + "ENABLE_COLUMN_METADATA", + "ENABLE_FTS3", + "ENABLE_FTS3_PARENTHESIS", + "ENABLE_FTS4", + "ENABLE_FTS5", + "ENABLE_GEOPOLY", + "ENABLE_JSON1", + "ENABLE_PREUPDATE_HOOK", + "ENABLE_RTREE", + "ENABLE_SESSION", + "MAX_VARIABLE_NUMBER=250000", + "THREADSAFE=1" + ] + } + } + +The exit code will be 0 if the request succeeds and 1 if the request produced an HTTP status code other than 200 - e.g. a 404 or 500 error. + +This lets you use ``datasette --get /`` to run tests against a Datasette application in a continuous integration environment such as GitHub Actions. + .. _cli_help_serve___help_settings: datasette serve --help-settings -=============================== +------------------------------- +This command outputs all of the available Datasette :ref:`settings `. + +These can be passed to ``datasette serve`` using ``datasette serve --setting name value``. + +.. [[[cog + help(["--help-settings"]) +.. ]]] :: Settings: @@ -170,11 +241,18 @@ datasette serve --help-settings +.. [[[end]]] + .. _cli_help_plugins___help: -datasette plugins --help -======================== +datasette plugins +================= +Output JSON showing all currently installed plugins, their versions, whether they include static files or templates and which :ref:`plugin_hooks` they use. + +.. [[[cog + help(["plugins", "--help"]) +.. ]]] :: Usage: datasette plugins [OPTIONS] @@ -187,11 +265,110 @@ datasette plugins --help --help Show this message and exit. +.. [[[end]]] + +Example output: + +.. 
code-block:: json + + [ + { + "name": "datasette-geojson", + "static": false, + "templates": false, + "version": "0.3.1", + "hooks": [ + "register_output_renderer" + ] + }, + { + "name": "datasette-geojson-map", + "static": true, + "templates": false, + "version": "0.4.0", + "hooks": [ + "extra_body_script", + "extra_css_urls", + "extra_js_urls" + ] + }, + { + "name": "datasette-leaflet", + "static": true, + "templates": false, + "version": "0.2.2", + "hooks": [ + "extra_body_script", + "extra_template_vars" + ] + } + ] + + +.. _cli_help_install___help: + +datasette install +================= + +Install new Datasette plugins. This command works like ``pip install`` but ensures that your plugins will be installed into the same environment as Datasette. + +This command:: + + datasette install datasette-cluster-map + +Would install the `datasette-cluster-map `__ plugin. + +.. [[[cog + help(["install", "--help"]) +.. ]]] +:: + + Usage: datasette install [OPTIONS] PACKAGES... + + Install plugins and packages from PyPI into the same environment as Datasette + + Options: + -U, --upgrade Upgrade packages to latest version + --help Show this message and exit. + + +.. [[[end]]] + +.. _cli_help_uninstall___help: + +datasette uninstall +=================== + +Uninstall one or more plugins. + +.. [[[cog + help(["uninstall", "--help"]) +.. ]]] +:: + + Usage: datasette uninstall [OPTIONS] PACKAGES... + + Uninstall plugins and Python packages from the Datasette environment + + Options: + -y, --yes Don't ask for confirmation + --help Show this message and exit. + + +.. [[[end]]] + .. _cli_help_publish___help: -datasette publish --help -======================== +datasette publish +================= +Shows a list of available deployment targets for :ref:`publishing data ` with Datasette. + +Additional deployment targets can be added by plugins that use the :ref:`plugin_hook_publish_subcommand` hook. + +.. [[[cog + help(["publish", "--help"]) +.. 
]]] :: Usage: datasette publish [OPTIONS] COMMAND [ARGS]... @@ -207,11 +384,19 @@ datasette publish --help heroku Publish databases to Datasette running on Heroku +.. [[[end]]] + + .. _cli_help_publish_cloudrun___help: -datasette publish cloudrun --help -================================= +datasette publish cloudrun +========================== +See :ref:`publish_cloud_run`. + +.. [[[cog + help(["publish", "cloudrun", "--help"]) +.. ]]] :: Usage: datasette publish cloudrun [OPTIONS] [FILES]... @@ -256,11 +441,19 @@ datasette publish cloudrun --help --help Show this message and exit. +.. [[[end]]] + + .. _cli_help_publish_heroku___help: -datasette publish heroku --help -=============================== +datasette publish heroku +======================== +See :ref:`publish_heroku`. + +.. [[[cog + help(["publish", "heroku", "--help"]) +.. ]]] :: Usage: datasette publish heroku [OPTIONS] [FILES]... @@ -297,16 +490,23 @@ datasette publish heroku --help --help Show this message and exit. +.. [[[end]]] + .. _cli_help_package___help: -datasette package --help -======================== +datasette package +================= +Package SQLite files into a Datasette Docker container, see :ref:`cli_package`. + +.. [[[cog + help(["package", "--help"]) +.. ]]] :: Usage: datasette package [OPTIONS] FILES... - Package specified SQLite files into a new datasette Docker container + Package SQLite files into a Datasette Docker container Options: -t, --tag TEXT Name for the resulting Docker container, can @@ -335,11 +535,26 @@ datasette package --help --help Show this message and exit. +.. [[[end]]] + + .. _cli_help_inspect___help: -datasette inspect --help -======================== +datasette inspect +================= +Outputs JSON representing introspected data about one or more SQLite database files. 
+ +If you are opening an immutable database, you can pass this file to the ``--inspect-data`` option to improve Datasette's performance by allowing it to skip running row counts against the database when it first starts running:: + + datasette inspect mydatabase.db > inspect-data.json + datasette serve -i mydatabase.db --inspect-file inspect-data.json + +This performance optimization is used automatically by some of the ``datasette publish`` commands. You are unlikely to need to apply this optimization manually. + +.. [[[cog + help(["inspect", "--help"]) +.. ]]] :: Usage: datasette inspect [OPTIONS] [FILES]... @@ -355,36 +570,4 @@ datasette inspect --help --help Show this message and exit. -.. _cli_help_install___help: - -datasette install --help -======================== - -:: - - Usage: datasette install [OPTIONS] PACKAGES... - - Install plugins and packages from PyPI into the same environment as Datasette - - Options: - -U, --upgrade Upgrade packages to latest version - --help Show this message and exit. - - -.. _cli_help_uninstall___help: - -datasette uninstall --help -========================== - -:: - - Usage: datasette uninstall [OPTIONS] PACKAGES... - - Uninstall plugins and Python packages from the Datasette environment - - Options: - -y, --yes Don't ask for confirmation - --help Show this message and exit. - - .. [[[end]]] diff --git a/docs/getting_started.rst b/docs/getting_started.rst index a9eaa404..6515ef8d 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -138,53 +138,3 @@ JSON in a more convenient format: } ] } - -.. _getting_started_datasette_get: - -datasette --get ---------------- - -The ``--get`` option can specify the path to a page within Datasette and cause Datasette to output the content from that path without starting the web server. This means that all of Datasette's functionality can be accessed directly from the command-line. For example:: - - $ datasette --get '/-/versions.json' | jq . 
- { - "python": { - "version": "3.8.5", - "full": "3.8.5 (default, Jul 21 2020, 10:48:26) \n[Clang 11.0.3 (clang-1103.0.32.62)]" - }, - "datasette": { - "version": "0.46+15.g222a84a.dirty" - }, - "asgi": "3.0", - "uvicorn": "0.11.8", - "sqlite": { - "version": "3.32.3", - "fts_versions": [ - "FTS5", - "FTS4", - "FTS3" - ], - "extensions": { - "json1": null - }, - "compile_options": [ - "COMPILER=clang-11.0.3", - "ENABLE_COLUMN_METADATA", - "ENABLE_FTS3", - "ENABLE_FTS3_PARENTHESIS", - "ENABLE_FTS4", - "ENABLE_FTS5", - "ENABLE_GEOPOLY", - "ENABLE_JSON1", - "ENABLE_PREUPDATE_HOOK", - "ENABLE_RTREE", - "ENABLE_SESSION", - "MAX_VARIABLE_NUMBER=250000", - "THREADSAFE=1" - ] - } - } - -The exit code will be 0 if the request succeeds and 1 if the request produced an HTTP status code other than 200 - e.g. a 404 or 500 error. This means you can use ``datasette --get /`` to run tests against a Datasette application in a continuous integration environment such as GitHub Actions. - -Running ``datasette`` without specifying a command runs the default command, ``datasette serve``. See :ref:`cli_help_serve___help` for the full list of options for that command. diff --git a/docs/index.rst b/docs/index.rst index efe196b3..5a9cc7ed 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -40,6 +40,7 @@ Contents getting_started installation ecosystem + cli-reference pages publish deploying @@ -61,6 +62,5 @@ Contents plugin_hooks testing_plugins internals - cli-reference contributing changelog diff --git a/docs/publish.rst b/docs/publish.rst index 9c7c99cc..dd8566ed 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -56,6 +56,8 @@ Cloud Run provides a URL on the ``.run.app`` domain, but you can also point your See :ref:`cli_help_publish_cloudrun___help` for the full list of options for this command. +.. 
_publish_heroku: + Publishing to Heroku -------------------- From aff3df03d4fe0806ce432d1818f6643cdb2a854e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Aug 2022 14:55:08 -0700 Subject: [PATCH 062/891] Ignore ro which stands for read only Refs #1787 where it caused tests to break --- docs/codespell-ignore-words.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/codespell-ignore-words.txt b/docs/codespell-ignore-words.txt index a625cde5..d6744d05 100644 --- a/docs/codespell-ignore-words.txt +++ b/docs/codespell-ignore-words.txt @@ -1 +1 @@ -AddWordsToIgnoreHere +ro From 0d9d33955b503c88a2c712144d97f094baa5d46d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Aug 2022 16:06:12 -0700 Subject: [PATCH 063/891] Clarify you can publish multiple files, closes #1788 --- docs/publish.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/publish.rst b/docs/publish.rst index dd8566ed..d817ed31 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -31,7 +31,7 @@ Publishing to Google Cloud Run You will first need to install and configure the Google Cloud CLI tools by following `these instructions `__. -You can then publish a database to Google Cloud Run using the following command:: +You can then publish one or more SQLite database files to Google Cloud Run using the following command:: datasette publish cloudrun mydatabase.db --service=my-database @@ -63,7 +63,7 @@ Publishing to Heroku To publish your data using `Heroku `__, first create an account there and install and configure the `Heroku CLI tool `_. -You can publish a database to Heroku using the following command:: +You can publish one or more databases to Heroku using the following command:: datasette publish heroku mydatabase.db @@ -138,7 +138,7 @@ If a plugin has any :ref:`plugins_configuration_secret` you can use the ``--plug datasette package ================= -If you have docker installed (e.g. 
using `Docker for Mac `_) you can use the ``datasette package`` command to create a new Docker image in your local repository containing the datasette app bundled together with your selected SQLite databases:: +If you have docker installed (e.g. using `Docker for Mac `_) you can use the ``datasette package`` command to create a new Docker image in your local repository containing the datasette app bundled together with one or more SQLite databases:: datasette package mydatabase.db From 663ac431fe7202c85967568d82b2034f92b9aa43 Mon Sep 17 00:00:00 2001 From: Manuel Kaufmann Date: Sat, 20 Aug 2022 02:04:16 +0200 Subject: [PATCH 064/891] Use Read the Docs action v1 (#1778) Read the Docs repository was renamed from `readthedocs/readthedocs-preview` to `readthedocs/actions/`. Now, the `preview` action is under `readthedocs/actions/preview` and is tagged as `v1` --- .github/workflows/documentation-links.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/documentation-links.yml b/.github/workflows/documentation-links.yml index e7062a46..a54bd83a 100644 --- a/.github/workflows/documentation-links.yml +++ b/.github/workflows/documentation-links.yml @@ -11,6 +11,6 @@ jobs: documentation-links: runs-on: ubuntu-latest steps: - - uses: readthedocs/readthedocs-preview@main + - uses: readthedocs/actions/preview@v1 with: project-slug: "datasette" From 1d64c9a8dac45b9a3452acf8e76dfadea2b0bc49 Mon Sep 17 00:00:00 2001 From: Alex Garcia Date: Tue, 23 Aug 2022 11:34:30 -0700 Subject: [PATCH 065/891] Add new entrypoint option to --load-extensions. 
(#1789) Thanks, @asg017 --- .gitignore | 6 ++++ datasette/app.py | 8 ++++- datasette/cli.py | 4 ++- datasette/utils/__init__.py | 11 ++++++ tests/ext.c | 48 ++++++++++++++++++++++++++ tests/test_load_extensions.py | 65 +++++++++++++++++++++++++++++++++++ 6 files changed, 140 insertions(+), 2 deletions(-) create mode 100644 tests/ext.c create mode 100644 tests/test_load_extensions.py diff --git a/.gitignore b/.gitignore index 066009f0..277ff653 100644 --- a/.gitignore +++ b/.gitignore @@ -118,3 +118,9 @@ ENV/ .DS_Store node_modules .*.swp + +# In case someone compiled tests/ext.c for test_load_extensions, don't +# include it in source control. +tests/*.dylib +tests/*.so +tests/*.dll \ No newline at end of file diff --git a/datasette/app.py b/datasette/app.py index 1a9afc10..bb9232c9 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -559,7 +559,13 @@ class Datasette: if self.sqlite_extensions: conn.enable_load_extension(True) for extension in self.sqlite_extensions: - conn.execute("SELECT load_extension(?)", [extension]) + # "extension" is either a string path to the extension + # or a 2-item tuple that specifies which entrypoint to load. 
+ if isinstance(extension, tuple): + path, entrypoint = extension + conn.execute("SELECT load_extension(?, ?)", [path, entrypoint]) + else: + conn.execute("SELECT load_extension(?)", [extension]) if self.setting("cache_size_kb"): conn.execute(f"PRAGMA cache_size=-{self.setting('cache_size_kb')}") # pylint: disable=no-member diff --git a/datasette/cli.py b/datasette/cli.py index f2a03d53..6eb42712 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -21,6 +21,7 @@ from .app import ( pm, ) from .utils import ( + LoadExtension, StartupError, check_connection, find_spatialite, @@ -128,9 +129,10 @@ def sqlite_extensions(fn): return click.option( "sqlite_extensions", "--load-extension", + type=LoadExtension(), envvar="SQLITE_EXTENSIONS", multiple=True, - help="Path to a SQLite extension to load", + help="Path to a SQLite extension to load, and optional entrypoint", )(fn) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index d148cc2c..0fc87d51 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -833,6 +833,17 @@ class StaticMount(click.ParamType): self.fail(f"{value} is not a valid directory path", param, ctx) return path, dirpath +# The --load-extension parameter can optionally include a specific entrypoint. +# This is done by appending ":entrypoint_name" after supplying the path to the extension +class LoadExtension(click.ParamType): + name = "path:entrypoint?" + + def convert(self, value, param, ctx): + if ":" not in value: + return value + path, entrypoint = value.split(":", 1) + return path, entrypoint + def format_bytes(bytes): current = float(bytes) diff --git a/tests/ext.c b/tests/ext.c new file mode 100644 index 00000000..5fe970d9 --- /dev/null +++ b/tests/ext.c @@ -0,0 +1,48 @@ +/* +** This file implements a SQLite extension with multiple entrypoints. +** +** The default entrypoint, sqlite3_ext_init, has a single function "a". +** The 1st alternate entrypoint, sqlite3_ext_b_init, has a single function "b". 
+** The 2nd alternate entrypoint, sqlite3_ext_c_init, has a single function "c". +** +** Compiling instructions: +** https://www.sqlite.org/loadext.html#compiling_a_loadable_extension +** +*/ + +#include "sqlite3ext.h" + +SQLITE_EXTENSION_INIT1 + +// SQL function that returns back the value supplied during sqlite3_create_function() +static void func(sqlite3_context *context, int argc, sqlite3_value **argv) { + sqlite3_result_text(context, (char *) sqlite3_user_data(context), -1, SQLITE_STATIC); +} + + +// The default entrypoint, since it matches the "ext.dylib"/"ext.so" name +#ifdef _WIN32 +__declspec(dllexport) +#endif +int sqlite3_ext_init(sqlite3 *db, char **pzErrMsg, const sqlite3_api_routines *pApi) { + SQLITE_EXTENSION_INIT2(pApi); + return sqlite3_create_function(db, "a", 0, 0, "a", func, 0, 0); +} + +// Alternate entrypoint #1 +#ifdef _WIN32 +__declspec(dllexport) +#endif +int sqlite3_ext_b_init(sqlite3 *db, char **pzErrMsg, const sqlite3_api_routines *pApi) { + SQLITE_EXTENSION_INIT2(pApi); + return sqlite3_create_function(db, "b", 0, 0, "b", func, 0, 0); +} + +// Alternate entrypoint #2 +#ifdef _WIN32 +__declspec(dllexport) +#endif +int sqlite3_ext_c_init(sqlite3 *db, char **pzErrMsg, const sqlite3_api_routines *pApi) { + SQLITE_EXTENSION_INIT2(pApi); + return sqlite3_create_function(db, "c", 0, 0, "c", func, 0, 0); +} diff --git a/tests/test_load_extensions.py b/tests/test_load_extensions.py new file mode 100644 index 00000000..360bc8f3 --- /dev/null +++ b/tests/test_load_extensions.py @@ -0,0 +1,65 @@ +from datasette.app import Datasette +import pytest +from pathlib import Path + +# not necessarily a full path - the full compiled path looks like "ext.dylib" +# or another suffix, but sqlite will, under the hood, decide which file +# extension to use based on the operating system (apple=dylib, windows=dll etc) +# this resolves to "./ext", which is enough for SQLite to calculate the rest +COMPILED_EXTENSION_PATH = str(Path(__file__).parent / "ext") + +# 
See if ext.c has been compiled, based off the different possible suffixes. +def has_compiled_ext(): + for ext in ["dylib", "so", "dll"]: + path = Path(__file__).parent / f"ext.{ext}" + if path.is_file(): + return True + return False + + +@pytest.mark.asyncio +@pytest.mark.skipif(not has_compiled_ext(), reason="Requires compiled ext.c") +async def test_load_extension_default_entrypoint(): + + # The default entrypoint only loads a() and NOT b() or c(), so those + # should fail. + ds = Datasette(sqlite_extensions=[COMPILED_EXTENSION_PATH]) + + response = await ds.client.get("/_memory.json?sql=select+a()") + assert response.status_code == 200 + assert response.json()["rows"][0][0] == "a" + + response = await ds.client.get("/_memory.json?sql=select+b()") + assert response.status_code == 400 + assert response.json()["error"] == "no such function: b" + + response = await ds.client.get("/_memory.json?sql=select+c()") + assert response.status_code == 400 + assert response.json()["error"] == "no such function: c" + + +@pytest.mark.asyncio +@pytest.mark.skipif(not has_compiled_ext(), reason="Requires compiled ext.c") +async def test_load_extension_multiple_entrypoints(): + + # Load in the default entrypoint and the other 2 custom entrypoints, now + # all a(), b(), and c() should run successfully. 
+ ds = Datasette( + sqlite_extensions=[ + COMPILED_EXTENSION_PATH, + (COMPILED_EXTENSION_PATH, "sqlite3_ext_b_init"), + (COMPILED_EXTENSION_PATH, "sqlite3_ext_c_init"), + ] + ) + + response = await ds.client.get("/_memory.json?sql=select+a()") + assert response.status_code == 200 + assert response.json()["rows"][0][0] == "a" + + response = await ds.client.get("/_memory.json?sql=select+b()") + assert response.status_code == 200 + assert response.json()["rows"][0][0] == "b" + + response = await ds.client.get("/_memory.json?sql=select+c()") + assert response.status_code == 200 + assert response.json()["rows"][0][0] == "c" From fd1086c6867f3e3582b1eca456e4ea95f6cecf8b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 23 Aug 2022 11:35:41 -0700 Subject: [PATCH 066/891] Applied Black, refs #1789 --- datasette/app.py | 4 ++-- datasette/utils/__init__.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index bb9232c9..f2a6763a 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -559,8 +559,8 @@ class Datasette: if self.sqlite_extensions: conn.enable_load_extension(True) for extension in self.sqlite_extensions: - # "extension" is either a string path to the extension - # or a 2-item tuple that specifies which entrypoint to load. + # "extension" is either a string path to the extension + # or a 2-item tuple that specifies which entrypoint to load. if isinstance(extension, tuple): path, entrypoint = extension conn.execute("SELECT load_extension(?, ?)", [path, entrypoint]) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 0fc87d51..bbaa0510 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -833,6 +833,7 @@ class StaticMount(click.ParamType): self.fail(f"{value} is not a valid directory path", param, ctx) return path, dirpath + # The --load-extension parameter can optionally include a specific entrypoint. 
# This is done by appending ":entrypoint_name" after supplying the path to the extension class LoadExtension(click.ParamType): From 456dc155d491a009942ace71a4e1827cddc6b93d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 23 Aug 2022 11:40:36 -0700 Subject: [PATCH 067/891] Ran cog, refs #1789 --- docs/cli-reference.rst | 95 +++++++++++++++++++++++------------------- 1 file changed, 51 insertions(+), 44 deletions(-) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index a1e56774..f8419d58 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -84,48 +84,53 @@ Once started you can access it at ``http://localhost:8001`` Serve up specified SQLite database files with a web UI Options: - -i, --immutable PATH Database files to open in immutable mode - -h, --host TEXT Host for server. Defaults to 127.0.0.1 which means - only connections from the local machine will be - allowed. Use 0.0.0.0 to listen to all IPs and allow - access from other machines. - -p, --port INTEGER RANGE Port for server, defaults to 8001. Use -p 0 to - automatically assign an available port. - [0<=x<=65535] - --uds TEXT Bind to a Unix domain socket - --reload Automatically reload if code or metadata change - detected - useful for development - --cors Enable CORS by serving Access-Control-Allow-Origin: - * - --load-extension TEXT Path to a SQLite extension to load - --inspect-file TEXT Path to JSON file created using "datasette inspect" - -m, --metadata FILENAME Path to JSON/YAML file containing license/source - metadata - --template-dir DIRECTORY Path to directory containing custom templates - --plugins-dir DIRECTORY Path to directory containing custom plugins - --static MOUNT:DIRECTORY Serve static files from this directory at /MOUNT/... - --memory Make /_memory database available - --config CONFIG Deprecated: set config option using - configname:value. Use --setting instead. - --setting SETTING... 
Setting, see - docs.datasette.io/en/stable/settings.html - --secret TEXT Secret used for signing secure values, such as - signed cookies - --root Output URL that sets a cookie authenticating the - root user - --get TEXT Run an HTTP GET request against this path, print - results and exit - --version-note TEXT Additional note to show on /-/versions - --help-settings Show available settings - --pdb Launch debugger on any errors - -o, --open Open Datasette in your web browser - --create Create database files if they do not exist - --crossdb Enable cross-database joins using the /_memory - database - --nolock Ignore locking, open locked files in read-only mode - --ssl-keyfile TEXT SSL key file - --ssl-certfile TEXT SSL certificate file - --help Show this message and exit. + -i, --immutable PATH Database files to open in immutable mode + -h, --host TEXT Host for server. Defaults to 127.0.0.1 which + means only connections from the local machine + will be allowed. Use 0.0.0.0 to listen to all + IPs and allow access from other machines. + -p, --port INTEGER RANGE Port for server, defaults to 8001. Use -p 0 to + automatically assign an available port. + [0<=x<=65535] + --uds TEXT Bind to a Unix domain socket + --reload Automatically reload if code or metadata + change detected - useful for development + --cors Enable CORS by serving Access-Control-Allow- + Origin: * + --load-extension PATH:ENTRYPOINT? + Path to a SQLite extension to load, and + optional entrypoint + --inspect-file TEXT Path to JSON file created using "datasette + inspect" + -m, --metadata FILENAME Path to JSON/YAML file containing + license/source metadata + --template-dir DIRECTORY Path to directory containing custom templates + --plugins-dir DIRECTORY Path to directory containing custom plugins + --static MOUNT:DIRECTORY Serve static files from this directory at + /MOUNT/... + --memory Make /_memory database available + --config CONFIG Deprecated: set config option using + configname:value. 
Use --setting instead. + --setting SETTING... Setting, see + docs.datasette.io/en/stable/settings.html + --secret TEXT Secret used for signing secure values, such as + signed cookies + --root Output URL that sets a cookie authenticating + the root user + --get TEXT Run an HTTP GET request against this path, + print results and exit + --version-note TEXT Additional note to show on /-/versions + --help-settings Show available settings + --pdb Launch debugger on any errors + -o, --open Open Datasette in your web browser + --create Create database files if they do not exist + --crossdb Enable cross-database joins using the /_memory + database + --nolock Ignore locking, open locked files in read-only + mode + --ssl-keyfile TEXT SSL key file + --ssl-certfile TEXT SSL certificate file + --help Show this message and exit. .. [[[end]]] @@ -566,8 +571,10 @@ This performance optimization is used automatically by some of the ``datasette p Options: --inspect-file TEXT - --load-extension TEXT Path to a SQLite extension to load - --help Show this message and exit. + --load-extension PATH:ENTRYPOINT? + Path to a SQLite extension to load, and + optional entrypoint + --help Show this message and exit. .. 
[[[end]]] From ba35105eee2d3ba620e4f230028a02b2e2571df2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 23 Aug 2022 17:11:45 -0700 Subject: [PATCH 068/891] Test `--load-extension` in GitHub Actions (#1792) * Run the --load-extension test, refs #1789 * Ran cog, refs #1789 --- .github/workflows/test.yml | 3 +++ tests/test_api.py | 2 +- tests/test_html.py | 4 ++-- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 90b6555e..e38d5ee9 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -24,6 +24,9 @@ jobs: key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} restore-keys: | ${{ runner.os }}-pip- + - name: Build extension for --load-extension test + run: |- + (cd tests && gcc ext.c -fPIC -shared -o ext.so) - name: Install dependencies run: | pip install -e '.[test]' diff --git a/tests/test_api.py b/tests/test_api.py index 253c1718..f6db2f9d 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -36,7 +36,7 @@ def test_homepage(app_client): # 4 hidden FTS tables + no_primary_key (hidden in metadata) assert d["hidden_tables_count"] == 6 # 201 in no_primary_key, plus 6 in other hidden tables: - assert d["hidden_table_rows_sum"] == 207 + assert d["hidden_table_rows_sum"] == 207, response.json assert d["views_count"] == 4 diff --git a/tests/test_html.py b/tests/test_html.py index be21bd84..d6e969ad 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -115,7 +115,7 @@ def test_database_page(app_client): assert fragment in response.text # And views - views_ul = soup.find("h2", text="Views").find_next_sibling("ul") + views_ul = soup.find("h2", string="Views").find_next_sibling("ul") assert views_ul is not None assert [ ("/fixtures/paginated_view", "paginated_view"), @@ -128,7 +128,7 @@ def test_database_page(app_client): ] == sorted([(a["href"], a.text) for a in views_ul.find_all("a")]) # And a list of canned queries - queries_ul = soup.find("h2", 
text="Queries").find_next_sibling("ul") + queries_ul = soup.find("h2", string="Queries").find_next_sibling("ul") assert queries_ul is not None assert [ ("/fixtures/from_async_hook", "from_async_hook"), From 51030df1869b3b574dd3584d1563415776b9cd4e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 5 Sep 2022 11:35:40 -0700 Subject: [PATCH 069/891] Don't use upper bound dependencies any more See https://iscinumpy.dev/post/bound-version-constraints/ for the rationale behind this change. Closes #1800 --- setup.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/setup.py b/setup.py index a1c51d0b..b2e50b38 100644 --- a/setup.py +++ b/setup.py @@ -42,21 +42,21 @@ setup( include_package_data=True, python_requires=">=3.7", install_requires=[ - "asgiref>=3.2.10,<3.6.0", - "click>=7.1.1,<8.2.0", + "asgiref>=3.2.10", + "click>=7.1.1", "click-default-group-wheel>=1.2.2", - "Jinja2>=2.10.3,<3.1.0", - "hupper~=1.9", + "Jinja2>=2.10.3", + "hupper>=1.9", "httpx>=0.20", - "pint~=0.9", - "pluggy>=1.0,<1.1", - "uvicorn~=0.11", - "aiofiles>=0.4,<0.9", - "janus>=0.6.2,<1.1", + "pint>=0.9", + "pluggy>=1.0", + "uvicorn>=0.11", + "aiofiles>=0.4", + "janus>=0.6.2", "asgi-csrf>=0.9", - "PyYAML>=5.3,<7.0", - "mergedeep>=1.1.1,<1.4.0", - "itsdangerous>=1.1,<3.0", + "PyYAML>=5.3", + "mergedeep>=1.1.1", + "itsdangerous>=1.1", ], entry_points=""" [console_scripts] @@ -72,14 +72,14 @@ setup( "sphinx-copybutton", ], "test": [ - "pytest>=5.2.2,<7.2.0", - "pytest-xdist>=2.2.1,<2.6", - "pytest-asyncio>=0.17,<0.20", - "beautifulsoup4>=4.8.1,<4.12.0", + "pytest>=5.2.2", + "pytest-xdist>=2.2.1", + "pytest-asyncio>=0.17", + "beautifulsoup4>=4.8.1", "black==22.6.0", "blacken-docs==1.12.1", - "pytest-timeout>=1.4.2,<2.2", - "trustme>=0.7,<0.10", + "pytest-timeout>=1.4.2", + "trustme>=0.7", "cogapp>=3.3.0", ], "rich": ["rich"], From 294ecd45f7801971dbeef383d0c5456ee95ab839 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Sep 2022 11:51:51 -0700 Subject: [PATCH 070/891] Bump black from 22.6.0 to 22.8.0 (#1797) Bumps [black](https://github.com/psf/black) from 22.6.0 to 22.8.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/22.6.0...22.8.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index b2e50b38..92fa60d0 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ setup( "pytest-xdist>=2.2.1", "pytest-asyncio>=0.17", "beautifulsoup4>=4.8.1", - "black==22.6.0", + "black==22.8.0", "blacken-docs==1.12.1", "pytest-timeout>=1.4.2", "trustme>=0.7", From b91e17280c05bbb9cf97432081bdcea8665879f9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 5 Sep 2022 16:50:53 -0700 Subject: [PATCH 071/891] Run tests in serial, refs #1802 --- .github/workflows/test.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e38d5ee9..9c8c48ef 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -33,8 +33,7 @@ jobs: pip freeze - name: Run tests run: | - pytest -n auto -m "not serial" - pytest -m "serial" + pytest - name: Check if cog needs to be run run: | cog --check docs/*.rst From b2b901e8c4b939e50ee1117ffcd2881ed8a8e3bf Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 5 Sep 2022 17:05:23 -0700 Subject: [PATCH 072/891] Skip SpatiaLite test if no conn.enable_load_extension() Ran into this problem while working on #1802 --- tests/test_spatialite.py | 2 ++ tests/utils.py | 8 ++++++++ 2 
files changed, 10 insertions(+) diff --git a/tests/test_spatialite.py b/tests/test_spatialite.py index 8b98c5d6..c07a30e8 100644 --- a/tests/test_spatialite.py +++ b/tests/test_spatialite.py @@ -1,5 +1,6 @@ from datasette.app import Datasette from datasette.utils import find_spatialite, SpatialiteNotFound, SPATIALITE_FUNCTIONS +from .utils import has_load_extension import pytest @@ -13,6 +14,7 @@ def has_spatialite(): @pytest.mark.asyncio @pytest.mark.skipif(not has_spatialite(), reason="Requires SpatiaLite") +@pytest.mark.skipif(not has_load_extension(), reason="Requires enable_load_extension") async def test_spatialite_version_info(): ds = Datasette(sqlite_extensions=["spatialite"]) response = await ds.client.get("/-/versions.json") diff --git a/tests/utils.py b/tests/utils.py index 972300db..191ead9b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,3 +1,6 @@ +from datasette.utils.sqlite import sqlite3 + + def assert_footer_links(soup): footer_links = soup.find("footer").findAll("a") assert 4 == len(footer_links) @@ -22,3 +25,8 @@ def inner_html(soup): # This includes the parent tag - so remove that inner_html = html.split(">", 1)[1].rsplit("<", 1)[0] return inner_html.strip() + + +def has_load_extension(): + conn = sqlite3.connect(":memory:") + return hasattr(conn, "enable_load_extension") From 1c29b925d300d1ee17047504473f2517767aa05b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 5 Sep 2022 17:10:52 -0700 Subject: [PATCH 073/891] Run tests in serial again Because this didn't fix the issue I'm seeing in #1802 Revert "Run tests in serial, refs #1802" This reverts commit b91e17280c05bbb9cf97432081bdcea8665879f9. 
--- .github/workflows/test.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9c8c48ef..e38d5ee9 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -33,7 +33,8 @@ jobs: pip freeze - name: Run tests run: | - pytest + pytest -n auto -m "not serial" + pytest -m "serial" - name: Check if cog needs to be run run: | cog --check docs/*.rst From 64288d827f7ff97f825e10f714da3f781ecf9345 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 5 Sep 2022 17:40:19 -0700 Subject: [PATCH 074/891] Workaround for test failure: RuntimeError: There is no current event loop (#1803) * Remove ensure_eventloop hack * Hack to recover from intermittent RuntimeError calling asyncio.Lock() --- datasette/app.py | 10 +++++++++- tests/test_cli.py | 27 ++++++++++----------------- 2 files changed, 19 insertions(+), 18 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f2a6763a..c6bbdaf0 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -231,7 +231,15 @@ class Datasette: self.inspect_data = inspect_data self.immutables = set(immutables or []) self.databases = collections.OrderedDict() - self._refresh_schemas_lock = asyncio.Lock() + try: + self._refresh_schemas_lock = asyncio.Lock() + except RuntimeError as rex: + # Workaround for intermittent test failure, see: + # https://github.com/simonw/datasette/issues/1802 + if "There is no current event loop in thread" in str(rex): + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + self._refresh_schemas_lock = asyncio.Lock() self.crossdb = crossdb self.nolock = nolock if memory or crossdb or not self.files: diff --git a/tests/test_cli.py b/tests/test_cli.py index d0f6e26c..f0d28037 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -22,13 +22,6 @@ from unittest import mock import urllib -@pytest.fixture -def ensure_eventloop(): - # Workaround for "Event loop is closed" error - if 
asyncio.get_event_loop().is_closed(): - asyncio.set_event_loop(asyncio.new_event_loop()) - - def test_inspect_cli(app_client): runner = CliRunner() result = runner.invoke(cli, ["inspect", "fixtures.db"]) @@ -72,7 +65,7 @@ def test_serve_with_inspect_file_prepopulates_table_counts_cache(): ), ) def test_spatialite_error_if_attempt_to_open_spatialite( - ensure_eventloop, spatialite_paths, should_suggest_load_extension + spatialite_paths, should_suggest_load_extension ): with mock.patch("datasette.utils.SPATIALITE_PATHS", spatialite_paths): runner = CliRunner() @@ -199,14 +192,14 @@ def test_version(): @pytest.mark.parametrize("invalid_port", ["-1", "0.5", "dog", "65536"]) -def test_serve_invalid_ports(ensure_eventloop, invalid_port): +def test_serve_invalid_ports(invalid_port): runner = CliRunner(mix_stderr=False) result = runner.invoke(cli, ["--port", invalid_port]) assert result.exit_code == 2 assert "Invalid value for '-p'" in result.stderr -def test_setting(ensure_eventloop): +def test_setting(): runner = CliRunner() result = runner.invoke( cli, ["--setting", "default_page_size", "5", "--get", "/-/settings.json"] @@ -215,14 +208,14 @@ def test_setting(ensure_eventloop): assert json.loads(result.output)["default_page_size"] == 5 -def test_setting_type_validation(ensure_eventloop): +def test_setting_type_validation(): runner = CliRunner(mix_stderr=False) result = runner.invoke(cli, ["--setting", "default_page_size", "dog"]) assert result.exit_code == 2 assert '"default_page_size" should be an integer' in result.stderr -def test_config_deprecated(ensure_eventloop): +def test_config_deprecated(): # The --config option should show a deprecation message runner = CliRunner(mix_stderr=False) result = runner.invoke( @@ -233,14 +226,14 @@ def test_config_deprecated(ensure_eventloop): assert "will be deprecated in" in result.stderr -def test_sql_errors_logged_to_stderr(ensure_eventloop): +def test_sql_errors_logged_to_stderr(): runner = CliRunner(mix_stderr=False) result = 
runner.invoke(cli, ["--get", "/_memory.json?sql=select+blah"]) assert result.exit_code == 1 assert "sql = 'select blah', params = {}: no such column: blah\n" in result.stderr -def test_serve_create(ensure_eventloop, tmpdir): +def test_serve_create(tmpdir): runner = CliRunner() db_path = tmpdir / "does_not_exist_yet.db" assert not db_path.exists() @@ -258,7 +251,7 @@ def test_serve_create(ensure_eventloop, tmpdir): assert db_path.exists() -def test_serve_duplicate_database_names(ensure_eventloop, tmpdir): +def test_serve_duplicate_database_names(tmpdir): "'datasette db.db nested/db.db' should attach two databases, /db and /db_2" runner = CliRunner() db_1_path = str(tmpdir / "db.db") @@ -273,7 +266,7 @@ def test_serve_duplicate_database_names(ensure_eventloop, tmpdir): assert {db["name"] for db in databases} == {"db", "db_2"} -def test_serve_deduplicate_same_database_path(ensure_eventloop, tmpdir): +def test_serve_deduplicate_same_database_path(tmpdir): "'datasette db.db db.db' should only attach one database, /db" runner = CliRunner() db_path = str(tmpdir / "db.db") @@ -287,7 +280,7 @@ def test_serve_deduplicate_same_database_path(ensure_eventloop, tmpdir): @pytest.mark.parametrize( "filename", ["test-database (1).sqlite", "database (1).sqlite"] ) -def test_weird_database_names(ensure_eventloop, tmpdir, filename): +def test_weird_database_names(tmpdir, filename): # https://github.com/simonw/datasette/issues/1181 runner = CliRunner() db_path = str(tmpdir / filename) From c9d1943aede436fa3413fd49bc56335cbda4ad07 Mon Sep 17 00:00:00 2001 From: Daniel Rech Date: Tue, 6 Sep 2022 02:45:41 +0200 Subject: [PATCH 075/891] Fix word break in facets by adding ul.tight-bullets li word-break: break-all (#1794) Thanks, @dmr --- datasette/static/app.css | 1 + 1 file changed, 1 insertion(+) diff --git a/datasette/static/app.css b/datasette/static/app.css index af3e14d5..712b9925 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -260,6 +260,7 @@ ul.bullets li { 
ul.tight-bullets li { list-style-type: disc; margin-bottom: 0; + word-break: break-all; } a.not-underlined { text-decoration: none; From d80775a48d20917633792fdc9525f075d3bc2c7a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 5 Sep 2022 17:44:44 -0700 Subject: [PATCH 076/891] Raise error if it's not about loops, refs #1802 --- datasette/app.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/datasette/app.py b/datasette/app.py index c6bbdaf0..aeb81687 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -240,6 +240,8 @@ class Datasette: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) self._refresh_schemas_lock = asyncio.Lock() + else: + raise self.crossdb = crossdb self.nolock = nolock if memory or crossdb or not self.files: From 8430c3bc7dd22b173c1a8c6cd7180e3b31240cd1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 6 Sep 2022 08:59:19 -0700 Subject: [PATCH 077/891] table facet_size in metadata, refs #1804 --- datasette/facets.py | 14 +++++++++++--- tests/test_facets.py | 17 +++++++++++++++++ 2 files changed, 28 insertions(+), 3 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index b15a758c..e70d42df 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -102,11 +102,19 @@ class Facet: def get_facet_size(self): facet_size = self.ds.setting("default_facet_size") max_returned_rows = self.ds.setting("max_returned_rows") + table_facet_size = None + if self.table: + tables_metadata = self.ds.metadata("tables", database=self.database) or {} + table_metadata = tables_metadata.get(self.table) or {} + if table_metadata: + table_facet_size = table_metadata.get("facet_size") custom_facet_size = self.request.args.get("_facet_size") - if custom_facet_size == "max": - facet_size = max_returned_rows - elif custom_facet_size and custom_facet_size.isdigit(): + if custom_facet_size and custom_facet_size.isdigit(): facet_size = int(custom_facet_size) + elif table_facet_size: + facet_size = table_facet_size + if 
facet_size == "max": + facet_size = max_returned_rows return min(facet_size, max_returned_rows) async def suggest(self): diff --git a/tests/test_facets.py b/tests/test_facets.py index c28dc43c..cbee23b0 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -581,6 +581,23 @@ async def test_facet_size(): ) data5 = response5.json() assert len(data5["facet_results"]["city"]["results"]) == 20 + # Now try messing with facet_size in the table metadata + ds._metadata_local = { + "databases": { + "test_facet_size": {"tables": {"neighbourhoods": {"facet_size": 6}}} + } + } + response6 = await ds.client.get("/test_facet_size/neighbourhoods.json?_facet=city") + data6 = response6.json() + assert len(data6["facet_results"]["city"]["results"]) == 6 + # Setting it to max bumps it up to 50 again + ds._metadata_local["databases"]["test_facet_size"]["tables"]["neighbourhoods"][ + "facet_size" + ] = "max" + data7 = ( + await ds.client.get("/test_facet_size/neighbourhoods.json?_facet=city") + ).json() + assert len(data7["facet_results"]["city"]["results"]) == 20 def test_other_types_of_facet_in_metadata(): From 303c6c733d95a6133558ec1b468f5bea5827d0d2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 6 Sep 2022 11:05:00 -0700 Subject: [PATCH 078/891] Fix for incorrectly handled _facet_size=max, refs #1804 --- datasette/facets.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index e70d42df..7fb0c68b 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -109,12 +109,19 @@ class Facet: if table_metadata: table_facet_size = table_metadata.get("facet_size") custom_facet_size = self.request.args.get("_facet_size") - if custom_facet_size and custom_facet_size.isdigit(): - facet_size = int(custom_facet_size) - elif table_facet_size: - facet_size = table_facet_size - if facet_size == "max": - facet_size = max_returned_rows + if custom_facet_size: + if custom_facet_size == "max": + 
facet_size = max_returned_rows + elif custom_facet_size.isdigit(): + facet_size = int(custom_facet_size) + else: + # Invalid value, ignore it + custom_facet_size = None + if table_facet_size and not custom_facet_size: + if table_facet_size == "max": + facet_size = max_returned_rows + else: + facet_size = table_facet_size return min(facet_size, max_returned_rows) async def suggest(self): From 0a7815d2038255a0834c955066a2a16c01f707b2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 6 Sep 2022 11:06:49 -0700 Subject: [PATCH 079/891] Documentation for facet_size in metadata, closes #1804 --- docs/facets.rst | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/facets.rst b/docs/facets.rst index 2a2eb039..6c9d99bd 100644 --- a/docs/facets.rst +++ b/docs/facets.rst @@ -129,6 +129,22 @@ You can specify :ref:`array ` or :ref:`date ] } +You can change the default facet size (the number of results shown for each facet) for a table using ``facet_size``: + +.. code-block:: json + + { + "databases": { + "sf-trees": { + "tables": { + "Street_Tree_List": { + "facets": ["qLegalStatus"], + "facet_size": 10 + } + } + } + } + } Suggested facets ---------------- From d0476897e10249bb4867473722270d02491c2c1f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 6 Sep 2022 11:24:30 -0700 Subject: [PATCH 080/891] Fixed Sphinx warning about language = None --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 4ef6b768..8965974a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -71,7 +71,7 @@ release = "" # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. 
From ff9c87197dde8b09f9787ee878804cb6842ea5dc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 6 Sep 2022 11:26:21 -0700 Subject: [PATCH 081/891] Fixed Sphinx warnings on cli-reference page --- docs/cli-reference.rst | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index f8419d58..4a8465cb 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -14,7 +14,7 @@ Running ``datasette`` without specifying a command runs the default command, ``d import textwrap def help(args): title = "datasette " + " ".join(args) - cog.out("::\n\n") + cog.out("\n::\n\n") result = CliRunner().invoke(cli.cli, args) output = result.output.replace("Usage: cli ", "Usage: datasette ") cog.out(textwrap.indent(output, ' ')) @@ -32,6 +32,7 @@ Running ``datasette --help`` shows a list of all of the available commands. .. [[[cog help(["--help"]) .. ]]] + :: Usage: datasette [OPTIONS] COMMAND [ARGS]... @@ -77,6 +78,7 @@ Once started you can access it at ``http://localhost:8001`` .. [[[cog help(["serve", "--help"]) .. ]]] + :: Usage: datasette serve [OPTIONS] [FILES]... @@ -202,6 +204,7 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam .. [[[cog help(["--help-settings"]) .. ]]] + :: Settings: @@ -258,6 +261,7 @@ Output JSON showing all currently installed plugins, their versions, whether the .. [[[cog help(["plugins", "--help"]) .. 
]]] + :: Usage: datasette plugins [OPTIONS] @@ -326,6 +330,7 @@ Would install the `datasette-cluster-map Date: Tue, 6 Sep 2022 16:50:43 -0700 Subject: [PATCH 082/891] truncate_cells_html now affects URLs too, refs #1805 --- datasette/utils/__init__.py | 10 ++++++++++ datasette/views/database.py | 11 ++++++++--- datasette/views/table.py | 8 ++++++-- tests/fixtures.py | 9 +++++---- tests/test_api.py | 2 +- tests/test_table_api.py | 11 +++++++---- tests/test_table_html.py | 11 +++++++++++ tests/test_utils.py | 20 ++++++++++++++++++++ 8 files changed, 68 insertions(+), 14 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index bbaa0510..2bdea673 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1167,3 +1167,13 @@ def resolve_routes(routes, path): if match is not None: return match, view return None, None + + +def truncate_url(url, length): + if (not length) or (len(url) <= length): + return url + bits = url.rsplit(".", 1) + if len(bits) == 2 and 1 <= len(bits[1]) <= 4 and "/" not in bits[1]: + rest, ext = bits + return rest[: length - 1 - len(ext)] + "…." 
+ ext + return url[: length - 1] + "…" diff --git a/datasette/views/database.py b/datasette/views/database.py index 77632b9d..fc344245 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -20,6 +20,7 @@ from datasette.utils import ( path_with_format, path_with_removed_args, sqlite3, + truncate_url, InvalidSql, ) from datasette.utils.asgi import AsgiFileDownload, NotFound, Response, Forbidden @@ -371,6 +372,7 @@ class QueryView(DataView): async def extra_template(): display_rows = [] + truncate_cells = self.ds.setting("truncate_cells_html") for row in results.rows if results else []: display_row = [] for column, value in zip(results.columns, row): @@ -396,9 +398,12 @@ class QueryView(DataView): if value in ("", None): display_value = Markup(" ") elif is_url(str(display_value).strip()): - display_value = Markup( - '{url}'.format( - url=escape(value.strip()) + display_value = markupsafe.Markup( + '{truncated_url}'.format( + url=markupsafe.escape(value.strip()), + truncated_url=markupsafe.escape( + truncate_url(value.strip(), truncate_cells) + ), ) ) elif isinstance(display_value, bytes): diff --git a/datasette/views/table.py b/datasette/views/table.py index 49c30c9c..60c092f9 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -24,6 +24,7 @@ from datasette.utils import ( path_with_removed_args, path_with_replaced_args, to_css_class, + truncate_url, urlsafe_components, value_as_boolean, ) @@ -966,8 +967,11 @@ async def display_columns_and_rows( display_value = markupsafe.Markup(" ") elif is_url(str(value).strip()): display_value = markupsafe.Markup( - '{url}'.format( - url=markupsafe.escape(value.strip()) + '{truncated_url}'.format( + url=markupsafe.escape(value.strip()), + truncated_url=markupsafe.escape( + truncate_url(value.strip(), truncate_cells) + ), ) ) elif column in table_metadata.get("units", {}) and value != "": diff --git a/tests/fixtures.py b/tests/fixtures.py index c145ac78..82d8452e 100644 --- 
a/tests/fixtures.py +++ b/tests/fixtures.py @@ -598,23 +598,24 @@ CREATE TABLE roadside_attractions ( pk integer primary key, name text, address text, + url text, latitude real, longitude real ); INSERT INTO roadside_attractions VALUES ( - 1, "The Mystery Spot", "465 Mystery Spot Road, Santa Cruz, CA 95065", + 1, "The Mystery Spot", "465 Mystery Spot Road, Santa Cruz, CA 95065", "https://www.mysteryspot.com/", 37.0167, -122.0024 ); INSERT INTO roadside_attractions VALUES ( - 2, "Winchester Mystery House", "525 South Winchester Boulevard, San Jose, CA 95128", + 2, "Winchester Mystery House", "525 South Winchester Boulevard, San Jose, CA 95128", "https://winchestermysteryhouse.com/", 37.3184, -121.9511 ); INSERT INTO roadside_attractions VALUES ( - 3, "Burlingame Museum of PEZ Memorabilia", "214 California Drive, Burlingame, CA 94010", + 3, "Burlingame Museum of PEZ Memorabilia", "214 California Drive, Burlingame, CA 94010", null, 37.5793, -122.3442 ); INSERT INTO roadside_attractions VALUES ( - 4, "Bigfoot Discovery Museum", "5497 Highway 9, Felton, CA 95018", + 4, "Bigfoot Discovery Museum", "5497 Highway 9, Felton, CA 95018", "https://www.bigfootdiscoveryproject.com/", 37.0414, -122.0725 ); diff --git a/tests/test_api.py b/tests/test_api.py index f6db2f9d..7a2bf91f 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -339,7 +339,7 @@ def test_database_page(app_client): }, { "name": "roadside_attractions", - "columns": ["pk", "name", "address", "latitude", "longitude"], + "columns": ["pk", "name", "address", "url", "latitude", "longitude"], "primary_keys": ["pk"], "count": 4, "hidden": False, diff --git a/tests/test_table_api.py b/tests/test_table_api.py index e56a72b5..0db04434 100644 --- a/tests/test_table_api.py +++ b/tests/test_table_api.py @@ -615,11 +615,12 @@ def test_table_through(app_client): response = app_client.get( 
'/fixtures/roadside_attractions.json?_through={"table":"roadside_attraction_characteristics","column":"characteristic_id","value":"1"}' ) - assert [ + assert response.json["rows"] == [ [ 3, "Burlingame Museum of PEZ Memorabilia", "214 California Drive, Burlingame, CA 94010", + None, 37.5793, -122.3442, ], @@ -627,13 +628,15 @@ def test_table_through(app_client): 4, "Bigfoot Discovery Museum", "5497 Highway 9, Felton, CA 95018", + "https://www.bigfootdiscoveryproject.com/", 37.0414, -122.0725, ], - ] == response.json["rows"] + ] + assert ( - 'where roadside_attraction_characteristics.characteristic_id = "1"' - == response.json["human_description_en"] + response.json["human_description_en"] + == 'where roadside_attraction_characteristics.characteristic_id = "1"' ) diff --git a/tests/test_table_html.py b/tests/test_table_html.py index f3808ea3..8e37468f 100644 --- a/tests/test_table_html.py +++ b/tests/test_table_html.py @@ -69,6 +69,17 @@ def test_table_cell_truncation(): td.string for td in table.findAll("td", {"class": "col-neighborhood-b352a7"}) ] + # URLs should be truncated too + response2 = client.get("/fixtures/roadside_attractions") + assert response2.status == 200 + table = Soup(response2.body, "html.parser").find("table") + tds = table.findAll("td", {"class": "col-url"}) + assert [str(td) for td in tds] == [ + 'http…', + 'http…', + '\xa0', + 'http…', + ] def test_add_filter_redirects(app_client): diff --git a/tests/test_utils.py b/tests/test_utils.py index df788767..d71a612d 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -626,3 +626,23 @@ def test_tilde_encoding(original, expected): assert actual == expected # And test round-trip assert original == utils.tilde_decode(actual) + + +@pytest.mark.parametrize( + "url,length,expected", + ( + ("https://example.com/", 5, "http…"), + ("https://example.com/foo/bar", 15, "https://exampl…"), + ("https://example.com/foo/bar/baz.jpg", 30, "https://example.com/foo/ba….jpg"), + # Extensions longer than 4 
characters are not treated specially: + ("https://example.com/foo/bar/baz.jpeg2", 30, "https://example.com/foo/bar/b…"), + ( + "https://example.com/foo/bar/baz.jpeg2", + None, + "https://example.com/foo/bar/baz.jpeg2", + ), + ), +) +def test_truncate_url(url, length, expected): + actual = utils.truncate_url(url, length) + assert actual == expected From 5aa359b86907d11b3ee601510775a85a90224da8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 6 Sep 2022 16:58:30 -0700 Subject: [PATCH 083/891] Apply cell truncation on query page too, refs #1805 --- datasette/views/database.py | 7 ++++++- tests/test_html.py | 19 +++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index fc344245..affbc540 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -428,7 +428,12 @@ class QueryView(DataView): "" if len(value) == 1 else "s", ) ) - + else: + display_value = str(value) + if truncate_cells and len(display_value) > truncate_cells: + display_value = ( + display_value[:truncate_cells] + "\u2026" + ) display_row.append(display_value) display_rows.append(display_row) diff --git a/tests/test_html.py b/tests/test_html.py index d6e969ad..bf915247 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -186,6 +186,25 @@ def test_row_page_does_not_truncate(): ] +def test_query_page_truncates(): + with make_app_client(settings={"truncate_cells_html": 5}) as client: + response = client.get( + "/fixtures?" 
+ + urllib.parse.urlencode( + { + "sql": "select 'this is longer than 5' as a, 'https://example.com/' as b" + } + ) + ) + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + tds = table.findAll("td") + assert [str(td) for td in tds] == [ + 'this …', + 'http…', + ] + + @pytest.mark.parametrize( "path,expected_classes", [ From bf8d84af5422606597be893cedd375020cb2b369 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 6 Sep 2022 20:34:59 -0700 Subject: [PATCH 084/891] word-wrap: anywhere on links in cells, refs #1805 --- datasette/static/app.css | 1 + 1 file changed, 1 insertion(+) diff --git a/datasette/static/app.css b/datasette/static/app.css index 712b9925..08b724f6 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -446,6 +446,7 @@ th { } table a:link { text-decoration: none; + word-wrap: anywhere; } .rows-and-columns td:before { display: block; From fb7e70d5e72a951efe4b29ad999d8915c032d021 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Sep 2022 09:19:20 -0700 Subject: [PATCH 085/891] Database(is_mutable=) now defaults to True, closes #1808 Refs https://github.com/simonw/datasette-upload-dbs/issues/6 --- datasette/database.py | 3 +-- docs/internals.rst | 9 +++++---- tests/test_internals_database.py | 1 + tests/test_internals_datasette.py | 2 +- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index fa558045..44467370 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -28,7 +28,7 @@ AttachedDatabase = namedtuple("AttachedDatabase", ("seq", "name", "file")) class Database: def __init__( - self, ds, path=None, is_mutable=False, is_memory=False, memory_name=None + self, ds, path=None, is_mutable=True, is_memory=False, memory_name=None ): self.name = None self.route = None @@ -39,7 +39,6 @@ class Database: self.memory_name = memory_name if memory_name is not None: self.is_memory = True - self.is_mutable = True 
self.hash = None self.cached_size = None self._cached_table_counts = None diff --git a/docs/internals.rst b/docs/internals.rst index 20797e98..adeec1d8 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -426,12 +426,13 @@ The ``db`` parameter should be an instance of the ``datasette.database.Database` Database( datasette, path="path/to/my-new-database.db", - is_mutable=True, ) ) This will add a mutable database and serve it at ``/my-new-database``. +Use ``is_mutable=False`` to add an immutable database. + ``.add_database()`` returns the Database instance, with its name set as the ``database.name`` attribute. Any time you are working with a newly added database you should use the return value of ``.add_database()``, for example: .. code-block:: python @@ -671,8 +672,8 @@ Instances of the ``Database`` class can be used to execute queries against attac .. _database_constructor: -Database(ds, path=None, is_mutable=False, is_memory=False, memory_name=None) ----------------------------------------------------------------------------- +Database(ds, path=None, is_mutable=True, is_memory=False, memory_name=None) +--------------------------------------------------------------------------- The ``Database()`` constructor can be used by plugins, in conjunction with :ref:`datasette_add_database`, to create and register new databases. @@ -685,7 +686,7 @@ The arguments are as follows: Path to a SQLite database file on disk. ``is_mutable`` - boolean - Set this to ``True`` if it is possible that updates will be made to that database - otherwise Datasette will open it in immutable mode and any changes could cause undesired behavior. + Set this to ``False`` to cause Datasette to open the file in immutable mode. ``is_memory`` - boolean Use this to create non-shared memory connections. 
diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 551f67e1..9e81c1d6 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -499,6 +499,7 @@ def test_mtime_ns_is_none_for_memory(app_client): def test_is_mutable(app_client): + assert Database(app_client.ds, is_memory=True).is_mutable is True assert Database(app_client.ds, is_memory=True, is_mutable=True).is_mutable is True assert Database(app_client.ds, is_memory=True, is_mutable=False).is_mutable is False diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index 1dc14cab..249920fe 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -58,7 +58,7 @@ async def test_datasette_constructor(): "route": "_memory", "path": None, "size": 0, - "is_mutable": False, + "is_mutable": True, "is_memory": True, "hash": None, } From 610425460b519e9c16d386cb81aa081c9d730ef0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 10 Sep 2022 14:24:26 -0700 Subject: [PATCH 086/891] Add --nolock to the README Chrome demo Refs #1744 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1af20129..af95b85e 100644 --- a/README.md +++ b/README.md @@ -48,7 +48,7 @@ This will start a web server on port 8001 - visit http://localhost:8001/ to acce Use Chrome on OS X? 
You can run datasette against your browser history like so: - datasette ~/Library/Application\ Support/Google/Chrome/Default/History + datasette ~/Library/Application\ Support/Google/Chrome/Default/History --nolock Now visiting http://localhost:8001/History/downloads will show you a web interface to browse your downloads data: From b40872f5e5ae5dad331c58f75451e2d206565196 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 14 Sep 2022 14:31:54 -0700 Subject: [PATCH 087/891] prepare_jinja2_environment(datasette) argument, refs #1809 --- datasette/app.py | 2 +- datasette/hookspecs.py | 2 +- docs/plugin_hooks.rst | 9 +++++++-- tests/plugins/my_plugin.py | 3 ++- tests/test_plugins.py | 5 +++-- 5 files changed, 14 insertions(+), 7 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index aeb81687..db686670 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -345,7 +345,7 @@ class Datasette: self.jinja_env.filters["escape_sqlite"] = escape_sqlite self.jinja_env.filters["to_css_class"] = to_css_class # pylint: disable=no-member - pm.hook.prepare_jinja2_environment(env=self.jinja_env) + pm.hook.prepare_jinja2_environment(env=self.jinja_env, datasette=self) self._register_renderers() self._permission_checks = collections.deque(maxlen=200) diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index a5fb536f..34e19664 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -26,7 +26,7 @@ def prepare_connection(conn, database, datasette): @hookspec -def prepare_jinja2_environment(env): +def prepare_jinja2_environment(env, datasette): """Modify Jinja2 template environment e.g. 
register custom template tags""" diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 30bd75b7..62ec5c90 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -61,12 +61,15 @@ Examples: `datasette-jellyfish `_, for @@ -85,6 +88,8 @@ You can now use this filter in your custom templates like so:: Table name: {{ table|uppercase }} +Examples: `datasette-edit-templates `_ + .. _plugin_hook_extra_template_vars: extra_template_vars(template, database, table, columns, view_name, request, datasette) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 53613b7d..d49a7a34 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -142,8 +142,9 @@ def extra_template_vars( @hookimpl -def prepare_jinja2_environment(env): +def prepare_jinja2_environment(env, datasette): env.filters["format_numeric"] = lambda s: f"{float(s):,.0f}" + env.filters["to_hello"] = lambda s: datasette._HELLO @hookimpl diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 948a40b8..590d88f6 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -545,11 +545,12 @@ def test_hook_register_output_renderer_can_render(app_client): @pytest.mark.asyncio async def test_hook_prepare_jinja2_environment(app_client): + app_client.ds._HELLO = "HI" template = app_client.ds.jinja_env.from_string( - "Hello there, {{ a|format_numeric }}", {"a": 3412341} + "Hello there, {{ a|format_numeric }}, {{ a|to_hello }}", {"a": 3412341} ) rendered = await app_client.ds.render_template(template) - assert "Hello there, 3,412,341" == rendered + assert "Hello there, 3,412,341, HI" == rendered def test_hook_publish_subcommand(): From 2ebcffe2226ece2a5a86722790d486a480338632 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Sep 2022 12:50:52 -0700 Subject: [PATCH 088/891] Bump furo from 2022.6.21 to 2022.9.15 (#1812) Bumps [furo](https://github.com/pradyunsg/furo) from 2022.6.21 to 2022.9.15. 
- [Release notes](https://github.com/pradyunsg/furo/releases) - [Changelog](https://github.com/pradyunsg/furo/blob/main/docs/changelog.md) - [Commits](https://github.com/pradyunsg/furo/compare/2022.06.21...2022.09.15) --- updated-dependencies: - dependency-name: furo dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 92fa60d0..afcba1f0 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,7 @@ setup( setup_requires=["pytest-runner"], extras_require={ "docs": [ - "furo==2022.6.21", + "furo==2022.9.15", "sphinx-autobuild", "codespell", "blacken-docs", From ddc999ad1296e8c69cffede3e367dda059b8adad Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 16 Sep 2022 20:38:15 -0700 Subject: [PATCH 089/891] Async support for prepare_jinja2_environment, closes #1809 --- datasette/app.py | 22 ++++++++++++++--- datasette/utils/testing.py | 1 + docs/plugin_hooks.rst | 2 ++ docs/testing_plugins.rst | 30 ++++++++++++++++++++++++ tests/fixtures.py | 1 + tests/plugins/my_plugin.py | 10 ++++++-- tests/plugins/my_plugin_2.py | 6 +++++ tests/test_internals_datasette_client.py | 6 +++-- tests/test_plugins.py | 6 +++-- tests/test_routes.py | 1 + 10 files changed, 76 insertions(+), 9 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index db686670..ea3e7b43 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -208,6 +208,7 @@ class Datasette: crossdb=False, nolock=False, ): + self._startup_invoked = False assert config_dir is None or isinstance( config_dir, Path ), "config_dir= should be a pathlib.Path" @@ -344,9 +345,6 @@ class Datasette: self.jinja_env.filters["quote_plus"] = urllib.parse.quote_plus self.jinja_env.filters["escape_sqlite"] = escape_sqlite self.jinja_env.filters["to_css_class"] = 
to_css_class - # pylint: disable=no-member - pm.hook.prepare_jinja2_environment(env=self.jinja_env, datasette=self) - self._register_renderers() self._permission_checks = collections.deque(maxlen=200) self._root_token = secrets.token_hex(32) @@ -389,8 +387,16 @@ class Datasette: return Urls(self) async def invoke_startup(self): + # This must be called for Datasette to be in a usable state + if self._startup_invoked: + return + for hook in pm.hook.prepare_jinja2_environment( + env=self.jinja_env, datasette=self + ): + await await_me_maybe(hook) for hook in pm.hook.startup(datasette=self): await await_me_maybe(hook) + self._startup_invoked = True def sign(self, value, namespace="default"): return URLSafeSerializer(self._secret, namespace).dumps(value) @@ -933,6 +939,8 @@ class Datasette: async def render_template( self, templates, context=None, request=None, view_name=None ): + if not self._startup_invoked: + raise Exception("render_template() called before await ds.invoke_startup()") context = context or {} if isinstance(templates, Template): template = templates @@ -1495,34 +1503,42 @@ class DatasetteClient: return path async def get(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.get(self._fix(path), **kwargs) async def options(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.options(self._fix(path), **kwargs) async def head(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.head(self._fix(path), **kwargs) async def post(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.post(self._fix(path), **kwargs) async def put(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await 
client.put(self._fix(path), **kwargs) async def patch(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.patch(self._fix(path), **kwargs) async def delete(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.delete(self._fix(path), **kwargs) async def request(self, method, path, **kwargs): + await self.ds.invoke_startup() avoid_path_rewrites = kwargs.pop("avoid_path_rewrites", None) async with httpx.AsyncClient(app=self.app) as client: return await client.request( diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index 640c94e6..b28fc575 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -147,6 +147,7 @@ class TestClient: content_type=None, if_none_match=None, ): + await self.ds.invoke_startup() headers = headers or {} if content_type: headers["content-type"] = content_type diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 62ec5c90..f208e727 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -88,6 +88,8 @@ You can now use this filter in your custom templates like so:: Table name: {{ table|uppercase }} +This function can return an awaitable function if it needs to run any async code. + Examples: `datasette-edit-templates `_ .. _plugin_hook_extra_template_vars: diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 992b4b0e..41f50e56 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -52,6 +52,36 @@ Then run the tests using pytest like so:: pytest +.. _testing_plugins_datasette_test_instance: + +Setting up a Datasette test instance +------------------------------------ + +The above example shows the easiest way to start writing tests against a Datasette instance: + +.. 
Creating a ``Datasette()`` instance like this is a useful shortcut in tests, but there is one detail you need to be aware of.
a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index 4df02343..cee80703 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -126,6 +126,12 @@ def permission_allowed(datasette, actor, action): return inner +@hookimpl +def prepare_jinja2_environment(env, datasette): + env.filters["format_numeric"] = lambda s: f"{float(s):,.0f}" + env.filters["to_hello"] = lambda s: datasette._HELLO + + @hookimpl def startup(datasette): async def inner(): diff --git a/tests/test_internals_datasette_client.py b/tests/test_internals_datasette_client.py index 8c5b5bd3..497bf475 100644 --- a/tests/test_internals_datasette_client.py +++ b/tests/test_internals_datasette_client.py @@ -1,10 +1,12 @@ from .fixtures import app_client import httpx import pytest +import pytest_asyncio -@pytest.fixture -def datasette(app_client): +@pytest_asyncio.fixture +async def datasette(app_client): + await app_client.ds.invoke_startup() return app_client.ds diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 590d88f6..0ae3abf3 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -546,11 +546,13 @@ def test_hook_register_output_renderer_can_render(app_client): @pytest.mark.asyncio async def test_hook_prepare_jinja2_environment(app_client): app_client.ds._HELLO = "HI" + await app_client.ds.invoke_startup() template = app_client.ds.jinja_env.from_string( - "Hello there, {{ a|format_numeric }}, {{ a|to_hello }}", {"a": 3412341} + "Hello there, {{ a|format_numeric }}, {{ a|to_hello }}, {{ b|select_times_three }}", + {"a": 3412341, "b": 5}, ) rendered = await app_client.ds.render_template(template) - assert "Hello there, 3,412,341, HI" == rendered + assert "Hello there, 3,412,341, HI, 15" == rendered def test_hook_publish_subcommand(): diff --git a/tests/test_routes.py b/tests/test_routes.py index 5ae55d21..d467abe1 100644 --- a/tests/test_routes.py +++ b/tests/test_routes.py @@ -59,6 +59,7 @@ def test_routes(routes, path, expected_class, 
expected_matches): @pytest_asyncio.fixture async def ds_with_route(): ds = Datasette() + await ds.invoke_startup() ds.remove_database("_memory") db = Database(ds, is_memory=True, memory_name="route-name-db") ds.add_database(db, name="original-name", route="custom-route-name") From df851c117db031dec50dd4ef1ca34745920ac77a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 19 Sep 2022 16:46:39 -0700 Subject: [PATCH 090/891] Validate settings.json keys on startup, closes #1816 Refs #1814 --- datasette/app.py | 4 ++++ tests/test_config_dir.py | 20 ++++++++++++++++++-- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index ea3e7b43..8873ce28 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -292,6 +292,10 @@ class Datasette: raise StartupError("config.json should be renamed to settings.json") if config_dir and (config_dir / "settings.json").exists() and not settings: settings = json.loads((config_dir / "settings.json").read_text()) + # Validate those settings + for key in settings: + if key not in DEFAULT_SETTINGS: + raise StartupError("Invalid setting '{key}' in settings.json") self._settings = dict(DEFAULT_SETTINGS, **(settings or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index fe927c42..e365515b 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -5,6 +5,7 @@ import pytest from datasette.app import Datasette from datasette.cli import cli from datasette.utils.sqlite import sqlite3 +from datasette.utils import StartupError from .fixtures import TestClient as _TestClient from click.testing import CliRunner @@ -27,9 +28,8 @@ body { margin-top: 3em} @pytest.fixture(scope="session") -def config_dir_client(tmp_path_factory): +def config_dir(tmp_path_factory): config_dir = tmp_path_factory.mktemp("config-dir") - plugins_dir = config_dir / "plugins" 
plugins_dir.mkdir() (plugins_dir / "hooray.py").write_text(PLUGIN, "utf-8") @@ -77,7 +77,23 @@ def config_dir_client(tmp_path_factory): ), "utf-8", ) + return config_dir + +def test_invalid_settings(config_dir): + previous = (config_dir / "settings.json").read_text("utf-8") + (config_dir / "settings.json").write_text( + json.dumps({"invalid": "invalid-setting"}), "utf-8" + ) + try: + with pytest.raises(StartupError): + ds = Datasette([], config_dir=config_dir) + finally: + (config_dir / "settings.json").write_text(previous, "utf-8") + + +@pytest.fixture(scope="session") +def config_dir_client(config_dir): ds = Datasette([], config_dir=config_dir) yield _TestClient(ds) From cb1e093fd361b758120aefc1a444df02462389a3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 19 Sep 2022 18:15:40 -0700 Subject: [PATCH 091/891] Fixed error message, closes #1816 --- datasette/app.py | 4 +++- tests/test_config_dir.py | 3 ++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 8873ce28..03d1dacc 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -295,7 +295,9 @@ class Datasette: # Validate those settings for key in settings: if key not in DEFAULT_SETTINGS: - raise StartupError("Invalid setting '{key}' in settings.json") + raise StartupError( + "Invalid setting '{}' in settings.json".format(key) + ) self._settings = dict(DEFAULT_SETTINGS, **(settings or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index e365515b..f5ecf0d6 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -86,8 +86,9 @@ def test_invalid_settings(config_dir): json.dumps({"invalid": "invalid-setting"}), "utf-8" ) try: - with pytest.raises(StartupError): + with pytest.raises(StartupError) as ex: ds = Datasette([], config_dir=config_dir) + assert ex.value.args[0] == "Invalid setting 'invalid' in 
- Datasette no longer enforces upper bounds on its dependencies. (:issue:`1800`)
(`#1794 `__) +- The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. (:issue:`1816`) + .. _v0_62: 0.62 (2022-08-14) From 5f9f567acbc58c9fcd88af440e68034510fb5d2b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 26 Sep 2022 16:06:01 -0700 Subject: [PATCH 093/891] Show SQL query when reporting time limit error, closes #1819 --- datasette/database.py | 5 ++++- datasette/views/base.py | 21 +++++++++++++-------- tests/test_api.py | 12 +++++++++++- tests/test_html.py | 10 +++++++--- 4 files changed, 35 insertions(+), 13 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 44467370..46094bd7 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -476,7 +476,10 @@ class WriteTask: class QueryInterrupted(Exception): - pass + def __init__(self, e, sql, params): + self.e = e + self.sql = sql + self.params = params class MultipleValues(Exception): diff --git a/datasette/views/base.py b/datasette/views/base.py index 221e1882..67aa3a42 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -1,10 +1,12 @@ import asyncio import csv import hashlib -import re import sys +import textwrap import time import urllib +from markupsafe import escape + import pint @@ -24,11 +26,9 @@ from datasette.utils import ( path_with_removed_args, path_with_format, sqlite3, - HASH_LENGTH, ) from datasette.utils.asgi import ( AsgiStream, - Forbidden, NotFound, Response, BadRequest, @@ -371,13 +371,18 @@ class DataView(BaseView): ) = response_or_template_contexts else: data, extra_template_data, templates = response_or_template_contexts - except QueryInterrupted: + except QueryInterrupted as ex: raise DatasetteError( - """ - SQL query took too long. The time limit is controlled by the + textwrap.dedent( + """ +

SQL query took too long. The time limit is controlled by the sql_time_limit_ms - configuration option. - """, + configuration option.

+
{}
+ """.format( + escape(ex.sql) + ) + ).strip(), title="SQL Interrupted", status=400, message_is_html=True, diff --git a/tests/test_api.py b/tests/test_api.py index 7a2bf91f..ad74d16e 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -656,7 +656,17 @@ def test_custom_sql(app_client): def test_sql_time_limit(app_client_shorter_time_limit): response = app_client_shorter_time_limit.get("/fixtures.json?sql=select+sleep(0.5)") assert 400 == response.status - assert "SQL Interrupted" == response.json["title"] + assert response.json == { + "ok": False, + "error": ( + "

SQL query took too long. The time limit is controlled by the\n" + 'sql_time_limit_ms\n' + "configuration option.

\n" + "
select sleep(0.5)
" + ), + "status": 400, + "title": "SQL Interrupted", + } def test_custom_sql_time_limit(app_client): diff --git a/tests/test_html.py b/tests/test_html.py index bf915247..a99b0b6c 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -168,10 +168,14 @@ def test_disallowed_custom_sql_pragma(app_client): def test_sql_time_limit(app_client_shorter_time_limit): response = app_client_shorter_time_limit.get("/fixtures?sql=select+sleep(0.5)") assert 400 == response.status - expected_html_fragment = """ + expected_html_fragments = [ + """ sql_time_limit_ms - """.strip() - assert expected_html_fragment in response.text + """.strip(), + "
select sleep(0.5)
", + ] + for expected_html_fragment in expected_html_fragments: + assert expected_html_fragment in response.text def test_row_page_does_not_truncate(): From 7fb4ea4e39a15e1f7d3202949794d98af1cfa272 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 27 Sep 2022 21:06:40 -0700 Subject: [PATCH 094/891] Update note about render_cell signature, refs #1826 --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index f208e727..c9cab8ab 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -9,7 +9,7 @@ Each plugin can implement one or more hooks using the ``@hookimpl`` decorator ag When you implement a plugin hook you can accept any or all of the parameters that are documented as being passed to that hook. -For example, you can implement the ``render_cell`` plugin hook like this even though the full documented hook signature is ``render_cell(value, column, table, database, datasette)``: +For example, you can implement the ``render_cell`` plugin hook like this even though the full documented hook signature is ``render_cell(row, value, column, table, database, datasette)``: .. code-block:: python From 984b1df12cf19a6731889fc0665bb5f622e07b7c Mon Sep 17 00:00:00 2001 From: Adam Simpson Date: Wed, 28 Sep 2022 00:21:36 -0400 Subject: [PATCH 095/891] Add documentation for serving via OpenRC (#1825) * Add documentation for serving via OpenRC --- docs/deploying.rst | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/docs/deploying.rst b/docs/deploying.rst index d4ad8836..c8552758 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -74,18 +74,30 @@ Once the service has started you can confirm that Datasette is running on port 8 curl 127.0.0.1:8000/-/versions.json # Should output JSON showing the installed version -Datasette will not be accessible from outside the server because it is listening on ``127.0.0.1``. 
You can expose it by instead listening on ``0.0.0.0``, but a better way is to set up a proxy such as ``nginx``. +Datasette will not be accessible from outside the server because it is listening on ``127.0.0.1``. You can expose it by instead listening on ``0.0.0.0``, but a better way is to set up a proxy such as ``nginx`` - see :ref:`deploying_proxy`. -Ubuntu offer `a tutorial on installing nginx `__. Once it is installed you can add configuration to proxy traffic through to Datasette that looks like this:: +.. _deploying_openrc: - server { - server_name mysubdomain.myhost.net; +Running Datasette using OpenRC +=============================== +OpenRC is the service manager on non-systemd Linux distributions like `Alpine Linux `__ and `Gentoo `__. - location / { - proxy_pass http://127.0.0.1:8000/; - proxy_set_header Host $host; - } - } +Create an init script at ``/etc/init.d/datasette`` with the following contents: + +.. code-block:: sh + + #!/sbin/openrc-run + + name="datasette" + command="datasette" + command_args="serve -h 0.0.0.0 /path/to/db.db" + command_background=true + pidfile="/run/${RC_SVCNAME}.pid" + +You then need to configure the service to run at boot and start it:: + + rc-update add datasette + rc-service datasette start .. _deploying_buildpacks: From 34defdc10aa293294ca01cfab70780755447e1d7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 28 Sep 2022 17:39:36 -0700 Subject: [PATCH 096/891] Browse the plugins directory --- docs/writing_plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index 01ee8c90..a3fc88ec 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -234,7 +234,7 @@ To avoid accidentally conflicting with a database file that may be loaded into D - ``/-/upload-excel`` -Try to avoid registering URLs that clash with other plugins that your users might have installed. 
Try to avoid registering URLs that clash with other plugins that your users might have installed. There is no central repository of reserved URL paths (yet) but you can review existing plugins by browsing the `plugins directory `__.
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index afcba1f0..fe258adb 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,7 @@ setup( setup_requires=["pytest-runner"], extras_require={ "docs": [ - "furo==2022.9.15", + "furo==2022.9.29", "sphinx-autobuild", "codespell", "blacken-docs", From 883e326dd6ef95f854f7750ef2d4b0e17082fa96 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 2 Oct 2022 14:26:16 -0700 Subject: [PATCH 098/891] Drop word-wrap: anywhere, refs #1828, #1805 --- datasette/static/app.css | 1 - 1 file changed, 1 deletion(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index 08b724f6..712b9925 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -446,7 +446,6 @@ th { } table a:link { text-decoration: none; - word-wrap: anywhere; } .rows-and-columns td:before { display: block; From 4218c9cd742b79b1e3cb80878e42b7e39d16ded2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 4 Oct 2022 11:45:36 -0700 Subject: [PATCH 099/891] reST markup fix --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index c9cab8ab..832a76b0 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -268,7 +268,7 @@ you have one: def extra_js_urls(): return ["/-/static-plugins/your-plugin/app.js"] -Note that `your-plugin` here should be the hyphenated plugin name - the name that is displayed in the list on the `/-/plugins` debug page. +Note that ``your-plugin`` here should be the hyphenated plugin name - the name that is displayed in the list on the ``/-/plugins`` debug page. If your code uses `JavaScript modules `__ you should include the ``"module": True`` key. See :ref:`customization_css_and_javascript` for more details. 
From b6ba117b7978b58b40e3c3c2b723b92c3010ed53 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 4 Oct 2022 18:25:52 -0700 Subject: [PATCH 100/891] Clarify request or None for two hooks --- docs/plugin_hooks.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 832a76b0..b61f953a 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1281,7 +1281,7 @@ menu_links(datasette, actor, request) ``actor`` - dictionary or None The currently authenticated :ref:`actor `. -``request`` - :ref:`internals_request` +``request`` - :ref:`internals_request` or None The current HTTP request. This can be ``None`` if the request object is not available. This hook allows additional items to be included in the menu displayed by Datasette's top right menu icon. @@ -1330,7 +1330,7 @@ table_actions(datasette, actor, database, table, request) ``table`` - string The name of the table. -``request`` - :ref:`internals_request` +``request`` - :ref:`internals_request` or None The current HTTP request. This can be ``None`` if the request object is not available. This hook allows table actions to be displayed in a menu accessed via an action icon at the top of the table page. It should return a list of ``{"href": "...", "label": "..."}`` menu items. 
From bbf33a763537a1d913180b22bd3b5fe4a5e5b252 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 4 Oct 2022 21:32:11 -0700 Subject: [PATCH 101/891] Test for bool(results), closes #1832 --- tests/test_internals_database.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 9e81c1d6..4e33beed 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -30,6 +30,14 @@ async def test_results_first(db): assert isinstance(row, sqlite3.Row) +@pytest.mark.asyncio +@pytest.mark.parametrize("expected", (True, False)) +async def test_results_bool(db, expected): + where = "" if expected else "where pk = 0" + results = await db.execute("select * from facetable {}".format(where)) + assert bool(results) is expected + + @pytest.mark.parametrize( "query,expected", [ From eff112498ecc499323c26612d707908831446d25 Mon Sep 17 00:00:00 2001 From: Forest Gregg Date: Thu, 6 Oct 2022 16:06:06 -0400 Subject: [PATCH 102/891] Useuse inspect data for hash and file size on startup Thanks, @fgregg Closes #1834 --- datasette/database.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 46094bd7..d75bd70c 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -48,9 +48,13 @@ class Database: self._read_connection = None self._write_connection = None if not self.is_mutable and not self.is_memory: - p = Path(path) - self.hash = inspect_hash(p) - self.cached_size = p.stat().st_size + if self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self.hash = self.ds.inspect_data[self.name]["hash"] + self.cached_size = self.ds.inspect_data[self.name]["size"] + else: + p = Path(path) + self.hash = inspect_hash(p) + self.cached_size = p.stat().st_size @property def cached_table_counts(self): From b7fec7f9020b79c1fe60cc5a2def86b50eeb5af9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 7 Oct 
Subject: [PATCH 102/891] Use inspect data for hash and file size on startup
-* ``*.db`` - SQLite database files that will be served by Datasette +* ``*.db`` (or ``*.sqlite3`` or ``*.sqlite``) - SQLite database files that will be served by Datasette * ``metadata.json`` - :ref:`metadata` for those databases - ``metadata.yaml`` or ``metadata.yml`` can be used as well * ``inspect-data.json`` - the result of running ``datasette inspect *.db --inspect-file=inspect-data.json`` from the configuration directory - any database files listed here will be treated as immutable, so they should not be changed while Datasette is running * ``settings.json`` - settings that would normally be passed using ``--setting`` - here they should be stored as a JSON object of key/value pairs diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index f5ecf0d6..c2af3836 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -49,7 +49,7 @@ def config_dir(tmp_path_factory): (config_dir / "metadata.json").write_text(json.dumps(METADATA), "utf-8") (config_dir / "settings.json").write_text(json.dumps(SETTINGS), "utf-8") - for dbname in ("demo.db", "immutable.db"): + for dbname in ("demo.db", "immutable.db", "j.sqlite3", "k.sqlite"): db = sqlite3.connect(str(config_dir / dbname)) db.executescript( """ @@ -151,12 +151,11 @@ def test_databases(config_dir_client): response = config_dir_client.get("/-/databases.json") assert 200 == response.status databases = response.json - assert 2 == len(databases) + assert 4 == len(databases) databases.sort(key=lambda d: d["name"]) - assert "demo" == databases[0]["name"] - assert databases[0]["is_mutable"] - assert "immutable" == databases[1]["name"] - assert not databases[1]["is_mutable"] + for db, expected_name in zip(databases, ("demo", "immutable", "j", "k")): + assert expected_name == db["name"] + assert db["is_mutable"] == (expected_name != "immutable") @pytest.mark.parametrize("filename", ("metadata.yml", "metadata.yaml")) From 1a5e5f2aa951e5bd731067a49819efba68fbe8ef Mon Sep 17 00:00:00 2001 From: Simon 
Willison Date: Thu, 13 Oct 2022 14:42:52 -0700 Subject: [PATCH 104/891] Refactor breadcrumbs to respect permissions, refs #1831 --- datasette/app.py | 40 ++++++++++++++++++++++ datasette/templates/_crumbs.html | 15 ++++++++ datasette/templates/base.html | 4 +-- datasette/templates/database.html | 9 ----- datasette/templates/error.html | 7 ---- datasette/templates/logout.html | 7 ---- datasette/templates/permissions_debug.html | 7 ---- datasette/templates/query.html | 8 ++--- datasette/templates/row.html | 9 ++--- datasette/templates/show_json.html | 7 ---- datasette/templates/table.html | 8 ++--- tests/test_permissions.py | 1 + tests/test_plugins.py | 2 +- 13 files changed, 65 insertions(+), 59 deletions(-) create mode 100644 datasette/templates/_crumbs.html diff --git a/datasette/app.py b/datasette/app.py index 32a911c2..5fa4955c 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -631,6 +631,44 @@ class Datasette: else: return [] + async def _crumb_items(self, request, table=None, database=None): + crumbs = [] + # Top-level link + if await self.permission_allowed( + actor=request.actor, action="view-instance", default=True + ): + crumbs.append({"href": self.urls.instance(), "label": "home"}) + # Database link + if database: + if await self.permission_allowed( + actor=request.actor, + action="view-database", + resource=database, + default=True, + ): + crumbs.append( + { + "href": self.urls.database(database), + "label": database, + } + ) + # Table link + if table: + assert database, "table= requires database=" + if await self.permission_allowed( + actor=request.actor, + action="view-table", + resource=(database, table), + default=True, + ): + crumbs.append( + { + "href": self.urls.table(database, table), + "label": table, + } + ) + return crumbs + async def permission_allowed(self, actor, action, resource=None, default=False): """Check permissions using the permissions_allowed plugin hook""" result = None @@ -1009,6 +1047,8 @@ class Datasette: template_context 
= { **context, **{ + "request": request, + "crumb_items": self._crumb_items, "urls": self.urls, "actor": request.actor if request else None, "menu_links": menu_links, diff --git a/datasette/templates/_crumbs.html b/datasette/templates/_crumbs.html new file mode 100644 index 00000000..bd1ff0da --- /dev/null +++ b/datasette/templates/_crumbs.html @@ -0,0 +1,15 @@ +{% macro nav(request, database=None, table=None) -%} +{% if crumb_items is defined %} + {% set items=crumb_items(request=request, database=database, table=table) %} + {% if items %} +
<p class="crumbs">
+ {% for item in items %} + <a href="{{ item.href }}">{{ item.label }}</a> + {% if not loop.last %} + / + {% endif %} + {% endfor %} +
</p>
+ {% endif %} +{% endif %} +{%- endmacro %} diff --git a/datasette/templates/base.html b/datasette/templates/base.html index c3a71acb..87c939ac 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -1,4 +1,4 @@ - +{% import "_crumbs.html" as crumbs with context %} {% block title %}{% endblock %} @@ -17,7 +17,7 @@