From 58652dd925bb7509b43905423ec00083bd374dc1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 19 Jan 2022 20:12:46 -0800 Subject: [PATCH 0001/1003] Hidden tables sqlite1/2/3/4, closes #1587 --- datasette/database.py | 4 +++- tests/test_api.py | 12 ++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/datasette/database.py b/datasette/database.py index e908d1ea..06dc8da5 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -345,7 +345,9 @@ class Database: """ select name from sqlite_master where rootpage = 0 - and sql like '%VIRTUAL TABLE%USING FTS%' + and ( + sql like '%VIRTUAL TABLE%USING FTS%' + ) or name in ('sqlite_stat1', 'sqlite_stat2', 'sqlite_stat3', 'sqlite_stat4') """ ) ).rows diff --git a/tests/test_api.py b/tests/test_api.py index 574ebb41..47ec3a8c 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1030,3 +1030,15 @@ async def test_db_path(app_client): # Previously this broke if path was a pathlib.Path: await datasette.refresh_schemas() + + +@pytest.mark.asyncio +async def test_hidden_sqlite_stat1_table(): + ds = Datasette() + db = ds.add_memory_database("db") + await db.execute_write("create table normal (id integer primary key, name text)") + await db.execute_write("create index idx on normal (name)") + await db.execute_write("analyze") + data = (await ds.client.get("/db.json?_show_hidden=1")).json() + tables = [(t["name"], t["hidden"]) for t in data["tables"]] + assert tables == [("normal", False), ("sqlite_stat1", True)] From fae3983c51f4a3aca8335f3e01ff85ef27076fbf Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 19 Jan 2022 20:31:22 -0800 Subject: [PATCH 0002/1003] Drop support for Python 3.6, closes #1577 Refs #1606 --- .github/workflows/publish.yml | 6 +++--- .github/workflows/test.yml | 2 +- README.md | 2 +- docs/contributing.rst | 2 +- docs/installation.rst | 2 +- docs/introspection.rst | 8 ++++---- setup.py | 3 +-- 7 files changed, 12 insertions(+), 13 deletions(-) diff --git 
a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 17c6ae9b..3cfc67da 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"] + python-version: ["3.7", "3.8", "3.9", "3.10"] steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} @@ -38,7 +38,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: '3.9' + python-version: '3.10' - uses: actions/cache@v2 name: Configure pip caching with: @@ -66,7 +66,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: '3.9' + python-version: '3.10' - uses: actions/cache@v2 name: Configure pip caching with: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 704931a6..78c289bb 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,7 +7,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"] + python-version: ["3.7", "3.8", "3.9", "3.10"] steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} diff --git a/README.md b/README.md index ce15ccf4..107d81da 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ You can also install it using `pip` or `pipx`: pip install datasette -Datasette requires Python 3.6 or higher. We also have [detailed installation instructions](https://docs.datasette.io/en/stable/installation.html) covering other options such as Docker. +Datasette requires Python 3.7 or higher. We also have [detailed installation instructions](https://docs.datasette.io/en/stable/installation.html) covering other options such as Docker. 
## Basic usage diff --git a/docs/contributing.rst b/docs/contributing.rst index 07f2a0e4..b74f2f36 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -19,7 +19,7 @@ General guidelines Setting up a development environment ------------------------------------ -If you have Python 3.6 or higher installed on your computer (on OS X the quickest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps. +If you have Python 3.7 or higher installed on your computer (on OS X the quickest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps. If you want to use GitHub to publish your changes, first `create a fork of datasette `__ under your own GitHub account. diff --git a/docs/installation.rst b/docs/installation.rst index ac3dcca2..e8bef9cd 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -56,7 +56,7 @@ If the latest packaged release of Datasette has not yet been made available thro Using pip --------- -Datasette requires Python 3.6 or higher. Visit `InstallPython3.com `__ for step-by-step installation guides for your operating system. +Datasette requires Python 3.7 or higher. Visit `InstallPython3.com `__ for step-by-step installation guides for your operating system. You can install Datasette and its dependencies using ``pip``:: diff --git a/docs/introspection.rst b/docs/introspection.rst index d1a0a854..e08ca911 100644 --- a/docs/introspection.rst +++ b/docs/introspection.rst @@ -38,11 +38,11 @@ Shows the version of Datasette, Python and SQLite. 
`Versions example =3.2.10,<3.5.0", "click>=7.1.1,<8.1.0", @@ -91,6 +91,5 @@ setup( "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.6", ], ) From 14e320329f756b7d8e298c4e2251d8a0b194c9c4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 19 Jan 2022 20:38:49 -0800 Subject: [PATCH 0003/1003] Hidden tables data_licenses, KNN, KNN2 for SpatiaLite, closes #1601 --- datasette/database.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/datasette/database.py b/datasette/database.py index 06dc8da5..6ce87215 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -365,6 +365,9 @@ class Database: "sqlite_sequence", "views_geometry_columns", "virts_geometry_columns", + "data_licenses", + "KNN", + "KNN2", ] + [ r[0] for r in ( From 43c30ce0236ebbc7e9cec98a3822265eb2691430 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 19 Jan 2022 21:04:09 -0800 Subject: [PATCH 0004/1003] Use cog to maintain default plugin list in plugins.rst, closes #1600 Also fixed a bug I spotted where datasette.filters showed the same hook three times. --- datasette/app.py | 2 +- docs/plugins.rst | 97 ++++++++++++++++++++++++++++++++++++++++++++---- 2 files changed, 91 insertions(+), 8 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index bd663509..0a89a9f3 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -770,7 +770,7 @@ class Datasette: "static": p["static_path"] is not None, "templates": p["templates_path"] is not None, "version": p.get("version"), - "hooks": p["hooks"], + "hooks": list(set(p["hooks"])), } for p in ps ] diff --git a/docs/plugins.rst b/docs/plugins.rst index 020030f1..4a2c0194 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -91,36 +91,119 @@ You can also use the ``datasette plugins`` command:: } ] +.. 
[[[cog + from datasette import cli + from click.testing import CliRunner + import textwrap, json + cog.out("\n") + result = CliRunner().invoke(cli.cli, ["plugins", "--all"]) + # cog.out() with text containing newlines was unindenting for some reason + cog.outl("If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette::\n") + plugins = [p for p in json.loads(result.output) if p["name"].startswith("datasette.")] + indented = textwrap.indent(json.dumps(plugins, indent=4), " ") + for line in indented.split("\n"): + cog.outl(line) + cog.out("\n\n") +.. ]]] + If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette:: - $ datasette plugins --all [ + { + "name": "datasette.publish.heroku", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "publish_subcommand" + ] + }, { "name": "datasette.sql_functions", "static": false, "templates": false, - "version": null + "version": null, + "hooks": [ + "prepare_connection" + ] }, { - "name": "datasette.publish.cloudrun", + "name": "datasette.actor_auth_cookie", "static": false, "templates": false, - "version": null + "version": null, + "hooks": [ + "actor_from_request" + ] + }, + { + "name": "datasette.blob_renderer", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "register_output_renderer" + ] }, { "name": "datasette.facets", "static": false, "templates": false, - "version": null + "version": null, + "hooks": [ + "register_facet_classes" + ] }, { - "name": "datasette.publish.heroku", + "name": "datasette.default_magic_parameters", "static": false, "templates": false, - "version": null + "version": null, + "hooks": [ + "register_magic_parameters" + ] + }, + { + "name": "datasette.default_permissions", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "permission_allowed" + ] + }, + { + "name": "datasette.default_menu_links", + "static": false, + "templates": false, + 
"version": null, + "hooks": [ + "menu_links" + ] + }, + { + "name": "datasette.filters", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "filters_from_request" + ] + }, + { + "name": "datasette.publish.cloudrun", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "publish_subcommand" + ] } ] + +.. [[[end]]] + You can add the ``--plugins-dir=`` option to include any plugins found in that directory. .. _plugins_configuration: From e1770766ce3ae6669305662ba618be610367af77 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 19 Jan 2022 21:14:04 -0800 Subject: [PATCH 0005/1003] Return plugins and hooks in predictable order --- datasette/app.py | 3 ++- docs/plugins.rst | 58 +++++++++++++++++++++++------------------------ tests/test_cli.py | 4 +--- 3 files changed, 32 insertions(+), 33 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 0a89a9f3..49858a4a 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -764,13 +764,14 @@ class Datasette: should_show_all = all if not should_show_all: ps = [p for p in ps if p["name"] not in DEFAULT_PLUGINS] + ps.sort(key=lambda p: p["name"]) return [ { "name": p["name"], "static": p["static_path"] is not None, "templates": p["templates_path"] is not None, "version": p.get("version"), - "hooks": list(set(p["hooks"])), + "hooks": list(sorted(set(p["hooks"]))), } for p in ps ] diff --git a/docs/plugins.rst b/docs/plugins.rst index 4a2c0194..f2ed02f7 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -109,24 +109,6 @@ You can also use the ``datasette plugins`` command:: If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette:: [ - { - "name": "datasette.publish.heroku", - "static": false, - "templates": false, - "version": null, - "hooks": [ - "publish_subcommand" - ] - }, - { - "name": "datasette.sql_functions", - "static": false, - "templates": false, - "version": null, - "hooks": [ - "prepare_connection" - ] - }, 
{ "name": "datasette.actor_auth_cookie", "static": false, @@ -145,15 +127,6 @@ If you run ``datasette plugins --all`` it will include default plugins that ship "register_output_renderer" ] }, - { - "name": "datasette.facets", - "static": false, - "templates": false, - "version": null, - "hooks": [ - "register_facet_classes" - ] - }, { "name": "datasette.default_magic_parameters", "static": false, @@ -163,6 +136,15 @@ If you run ``datasette plugins --all`` it will include default plugins that ship "register_magic_parameters" ] }, + { + "name": "datasette.default_menu_links", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "menu_links" + ] + }, { "name": "datasette.default_permissions", "static": false, @@ -173,12 +155,12 @@ If you run ``datasette plugins --all`` it will include default plugins that ship ] }, { - "name": "datasette.default_menu_links", + "name": "datasette.facets", "static": false, "templates": false, "version": null, "hooks": [ - "menu_links" + "register_facet_classes" ] }, { @@ -198,6 +180,24 @@ If you run ``datasette plugins --all`` it will include default plugins that ship "hooks": [ "publish_subcommand" ] + }, + { + "name": "datasette.publish.heroku", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "publish_subcommand" + ] + }, + { + "name": "datasette.sql_functions", + "static": false, + "templates": false, + "version": null, + "hooks": [ + "prepare_connection" + ] } ] diff --git a/tests/test_cli.py b/tests/test_cli.py index 763fe2e7..bbc5df30 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -106,9 +106,7 @@ def test_spatialite_error_if_cannot_find_load_extension_spatialite(): def test_plugins_cli(app_client): runner = CliRunner() result1 = runner.invoke(cli, ["plugins"]) - assert sorted(EXPECTED_PLUGINS, key=lambda p: p["name"]) == sorted( - json.loads(result1.output), key=lambda p: p["name"] - ) + assert json.loads(result1.output) == EXPECTED_PLUGINS # Try with --all result2 = 
runner.invoke(cli, ["plugins", "--all"]) names = [p["name"] for p in json.loads(result2.output)] From 0467723ee57c2cbc0f02daa47cef632dd4651df0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 19 Jan 2022 21:46:03 -0800 Subject: [PATCH 0006/1003] New, improved favicon - refs #1603 --- datasette/app.py | 11 ++++++++++- datasette/static/favicon.png | Bin 0 -> 1207 bytes tests/test_html.py | 4 +++- 3 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 datasette/static/favicon.png diff --git a/datasette/app.py b/datasette/app.py index 49858a4a..b2942cd9 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -70,6 +70,7 @@ from .utils.asgi import ( Response, asgi_static, asgi_send, + asgi_send_file, asgi_send_html, asgi_send_json, asgi_send_redirect, @@ -178,9 +179,17 @@ SETTINGS = ( DEFAULT_SETTINGS = {option.name: option.default for option in SETTINGS} +FAVICON_PATH = app_root / "datasette" / "static" / "favicon.png" + async def favicon(request, send): - await asgi_send(send, "", 200) + await asgi_send_file( + send, + str(FAVICON_PATH), + content_type="image/png", + chunk_size=4096, + headers={"Cache-Control": "max-age=3600, immutable, public"}, + ) class Datasette: diff --git a/datasette/static/favicon.png b/datasette/static/favicon.png new file mode 100644 index 0000000000000000000000000000000000000000..36d6334fd7714da87aad44bba0d9c2a869cd0b86 GIT binary patch literal 1207 zcmeAS@N?(olHy`uVBq!ia0vp^86eET1|(%=wk`xxoCO|{#S9GG!XV7ZFl&wkP>``W z$lZxy-8q?;Kn_c~qpu?a!^VE@KZ&eBu8*gSV@L(#+gXPB%B~{E@6V2KU$RE|a=`mo z=TfEn3fnR!zH@Oseo)=wmh*$NvI}i5UR0HpZD-+iO=*9nWijR7qiH4)<$WST)$;i@ z7Pn&6W#%ORmVfzw^0n8MkLM=U{Z2c!d+{f!Nh;ZFoU_%ggt_1E7u_MKO{uKonKnPKDs9jE<8O=B=Jm9FIzh^cEPgJ8#X~AG#rFrX&j=YooK{g8%^w5yH&A zu=@`Lb!Nbnmw#>8dOX(lIu939d((l1P%H5Z9eGn54i6jk3bDLT6V z(?LnOy0g1j|L5P)W(y179CBjQ4UMUHd=0k78I~oh-Vxm-($q3(!3(t?#dH69edwFe z8oYk{U(GqSF+Hy4isG-QHQ(}?v_>-}(Im|H|CQYv-*+cVXYPIVVUa9r1zW3x~{B3L5XDjDF@~uA} 
zp6fgF*VMny^fq~}43T|4*J6vWv}(7)Icsgx-y80q|26qW>$K~W*PdB@%So*DsOB-_ z=bN*RF3o)Fy}u;n^4wRK`eQW3!Y|a>)NJLO@oekkJdvlXd#cw}1p8iomBlP^=5O7< pA8K}rANu~Q{$t Date: Wed, 19 Jan 2022 21:52:00 -0800 Subject: [PATCH 0007/1003] No need to send this, it's got a default, refs #1603 --- datasette/app.py | 1 - 1 file changed, 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index b2942cd9..09d7d034 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -187,7 +187,6 @@ async def favicon(request, send): send, str(FAVICON_PATH), content_type="image/png", - chunk_size=4096, headers={"Cache-Control": "max-age=3600, immutable, public"}, ) From b01c9b68d151e1656fc180815c0d8480e35fc961 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 19 Jan 2022 21:54:41 -0800 Subject: [PATCH 0008/1003] Oops I pushed the wrong favicon, refs #1603 --- datasette/static/favicon.png | Bin 1207 -> 1358 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/datasette/static/favicon.png b/datasette/static/favicon.png index 36d6334fd7714da87aad44bba0d9c2a869cd0b86..a27139ae6ecb09660f96d262d3779de29f7b5d4f 100644 GIT binary patch literal 1358 zcmV-U1+n^xP)C0000;P)t-s0001% zmb`Fqr$j`OPfwIOI*uG1hYt>g|NsB|{QdCp_1@p^(bD6=!P>XE+_AIRsjA42k-T+v zrB+v&Dk+Hr0*3$qfB*n>9JfE)00008bW%=J07QSN(*FMbUcFRk000E7Nkl@%`WQlKxzNVr%CWzi#uW>voplSwP4@{_OZL9ExLB6vg4y?TeGK zxnWMl)7zgdqJ`^`=cF%a;XEnfZ~_DX0hkjY04$p0#Qg^g4mhGP`9S3ZfVHOsyFUU7 z#G%tFCv-O;3Iy9H0!|1BHhuALbX|-?j<*B7==YsgcaaKiLv)L zR~e4ruNx2mhbRC_!pP!O;Q=uhM_!kwFpBR0sbAoGjp31uE6rmB2!vqppAi3=e@S_*n0v zGty(YfGe8&)i!jlbNTZ8k6gf6ZwrwM*c>(yU^fP)5dafI5T+Q;E%LO90Kjouuqqgu z7@dQT7){iyRiobWOrHWV_GJXwv`t$_RNXqRjB#SH)J`Q=U;o0$dY0_27Q z$RHIUH;sTSKQ96sw}B@SP)h{JpA><;4XoA%KC&m=ek@?4VT8>`!{HJ`ce-oqfT)#Y zG_zt{LtImb)Rd<|@j4?y#CNGBW;`PT5d#3;r52f4oE0EAMGWT_dBSW`iX%=j3pmE` zD&UQ<=^Ru&E_v$f}|CN-MwPV)mi1yKN z&k6T7@C`De4~@XjZUetOC*%{AUGI*Bany_z!BPFZm06OIaB4as!7nEye4bD&o+lK` z<_Xc_#$WG!Ey%GRu`WS=5JJd8cZsZlW@~p?wG9Bmwok!=u6XmF 
za9=zA4x{0o59~SN-Uc%Fgx*9x%?T?{h5sFgo{D*{mJ_}|7*$_P$@mOOwek#!nw%k- z&xkn1IkU*qe%uR<%OW5Mi#%ceeL~O7P>ama&b-vxJEU{P4q7v=TjVJa)|f4CwtJt@ zPtyg|m(~l)4}YIxz;1prQLx-0;}3cxWlgs^^}S-R&EEeN*$DIOO*C~-R< zX5L0jdwPaSLA1l+>8@9c&tY)|*l^D)M{zuxv&jcn`mvbX`kWVkc6=QE0tPO=y`~{k QD*ylh07*qoM6N<$g1B&Oe*gdg literal 1207 zcmeAS@N?(olHy`uVBq!ia0vp^86eET1|(%=wk`xxoCO|{#S9GG!XV7ZFl&wkP>``W z$lZxy-8q?;Kn_c~qpu?a!^VE@KZ&eBu8*gSV@L(#+gXPB%B~{E@6V2KU$RE|a=`mo z=TfEn3fnR!zH@Oseo)=wmh*$NvI}i5UR0HpZD-+iO=*9nWijR7qiH4)<$WST)$;i@ z7Pn&6W#%ORmVfzw^0n8MkLM=U{Z2c!d+{f!Nh;ZFoU_%ggt_1E7u_MKO{uKonKnPKDs9jE<8O=B=Jm9FIzh^cEPgJ8#X~AG#rFrX&j=YooK{g8%^w5yH&A zu=@`Lb!Nbnmw#>8dOX(lIu939d((l1P%H5Z9eGn54i6jk3bDLT6V z(?LnOy0g1j|L5P)W(y179CBjQ4UMUHd=0k78I~oh-Vxm-($q3(!3(t?#dH69edwFe z8oYk{U(GqSF+Hy4isG-QHQ(}?v_>-}(Im|H|CQYv-*+cVXYPIVVUa9r1zW3x~{B3L5XDjDF@~uA} zp6fgF*VMny^fq~}43T|4*J6vWv}(7)Icsgx-y80q|26qW>$K~W*PdB@%So*DsOB-_ z=bN*RF3o)Fy}u;n^4wRK`eQW3!Y|a>)NJLO@oekkJdvlXd#cw}1p8iomBlP^=5O7< pA8K}rANu~Q{$t Date: Wed, 19 Jan 2022 21:57:14 -0800 Subject: [PATCH 0009/1003] Make test_favicon flexible to changing icon sizes, refs #1603 --- tests/test_html.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_html.py b/tests/test_html.py index 735d12ff..aa718857 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -64,7 +64,7 @@ def test_favicon(app_client): response = app_client.get("/favicon.ico") assert response.status == 200 assert response.headers["cache-control"] == "max-age=3600, immutable, public" - assert response.headers["content-length"] == "1207" + assert int(response.headers["content-length"]) > 100 assert response.headers["content-type"] == "image/png" From 150967d98ef6c5b6064587e7ed30cbdd9b992b8e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 20 Jan 2022 10:43:15 -0800 Subject: [PATCH 0010/1003] Hand-edited pixel favicon, refs #1603 --- datasette/static/favicon.png | Bin 1358 -> 208 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git 
a/datasette/static/favicon.png b/datasette/static/favicon.png index a27139ae6ecb09660f96d262d3779de29f7b5d4f..4993163f703c425c6512dec380dafe2ca52051a7 100644 GIT binary patch literal 208 zcmeAS@N?(olHy`uVBq!ia0vp^3LwnF3?v&v(vJfvi2$DvS0Mc#47TsQvvJdnm8(v& zGBmL;l)e{q>Icd(l?3?(GYEgu?Rf^|rFyzJhE&{2KERTb>7mfr*l2iDf^!C=sKnw1 z79FNc4F-(`OP(xAS+ay@QOJ}j3QP)SnoLihzI2EP<5Qdv5^(o|!kVa_4lxb=X6adN yPl8f;beC`>@CQ#ili=ZWZBd)t&CVH}j0{Q7(vQX;{-qAIkipZ{&t;ucLK6Tps7T!a literal 1358 zcmV-U1+n^xP)C0000;P)t-s0001% zmb`Fqr$j`OPfwIOI*uG1hYt>g|NsB|{QdCp_1@p^(bD6=!P>XE+_AIRsjA42k-T+v zrB+v&Dk+Hr0*3$qfB*n>9JfE)00008bW%=J07QSN(*FMbUcFRk000E7Nkl@%`WQlKxzNVr%CWzi#uW>voplSwP4@{_OZL9ExLB6vg4y?TeGK zxnWMl)7zgdqJ`^`=cF%a;XEnfZ~_DX0hkjY04$p0#Qg^g4mhGP`9S3ZfVHOsyFUU7 z#G%tFCv-O;3Iy9H0!|1BHhuALbX|-?j<*B7==YsgcaaKiLv)L zR~e4ruNx2mhbRC_!pP!O;Q=uhM_!kwFpBR0sbAoGjp31uE6rmB2!vqppAi3=e@S_*n0v zGty(YfGe8&)i!jlbNTZ8k6gf6ZwrwM*c>(yU^fP)5dafI5T+Q;E%LO90Kjouuqqgu z7@dQT7){iyRiobWOrHWV_GJXwv`t$_RNXqRjB#SH)J`Q=U;o0$dY0_27Q z$RHIUH;sTSKQ96sw}B@SP)h{JpA><;4XoA%KC&m=ek@?4VT8>`!{HJ`ce-oqfT)#Y zG_zt{LtImb)Rd<|@j4?y#CNGBW;`PT5d#3;r52f4oE0EAMGWT_dBSW`iX%=j3pmE` zD&UQ<=^Ru&E_v$f}|CN-MwPV)mi1yKN z&k6T7@C`De4~@XjZUetOC*%{AUGI*Bany_z!BPFZm06OIaB4as!7nEye4bD&o+lK` z<_Xc_#$WG!Ey%GRu`WS=5JJd8cZsZlW@~p?wG9Bmwok!=u6XmF za9=zA4x{0o59~SN-Uc%Fgx*9x%?T?{h5sFgo{D*{mJ_}|7*$_P$@mOOwek#!nw%k- z&xkn1IkU*qe%uR<%OW5Mi#%ceeL~O7P>ama&b-vxJEU{P4q7v=TjVJa)|f4CwtJt@ zPtyg|m(~l)4}YIxz;1prQLx-0;}3cxWlgs^^}S-R&EEeN*$DIOO*C~-R< zX5L0jdwPaSLA1l+>8@9c&tY)|*l^D)M{zuxv&jcn`mvbX`kWVkc6=QE0tPO=y`~{k QD*ylh07*qoM6N<$g1B&Oe*gdg From ffca55dfd7cc9b53522c2e5a2fa1ff67c9beadf2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 20 Jan 2022 14:40:44 -0800 Subject: [PATCH 0011/1003] Show link to /stable/ on /latest/ pages, refs #1608 --- docs/_templates/layout.html | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html index db16b428..785cdc7c 100644 --- 
a/docs/_templates/layout.html +++ b/docs/_templates/layout.html @@ -26,3 +26,36 @@ {% include "searchbox.html" %} {% endblock %} + +{% block footer %} +{{ super() }} + +{% endblock %} From d194db4204b732af57138e1fb0924ec77354dd58 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 20 Jan 2022 18:01:47 -0800 Subject: [PATCH 0012/1003] Output pip freeze to show installed packages, refs #1609 --- .github/workflows/test.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 78c289bb..2caf9447 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -24,6 +24,7 @@ jobs: - name: Install dependencies run: | pip install -e '.[test]' + pip freeze - name: Run tests run: | pytest -n auto -m "not serial" From 68cc1e2dbb0b841af7a7691ea6b4e7d31b09cc5e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Jan 2022 10:28:05 -0800 Subject: [PATCH 0013/1003] Move queries to top of database page, refs #1612 --- datasette/templates/database.html | 24 ++++++++++++++---------- tests/test_html.py | 30 +++++++++++++++++++++++++----- 2 files changed, 39 insertions(+), 15 deletions(-) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 2d182d1b..c1e39bd1 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -67,10 +67,23 @@ {% endif %} +{% if queries %} +

Queries

+ +{% endif %} + +{% if tables %} +

Tables

+{% endif %} + {% for table in tables %} {% if show_hidden or not table.hidden %}
-

{{ table.name }}{% if table.private %} 🔒{% endif %}{% if table.hidden %} (hidden){% endif %}

+

{{ table.name }}{% if table.private %} 🔒{% endif %}{% if table.hidden %} (hidden){% endif %}

{% for column in table.columns %}{{ column }}{% if not loop.last %}, {% endif %}{% endfor %}

{% if table.count is none %}Many rows{% else %}{{ "{:,}".format(table.count) }} row{% if table.count == 1 %}{% else %}s{% endif %}{% endif %}

@@ -90,15 +103,6 @@ {% endif %} -{% if queries %} -

Queries

- -{% endif %} - {% if allow_download %}

Download SQLite DB: {{ database }}.db {{ format_bytes(size) }}

{% endif %} diff --git a/tests/test_html.py b/tests/test_html.py index aa718857..1bbf335c 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -110,12 +110,32 @@ def test_database_page_redirects_with_url_hash(app_client_with_hash): def test_database_page(app_client): response = app_client.get("/fixtures") - assert ( - b"

pk, foreign_key_with_label, foreign_key_with_blank_label, " - b"foreign_key_with_no_label, foreign_key_compound_pk1, " - b"foreign_key_compound_pk2

" - ) in response.body soup = Soup(response.body, "html.parser") + # Should have a

+

{% else %}
{% if query %}{{ query.sql }}{% endif %}
{% endif %} diff --git a/tests/test_html.py b/tests/test_html.py index 409fec68..be21bd84 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -695,10 +695,8 @@ def test_query_error(app_client): response = app_client.get("/fixtures?sql=select+*+from+notatable") html = response.text assert '

no such table: notatable

' in html - assert ( - '' - in html - ) + assert '" in html assert "0 results" not in html From 09a41662e70b788469157bb58ed9ca4acdf2f904 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Aug 2022 09:10:48 -0700 Subject: [PATCH 0172/1003] Fix typo --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index c6f35d06..30bd75b7 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -874,7 +874,7 @@ canned_queries(datasette, database, actor) ``actor`` - dictionary or None The currently authenticated :ref:`actor `. -Ues this hook to return a dictionary of additional :ref:`canned query ` definitions for the specified database. The return value should be the same shape as the JSON described in the :ref:`canned query ` documentation. +Use this hook to return a dictionary of additional :ref:`canned query ` definitions for the specified database. The return value should be the same shape as the JSON described in the :ref:`canned query ` documentation. .. 
code-block:: python From 6c0ba7c00c2ae3ecbb5309efa59079cea1c850b3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Aug 2022 14:52:04 -0700 Subject: [PATCH 0173/1003] Improved CLI reference documentation, refs #1787 --- datasette/cli.py | 2 +- docs/changelog.rst | 2 +- docs/cli-reference.rst | 325 ++++++++++++++++++++++++++++++--------- docs/getting_started.rst | 50 ------ docs/index.rst | 2 +- docs/publish.rst | 2 + 6 files changed, 259 insertions(+), 124 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 8781747c..f2a03d53 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -282,7 +282,7 @@ def package( port, **extra_metadata, ): - """Package specified SQLite files into a new datasette Docker container""" + """Package SQLite files into a Datasette Docker container""" if not shutil.which("docker"): click.secho( ' The package command requires "docker" to be installed and configured ', diff --git a/docs/changelog.rst b/docs/changelog.rst index 1225c63f..f9dcc980 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -621,7 +621,7 @@ See also `Datasette 0.49: The annotated release notes `__ for conversations about the project that go beyond just bug reports and issues. - Datasette can now be installed on macOS using Homebrew! Run ``brew install simonw/datasette/datasette``. See :ref:`installation_homebrew`. (:issue:`335`) - Two new commands: ``datasette install name-of-plugin`` and ``datasette uninstall name-of-plugin``. These are equivalent to ``pip install`` and ``pip uninstall`` but automatically run in the same virtual environment as Datasette, so users don't have to figure out where that virtual environment is - useful for installations created using Homebrew or ``pipx``. See :ref:`plugins_installing`. (:issue:`925`) -- A new command-line option, ``datasette --get``, accepts a path to a URL within the Datasette instance. It will run that request through Datasette (without starting a web server) and print out the response. 
See :ref:`getting_started_datasette_get` for an example. (:issue:`926`) +- A new command-line option, ``datasette --get``, accepts a path to a URL within the Datasette instance. It will run that request through Datasette (without starting a web server) and print out the response. See :ref:`cli_datasette_get` for an example. (:issue:`926`) .. _v0_46: diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 415af13c..a1e56774 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -4,44 +4,34 @@ CLI reference =============== -This page lists the ``--help`` for every ``datasette`` CLI command. +The ``datasette`` CLI tool provides a number of commands. + +Running ``datasette`` without specifying a command runs the default command, ``datasette serve``. See :ref:`cli_help_serve___help` for the full list of options for that command. .. [[[cog from datasette import cli from click.testing import CliRunner import textwrap - commands = [ - ["--help"], - ["serve", "--help"], - ["serve", "--help-settings"], - ["plugins", "--help"], - ["publish", "--help"], - ["publish", "cloudrun", "--help"], - ["publish", "heroku", "--help"], - ["package", "--help"], - ["inspect", "--help"], - ["install", "--help"], - ["uninstall", "--help"], - ] - cog.out("\n") - for command in commands: - title = "datasette " + " ".join(command) - ref = "_cli_help_" + ("_".join(command).replace("-", "_")) - cog.out(".. {}:\n\n".format(ref)) - cog.out(title + "\n") - cog.out(("=" * len(title)) + "\n\n") + def help(args): + title = "datasette " + " ".join(args) cog.out("::\n\n") - result = CliRunner().invoke(cli.cli, command) + result = CliRunner().invoke(cli.cli, args) output = result.output.replace("Usage: cli ", "Usage: datasette ") cog.out(textwrap.indent(output, ' ')) cog.out("\n\n") .. ]]] +.. [[[end]]] .. _cli_help___help: datasette --help ================ +Running ``datasette --help`` shows a list of all of the available commands. + +.. [[[cog + help(["--help"]) +.. 
]]] :: Usage: datasette [OPTIONS] COMMAND [ARGS]... @@ -59,17 +49,34 @@ datasette --help serve* Serve up specified SQLite database files with a web UI inspect Generate JSON summary of provided database files install Install plugins and packages from PyPI into the same... - package Package specified SQLite files into a new datasette Docker... + package Package SQLite files into a Datasette Docker container plugins List currently installed plugins publish Publish specified SQLite database files to the internet along... uninstall Uninstall plugins and Python packages from the Datasette... +.. [[[end]]] + +Additional commands added by plugins that use the :ref:`plugin_hook_register_commands` hook will be listed here as well. + .. _cli_help_serve___help: -datasette serve --help -====================== +datasette serve +=============== +This command starts the Datasette web application running on your machine:: + + datasette serve mydatabase.db + +Or since this is the default command you can run this instead:: + + datasette mydatabase.db + +Once started you can access it at ``http://localhost:8001`` + +.. [[[cog + help(["serve", "--help"]) +.. ]]] :: Usage: datasette serve [OPTIONS] [FILES]... @@ -121,11 +128,75 @@ datasette serve --help --help Show this message and exit. +.. [[[end]]] + + +.. _cli_datasette_get: + +datasette --get +--------------- + +The ``--get`` option to ``datasette serve`` (or just ``datasette``) specifies the path to a page within Datasette and causes Datasette to output the content from that path without starting the web server. + +This means that all of Datasette's functionality can be accessed directly from the command-line. + +For example:: + + $ datasette --get '/-/versions.json' | jq . 
+ { + "python": { + "version": "3.8.5", + "full": "3.8.5 (default, Jul 21 2020, 10:48:26) \n[Clang 11.0.3 (clang-1103.0.32.62)]" + }, + "datasette": { + "version": "0.46+15.g222a84a.dirty" + }, + "asgi": "3.0", + "uvicorn": "0.11.8", + "sqlite": { + "version": "3.32.3", + "fts_versions": [ + "FTS5", + "FTS4", + "FTS3" + ], + "extensions": { + "json1": null + }, + "compile_options": [ + "COMPILER=clang-11.0.3", + "ENABLE_COLUMN_METADATA", + "ENABLE_FTS3", + "ENABLE_FTS3_PARENTHESIS", + "ENABLE_FTS4", + "ENABLE_FTS5", + "ENABLE_GEOPOLY", + "ENABLE_JSON1", + "ENABLE_PREUPDATE_HOOK", + "ENABLE_RTREE", + "ENABLE_SESSION", + "MAX_VARIABLE_NUMBER=250000", + "THREADSAFE=1" + ] + } + } + +The exit code will be 0 if the request succeeds and 1 if the request produced an HTTP status code other than 200 - e.g. a 404 or 500 error. + +This lets you use ``datasette --get /`` to run tests against a Datasette application in a continuous integration environment such as GitHub Actions. + .. _cli_help_serve___help_settings: datasette serve --help-settings -=============================== +------------------------------- +This command outputs all of the available Datasette :ref:`settings `. + +These can be passed to ``datasette serve`` using ``datasette serve --setting name value``. + +.. [[[cog + help(["--help-settings"]) +.. ]]] :: Settings: @@ -170,11 +241,18 @@ datasette serve --help-settings +.. [[[end]]] + .. _cli_help_plugins___help: -datasette plugins --help -======================== +datasette plugins +================= +Output JSON showing all currently installed plugins, their versions, whether they include static files or templates and which :ref:`plugin_hooks` they use. + +.. [[[cog + help(["plugins", "--help"]) +.. ]]] :: Usage: datasette plugins [OPTIONS] @@ -187,11 +265,110 @@ datasette plugins --help --help Show this message and exit. +.. [[[end]]] + +Example output: + +.. 
code-block:: json + + [ + { + "name": "datasette-geojson", + "static": false, + "templates": false, + "version": "0.3.1", + "hooks": [ + "register_output_renderer" + ] + }, + { + "name": "datasette-geojson-map", + "static": true, + "templates": false, + "version": "0.4.0", + "hooks": [ + "extra_body_script", + "extra_css_urls", + "extra_js_urls" + ] + }, + { + "name": "datasette-leaflet", + "static": true, + "templates": false, + "version": "0.2.2", + "hooks": [ + "extra_body_script", + "extra_template_vars" + ] + } + ] + + +.. _cli_help_install___help: + +datasette install +================= + +Install new Datasette plugins. This command works like ``pip install`` but ensures that your plugins will be installed into the same environment as Datasette. + +This command:: + + datasette install datasette-cluster-map + +Would install the `datasette-cluster-map `__ plugin. + +.. [[[cog + help(["install", "--help"]) +.. ]]] +:: + + Usage: datasette install [OPTIONS] PACKAGES... + + Install plugins and packages from PyPI into the same environment as Datasette + + Options: + -U, --upgrade Upgrade packages to latest version + --help Show this message and exit. + + +.. [[[end]]] + +.. _cli_help_uninstall___help: + +datasette uninstall +=================== + +Uninstall one or more plugins. + +.. [[[cog + help(["uninstall", "--help"]) +.. ]]] +:: + + Usage: datasette uninstall [OPTIONS] PACKAGES... + + Uninstall plugins and Python packages from the Datasette environment + + Options: + -y, --yes Don't ask for confirmation + --help Show this message and exit. + + +.. [[[end]]] + .. _cli_help_publish___help: -datasette publish --help -======================== +datasette publish +================= +Shows a list of available deployment targets for :ref:`publishing data ` with Datasette. + +Additional deployment targets can be added by plugins that use the :ref:`plugin_hook_publish_subcommand` hook. + +.. [[[cog + help(["publish", "--help"]) +.. 
]]] :: Usage: datasette publish [OPTIONS] COMMAND [ARGS]... @@ -207,11 +384,19 @@ datasette publish --help heroku Publish databases to Datasette running on Heroku +.. [[[end]]] + + .. _cli_help_publish_cloudrun___help: -datasette publish cloudrun --help -================================= +datasette publish cloudrun +========================== +See :ref:`publish_cloud_run`. + +.. [[[cog + help(["publish", "cloudrun", "--help"]) +.. ]]] :: Usage: datasette publish cloudrun [OPTIONS] [FILES]... @@ -256,11 +441,19 @@ datasette publish cloudrun --help --help Show this message and exit. +.. [[[end]]] + + .. _cli_help_publish_heroku___help: -datasette publish heroku --help -=============================== +datasette publish heroku +======================== +See :ref:`publish_heroku`. + +.. [[[cog + help(["publish", "heroku", "--help"]) +.. ]]] :: Usage: datasette publish heroku [OPTIONS] [FILES]... @@ -297,16 +490,23 @@ datasette publish heroku --help --help Show this message and exit. +.. [[[end]]] + .. _cli_help_package___help: -datasette package --help -======================== +datasette package +================= +Package SQLite files into a Datasette Docker container, see :ref:`cli_package`. + +.. [[[cog + help(["package", "--help"]) +.. ]]] :: Usage: datasette package [OPTIONS] FILES... - Package specified SQLite files into a new datasette Docker container + Package SQLite files into a Datasette Docker container Options: -t, --tag TEXT Name for the resulting Docker container, can @@ -335,11 +535,26 @@ datasette package --help --help Show this message and exit. +.. [[[end]]] + + .. _cli_help_inspect___help: -datasette inspect --help -======================== +datasette inspect +================= +Outputs JSON representing introspected data about one or more SQLite database files. 
+ +If you are opening an immutable database, you can pass this file to the ``--inspect-data`` option to improve Datasette's performance by allowing it to skip running row counts against the database when it first starts running:: + + datasette inspect mydatabase.db > inspect-data.json + datasette serve -i mydatabase.db --inspect-file inspect-data.json + +This performance optimization is used automatically by some of the ``datasette publish`` commands. You are unlikely to need to apply this optimization manually. + +.. [[[cog + help(["inspect", "--help"]) +.. ]]] :: Usage: datasette inspect [OPTIONS] [FILES]... @@ -355,36 +570,4 @@ datasette inspect --help --help Show this message and exit. -.. _cli_help_install___help: - -datasette install --help -======================== - -:: - - Usage: datasette install [OPTIONS] PACKAGES... - - Install plugins and packages from PyPI into the same environment as Datasette - - Options: - -U, --upgrade Upgrade packages to latest version - --help Show this message and exit. - - -.. _cli_help_uninstall___help: - -datasette uninstall --help -========================== - -:: - - Usage: datasette uninstall [OPTIONS] PACKAGES... - - Uninstall plugins and Python packages from the Datasette environment - - Options: - -y, --yes Don't ask for confirmation - --help Show this message and exit. - - .. [[[end]]] diff --git a/docs/getting_started.rst b/docs/getting_started.rst index a9eaa404..6515ef8d 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -138,53 +138,3 @@ JSON in a more convenient format: } ] } - -.. _getting_started_datasette_get: - -datasette --get ---------------- - -The ``--get`` option can specify the path to a page within Datasette and cause Datasette to output the content from that path without starting the web server. This means that all of Datasette's functionality can be accessed directly from the command-line. For example:: - - $ datasette --get '/-/versions.json' | jq . 
- { - "python": { - "version": "3.8.5", - "full": "3.8.5 (default, Jul 21 2020, 10:48:26) \n[Clang 11.0.3 (clang-1103.0.32.62)]" - }, - "datasette": { - "version": "0.46+15.g222a84a.dirty" - }, - "asgi": "3.0", - "uvicorn": "0.11.8", - "sqlite": { - "version": "3.32.3", - "fts_versions": [ - "FTS5", - "FTS4", - "FTS3" - ], - "extensions": { - "json1": null - }, - "compile_options": [ - "COMPILER=clang-11.0.3", - "ENABLE_COLUMN_METADATA", - "ENABLE_FTS3", - "ENABLE_FTS3_PARENTHESIS", - "ENABLE_FTS4", - "ENABLE_FTS5", - "ENABLE_GEOPOLY", - "ENABLE_JSON1", - "ENABLE_PREUPDATE_HOOK", - "ENABLE_RTREE", - "ENABLE_SESSION", - "MAX_VARIABLE_NUMBER=250000", - "THREADSAFE=1" - ] - } - } - -The exit code will be 0 if the request succeeds and 1 if the request produced an HTTP status code other than 200 - e.g. a 404 or 500 error. This means you can use ``datasette --get /`` to run tests against a Datasette application in a continuous integration environment such as GitHub Actions. - -Running ``datasette`` without specifying a command runs the default command, ``datasette serve``. See :ref:`cli_help_serve___help` for the full list of options for that command. diff --git a/docs/index.rst b/docs/index.rst index efe196b3..5a9cc7ed 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -40,6 +40,7 @@ Contents getting_started installation ecosystem + cli-reference pages publish deploying @@ -61,6 +62,5 @@ Contents plugin_hooks testing_plugins internals - cli-reference contributing changelog diff --git a/docs/publish.rst b/docs/publish.rst index 9c7c99cc..dd8566ed 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -56,6 +56,8 @@ Cloud Run provides a URL on the ``.run.app`` domain, but you can also point your See :ref:`cli_help_publish_cloudrun___help` for the full list of options for this command. +.. 
_publish_heroku: + Publishing to Heroku -------------------- From aff3df03d4fe0806ce432d1818f6643cdb2a854e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Aug 2022 14:55:08 -0700 Subject: [PATCH 0174/1003] Ignore ro which stands for read only Refs #1787 where it caused tests to break --- docs/codespell-ignore-words.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/codespell-ignore-words.txt b/docs/codespell-ignore-words.txt index a625cde5..d6744d05 100644 --- a/docs/codespell-ignore-words.txt +++ b/docs/codespell-ignore-words.txt @@ -1 +1 @@ -AddWordsToIgnoreHere +ro From 0d9d33955b503c88a2c712144d97f094baa5d46d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Aug 2022 16:06:12 -0700 Subject: [PATCH 0175/1003] Clarify you can publish multiple files, closes #1788 --- docs/publish.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/publish.rst b/docs/publish.rst index dd8566ed..d817ed31 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -31,7 +31,7 @@ Publishing to Google Cloud Run You will first need to install and configure the Google Cloud CLI tools by following `these instructions `__. -You can then publish a database to Google Cloud Run using the following command:: +You can then publish one or more SQLite database files to Google Cloud Run using the following command:: datasette publish cloudrun mydatabase.db --service=my-database @@ -63,7 +63,7 @@ Publishing to Heroku To publish your data using `Heroku `__, first create an account there and install and configure the `Heroku CLI tool `_. -You can publish a database to Heroku using the following command:: +You can publish one or more databases to Heroku using the following command:: datasette publish heroku mydatabase.db @@ -138,7 +138,7 @@ If a plugin has any :ref:`plugins_configuration_secret` you can use the ``--plug datasette package ================= -If you have docker installed (e.g. 
using `Docker for Mac `_) you can use the ``datasette package`` command to create a new Docker image in your local repository containing the datasette app bundled together with your selected SQLite databases:: +If you have docker installed (e.g. using `Docker for Mac `_) you can use the ``datasette package`` command to create a new Docker image in your local repository containing the datasette app bundled together with one or more SQLite databases:: datasette package mydatabase.db From 663ac431fe7202c85967568d82b2034f92b9aa43 Mon Sep 17 00:00:00 2001 From: Manuel Kaufmann Date: Sat, 20 Aug 2022 02:04:16 +0200 Subject: [PATCH 0176/1003] Use Read the Docs action v1 (#1778) Read the Docs repository was renamed from `readthedocs/readthedocs-preview` to `readthedocs/actions/`. Now, the `preview` action is under `readthedocs/actions/preview` and is tagged as `v1` --- .github/workflows/documentation-links.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/documentation-links.yml b/.github/workflows/documentation-links.yml index e7062a46..a54bd83a 100644 --- a/.github/workflows/documentation-links.yml +++ b/.github/workflows/documentation-links.yml @@ -11,6 +11,6 @@ jobs: documentation-links: runs-on: ubuntu-latest steps: - - uses: readthedocs/readthedocs-preview@main + - uses: readthedocs/actions/preview@v1 with: project-slug: "datasette" From 1d64c9a8dac45b9a3452acf8e76dfadea2b0bc49 Mon Sep 17 00:00:00 2001 From: Alex Garcia Date: Tue, 23 Aug 2022 11:34:30 -0700 Subject: [PATCH 0177/1003] Add new entrypoint option to --load-extensions. 
(#1789) Thanks, @asg017 --- .gitignore | 6 ++++ datasette/app.py | 8 ++++- datasette/cli.py | 4 ++- datasette/utils/__init__.py | 11 ++++++ tests/ext.c | 48 ++++++++++++++++++++++++++ tests/test_load_extensions.py | 65 +++++++++++++++++++++++++++++++++++ 6 files changed, 140 insertions(+), 2 deletions(-) create mode 100644 tests/ext.c create mode 100644 tests/test_load_extensions.py diff --git a/.gitignore b/.gitignore index 066009f0..277ff653 100644 --- a/.gitignore +++ b/.gitignore @@ -118,3 +118,9 @@ ENV/ .DS_Store node_modules .*.swp + +# In case someone compiled tests/ext.c for test_load_extensions, don't +# include it in source control. +tests/*.dylib +tests/*.so +tests/*.dll \ No newline at end of file diff --git a/datasette/app.py b/datasette/app.py index 1a9afc10..bb9232c9 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -559,7 +559,13 @@ class Datasette: if self.sqlite_extensions: conn.enable_load_extension(True) for extension in self.sqlite_extensions: - conn.execute("SELECT load_extension(?)", [extension]) + # "extension" is either a string path to the extension + # or a 2-item tuple that specifies which entrypoint to load. 
+ if isinstance(extension, tuple): + path, entrypoint = extension + conn.execute("SELECT load_extension(?, ?)", [path, entrypoint]) + else: + conn.execute("SELECT load_extension(?)", [extension]) if self.setting("cache_size_kb"): conn.execute(f"PRAGMA cache_size=-{self.setting('cache_size_kb')}") # pylint: disable=no-member diff --git a/datasette/cli.py b/datasette/cli.py index f2a03d53..6eb42712 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -21,6 +21,7 @@ from .app import ( pm, ) from .utils import ( + LoadExtension, StartupError, check_connection, find_spatialite, @@ -128,9 +129,10 @@ def sqlite_extensions(fn): return click.option( "sqlite_extensions", "--load-extension", + type=LoadExtension(), envvar="SQLITE_EXTENSIONS", multiple=True, - help="Path to a SQLite extension to load", + help="Path to a SQLite extension to load, and optional entrypoint", )(fn) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index d148cc2c..0fc87d51 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -833,6 +833,17 @@ class StaticMount(click.ParamType): self.fail(f"{value} is not a valid directory path", param, ctx) return path, dirpath +# The --load-extension parameter can optionally include a specific entrypoint. +# This is done by appending ":entrypoint_name" after supplying the path to the extension +class LoadExtension(click.ParamType): + name = "path:entrypoint?" + + def convert(self, value, param, ctx): + if ":" not in value: + return value + path, entrypoint = value.split(":", 1) + return path, entrypoint + def format_bytes(bytes): current = float(bytes) diff --git a/tests/ext.c b/tests/ext.c new file mode 100644 index 00000000..5fe970d9 --- /dev/null +++ b/tests/ext.c @@ -0,0 +1,48 @@ +/* +** This file implements a SQLite extension with multiple entrypoints. +** +** The default entrypoint, sqlite3_ext_init, has a single function "a". +** The 1st alternate entrypoint, sqlite3_ext_b_init, has a single function "b". 
+** The 2nd alternate entrypoint, sqlite3_ext_c_init, has a single function "c". +** +** Compiling instructions: +** https://www.sqlite.org/loadext.html#compiling_a_loadable_extension +** +*/ + +#include "sqlite3ext.h" + +SQLITE_EXTENSION_INIT1 + +// SQL function that returns back the value supplied during sqlite3_create_function() +static void func(sqlite3_context *context, int argc, sqlite3_value **argv) { + sqlite3_result_text(context, (char *) sqlite3_user_data(context), -1, SQLITE_STATIC); +} + + +// The default entrypoint, since it matches the "ext.dylib"/"ext.so" name +#ifdef _WIN32 +__declspec(dllexport) +#endif +int sqlite3_ext_init(sqlite3 *db, char **pzErrMsg, const sqlite3_api_routines *pApi) { + SQLITE_EXTENSION_INIT2(pApi); + return sqlite3_create_function(db, "a", 0, 0, "a", func, 0, 0); +} + +// Alternate entrypoint #1 +#ifdef _WIN32 +__declspec(dllexport) +#endif +int sqlite3_ext_b_init(sqlite3 *db, char **pzErrMsg, const sqlite3_api_routines *pApi) { + SQLITE_EXTENSION_INIT2(pApi); + return sqlite3_create_function(db, "b", 0, 0, "b", func, 0, 0); +} + +// Alternate entrypoint #2 +#ifdef _WIN32 +__declspec(dllexport) +#endif +int sqlite3_ext_c_init(sqlite3 *db, char **pzErrMsg, const sqlite3_api_routines *pApi) { + SQLITE_EXTENSION_INIT2(pApi); + return sqlite3_create_function(db, "c", 0, 0, "c", func, 0, 0); +} diff --git a/tests/test_load_extensions.py b/tests/test_load_extensions.py new file mode 100644 index 00000000..360bc8f3 --- /dev/null +++ b/tests/test_load_extensions.py @@ -0,0 +1,65 @@ +from datasette.app import Datasette +import pytest +from pathlib import Path + +# not necessarily a full path - the full compiled path looks like "ext.dylib" +# or another suffix, but sqlite will, under the hood, decide which file +# extension to use based on the operating system (apple=dylib, windows=dll etc) +# this resolves to "./ext", which is enough for SQLite to calculate the rest +COMPILED_EXTENSION_PATH = str(Path(__file__).parent / "ext") + +# 
See if ext.c has been compiled, based off the different possible suffixes. +def has_compiled_ext(): + for ext in ["dylib", "so", "dll"]: + path = Path(__file__).parent / f"ext.{ext}" + if path.is_file(): + return True + return False + + +@pytest.mark.asyncio +@pytest.mark.skipif(not has_compiled_ext(), reason="Requires compiled ext.c") +async def test_load_extension_default_entrypoint(): + + # The default entrypoint only loads a() and NOT b() or c(), so those + # should fail. + ds = Datasette(sqlite_extensions=[COMPILED_EXTENSION_PATH]) + + response = await ds.client.get("/_memory.json?sql=select+a()") + assert response.status_code == 200 + assert response.json()["rows"][0][0] == "a" + + response = await ds.client.get("/_memory.json?sql=select+b()") + assert response.status_code == 400 + assert response.json()["error"] == "no such function: b" + + response = await ds.client.get("/_memory.json?sql=select+c()") + assert response.status_code == 400 + assert response.json()["error"] == "no such function: c" + + +@pytest.mark.asyncio +@pytest.mark.skipif(not has_compiled_ext(), reason="Requires compiled ext.c") +async def test_load_extension_multiple_entrypoints(): + + # Load in the default entrypoint and the other 2 custom entrypoints, now + # all a(), b(), and c() should run successfully. 
+ ds = Datasette( + sqlite_extensions=[ + COMPILED_EXTENSION_PATH, + (COMPILED_EXTENSION_PATH, "sqlite3_ext_b_init"), + (COMPILED_EXTENSION_PATH, "sqlite3_ext_c_init"), + ] + ) + + response = await ds.client.get("/_memory.json?sql=select+a()") + assert response.status_code == 200 + assert response.json()["rows"][0][0] == "a" + + response = await ds.client.get("/_memory.json?sql=select+b()") + assert response.status_code == 200 + assert response.json()["rows"][0][0] == "b" + + response = await ds.client.get("/_memory.json?sql=select+c()") + assert response.status_code == 200 + assert response.json()["rows"][0][0] == "c" From fd1086c6867f3e3582b1eca456e4ea95f6cecf8b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 23 Aug 2022 11:35:41 -0700 Subject: [PATCH 0178/1003] Applied Black, refs #1789 --- datasette/app.py | 4 ++-- datasette/utils/__init__.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index bb9232c9..f2a6763a 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -559,8 +559,8 @@ class Datasette: if self.sqlite_extensions: conn.enable_load_extension(True) for extension in self.sqlite_extensions: - # "extension" is either a string path to the extension - # or a 2-item tuple that specifies which entrypoint to load. + # "extension" is either a string path to the extension + # or a 2-item tuple that specifies which entrypoint to load. if isinstance(extension, tuple): path, entrypoint = extension conn.execute("SELECT load_extension(?, ?)", [path, entrypoint]) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 0fc87d51..bbaa0510 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -833,6 +833,7 @@ class StaticMount(click.ParamType): self.fail(f"{value} is not a valid directory path", param, ctx) return path, dirpath + # The --load-extension parameter can optionally include a specific entrypoint. 
# This is done by appending ":entrypoint_name" after supplying the path to the extension class LoadExtension(click.ParamType): From 456dc155d491a009942ace71a4e1827cddc6b93d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 23 Aug 2022 11:40:36 -0700 Subject: [PATCH 0179/1003] Ran cog, refs #1789 --- docs/cli-reference.rst | 95 +++++++++++++++++++++++------------------- 1 file changed, 51 insertions(+), 44 deletions(-) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index a1e56774..f8419d58 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -84,48 +84,53 @@ Once started you can access it at ``http://localhost:8001`` Serve up specified SQLite database files with a web UI Options: - -i, --immutable PATH Database files to open in immutable mode - -h, --host TEXT Host for server. Defaults to 127.0.0.1 which means - only connections from the local machine will be - allowed. Use 0.0.0.0 to listen to all IPs and allow - access from other machines. - -p, --port INTEGER RANGE Port for server, defaults to 8001. Use -p 0 to - automatically assign an available port. - [0<=x<=65535] - --uds TEXT Bind to a Unix domain socket - --reload Automatically reload if code or metadata change - detected - useful for development - --cors Enable CORS by serving Access-Control-Allow-Origin: - * - --load-extension TEXT Path to a SQLite extension to load - --inspect-file TEXT Path to JSON file created using "datasette inspect" - -m, --metadata FILENAME Path to JSON/YAML file containing license/source - metadata - --template-dir DIRECTORY Path to directory containing custom templates - --plugins-dir DIRECTORY Path to directory containing custom plugins - --static MOUNT:DIRECTORY Serve static files from this directory at /MOUNT/... - --memory Make /_memory database available - --config CONFIG Deprecated: set config option using - configname:value. Use --setting instead. - --setting SETTING... 
Setting, see - docs.datasette.io/en/stable/settings.html - --secret TEXT Secret used for signing secure values, such as - signed cookies - --root Output URL that sets a cookie authenticating the - root user - --get TEXT Run an HTTP GET request against this path, print - results and exit - --version-note TEXT Additional note to show on /-/versions - --help-settings Show available settings - --pdb Launch debugger on any errors - -o, --open Open Datasette in your web browser - --create Create database files if they do not exist - --crossdb Enable cross-database joins using the /_memory - database - --nolock Ignore locking, open locked files in read-only mode - --ssl-keyfile TEXT SSL key file - --ssl-certfile TEXT SSL certificate file - --help Show this message and exit. + -i, --immutable PATH Database files to open in immutable mode + -h, --host TEXT Host for server. Defaults to 127.0.0.1 which + means only connections from the local machine + will be allowed. Use 0.0.0.0 to listen to all + IPs and allow access from other machines. + -p, --port INTEGER RANGE Port for server, defaults to 8001. Use -p 0 to + automatically assign an available port. + [0<=x<=65535] + --uds TEXT Bind to a Unix domain socket + --reload Automatically reload if code or metadata + change detected - useful for development + --cors Enable CORS by serving Access-Control-Allow- + Origin: * + --load-extension PATH:ENTRYPOINT? + Path to a SQLite extension to load, and + optional entrypoint + --inspect-file TEXT Path to JSON file created using "datasette + inspect" + -m, --metadata FILENAME Path to JSON/YAML file containing + license/source metadata + --template-dir DIRECTORY Path to directory containing custom templates + --plugins-dir DIRECTORY Path to directory containing custom plugins + --static MOUNT:DIRECTORY Serve static files from this directory at + /MOUNT/... + --memory Make /_memory database available + --config CONFIG Deprecated: set config option using + configname:value. 
Use --setting instead. + --setting SETTING... Setting, see + docs.datasette.io/en/stable/settings.html + --secret TEXT Secret used for signing secure values, such as + signed cookies + --root Output URL that sets a cookie authenticating + the root user + --get TEXT Run an HTTP GET request against this path, + print results and exit + --version-note TEXT Additional note to show on /-/versions + --help-settings Show available settings + --pdb Launch debugger on any errors + -o, --open Open Datasette in your web browser + --create Create database files if they do not exist + --crossdb Enable cross-database joins using the /_memory + database + --nolock Ignore locking, open locked files in read-only + mode + --ssl-keyfile TEXT SSL key file + --ssl-certfile TEXT SSL certificate file + --help Show this message and exit. .. [[[end]]] @@ -566,8 +571,10 @@ This performance optimization is used automatically by some of the ``datasette p Options: --inspect-file TEXT - --load-extension TEXT Path to a SQLite extension to load - --help Show this message and exit. + --load-extension PATH:ENTRYPOINT? + Path to a SQLite extension to load, and + optional entrypoint + --help Show this message and exit. .. 
[[[end]]] From ba35105eee2d3ba620e4f230028a02b2e2571df2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 23 Aug 2022 17:11:45 -0700 Subject: [PATCH 0180/1003] Test `--load-extension` in GitHub Actions (#1792) * Run the --load-extension test, refs #1789 * Ran cog, refs #1789 --- .github/workflows/test.yml | 3 +++ tests/test_api.py | 2 +- tests/test_html.py | 4 ++-- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 90b6555e..e38d5ee9 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -24,6 +24,9 @@ jobs: key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} restore-keys: | ${{ runner.os }}-pip- + - name: Build extension for --load-extension test + run: |- + (cd tests && gcc ext.c -fPIC -shared -o ext.so) - name: Install dependencies run: | pip install -e '.[test]' diff --git a/tests/test_api.py b/tests/test_api.py index 253c1718..f6db2f9d 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -36,7 +36,7 @@ def test_homepage(app_client): # 4 hidden FTS tables + no_primary_key (hidden in metadata) assert d["hidden_tables_count"] == 6 # 201 in no_primary_key, plus 6 in other hidden tables: - assert d["hidden_table_rows_sum"] == 207 + assert d["hidden_table_rows_sum"] == 207, response.json assert d["views_count"] == 4 diff --git a/tests/test_html.py b/tests/test_html.py index be21bd84..d6e969ad 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -115,7 +115,7 @@ def test_database_page(app_client): assert fragment in response.text # And views - views_ul = soup.find("h2", text="Views").find_next_sibling("ul") + views_ul = soup.find("h2", string="Views").find_next_sibling("ul") assert views_ul is not None assert [ ("/fixtures/paginated_view", "paginated_view"), @@ -128,7 +128,7 @@ def test_database_page(app_client): ] == sorted([(a["href"], a.text) for a in views_ul.find_all("a")]) # And a list of canned queries - queries_ul = soup.find("h2", 
text="Queries").find_next_sibling("ul") + queries_ul = soup.find("h2", string="Queries").find_next_sibling("ul") assert queries_ul is not None assert [ ("/fixtures/from_async_hook", "from_async_hook"), From 51030df1869b3b574dd3584d1563415776b9cd4e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 5 Sep 2022 11:35:40 -0700 Subject: [PATCH 0181/1003] Don't use upper bound dependencies any more See https://iscinumpy.dev/post/bound-version-constraints/ for the rationale behind this change. Closes #1800 --- setup.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/setup.py b/setup.py index a1c51d0b..b2e50b38 100644 --- a/setup.py +++ b/setup.py @@ -42,21 +42,21 @@ setup( include_package_data=True, python_requires=">=3.7", install_requires=[ - "asgiref>=3.2.10,<3.6.0", - "click>=7.1.1,<8.2.0", + "asgiref>=3.2.10", + "click>=7.1.1", "click-default-group-wheel>=1.2.2", - "Jinja2>=2.10.3,<3.1.0", - "hupper~=1.9", + "Jinja2>=2.10.3", + "hupper>=1.9", "httpx>=0.20", - "pint~=0.9", - "pluggy>=1.0,<1.1", - "uvicorn~=0.11", - "aiofiles>=0.4,<0.9", - "janus>=0.6.2,<1.1", + "pint>=0.9", + "pluggy>=1.0", + "uvicorn>=0.11", + "aiofiles>=0.4", + "janus>=0.6.2", "asgi-csrf>=0.9", - "PyYAML>=5.3,<7.0", - "mergedeep>=1.1.1,<1.4.0", - "itsdangerous>=1.1,<3.0", + "PyYAML>=5.3", + "mergedeep>=1.1.1", + "itsdangerous>=1.1", ], entry_points=""" [console_scripts] @@ -72,14 +72,14 @@ setup( "sphinx-copybutton", ], "test": [ - "pytest>=5.2.2,<7.2.0", - "pytest-xdist>=2.2.1,<2.6", - "pytest-asyncio>=0.17,<0.20", - "beautifulsoup4>=4.8.1,<4.12.0", + "pytest>=5.2.2", + "pytest-xdist>=2.2.1", + "pytest-asyncio>=0.17", + "beautifulsoup4>=4.8.1", "black==22.6.0", "blacken-docs==1.12.1", - "pytest-timeout>=1.4.2,<2.2", - "trustme>=0.7,<0.10", + "pytest-timeout>=1.4.2", + "trustme>=0.7", "cogapp>=3.3.0", ], "rich": ["rich"], From 294ecd45f7801971dbeef383d0c5456ee95ab839 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Sep 2022 11:51:51 -0700 Subject: [PATCH 0182/1003] Bump black from 22.6.0 to 22.8.0 (#1797) Bumps [black](https://github.com/psf/black) from 22.6.0 to 22.8.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/22.6.0...22.8.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index b2e50b38..92fa60d0 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ setup( "pytest-xdist>=2.2.1", "pytest-asyncio>=0.17", "beautifulsoup4>=4.8.1", - "black==22.6.0", + "black==22.8.0", "blacken-docs==1.12.1", "pytest-timeout>=1.4.2", "trustme>=0.7", From b91e17280c05bbb9cf97432081bdcea8665879f9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 5 Sep 2022 16:50:53 -0700 Subject: [PATCH 0183/1003] Run tests in serial, refs #1802 --- .github/workflows/test.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e38d5ee9..9c8c48ef 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -33,8 +33,7 @@ jobs: pip freeze - name: Run tests run: | - pytest -n auto -m "not serial" - pytest -m "serial" + pytest - name: Check if cog needs to be run run: | cog --check docs/*.rst From b2b901e8c4b939e50ee1117ffcd2881ed8a8e3bf Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 5 Sep 2022 17:05:23 -0700 Subject: [PATCH 0184/1003] Skip SpatiaLite test if no conn.enable_load_extension() Ran into this problem while working on #1802 --- tests/test_spatialite.py | 2 ++ tests/utils.py | 8 
++++++++ 2 files changed, 10 insertions(+) diff --git a/tests/test_spatialite.py b/tests/test_spatialite.py index 8b98c5d6..c07a30e8 100644 --- a/tests/test_spatialite.py +++ b/tests/test_spatialite.py @@ -1,5 +1,6 @@ from datasette.app import Datasette from datasette.utils import find_spatialite, SpatialiteNotFound, SPATIALITE_FUNCTIONS +from .utils import has_load_extension import pytest @@ -13,6 +14,7 @@ def has_spatialite(): @pytest.mark.asyncio @pytest.mark.skipif(not has_spatialite(), reason="Requires SpatiaLite") +@pytest.mark.skipif(not has_load_extension(), reason="Requires enable_load_extension") async def test_spatialite_version_info(): ds = Datasette(sqlite_extensions=["spatialite"]) response = await ds.client.get("/-/versions.json") diff --git a/tests/utils.py b/tests/utils.py index 972300db..191ead9b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,3 +1,6 @@ +from datasette.utils.sqlite import sqlite3 + + def assert_footer_links(soup): footer_links = soup.find("footer").findAll("a") assert 4 == len(footer_links) @@ -22,3 +25,8 @@ def inner_html(soup): # This includes the parent tag - so remove that inner_html = html.split(">", 1)[1].rsplit("<", 1)[0] return inner_html.strip() + + +def has_load_extension(): + conn = sqlite3.connect(":memory:") + return hasattr(conn, "enable_load_extension") From 1c29b925d300d1ee17047504473f2517767aa05b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 5 Sep 2022 17:10:52 -0700 Subject: [PATCH 0185/1003] Run tests in serial again Because this didn't fix the issue I'm seeing in #1802 Revert "Run tests in serial, refs #1802" This reverts commit b91e17280c05bbb9cf97432081bdcea8665879f9. 
--- .github/workflows/test.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9c8c48ef..e38d5ee9 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -33,7 +33,8 @@ jobs: pip freeze - name: Run tests run: | - pytest + pytest -n auto -m "not serial" + pytest -m "serial" - name: Check if cog needs to be run run: | cog --check docs/*.rst From 64288d827f7ff97f825e10f714da3f781ecf9345 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 5 Sep 2022 17:40:19 -0700 Subject: [PATCH 0186/1003] Workaround for test failure: RuntimeError: There is no current event loop (#1803) * Remove ensure_eventloop hack * Hack to recover from intermittent RuntimeError calling asyncio.Lock() --- datasette/app.py | 10 +++++++++- tests/test_cli.py | 27 ++++++++++----------------- 2 files changed, 19 insertions(+), 18 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f2a6763a..c6bbdaf0 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -231,7 +231,15 @@ class Datasette: self.inspect_data = inspect_data self.immutables = set(immutables or []) self.databases = collections.OrderedDict() - self._refresh_schemas_lock = asyncio.Lock() + try: + self._refresh_schemas_lock = asyncio.Lock() + except RuntimeError as rex: + # Workaround for intermittent test failure, see: + # https://github.com/simonw/datasette/issues/1802 + if "There is no current event loop in thread" in str(rex): + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + self._refresh_schemas_lock = asyncio.Lock() self.crossdb = crossdb self.nolock = nolock if memory or crossdb or not self.files: diff --git a/tests/test_cli.py b/tests/test_cli.py index d0f6e26c..f0d28037 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -22,13 +22,6 @@ from unittest import mock import urllib -@pytest.fixture -def ensure_eventloop(): - # Workaround for "Event loop is closed" error - if 
asyncio.get_event_loop().is_closed(): - asyncio.set_event_loop(asyncio.new_event_loop()) - - def test_inspect_cli(app_client): runner = CliRunner() result = runner.invoke(cli, ["inspect", "fixtures.db"]) @@ -72,7 +65,7 @@ def test_serve_with_inspect_file_prepopulates_table_counts_cache(): ), ) def test_spatialite_error_if_attempt_to_open_spatialite( - ensure_eventloop, spatialite_paths, should_suggest_load_extension + spatialite_paths, should_suggest_load_extension ): with mock.patch("datasette.utils.SPATIALITE_PATHS", spatialite_paths): runner = CliRunner() @@ -199,14 +192,14 @@ def test_version(): @pytest.mark.parametrize("invalid_port", ["-1", "0.5", "dog", "65536"]) -def test_serve_invalid_ports(ensure_eventloop, invalid_port): +def test_serve_invalid_ports(invalid_port): runner = CliRunner(mix_stderr=False) result = runner.invoke(cli, ["--port", invalid_port]) assert result.exit_code == 2 assert "Invalid value for '-p'" in result.stderr -def test_setting(ensure_eventloop): +def test_setting(): runner = CliRunner() result = runner.invoke( cli, ["--setting", "default_page_size", "5", "--get", "/-/settings.json"] @@ -215,14 +208,14 @@ def test_setting(ensure_eventloop): assert json.loads(result.output)["default_page_size"] == 5 -def test_setting_type_validation(ensure_eventloop): +def test_setting_type_validation(): runner = CliRunner(mix_stderr=False) result = runner.invoke(cli, ["--setting", "default_page_size", "dog"]) assert result.exit_code == 2 assert '"default_page_size" should be an integer' in result.stderr -def test_config_deprecated(ensure_eventloop): +def test_config_deprecated(): # The --config option should show a deprecation message runner = CliRunner(mix_stderr=False) result = runner.invoke( @@ -233,14 +226,14 @@ def test_config_deprecated(ensure_eventloop): assert "will be deprecated in" in result.stderr -def test_sql_errors_logged_to_stderr(ensure_eventloop): +def test_sql_errors_logged_to_stderr(): runner = CliRunner(mix_stderr=False) result = 
runner.invoke(cli, ["--get", "/_memory.json?sql=select+blah"]) assert result.exit_code == 1 assert "sql = 'select blah', params = {}: no such column: blah\n" in result.stderr -def test_serve_create(ensure_eventloop, tmpdir): +def test_serve_create(tmpdir): runner = CliRunner() db_path = tmpdir / "does_not_exist_yet.db" assert not db_path.exists() @@ -258,7 +251,7 @@ def test_serve_create(ensure_eventloop, tmpdir): assert db_path.exists() -def test_serve_duplicate_database_names(ensure_eventloop, tmpdir): +def test_serve_duplicate_database_names(tmpdir): "'datasette db.db nested/db.db' should attach two databases, /db and /db_2" runner = CliRunner() db_1_path = str(tmpdir / "db.db") @@ -273,7 +266,7 @@ def test_serve_duplicate_database_names(ensure_eventloop, tmpdir): assert {db["name"] for db in databases} == {"db", "db_2"} -def test_serve_deduplicate_same_database_path(ensure_eventloop, tmpdir): +def test_serve_deduplicate_same_database_path(tmpdir): "'datasette db.db db.db' should only attach one database, /db" runner = CliRunner() db_path = str(tmpdir / "db.db") @@ -287,7 +280,7 @@ def test_serve_deduplicate_same_database_path(ensure_eventloop, tmpdir): @pytest.mark.parametrize( "filename", ["test-database (1).sqlite", "database (1).sqlite"] ) -def test_weird_database_names(ensure_eventloop, tmpdir, filename): +def test_weird_database_names(tmpdir, filename): # https://github.com/simonw/datasette/issues/1181 runner = CliRunner() db_path = str(tmpdir / filename) From c9d1943aede436fa3413fd49bc56335cbda4ad07 Mon Sep 17 00:00:00 2001 From: Daniel Rech Date: Tue, 6 Sep 2022 02:45:41 +0200 Subject: [PATCH 0187/1003] Fix word break in facets by adding ul.tight-bullets li word-break: break-all (#1794) Thanks, @dmr --- datasette/static/app.css | 1 + 1 file changed, 1 insertion(+) diff --git a/datasette/static/app.css b/datasette/static/app.css index af3e14d5..712b9925 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -260,6 +260,7 @@ ul.bullets li 
{ ul.tight-bullets li { list-style-type: disc; margin-bottom: 0; + word-break: break-all; } a.not-underlined { text-decoration: none; From d80775a48d20917633792fdc9525f075d3bc2c7a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 5 Sep 2022 17:44:44 -0700 Subject: [PATCH 0188/1003] Raise error if it's not about loops, refs #1802 --- datasette/app.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/datasette/app.py b/datasette/app.py index c6bbdaf0..aeb81687 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -240,6 +240,8 @@ class Datasette: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) self._refresh_schemas_lock = asyncio.Lock() + else: + raise self.crossdb = crossdb self.nolock = nolock if memory or crossdb or not self.files: From 8430c3bc7dd22b173c1a8c6cd7180e3b31240cd1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 6 Sep 2022 08:59:19 -0700 Subject: [PATCH 0189/1003] table facet_size in metadata, refs #1804 --- datasette/facets.py | 14 +++++++++++--- tests/test_facets.py | 17 +++++++++++++++++ 2 files changed, 28 insertions(+), 3 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index b15a758c..e70d42df 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -102,11 +102,19 @@ class Facet: def get_facet_size(self): facet_size = self.ds.setting("default_facet_size") max_returned_rows = self.ds.setting("max_returned_rows") + table_facet_size = None + if self.table: + tables_metadata = self.ds.metadata("tables", database=self.database) or {} + table_metadata = tables_metadata.get(self.table) or {} + if table_metadata: + table_facet_size = table_metadata.get("facet_size") custom_facet_size = self.request.args.get("_facet_size") - if custom_facet_size == "max": - facet_size = max_returned_rows - elif custom_facet_size and custom_facet_size.isdigit(): + if custom_facet_size and custom_facet_size.isdigit(): facet_size = int(custom_facet_size) + elif table_facet_size: + facet_size = table_facet_size + if 
facet_size == "max": + facet_size = max_returned_rows return min(facet_size, max_returned_rows) async def suggest(self): diff --git a/tests/test_facets.py b/tests/test_facets.py index c28dc43c..cbee23b0 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -581,6 +581,23 @@ async def test_facet_size(): ) data5 = response5.json() assert len(data5["facet_results"]["city"]["results"]) == 20 + # Now try messing with facet_size in the table metadata + ds._metadata_local = { + "databases": { + "test_facet_size": {"tables": {"neighbourhoods": {"facet_size": 6}}} + } + } + response6 = await ds.client.get("/test_facet_size/neighbourhoods.json?_facet=city") + data6 = response6.json() + assert len(data6["facet_results"]["city"]["results"]) == 6 + # Setting it to max bumps it up to 50 again + ds._metadata_local["databases"]["test_facet_size"]["tables"]["neighbourhoods"][ + "facet_size" + ] = "max" + data7 = ( + await ds.client.get("/test_facet_size/neighbourhoods.json?_facet=city") + ).json() + assert len(data7["facet_results"]["city"]["results"]) == 20 def test_other_types_of_facet_in_metadata(): From 303c6c733d95a6133558ec1b468f5bea5827d0d2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 6 Sep 2022 11:05:00 -0700 Subject: [PATCH 0190/1003] Fix for incorrectly handled _facet_size=max, refs #1804 --- datasette/facets.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index e70d42df..7fb0c68b 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -109,12 +109,19 @@ class Facet: if table_metadata: table_facet_size = table_metadata.get("facet_size") custom_facet_size = self.request.args.get("_facet_size") - if custom_facet_size and custom_facet_size.isdigit(): - facet_size = int(custom_facet_size) - elif table_facet_size: - facet_size = table_facet_size - if facet_size == "max": - facet_size = max_returned_rows + if custom_facet_size: + if custom_facet_size == "max": + 
facet_size = max_returned_rows + elif custom_facet_size.isdigit(): + facet_size = int(custom_facet_size) + else: + # Invalid value, ignore it + custom_facet_size = None + if table_facet_size and not custom_facet_size: + if table_facet_size == "max": + facet_size = max_returned_rows + else: + facet_size = table_facet_size return min(facet_size, max_returned_rows) async def suggest(self): From 0a7815d2038255a0834c955066a2a16c01f707b2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 6 Sep 2022 11:06:49 -0700 Subject: [PATCH 0191/1003] Documentation for facet_size in metadata, closes #1804 --- docs/facets.rst | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/facets.rst b/docs/facets.rst index 2a2eb039..6c9d99bd 100644 --- a/docs/facets.rst +++ b/docs/facets.rst @@ -129,6 +129,22 @@ You can specify :ref:`array ` or :ref:`date ] } +You can change the default facet size (the number of results shown for each facet) for a table using ``facet_size``: + +.. code-block:: json + + { + "databases": { + "sf-trees": { + "tables": { + "Street_Tree_List": { + "facets": ["qLegalStatus"], + "facet_size": 10 + } + } + } + } + } Suggested facets ---------------- From d0476897e10249bb4867473722270d02491c2c1f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 6 Sep 2022 11:24:30 -0700 Subject: [PATCH 0192/1003] Fixed Sphinx warning about language = None --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 4ef6b768..8965974a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -71,7 +71,7 @@ release = "" # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. 
From ff9c87197dde8b09f9787ee878804cb6842ea5dc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 6 Sep 2022 11:26:21 -0700 Subject: [PATCH 0193/1003] Fixed Sphinx warnings on cli-reference page --- docs/cli-reference.rst | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index f8419d58..4a8465cb 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -14,7 +14,7 @@ Running ``datasette`` without specifying a command runs the default command, ``d import textwrap def help(args): title = "datasette " + " ".join(args) - cog.out("::\n\n") + cog.out("\n::\n\n") result = CliRunner().invoke(cli.cli, args) output = result.output.replace("Usage: cli ", "Usage: datasette ") cog.out(textwrap.indent(output, ' ')) @@ -32,6 +32,7 @@ Running ``datasette --help`` shows a list of all of the available commands. .. [[[cog help(["--help"]) .. ]]] + :: Usage: datasette [OPTIONS] COMMAND [ARGS]... @@ -77,6 +78,7 @@ Once started you can access it at ``http://localhost:8001`` .. [[[cog help(["serve", "--help"]) .. ]]] + :: Usage: datasette serve [OPTIONS] [FILES]... @@ -202,6 +204,7 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam .. [[[cog help(["--help-settings"]) .. ]]] + :: Settings: @@ -258,6 +261,7 @@ Output JSON showing all currently installed plugins, their versions, whether the .. [[[cog help(["plugins", "--help"]) .. 
]]] + :: Usage: datasette plugins [OPTIONS] @@ -326,6 +330,7 @@ Would install the `datasette-cluster-map Date: Tue, 6 Sep 2022 16:50:43 -0700 Subject: [PATCH 0194/1003] truncate_cells_html now affects URLs too, refs #1805 --- datasette/utils/__init__.py | 10 ++++++++++ datasette/views/database.py | 11 ++++++++--- datasette/views/table.py | 8 ++++++-- tests/fixtures.py | 9 +++++---- tests/test_api.py | 2 +- tests/test_table_api.py | 11 +++++++---- tests/test_table_html.py | 11 +++++++++++ tests/test_utils.py | 20 ++++++++++++++++++++ 8 files changed, 68 insertions(+), 14 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index bbaa0510..2bdea673 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1167,3 +1167,13 @@ def resolve_routes(routes, path): if match is not None: return match, view return None, None + + +def truncate_url(url, length): + if (not length) or (len(url) <= length): + return url + bits = url.rsplit(".", 1) + if len(bits) == 2 and 1 <= len(bits[1]) <= 4 and "/" not in bits[1]: + rest, ext = bits + return rest[: length - 1 - len(ext)] + "…." 
+ ext + return url[: length - 1] + "…" diff --git a/datasette/views/database.py b/datasette/views/database.py index 77632b9d..fc344245 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -20,6 +20,7 @@ from datasette.utils import ( path_with_format, path_with_removed_args, sqlite3, + truncate_url, InvalidSql, ) from datasette.utils.asgi import AsgiFileDownload, NotFound, Response, Forbidden @@ -371,6 +372,7 @@ class QueryView(DataView): async def extra_template(): display_rows = [] + truncate_cells = self.ds.setting("truncate_cells_html") for row in results.rows if results else []: display_row = [] for column, value in zip(results.columns, row): @@ -396,9 +398,12 @@ class QueryView(DataView): if value in ("", None): display_value = Markup(" ") elif is_url(str(display_value).strip()): - display_value = Markup( - '{url}'.format( - url=escape(value.strip()) + display_value = markupsafe.Markup( + '{truncated_url}'.format( + url=markupsafe.escape(value.strip()), + truncated_url=markupsafe.escape( + truncate_url(value.strip(), truncate_cells) + ), ) ) elif isinstance(display_value, bytes): diff --git a/datasette/views/table.py b/datasette/views/table.py index 49c30c9c..60c092f9 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -24,6 +24,7 @@ from datasette.utils import ( path_with_removed_args, path_with_replaced_args, to_css_class, + truncate_url, urlsafe_components, value_as_boolean, ) @@ -966,8 +967,11 @@ async def display_columns_and_rows( display_value = markupsafe.Markup(" ") elif is_url(str(value).strip()): display_value = markupsafe.Markup( - '{url}'.format( - url=markupsafe.escape(value.strip()) + '{truncated_url}'.format( + url=markupsafe.escape(value.strip()), + truncated_url=markupsafe.escape( + truncate_url(value.strip(), truncate_cells) + ), ) ) elif column in table_metadata.get("units", {}) and value != "": diff --git a/tests/fixtures.py b/tests/fixtures.py index c145ac78..82d8452e 100644 --- 
a/tests/fixtures.py +++ b/tests/fixtures.py @@ -598,23 +598,24 @@ CREATE TABLE roadside_attractions ( pk integer primary key, name text, address text, + url text, latitude real, longitude real ); INSERT INTO roadside_attractions VALUES ( - 1, "The Mystery Spot", "465 Mystery Spot Road, Santa Cruz, CA 95065", + 1, "The Mystery Spot", "465 Mystery Spot Road, Santa Cruz, CA 95065", "https://www.mysteryspot.com/", 37.0167, -122.0024 ); INSERT INTO roadside_attractions VALUES ( - 2, "Winchester Mystery House", "525 South Winchester Boulevard, San Jose, CA 95128", + 2, "Winchester Mystery House", "525 South Winchester Boulevard, San Jose, CA 95128", "https://winchestermysteryhouse.com/", 37.3184, -121.9511 ); INSERT INTO roadside_attractions VALUES ( - 3, "Burlingame Museum of PEZ Memorabilia", "214 California Drive, Burlingame, CA 94010", + 3, "Burlingame Museum of PEZ Memorabilia", "214 California Drive, Burlingame, CA 94010", null, 37.5793, -122.3442 ); INSERT INTO roadside_attractions VALUES ( - 4, "Bigfoot Discovery Museum", "5497 Highway 9, Felton, CA 95018", + 4, "Bigfoot Discovery Museum", "5497 Highway 9, Felton, CA 95018", "https://www.bigfootdiscoveryproject.com/", 37.0414, -122.0725 ); diff --git a/tests/test_api.py b/tests/test_api.py index f6db2f9d..7a2bf91f 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -339,7 +339,7 @@ def test_database_page(app_client): }, { "name": "roadside_attractions", - "columns": ["pk", "name", "address", "latitude", "longitude"], + "columns": ["pk", "name", "address", "url", "latitude", "longitude"], "primary_keys": ["pk"], "count": 4, "hidden": False, diff --git a/tests/test_table_api.py b/tests/test_table_api.py index e56a72b5..0db04434 100644 --- a/tests/test_table_api.py +++ b/tests/test_table_api.py @@ -615,11 +615,12 @@ def test_table_through(app_client): response = app_client.get( 
'/fixtures/roadside_attractions.json?_through={"table":"roadside_attraction_characteristics","column":"characteristic_id","value":"1"}' ) - assert [ + assert response.json["rows"] == [ [ 3, "Burlingame Museum of PEZ Memorabilia", "214 California Drive, Burlingame, CA 94010", + None, 37.5793, -122.3442, ], @@ -627,13 +628,15 @@ def test_table_through(app_client): 4, "Bigfoot Discovery Museum", "5497 Highway 9, Felton, CA 95018", + "https://www.bigfootdiscoveryproject.com/", 37.0414, -122.0725, ], - ] == response.json["rows"] + ] + assert ( - 'where roadside_attraction_characteristics.characteristic_id = "1"' - == response.json["human_description_en"] + response.json["human_description_en"] + == 'where roadside_attraction_characteristics.characteristic_id = "1"' ) diff --git a/tests/test_table_html.py b/tests/test_table_html.py index f3808ea3..8e37468f 100644 --- a/tests/test_table_html.py +++ b/tests/test_table_html.py @@ -69,6 +69,17 @@ def test_table_cell_truncation(): td.string for td in table.findAll("td", {"class": "col-neighborhood-b352a7"}) ] + # URLs should be truncated too + response2 = client.get("/fixtures/roadside_attractions") + assert response2.status == 200 + table = Soup(response2.body, "html.parser").find("table") + tds = table.findAll("td", {"class": "col-url"}) + assert [str(td) for td in tds] == [ + 'http…', + 'http…', + '\xa0', + 'http…', + ] def test_add_filter_redirects(app_client): diff --git a/tests/test_utils.py b/tests/test_utils.py index df788767..d71a612d 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -626,3 +626,23 @@ def test_tilde_encoding(original, expected): assert actual == expected # And test round-trip assert original == utils.tilde_decode(actual) + + +@pytest.mark.parametrize( + "url,length,expected", + ( + ("https://example.com/", 5, "http…"), + ("https://example.com/foo/bar", 15, "https://exampl…"), + ("https://example.com/foo/bar/baz.jpg", 30, "https://example.com/foo/ba….jpg"), + # Extensions longer than 4 
characters are not treated specially: + ("https://example.com/foo/bar/baz.jpeg2", 30, "https://example.com/foo/bar/b…"), + ( + "https://example.com/foo/bar/baz.jpeg2", + None, + "https://example.com/foo/bar/baz.jpeg2", + ), + ), +) +def test_truncate_url(url, length, expected): + actual = utils.truncate_url(url, length) + assert actual == expected From 5aa359b86907d11b3ee601510775a85a90224da8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 6 Sep 2022 16:58:30 -0700 Subject: [PATCH 0195/1003] Apply cell truncation on query page too, refs #1805 --- datasette/views/database.py | 7 ++++++- tests/test_html.py | 19 +++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index fc344245..affbc540 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -428,7 +428,12 @@ class QueryView(DataView): "" if len(value) == 1 else "s", ) ) - + else: + display_value = str(value) + if truncate_cells and len(display_value) > truncate_cells: + display_value = ( + display_value[:truncate_cells] + "\u2026" + ) display_row.append(display_value) display_rows.append(display_row) diff --git a/tests/test_html.py b/tests/test_html.py index d6e969ad..bf915247 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -186,6 +186,25 @@ def test_row_page_does_not_truncate(): ] +def test_query_page_truncates(): + with make_app_client(settings={"truncate_cells_html": 5}) as client: + response = client.get( + "/fixtures?" 
+ + urllib.parse.urlencode( + { + "sql": "select 'this is longer than 5' as a, 'https://example.com/' as b" + } + ) + ) + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + tds = table.findAll("td") + assert [str(td) for td in tds] == [ + 'this …', + 'http…', + ] + + @pytest.mark.parametrize( "path,expected_classes", [ From bf8d84af5422606597be893cedd375020cb2b369 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 6 Sep 2022 20:34:59 -0700 Subject: [PATCH 0196/1003] word-wrap: anywhere on links in cells, refs #1805 --- datasette/static/app.css | 1 + 1 file changed, 1 insertion(+) diff --git a/datasette/static/app.css b/datasette/static/app.css index 712b9925..08b724f6 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -446,6 +446,7 @@ th { } table a:link { text-decoration: none; + word-wrap: anywhere; } .rows-and-columns td:before { display: block; From fb7e70d5e72a951efe4b29ad999d8915c032d021 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Sep 2022 09:19:20 -0700 Subject: [PATCH 0197/1003] Database(is_mutable=) now defaults to True, closes #1808 Refs https://github.com/simonw/datasette-upload-dbs/issues/6 --- datasette/database.py | 3 +-- docs/internals.rst | 9 +++++---- tests/test_internals_database.py | 1 + tests/test_internals_datasette.py | 2 +- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index fa558045..44467370 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -28,7 +28,7 @@ AttachedDatabase = namedtuple("AttachedDatabase", ("seq", "name", "file")) class Database: def __init__( - self, ds, path=None, is_mutable=False, is_memory=False, memory_name=None + self, ds, path=None, is_mutable=True, is_memory=False, memory_name=None ): self.name = None self.route = None @@ -39,7 +39,6 @@ class Database: self.memory_name = memory_name if memory_name is not None: self.is_memory = True - self.is_mutable = True 
self.hash = None self.cached_size = None self._cached_table_counts = None diff --git a/docs/internals.rst b/docs/internals.rst index 20797e98..adeec1d8 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -426,12 +426,13 @@ The ``db`` parameter should be an instance of the ``datasette.database.Database` Database( datasette, path="path/to/my-new-database.db", - is_mutable=True, ) ) This will add a mutable database and serve it at ``/my-new-database``. +Use ``is_mutable=False`` to add an immutable database. + ``.add_database()`` returns the Database instance, with its name set as the ``database.name`` attribute. Any time you are working with a newly added database you should use the return value of ``.add_database()``, for example: .. code-block:: python @@ -671,8 +672,8 @@ Instances of the ``Database`` class can be used to execute queries against attac .. _database_constructor: -Database(ds, path=None, is_mutable=False, is_memory=False, memory_name=None) ----------------------------------------------------------------------------- +Database(ds, path=None, is_mutable=True, is_memory=False, memory_name=None) +--------------------------------------------------------------------------- The ``Database()`` constructor can be used by plugins, in conjunction with :ref:`datasette_add_database`, to create and register new databases. @@ -685,7 +686,7 @@ The arguments are as follows: Path to a SQLite database file on disk. ``is_mutable`` - boolean - Set this to ``True`` if it is possible that updates will be made to that database - otherwise Datasette will open it in immutable mode and any changes could cause undesired behavior. + Set this to ``False`` to cause Datasette to open the file in immutable mode. ``is_memory`` - boolean Use this to create non-shared memory connections. 
diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 551f67e1..9e81c1d6 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -499,6 +499,7 @@ def test_mtime_ns_is_none_for_memory(app_client): def test_is_mutable(app_client): + assert Database(app_client.ds, is_memory=True).is_mutable is True assert Database(app_client.ds, is_memory=True, is_mutable=True).is_mutable is True assert Database(app_client.ds, is_memory=True, is_mutable=False).is_mutable is False diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index 1dc14cab..249920fe 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -58,7 +58,7 @@ async def test_datasette_constructor(): "route": "_memory", "path": None, "size": 0, - "is_mutable": False, + "is_mutable": True, "is_memory": True, "hash": None, } From 610425460b519e9c16d386cb81aa081c9d730ef0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 10 Sep 2022 14:24:26 -0700 Subject: [PATCH 0198/1003] Add --nolock to the README Chrome demo Refs #1744 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1af20129..af95b85e 100644 --- a/README.md +++ b/README.md @@ -48,7 +48,7 @@ This will start a web server on port 8001 - visit http://localhost:8001/ to acce Use Chrome on OS X? 
You can run datasette against your browser history like so: - datasette ~/Library/Application\ Support/Google/Chrome/Default/History + datasette ~/Library/Application\ Support/Google/Chrome/Default/History --nolock Now visiting http://localhost:8001/History/downloads will show you a web interface to browse your downloads data: From b40872f5e5ae5dad331c58f75451e2d206565196 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 14 Sep 2022 14:31:54 -0700 Subject: [PATCH 0199/1003] prepare_jinja2_environment(datasette) argument, refs #1809 --- datasette/app.py | 2 +- datasette/hookspecs.py | 2 +- docs/plugin_hooks.rst | 9 +++++++-- tests/plugins/my_plugin.py | 3 ++- tests/test_plugins.py | 5 +++-- 5 files changed, 14 insertions(+), 7 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index aeb81687..db686670 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -345,7 +345,7 @@ class Datasette: self.jinja_env.filters["escape_sqlite"] = escape_sqlite self.jinja_env.filters["to_css_class"] = to_css_class # pylint: disable=no-member - pm.hook.prepare_jinja2_environment(env=self.jinja_env) + pm.hook.prepare_jinja2_environment(env=self.jinja_env, datasette=self) self._register_renderers() self._permission_checks = collections.deque(maxlen=200) diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index a5fb536f..34e19664 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -26,7 +26,7 @@ def prepare_connection(conn, database, datasette): @hookspec -def prepare_jinja2_environment(env): +def prepare_jinja2_environment(env, datasette): """Modify Jinja2 template environment e.g. 
register custom template tags""" diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 30bd75b7..62ec5c90 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -61,12 +61,15 @@ Examples: `datasette-jellyfish `_, for @@ -85,6 +88,8 @@ You can now use this filter in your custom templates like so:: Table name: {{ table|uppercase }} +Examples: `datasette-edit-templates `_ + .. _plugin_hook_extra_template_vars: extra_template_vars(template, database, table, columns, view_name, request, datasette) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 53613b7d..d49a7a34 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -142,8 +142,9 @@ def extra_template_vars( @hookimpl -def prepare_jinja2_environment(env): +def prepare_jinja2_environment(env, datasette): env.filters["format_numeric"] = lambda s: f"{float(s):,.0f}" + env.filters["to_hello"] = lambda s: datasette._HELLO @hookimpl diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 948a40b8..590d88f6 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -545,11 +545,12 @@ def test_hook_register_output_renderer_can_render(app_client): @pytest.mark.asyncio async def test_hook_prepare_jinja2_environment(app_client): + app_client.ds._HELLO = "HI" template = app_client.ds.jinja_env.from_string( - "Hello there, {{ a|format_numeric }}", {"a": 3412341} + "Hello there, {{ a|format_numeric }}, {{ a|to_hello }}", {"a": 3412341} ) rendered = await app_client.ds.render_template(template) - assert "Hello there, 3,412,341" == rendered + assert "Hello there, 3,412,341, HI" == rendered def test_hook_publish_subcommand(): From 2ebcffe2226ece2a5a86722790d486a480338632 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Sep 2022 12:50:52 -0700 Subject: [PATCH 0200/1003] Bump furo from 2022.6.21 to 2022.9.15 (#1812) Bumps [furo](https://github.com/pradyunsg/furo) from 2022.6.21 to 
2022.9.15. - [Release notes](https://github.com/pradyunsg/furo/releases) - [Changelog](https://github.com/pradyunsg/furo/blob/main/docs/changelog.md) - [Commits](https://github.com/pradyunsg/furo/compare/2022.06.21...2022.09.15) --- updated-dependencies: - dependency-name: furo dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 92fa60d0..afcba1f0 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,7 @@ setup( setup_requires=["pytest-runner"], extras_require={ "docs": [ - "furo==2022.6.21", + "furo==2022.9.15", "sphinx-autobuild", "codespell", "blacken-docs", From ddc999ad1296e8c69cffede3e367dda059b8adad Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 16 Sep 2022 20:38:15 -0700 Subject: [PATCH 0201/1003] Async support for prepare_jinja2_environment, closes #1809 --- datasette/app.py | 22 ++++++++++++++--- datasette/utils/testing.py | 1 + docs/plugin_hooks.rst | 2 ++ docs/testing_plugins.rst | 30 ++++++++++++++++++++++++ tests/fixtures.py | 1 + tests/plugins/my_plugin.py | 10 ++++++-- tests/plugins/my_plugin_2.py | 6 +++++ tests/test_internals_datasette_client.py | 6 +++-- tests/test_plugins.py | 6 +++-- tests/test_routes.py | 1 + 10 files changed, 76 insertions(+), 9 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index db686670..ea3e7b43 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -208,6 +208,7 @@ class Datasette: crossdb=False, nolock=False, ): + self._startup_invoked = False assert config_dir is None or isinstance( config_dir, Path ), "config_dir= should be a pathlib.Path" @@ -344,9 +345,6 @@ class Datasette: self.jinja_env.filters["quote_plus"] = urllib.parse.quote_plus self.jinja_env.filters["escape_sqlite"] = escape_sqlite 
self.jinja_env.filters["to_css_class"] = to_css_class - # pylint: disable=no-member - pm.hook.prepare_jinja2_environment(env=self.jinja_env, datasette=self) - self._register_renderers() self._permission_checks = collections.deque(maxlen=200) self._root_token = secrets.token_hex(32) @@ -389,8 +387,16 @@ class Datasette: return Urls(self) async def invoke_startup(self): + # This must be called for Datasette to be in a usable state + if self._startup_invoked: + return + for hook in pm.hook.prepare_jinja2_environment( + env=self.jinja_env, datasette=self + ): + await await_me_maybe(hook) for hook in pm.hook.startup(datasette=self): await await_me_maybe(hook) + self._startup_invoked = True def sign(self, value, namespace="default"): return URLSafeSerializer(self._secret, namespace).dumps(value) @@ -933,6 +939,8 @@ class Datasette: async def render_template( self, templates, context=None, request=None, view_name=None ): + if not self._startup_invoked: + raise Exception("render_template() called before await ds.invoke_startup()") context = context or {} if isinstance(templates, Template): template = templates @@ -1495,34 +1503,42 @@ class DatasetteClient: return path async def get(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.get(self._fix(path), **kwargs) async def options(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.options(self._fix(path), **kwargs) async def head(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.head(self._fix(path), **kwargs) async def post(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.post(self._fix(path), **kwargs) async def put(self, path, **kwargs): + await self.ds.invoke_startup() async with 
httpx.AsyncClient(app=self.app) as client: return await client.put(self._fix(path), **kwargs) async def patch(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.patch(self._fix(path), **kwargs) async def delete(self, path, **kwargs): + await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.delete(self._fix(path), **kwargs) async def request(self, method, path, **kwargs): + await self.ds.invoke_startup() avoid_path_rewrites = kwargs.pop("avoid_path_rewrites", None) async with httpx.AsyncClient(app=self.app) as client: return await client.request( diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index 640c94e6..b28fc575 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -147,6 +147,7 @@ class TestClient: content_type=None, if_none_match=None, ): + await self.ds.invoke_startup() headers = headers or {} if content_type: headers["content-type"] = content_type diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 62ec5c90..f208e727 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -88,6 +88,8 @@ You can now use this filter in your custom templates like so:: Table name: {{ table|uppercase }} +This function can return an awaitable function if it needs to run any async code. + Examples: `datasette-edit-templates `_ .. _plugin_hook_extra_template_vars: diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 992b4b0e..41f50e56 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -52,6 +52,36 @@ Then run the tests using pytest like so:: pytest +.. _testing_plugins_datasette_test_instance: + +Setting up a Datasette test instance +------------------------------------ + +The above example shows the easiest way to start writing tests against a Datasette instance: + +.. 
code-block:: python + + from datasette.app import Datasette + import pytest + + + @pytest.mark.asyncio + async def test_plugin_is_installed(): + datasette = Datasette(memory=True) + response = await datasette.client.get("/-/plugins.json") + assert response.status_code == 200 + +Creating a ``Datasette()`` instance like this is a useful shortcut in tests, but there is one detail you need to be aware of. It's important to ensure that the async method ``.invoke_startup()`` is called on that instance. You can do that like this: + +.. code-block:: python + + datasette = Datasette(memory=True) + await datasette.invoke_startup() + +This method registers any :ref:`plugin_hook_startup` or :ref:`plugin_hook_prepare_jinja2_environment` plugins that might themselves need to make async calls. + +If you are using ``await datasette.client.get()`` and similar methods then you don't need to worry about this - those method calls ensure that ``.invoke_startup()`` has been called for you. + .. _testing_plugins_pdb: Using pdb for errors thrown inside Datasette diff --git a/tests/fixtures.py b/tests/fixtures.py index 82d8452e..5a875cd2 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -71,6 +71,7 @@ EXPECTED_PLUGINS = [ "handle_exception", "menu_links", "permission_allowed", + "prepare_jinja2_environment", "register_routes", "render_cell", "startup", diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index d49a7a34..1a41de38 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -143,8 +143,14 @@ def extra_template_vars( @hookimpl def prepare_jinja2_environment(env, datasette): - env.filters["format_numeric"] = lambda s: f"{float(s):,.0f}" - env.filters["to_hello"] = lambda s: datasette._HELLO + async def select_times_three(s): + db = datasette.get_database() + return (await db.execute("select 3 * ?", [int(s)])).first()[0] + + async def inner(): + env.filters["select_times_three"] = select_times_three + + return inner @hookimpl diff --git 
a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index 4df02343..cee80703 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -126,6 +126,12 @@ def permission_allowed(datasette, actor, action): return inner +@hookimpl +def prepare_jinja2_environment(env, datasette): + env.filters["format_numeric"] = lambda s: f"{float(s):,.0f}" + env.filters["to_hello"] = lambda s: datasette._HELLO + + @hookimpl def startup(datasette): async def inner(): diff --git a/tests/test_internals_datasette_client.py b/tests/test_internals_datasette_client.py index 8c5b5bd3..497bf475 100644 --- a/tests/test_internals_datasette_client.py +++ b/tests/test_internals_datasette_client.py @@ -1,10 +1,12 @@ from .fixtures import app_client import httpx import pytest +import pytest_asyncio -@pytest.fixture -def datasette(app_client): +@pytest_asyncio.fixture +async def datasette(app_client): + await app_client.ds.invoke_startup() return app_client.ds diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 590d88f6..0ae3abf3 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -546,11 +546,13 @@ def test_hook_register_output_renderer_can_render(app_client): @pytest.mark.asyncio async def test_hook_prepare_jinja2_environment(app_client): app_client.ds._HELLO = "HI" + await app_client.ds.invoke_startup() template = app_client.ds.jinja_env.from_string( - "Hello there, {{ a|format_numeric }}, {{ a|to_hello }}", {"a": 3412341} + "Hello there, {{ a|format_numeric }}, {{ a|to_hello }}, {{ b|select_times_three }}", + {"a": 3412341, "b": 5}, ) rendered = await app_client.ds.render_template(template) - assert "Hello there, 3,412,341, HI" == rendered + assert "Hello there, 3,412,341, HI, 15" == rendered def test_hook_publish_subcommand(): diff --git a/tests/test_routes.py b/tests/test_routes.py index 5ae55d21..d467abe1 100644 --- a/tests/test_routes.py +++ b/tests/test_routes.py @@ -59,6 +59,7 @@ def test_routes(routes, path, expected_class, 
expected_matches): @pytest_asyncio.fixture async def ds_with_route(): ds = Datasette() + await ds.invoke_startup() ds.remove_database("_memory") db = Database(ds, is_memory=True, memory_name="route-name-db") ds.add_database(db, name="original-name", route="custom-route-name") From df851c117db031dec50dd4ef1ca34745920ac77a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 19 Sep 2022 16:46:39 -0700 Subject: [PATCH 0202/1003] Validate settings.json keys on startup, closes #1816 Refs #1814 --- datasette/app.py | 4 ++++ tests/test_config_dir.py | 20 ++++++++++++++++++-- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index ea3e7b43..8873ce28 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -292,6 +292,10 @@ class Datasette: raise StartupError("config.json should be renamed to settings.json") if config_dir and (config_dir / "settings.json").exists() and not settings: settings = json.loads((config_dir / "settings.json").read_text()) + # Validate those settings + for key in settings: + if key not in DEFAULT_SETTINGS: + raise StartupError("Invalid setting '{key}' in settings.json") self._settings = dict(DEFAULT_SETTINGS, **(settings or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index fe927c42..e365515b 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -5,6 +5,7 @@ import pytest from datasette.app import Datasette from datasette.cli import cli from datasette.utils.sqlite import sqlite3 +from datasette.utils import StartupError from .fixtures import TestClient as _TestClient from click.testing import CliRunner @@ -27,9 +28,8 @@ body { margin-top: 3em} @pytest.fixture(scope="session") -def config_dir_client(tmp_path_factory): +def config_dir(tmp_path_factory): config_dir = tmp_path_factory.mktemp("config-dir") - plugins_dir = config_dir / "plugins" 
plugins_dir.mkdir() (plugins_dir / "hooray.py").write_text(PLUGIN, "utf-8") @@ -77,7 +77,23 @@ def config_dir_client(tmp_path_factory): ), "utf-8", ) + return config_dir + +def test_invalid_settings(config_dir): + previous = (config_dir / "settings.json").read_text("utf-8") + (config_dir / "settings.json").write_text( + json.dumps({"invalid": "invalid-setting"}), "utf-8" + ) + try: + with pytest.raises(StartupError): + ds = Datasette([], config_dir=config_dir) + finally: + (config_dir / "settings.json").write_text(previous, "utf-8") + + +@pytest.fixture(scope="session") +def config_dir_client(config_dir): ds = Datasette([], config_dir=config_dir) yield _TestClient(ds) From cb1e093fd361b758120aefc1a444df02462389a3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 19 Sep 2022 18:15:40 -0700 Subject: [PATCH 0203/1003] Fixed error message, closes #1816 --- datasette/app.py | 4 +++- tests/test_config_dir.py | 3 ++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 8873ce28..03d1dacc 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -295,7 +295,9 @@ class Datasette: # Validate those settings for key in settings: if key not in DEFAULT_SETTINGS: - raise StartupError("Invalid setting '{key}' in settings.json") + raise StartupError( + "Invalid setting '{}' in settings.json".format(key) + ) self._settings = dict(DEFAULT_SETTINGS, **(settings or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index e365515b..f5ecf0d6 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -86,8 +86,9 @@ def test_invalid_settings(config_dir): json.dumps({"invalid": "invalid-setting"}), "utf-8" ) try: - with pytest.raises(StartupError): + with pytest.raises(StartupError) as ex: ds = Datasette([], config_dir=config_dir) + assert ex.value.args[0] == "Invalid setting 'invalid' in 
settings.json" finally: (config_dir / "settings.json").write_text(previous, "utf-8") From 212137a90b4291db9605e039f198564dae59c5d0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 26 Sep 2022 14:14:25 -0700 Subject: [PATCH 0204/1003] Release 0.63a0 Refs #1786, #1787, #1789, #1794, #1800, #1804, #1805, #1808, #1809, #1816 --- datasette/version.py | 2 +- docs/changelog.rst | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 0453346c..e5ad585f 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.62" +__version__ = "0.63a0" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index f9dcc980..bd93f4cb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,23 @@ Changelog ========= +.. _v0_63a0: + +0.63a0 (2022-09-26) +------------------- + +- The :ref:`plugin_hook_prepare_jinja2_environment` plugin hook now accepts an optional ``datasette`` argument. Hook implementations can also now return an ``async`` function which will be awaited automatically. (:issue:`1809`) +- ``--load-extension`` option now supports entrypoints. Thanks, Alex Garcia. (`#1789 `__) +- New tutorial: `Cleaning data with sqlite-utils and Datasette `__. +- Facet size can now be set per-table with the new ``facet_size`` table metadata option. (:issue:`1804`) +- ``truncate_cells_html`` setting now also affects long URLs in columns. (:issue:`1805`) +- ``Database(is_mutable=)`` now defaults to ``True``. (:issue:`1808`) +- Non-JavaScript textarea now increases height to fit the SQL query. (:issue:`1786`) +- More detailed command descriptions on the :ref:`CLI reference ` page. (:issue:`1787`) +- Datasette no longer enforces upper bounds on its dependencies. (:issue:`1800`) +- Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. 
(`#1794 `__) +- The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. (:issue:`1816`) + .. _v0_62: 0.62 (2022-08-14) From 5f9f567acbc58c9fcd88af440e68034510fb5d2b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 26 Sep 2022 16:06:01 -0700 Subject: [PATCH 0205/1003] Show SQL query when reporting time limit error, closes #1819 --- datasette/database.py | 5 ++++- datasette/views/base.py | 21 +++++++++++++-------- tests/test_api.py | 12 +++++++++++- tests/test_html.py | 10 +++++++--- 4 files changed, 35 insertions(+), 13 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 44467370..46094bd7 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -476,7 +476,10 @@ class WriteTask: class QueryInterrupted(Exception): - pass + def __init__(self, e, sql, params): + self.e = e + self.sql = sql + self.params = params class MultipleValues(Exception): diff --git a/datasette/views/base.py b/datasette/views/base.py index 221e1882..67aa3a42 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -1,10 +1,12 @@ import asyncio import csv import hashlib -import re import sys +import textwrap import time import urllib +from markupsafe import escape + import pint @@ -24,11 +26,9 @@ from datasette.utils import ( path_with_removed_args, path_with_format, sqlite3, - HASH_LENGTH, ) from datasette.utils.asgi import ( AsgiStream, - Forbidden, NotFound, Response, BadRequest, @@ -371,13 +371,18 @@ class DataView(BaseView): ) = response_or_template_contexts else: data, extra_template_data, templates = response_or_template_contexts - except QueryInterrupted: + except QueryInterrupted as ex: raise DatasetteError( - """ - SQL query took too long. The time limit is controlled by the + textwrap.dedent( + """ +

SQL query took too long. The time limit is controlled by the sql_time_limit_ms - configuration option. - """, + configuration option.

+
{}
+ """.format( + escape(ex.sql) + ) + ).strip(), title="SQL Interrupted", status=400, message_is_html=True, diff --git a/tests/test_api.py b/tests/test_api.py index 7a2bf91f..ad74d16e 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -656,7 +656,17 @@ def test_custom_sql(app_client): def test_sql_time_limit(app_client_shorter_time_limit): response = app_client_shorter_time_limit.get("/fixtures.json?sql=select+sleep(0.5)") assert 400 == response.status - assert "SQL Interrupted" == response.json["title"] + assert response.json == { + "ok": False, + "error": ( + "

SQL query took too long. The time limit is controlled by the\n" + 'sql_time_limit_ms\n' + "configuration option.

\n" + "
select sleep(0.5)
" + ), + "status": 400, + "title": "SQL Interrupted", + } def test_custom_sql_time_limit(app_client): diff --git a/tests/test_html.py b/tests/test_html.py index bf915247..a99b0b6c 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -168,10 +168,14 @@ def test_disallowed_custom_sql_pragma(app_client): def test_sql_time_limit(app_client_shorter_time_limit): response = app_client_shorter_time_limit.get("/fixtures?sql=select+sleep(0.5)") assert 400 == response.status - expected_html_fragment = """ + expected_html_fragments = [ + """ sql_time_limit_ms - """.strip() - assert expected_html_fragment in response.text + """.strip(), + "
select sleep(0.5)
", + ] + for expected_html_fragment in expected_html_fragments: + assert expected_html_fragment in response.text def test_row_page_does_not_truncate(): From 7fb4ea4e39a15e1f7d3202949794d98af1cfa272 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 27 Sep 2022 21:06:40 -0700 Subject: [PATCH 0206/1003] Update note about render_cell signature, refs #1826 --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index f208e727..c9cab8ab 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -9,7 +9,7 @@ Each plugin can implement one or more hooks using the ``@hookimpl`` decorator ag When you implement a plugin hook you can accept any or all of the parameters that are documented as being passed to that hook. -For example, you can implement the ``render_cell`` plugin hook like this even though the full documented hook signature is ``render_cell(value, column, table, database, datasette)``: +For example, you can implement the ``render_cell`` plugin hook like this even though the full documented hook signature is ``render_cell(row, value, column, table, database, datasette)``: .. code-block:: python From 984b1df12cf19a6731889fc0665bb5f622e07b7c Mon Sep 17 00:00:00 2001 From: Adam Simpson Date: Wed, 28 Sep 2022 00:21:36 -0400 Subject: [PATCH 0207/1003] Add documentation for serving via OpenRC (#1825) * Add documentation for serving via OpenRC --- docs/deploying.rst | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/docs/deploying.rst b/docs/deploying.rst index d4ad8836..c8552758 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -74,18 +74,30 @@ Once the service has started you can confirm that Datasette is running on port 8 curl 127.0.0.1:8000/-/versions.json # Should output JSON showing the installed version -Datasette will not be accessible from outside the server because it is listening on ``127.0.0.1``. 
You can expose it by instead listening on ``0.0.0.0``, but a better way is to set up a proxy such as ``nginx``. +Datasette will not be accessible from outside the server because it is listening on ``127.0.0.1``. You can expose it by instead listening on ``0.0.0.0``, but a better way is to set up a proxy such as ``nginx`` - see :ref:`deploying_proxy`. -Ubuntu offer `a tutorial on installing nginx `__. Once it is installed you can add configuration to proxy traffic through to Datasette that looks like this:: +.. _deploying_openrc: - server { - server_name mysubdomain.myhost.net; +Running Datasette using OpenRC +=============================== +OpenRC is the service manager on non-systemd Linux distributions like `Alpine Linux `__ and `Gentoo `__. - location / { - proxy_pass http://127.0.0.1:8000/; - proxy_set_header Host $host; - } - } +Create an init script at ``/etc/init.d/datasette`` with the following contents: + +.. code-block:: sh + + #!/sbin/openrc-run + + name="datasette" + command="datasette" + command_args="serve -h 0.0.0.0 /path/to/db.db" + command_background=true + pidfile="/run/${RC_SVCNAME}.pid" + +You then need to configure the service to run at boot and start it:: + + rc-update add datasette + rc-service datasette start .. _deploying_buildpacks: From 34defdc10aa293294ca01cfab70780755447e1d7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 28 Sep 2022 17:39:36 -0700 Subject: [PATCH 0208/1003] Browse the plugins directory --- docs/writing_plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index 01ee8c90..a3fc88ec 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -234,7 +234,7 @@ To avoid accidentally conflicting with a database file that may be loaded into D - ``/-/upload-excel`` -Try to avoid registering URLs that clash with other plugins that your users might have installed. 
There is no central repository of reserved URL paths (yet) but you can review existing plugins by browsing the `datasette-plugin topic `__ on GitHub. +Try to avoid registering URLs that clash with other plugins that your users might have installed. There is no central repository of reserved URL paths (yet) but you can review existing plugins by browsing the `plugins directory `. If your plugin includes functionality that relates to a specific database you could also register a URL route like this: From c92c4318e9892101f75fa158410c0a12c1d80b6e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 30 Sep 2022 10:55:40 -0700 Subject: [PATCH 0209/1003] Bump furo from 2022.9.15 to 2022.9.29 (#1827) Bumps [furo](https://github.com/pradyunsg/furo) from 2022.9.15 to 2022.9.29. - [Release notes](https://github.com/pradyunsg/furo/releases) - [Changelog](https://github.com/pradyunsg/furo/blob/main/docs/changelog.md) - [Commits](https://github.com/pradyunsg/furo/compare/2022.09.15...2022.09.29) --- updated-dependencies: - dependency-name: furo dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index afcba1f0..fe258adb 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,7 @@ setup( setup_requires=["pytest-runner"], extras_require={ "docs": [ - "furo==2022.9.15", + "furo==2022.9.29", "sphinx-autobuild", "codespell", "blacken-docs", From 883e326dd6ef95f854f7750ef2d4b0e17082fa96 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 2 Oct 2022 14:26:16 -0700 Subject: [PATCH 0210/1003] Drop word-wrap: anywhere, refs #1828, #1805 --- datasette/static/app.css | 1 - 1 file changed, 1 deletion(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index 08b724f6..712b9925 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -446,7 +446,6 @@ th { } table a:link { text-decoration: none; - word-wrap: anywhere; } .rows-and-columns td:before { display: block; From 4218c9cd742b79b1e3cb80878e42b7e39d16ded2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 4 Oct 2022 11:45:36 -0700 Subject: [PATCH 0211/1003] reST markup fix --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index c9cab8ab..832a76b0 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -268,7 +268,7 @@ you have one: def extra_js_urls(): return ["/-/static-plugins/your-plugin/app.js"] -Note that `your-plugin` here should be the hyphenated plugin name - the name that is displayed in the list on the `/-/plugins` debug page. +Note that ``your-plugin`` here should be the hyphenated plugin name - the name that is displayed in the list on the ``/-/plugins`` debug page. If your code uses `JavaScript modules `__ you should include the ``"module": True`` key. See :ref:`customization_css_and_javascript` for more details. 
From b6ba117b7978b58b40e3c3c2b723b92c3010ed53 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 4 Oct 2022 18:25:52 -0700 Subject: [PATCH 0212/1003] Clarify request or None for two hooks --- docs/plugin_hooks.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 832a76b0..b61f953a 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1281,7 +1281,7 @@ menu_links(datasette, actor, request) ``actor`` - dictionary or None The currently authenticated :ref:`actor `. -``request`` - :ref:`internals_request` +``request`` - :ref:`internals_request` or None The current HTTP request. This can be ``None`` if the request object is not available. This hook allows additional items to be included in the menu displayed by Datasette's top right menu icon. @@ -1330,7 +1330,7 @@ table_actions(datasette, actor, database, table, request) ``table`` - string The name of the table. -``request`` - :ref:`internals_request` +``request`` - :ref:`internals_request` or None The current HTTP request. This can be ``None`` if the request object is not available. This hook allows table actions to be displayed in a menu accessed via an action icon at the top of the table page. It should return a list of ``{"href": "...", "label": "..."}`` menu items. 
From bbf33a763537a1d913180b22bd3b5fe4a5e5b252 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 4 Oct 2022 21:32:11 -0700 Subject: [PATCH 0213/1003] Test for bool(results), closes #1832 --- tests/test_internals_database.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 9e81c1d6..4e33beed 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -30,6 +30,14 @@ async def test_results_first(db): assert isinstance(row, sqlite3.Row) +@pytest.mark.asyncio +@pytest.mark.parametrize("expected", (True, False)) +async def test_results_bool(db, expected): + where = "" if expected else "where pk = 0" + results = await db.execute("select * from facetable {}".format(where)) + assert bool(results) is expected + + @pytest.mark.parametrize( "query,expected", [ From eff112498ecc499323c26612d707908831446d25 Mon Sep 17 00:00:00 2001 From: Forest Gregg Date: Thu, 6 Oct 2022 16:06:06 -0400 Subject: [PATCH 0214/1003] Use inspect data for hash and file size on startup Thanks, @fgregg Closes #1834 --- datasette/database.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 46094bd7..d75bd70c 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -48,9 +48,13 @@ class Database: self._read_connection = None self._write_connection = None if not self.is_mutable and not self.is_memory: - p = Path(path) - self.hash = inspect_hash(p) - self.cached_size = p.stat().st_size + if self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self.hash = self.ds.inspect_data[self.name]["hash"] + self.cached_size = self.ds.inspect_data[self.name]["size"] + else: + p = Path(path) + self.hash = inspect_hash(p) + self.cached_size = p.stat().st_size @property def cached_table_counts(self): From b7fec7f9020b79c1fe60cc5a2def86b50eeb5af9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 7 
Oct 2022 16:03:09 -0700 Subject: [PATCH 0215/1003] .sqlite/.sqlite3 extensions for config directory mode Closes #1646 --- datasette/app.py | 5 ++++- docs/settings.rst | 2 +- tests/test_config_dir.py | 11 +++++------ 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 03d1dacc..32a911c2 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -217,7 +217,10 @@ class Datasette: self._secret = secret or secrets.token_hex(32) self.files = tuple(files or []) + tuple(immutables or []) if config_dir: - self.files += tuple([str(p) for p in config_dir.glob("*.db")]) + db_files = [] + for ext in ("db", "sqlite", "sqlite3"): + db_files.extend(config_dir.glob("*.{}".format(ext))) + self.files += tuple(str(f) for f in db_files) if ( config_dir and (config_dir / "inspect-data.json").exists() diff --git a/docs/settings.rst b/docs/settings.rst index 8437fb04..a6d50543 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -46,7 +46,7 @@ Datasette will detect the files in that directory and automatically configure it The files that can be included in this directory are as follows. All are optional. 
-* ``*.db`` - SQLite database files that will be served by Datasette +* ``*.db`` (or ``*.sqlite3`` or ``*.sqlite``) - SQLite database files that will be served by Datasette * ``metadata.json`` - :ref:`metadata` for those databases - ``metadata.yaml`` or ``metadata.yml`` can be used as well * ``inspect-data.json`` - the result of running ``datasette inspect *.db --inspect-file=inspect-data.json`` from the configuration directory - any database files listed here will be treated as immutable, so they should not be changed while Datasette is running * ``settings.json`` - settings that would normally be passed using ``--setting`` - here they should be stored as a JSON object of key/value pairs diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index f5ecf0d6..c2af3836 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -49,7 +49,7 @@ def config_dir(tmp_path_factory): (config_dir / "metadata.json").write_text(json.dumps(METADATA), "utf-8") (config_dir / "settings.json").write_text(json.dumps(SETTINGS), "utf-8") - for dbname in ("demo.db", "immutable.db"): + for dbname in ("demo.db", "immutable.db", "j.sqlite3", "k.sqlite"): db = sqlite3.connect(str(config_dir / dbname)) db.executescript( """ @@ -151,12 +151,11 @@ def test_databases(config_dir_client): response = config_dir_client.get("/-/databases.json") assert 200 == response.status databases = response.json - assert 2 == len(databases) + assert 4 == len(databases) databases.sort(key=lambda d: d["name"]) - assert "demo" == databases[0]["name"] - assert databases[0]["is_mutable"] - assert "immutable" == databases[1]["name"] - assert not databases[1]["is_mutable"] + for db, expected_name in zip(databases, ("demo", "immutable", "j", "k")): + assert expected_name == db["name"] + assert db["is_mutable"] == (expected_name != "immutable") @pytest.mark.parametrize("filename", ("metadata.yml", "metadata.yaml")) From 1a5e5f2aa951e5bd731067a49819efba68fbe8ef Mon Sep 17 00:00:00 2001 From: Simon 
Willison Date: Thu, 13 Oct 2022 14:42:52 -0700 Subject: [PATCH 0216/1003] Refactor breadcrumbs to respect permissions, refs #1831 --- datasette/app.py | 40 ++++++++++++++++++++++ datasette/templates/_crumbs.html | 15 ++++++++ datasette/templates/base.html | 4 +-- datasette/templates/database.html | 9 ----- datasette/templates/error.html | 7 ---- datasette/templates/logout.html | 7 ---- datasette/templates/permissions_debug.html | 7 ---- datasette/templates/query.html | 8 ++--- datasette/templates/row.html | 9 ++--- datasette/templates/show_json.html | 7 ---- datasette/templates/table.html | 8 ++--- tests/test_permissions.py | 1 + tests/test_plugins.py | 2 +- 13 files changed, 65 insertions(+), 59 deletions(-) create mode 100644 datasette/templates/_crumbs.html diff --git a/datasette/app.py b/datasette/app.py index 32a911c2..5fa4955c 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -631,6 +631,44 @@ class Datasette: else: return [] + async def _crumb_items(self, request, table=None, database=None): + crumbs = [] + # Top-level link + if await self.permission_allowed( + actor=request.actor, action="view-instance", default=True + ): + crumbs.append({"href": self.urls.instance(), "label": "home"}) + # Database link + if database: + if await self.permission_allowed( + actor=request.actor, + action="view-database", + resource=database, + default=True, + ): + crumbs.append( + { + "href": self.urls.database(database), + "label": database, + } + ) + # Table link + if table: + assert database, "table= requires database=" + if await self.permission_allowed( + actor=request.actor, + action="view-table", + resource=(database, table), + default=True, + ): + crumbs.append( + { + "href": self.urls.table(database, table), + "label": table, + } + ) + return crumbs + async def permission_allowed(self, actor, action, resource=None, default=False): """Check permissions using the permissions_allowed plugin hook""" result = None @@ -1009,6 +1047,8 @@ class Datasette: 
template_context = { **context, **{ + "request": request, + "crumb_items": self._crumb_items, "urls": self.urls, "actor": request.actor if request else None, "menu_links": menu_links, diff --git a/datasette/templates/_crumbs.html b/datasette/templates/_crumbs.html new file mode 100644 index 00000000..bd1ff0da --- /dev/null +++ b/datasette/templates/_crumbs.html @@ -0,0 +1,15 @@ +{% macro nav(request, database=None, table=None) -%} +{% if crumb_items is defined %} + {% set items=crumb_items(request=request, database=database, table=table) %} + {% if items %} +

+ {% for item in items %} + {{ item.label }} + {% if not loop.last %} + / + {% endif %} + {% endfor %} +

+ {% endif %} +{% endif %} +{%- endmacro %} diff --git a/datasette/templates/base.html b/datasette/templates/base.html index c3a71acb..87c939ac 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -1,4 +1,4 @@ - +{% import "_crumbs.html" as crumbs with context %} {% block title %}{% endblock %} @@ -17,7 +17,7 @@