From 1f9cca33b41cc93357f9eb347d70ff20b2a4d0d5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 19 Feb 2021 15:47:52 -0800 Subject: [PATCH 0001/1304] Resizable SQL editor using cm-resize, refs #1236 --- datasette/static/cm-resize-1.0.1.min.js | 8 ++++++++ datasette/templates/_codemirror.html | 8 +++++++- datasette/templates/_codemirror_foot.html | 1 + 3 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 datasette/static/cm-resize-1.0.1.min.js diff --git a/datasette/static/cm-resize-1.0.1.min.js b/datasette/static/cm-resize-1.0.1.min.js new file mode 100644 index 00000000..27c2dfe2 --- /dev/null +++ b/datasette/static/cm-resize-1.0.1.min.js @@ -0,0 +1,8 @@ +/*! + * cm-resize v1.0.1 + * https://github.com/Sphinxxxx/cm-resize + * + * Copyright 2017-2018 Andreas Borgen (https://github.com/Sphinxxxx) + * Released under the MIT license. + */ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):e.cmResize=t()}(this,function(){"use strict";return document.documentElement.firstElementChild.appendChild(document.createElement("style")).textContent=".cm-resize-handle{display:block;position:absolute;bottom:0;right:0;z-index:99;width:18px;height:18px;background:url(\"data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='30' height='30' viewBox='0,0 16,16'%3E%3Cpath stroke='gray' stroke-width='2' d='M-1,12 l18,-18 M-1,18 l18,-18 M-1,24 l18,-18 M-1,30 l18,-18'/%3E%3C/svg%3E\") center/cover;box-shadow:inset -1px -1px 0 0 silver;cursor:nwse-resize}",function(r,e){var t,c=(e=e||{}).minWidth||200,l=e.minHeight||100,s=!1!==e.resizableWidth,d=!1!==e.resizableHeight,n=e.cssClass||"cm-resize-handle",o=r.display.wrapper,i=e.handle||((t=o.appendChild(document.createElement("div"))).className=n,t),a=o.querySelector(".CodeMirror-vscrollbar"),u=o.querySelector(".CodeMirror-hscrollbar");function h(){e.handle||(a.style.bottom="18px",u.style.right="18px")}r.on("update",h),h();var f=void 0,m=void 0;return function(e){var t=Element.prototype;t.matches||(t.matches=t.msMatchesSelector||t.webkitMatchesSelector),t.closest||(t.closest=function(e){var t=this;do{if(t.matches(e))return t;t="svg"===t.tagName?t.parentNode:t.parentElement}while(t);return null});var l=(e=e||{}).container||document.documentElement,n=e.selector,o=e.callback||console.log,i=e.callbackDragStart,a=e.callbackDragEnd,r=e.callbackClick,c=e.propagateEvents,s=!1!==e.roundCoords,d=!1!==e.dragOutside,u=e.handleOffset||!1!==e.handleOffset,h=null;switch(u){case"center":h=!0;break;case"topleft":case"top-left":h=!1}var f=void 0,m=void 0,p=void 0;function v(e,t,n,o){var i=e.clientX,a=e.clientY;function r(e,t,n){return Math.max(t,Math.min(e,n))}if(t){var c=t.getBoundingClientRect();i-=c.left,a-=c.top,n&&(i-=n[0],a-=n[1]),o&&(i=r(i,0,c.width),a=r(a,0,c.height)),t!==l&&(null!==h?h:"circle"===t.nodeName||"ellipse"===t.nodeName)&&(i-=c.width/2,a-=c.height/2)}return s?[Math.round(i),Math.round(a)]:[i,a]}function g(e){e.preventDefault(),c||e.stopPropagation()}function w(e){(f=n?n instanceof Element?n.contains(e.target)?n:null:e.target.closest(n):{})&&(g(e),m=n&&u?v(e,f):[0,0],p=v(e,l,m),s&&(p=p.map(Math.round)),i&&i(f,p))}function b(e){if(f){g(e);var t=v(e,l,m,!d);o(f,t,p)}}function E(e){if(f){if(a||r){var t=v(e,l,m,!d);r&&p[0]===t[0]&&p[1]===t[1]&&r(f,p),a&&a(f,t,p)}f=null}}function x(e){E(C(e))}function M(e){return void 0!==e.buttons?1===e.buttons:1===e.which}function k(e,t){1===e.touches.length?t(C(e)):E(e)}function C(e){var t=e.targetTouches[0];return 
t||(t=e.changedTouches[0]),t.preventDefault=e.preventDefault.bind(e),t.stopPropagation=e.stopPropagation.bind(e),t}l.addEventListener("mousedown",function(e){M(e)&&w(e)}),l.addEventListener("touchstart",function(e){k(e,w)}),window.addEventListener("mousemove",function(e){f&&(M(e)?b(e):E(e))}),window.addEventListener("touchmove",function(e){k(e,b)}),window.addEventListener("mouseup",function(e){f&&!M(e)&&E(e)}),l.addEventListener("touchend",x),l.addEventListener("touchcancel",x)}({container:i.offsetParent,selector:i,callbackDragStart:function(e,t){f=t,m=[o.clientWidth,o.clientHeight]},callback:function(e,t){var n=t[0]-f[0],o=t[1]-f[1],i=s?Math.max(c,m[0]+n):null,a=d?Math.max(l,m[1]+o):null;r.setSize(i,a)}}),i}}); \ No newline at end of file diff --git a/datasette/templates/_codemirror.html b/datasette/templates/_codemirror.html index b31235d2..a17eaf9b 100644 --- a/datasette/templates/_codemirror.html +++ b/datasette/templates/_codemirror.html @@ -2,7 +2,13 @@ + diff --git a/datasette/templates/_codemirror_foot.html b/datasette/templates/_codemirror_foot.html index 4019d448..ee09cff1 100644 --- a/datasette/templates/_codemirror_foot.html +++ b/datasette/templates/_codemirror_foot.html @@ -23,6 +23,7 @@ window.onload = () => { editor.setValue(sqlFormatter.format(editor.getValue())); }) } + cmResize(editor, {resizableWidth: false}); } if (sqlFormat && readOnly) { const formatted = sqlFormatter.format(readOnly.innerHTML); From 42caabf7e9e6e4d69ef6dd7de16f2cd96bc79d5b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 22 Feb 2021 09:35:41 -0800 Subject: [PATCH 0002/1304] Fixed typo --- docs/testing_plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 8ea5e79b..1291a875 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -79,7 +79,7 @@ Using pytest fixtures A common pattern for Datasette plugins is to create a fixture which sets up a temporary test database and wraps it in a Datasette instance. -Here's an example that uses the `sqlite-utils library `__ to populate a temporary test database. It also sets the title of that table using a simulated ``metadata.json`` congiguration: +Here's an example that uses the `sqlite-utils library `__ to populate a temporary test database. It also sets the title of that table using a simulated ``metadata.json`` configuration: .. 
code-block:: python From 726f781c50e88f557437f6490b8479c3d6fabfc2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 22 Feb 2021 16:22:47 -0800 Subject: [PATCH 0003/1304] Fix for arraycontains bug, closes #1239 --- datasette/filters.py | 4 ++-- tests/test_filters.py | 8 ++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/datasette/filters.py b/datasette/filters.py index 152a26b4..2b859d99 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -150,7 +150,7 @@ class Filters: "arraycontains", "array contains", """rowid in ( - select {t}.rowid from {t}, json_each({t}.{c}) j + select {t}.rowid from {t}, json_each([{t}].[{c}]) j where j.value = :{p} )""", '{c} contains "{v}"', @@ -159,7 +159,7 @@ class Filters: "arraynotcontains", "array does not contain", """rowid not in ( - select {t}.rowid from {t}, json_each({t}.{c}) j + select {t}.rowid from {t}, json_each([{t}].[{c}]) j where j.value = :{p} )""", '{c} does not contain "{v}"', diff --git a/tests/test_filters.py b/tests/test_filters.py index 75a779b9..f22b7b5c 100644 --- a/tests/test_filters.py +++ b/tests/test_filters.py @@ -56,6 +56,14 @@ import pytest # Not in, and JSON array not in ((("foo__notin", "1,2,3"),), ["foo not in (:p0, :p1, :p2)"], ["1", "2", "3"]), ((("foo__notin", "[1,2,3]"),), ["foo not in (:p0, :p1, :p2)"], [1, 2, 3]), + # JSON arraycontains + ( + (("Availability+Info__arraycontains", "yes"),), + [ + "rowid in (\n select table.rowid from table, json_each([table].[Availability+Info]) j\n where j.value = :p0\n )" + ], + ["yes"], + ), ], ) def test_build_where(args, expected_where, expected_params): From afed51b1e36cf275c39e71c7cb262d6c5bdbaa31 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 26 Feb 2021 09:27:09 -0800 Subject: [PATCH 0004/1304] Note about where to find plugin examples, closes #1244 --- docs/writing_plugins.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index b43ecb27..6afee1c3 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -5,6 +5,8 @@ Writing plugins You can write one-off plugins that apply to just one Datasette instance, or you can write plugins which can be installed using ``pip`` and can be shipped to the Python Package Index (`PyPI `__) for other people to install. +Want to start by looking at an example? The `Datasette plugins directory `__ lists more than 50 open source plugins with code you can explore. The :ref:`plugin hooks ` page includes links to example plugins for each of the documented hooks. + .. 
_writing_plugins_one_off: Writing one-off plugins From cc6774cbaaba2359e0a92cfcc41ad988680075d6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Feb 2021 14:34:44 -0800 Subject: [PATCH 0005/1304] Upgrade httpx and remove xfail from tests, refs #1005 --- setup.py | 2 +- tests/test_api.py | 2 -- tests/test_html.py | 3 --- 3 files changed, 1 insertion(+), 6 deletions(-) diff --git a/setup.py b/setup.py index 34b6b396..15ee63fe 100644 --- a/setup.py +++ b/setup.py @@ -48,7 +48,7 @@ setup( "click-default-group~=1.2.2", "Jinja2>=2.10.3,<2.12.0", "hupper~=1.9", - "httpx>=0.15", + "httpx>=0.17", "pint~=0.9", "pluggy~=0.13.0", "uvicorn~=0.11", diff --git a/tests/test_api.py b/tests/test_api.py index 0b5401d6..caf23329 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -810,7 +810,6 @@ def test_table_shape_object_compound_primary_key(app_client): assert {"a,b": {"pk1": "a", "pk2": "b", "content": "c"}} == response.json -@pytest.mark.xfail def test_table_with_slashes_in_name(app_client): response = app_client.get( "/fixtures/table%2Fwith%2Fslashes.csv?_shape=objects&_format=json" @@ -1286,7 +1285,6 @@ def test_row_format_in_querystring(app_client): assert [{"id": "1", "content": "hello"}] == response.json["rows"] -@pytest.mark.xfail def test_row_strange_table_name(app_client): response = app_client.get( "/fixtures/table%2Fwith%2Fslashes.csv/3.json?_shape=objects" diff --git a/tests/test_html.py b/tests/test_html.py index e21bd64d..3482ec35 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -158,7 +158,6 @@ def test_row_redirects_with_url_hash(app_client_with_hash): assert response.status == 200 -@pytest.mark.xfail def test_row_strange_table_name_with_url_hash(app_client_with_hash): response = app_client_with_hash.get( "/fixtures/table%2Fwith%2Fslashes.csv/3", allow_redirects=False @@ -552,7 +551,6 @@ def test_facets_persist_through_filter_form(app_client): ] -@pytest.mark.xfail @pytest.mark.parametrize( "path,expected_classes", [ @@ -584,7 +582,6 @@ def test_css_classes_on_body(app_client, path, expected_classes): assert classes == expected_classes -@pytest.mark.xfail @pytest.mark.parametrize( "path,expected_considered", [ From 47eb885cc2c3aafa03645c330c6f597bee9b3b25 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Feb 2021 19:44:04 -0800 Subject: [PATCH 0006/1304] JSON faceting now suggested even if column has blank strings, closes #1246 --- datasette/facets.py | 11 ++++++++--- tests/test_facets.py | 22 ++++++++++++++++++++++ 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index 207d819d..01628760 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -279,6 +279,7 @@ class ArrayFacet(Facet): suggested_facet_sql = """ select distinct json_type({column}) from ({sql}) + where {column} is not null and {column} != '' """.format( column=escape_sqlite(column), sql=self.sql ) @@ -298,9 +299,13 @@ class ArrayFacet(Facet): v[0] for v in await self.ds.execute( self.database, - "select {column} from ({sql}) where {column} is not null and json_array_length({column}) > 0 limit 100".format( - column=escape_sqlite(column), sql=self.sql - ), + ( + "select {column} from ({sql}) " + "where {column} is not null " + "and {column} != '' " + "and json_array_length({column}) > 0 " + "limit 100" + ).format(column=escape_sqlite(column), sql=self.sql), self.params, truncate=False, custom_time_limit=self.ds.setting( diff --git a/tests/test_facets.py b/tests/test_facets.py index 1e19dc3a..31518682 100644 --- 
a/tests/test_facets.py +++ b/tests/test_facets.py @@ -1,3 +1,5 @@ +from datasette.app import Datasette +from datasette.database import Database from datasette.facets import ColumnFacet, ArrayFacet, DateFacet from datasette.utils.asgi import Request from datasette.utils import detect_json1 @@ -325,3 +327,23 @@ async def test_date_facet_results(app_client): "truncated": False, } } == buckets + + +@pytest.mark.asyncio +async def test_json_array_with_blanks_and_nulls(): + ds = Datasette([], memory=True) + db = ds.add_database(Database(ds, memory_name="test_json_array")) + await db.execute_write("create table foo(json_column text)", block=True) + for value in ('["a", "b", "c"]', '["a", "b"]', "", None): + await db.execute_write( + "insert into foo (json_column) values (?)", [value], block=True + ) + response = await ds.client.get("/test_json_array/foo.json") + data = response.json() + assert data["suggested_facets"] == [ + { + "name": "json_column", + "type": "array", + "toggle_url": "http://localhost/test_json_array/foo.json?_facet_array=json_column", + } + ] From 7c87532acc4e9d92caa1c4ee29a3446200928018 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Feb 2021 20:02:18 -0800 Subject: [PATCH 0007/1304] New .add_memory_database() method, closes #1247 --- datasette/app.py | 3 +++ docs/internals.rst | 29 ++++++++++++++++++++--------- tests/test_internals_database.py | 4 ++-- 3 files changed, 25 insertions(+), 11 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index e3272c6e..02d432df 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -390,6 +390,9 @@ class Datasette: self.databases[name] = db return db + def add_memory_database(self, memory_name): + return self.add_database(Database(self, memory_name=memory_name)) + def remove_database(self, name): self.databases.pop(name) diff --git a/docs/internals.rst b/docs/internals.rst index 713f5d7d..e3bb83fd 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -273,7 +273,25 @@ The ``db`` parameter should be an instance of the ``datasette.database.Database` This will add a mutable database and serve it at ``/my-new-database``. -To create a shared in-memory database named ``statistics``, use the following: +``.add_database()`` returns the Database instance, with its name set as the ``database.name`` attribute. Any time you are working with a newly added database you should use the return value of ``.add_database()``, for example: + +.. code-block:: python + + db = datasette.add_database(Database(datasette, memory_name="statistics")) + await db.execute_write("CREATE TABLE foo(id integer primary key)", block=True) + +.. _datasette_add_memory_database: + +.add_memory_database(name) +-------------------------- + +Adds a shared in-memory database with the specified name: + +.. code-block:: python + + datasette.add_memory_database("statistics") + +This is a shortcut for the following: .. code-block:: python @@ -284,14 +302,7 @@ To create a shared in-memory database named ``statistics``, use the following: memory_name="statistics" )) -This database will be served at ``/statistics``. - -``.add_database()`` returns the Database instance, with its name set as the ``database.name`` attribute. Any time you are working with a newly added database you should use the return value of ``.add_database()``, for example: - -.. 
code-block:: python - - db = datasette.add_database(Database(datasette, memory_name="statistics")) - await db.execute_write("CREATE TABLE foo(id integer primary key)", block=True) +Using either of these patterns will result in the in-memory database being served at ``/statistics``. .. _datasette_remove_database: diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 086f1a48..b60aaa8e 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -479,9 +479,9 @@ async def test_attached_databases(app_client_two_attached_databases_crossdb_enab async def test_database_memory_name(app_client): ds = app_client.ds foo1 = ds.add_database(Database(ds, memory_name="foo")) - foo2 = ds.add_database(Database(ds, memory_name="foo")) + foo2 = ds.add_memory_database("foo") bar1 = ds.add_database(Database(ds, memory_name="bar")) - bar2 = ds.add_database(Database(ds, memory_name="bar")) + bar2 = ds.add_memory_database("bar") for db in (foo1, foo2, bar1, bar2): table_names = await db.table_names() assert table_names == [] From 4f9a2f1f47dcf7e8561d68a8a07f5009a13cfdb3 Mon Sep 17 00:00:00 2001 From: David Boucha Date: Wed, 3 Mar 2021 22:46:10 -0700 Subject: [PATCH 0008/1304] Fix small typo (#1243) Thanks, @UtahDave --- docs/deploying.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/deploying.rst b/docs/deploying.rst index 4e04ea1d..0f892f83 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -58,7 +58,7 @@ Add a random value for the ``DATASETTE_SECRET`` - this will be used to sign Data $ python3 -c 'import secrets; print(secrets.token_hex(32))' -This configuration will run Datasette against all database files contained in the ``/home/ubunt/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`config_dir` for details. +This configuration will run Datasette against all database files contained in the ``/home/ubuntu/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`config_dir` for details. You can start the Datasette process running using the following:: From d0fd833b8cdd97e1b91d0f97a69b494895d82bee Mon Sep 17 00:00:00 2001 From: Bob Whitelock Date: Sun, 7 Mar 2021 07:41:17 +0000 Subject: [PATCH 0009/1304] Add compile option to Dockerfile to fix failing test (fixes #696) (#1223) This test was failing when run inside the Docker container: `test_searchable[/fixtures/searchable.json?_search=te*+AND+do*&_searchmode=raw-expected_rows3]`, with this error: ``` def test_searchable(app_client, path, expected_rows): response = app_client.get(path) > assert expected_rows == response.json["rows"] E AssertionError: assert [[1, 'barry c...sel', 'puma']] == [] E Left contains 2 more items, first extra item: [1, 'barry cat', 'terry dog', 'panther'] E Full diff: E + [] E - [[1, 'barry cat', 'terry dog', 'panther'], E - [2, 'terry dog', 'sara weasel', 'puma']] ``` The issue was that the version of sqlite3 built inside the Docker container was built with FTS3 and FTS4 enabled, but without the `SQLITE_ENABLE_FTS3_PARENTHESIS` compile option passed, which adds support for using `AND` and `NOT` within `match` expressions (see https://sqlite.org/fts3.html#compiling_and_enabling_fts3_and_fts4 and https://www.sqlite.org/compile.html).
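For illustration, here is a minimal sketch — not part of the patch, and assuming a Python interpreter whose bundled SQLite includes FTS4 — of how the behavior of the same query text depends on that compile option:

```python
# Sketch only: the printed result varies with how the linked SQLite
# library was compiled, which is exactly the bug described here.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE VIRTUAL TABLE searchable USING fts4(name1, name2)")
conn.execute("INSERT INTO searchable VALUES ('terry dog', 'sara weasel')")

rows = conn.execute(
    "SELECT * FROM searchable WHERE searchable MATCH 'te* AND sa*'"
).fetchall()

# With SQLITE_ENABLE_FTS3_PARENTHESIS: AND is a boolean operator and the
# row is returned. Without it: AND is looked up as the literal term
# "and", so rows == [].
print(rows)
```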
Without this, the `AND` used in the search in this test was being interpreted as a literal string, and so no matches were found. Adding this compile option fixes this. Thanks, @bobwhitelock --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index aba701ab..f4b14146 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,7 +7,7 @@ RUN apt update \ RUN wget "https://www.sqlite.org/2020/sqlite-autoconf-3310100.tar.gz" && tar xzf sqlite-autoconf-3310100.tar.gz \ - && cd sqlite-autoconf-3310100 && ./configure --disable-static --enable-fts5 --enable-json1 CFLAGS="-g -O2 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_JSON1" \ + && cd sqlite-autoconf-3310100 && ./configure --disable-static --enable-fts5 --enable-json1 CFLAGS="-g -O2 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS -DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_JSON1" \ && make && make install RUN wget "http://www.gaia-gis.it/gaia-sins/freexl-sources/freexl-1.0.5.tar.gz" && tar zxf freexl-1.0.5.tar.gz \ From a1bcd2fbe5e47bb431045f65eeceb5eb3a6718d5 Mon Sep 17 00:00:00 2001 From: Jean-Baptiste Pressac Date: Wed, 10 Mar 2021 19:26:39 +0100 Subject: [PATCH 0010/1304] Minor typo in IP address (#1256) 127.0.01 replaced by 127.0.0.1 --- docs/deploying.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/deploying.rst b/docs/deploying.rst index 0f892f83..48261b59 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -67,7 +67,7 @@ You can start the Datasette process running using the following:: You can confirm that Datasette is running on port 8000 like so:: - curl 127.0.01:8000/-/versions.json + curl 127.0.0.1:8000/-/versions.json # Should output JSON showing the installed version Datasette will not be accessible from outside the server because it is listening on ``127.0.0.1``. You can expose it by instead listening on ``0.0.0.0``, but a better way is to set up a proxy such as ``nginx``. From 8e18c7943181f228ce5ebcea48deb59ce50bee1f Mon Sep 17 00:00:00 2001 From: Konstantin Baikov <4488943+kbaikov@users.noreply.github.com> Date: Thu, 11 Mar 2021 17:15:49 +0100 Subject: [PATCH 0011/1304] Use context manager instead of plain open (#1211) Context managers with open close the files after use. Where the object is already a pathlib.Path, the read_text/write_text functions are used instead. In some cases pathlib.Path.open was being used in a context manager, which is essentially the same as the builtin open. Thanks, Konstantin Baikov!
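For reference, a minimal sketch of the before/after patterns this patch applies across the codebase (the filename here is hypothetical, not taken from the diff):

```python
import pathlib

# Before: the handle is closed only when the file object is garbage collected.
data = open("example.txt").read()

# After: the context manager closes the handle as soon as the block exits.
with open("example.txt") as fp:
    data = fp.read()

# After, when a pathlib.Path is already in hand: read_text()/write_text()
# open and close the file internally.
path = pathlib.Path("example.txt")
data = path.read_text()
path.write_text(data)
```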
--- datasette/app.py | 13 ++++++------- datasette/cli.py | 13 +++++++------ datasette/publish/cloudrun.py | 6 ++++-- datasette/publish/heroku.py | 17 ++++++++++------- datasette/utils/__init__.py | 6 ++++-- setup.py | 3 ++- tests/conftest.py | 6 ++---- tests/fixtures.py | 5 +++-- tests/test_cli.py | 3 ++- tests/test_cli_serve_get.py | 3 ++- tests/test_docs.py | 8 ++++---- tests/test_package.py | 6 ++++-- tests/test_plugins.py | 3 ++- tests/test_publish_cloudrun.py | 32 ++++++++++++++++++++------------ tests/test_publish_heroku.py | 12 ++++++++---- tests/test_utils.py | 18 ++++++++++++------ update-docs-help.py | 2 +- 17 files changed, 93 insertions(+), 63 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 02d432df..f43ec205 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -212,7 +212,7 @@ class Datasette: and (config_dir / "inspect-data.json").exists() and not inspect_data ): - inspect_data = json.load((config_dir / "inspect-data.json").open()) + inspect_data = json.loads((config_dir / "inspect-data.json").read_text()) if immutables is None: immutable_filenames = [i["file"] for i in inspect_data.values()] immutables = [ @@ -269,7 +269,7 @@ class Datasette: if config_dir and (config_dir / "config.json").exists(): raise StartupError("config.json should be renamed to settings.json") if config_dir and (config_dir / "settings.json").exists() and not config: - config = json.load((config_dir / "settings.json").open()) + config = json.loads((config_dir / "settings.json").read_text()) self._settings = dict(DEFAULT_SETTINGS, **(config or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note @@ -450,11 +450,10 @@ class Datasette: def app_css_hash(self): if not hasattr(self, "_app_css_hash"): - self._app_css_hash = hashlib.sha1( - open(os.path.join(str(app_root), "datasette/static/app.css")) - .read() - .encode("utf8") - ).hexdigest()[:6] + with open(os.path.join(str(app_root), "datasette/static/app.css")) as fp: + self._app_css_hash = hashlib.sha1(fp.read().encode("utf8")).hexdigest()[ + :6 + ] return self._app_css_hash async def get_canned_queries(self, database_name, actor): diff --git a/datasette/cli.py b/datasette/cli.py index 96a41740..2fa039a0 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -125,13 +125,13 @@ def cli(): @sqlite_extensions def inspect(files, inspect_file, sqlite_extensions): app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions) - if inspect_file == "-": - out = sys.stdout - else: - out = open(inspect_file, "w") loop = asyncio.get_event_loop() inspect_data = loop.run_until_complete(inspect_(files, sqlite_extensions)) - out.write(json.dumps(inspect_data, indent=2)) + if inspect_file == "-": + sys.stdout.write(json.dumps(inspect_data, indent=2)) + else: + with open(inspect_file, "w") as fp: + fp.write(json.dumps(inspect_data, indent=2)) async def inspect_(files, sqlite_extensions): @@ -475,7 +475,8 @@ def serve( inspect_data = None if inspect_file: - inspect_data = json.load(open(inspect_file)) + with open(inspect_file) as fp: + inspect_data = json.load(fp) metadata_data = None if metadata: diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 7f9e89e2..bad223a1 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -141,9 +141,11 @@ def publish_subcommand(publish): if show_files: if os.path.exists("metadata.json"): print("=== metadata.json ===\n") - print(open("metadata.json").read()) + with open("metadata.json") as fp: 
+ print(fp.read()) print("\n==== Dockerfile ====\n") - print(open("Dockerfile").read()) + with open("Dockerfile") as fp: + print(fp.read()) print("\n====================\n") image_id = f"gcr.io/{project}/{name}" diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index c0c70e12..19fe3fbe 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -171,9 +171,11 @@ def temporary_heroku_directory( os.chdir(tmp.name) if metadata_content: - open("metadata.json", "w").write(json.dumps(metadata_content, indent=2)) + with open("metadata.json", "w") as fp: + fp.write(json.dumps(metadata_content, indent=2)) - open("runtime.txt", "w").write("python-3.8.7") + with open("runtime.txt", "w") as fp: + fp.write("python-3.8.7") if branch: install = [ @@ -182,11 +184,11 @@ def temporary_heroku_directory( else: install = ["datasette"] + list(install) - open("requirements.txt", "w").write("\n".join(install)) + with open("requirements.txt", "w") as fp: + fp.write("\n".join(install)) os.mkdir("bin") - open("bin/post_compile", "w").write( - "datasette inspect --inspect-file inspect-data.json" - ) + with open("bin/post_compile", "w") as fp: + fp.write("datasette inspect --inspect-file inspect-data.json") extras = [] if template_dir: @@ -218,7 +220,8 @@ def temporary_heroku_directory( procfile_cmd = "web: datasette serve --host 0.0.0.0 {quoted_files} --cors --port $PORT --inspect-file inspect-data.json {extras}".format( quoted_files=quoted_files, extras=" ".join(extras) ) - open("Procfile", "w").write(procfile_cmd) + with open("Procfile", "w") as fp: + fp.write(procfile_cmd) for path, filename in zip(file_paths, file_names): link_or_copy(path, os.path.join(tmp.name, filename)) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 47ca0551..1fedb69c 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -428,8 +428,10 @@ def temporary_docker_directory( ) os.chdir(datasette_dir) if metadata_content: - open("metadata.json", "w").write(json.dumps(metadata_content, indent=2)) - open("Dockerfile", "w").write(dockerfile) + with open("metadata.json", "w") as fp: + fp.write(json.dumps(metadata_content, indent=2)) + with open("Dockerfile", "w") as fp: + fp.write(dockerfile) for path, filename in zip(file_paths, file_names): link_or_copy(path, os.path.join(datasette_dir, filename)) if template_dir: diff --git a/setup.py b/setup.py index 15ee63fe..3540e30a 100644 --- a/setup.py +++ b/setup.py @@ -17,7 +17,8 @@ def get_version(): os.path.dirname(os.path.abspath(__file__)), "datasette", "version.py" ) g = {} - exec(open(path).read(), g) + with open(path) as fp: + exec(fp.read(), g) return g["__version__"] diff --git a/tests/conftest.py b/tests/conftest.py index b00ea006..ad3eb9f1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -75,10 +75,8 @@ def check_permission_actions_are_documented(): from datasette.plugins import pm content = ( - (pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst") - .open() - .read() - ) + pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst" + ).read_text() permissions_re = re.compile(r"\.\. 
_permissions_([^\s:]+):") documented_permission_actions = set(permissions_re.findall(content)).union( UNDOCUMENTED_PERMISSIONS diff --git a/tests/fixtures.py b/tests/fixtures.py index 30113ff2..2fd8e9cb 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -789,7 +789,8 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename): conn.executescript(GENERATED_COLUMNS_SQL) print(f"Test tables written to {db_filename}") if metadata: - open(metadata, "w").write(json.dumps(METADATA, indent=4)) + with open(metadata, "w") as fp: + fp.write(json.dumps(METADATA, indent=4)) print(f"- metadata written to {metadata}") if plugins_path: path = pathlib.Path(plugins_path) @@ -798,7 +799,7 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename): test_plugins = pathlib.Path(__file__).parent / "plugins" for filepath in test_plugins.glob("*.py"): newpath = path / filepath.name - newpath.write_text(filepath.open().read()) + newpath.write_text(filepath.read_text()) print(f" Wrote plugin: {newpath}") if extra_db_filename: if pathlib.Path(extra_db_filename).exists(): diff --git a/tests/test_cli.py b/tests/test_cli.py index 8ddd32f6..e094ccb6 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -49,7 +49,8 @@ def test_inspect_cli_writes_to_file(app_client): cli, ["inspect", "fixtures.db", "--inspect-file", "foo.json"] ) assert 0 == result.exit_code, result.output - data = json.load(open("foo.json")) + with open("foo.json") as fp: + data = json.load(fp) assert ["fixtures"] == list(data.keys()) diff --git a/tests/test_cli_serve_get.py b/tests/test_cli_serve_get.py index aaa692e5..90fbfe3b 100644 --- a/tests/test_cli_serve_get.py +++ b/tests/test_cli_serve_get.py @@ -14,7 +14,8 @@ def test_serve_with_get(tmp_path_factory): @hookimpl def startup(datasette): - open("{}", "w").write("hello") + with open("{}", "w") as fp: + fp.write("hello") """.format( str(plugins_dir / "hello.txt") ), diff --git a/tests/test_docs.py b/tests/test_docs.py index 44b0810a..efd267b9 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -19,13 +19,13 @@ def get_headings(content, underline="-"): def get_labels(filename): - content = (docs_path / filename).open().read() + content = (docs_path / filename).read_text() return set(label_re.findall(content)) @pytest.fixture(scope="session") def settings_headings(): - return get_headings((docs_path / "settings.rst").open().read(), "~") + return get_headings((docs_path / "settings.rst").read_text(), "~") @pytest.mark.parametrize("setting", app.SETTINGS) @@ -43,7 +43,7 @@ def test_settings_are_documented(settings_headings, setting): ), ) def test_help_includes(name, filename): - expected = open(str(docs_path / filename)).read() + expected = (docs_path / filename).read_text() runner = CliRunner() result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88) actual = f"$ datasette {name} --help\n\n{result.output}" @@ -55,7 +55,7 @@ def test_help_includes(name, filename): @pytest.fixture(scope="session") def plugin_hooks_content(): - return (docs_path / "plugin_hooks.rst").open().read() + return (docs_path / "plugin_hooks.rst").read_text() @pytest.mark.parametrize( diff --git a/tests/test_package.py b/tests/test_package.py index 3248b3a4..bb939643 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -32,7 +32,8 @@ def test_package(mock_call, mock_which): capture = CaptureDockerfile() mock_call.side_effect = capture with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + 
fp.write("data") result = runner.invoke(cli.cli, ["package", "test.db", "--secret", "sekrit"]) assert 0 == result.exit_code mock_call.assert_has_calls([mock.call(["docker", "build", "."])]) @@ -47,7 +48,8 @@ def test_package_with_port(mock_call, mock_which): mock_call.side_effect = capture runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["package", "test.db", "-p", "8080", "--secret", "sekrit"] ) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 715c7c17..ee6f1efa 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -225,7 +225,8 @@ def test_plugin_config_env_from_list(app_client): def test_plugin_config_file(app_client): - open(TEMP_PLUGIN_SECRET_FILE, "w").write("FROM_FILE") + with open(TEMP_PLUGIN_SECRET_FILE, "w") as fp: + fp.write("FROM_FILE") assert {"foo": "FROM_FILE"} == app_client.ds.plugin_config("file-plugin") # Ensure secrets aren't visible in /-/metadata.json metadata = app_client.get("/-/metadata.json") diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index 2ef90705..7881ebae 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -11,7 +11,8 @@ def test_publish_cloudrun_requires_gcloud(mock_which): mock_which.return_value = False runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"]) assert result.exit_code == 1 assert "Publishing to Google Cloud requires gcloud" in result.output @@ -40,7 +41,8 @@ def test_publish_cloudrun_prompts_for_service( mock_which.return_value = True runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["publish", "cloudrun", "test.db"], input="input-service" ) @@ -81,7 +83,8 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which): mock_which.return_value = True runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"] ) @@ -120,7 +123,8 @@ def test_publish_cloudrun_memory( mock_which.return_value = True runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["publish", "cloudrun", "test.db", "--service", "test", "--memory", memory], @@ -152,17 +156,19 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which): runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") - open("metadata.yml", "w").write( - textwrap.dedent( - """ + with open("test.db", "w") as fp: + fp.write("data") + with open("metadata.yml", "w") as fp: + fp.write( + textwrap.dedent( + """ title: Hello from metadata YAML plugins: datasette-auth-github: foo: bar """ - ).strip() - ) + ).strip() + ) result = runner.invoke( cli.cli, [ @@ -228,7 +234,8 @@ def test_publish_cloudrun_apt_get_install(mock_call, mock_output, mock_which): runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, [ @@ -295,7 +302,8 @@ def 
test_publish_cloudrun_extra_options( runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, [ diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py index c7a38031..c011ab43 100644 --- a/tests/test_publish_heroku.py +++ b/tests/test_publish_heroku.py @@ -8,7 +8,8 @@ def test_publish_heroku_requires_heroku(mock_which): mock_which.return_value = False runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"]) assert result.exit_code == 1 assert "Publishing to Heroku requires heroku" in result.output @@ -22,7 +23,8 @@ def test_publish_heroku_installs_plugin(mock_call, mock_check_output, mock_which mock_check_output.side_effect = lambda s: {"['heroku', 'plugins']": b""}[repr(s)] runner = CliRunner() with runner.isolated_filesystem(): - open("t.db", "w").write("data") + with open("t.db", "w") as fp: + fp.write("data") result = runner.invoke(cli.cli, ["publish", "heroku", "t.db"], input="y\n") assert 0 != result.exit_code mock_check_output.assert_has_calls( @@ -54,7 +56,8 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which): }[repr(s)] runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["publish", "heroku", "test.db", "--tar", "gtar"] ) @@ -88,7 +91,8 @@ def test_publish_heroku_plugin_secrets(mock_call, mock_check_output, mock_which) }[repr(s)] runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, [ diff --git a/tests/test_utils.py b/tests/test_utils.py index 56306339..ecef6f7a 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -232,7 +232,8 @@ def test_to_css_class(s, expected): def test_temporary_docker_directory_uses_hard_link(): with tempfile.TemporaryDirectory() as td: os.chdir(td) - open("hello", "w").write("world") + with open("hello", "w") as fp: + fp.write("world") # Default usage of this should use symlink with utils.temporary_docker_directory( files=["hello"], @@ -249,7 +250,8 @@ def test_temporary_docker_directory_uses_hard_link(): secret="secret", ) as temp_docker: hello = os.path.join(temp_docker, "hello") - assert "world" == open(hello).read() + with open(hello) as fp: + assert "world" == fp.read() # It should be a hard link assert 2 == os.stat(hello).st_nlink @@ -260,7 +262,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link): mock_link.side_effect = OSError with tempfile.TemporaryDirectory() as td: os.chdir(td) - open("hello", "w").write("world") + with open("hello", "w") as fp: + fp.write("world") # Default usage of this should use symlink with utils.temporary_docker_directory( files=["hello"], @@ -277,7 +280,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link): secret=None, ) as temp_docker: hello = os.path.join(temp_docker, "hello") - assert "world" == open(hello).read() + with open(hello) as fp: + assert "world" == fp.read() # It should be a copy, not a hard link assert 1 == os.stat(hello).st_nlink @@ -285,7 +289,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link): def test_temporary_docker_directory_quotes_args(): with 
tempfile.TemporaryDirectory() as td: os.chdir(td) - open("hello", "w").write("world") + with open("hello", "w") as fp: + fp.write("world") with utils.temporary_docker_directory( files=["hello"], name="t", @@ -301,7 +306,8 @@ def test_temporary_docker_directory_quotes_args(): secret="secret", ) as temp_docker: df = os.path.join(temp_docker, "Dockerfile") - df_contents = open(df).read() + with open(df) as fp: + df_contents = fp.read() assert "'$PWD'" in df_contents assert "'--$HOME'" in df_contents assert "ENV DATASETTE_SECRET 'secret'" in df_contents diff --git a/update-docs-help.py b/update-docs-help.py index 3a192575..292d1dcd 100644 --- a/update-docs-help.py +++ b/update-docs-help.py @@ -18,7 +18,7 @@ def update_help_includes(): result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88) actual = f"$ datasette {name} --help\n\n{result.output}" actual = actual.replace("Usage: cli ", "Usage: datasette ") - open(docs_path / filename, "w").write(actual) + (docs_path / filename).write_text(actual) if __name__ == "__main__": From c4f1ec7f33fd7d5b93f0f895dafb5351cc3bfc5b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 20 Mar 2021 14:32:23 -0700 Subject: [PATCH 0012/1304] Documentation for Response.asgi_send(), closes #1266 --- docs/internals.rst | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/docs/internals.rst b/docs/internals.rst index e3bb83fd..18032406 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -138,6 +138,28 @@ Each of these responses will use the correct corresponding content-type - ``text Each of the helper methods take optional ``status=`` and ``headers=`` arguments, documented above. +.. _internals_response_asgi_send: + +Returning a response with .asgi_send(send) +------------------------------------------ + + +In most cases you will return ``Response`` objects from your own view functions. You can also use a ``Response`` instance to respond at a lower level via ASGI, for example if you are writing code that uses the :ref:`plugin_asgi_wrapper` hook. + +Create a ``Response`` object and then use ``await response.asgi_send(send)``, passing the ASGI ``send`` function. For example: + +.. code-block:: python + + async def require_authorization(scope, receive, send): + response = Response.text( + "401 Authorization Required", + headers={ + "www-authenticate": 'Basic realm="Datasette", charset="UTF-8"' + }, + status=401, + ) + await response.asgi_send(send) + ..
_internals_response_set_cookie: Setting cookies with response.set_cookie() From 6ad544df5e6bd027a8e27317041e6168aee07459 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 23 Mar 2021 09:19:41 -0700 Subject: [PATCH 0013/1304] Fixed master -> main in a bunch of places, mainly docs --- datasette/cli.py | 2 +- datasette/publish/common.py | 2 +- datasette/templates/patterns.html | 16 ++++++++-------- docs/contributing.rst | 2 +- docs/custom_templates.rst | 2 +- docs/datasette-package-help.txt | 2 +- docs/datasette-publish-cloudrun-help.txt | 2 +- docs/datasette-publish-heroku-help.txt | 2 +- docs/plugin_hooks.rst | 4 ++-- docs/publish.rst | 4 ++-- docs/spatialite.rst | 2 +- tests/fixtures.py | 4 ++-- tests/test_html.py | 9 ++++----- 13 files changed, 26 insertions(+), 27 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 2fa039a0..42b5c115 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -191,7 +191,7 @@ def plugins(all, plugins_dir): help="Path to JSON/YAML file containing metadata to publish", ) @click.option("--extra-options", help="Extra options to pass to datasette serve") -@click.option("--branch", help="Install datasette from a GitHub branch e.g. master") +@click.option("--branch", help="Install datasette from a GitHub branch e.g. main") @click.option( "--template-dir", type=click.Path(exists=True, file_okay=False, dir_okay=True), diff --git a/datasette/publish/common.py b/datasette/publish/common.py index b6570290..29665eb3 100644 --- a/datasette/publish/common.py +++ b/datasette/publish/common.py @@ -19,7 +19,7 @@ def add_common_publish_arguments_and_options(subcommand): "--extra-options", help="Extra options to pass to datasette serve" ), click.option( - "--branch", help="Install datasette from a GitHub branch e.g. master" + "--branch", help="Install datasette from a GitHub branch e.g. main" ), click.option( "--template-dir", diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html index 984c1bf6..3f9b5a16 100644 --- a/datasette/templates/patterns.html +++ b/datasette/templates/patterns.html @@ -70,10 +70,10 @@

[The four hunks in datasette/templates/patterns.html — at lines 70, 118, 177 and 478 — lost their HTML markup during extraction; only the visible link text survives. Each hunk rewrites the footer's "Data license" (Apache License 2.0), "Data source" (tests/fixtures.py) and "About" anchor tags, updating their github.com/simonw/datasette URLs from the master branch to main.]