From 9f0987cb57a82a7d2fe0c679fc909e5b39593ee4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Oct 2020 22:55:10 -0700 Subject: [PATCH 0001/1455] cursor: pointer; on the new menu icons Refs #1064, #1066 --- datasette/static/app.css | 2 ++ 1 file changed, 2 insertions(+) diff --git a/datasette/static/app.css b/datasette/static/app.css index 95457766..a1eb2099 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -180,6 +180,7 @@ h6, .page-header details > summary { list-style: none; display: inline; + cursor: pointer; } .page-header details > summary::-webkit-details-marker { display: none; @@ -341,6 +342,7 @@ details.nav-menu > summary { display: inline; float: right; position: relative; + cursor: pointer; } details.nav-menu > summary::-webkit-details-marker { display: none; From 222f79bb4c6e2aa5426cc5ff25f1b2461e18a300 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 30 Oct 2020 08:41:57 -0700 Subject: [PATCH 0002/1455] debug-menu permission, closes #1068 Also added tests for navigation menu logic. --- datasette/default_menu_links.py | 7 +++++- datasette/default_permissions.py | 2 +- datasette/views/special.py | 3 ++- docs/authentication.rst | 9 ++++++++ tests/test_html.py | 38 ++++++++++++++++++++++++++++++++ tests/test_permissions.py | 3 ++- 6 files changed, 58 insertions(+), 4 deletions(-) diff --git a/datasette/default_menu_links.py b/datasette/default_menu_links.py index 11374fb5..0b135410 100644 --- a/datasette/default_menu_links.py +++ b/datasette/default_menu_links.py @@ -3,7 +3,10 @@ from datasette import hookimpl @hookimpl def menu_links(datasette, actor): - if actor and actor.get("id") == "root": + async def inner(): + if not await datasette.permission_allowed(actor, "debug-menu"): + return [] + return [ {"href": datasette.urls.path("/-/databases"), "label": "Databases"}, { @@ -38,3 +41,5 @@ def menu_links(datasette, actor): {"href": datasette.urls.path("/-/actor"), "label": "Debug actor"}, {"href": datasette.urls.path("/-/patterns"), "label": "Pattern portfolio"}, ] + + return inner diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index ddd45940..9f1d9c62 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -5,7 +5,7 @@ from datasette.utils import actor_matches_allow @hookimpl(tryfirst=True) def permission_allowed(datasette, actor, action, resource): async def inner(): - if action == "permissions-debug": + if action in ("permissions-debug", "debug-menu"): if actor and actor.get("id") == "root": return True elif action == "view-instance": diff --git a/datasette/views/special.py b/datasette/views/special.py index a9fc59b7..397dbc8c 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -96,7 +96,8 @@ class PermissionsDebugView(BaseView): return await self.render( ["permissions_debug.html"], request, - {"permission_checks": reversed(self.ds._permission_checks)}, + # list() avoids error if check is performed during template render: + {"permission_checks": list(reversed(self.ds._permission_checks))}, ) diff --git a/docs/authentication.rst b/docs/authentication.rst index f6c5d801..62ed7e8b 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -522,3 +522,12 @@ permissions-debug Actor is allowed to view the ``/-/permissions`` debug page. Default *deny*. + +.. _permissions_debug_menu: + +debug-menu +---------- + +Controls if the various debug pages are displayed in the navigation menu. + +Default *deny*. 
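The ``debug-menu`` permission documented above defaults to deny, and the ``default_permissions.py`` change in this patch only grants it to the ``root`` actor. A separate plugin could open the debug menu to other actors through the same ``permission_allowed`` hook; the sketch below is illustrative only, not part of this patch, and the ``groups`` key on the actor is an invented example:

    from datasette import hookimpl


    @hookimpl
    def permission_allowed(actor, action):
        # Hypothetical: also allow actors in a "staff" group to see the
        # debug menu links; returning None leaves all other checks alone.
        if action == "debug-menu" and actor and "staff" in (actor.get("groups") or []):
            return True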
diff --git a/tests/test_html.py b/tests/test_html.py index 95b5128a..fed643a9 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1507,3 +1507,41 @@ def test_edit_sql_link_not_shown_if_user_lacks_permission(permission_allowed): assert "Edit SQL" in response.text else: assert "Edit SQL" not in response.text + + +@pytest.mark.parametrize( + "actor_id,should_have_links,should_not_have_links", + [ + (None, None, None), + ("test", None, ["/-/permissions"]), + ("root", ["/-/permissions", "/-/allow-debug", "/-/metadata"], None), + ], +) +def test_navigation_menu_links( + app_client, actor_id, should_have_links, should_not_have_links +): + cookies = {} + if actor_id: + cookies = {"ds_actor": app_client.actor_cookie({"id": actor_id})} + html = app_client.get("/", cookies=cookies).text + soup = Soup(html, "html.parser") + details = soup.find("nav").find("details") + if not actor_id: + # Should not show a menu + assert details is None + return + # They are logged in: should show a menu + assert details is not None + # And a rogout form + assert details.find("form") is not None + if should_have_links: + for link in should_have_links: + assert ( + details.find("a", {"href": link}) is not None + ), "{} expected but missing from nav menu".format(link) + + if should_not_have_links: + for link in should_not_have_links: + assert ( + details.find("a", {"href": link}) is None + ), "{} found but should not have been in nav menu".format(link) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 4d1b09b8..60883eef 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -310,10 +310,11 @@ def test_permissions_checked(app_client, path, permissions): def test_permissions_debug(app_client): app_client.ds._permission_checks.clear() - assert 403 == app_client.get("/-/permissions").status + assert app_client.get("/-/permissions").status == 403 # With the cookie it should work cookie = app_client.actor_cookie({"id": "root"}) response = app_client.get("/-/permissions", cookies={"ds_actor": cookie}) + assert response.status == 200 # Should show one failure and one success soup = Soup(response.body, "html.parser") check_divs = soup.findAll("div", {"class": "check"}) From fcf43589eb6a1f1d0432772a639fd35711c48e0c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 30 Oct 2020 08:53:44 -0700 Subject: [PATCH 0003/1455] Link to homepage in nav on show-json page --- datasette/templates/show_json.html | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/datasette/templates/show_json.html b/datasette/templates/show_json.html index b9e49eb2..fd88756f 100644 --- a/datasette/templates/show_json.html +++ b/datasette/templates/show_json.html @@ -4,6 +4,13 @@ {% block body_class %}show-json{% endblock %} +{% block nav %} +
+    <p class="crumbs">
+        <a href="{{ urls.instance() }}">home</a>
+    </p>
+ {{ super() }} +{% endblock %} + {% block content %}

 <h1>{{ filename }}</h1>

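The ``default_menu_links.py`` change earlier in this series returns an async ``inner()`` function from the ``menu_links`` hook so that it can await a permission check before building the list of links. A plugin can use the same shape; in the sketch below the permission name and link target are invented for illustration:

    from datasette import hookimpl


    @hookimpl
    def menu_links(datasette, actor):
        async def inner():
            # "view-reports" and "/-/reports" are hypothetical examples
            if not await datasette.permission_allowed(actor, "view-reports"):
                return []
            return [{"href": datasette.urls.path("/-/reports"), "label": "Reports"}]

        return inner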
From 81dea4b07ab2b6f4eaaf248307d2b588472054a1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 30 Oct 2020 10:47:18 -0700 Subject: [PATCH 0004/1455] load_template() plugin hook Closes #1042 --- datasette/app.py | 34 ++++++++++++++++++++++++++++++++-- datasette/hookspecs.py | 5 +++++ datasette/templates/base.html | 6 +++++- datasette/views/base.py | 10 +--------- docs/plugin_hooks.rst | 18 ++++++++++++++++++ tests/fixtures.py | 1 + tests/plugins/my_plugin.py | 6 ++++++ tests/test_plugins.py | 5 +++++ 8 files changed, 73 insertions(+), 12 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 8cff6577..4b28e715 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -21,7 +21,7 @@ from pathlib import Path from markupsafe import Markup from itsdangerous import URLSafeSerializer import jinja2 -from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader, escape +from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader from jinja2.environment import Template from jinja2.exceptions import TemplateNotFound import uvicorn @@ -713,12 +713,41 @@ class Datasette: self, templates, context=None, request=None, view_name=None ): context = context or {} + templates_considered = [] if isinstance(templates, Template): template = templates else: if isinstance(templates, str): templates = [templates] - template = self.jinja_env.select_template(templates) + + # Give plugins first chance at loading the template + break_outer = False + plugin_template_source = None + plugin_template_name = None + template_name = None + for template_name in templates: + if break_outer: + break + plugin_template_source = pm.hook.load_template( + template=template_name, + request=request, + datasette=self, + ) + plugin_template_source = await await_me_maybe(plugin_template_source) + if plugin_template_source: + break_outer = True + plugin_template_name = template_name + break + if plugin_template_source is not None: + template = self.jinja_env.from_string(plugin_template_source) + else: + template = self.jinja_env.select_template(templates) + for template_name in templates: + from_plugin = template_name == plugin_template_name + used = from_plugin or template_name == template.name + templates_considered.append( + {"name": template_name, "used": used, "from_plugin": from_plugin} + ) body_scripts = [] # pylint: disable=no-member for extra_script in pm.hook.extra_body_script( @@ -783,6 +812,7 @@ class Datasette: ), "base_url": self.config("base_url"), "csrftoken": request.scope["csrftoken"] if request else lambda: "", + "templates_considered": templates_considered, }, **extra_template_vars, } diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 78070e67..ca84b355 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -49,6 +49,11 @@ def extra_template_vars( "Extra template variables to be made available to the template - can return dict or callable or awaitable" +@hookspec(firstresult=True) +def load_template(template, request, datasette): + "Load the specified template, returning the template code as a string" + + @hookspec def publish_subcommand(publish): "Subcommands for 'datasette publish'" diff --git a/datasette/templates/base.html b/datasette/templates/base.html index d860df37..e29c2ea5 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -79,6 +79,10 @@ document.body.addEventListener('click', (ev) => { {% endfor %} -{% if select_templates %}{% endif %} +{% if templates_considered %} + +{% endif %} diff --git 
a/datasette/views/base.py b/datasette/views/base.py index 6ca78934..ed2631c5 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -8,7 +8,6 @@ import urllib import pint from datasette import __version__ -from datasette.plugins import pm from datasette.database import QueryInterrupted from datasette.utils import ( await_me_maybe, @@ -119,22 +118,15 @@ class BaseView: async def render(self, templates, request, context=None): context = context or {} - template = self.ds.jinja_env.select_template(templates) template_context = { **context, **{ "database_color": self.database_color, - "select_templates": [ - "{}{}".format( - "*" if template_name == template.name else "", template_name - ) - for template_name in templates - ], }, } return Response.html( await self.ds.render_template( - template, template_context, request=request, view_name=self.name + templates, template_context, request=request, view_name=self.name ) ) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 1c28c72e..3c57b6a8 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -271,6 +271,24 @@ You can also return an awaitable function that returns a string. Example: `datasette-cluster-map `_ +.. _plugin_hook_load_template: + +load_template(template, request, datasette) +------------------------------------------- + +``template`` - string + The template that is being rendered, e.g. ``database.html`` + +``request`` - object or None + The current HTTP :ref:`internals_request`. This can be ``None`` if the request object is not available. + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` + +Load the source code for a template from a custom location. Hooks should return a string, or ``None`` if the template is not found. + +Datasette will fall back to serving templates from files on disk if the requested template cannot be loaded by any plugins. + .. _plugin_hook_publish_subcommand: publish_subcommand(publish) diff --git a/tests/fixtures.py b/tests/fixtures.py index 2f8383ef..9f3052b7 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -43,6 +43,7 @@ EXPECTED_PLUGINS = [ "extra_js_urls", "extra_template_vars", "forbidden", + "load_template", "menu_links", "permission_allowed", "prepare_connection", diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 8fc6a1b4..9dbb3f40 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -308,3 +308,9 @@ def table_actions(datasette, database, table, actor): }, {"href": datasette.urls.instance(), "label": "Table: {}".format(table)}, ] + + +@hookimpl +def load_template(template, request): + if template == "show_json.html" and request.args.get("_special"): + return "
<h1>Special show_json: {{ filename }}</h1>
" diff --git a/tests/test_plugins.py b/tests/test_plugins.py index be36a517..f8888798 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -801,3 +801,8 @@ def test_hook_table_actions(app_client): {"label": "Database: fixtures", "href": "/"}, {"label": "Table: facetable", "href": "/"}, ] + + +def test_hook_load_template(app_client): + response = app_client.get("/-/databases?_special=1") + assert response.text == "
<h1>Special show_json: databases.json</h1>
" From a7d9e24ece665eef7c6dfc5f32855c98bd45d335 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 30 Oct 2020 10:52:45 -0700 Subject: [PATCH 0005/1455] Update release process with explicit version, refs #1054 --- docs/contributing.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 39d4c3a2..375f6b89 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -159,12 +159,12 @@ We increment ``patch`` for bugfix releass. :ref:`contributing_alpha_beta` may have an additional ``a0`` or ``b0`` prefix - the integer component will be incremented with each subsequent alpha or beta. -To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__:: +To release a new version, first create a commit that updates the version number in ``datasette/version.py`` and the :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__:: # Update changelog - git commit -m "Release notes for 0.43 - - Refs #581, #770, #729, #706, #751, #706, #744, #771, #773" -a + git commit -m " Release 0.51a1 + + Refs #1056, #1039, #998, #1045, #1033, #1036, #1034, #976, #1057, #1058, #1053, #1064, #1066" -a git push Referencing the issues that are part of the release in the commit message ensures the name of the release shows up on those issue pages, e.g. `here `__. From 0cb29498c796267c5e4a5545ede8058b7ca03a94 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 30 Oct 2020 10:54:47 -0700 Subject: [PATCH 0006/1455] Fixed bug with python tests/fixtures.py https://github.com/simonw/datasette/runs/1333357885?check_suite_focus=true --- datasette/views/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/datasette/views/base.py b/datasette/views/base.py index ed2631c5..813ee452 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -8,6 +8,7 @@ import urllib import pint from datasette import __version__ +from datasette.plugins import pm from datasette.database import QueryInterrupted from datasette.utils import ( await_me_maybe, From 59ab24af6bd9b517b53162fbffac1d0116100e0d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 30 Oct 2020 10:56:02 -0700 Subject: [PATCH 0007/1455] Release 0.51a2 Refs #1068, #1042, #1054 --- datasette/version.py | 2 +- docs/changelog.rst | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 9a89c8e6..2f4bc37e 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.51a1" +__version__ = "0.51a2" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 893a0ee5..262400c8 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,14 @@ Changelog ========= +.. _v0_51_a2: + +0.51a2 (2020-10-30) +------------------- + +- New :ref:`plugin_hook_load_template` plugin hook. (`#1042 `__) +- New :ref:`permissions_debug_menu` permission. (`#1068 `__) + .. 
_v0_51_a1: 0.51a1 (2020-10-29) From 393f1b49d70e9f58bc193c6a28afff4ec9459a2e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 30 Oct 2020 13:12:01 -0700 Subject: [PATCH 0008/1455] Updated nav in pattern portfolio --- datasette/templates/patterns.html | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html index ac9e2e46..62ef1322 100644 --- a/datasette/templates/patterns.html +++ b/datasette/templates/patterns.html @@ -38,11 +38,8 @@ fixtures / attraction_characteristic

-
- testuser · -
- -
+
+ testuser
From a2a709072059c6b3da365df9a332ca744c2079e9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 30 Oct 2020 13:12:57 -0700 Subject: [PATCH 0009/1455] Display messages in right place, closes #1071 --- datasette/templates/base.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/templates/base.html b/datasette/templates/base.html index e29c2ea5..7e9c6c05 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -44,7 +44,6 @@ {% endif %} {% endblock %} -
{% block messages %} {% if show_messages %} {% for message, message_type in show_messages() %} @@ -53,6 +52,7 @@ {% endif %} {% endblock %} +
{% block content %} {% endblock %}
From f0a740ac21cba11ded8717f49d664f9549cd2f83 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 09:21:22 -0700 Subject: [PATCH 0010/1455] Remove load_plugin hook - closes #1073 Refs #1042 This reverts commit 81dea4b07ab2b6f4eaaf248307d2b588472054a1. --- datasette/app.py | 34 ++-------------------------------- datasette/hookspecs.py | 5 ----- datasette/templates/base.html | 6 +----- datasette/views/base.py | 9 ++++++++- docs/plugin_hooks.rst | 18 ------------------ tests/fixtures.py | 1 - tests/plugins/my_plugin.py | 6 ------ tests/test_plugins.py | 5 ----- 8 files changed, 11 insertions(+), 73 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 4b28e715..8cff6577 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -21,7 +21,7 @@ from pathlib import Path from markupsafe import Markup from itsdangerous import URLSafeSerializer import jinja2 -from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader +from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader, escape from jinja2.environment import Template from jinja2.exceptions import TemplateNotFound import uvicorn @@ -713,41 +713,12 @@ class Datasette: self, templates, context=None, request=None, view_name=None ): context = context or {} - templates_considered = [] if isinstance(templates, Template): template = templates else: if isinstance(templates, str): templates = [templates] - - # Give plugins first chance at loading the template - break_outer = False - plugin_template_source = None - plugin_template_name = None - template_name = None - for template_name in templates: - if break_outer: - break - plugin_template_source = pm.hook.load_template( - template=template_name, - request=request, - datasette=self, - ) - plugin_template_source = await await_me_maybe(plugin_template_source) - if plugin_template_source: - break_outer = True - plugin_template_name = template_name - break - if plugin_template_source is not None: - template = self.jinja_env.from_string(plugin_template_source) - else: - template = self.jinja_env.select_template(templates) - for template_name in templates: - from_plugin = template_name == plugin_template_name - used = from_plugin or template_name == template.name - templates_considered.append( - {"name": template_name, "used": used, "from_plugin": from_plugin} - ) + template = self.jinja_env.select_template(templates) body_scripts = [] # pylint: disable=no-member for extra_script in pm.hook.extra_body_script( @@ -812,7 +783,6 @@ class Datasette: ), "base_url": self.config("base_url"), "csrftoken": request.scope["csrftoken"] if request else lambda: "", - "templates_considered": templates_considered, }, **extra_template_vars, } diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index ca84b355..78070e67 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -49,11 +49,6 @@ def extra_template_vars( "Extra template variables to be made available to the template - can return dict or callable or awaitable" -@hookspec(firstresult=True) -def load_template(template, request, datasette): - "Load the specified template, returning the template code as a string" - - @hookspec def publish_subcommand(publish): "Subcommands for 'datasette publish'" diff --git a/datasette/templates/base.html b/datasette/templates/base.html index 7e9c6c05..611ba9f6 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -79,10 +79,6 @@ document.body.addEventListener('click', (ev) => { {% endfor %} -{% if templates_considered %} - 
-{% endif %} +{% if select_templates %}{% endif %} diff --git a/datasette/views/base.py b/datasette/views/base.py index 813ee452..6ca78934 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -119,15 +119,22 @@ class BaseView: async def render(self, templates, request, context=None): context = context or {} + template = self.ds.jinja_env.select_template(templates) template_context = { **context, **{ "database_color": self.database_color, + "select_templates": [ + "{}{}".format( + "*" if template_name == template.name else "", template_name + ) + for template_name in templates + ], }, } return Response.html( await self.ds.render_template( - templates, template_context, request=request, view_name=self.name + template, template_context, request=request, view_name=self.name ) ) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 3c57b6a8..1c28c72e 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -271,24 +271,6 @@ You can also return an awaitable function that returns a string. Example: `datasette-cluster-map `_ -.. _plugin_hook_load_template: - -load_template(template, request, datasette) -------------------------------------------- - -``template`` - string - The template that is being rendered, e.g. ``database.html`` - -``request`` - object or None - The current HTTP :ref:`internals_request`. This can be ``None`` if the request object is not available. - -``datasette`` - :ref:`internals_datasette` - You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` - -Load the source code for a template from a custom location. Hooks should return a string, or ``None`` if the template is not found. - -Datasette will fall back to serving templates from files on disk if the requested template cannot be loaded by any plugins. - .. _plugin_hook_publish_subcommand: publish_subcommand(publish) diff --git a/tests/fixtures.py b/tests/fixtures.py index 9f3052b7..2f8383ef 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -43,7 +43,6 @@ EXPECTED_PLUGINS = [ "extra_js_urls", "extra_template_vars", "forbidden", - "load_template", "menu_links", "permission_allowed", "prepare_connection", diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 9dbb3f40..8fc6a1b4 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -308,9 +308,3 @@ def table_actions(datasette, database, table, actor): }, {"href": datasette.urls.instance(), "label": "Table: {}".format(table)}, ] - - -@hookimpl -def load_template(template, request): - if template == "show_json.html" and request.args.get("_special"): - return "
<h1>Special show_json: {{ filename }}</h1>
" diff --git a/tests/test_plugins.py b/tests/test_plugins.py index f8888798..be36a517 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -801,8 +801,3 @@ def test_hook_table_actions(app_client): {"label": "Database: fixtures", "href": "/"}, {"label": "Table: facetable", "href": "/"}, ] - - -def test_hook_load_template(app_client): - response = app_client.get("/-/databases?_special=1") - assert response.text == "
<h1>Special show_json: databases.json</h1>
" From d6db47f5c19f77e735279762d99720dc644bff48 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 10:25:32 -0700 Subject: [PATCH 0011/1455] Deploy demo plugins to latest.datasette.io, refs #1074 --- .github/workflows/deploy-latest.yml | 3 ++- tests/fixtures.py | 2 +- tests/plugins/my_plugin.py | 23 ++++++++++++++++++++++- tests/test_html.py | 8 +++++--- 4 files changed, 30 insertions(+), 6 deletions(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 8445f1d8..73b97a19 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -31,7 +31,7 @@ jobs: - name: Run tests run: pytest - name: Build fixtures.db - run: python tests/fixtures.py fixtures.db fixtures.json + run: python tests/fixtures.py fixtures.db fixtures.json plugins - name: Build docs.db run: |- cd docs @@ -50,6 +50,7 @@ jobs: gcloud config set project datasette-222320 datasette publish cloudrun fixtures.db \ -m fixtures.json \ + --plugins-dir=plugins \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--config template_debug:1" \ diff --git a/tests/fixtures.py b/tests/fixtures.py index 2f8383ef..5cbfc72f 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -267,7 +267,7 @@ def generate_sortable_rows(num): METADATA = { "title": "Datasette Fixtures", - "description": "An example SQLite database demonstrating Datasette", + "description_html": 'An example SQLite database demonstrating Datasette. Sign in as root user', "license": "Apache License 2.0", "license_url": "https://github.com/simonw/datasette/blob/master/LICENSE", "source": "tests/fixtures.py", diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 8fc6a1b4..b487cdf0 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -177,7 +177,7 @@ def actor_from_request(datasette, request): def asgi_wrapper(): def wrap(app): async def maybe_set_actor_in_scope(scope, recieve, send): - if b"_actor_in_scope" in scope["query_string"]: + if b"_actor_in_scope" in scope.get("query_string", b""): scope = dict(scope, actor={"id": "from-scope"}) print(scope) await app(scope, recieve, send) @@ -237,12 +237,33 @@ def register_routes(): await datasette.render_template("render_message.html", request=request) ) + def login_as_root(datasette, request): + # Mainly for the latest.datasette.io demo + if request.method == "POST": + response = Response.redirect("/") + response.set_cookie( + "ds_actor", datasette.sign({"a": {"id": "root"}}, "actor") + ) + return response + return Response.html( + """ +
+

+ +

+
+ """.format( + request.path, request.scope["csrftoken"]() + ) + ) + return [ (r"/one/$", one), (r"/two/(?P.*)$", two), (r"/three/$", three), (r"/post/$", post), (r"/csrftoken-form/$", csrftoken_form), + (r"/login-as-root$", login_as_root), (r"/not-async/$", not_async), (r"/add-message/$", add_message), (r"/render-message/$", render_message), diff --git a/tests/test_html.py b/tests/test_html.py index fed643a9..7c068085 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -23,7 +23,7 @@ def test_homepage(app_client_two_attached_databases): soup = Soup(response.body, "html.parser") assert "Datasette Fixtures" == soup.find("h1").text assert ( - "An example SQLite database demonstrating Datasette" + "An example SQLite database demonstrating Datasette. Sign in as root user" == soup.select(".metadata-description")[0].text.strip() ) # Should be two attached databases @@ -949,8 +949,9 @@ def test_index_metadata(app_client): assert response.status == 200 soup = Soup(response.body, "html.parser") assert "Datasette Fixtures" == soup.find("h1").text - assert "An example SQLite database demonstrating Datasette" == inner_html( - soup.find("div", {"class": "metadata-description"}) + assert ( + 'An example SQLite database demonstrating Datasette. Sign in as root user' + == inner_html(soup.find("div", {"class": "metadata-description"})) ) assert_footer_links(soup) @@ -1451,6 +1452,7 @@ def test_base_url_config(app_client_base_url_prefix, path): "https://github.com/simonw/datasette", "https://github.com/simonw/datasette/blob/master/LICENSE", "https://github.com/simonw/datasette/blob/master/tests/fixtures.py", + "/login-as-root", # Only used for the latest.datasette.io demo } and not href.startswith("https://plugin-example.com/") ): From b84cfe1b08ec3a881767e30122b7d4c0fa03f9e4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 10:40:09 -0700 Subject: [PATCH 0012/1455] Confirm table actions work on views, closes #1067 --- tests/test_plugins.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index be36a517..6a4ea60a 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -784,7 +784,8 @@ def test_hook_menu_links(app_client): ] -def test_hook_table_actions(app_client): +@pytest.mark.parametrize("table_or_view", ["facetable", "simple_view"]) +def test_hook_table_actions(app_client, table_or_view): def get_table_actions_links(html): soup = Soup(html, "html.parser") details = soup.find("details", {"class": "table-menu-links"}) @@ -792,12 +793,12 @@ def test_hook_table_actions(app_client): return [] return [{"label": a.text, "href": a["href"]} for a in details.select("a")] - response = app_client.get("/fixtures/facetable") + response = app_client.get("/fixtures/{}".format(table_or_view)) assert get_table_actions_links(response.text) == [] - response_2 = app_client.get("/fixtures/facetable?_bot=1") + response_2 = app_client.get("/fixtures/{}?_bot=1".format(table_or_view)) assert get_table_actions_links(response_2.text) == [ {"label": "From async", "href": "/"}, {"label": "Database: fixtures", "href": "/"}, - {"label": "Table: facetable", "href": "/"}, + {"label": "Table: {}".format(table_or_view), "href": "/"}, ] From 11eb1e026f3d84cb771f8d6e204939cbaee130cd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 11:16:28 -0700 Subject: [PATCH 0013/1455] datasette.urls.table(..., format="json"), closes #1035 Also improved tests for datasette.urls and added format= to some other methods --- 
datasette/app.py | 42 +++++++++++++++++++++++++----------- datasette/utils/__init__.py | 10 +++++---- datasette/views/base.py | 10 +++++---- datasette/views/database.py | 4 ++-- docs/internals.rst | 14 ++++++------ tests/test_internals_urls.py | 42 +++++++++++++++++++++++++++++------- tests/test_utils.py | 10 ++++++--- 7 files changed, 92 insertions(+), 40 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 8cff6577..3a06d911 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -53,6 +53,7 @@ from .utils import ( format_bytes, module_from_path, parse_metadata, + path_with_format, resolve_env_secrets, sqlite3, to_css_class, @@ -1285,13 +1286,16 @@ class Urls: def __init__(self, ds): self.ds = ds - def path(self, path): + def path(self, path, format=None): if path.startswith("/"): path = path[1:] - return self.ds.config("base_url") + path + path = self.ds.config("base_url") + path + if format is not None: + path = path_with_format(path=path, format=format) + return path - def instance(self): - return self.path("") + def instance(self, format=None): + return self.path("", format=format) def static(self, path): return self.path("-/static/{}".format(path)) @@ -1302,21 +1306,33 @@ class Urls: def logout(self): return self.path("-/logout") - def database(self, database): + def database(self, database, format=None): db = self.ds.databases[database] if self.ds.config("hash_urls") and db.hash: - return self.path("{}-{}".format(database, db.hash[:HASH_LENGTH])) + path = self.path( + "{}-{}".format(database, db.hash[:HASH_LENGTH]), format=format + ) else: - return self.path(database) + path = self.path(database, format=format) + return path - def table(self, database, table): - return "{}/{}".format(self.database(database), urllib.parse.quote_plus(table)) + def table(self, database, table, format=None): + path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(table)) + if format is not None: + path = path_with_format(path=path, format=format) + return path - def query(self, database, query): - return "{}/{}".format(self.database(database), urllib.parse.quote_plus(query)) + def query(self, database, query, format=None): + path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(query)) + if format is not None: + path = path_with_format(path=path, format=format) + return path - def row(self, database, table, row_path): - return "{}/{}".format(self.table(database, table), row_path) + def row(self, database, table, row_path, format=None): + path = "{}/{}".format(self.table(database, table), row_path) + if format is not None: + path = path_with_format(path=path, format=format) + return path def row_blob(self, database, table, row_path, column): return self.table(database, table) + "/{}.blob?_blob_column={}".format( diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 33decbfc..bf361784 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -678,9 +678,11 @@ async def resolve_table_and_format( return table_and_format, None -def path_with_format(request, format, extra_qs=None, replace_format=None): +def path_with_format( + *, request=None, path=None, format=None, extra_qs=None, replace_format=None +): qs = extra_qs or {} - path = request.path + path = request.path if request else path if replace_format and path.endswith(".{}".format(replace_format)): path = path[: -(1 + len(replace_format))] if "." 
in path: @@ -689,11 +691,11 @@ def path_with_format(request, format, extra_qs=None, replace_format=None): path = "{}.{}".format(path, format) if qs: extra = urllib.parse.urlencode(sorted(qs.items())) - if request.query_string: + if request and request.query_string: path = "{}?{}&{}".format(path, request.query_string, extra) else: path = "{}?{}".format(path, extra) - elif request.query_string: + elif request and request.query_string: path = "{}?{}".format(path, request.query_string) return path diff --git a/datasette/views/base.py b/datasette/views/base.py index 6ca78934..430489c1 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -333,8 +333,8 @@ class DataView(BaseView): cell = self.ds.absolute_url( request, path_with_format( - request, - "blob", + request=request, + format="blob", extra_qs={ "_blob_column": column, "_blob_hash": hashlib.sha256( @@ -535,11 +535,13 @@ class DataView(BaseView): it_can_render = await await_me_maybe(it_can_render) if it_can_render: renderers[key] = path_with_format( - request, key, {**url_labels_extra} + request=request, format=key, extra_qs={**url_labels_extra} ) url_csv_args = {"_size": "max", **url_labels_extra} - url_csv = path_with_format(request, "csv", url_csv_args) + url_csv = path_with_format( + request=request, format="csv", extra_qs=url_csv_args + ) url_csv_path = url_csv.split("?")[0] context = { **data, diff --git a/datasette/views/database.py b/datasette/views/database.py index 8b9e8833..3ed60f4e 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -346,8 +346,8 @@ class QueryView(DataView): ) elif isinstance(display_value, bytes): blob_url = path_with_format( - request, - "blob", + request=request, + format="blob", extra_qs={ "_blob_column": column, "_blob_hash": hashlib.sha256( diff --git a/docs/internals.rst b/docs/internals.rst index 4ebeb983..ee7fe6e4 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -396,10 +396,10 @@ datasette.urls The ``datasette.urls`` object contains methods for building URLs to pages within Datasette. Plugins should use this to link to pages, since these methods take into account any :ref:`config_base_url` configuration setting that might be in effect. -``datasette.urls.instance()`` - Returns the URL to the Datasette instance root page. This is usually ``"/"`` +``datasette.urls.instance(format=None)`` + Returns the URL to the Datasette instance root page. This is usually ``"/"``. -``datasette.urls.path(path)`` +``datasette.urls.path(path, format=None)`` Takes a path and returns the full path, taking ``base_url`` into account. 
For example, ``datasette.urls.path("-/logout")`` will return the path to the logout page, which will be ``"/-/logout"`` by default or ``/prefix-path/-/logout`` if ``base_url`` is set to ``/prefix-path/`` @@ -423,13 +423,13 @@ The ``datasette.urls`` object contains methods for building URLs to pages within ``datasette.url.static_plugins("datasette_cluster_map", "datasette-cluster-map.js")`` would return ``"/-/static-plugins/datasette_cluster_map/datasette-cluster-map.js"`` -``datasette.urls.database(database_name)`` +``datasette.urls.database(database_name, format=None)`` Returns the URL to a database page, for example ``"/fixtures"`` -``datasette.urls.table(database_name, table_name)`` +``datasette.urls.table(database_name, table_name, format=None)`` Returns the URL to a table page, for example ``"/fixtures/facetable"`` -``datasette.urls.query(database_name, query_name)`` +``datasette.urls.query(database_name, query_name, format=None)`` Returns the URL to a query page, for example ``"/fixtures/pragma_cache_size"`` These functions can be accessed via the ``{{ urls }}`` object in Datasette templates, for example: @@ -441,6 +441,8 @@ These functions can be accessed via the ``{{ urls }}`` object in Datasette templ facetable table pragma_cache_size query +Use the ``format="json"`` (or ``"csv"`` or other formats supported by plugins) arguments to get back URLs to the JSON representation. This is usually the path with ``.json`` added on the end, but it may use ``?_format=json`` in cases where the path already includes ``.json``, for example a URL to a table named ``table.json``. + .. _internals_database: Database class diff --git a/tests/test_internals_urls.py b/tests/test_internals_urls.py index 6498ee43..005903df 100644 --- a/tests/test_internals_urls.py +++ b/tests/test_internals_urls.py @@ -82,18 +82,44 @@ def test_logout(ds, base_url, expected): @pytest.mark.parametrize( - "base_url,expected", + "base_url,format,expected", [ - ("/", "/:memory:"), - ("/prefix/", "/prefix/:memory:"), + ("/", None, "/:memory:"), + ("/prefix/", None, "/prefix/:memory:"), + ("/", "json", "/:memory:.json"), ], ) -def test_database(ds, base_url, expected): +def test_database(ds, base_url, format, expected): ds._config["base_url"] = base_url - assert ds.urls.database(":memory:") == expected - # Do table and query while we are here - assert ds.urls.table(":memory:", "name") == expected + "/name" - assert ds.urls.query(":memory:", "name") == expected + "/name" + assert ds.urls.database(":memory:", format=format) == expected + + +@pytest.mark.parametrize( + "base_url,name,format,expected", + [ + ("/", "name", None, "/:memory:/name"), + ("/prefix/", "name", None, "/prefix/:memory:/name"), + ("/", "name", "json", "/:memory:/name.json"), + ("/", "name.json", "json", "/:memory:/name.json?_format=json"), + ], +) +def test_table_and_query(ds, base_url, name, format, expected): + ds._config["base_url"] = base_url + assert ds.urls.table(":memory:", name, format=format) == expected + assert ds.urls.query(":memory:", name, format=format) == expected + + +@pytest.mark.parametrize( + "base_url,format,expected", + [ + ("/", None, "/:memory:/facetable/1"), + ("/prefix/", None, "/prefix/:memory:/facetable/1"), + ("/", "json", "/:memory:/facetable/1.json"), + ], +) +def test_row(ds, base_url, format, expected): + ds._config["base_url"] = base_url + assert ds.urls.row(":memory:", "facetable", "1", format=format) == expected @pytest.mark.parametrize("base_url", ["/", "/prefix/"]) diff --git a/tests/test_utils.py b/tests/test_utils.py 
index bae3b685..2d2ff52d 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -382,15 +382,19 @@ def test_table_columns(): ) def test_path_with_format(path, format, extra_qs, expected): request = Request.fake(path) - actual = utils.path_with_format(request, format, extra_qs) + actual = utils.path_with_format(request=request, format=format, extra_qs=extra_qs) assert expected == actual def test_path_with_format_replace_format(): request = Request.fake("/foo/bar.csv") - assert utils.path_with_format(request, "blob") == "/foo/bar.csv?_format=blob" assert ( - utils.path_with_format(request, "blob", replace_format="csv") == "/foo/bar.blob" + utils.path_with_format(request=request, format="blob") + == "/foo/bar.csv?_format=blob" + ) + assert ( + utils.path_with_format(request=request, format="blob", replace_format="csv") + == "/foo/bar.blob" ) From c1d386ef67786f07d69e566b8e054e92949a844f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 11:43:36 -0700 Subject: [PATCH 0014/1455] Refactor Urls into url_builder.py Refs #1026 --- datasette/app.py | 60 +--------------------------------------- datasette/url_builder.py | 60 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+), 59 deletions(-) create mode 100644 datasette/url_builder.py diff --git a/datasette/app.py b/datasette/app.py index 3a06d911..860f4563 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -40,6 +40,7 @@ from .views.special import ( ) from .views.table import RowView, TableView from .renderer import json_renderer +from .url_builder import Urls from .database import Database, QueryInterrupted from .utils import ( @@ -53,7 +54,6 @@ from .utils import ( format_bytes, module_from_path, parse_metadata, - path_with_format, resolve_env_secrets, sqlite3, to_css_class, @@ -1280,61 +1280,3 @@ class DatasetteClient: async def request(self, method, path, **kwargs): async with httpx.AsyncClient(app=self.app) as client: return await client.request(method, self._fix(path), **kwargs) - - -class Urls: - def __init__(self, ds): - self.ds = ds - - def path(self, path, format=None): - if path.startswith("/"): - path = path[1:] - path = self.ds.config("base_url") + path - if format is not None: - path = path_with_format(path=path, format=format) - return path - - def instance(self, format=None): - return self.path("", format=format) - - def static(self, path): - return self.path("-/static/{}".format(path)) - - def static_plugins(self, plugin, path): - return self.path("-/static-plugins/{}/{}".format(plugin, path)) - - def logout(self): - return self.path("-/logout") - - def database(self, database, format=None): - db = self.ds.databases[database] - if self.ds.config("hash_urls") and db.hash: - path = self.path( - "{}-{}".format(database, db.hash[:HASH_LENGTH]), format=format - ) - else: - path = self.path(database, format=format) - return path - - def table(self, database, table, format=None): - path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(table)) - if format is not None: - path = path_with_format(path=path, format=format) - return path - - def query(self, database, query, format=None): - path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(query)) - if format is not None: - path = path_with_format(path=path, format=format) - return path - - def row(self, database, table, row_path, format=None): - path = "{}/{}".format(self.table(database, table), row_path) - if format is not None: - path = path_with_format(path=path, format=format) - return path - - def 
row_blob(self, database, table, row_path, column): - return self.table(database, table) + "/{}.blob?_blob_column={}".format( - row_path, urllib.parse.quote_plus(column) - ) diff --git a/datasette/url_builder.py b/datasette/url_builder.py new file mode 100644 index 00000000..c1bf629b --- /dev/null +++ b/datasette/url_builder.py @@ -0,0 +1,60 @@ +from .utils import path_with_format, HASH_LENGTH +import urllib + + +class Urls: + def __init__(self, ds): + self.ds = ds + + def path(self, path, format=None): + if path.startswith("/"): + path = path[1:] + path = self.ds.config("base_url") + path + if format is not None: + path = path_with_format(path=path, format=format) + return path + + def instance(self, format=None): + return self.path("", format=format) + + def static(self, path): + return self.path("-/static/{}".format(path)) + + def static_plugins(self, plugin, path): + return self.path("-/static-plugins/{}/{}".format(plugin, path)) + + def logout(self): + return self.path("-/logout") + + def database(self, database, format=None): + db = self.ds.databases[database] + if self.ds.config("hash_urls") and db.hash: + path = self.path( + "{}-{}".format(database, db.hash[:HASH_LENGTH]), format=format + ) + else: + path = self.path(database, format=format) + return path + + def table(self, database, table, format=None): + path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(table)) + if format is not None: + path = path_with_format(path=path, format=format) + return path + + def query(self, database, query, format=None): + path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(query)) + if format is not None: + path = path_with_format(path=path, format=format) + return path + + def row(self, database, table, row_path, format=None): + path = "{}/{}".format(self.table(database, table), row_path) + if format is not None: + path = path_with_format(path=path, format=format) + return path + + def row_blob(self, database, table, row_path, column): + return self.table(database, table) + "/{}.blob?_blob_column={}".format( + row_path, urllib.parse.quote_plus(column) + ) From 7a67bc7a569509d65b3a8661e0ad2c65f0b09166 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 12:11:40 -0700 Subject: [PATCH 0015/1455] datasette.urls methods will not apply base_url prefix twice, refs #1026 --- datasette/url_builder.py | 17 +++++++------- datasette/utils/__init__.py | 23 ++++++++++++++++++ docs/internals.rst | 2 ++ tests/test_internals_urls.py | 45 ++++++++++++++++++++++++++++-------- 4 files changed, 70 insertions(+), 17 deletions(-) diff --git a/datasette/url_builder.py b/datasette/url_builder.py index c1bf629b..bcc4f39d 100644 --- a/datasette/url_builder.py +++ b/datasette/url_builder.py @@ -1,4 +1,4 @@ -from .utils import path_with_format, HASH_LENGTH +from .utils import path_with_format, HASH_LENGTH, PrefixedUrlString import urllib @@ -7,12 +7,13 @@ class Urls: self.ds = ds def path(self, path, format=None): - if path.startswith("/"): - path = path[1:] - path = self.ds.config("base_url") + path + if not isinstance(path, PrefixedUrlString): + if path.startswith("/"): + path = path[1:] + path = self.ds.config("base_url") + path if format is not None: path = path_with_format(path=path, format=format) - return path + return PrefixedUrlString(path) def instance(self, format=None): return self.path("", format=format) @@ -40,19 +41,19 @@ class Urls: path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(table)) if format is not None: path = 
path_with_format(path=path, format=format) - return path + return PrefixedUrlString(path) def query(self, database, query, format=None): path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(query)) if format is not None: path = path_with_format(path=path, format=format) - return path + return PrefixedUrlString(path) def row(self, database, table, row_path, format=None): path = "{}/{}".format(self.table(database, table), row_path) if format is not None: path = path_with_format(path=path, format=format) - return path + return PrefixedUrlString(path) def row_blob(self, database, table, row_path, column): return self.table(database, table) + "/{}.blob?_blob_column={}".format( diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index bf361784..21fa944c 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1010,3 +1010,26 @@ async def initial_path_for_datasette(datasette): else: path = datasette.urls.instance() return path + + +class PrefixedUrlString(str): + def __add__(self, other): + return type(self)(super().__add__(other)) + + def __getattribute__(self, name): + if name in dir(str): + + def method(self, *args, **kwargs): + value = getattr(super(), name)(*args, **kwargs) + if isinstance(value, str): + return type(self)(value) + elif isinstance(value, list): + return [type(self)(i) for i in value] + elif isinstance(value, tuple): + return tuple(type(self)(i) for i in value) + else: + return value + + return method.__get__(self) + else: + return super().__getattribute__(name) diff --git a/docs/internals.rst b/docs/internals.rst index ee7fe6e4..8594e36a 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -443,6 +443,8 @@ These functions can be accessed via the ``{{ urls }}`` object in Datasette templ Use the ``format="json"`` (or ``"csv"`` or other formats supported by plugins) arguments to get back URLs to the JSON representation. This is usually the path with ``.json`` added on the end, but it may use ``?_format=json`` in cases where the path already includes ``.json``, for example a URL to a table named ``table.json``. +These methods each return a ``datasette.utils.PrefixedUrlString`` object, which is a subclass of the Python ``str`` type. This allows the logic that considers the ``base_url`` setting to detect if that prefix has already been applied to the path. + .. 
_internals_database: Database class diff --git a/tests/test_internals_urls.py b/tests/test_internals_urls.py index 005903df..a56d735b 100644 --- a/tests/test_internals_urls.py +++ b/tests/test_internals_urls.py @@ -1,4 +1,5 @@ from datasette.app import Datasette +from datasette.utils import PrefixedUrlString from .fixtures import app_client_with_hash import pytest @@ -20,7 +21,17 @@ def ds(): ) def test_path(ds, base_url, path, expected): ds._config["base_url"] = base_url - assert ds.urls.path(path) == expected + actual = ds.urls.path(path) + assert actual == expected + assert isinstance(actual, PrefixedUrlString) + + +def test_path_applied_twice_does_not_double_prefix(ds): + ds._config["base_url"] = "/prefix/" + path = ds.urls.path("/") + assert path == "/prefix/" + path = ds.urls.path(path) + assert path == "/prefix/" @pytest.mark.parametrize( @@ -32,7 +43,9 @@ def test_path(ds, base_url, path, expected): ) def test_instance(ds, base_url, expected): ds._config["base_url"] = base_url - assert ds.urls.instance() == expected + actual = ds.urls.instance() + assert actual == expected + assert isinstance(actual, PrefixedUrlString) @pytest.mark.parametrize( @@ -44,7 +57,9 @@ def test_instance(ds, base_url, expected): ) def test_static(ds, base_url, file, expected): ds._config["base_url"] = base_url - assert ds.urls.static(file) == expected + actual = ds.urls.static(file) + assert actual == expected + assert isinstance(actual, PrefixedUrlString) @pytest.mark.parametrize( @@ -66,7 +81,9 @@ def test_static(ds, base_url, file, expected): ) def test_static_plugins(ds, base_url, plugin, file, expected): ds._config["base_url"] = base_url - assert ds.urls.static_plugins(plugin, file) == expected + actual = ds.urls.static_plugins(plugin, file) + assert actual == expected + assert isinstance(actual, PrefixedUrlString) @pytest.mark.parametrize( @@ -78,7 +95,9 @@ def test_static_plugins(ds, base_url, plugin, file, expected): ) def test_logout(ds, base_url, expected): ds._config["base_url"] = base_url - assert ds.urls.logout() == expected + actual = ds.urls.logout() + assert actual == expected + assert isinstance(actual, PrefixedUrlString) @pytest.mark.parametrize( @@ -91,7 +110,9 @@ def test_logout(ds, base_url, expected): ) def test_database(ds, base_url, format, expected): ds._config["base_url"] = base_url - assert ds.urls.database(":memory:", format=format) == expected + actual = ds.urls.database(":memory:", format=format) + assert actual == expected + assert isinstance(actual, PrefixedUrlString) @pytest.mark.parametrize( @@ -105,8 +126,12 @@ def test_database(ds, base_url, format, expected): ) def test_table_and_query(ds, base_url, name, format, expected): ds._config["base_url"] = base_url - assert ds.urls.table(":memory:", name, format=format) == expected - assert ds.urls.query(":memory:", name, format=format) == expected + actual1 = ds.urls.table(":memory:", name, format=format) + assert actual1 == expected + assert isinstance(actual1, PrefixedUrlString) + actual2 = ds.urls.query(":memory:", name, format=format) + assert actual2 == expected + assert isinstance(actual2, PrefixedUrlString) @pytest.mark.parametrize( @@ -119,7 +144,9 @@ def test_table_and_query(ds, base_url, name, format, expected): ) def test_row(ds, base_url, format, expected): ds._config["base_url"] = base_url - assert ds.urls.row(":memory:", "facetable", "1", format=format) == expected + actual = ds.urls.row(":memory:", "facetable", "1", format=format) + assert actual == expected + assert isinstance(actual, PrefixedUrlString) 
@pytest.mark.parametrize("base_url", ["/", "/prefix/"]) From 84bc7244c106ab6175b8315a2d917cf29ea53c4d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 12:29:42 -0700 Subject: [PATCH 0016/1455] datasette.client now applies base_url, closes #1026 --- datasette/app.py | 4 +++ datasette/utils/asgi.py | 4 +-- docs/internals.rst | 12 +++++++ tests/plugins/my_plugin.py | 4 +++ tests/test_internals_datasette_client.py | 45 ++++++++++++++++++------ 5 files changed, 56 insertions(+), 13 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 860f4563..8db650e9 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -44,6 +44,7 @@ from .url_builder import Urls from .database import Database, QueryInterrupted from .utils import ( + PrefixedUrlString, async_call_with_supported_arguments, await_me_maybe, call_with_supported_arguments, @@ -1242,9 +1243,12 @@ class NotFoundExplicit(NotFound): class DatasetteClient: def __init__(self, ds): + self.ds = ds self.app = ds.app() def _fix(self, path): + if not isinstance(path, PrefixedUrlString): + path = self.ds.urls.path(path) if path.startswith("/"): path = "http://localhost{}".format(path) return path diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index f438f829..e4c8ce5c 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -387,9 +387,9 @@ class Response: ) @classmethod - def json(cls, body, status=200, headers=None): + def json(cls, body, status=200, headers=None, default=None): return cls( - json.dumps(body), + json.dumps(body, default=default), status=status, headers=headers, content_type="application/json; charset=utf-8", diff --git a/docs/internals.rst b/docs/internals.rst index 8594e36a..d3d0be8e 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -387,6 +387,18 @@ It offers the following methods: ``await datasette.client.request(method, path, **kwargs)`` - returns HTTPX Response Execute an internal request with the given HTTP method against that path. +These methods can be used with :ref:`internals_datasette_urls` - for example: + +.. code-block:: python + + table_json = ( + await datasette.client.get( + datasette.urls.table("fixtures", "facetable", format="json") + ) + ).json() + +``datasette.client`` methods automatically take the current :ref:`config_base_url` setting into account, whether or not you use the ``datasette.urls`` family of methods to construct the path. + For documentation on available ``**kwargs`` options and the shape of the HTTPX Response object refer to the `HTTPX Async documentation `__. .. 
_internals_datasette_urls: diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index b487cdf0..767c363d 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -257,6 +257,9 @@ def register_routes(): ) ) + def asgi_scope(scope): + return Response.json(scope, default=repr) + return [ (r"/one/$", one), (r"/two/(?P.*)$", two), @@ -267,6 +270,7 @@ def register_routes(): (r"/not-async/$", not_async), (r"/add-message/$", add_message), (r"/render-message/$", render_message), + (r"/asgi-scope$", asgi_scope), ] diff --git a/tests/test_internals_datasette_client.py b/tests/test_internals_datasette_client.py index d73fbb06..0b1c5f0e 100644 --- a/tests/test_internals_datasette_client.py +++ b/tests/test_internals_datasette_client.py @@ -31,14 +31,37 @@ async def test_client_methods(datasette, method, path, expected_status): @pytest.mark.asyncio -async def test_client_post(datasette): - response = await datasette.client.post( - "/-/messages", - data={ - "message": "A message", - }, - allow_redirects=False, - ) - assert isinstance(response, httpx.Response) - assert response.status_code == 302 - assert "ds_messages" in response.cookies +@pytest.mark.parametrize("prefix", [None, "/prefix/"]) +async def test_client_post(datasette, prefix): + original_base_url = datasette._config["base_url"] + try: + if prefix is not None: + datasette._config["base_url"] = prefix + response = await datasette.client.post( + "/-/messages", + data={ + "message": "A message", + }, + allow_redirects=False, + ) + assert isinstance(response, httpx.Response) + assert response.status_code == 302 + assert "ds_messages" in response.cookies + finally: + datasette._config["base_url"] = original_base_url + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "prefix,expected_path", [(None, "/asgi-scope"), ("/prefix/", "/prefix/asgi-scope")] +) +async def test_client_path(datasette, prefix, expected_path): + original_base_url = datasette._config["base_url"] + try: + if prefix is not None: + datasette._config["base_url"] = prefix + response = await datasette.client.get("/asgi-scope") + path = response.json()["path"] + assert path == expected_path + finally: + datasette._config["base_url"] = original_base_url From bf18b9ba175a7b25fb8b765847397dd6efb8bb7b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 12:47:42 -0700 Subject: [PATCH 0017/1455] Stop using plugin-example.com, closes #1074 --- tests/plugins/my_plugin.py | 6 +++--- tests/plugins/my_plugin_2.py | 4 ++-- tests/test_html.py | 2 +- tests/test_plugins.py | 18 +++++++++--------- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 767c363d..cd2c8e23 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -29,7 +29,7 @@ def prepare_connection(conn, database, datasette): def extra_css_urls(template, database, table, view_name, columns, request, datasette): async def inner(): return [ - "https://plugin-example.com/{}/extra-css-urls-demo.css".format( + "https://plugin-example.datasette.io/{}/extra-css-urls-demo.css".format( base64.b64encode( json.dumps( { @@ -57,10 +57,10 @@ def extra_css_urls(template, database, table, view_name, columns, request, datas def extra_js_urls(): return [ { - "url": "https://plugin-example.com/jquery.js", + "url": "https://plugin-example.datasette.io/jquery.js", "sri": "SRIHASH", }, - "https://plugin-example.com/plugin1.js", + "https://plugin-example.datasette.io/plugin1.js", ] diff --git 
a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index 7d8095ed..6cd222e6 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -8,10 +8,10 @@ import json def extra_js_urls(): return [ { - "url": "https://plugin-example.com/jquery.js", + "url": "https://plugin-example.datasette.io/jquery.js", "sri": "SRIHASH", }, - "https://plugin-example.com/plugin2.js", + "https://plugin-example.datasette.io/plugin2.js", ] diff --git a/tests/test_html.py b/tests/test_html.py index 7c068085..79b6138d 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1454,7 +1454,7 @@ def test_base_url_config(app_client_base_url_prefix, path): "https://github.com/simonw/datasette/blob/master/tests/fixtures.py", "/login-as-root", # Only used for the latest.datasette.io demo } - and not href.startswith("https://plugin-example.com/") + and not href.startswith("https://plugin-example.datasette.io/") ): # If this has been made absolute it may start http://localhost/ if href.startswith("http://localhost/"): diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 6a4ea60a..5e3d6dc3 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -125,7 +125,7 @@ def test_hook_extra_js_urls(app_client): == { "integrity": "SRIHASH", "crossorigin": "anonymous", - "src": "https://plugin-example.com/jquery.js", + "src": "https://plugin-example.datasette.io/jquery.js", } ] @@ -135,7 +135,7 @@ def test_plugins_with_duplicate_js_urls(app_client): response = app_client.get("/fixtures") # This test is a little tricky, as if the user has any other plugins in # their current virtual environment those may affect what comes back too. - # What matters is that https://plugin-example.com/jquery.js is only there once + # What matters is that https://plugin-example.datasette.io/jquery.js is only there once # and it comes before plugin1.js and plugin2.js which could be in either # order scripts = Soup(response.body, "html.parser").findAll("script") @@ -143,16 +143,16 @@ def test_plugins_with_duplicate_js_urls(app_client): # No duplicates allowed: assert len(srcs) == len(set(srcs)) # jquery.js loaded once: - assert 1 == srcs.count("https://plugin-example.com/jquery.js") + assert 1 == srcs.count("https://plugin-example.datasette.io/jquery.js") # plugin1.js and plugin2.js are both there: - assert 1 == srcs.count("https://plugin-example.com/plugin1.js") - assert 1 == srcs.count("https://plugin-example.com/plugin2.js") + assert 1 == srcs.count("https://plugin-example.datasette.io/plugin1.js") + assert 1 == srcs.count("https://plugin-example.datasette.io/plugin2.js") # jquery comes before them both - assert srcs.index("https://plugin-example.com/jquery.js") < srcs.index( - "https://plugin-example.com/plugin1.js" + assert srcs.index("https://plugin-example.datasette.io/jquery.js") < srcs.index( + "https://plugin-example.datasette.io/plugin1.js" ) - assert srcs.index("https://plugin-example.com/jquery.js") < srcs.index( - "https://plugin-example.com/plugin2.js" + assert srcs.index("https://plugin-example.datasette.io/jquery.js") < srcs.index( + "https://plugin-example.datasette.io/plugin2.js" ) From a4ca26a2659d21779adf625183061d8879954c15 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 13:35:47 -0700 Subject: [PATCH 0018/1455] Address PrefixedUrlString bug in #1075 --- datasette/app.py | 3 +++ datasette/utils/__init__.py | 7 +++++-- tests/fixtures.py | 1 + tests/test_api.py | 1 + tests/test_cli_serve_get.py | 1 + tests/test_html.py | 5 +++++ 6 files changed, 16 insertions(+), 
2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 8db650e9..1271e52f 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -822,6 +822,9 @@ class Datasette: if url in seen_urls: continue seen_urls.add(url) + if url.startswith("/"): + # Take base_url into account: + url = self.urls.path(url) if sri: output.append({"url": url, "sri": sri}) else: diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 21fa944c..a7d96401 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1,8 +1,8 @@ import asyncio from contextlib import contextmanager +import click from collections import OrderedDict, namedtuple import base64 -import click import hashlib import inspect import itertools @@ -1016,8 +1016,11 @@ class PrefixedUrlString(str): def __add__(self, other): return type(self)(super().__add__(other)) + def __str__(self): + return super().__str__() + def __getattribute__(self, name): - if name in dir(str): + if not name.startswith("__") and name in dir(str): def method(self, *args, **kwargs): value = getattr(super(), name)(*args, **kwargs) diff --git a/tests/fixtures.py b/tests/fixtures.py index 5cbfc72f..d2ac661d 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -274,6 +274,7 @@ METADATA = { "source_url": "https://github.com/simonw/datasette/blob/master/tests/fixtures.py", "about": "About Datasette", "about_url": "https://github.com/simonw/datasette", + "extra_css_urls": ["/static/extra-css-urls.css"], "plugins": { "name-of-plugin": {"depth": "root"}, "env-plugin": {"foo": {"$env": "FOO_ENV"}}, diff --git a/tests/test_api.py b/tests/test_api.py index 5e9c1a0a..18e4b9e4 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1852,6 +1852,7 @@ def test_paginate_using_link_header(app_client, qs): num_pages = 0 while path: response = app_client.get(path) + assert response.status == 200 num_pages += 1 link = response.headers.get("link") if link: diff --git a/tests/test_cli_serve_get.py b/tests/test_cli_serve_get.py index 8f1665a9..39236dd8 100644 --- a/tests/test_cli_serve_get.py +++ b/tests/test_cli_serve_get.py @@ -61,6 +61,7 @@ def test_serve_with_get_exit_code_for_error(tmp_path_factory): "--get", "/this-is-404", ], + catch_exceptions=False, ) assert result.exit_code == 1 assert "404" in result.output diff --git a/tests/test_html.py b/tests/test_html.py index 79b6138d..006c223d 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1466,6 +1466,11 @@ def test_base_url_config(app_client_base_url_prefix, path): } +def test_base_url_affects_metadata_extra_css_urls(app_client_base_url_prefix): + html = app_client_base_url_prefix.get("/").text + assert '' in html + + @pytest.mark.parametrize( "path,expected", [ From 6bb41c4b33dbd1015c181cd43465b645298c3c88 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 13:48:39 -0700 Subject: [PATCH 0019/1455] Fix for test_paginate_using_link_header --- tests/test_api.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_api.py b/tests/test_api.py index 18e4b9e4..3365bf57 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1859,6 +1859,7 @@ def test_paginate_using_link_header(app_client, qs): assert link.startswith("<") assert link.endswith('>; rel="next"') path = link[1:].split(">")[0] + path = path.replace("http://localhost", "") else: path = None assert num_pages == 21 From 1fe15f4dc110622754d9dbeafe0f93c79fde9022 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 14:13:57 -0700 Subject: [PATCH 0020/1455] Docs: Running Datasette 
behind a proxy, closes #1027 --- docs/deploying.rst | 54 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/docs/deploying.rst b/docs/deploying.rst index b0647b2f..e777f296 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -110,3 +110,57 @@ If you want to build SQLite files or download them as part of the deployment pro wget https://fivethirtyeight.datasettes.com/fivethirtyeight.db `simonw/buildpack-datasette-demo `__ is an example GitHub repository showing a simple Datasette configuration that can be deployed to a buildpack-supporting host. + +.. _deploying_proxy: + +Running Datasette behind a proxy +================================ + +You may wish to run Datasette behind an Apache or nginx proxy, using a path within your existing site. + +You can use the :ref:`config_base_url` configuration setting to tell Datasette to serve traffic with a specific URL prefix. For example, you could run Datasette like this:: + + datasette my-database.db --config base_url:/my-datasette/ -p 8009 + +This will run Datasette with the following URLs: + +- ``http://127.0.0.1:8009/my-datasette/`` - the Datasette homepage +- ``http://127.0.0.1:8009/my-datasette/my-database`` - the page for the ``my-database.db`` database +- ``http://127.0.0.1:8009/my-datasette/my-database/some_table`` - the page for the ``some_table`` table + +You can now set your nginx or Apache server to proxy the ``/my-datasette/`` path to this Datasette instance. + +Nginx proxy configuration +------------------------- + +Here is an example of an `nginx `__ configuration file that will proxy traffic to Datasette:: + + daemon off; + + events { + worker_connections 1024; + } + + http { + server { + listen 80; + + location /my-datasette { + proxy_pass http://127.0.0.1:8009; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + } + } + } + +Apache proxy configuration +-------------------------- + +For `Apache `__, you can use the ``ProxyPass`` directive. First make sure the following lines are uncommented:: + + LoadModule proxy_module lib/httpd/modules/mod_proxy.so + LoadModule proxy_http_module lib/httpd/modules/mod_proxy_http.so + +Then add this directive to proxy traffic:: + + ProxyPass /datasette-prefix/ http://127.0.0.1:8009/datasette-prefix/ From fa4de7551cbaf5e08f022d106605252d2a4332ec Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 14:37:58 -0700 Subject: [PATCH 0021/1455] Binary data documentation, closes #1047 --- docs/binary_data.png | Bin 0 -> 5572 bytes docs/binary_data.rst | 68 +++++++++++++++++++++++++++++++++++++++++++ docs/changelog.rst | 2 +- docs/csv_export.rst | 2 +- docs/index.rst | 1 + 5 files changed, 71 insertions(+), 2 deletions(-) create mode 100644 docs/binary_data.png create mode 100644 docs/binary_data.rst diff --git a/docs/binary_data.png b/docs/binary_data.png new file mode 100644 index 0000000000000000000000000000000000000000..2c5d0bdda89fc03e1632c9843ac9a11321e367bb GIT binary patch literal 5572 zcmd5=S5OlSmj&q^r9~p3f^-lR3B8DP5>QY~%5MMcGCXz<_>71br! 
[binary image data omitted]
+
+That page links to the binary value downloads. Those links look like this:
+
+https://latest.datasette.io/fixtures.blob?sql=select+data+from+binary_data&_blob_column=data&_blob_hash=f3088978da8f9aea479ffc7f631370b968d2e855eeb172bea7f6c7a04262bb6d
+
+These ``.blob`` links are also returned in the ``.csv`` exports Datasette provides for binary tables and queries, since the CSV format does not have a mechanism for representing binary data.
+
+Binary plugins
+--------------
+
+Several Datasette plugins are available that change the way Datasette treats binary data.
+
+- `datasette-render-binary `__ modifies
+- https://github.com/simonw/datasette-render-images
+- https://github.com/simonw/datasette-media
\ No newline at end of file
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 262400c8..fc566a37 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -9,7 +9,7 @@ Changelog
 0.51a2 (2020-10-30)
 -------------------
 
-- New :ref:`plugin_hook_load_template` plugin hook. (`#1042 `__)
+- New ``load_template`` plugin hook. (`#1042 `__)
 - New :ref:`permissions_debug_menu` permission. (`#1068 `__)
 
 .. _v0_51_a1:
diff --git a/docs/csv_export.rst b/docs/csv_export.rst
index 9b7f8188..b5cc599a 100644
--- a/docs/csv_export.rst
+++ b/docs/csv_export.rst
@@ -1,6 +1,6 @@
 .. _csv_export:
 
-CSV Export
+CSV export
 ==========
 
 Any Datasette table, view or custom SQL query can be exported as CSV.
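As a purely illustrative aside (not part of the documentation added in this patch), a ``.blob`` URL like the one above can be fetched programmatically like any other URL; the output filename here is made up:

.. code-block:: python

    import httpx

    # The sql, _blob_column and _blob_hash values are copied from the
    # example link above; real values differ for every row.
    url = (
        "https://latest.datasette.io/fixtures.blob"
        "?sql=select+data+from+binary_data"
        "&_blob_column=data"
        "&_blob_hash=f3088978da8f9aea479ffc7f631370b968d2e855eeb172bea7f6c7a04262bb6d"
    )
    response = httpx.get(url)
    response.raise_for_status()
    with open("value.blob", "wb") as fp:
        fp.write(response.content)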
diff --git a/docs/index.rst b/docs/index.rst index 9096efd9..6b55da8c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -46,6 +46,7 @@ Contents authentication performance csv_export + binary_data facets full_text_search spatialite From d53d747e6a9dbc294c0565bc5eefe9aa16989316 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 15:13:39 -0700 Subject: [PATCH 0022/1455] Release 0.51 Refs #1014, #1016, #1019, #1023, #1027, #1028, #1033, #1034, #1036, #1039 Closes #1076 --- README.md | 1 + datasette/version.py | 2 +- docs/changelog.rst | 82 +++++++++++++++++++++++++--------------- docs/datasette-0.51.png | Bin 0 -> 47637 bytes 4 files changed, 53 insertions(+), 32 deletions(-) create mode 100644 docs/datasette-0.51.png diff --git a/README.md b/README.md index 8670936c..c101a4ed 100644 --- a/README.md +++ b/README.md @@ -25,6 +25,7 @@ Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly new ## News + * 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. * 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). * 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). * 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. diff --git a/datasette/version.py b/datasette/version.py index 2f4bc37e..f6e9ce97 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.51a2" +__version__ = "0.51" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index fc566a37..b9120c52 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,45 +4,65 @@ Changelog ========= -.. _v0_51_a2: +.. _v0_51: -0.51a2 (2020-10-30) -------------------- +0.51 (2020-10-31) +----------------- -- New ``load_template`` plugin hook. (`#1042 `__) +A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. + +New visual design +~~~~~~~~~~~~~~~~~ + +Datasette is no longer white and grey with blue and purple links! `Natalie Downe `__ has been working on a visual refresh, the first iteration of which is included in this release. (`#1056 `__) + +.. image:: datasette-0.51.png + :width: 740px + :alt: Screenshot showing Datasette's new visual look + +Plugins can now add links within Datasette +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A number of existing Datasette plugins add new pages to the Datasette interface, providig tools for things like `uploading CSVs `__, `editing table schemas `__ or `configuring full-text search `__. 
+ +Plugins like this can now link to themselves from other parts of Datasette interface. The :ref:`plugin_hook_menu_links` hook (`#1064 `__) lets plugins add links to Datasette's new top-right application menu, and the :ref:`plugin_hook_table_actions` hook (`#1066 `__) adds links to a new "table actions" menu on the table page. + +The demo at `latest.datasette.io `__ now includes some example plugins. To see the new table actions menu first `sign into that demo as root `__ and then visit the `facetable `__ table to see the new cog icon menu at the top of the page. + +Binary data +~~~~~~~~~~~ + +SQLite tables can contain binary data in ``BLOB`` columns. Datasette now provides links for users to download this data directly from Datasette, and uses those links to make binary data available from CSV exports. See :ref:`binary` for more details. (`#1036 `__ and `#1034 `__). + +URL building +~~~~~~~~~~~~ + +The new :ref:`internals_datasette_urls` family of methods can be used to generate URLs to key pages within the Datasette interface, both within custom templates and Datasette plugins. See :ref:`writing_plugins_building_urls` for more details. (`#904 `__) + +Running Datasette behind a proxy +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The :ref:`config_base_url` configuration option is designed to help run Datasette on a specific path behind a proxy - for example if you want to run an instance of Datasette at ``/my-datasette/`` within your existing site's URL hierarchy, proxied behind nginx or Apache. + +Support for this configuration option has been greatly improved (`#1023 `__), and guidelines for using it are now available in a new documentation section on :ref:`deploying_proxy`. (`#1027 `__) + +Smaller changes +~~~~~~~~~~~~~~~ + +- Wide tables shown within Datasette now scroll horizontally (`#998 `__). This is achieved using a new ``
`` element which may impact the implementation of some plugins (for example `this change to datasette-cluster-map `__). - New :ref:`permissions_debug_menu` permission. (`#1068 `__) - -.. _v0_51_a1: - -0.51a1 (2020-10-29) -------------------- - -- New colour scheme and improved visual design, courtesy of Natalie Downe. (`#1056 `__) -- scale-in animation for column action menu. (`#1039 `__) -- Wide tables now scroll horizontally. (`#998 `__) -- Option to pass a list of templates to ``.render_template()`` is now documented. (`#1045 `__) -- New ``datasette.urls.static_plugins()`` method. (`#1033 `__) -- ``BLOB`` column values can now be downloaded directly from the Datasette UI. (`#1036 `__) -- ``.csv`` exports now link to direct ``BLOB`` downloads. (`#1034 `__) -- ``datasette -o`` option now opens the most relevant page. (`#976 `__) -- ``datasette --cors`` option now enables access to ``/database.db`` downloads. (`#1057 `__) -- Database file downloads now implement cascading permissions, so you can download a database if you have ``view-database-download`` permission even if you do not have permission to access the Datasette instance. (`#1058 `__) -- New documentation on :ref:`writing_plugins_designing_urls`. (`#1053 `__) -- New navigation menu plus a :ref:`plugin_hook_menu_links` plugin hook to customize it. (`#1064 `__) -- :ref:`plugin_hook_table_actions` plugin hook for the new table actions menu. (`#1066 `__) - -.. _v0_51_a0: - -0.51a0 (2020-10-19) -------------------- - -- Fixed a bunch of issues relating to the :ref:`config_base_url` setting. (`#1023 `__) -- New ``datasette.urls`` URL builder for plugins, see :ref:`writing_plugins_building_urls`. (`#904 `__) - Removed ``--debug`` option, which didn't do anything. (`#814 `__) - ``Link:`` HTTP header pagination. (`#1014 `__) - ``x`` button for clearing filters. (`#1016 `__) - Edit SQL button on canned queries, (`#1019 `__) - ``--load-extension=spatialite`` shortcut. (`#1028 `__) +- scale-in animation for column action menu. (`#1039 `__) +- Option to pass a list of templates to ``.render_template()`` is now documented. (`#1045 `__) +- New ``datasette.urls.static_plugins()`` method. (`#1033 `__) +- ``datasette -o`` option now opens the most relevant page. (`#976 `__) +- ``datasette --cors`` option now enables access to ``/database.db`` downloads. (`#1057 `__) +- Database file downloads now implement cascading permissions, so you can download a database if you have ``view-database-download`` permission even if you do not have permission to access the Datasette instance. (`#1058 `__) +- New documentation on :ref:`writing_plugins_designing_urls`. (`#1053 `__) .. 
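A hypothetical sketch of the navigation hook described in these release notes (this is not code from the release; the link target and label are invented):

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def menu_links(datasette, actor):
        # Hide the entry from anonymous visitors; actor is None when
        # nobody is signed in.
        if actor is None:
            return []
        return [
            {
                # datasette.urls.path() applies the base_url setting, so
                # the link stays correct when Datasette runs behind a proxy.
                "href": datasette.urls.path("/-/upload-csvs"),
                "label": "Upload CSVs",
            }
        ]

Returning a list of dictionaries with ``href`` and ``label`` keys matches the shape used by Datasette's own default menu links.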
_v0_50_2:
diff --git a/docs/datasette-0.51.png b/docs/datasette-0.51.png
new file mode 100644
index 0000000000000000000000000000000000000000..eef83b9f6de49114baad50cfaac899f050559849
GIT binary patch
literal 47637
[binary image data omitted]
literal 0
HcmV?d00001

From f0bd2d05f5f7832df4879822afb99d2096c00d48 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 31 Oct 2020 15:24:54 -0700
Subject: [PATCH 0023/1455] Link to global-power-plants demo instead of
 sf-trees

---
 docs/getting_started.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/getting_started.rst b/docs/getting_started.rst
index 2f0a7962..52434fdc 100644
--- a/docs/getting_started.rst
+++ b/docs/getting_started.rst
@@ -6,8 +6,8 @@ Play with a live demo
 
 The best way to experience Datasette for the first time is with a demo:
 
+* `global-power-plants.datasettes.com `__ provides a searchable database of power plants around the world, using data from the `World Resources Institude `__ rendered using the `datasette-cluster-map `__ plugin.
 * `fivethirtyeight.datasettes.com `__ shows Datasette running against over 400 datasets imported from the `FiveThirtyEight GitHub repository `__.
-* `sf-trees.datasettes.com `__ demonstrates the `datasette-cluster-map `__ plugin running against 190,000 trees imported from `data.sfgov.org `__.
 
 .. _getting_started_glitch:

From 7788d62fa679fa87d3f34a3466295b0ae06598dd Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 31 Oct 2020 20:28:16 -0700
Subject: [PATCH 0024/1455] Expanded the Binary plugins section

---
 docs/binary_data.rst | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/binary_data.rst b/docs/binary_data.rst
index 593cf78d..6902af00 100644
--- a/docs/binary_data.rst
+++ b/docs/binary_data.rst
@@ -63,6 +63,6 @@ Binary plugins
 
 Several Datasette plugins are available that change the way Datasette treats binary data.
 
-- `datasette-render-binary `__ modifies
-- https://github.com/simonw/datasette-render-images
-- https://github.com/simonw/datasette-media
\ No newline at end of file
+- `datasette-render-binary `__ modifies Datasette's default interface to show an automatic guess at what type of binary data is being stored, along with a visual representation of the binary value that displays ASCII strings directly in the interface.
+- `datasette-render-images `__ detects common image formats and renders them as images directly in the Datasette interface.
+- `datasette-media `__ allows Datasette interfaces to be configured to serve binary files from configured SQL queries, and includes the ability to resize images directly before serving them.

From 4785172bbcb9edd22b6955b415cd18cd4d83f0aa Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 31 Oct 2020 20:33:47 -0700
Subject: [PATCH 0025/1455] Release 0.51.1

---
 datasette/version.py | 2 +-
 docs/changelog.rst   | 7 +++++++
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/datasette/version.py b/datasette/version.py
index f6e9ce97..2d949370 100644
--- a/datasette/version.py
+++ b/datasette/version.py
@@ -1,2 +1,2 @@
-__version__ = "0.51"
+__version__ = "0.51.1"
 __version_info__ = tuple(__version__.split("."))
diff --git a/docs/changelog.rst b/docs/changelog.rst
index b9120c52..97d5d251 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,13 @@ Changelog
 =========
 
+..
_v0_51_1: + +0.51.1 (2020-10-31) +------------------- + +- Improvements to the new :ref:`binary` documentation page. + .. _v0_51: 0.51 (2020-10-31) From 59b252a0c020d687259ab85e06f0636feefa0dd0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 21:45:42 -0700 Subject: [PATCH 0026/1455] Link to annotated release notes for 0.51 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index c101a4ed..a10ccfd3 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly new ## News - * 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. + * 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. [Annotated release notes](https://simonwillison.net/2020/Nov/1/datasette-0-51/). * 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). * 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). * 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. From b61f6cceb5682f9154ba72259c0c9c7503a605bf Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 1 Nov 2020 09:22:13 -0800 Subject: [PATCH 0027/1455] Add nav menu to pattern portfolio --- datasette/templates/patterns.html | 38 +++++++++++++++++++++---------- 1 file changed, 26 insertions(+), 12 deletions(-) diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html index 62ef1322..25fb6008 100644 --- a/datasette/templates/patterns.html +++ b/datasette/templates/patterns.html @@ -9,19 +9,33 @@ +
-
-
- -
- - -
- -
- - -

Pattern Portfolio

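For context, the navigation menu on Datasette pages is populated through the menu_links plugin hook, so plugins can contribute their own entries to it. A minimal sketch of a one-off plugin doing that is shown below; it is illustrative only and not part of this patch series, and the /-/dashboard path and "Dashboard" label are hypothetical:

    from datasette import hookimpl


    @hookimpl
    def menu_links(datasette, actor):
        # Only offer the extra link to signed-in users; returning None adds nothing
        if actor:
            return [
                {"href": datasette.urls.path("/-/dashboard"), "label": "Dashboard"}
            ]
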
From 7b194920702358b65739a6e8bd3adb765ffa346a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 2 Nov 2020 10:27:25 -0800 Subject: [PATCH 0028/1455] database_actions() plugin hook, closes #1077 --- datasette/hookspecs.py | 5 +++++ datasette/static/app.css | 6 +++--- datasette/templates/database.html | 25 ++++++++++++++++++++++++- datasette/templates/table.html | 6 +++--- datasette/views/database.py | 15 +++++++++++++++ docs/plugin_hooks.rst | 16 ++++++++++++++++ tests/fixtures.py | 1 + tests/plugins/my_plugin.py | 11 +++++++++++ tests/test_api.py | 1 + tests/test_plugins.py | 19 ++++++++++++++++++- 10 files changed, 97 insertions(+), 8 deletions(-) diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 78070e67..a305ca6a 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -107,3 +107,8 @@ def menu_links(datasette, actor): @hookspec def table_actions(datasette, actor, database, table): "Links for the table actions menu" + + +@hookspec +def database_actions(datasette, actor, database): + "Links for the database actions menu" diff --git a/datasette/static/app.css b/datasette/static/app.css index a1eb2099..675285c1 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -360,11 +360,11 @@ details .nav-menu-inner { display: block; } -/* Table actions menu */ -.table-menu-links { +/* Table/database actions menu */ +.actions-menu-links { position: relative; } -.table-menu-links .dropdown-menu { +.actions-menu-links .dropdown-menu { position: absolute; top: 2rem; right: 0; diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 3b89d68b..7065f2c2 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -18,7 +18,30 @@ {% block content %} -

{{ metadata.title or database }}{% if private %} 🔒{% endif %}

+ + {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 13f6a832..5034b62e 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -28,12 +28,12 @@
- - - - -

.bd for /database/table

+
-

roadside_attraction_characteristics

+ +

Data license: Apache License 2.0 @@ -257,7 +274,6 @@

-

2 extra where clauses

    @@ -269,7 +285,6 @@
-

View and edit SQL

@@ -278,11 +293,6 @@ Suggested facets: tags, created (date), tags (array)

- - - - -
@@ -420,16 +430,6 @@ ); - - - - - - - - - -

.bd for /database/table/row

roadside_attractions: 2

@@ -474,16 +474,6 @@
- - - - - - - - - -

.ft

+ +{% include "_close_open_menus.html" %} + From 13d1228d80c91d382a05b1a9549ed02c300ef851 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 2 Nov 2020 12:02:50 -0800 Subject: [PATCH 0030/1455] /dbname/tablename/-/modify-table-schema is OK after all Refs #1053, #296 --- docs/writing_plugins.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index 29fcca13..dfcda8a9 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -227,10 +227,11 @@ If your plugin includes functionality that relates to a specific database you co - ``/dbname/-/upload-excel`` -Reserving routes under ``/dbname/tablename/-/...`` is not a good idea because a table could conceivably include a row with a primary key value of ``-``. Instead, you could use a pattern like this: +Or for a specific table like this: -- ``/dbname/-/upload-excel/tablename`` +- ``/dbname/tablename/-/modify-table-schema`` +Note that a row could have a primary key of ``-`` and this URL scheme will still work, because Datasette row pages do not ever have a trailing slash followed by additional path components. .. _writing_plugins_building_urls: From 2a981e2ac1d13125973904b777d00ea75e8df4e6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 11 Nov 2020 15:37:37 -0800 Subject: [PATCH 0031/1455] Blank foreign key labels now show as hyphens, closes #1086 --- datasette/templates/table.html | 4 +- datasette/views/table.py | 2 +- tests/fixtures.py | 6 ++- tests/test_api.py | 76 ++++++++++++++++++++++------------ tests/test_csv.py | 6 +-- tests/test_html.py | 30 +++++++++----- 6 files changed, 80 insertions(+), 44 deletions(-) diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 5034b62e..077332dc 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -156,9 +156,9 @@
    {% for facet_value in facet_info.results %} {% if not facet_value.selected %} -
  • {{ (facet_value.label if facet_value.label is not none else "_") }} {{ "{:,}".format(facet_value.count) }}
  • +
  • {{ (facet_value.label | string()) or "-" }} {{ "{:,}".format(facet_value.count) }}
  • {% else %} -
  • {{ facet_value.label }} · {{ "{:,}".format(facet_value.count) }}
  • +
  • {{ facet_value.label or "-" }} · {{ "{:,}".format(facet_value.count) }}
  • {% endif %} {% endfor %} {% if facet_info.truncated %} diff --git a/datasette/views/table.py b/datasette/views/table.py index 65fe7f8b..d29ef201 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -195,7 +195,7 @@ class RowTableShared(DataView): table=urllib.parse.quote_plus(other_table), link_id=urllib.parse.quote_plus(str(value)), id=str(jinja2.escape(value)), - label=str(jinja2.escape(label)), + label=str(jinja2.escape(label)) or "-", ) ) elif value in ("", None): diff --git a/tests/fixtures.py b/tests/fixtures.py index a48cfb46..bd530398 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -386,8 +386,10 @@ CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_k CREATE TABLE foreign_key_references ( pk varchar(30) primary key, foreign_key_with_label varchar(30), + foreign_key_with_blank_label varchar(30), foreign_key_with_no_label varchar(30), FOREIGN KEY (foreign_key_with_label) REFERENCES simple_primary_key(id), + FOREIGN KEY (foreign_key_with_blank_label) REFERENCES simple_primary_key(id), FOREIGN KEY (foreign_key_with_no_label) REFERENCES primary_key_multiple_columns(id) ); @@ -622,8 +624,8 @@ INSERT INTO simple_primary_key VALUES (4, 'RENDER_CELL_DEMO'); INSERT INTO primary_key_multiple_columns VALUES (1, 'hey', 'world'); INSERT INTO primary_key_multiple_columns_explicit_label VALUES (1, 'hey', 'world2'); -INSERT INTO foreign_key_references VALUES (1, 1, 1); -INSERT INTO foreign_key_references VALUES (2, null, null); +INSERT INTO foreign_key_references VALUES (1, 1, 3, 1); +INSERT INTO foreign_key_references VALUES (2, null, null, null); INSERT INTO complex_foreign_keys VALUES (1, 1, 2, 1); INSERT INTO custom_foreign_key_label VALUES (1, 1); diff --git a/tests/test_api.py b/tests/test_api.py index 1a43e7f4..d6d683b7 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -49,21 +49,21 @@ def test_homepage_sort_by_relationships(app_client): tables = [ t["name"] for t in response.json["fixtures"]["tables_and_views_truncated"] ] - assert [ + assert tables == [ "simple_primary_key", + "foreign_key_references", "complex_foreign_keys", "roadside_attraction_characteristics", "searchable_tags", - "foreign_key_references", - ] == tables + ] def test_database_page(app_client): response = app_client.get("/fixtures.json") assert response.status == 200 data = response.json - assert "fixtures" == data["database"] - assert [ + assert data["database"] == "fixtures" + assert data["tables"] == [ { "name": "123_starts_with_digits", "columns": ["content"], @@ -232,7 +232,12 @@ def test_database_page(app_client): }, { "name": "foreign_key_references", - "columns": ["pk", "foreign_key_with_label", "foreign_key_with_no_label"], + "columns": [ + "pk", + "foreign_key_with_label", + "foreign_key_with_blank_label", + "foreign_key_with_no_label", + ], "primary_keys": ["pk"], "count": 2, "hidden": False, @@ -245,6 +250,11 @@ def test_database_page(app_client): "column": "foreign_key_with_no_label", "other_column": "id", }, + { + "other_table": "simple_primary_key", + "column": "foreign_key_with_blank_label", + "other_column": "id", + }, { "other_table": "simple_primary_key", "column": "foreign_key_with_label", @@ -403,6 +413,11 @@ def test_database_page(app_client): "fts_table": None, "foreign_keys": { "incoming": [ + { + "other_table": "foreign_key_references", + "column": "id", + "other_column": "foreign_key_with_blank_label", + }, { "other_table": "foreign_key_references", "column": "id", @@ -548,7 +563,7 @@ def test_database_page(app_client): 
"foreign_keys": {"incoming": [], "outgoing": []}, "private": False, }, - ] == data["tables"] + ] def test_no_files_uses_memory_database(app_client_no_files): @@ -1203,32 +1218,38 @@ def test_row_foreign_key_tables(app_client): "/fixtures/simple_primary_key/1.json?_extras=foreign_key_tables" ) assert response.status == 200 - assert [ + assert response.json["foreign_key_tables"] == [ { - "column": "id", - "count": 1, - "other_column": "foreign_key_with_label", "other_table": "foreign_key_references", - }, - { - "column": "id", - "count": 1, - "other_column": "f3", - "other_table": "complex_foreign_keys", - }, - { "column": "id", + "other_column": "foreign_key_with_blank_label", "count": 0, - "other_column": "f2", - "other_table": "complex_foreign_keys", }, { + "other_table": "foreign_key_references", "column": "id", + "other_column": "foreign_key_with_label", "count": 1, - "other_column": "f1", - "other_table": "complex_foreign_keys", }, - ] == response.json["foreign_key_tables"] + { + "other_table": "complex_foreign_keys", + "column": "id", + "other_column": "f3", + "count": 1, + }, + { + "other_table": "complex_foreign_keys", + "column": "id", + "other_column": "f2", + "count": 0, + }, + { + "other_table": "complex_foreign_keys", + "column": "id", + "other_column": "f1", + "count": 1, + }, + ] def test_unit_filters(app_client): @@ -1593,13 +1614,14 @@ def test_expand_label(app_client): "/fixtures/foreign_key_references.json?_shape=object" "&_label=foreign_key_with_label&_size=1" ) - assert { + assert response.json == { "1": { "pk": "1", "foreign_key_with_label": {"value": "1", "label": "hello"}, + "foreign_key_with_blank_label": "3", "foreign_key_with_no_label": "1", } - } == response.json + } @pytest.mark.parametrize( @@ -1790,11 +1812,13 @@ def test_null_foreign_keys_are_not_expanded(app_client): { "pk": "1", "foreign_key_with_label": {"value": "1", "label": "hello"}, + "foreign_key_with_blank_label": {"value": "3", "label": ""}, "foreign_key_with_no_label": {"value": "1", "label": "1"}, }, { "pk": "2", "foreign_key_with_label": None, + "foreign_key_with_blank_label": None, "foreign_key_with_no_label": None, }, ] == response.json diff --git a/tests/test_csv.py b/tests/test_csv.py index 3e91fb04..209bce2b 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -42,9 +42,9 @@ pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,com ) EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV = """ -pk,foreign_key_with_label,foreign_key_with_label_label,foreign_key_with_no_label,foreign_key_with_no_label_label -1,1,hello,1,1 -2,,,, +pk,foreign_key_with_label,foreign_key_with_label_label,foreign_key_with_blank_label,foreign_key_with_blank_label_label,foreign_key_with_no_label,foreign_key_with_no_label_label +1,1,hello,3,,1,1 +2,,,,,, """.lstrip().replace( "\n", "\r\n" ) diff --git a/tests/test_html.py b/tests/test_html.py index 006c223d..7fca8a68 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -464,7 +464,7 @@ def test_facet_display(app_client): ], } ) - assert [ + assert actual == [ { "name": "city_id", "items": [ @@ -520,7 +520,7 @@ def test_facet_display(app_client): }, ], }, - ] == actual + ] def test_facets_persist_through_filter_form(app_client): @@ -801,37 +801,47 @@ def test_table_html_foreign_key_links(app_client): response = app_client.get("/fixtures/foreign_key_references") assert response.status == 200 table = Soup(response.body, "html.parser").find("table") - expected = [ + actual = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] + 
assert actual == [ [ '1', 'hello\xa01', + '-\xa03', '1', ], [ '2', '\xa0', + '\xa0', '\xa0', ], ] - assert expected == [ - [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") - ] + + +def test_table_html_foreign_key_facets(app_client): + response = app_client.get( + "/fixtures/foreign_key_references?_facet=foreign_key_with_blank_label" + ) + assert response.status == 200 + assert ( + '
  • ' + "- 1
  • " + ) in response.text def test_table_html_disable_foreign_key_links_with_labels(app_client): response = app_client.get("/fixtures/foreign_key_references?_labels=off&_size=1") assert response.status == 200 table = Soup(response.body, "html.parser").find("table") - expected = [ + actual = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] + assert actual == [ [ '1', '1', + '3', '1', ] ] - assert expected == [ - [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") - ] def test_table_html_foreign_key_custom_label_column(app_client): From e8e0a6f284ca953b2980186c4356594c07bd1929 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 11 Nov 2020 16:02:58 -0800 Subject: [PATCH 0032/1455] Use FTS4 in fixtures Closes #1081 --- tests/fixtures.py | 2 +- tests/test_api.py | 28 ++++++++++++++++------------ tests/test_internals_database.py | 7 ++++--- 3 files changed, 21 insertions(+), 16 deletions(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index bd530398..183b8ca4 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -488,7 +488,7 @@ INSERT INTO searchable_tags (searchable_id, tag) VALUES ; CREATE VIRTUAL TABLE "searchable_fts" - USING FTS3 (text1, text2, [name with . and spaces], content="searchable"); + USING FTS4 (text1, text2, [name with . and spaces], content="searchable"); INSERT INTO "searchable_fts" (rowid, text1, text2, [name with . and spaces]) SELECT rowid, text1, text2, [name with . and spaces] FROM searchable; diff --git a/tests/test_api.py b/tests/test_api.py index d6d683b7..93097574 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -37,9 +37,9 @@ def test_homepage(app_client): assert len(d["tables_and_views_truncated"]) == 5 assert d["tables_and_views_more"] is True # 4 hidden FTS tables + no_primary_key (hidden in metadata) - assert d["hidden_tables_count"] == 5 - # 201 in no_primary_key, plus 5 in other hidden tables: - assert d["hidden_table_rows_sum"] == 206 + assert d["hidden_tables_count"] == 6 + # 201 in no_primary_key, plus 6 in other hidden tables: + assert d["hidden_table_rows_sum"] == 207 assert d["views_count"] == 4 @@ -512,7 +512,7 @@ def test_database_page(app_client): }, { "name": "searchable_fts", - "columns": ["text1", "text2", "name with . and spaces", "content"], + "columns": ["text1", "text2", "name with . and spaces"], "primary_keys": [], "count": 2, "hidden": True, @@ -521,14 +521,8 @@ def test_database_page(app_client): "private": False, }, { - "name": "searchable_fts_content", - "columns": [ - "docid", - "c0text1", - "c1text2", - "c2name with . 
and spaces", - "c3content", - ], + "name": "searchable_fts_docsize", + "columns": ["docid", "size"], "primary_keys": ["docid"], "count": 2, "hidden": True, @@ -563,6 +557,16 @@ def test_database_page(app_client): "foreign_keys": {"incoming": [], "outgoing": []}, "private": False, }, + { + "name": "searchable_fts_stat", + "columns": ["id", "value"], + "primary_keys": ["id"], + "count": 1, + "hidden": True, + "fts_table": None, + "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, + }, ] diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 8042cf53..e5938f3b 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -297,7 +297,7 @@ async def test_get_all_foreign_keys(db): @pytest.mark.asyncio async def test_table_names(db): table_names = await db.table_names() - assert [ + assert table_names == [ "simple_primary_key", "primary_key_multiple_columns", "primary_key_multiple_columns_explicit_label", @@ -316,9 +316,10 @@ async def test_table_names(db): "searchable", "searchable_tags", "searchable_fts", - "searchable_fts_content", "searchable_fts_segments", "searchable_fts_segdir", + "searchable_fts_docsize", + "searchable_fts_stat", "select", "infinity", "facet_cities", @@ -327,7 +328,7 @@ async def test_table_names(db): "roadside_attractions", "attraction_characteristic", "roadside_attraction_characteristics", - ] == table_names + ] @pytest.mark.asyncio From 253f2d9a3cc96edcb47b33c6971300d0ff15d4dc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 11 Nov 2020 20:36:44 -0800 Subject: [PATCH 0033/1455] Use correct QueryInterrupted exception on row page, closes #1088 --- datasette/views/table.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index d29ef201..9ed45df1 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -19,7 +19,6 @@ from datasette.utils import ( path_with_added_args, path_with_removed_args, path_with_replaced_args, - sqlite3, to_css_class, urlsafe_components, value_as_boolean, @@ -1040,7 +1039,7 @@ class RowView(RowTableShared): ) try: rows = list(await db.execute(sql, {"id": pk_values[0]})) - except sqlite3.OperationalError: + except QueryInterrupted: # Almost certainly hit the timeout return [] From 5eb8e9bf250b26e30b017d39a392c33973997656 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 12 Nov 2020 12:07:19 -0800 Subject: [PATCH 0034/1455] Removed words that minimize involved difficulty, closes #1089 --- docs/changelog.rst | 8 ++++---- docs/contributing.rst | 8 ++++---- docs/deploying.rst | 4 ++-- docs/ecosystem.rst | 2 +- docs/internals.rst | 2 +- docs/metadata.rst | 2 +- docs/plugin_hooks.rst | 2 +- docs/publish.rst | 4 ++-- docs/sql_queries.rst | 2 +- docs/writing_plugins.rst | 4 ++-- 10 files changed, 19 insertions(+), 19 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 97d5d251..34bd95d4 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -239,7 +239,7 @@ Better plugin documentation The plugin documentation has been re-arranged into four sections, including a brand new section on testing plugins. (`#687 `__) - :ref:`plugins` introduces Datasette's plugin system and describes how to install and configure plugins. -- :ref:`writing_plugins` describes how to author plugins, from simple one-off plugins to packaged plugins that can be published to PyPI. It also describes how to start a plugin using the new `datasette-plugin `__ cookiecutter template. 
+- :ref:`writing_plugins` describes how to author plugins, from one-off single file plugins to packaged plugins that can be published to PyPI. It also describes how to start a plugin using the new `datasette-plugin `__ cookiecutter template. - :ref:`plugin_hooks` is a full list of detailed documentation for every Datasette plugin hook. - :ref:`testing_plugins` describes how to write tests for Datasette plugins, using `pytest `__ and `HTTPX `__. @@ -277,7 +277,7 @@ Authentication Prior to this release the Datasette ecosystem has treated authentication as exclusively the realm of plugins, most notably through `datasette-auth-github `__. -0.44 introduces :ref:`authentication` as core Datasette concepts (`#699 `__). This makes it easier for different plugins can share responsibility for authenticating requests - you might have one plugin that handles user accounts and another one that allows automated access via API keys, for example. +0.44 introduces :ref:`authentication` as core Datasette concepts (`#699 `__). This enables different plugins to share responsibility for authenticating requests - you might have one plugin that handles user accounts and another one that allows automated access via API keys, for example. You'll need to install plugins if you want full user accounts, but default Datasette can now authenticate a single root user with the new ``--root`` command-line option, which outputs a one-time use URL to :ref:`authenticate as a root actor ` (`#784 `__):: @@ -572,7 +572,7 @@ Also in this release: 0.32 (2019-11-14) ----------------- -Datasette now renders templates using `Jinja async mode `__. This makes it easy for plugins to provide custom template functions that perform asynchronous actions, for example the new `datasette-template-sql `__ plugin which allows custom templates to directly execute SQL queries and render their results. (`#628 `__) +Datasette now renders templates using `Jinja async mode `__. This means plugins can provide custom template functions that perform asynchronous actions, for example the new `datasette-template-sql `__ plugin which allows custom templates to directly execute SQL queries and render their results. (`#628 `__) .. _v0_31_2: @@ -1881,7 +1881,7 @@ as a more powerful alternative to SQL views. This will write those values into the metadata.json that is packaged with the app. If you also pass ``--metadata=metadata.json`` that file will be updated with the extra values before being written into the Docker image. -- Added simple production-ready Dockerfile (`#94`_) [Andrew +- Added production-ready Dockerfile (`#94`_) [Andrew Cutler] - New ``?_sql_time_limit_ms=10`` argument to database and table page (`#95`_) - SQL syntax highlighting with Codemirror (`#89`_) [Tom Dyson] diff --git a/docs/contributing.rst b/docs/contributing.rst index 375f6b89..ca194001 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -19,7 +19,7 @@ General guidelines Setting up a development environment ------------------------------------ -If you have Python 3.6 or higher installed on your computer (on OS X the easiest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps. +If you have Python 3.6 or higher installed on your computer (on OS X the quickest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps. If you want to use GitHub to publish your changes, first `create a fork of datasette `__ under your own GitHub account. 
@@ -27,7 +27,7 @@ Now clone that repository somewhere on your computer:: git clone git@github.com:YOURNAME/datasette -If you just want to get started without creating your own fork, you can do this instead:: +If you want to get started without creating your own fork, you can do this instead:: git clone git@github.com:simonw/datasette @@ -47,9 +47,9 @@ Once you have done this, you can run the Datasette unit tests from inside your ` pytest -To run Datasette itself, just type ``datasette``. +To run Datasette itself, type ``datasette``. -You're going to need at least one SQLite database. An easy way to get started is to use the fixtures database that Datasette uses for its own tests. +You're going to need at least one SQLite database. A quick way to get started is to use the fixtures database that Datasette uses for its own tests. You can create a copy of that database by running this command:: diff --git a/docs/deploying.rst b/docs/deploying.rst index e777f296..3eeaaad8 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -4,7 +4,7 @@ Deploying Datasette ===================== -The easiest way to deploy a Datasette instance on the internet is to use the ``datasette publish`` command, described in :ref:`publishing`. This can be used to quickly deploy Datasette to a number of hosting providers including Heroku, Google Cloud Run and Vercel. +The quickest way to deploy a Datasette instance on the internet is to use the ``datasette publish`` command, described in :ref:`publishing`. This can be used to quickly deploy Datasette to a number of hosting providers including Heroku, Google Cloud Run and Vercel. You can deploy Datasette to other hosting providers using the instructions on this page. @@ -109,7 +109,7 @@ If you want to build SQLite files or download them as part of the deployment pro wget https://fivethirtyeight.datasettes.com/fivethirtyeight.db -`simonw/buildpack-datasette-demo `__ is an example GitHub repository showing a simple Datasette configuration that can be deployed to a buildpack-supporting host. +`simonw/buildpack-datasette-demo `__ is an example GitHub repository showing a Datasette configuration that can be deployed to a buildpack-supporting host. .. _deploying_proxy: diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst index 4b80e71e..2ab4224a 100644 --- a/docs/ecosystem.rst +++ b/docs/ecosystem.rst @@ -68,7 +68,7 @@ For example, to create a SQLite database of the `City of Dallas Payment Register Datasette Plugins ================= -Datasette's :ref:`plugin system ` makes it easy to enhance Datasette with additional functionality. +Datasette's :ref:`plugin system ` allows developers to enhance Datasette with additional functionality. datasette-graphql ----------------- diff --git a/docs/internals.rst b/docs/internals.rst index d3d0be8e..92496490 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -119,7 +119,7 @@ For example: content_type="application/xml; charset=utf-8" ) -The easiest way to create responses is using the ``Response.text(...)``, ``Response.html(...)``, ``Response.json(...)`` or ``Response.redirect(...)`` helper methods: +The quickest way to create responses is using the ``Response.text(...)``, ``Response.html(...)``, ``Response.json(...)`` or ``Response.redirect(...)`` helper methods: .. 
code-block:: python diff --git a/docs/metadata.rst b/docs/metadata.rst index 471a52e3..87c81ff6 100644 --- a/docs/metadata.rst +++ b/docs/metadata.rst @@ -310,7 +310,7 @@ Here's an example of a ``metadata.yml`` file, re-using an example from :ref:`can where neighborhood like '%' || :text || '%' order by neighborhood; title: Search neighborhoods description_html: |- -

    This demonstrates simple LIKE search +

    This demonstrates basic LIKE search The ``metadata.yml`` file is passed to Datasette using the same ``--metadata`` option:: diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 6f8d269d..8407a259 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -471,7 +471,7 @@ It can also return a dictionary with the following keys. This format is **deprec ``headers`` - dictionary, optional Extra HTTP headers to be returned in the response. -A simple example of an output renderer callback function: +An example of an output renderer callback function: .. code-block:: python diff --git a/docs/publish.rst b/docs/publish.rst index 45048ce1..a905ac92 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -71,7 +71,7 @@ You can specify a custom app name by passing ``-n my-app-name`` to the publish c Publishing to Vercel -------------------- -`Vercel `__ - previously known as Zeit Now - provides a layer over AWS Lambda to allow for easy, scale-to-zero deployment. You can deploy Datasette instances to Vercel using the `datasette-publish-vercel `__ plugin. +`Vercel `__ - previously known as Zeit Now - provides a layer over AWS Lambda to allow for quick, scale-to-zero deployment. You can deploy Datasette instances to Vercel using the `datasette-publish-vercel `__ plugin. :: @@ -85,7 +85,7 @@ Not every feature is supported: consult the `datasette-publish-vercel README `__ is a `competitively priced `__ Docker-compatible hosting platform that makes it easy to run applications in globally distributed data centers close to your end users. You can deploy Datasette instances to Fly using the `datasette-publish-fly `__ plugin. +`Fly `__ is a `competitively priced `__ Docker-compatible hosting platform that supports running applications in globally distributed data centers close to your end users. You can deploy Datasette instances to Fly using the `datasette-publish-fly `__ plugin. :: diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index 0ce506cb..ec4c860e 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -64,7 +64,7 @@ If you want to bundle some pre-written SQL queries with your Datasette-hosted database you can do so in two ways. The first is to include SQL views in your database - Datasette will then list those views on your database index page. -The easiest way to create views is with the SQLite command-line interface:: +The quickest way to create views is with the SQLite command-line interface:: $ sqlite3 sf-trees.db SQLite version 3.19.3 2017-06-27 16:48:08 diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index dfcda8a9..60d5056a 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -10,7 +10,7 @@ You can write one-off plugins that apply to just one Datasette instance, or you Writing one-off plugins ----------------------- -The easiest way to write a plugin is to create a ``my_plugin.py`` file and drop it into your ``plugins/`` directory. Here is an example plugin, which adds a new custom SQL function called ``hello_world()`` which takes no arguments and returns the string ``Hello world!``. +The quickest way to start writing a plugin is to create a ``my_plugin.py`` file and drop it into your ``plugins/`` directory. Here is an example plugin, which adds a new custom SQL function called ``hello_world()`` which takes no arguments and returns the string ``Hello world!``. .. 
code-block:: python @@ -37,7 +37,7 @@ Starting an installable plugin using cookiecutter Plugins that can be installed should be written as Python packages using a ``setup.py`` file. -The easiest way to start writing one an installable plugin is to use the `datasette-plugin `__ cookiecutter template. This creates a new plugin structure for you complete with an example test and GitHub Actions workflows for testing and publishing your plugin. +The quickest way to start writing one an installable plugin is to use the `datasette-plugin `__ cookiecutter template. This creates a new plugin structure for you complete with an example test and GitHub Actions workflows for testing and publishing your plugin. `Install cookiecutter `__ and then run this command to start building a plugin using the template:: From 200284e1a7541af62c7df5467acfb7edd0ee934a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 15 Nov 2020 08:43:13 -0800 Subject: [PATCH 0035/1455] Clarified how --plugin-secret works --- docs/plugins.rst | 30 ++++++++++++++++++++++++++---- 1 file changed, 26 insertions(+), 4 deletions(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index 1c0dd588..06e2ec00 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -123,7 +123,6 @@ If you run ``datasette plugins --all`` it will include default plugins that ship You can add the ``--plugins-dir=`` option to include any plugins found in that directory. - .. _plugins_configuration: Plugin configuration @@ -131,7 +130,9 @@ Plugin configuration Plugins can have their own configuration, embedded in a :ref:`metadata` file. Configuration options for plugins live within a ``"plugins"`` key in that file, which can be included at the root, database or table level. -Here is an example of some plugin configuration for a specific table:: +Here is an example of some plugin configuration for a specific table: + +.. code-block:: json { "databases: { @@ -159,7 +160,9 @@ Secret configuration values Any values embedded in ``metadata.json`` will be visible to anyone who views the ``/-/metadata`` page of your Datasette instance. Some plugins may need configuration that should stay secret - API keys for example. There are two ways in which you can store secret configuration values. -**As environment variables**. If your secret lives in an environment variable that is available to the Datasette process, you can indicate that the configuration value should be read from that environment variable like so:: +**As environment variables**. If your secret lives in an environment variable that is available to the Datasette process, you can indicate that the configuration value should be read from that environment variable like so: + +.. code-block:: json { "plugins": { @@ -171,7 +174,9 @@ Any values embedded in ``metadata.json`` will be visible to anyone who views the } } -**As values in separate files**. Your secrets can also live in files on disk. To specify a secret should be read from a file, provide the full file path like this:: +**As values in separate files**. Your secrets can also live in files on disk. To specify a secret should be read from a file, provide the full file path like this: + +.. 
code-block:: json { "plugins": { @@ -190,3 +195,20 @@ If you are publishing your data using the :ref:`datasette publish ` --install=datasette-auth-github \ --plugin-secret datasette-auth-github client_id your_client_id \ --plugin-secret datasette-auth-github client_secret your_client_secret + +This will set the necessary environment variables and add the following to the deployed ``metadata.json``: + +.. code-block:: json + + { + "plugins": { + "datasette-auth-github": { + "client_id": { + "$env": "DATASETTE_AUTH_GITHUB_CLIENT_ID" + }, + "client_secret": { + "$env": "DATASETTE_AUTH_GITHUB_CLIENT_SECRET" + } + } + } + } From 6fd35be64de221eba4945ca24e8e1678f6142a73 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 15 Nov 2020 08:45:26 -0800 Subject: [PATCH 0036/1455] Fixed invalid JSON in exampl --- docs/plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index 06e2ec00..3e756a9e 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -135,7 +135,7 @@ Here is an example of some plugin configuration for a specific table: .. code-block:: json { - "databases: { + "databases": { "sf-trees": { "tables": { "Street_Tree_List": { From 30e64c8d3b3728a86c3ca42a75322cc3feb5b0c8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 15 Nov 2020 15:24:22 -0800 Subject: [PATCH 0037/1455] Use f-strings in place of .format() Code transformed like so: pip install flynt flynt . black . --- datasette/app.py | 14 +++--- datasette/blob_renderer.py | 6 +-- datasette/cli.py | 20 +++----- datasette/database.py | 12 ++--- datasette/facets.py | 20 ++++---- datasette/filters.py | 18 +++---- datasette/inspect.py | 2 +- datasette/publish/cloudrun.py | 8 ++- datasette/publish/common.py | 4 +- datasette/publish/heroku.py | 14 ++---- datasette/renderer.py | 4 +- datasette/tracer.py | 4 +- datasette/url_builder.py | 14 +++--- datasette/utils/__init__.py | 91 +++++++++++++++------------------- datasette/utils/asgi.py | 2 +- datasette/utils/testing.py | 6 +-- datasette/views/base.py | 22 ++++---- datasette/views/database.py | 8 ++- datasette/views/special.py | 4 +- datasette/views/table.py | 88 +++++++++++++------------------- tests/fixtures.py | 12 ++--- tests/plugins/my_plugin.py | 16 +++--- tests/test_api.py | 8 +-- tests/test_auth.py | 2 +- tests/test_canned_queries.py | 8 +-- tests/test_cli.py | 2 +- tests/test_docs.py | 6 +-- tests/test_filters.py | 4 +- tests/test_html.py | 41 +++++++-------- tests/test_internals_urls.py | 2 +- tests/test_messages.py | 2 +- tests/test_plugins.py | 12 ++--- tests/test_publish_cloudrun.py | 10 ++-- tests/test_utils.py | 2 +- update-docs-help.py | 2 +- 35 files changed, 213 insertions(+), 277 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 1271e52f..b2bdb746 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -231,7 +231,7 @@ class Datasette: is_mutable = path not in self.immutables db = Database(self, path, is_mutable=is_mutable, is_memory=is_memory) if db.name in self.databases: - raise Exception("Multiple files with same stem: {}".format(db.name)) + raise Exception(f"Multiple files with same stem: {db.name}") self.add_database(db.name, db) self.cache_headers = cache_headers self.cors = cors @@ -455,9 +455,9 @@ class Datasette: if self.sqlite_extensions: conn.enable_load_extension(True) for extension in self.sqlite_extensions: - conn.execute("SELECT load_extension('{}')".format(extension)) + conn.execute(f"SELECT load_extension('{extension}')") if self.config("cache_size_kb"): - 
conn.execute("PRAGMA cache_size=-{}".format(self.config("cache_size_kb"))) + conn.execute(f"PRAGMA cache_size=-{self.config('cache_size_kb')}") # pylint: disable=no-member pm.hook.prepare_connection(conn=conn, database=database, datasette=self) @@ -860,7 +860,7 @@ class Datasette: if plugin["static_path"]: add_route( asgi_static(plugin["static_path"]), - "/-/static-plugins/{}/(?P.*)$".format(plugin["name"]), + f"/-/static-plugins/{plugin['name']}/(?P.*)$", ) # Support underscores in name in addition to hyphens, see https://github.com/simonw/datasette/issues/611 add_route( @@ -1156,7 +1156,7 @@ class DatasetteRouter: info = {} message = str(exception) traceback.print_exc() - templates = ["{}.html".format(status), "error.html"] + templates = [f"{status}.html", "error.html"] info.update( { "ok": False, @@ -1234,7 +1234,7 @@ def route_pattern_from_filepath(filepath): re_bits = ["/"] for bit in _curly_re.split(filepath): if _curly_re.match(bit): - re_bits.append("(?P<{}>[^/]*)".format(bit[1:-1])) + re_bits.append(f"(?P<{bit[1:-1]}>[^/]*)") else: re_bits.append(re.escape(bit)) return re.compile("^" + "".join(re_bits) + "$") @@ -1253,7 +1253,7 @@ class DatasetteClient: if not isinstance(path, PrefixedUrlString): path = self.ds.urls.path(path) if path.startswith("/"): - path = "http://localhost{}".format(path) + path = f"http://localhost{path}" return path async def get(self, path, **kwargs): diff --git a/datasette/blob_renderer.py b/datasette/blob_renderer.py index 794b153e..217b3638 100644 --- a/datasette/blob_renderer.py +++ b/datasette/blob_renderer.py @@ -9,10 +9,10 @@ _BLOB_HASH = "_blob_hash" async def render_blob(datasette, database, rows, columns, request, table, view_name): if _BLOB_COLUMN not in request.args: - raise BadRequest("?{}= is required".format(_BLOB_COLUMN)) + raise BadRequest(f"?{_BLOB_COLUMN}= is required") blob_column = request.args[_BLOB_COLUMN] if blob_column not in columns: - raise BadRequest("{} is not a valid column".format(blob_column)) + raise BadRequest(f"{blob_column} is not a valid column") # If ?_blob_hash= provided, use that to select the row - otherwise use first row blob_hash = None @@ -42,7 +42,7 @@ async def render_blob(datasette, database, rows, columns, request, table, view_n filename = "-".join(filename_bits) + ".blob" headers = { "X-Content-Type-Options": "nosniff", - "Content-Disposition": 'attachment; filename="{}"'.format(filename), + "Content-Disposition": f'attachment; filename="{filename}"', } return Response( body=value or b"", diff --git a/datasette/cli.py b/datasette/cli.py index 04d2950b..99075078 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -33,12 +33,12 @@ class Config(click.ParamType): def convert(self, config, param, ctx): if ":" not in config: - self.fail('"{}" should be name:value'.format(config), param, ctx) + self.fail(f'"{config}" should be name:value', param, ctx) return name, value = config.split(":", 1) if name not in DEFAULT_CONFIG: self.fail( - "{} is not a valid option (--help-config to see all)".format(name), + f"{name} is not a valid option (--help-config to see all)", param, ctx, ) @@ -49,13 +49,11 @@ class Config(click.ParamType): try: return name, value_as_boolean(value) except ValueAsBooleanError: - self.fail( - '"{}" should be on/off/true/false/1/0'.format(name), param, ctx - ) + self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx) return elif isinstance(default, int): if not value.isdigit(): - self.fail('"{}" should be an integer'.format(name), param, ctx) + self.fail(f'"{name}" should be an 
integer', param, ctx) return return name, int(value) elif isinstance(default, str): @@ -203,7 +201,7 @@ def package( version_note, secret, port, - **extra_metadata + **extra_metadata, ): "Package specified SQLite files into a new datasette Docker container" if not shutil.which("docker"): @@ -389,7 +387,7 @@ def serve( with formatter.section("Config options"): formatter.write_dl( [ - (option.name, "{} (default={})".format(option.help, option.default)) + (option.name, f"{option.help} (default={option.default})") for option in CONFIG_OPTIONS ] ) @@ -470,7 +468,7 @@ def serve( path = asyncio.get_event_loop().run_until_complete( initial_path_for_datasette(ds) ) - url = "http://{}:{}{}".format(host, port, path) + url = f"http://{host}:{port}{path}" webbrowser.open(url) uvicorn.run( ds.app(), host=host, port=port, log_level="info", lifespan="on", workers=1 @@ -491,7 +489,5 @@ async def check_databases(ds): ) except ConnectionProblem as e: raise click.UsageError( - "Connection to {} failed check: {}".format( - database.path, str(e.args[0]) - ) + f"Connection to {database.path} failed check: {str(e.args[0])}" ) diff --git a/datasette/database.py b/datasette/database.py index a9f39253..ea1424a5 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -57,7 +57,7 @@ class Database: if write: qs = "" return sqlite3.connect( - "file:{}{}".format(self.path, qs), uri=True, check_same_thread=False + f"file:{self.path}{qs}", uri=True, check_same_thread=False ) async def execute_write(self, sql, params=None, block=False): @@ -191,7 +191,7 @@ class Database: try: table_count = ( await self.execute( - "select count(*) from [{}]".format(table), + f"select count(*) from [{table}]", custom_time_limit=limit, ) ).rows[0][0] @@ -362,13 +362,13 @@ class Database: if self.is_memory: tags.append("memory") if self.hash: - tags.append("hash={}".format(self.hash)) + tags.append(f"hash={self.hash}") if self.size is not None: - tags.append("size={}".format(self.size)) + tags.append(f"size={self.size}") tags_str = "" if tags: - tags_str = " ({})".format(", ".join(tags)) - return "".format(self.name, tags_str) + tags_str = f" ({', '.join(tags)})" + return f"" class WriteTask: diff --git a/datasette/facets.py b/datasette/facets.py index 1712db9b..a818a9e9 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -86,7 +86,7 @@ class Facet: self.database = database # For foreign key expansion. Can be None for e.g. 
canned SQL queries: self.table = table - self.sql = sql or "select * from [{}]".format(table) + self.sql = sql or f"select * from [{table}]" self.params = params or [] self.metadata = metadata # row_count can be None, in which case we calculate it ourselves: @@ -114,7 +114,7 @@ class Facet: # Detect column names using the "limit 0" trick return ( await self.ds.execute( - self.database, "select * from ({}) limit 0".format(sql), params or [] + self.database, f"select * from ({sql}) limit 0", params or [] ) ).columns @@ -123,7 +123,7 @@ class Facet: self.row_count = ( await self.ds.execute( self.database, - "select count(*) from ({})".format(self.sql), + f"select count(*) from ({self.sql})", self.params, ) ).rows[0][0] @@ -371,14 +371,14 @@ class ArrayFacet(Facet): pairs = self.get_querystring_pairs() for row in facet_rows: value = str(row["value"]) - selected = ("{}__arraycontains".format(column), value) in pairs + selected = (f"{column}__arraycontains", value) in pairs if selected: toggle_path = path_with_removed_args( - self.request, {"{}__arraycontains".format(column): value} + self.request, {f"{column}__arraycontains": value} ) else: toggle_path = path_with_added_args( - self.request, {"{}__arraycontains".format(column): value} + self.request, {f"{column}__arraycontains": value} ) facet_results_values.append( { @@ -482,16 +482,14 @@ class DateFacet(Facet): } facet_rows = facet_rows_results.rows[:facet_size] for row in facet_rows: - selected = str(args.get("{}__date".format(column))) == str( - row["value"] - ) + selected = str(args.get(f"{column}__date")) == str(row["value"]) if selected: toggle_path = path_with_removed_args( - self.request, {"{}__date".format(column): str(row["value"])} + self.request, {f"{column}__date": str(row["value"])} ) else: toggle_path = path_with_added_args( - self.request, {"{}__date".format(column): row["value"]} + self.request, {f"{column}__date": row["value"]} ) facet_results_values.append( { diff --git a/datasette/filters.py b/datasette/filters.py index 4891154a..1524b32a 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -43,7 +43,7 @@ class TemplatedFilter(Filter): kwargs = {"c": column} converted = None else: - kwargs = {"c": column, "p": "p{}".format(param_counter), "t": table} + kwargs = {"c": column, "p": f"p{param_counter}", "t": table} return self.sql_template.format(**kwargs), converted def human_clause(self, column, value): @@ -69,12 +69,12 @@ class InFilter(Filter): def where_clause(self, table, column, value, param_counter): values = self.split_value(value) - params = [":p{}".format(param_counter + i) for i in range(len(values))] - sql = "{} in ({})".format(escape_sqlite(column), ", ".join(params)) + params = [f":p{param_counter + i}" for i in range(len(values))] + sql = f"{escape_sqlite(column)} in ({', '.join(params)})" return sql, values def human_clause(self, column, value): - return "{} in {}".format(column, json.dumps(self.split_value(value))) + return f"{column} in {json.dumps(self.split_value(value))}" class NotInFilter(InFilter): @@ -83,12 +83,12 @@ class NotInFilter(InFilter): def where_clause(self, table, column, value, param_counter): values = self.split_value(value) - params = [":p{}".format(param_counter + i) for i in range(len(values))] - sql = "{} not in ({})".format(escape_sqlite(column), ", ".join(params)) + params = [f":p{param_counter + i}" for i in range(len(values))] + sql = f"{escape_sqlite(column)} not in ({', '.join(params)})" return sql, values def human_clause(self, column, value): - return "{} not in 
{}".format(column, json.dumps(self.split_value(value))) + return f"{column} not in {json.dumps(self.split_value(value))}" class Filters: @@ -221,7 +221,7 @@ class Filters: s = " and ".join(and_bits) if not s: return "" - return "where {}".format(s) + return f"where {s}" def selections(self): "Yields (column, lookup, value) tuples" @@ -265,7 +265,7 @@ class Filters: if not isinstance(param, list): param = [param] for individual_param in param: - param_id = "p{}".format(i) + param_id = f"p{i}" params[param_id] = individual_param i += 1 return sql_bits, params diff --git a/datasette/inspect.py b/datasette/inspect.py index 2324c02c..4d538e5f 100644 --- a/datasette/inspect.py +++ b/datasette/inspect.py @@ -47,7 +47,7 @@ def inspect_tables(conn, database_metadata): try: count = conn.execute( - "select count(*) from {}".format(escape_sqlite(table)) + f"select count(*) from {escape_sqlite(table)}" ).fetchone()[0] except sqlite3.OperationalError: # This can happen when running against a FTS virtual table diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 8f99dc2e..54f55fcb 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -100,9 +100,7 @@ def publish_subcommand(publish): extra_metadata["plugins"] = {} for plugin_name, plugin_setting, setting_value in plugin_secret: environment_variable = ( - "{}_{}".format(plugin_name, plugin_setting) - .upper() - .replace("-", "_") + f"{plugin_name}_{plugin_setting}".upper().replace("-", "_") ) environment_variables[environment_variable] = setting_value extra_metadata["plugins"].setdefault(plugin_name, {})[ @@ -133,8 +131,8 @@ def publish_subcommand(publish): print(open("Dockerfile").read()) print("\n====================\n") - image_id = "gcr.io/{project}/{name}".format(project=project, name=name) - check_call("gcloud builds submit --tag {}".format(image_id), shell=True) + image_id = f"gcr.io/{project}/{name}" + check_call(f"gcloud builds submit --tag {image_id}", shell=True) check_call( "gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}".format( image_id, service, " --memory {}".format(memory) if memory else "" diff --git a/datasette/publish/common.py b/datasette/publish/common.py index 49a4798e..b6570290 100644 --- a/datasette/publish/common.py +++ b/datasette/publish/common.py @@ -85,9 +85,7 @@ def fail_if_publish_binary_not_installed(binary, publish_target, install_link): err=True, ) click.echo( - "Follow the instructions at {install_link}".format( - install_link=install_link - ), + f"Follow the instructions at {install_link}", err=True, ) sys.exit(1) diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index 24305de5..c772b476 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -83,9 +83,7 @@ def publish_subcommand(publish): extra_metadata["plugins"] = {} for plugin_name, plugin_setting, setting_value in plugin_secret: environment_variable = ( - "{}_{}".format(plugin_name, plugin_setting) - .upper() - .replace("-", "_") + f"{plugin_name}_{plugin_setting}".upper().replace("-", "_") ) environment_variables[environment_variable] = setting_value extra_metadata["plugins"].setdefault(plugin_name, {})[ @@ -129,9 +127,7 @@ def publish_subcommand(publish): app_name = json.loads(create_output)["name"] for key, value in environment_variables.items(): - call( - ["heroku", "config:set", "-a", app_name, "{}={}".format(key, value)] - ) + call(["heroku", "config:set", "-a", app_name, f"{key}={value}"]) tar_option = [] if tar: tar_option = 
["--tar", tar] @@ -181,9 +177,7 @@ def temporary_heroku_directory( if branch: install = [ - "https://github.com/simonw/datasette/archive/{branch}.zip".format( - branch=branch - ) + f"https://github.com/simonw/datasette/archive/{branch}.zip" ] + list(install) else: install = ["datasette"] + list(install) @@ -216,7 +210,7 @@ def temporary_heroku_directory( link_or_copy_directory( os.path.join(saved_cwd, path), os.path.join(tmp.name, mount_point) ) - extras.extend(["--static", "{}:{}".format(mount_point, mount_point)]) + extras.extend(["--static", f"{mount_point}:{mount_point}"]) quoted_files = " ".join( ["-i {}".format(shlex.quote(file_name)) for file_name in file_names] diff --git a/datasette/renderer.py b/datasette/renderer.py index bcde8516..d779b44f 100644 --- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -82,7 +82,7 @@ def json_renderer(args, data, view_name): status_code = 400 data = { "ok": False, - "error": "Invalid _shape: {}".format(shape), + "error": f"Invalid _shape: {shape}", "status": 400, "title": None, } @@ -96,7 +96,7 @@ def json_renderer(args, data, view_name): content_type = "application/json; charset=utf-8" headers = {} if next_url: - headers["link"] = '<{}>; rel="next"'.format(next_url) + headers["link"] = f'<{next_url}>; rel="next"' return Response( body, status=status_code, headers=headers, content_type=content_type ) diff --git a/datasette/tracer.py b/datasette/tracer.py index a638b140..8f666767 100644 --- a/datasette/tracer.py +++ b/datasette/tracer.py @@ -28,7 +28,7 @@ def get_task_id(): def trace(type, **kwargs): assert not TRACE_RESERVED_KEYS.intersection( kwargs.keys() - ), ".trace() keyword parameters cannot include {}".format(TRACE_RESERVED_KEYS) + ), f".trace() keyword parameters cannot include {TRACE_RESERVED_KEYS}" task_id = get_task_id() if task_id is None: yield @@ -124,7 +124,7 @@ class AsgiTracer: content_type = "" if "text/html" in content_type and b"" in accumulated_body: extra = json.dumps(trace_info, indent=2) - extra_html = "

    {}
    ".format(extra).encode("utf8") + extra_html = f"
    {extra}
    ".encode("utf8") accumulated_body = accumulated_body.replace(b"", extra_html) elif "json" in content_type and accumulated_body.startswith(b"{"): data = json.loads(accumulated_body.decode("utf8")) diff --git a/datasette/url_builder.py b/datasette/url_builder.py index bcc4f39d..697f60ae 100644 --- a/datasette/url_builder.py +++ b/datasette/url_builder.py @@ -19,10 +19,10 @@ class Urls: return self.path("", format=format) def static(self, path): - return self.path("-/static/{}".format(path)) + return self.path(f"-/static/{path}") def static_plugins(self, plugin, path): - return self.path("-/static-plugins/{}/{}".format(plugin, path)) + return self.path(f"-/static-plugins/{plugin}/{path}") def logout(self): return self.path("-/logout") @@ -30,27 +30,25 @@ class Urls: def database(self, database, format=None): db = self.ds.databases[database] if self.ds.config("hash_urls") and db.hash: - path = self.path( - "{}-{}".format(database, db.hash[:HASH_LENGTH]), format=format - ) + path = self.path(f"{database}-{db.hash[:HASH_LENGTH]}", format=format) else: path = self.path(database, format=format) return path def table(self, database, table, format=None): - path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(table)) + path = f"{self.database(database)}/{urllib.parse.quote_plus(table)}" if format is not None: path = path_with_format(path=path, format=format) return PrefixedUrlString(path) def query(self, database, query, format=None): - path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(query)) + path = f"{self.database(database)}/{urllib.parse.quote_plus(query)}" if format is not None: path = path_with_format(path=path, format=format) return PrefixedUrlString(path) def row(self, database, table, row_path, format=None): - path = "{}/{}".format(self.table(database, table), row_path) + path = f"{self.table(database, table)}/{row_path}" if format is not None: path = path_with_format(path=path, format=format) return PrefixedUrlString(path) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index a7d96401..02b59b2b 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -115,13 +115,10 @@ def compound_keys_after_sql(pks, start_index=0): last = pks_left[-1] rest = pks_left[:-1] and_clauses = [ - "{} = :p{}".format(escape_sqlite(pk), (i + start_index)) - for i, pk in enumerate(rest) + f"{escape_sqlite(pk)} = :p{i + start_index}" for i, pk in enumerate(rest) ] - and_clauses.append( - "{} > :p{}".format(escape_sqlite(last), (len(rest) + start_index)) - ) - or_clauses.append("({})".format(" and ".join(and_clauses))) + and_clauses.append(f"{escape_sqlite(last)} > :p{len(rest) + start_index}") + or_clauses.append(f"({' and '.join(and_clauses)})") pks_left.pop() or_clauses.reverse() return "({})".format("\n or\n".join(or_clauses)) @@ -195,7 +192,7 @@ allowed_pragmas = ( ) disallawed_sql_res = [ ( - re.compile("pragma(?!_({}))".format("|".join(allowed_pragmas))), + re.compile(f"pragma(?!_({'|'.join(allowed_pragmas)}))"), "Statement may not contain PRAGMA", ) ] @@ -215,7 +212,7 @@ def validate_sql_select(sql): def append_querystring(url, querystring): op = "&" if ("?" in url) else "?" 
- return "{}{}{}".format(url, op, querystring) + return f"{url}{op}{querystring}" def path_with_added_args(request, args, path=None): @@ -230,7 +227,7 @@ def path_with_added_args(request, args, path=None): current.extend([(key, value) for key, value in args if value is not None]) query_string = urllib.parse.urlencode(current) if query_string: - query_string = "?{}".format(query_string) + query_string = f"?{query_string}" return path + query_string @@ -259,7 +256,7 @@ def path_with_removed_args(request, args, path=None): current.append((key, value)) query_string = urllib.parse.urlencode(current) if query_string: - query_string = "?{}".format(query_string) + query_string = f"?{query_string}" return path + query_string @@ -275,7 +272,7 @@ def path_with_replaced_args(request, args, path=None): current.extend([p for p in args if p[1] is not None]) query_string = urllib.parse.urlencode(current) if query_string: - query_string = "?{}".format(query_string) + query_string = f"?{query_string}" return path + query_string @@ -285,7 +282,7 @@ _boring_keyword_re = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_]*$") def escape_css_string(s): return _css_re.sub( - lambda m: "\\" + ("{:X}".format(ord(m.group())).zfill(6)), + lambda m: "\\" + (f"{ord(m.group()):X}".zfill(6)), s.replace("\r\n", "\n"), ) @@ -294,7 +291,7 @@ def escape_sqlite(s): if _boring_keyword_re.match(s) and (s.lower() not in reserved_words): return s else: - return "[{}]".format(s) + return f"[{s}]" def make_dockerfile( @@ -319,27 +316,27 @@ def make_dockerfile( cmd.extend(["-i", filename]) cmd.extend(["--cors", "--inspect-file", "inspect-data.json"]) if metadata_file: - cmd.extend(["--metadata", "{}".format(metadata_file)]) + cmd.extend(["--metadata", f"{metadata_file}"]) if template_dir: cmd.extend(["--template-dir", "templates/"]) if plugins_dir: cmd.extend(["--plugins-dir", "plugins/"]) if version_note: - cmd.extend(["--version-note", "{}".format(version_note)]) + cmd.extend(["--version-note", f"{version_note}"]) if static: for mount_point, _ in static: - cmd.extend(["--static", "{}:{}".format(mount_point, mount_point)]) + cmd.extend(["--static", f"{mount_point}:{mount_point}"]) if extra_options: for opt in extra_options.split(): - cmd.append("{}".format(opt)) + cmd.append(f"{opt}") cmd = [shlex.quote(part) for part in cmd] # port attribute is a (fixed) env variable and should not be quoted cmd.extend(["--port", "$PORT"]) cmd = " ".join(cmd) if branch: - install = [ - "https://github.com/simonw/datasette/archive/{}.zip".format(branch) - ] + list(install) + install = [f"https://github.com/simonw/datasette/archive/{branch}.zip"] + list( + install + ) else: install = ["datasette"] + list(install) @@ -449,7 +446,7 @@ def detect_primary_keys(conn, table): " Figure out primary keys for a table. 
" table_info_rows = [ row - for row in conn.execute('PRAGMA table_info("{}")'.format(table)).fetchall() + for row in conn.execute(f'PRAGMA table_info("{table}")').fetchall() if row[-1] ] table_info_rows.sort(key=lambda row: row[-1]) @@ -457,7 +454,7 @@ def detect_primary_keys(conn, table): def get_outbound_foreign_keys(conn, table): - infos = conn.execute("PRAGMA foreign_key_list([{}])".format(table)).fetchall() + infos = conn.execute(f"PRAGMA foreign_key_list([{table}])").fetchall() fks = [] for info in infos: if info is not None: @@ -476,7 +473,7 @@ def get_all_foreign_keys(conn): for table in tables: table_to_foreign_keys[table] = {"incoming": [], "outgoing": []} for table in tables: - infos = conn.execute("PRAGMA foreign_key_list([{}])".format(table)).fetchall() + infos = conn.execute(f"PRAGMA foreign_key_list([{table}])").fetchall() for info in infos: if info is not None: id, seq, table_name, from_, to_, on_update, on_delete, match = info @@ -544,9 +541,7 @@ def table_columns(conn, table): def table_column_details(conn, table): return [ Column(*r) - for r in conn.execute( - "PRAGMA table_info({});".format(escape_sqlite(table)) - ).fetchall() + for r in conn.execute(f"PRAGMA table_info({escape_sqlite(table)});").fetchall() ] @@ -562,9 +557,7 @@ def filters_should_redirect(special_args): if "__" in filter_op: filter_op, filter_value = filter_op.split("__", 1) if filter_column: - redirect_params.append( - ("{}__{}".format(filter_column, filter_op), filter_value) - ) + redirect_params.append((f"{filter_column}__{filter_op}", filter_value)) for key in ("_filter_column", "_filter_op", "_filter_value"): if key in special_args: redirect_params.append((key, None)) @@ -573,17 +566,17 @@ def filters_should_redirect(special_args): for column_key in column_keys: number = column_key.split("_")[-1] column = special_args[column_key] - op = special_args.get("_filter_op_{}".format(number)) or "exact" - value = special_args.get("_filter_value_{}".format(number)) or "" + op = special_args.get(f"_filter_op_{number}") or "exact" + value = special_args.get(f"_filter_value_{number}") or "" if "__" in op: op, value = op.split("__", 1) if column: - redirect_params.append(("{}__{}".format(column, op), value)) + redirect_params.append((f"{column}__{op}", value)) redirect_params.extend( [ - ("_filter_column_{}".format(number), None), - ("_filter_op_{}".format(number), None), - ("_filter_value_{}".format(number), None), + (f"_filter_column_{number}", None), + (f"_filter_op_{number}", None), + (f"_filter_value_{number}", None), ] ) return redirect_params @@ -672,7 +665,7 @@ async def resolve_table_and_format( # Check if table ends with a known format formats = list(allowed_formats) + ["csv", "jsono"] for _format in formats: - if table_and_format.endswith(".{}".format(_format)): + if table_and_format.endswith(f".{_format}"): table = table_and_format[: -(len(_format) + 1)] return table, _format return table_and_format, None @@ -683,20 +676,20 @@ def path_with_format( ): qs = extra_qs or {} path = request.path if request else path - if replace_format and path.endswith(".{}".format(replace_format)): + if replace_format and path.endswith(f".{replace_format}"): path = path[: -(1 + len(replace_format))] if "." 
in path: qs["_format"] = format else: - path = "{}.{}".format(path, format) + path = f"{path}.{format}" if qs: extra = urllib.parse.urlencode(sorted(qs.items())) if request and request.query_string: - path = "{}?{}&{}".format(path, request.query_string, extra) + path = f"{path}?{request.query_string}&{extra}" else: - path = "{}?{}".format(path, extra) + path = f"{path}?{extra}" elif request and request.query_string: - path = "{}?{}".format(path, request.query_string) + path = f"{path}?{request.query_string}" return path @@ -742,9 +735,7 @@ class LimitedWriter: async def write(self, bytes): self.bytes_count += len(bytes) if self.limit_bytes and (self.bytes_count > self.limit_bytes): - raise WriteLimitExceeded( - "CSV contains more than {} bytes".format(self.limit_bytes) - ) + raise WriteLimitExceeded(f"CSV contains more than {self.limit_bytes} bytes") await self.writer.write(bytes) @@ -763,14 +754,14 @@ class StaticMount(click.ParamType): def convert(self, value, param, ctx): if ":" not in value: self.fail( - '"{}" should be of format mountpoint:directory'.format(value), + f'"{value}" should be of format mountpoint:directory', param, ctx, ) path, dirpath = value.split(":", 1) dirpath = os.path.abspath(dirpath) if not os.path.exists(dirpath) or not os.path.isdir(dirpath): - self.fail("%s is not a valid directory path" % value, param, ctx) + self.fail(f"{value} is not a valid directory path", param, ctx) return path, dirpath @@ -781,9 +772,9 @@ def format_bytes(bytes): break current = current / 1024 if unit == "bytes": - return "{} {}".format(int(current), unit) + return f"{int(current)} {unit}" else: - return "{:.1f} {}".format(current, unit) + return f"{current:.1f} {unit}" _escape_fts_re = re.compile(r'\s+|(".*?")') @@ -820,7 +811,7 @@ class MultiParams: self._data = new_data def __repr__(self): - return "".format(self._data) + return f"" def __contains__(self, key): return key in self._data @@ -867,7 +858,7 @@ def check_connection(conn): for table in tables: try: conn.execute( - "PRAGMA table_info({});".format(escape_sqlite(table)), + f"PRAGMA table_info({escape_sqlite(table)});", ) except sqlite3.OperationalError as e: if e.args[0] == "no such module: VirtualSpatialIndex": diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index e4c8ce5c..ce78a597 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -260,7 +260,7 @@ async def asgi_send_file( ): headers = headers or {} if filename: - headers["content-disposition"] = 'attachment; filename="{}"'.format(filename) + headers["content-disposition"] = f'attachment; filename="{filename}"' first = True headers["content-length"] = str((await aiofiles.os.stat(str(filepath))).st_size) async with aiofiles.open(str(filepath), mode="rb") as fp: diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index 8a8810e7..bcbc1c7a 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -32,7 +32,7 @@ class TestResponse: return any( h for h in self.httpx_response.headers.get_list("set-cookie") - if h.startswith('{}="";'.format(cookie)) + if h.startswith(f'{cookie}="";') ) @property @@ -125,9 +125,7 @@ class TestClient: if allow_redirects and response.status in (301, 302): assert ( redirect_count < self.max_redirects - ), "Redirected {} times, max_redirects={}".format( - redirect_count, self.max_redirects - ) + ), f"Redirected {redirect_count} times, max_redirects={self.max_redirects}" location = response.headers["Location"] return await self._request( location, allow_redirects=True, 
redirect_count=redirect_count + 1 diff --git a/datasette/views/base.py b/datasette/views/base.py index 430489c1..b3a54bcc 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -125,9 +125,7 @@ class BaseView: **{ "database_color": self.database_color, "select_templates": [ - "{}{}".format( - "*" if template_name == template.name else "", template_name - ) + f"{'*' if template_name == template.name else ''}{template_name}" for template_name in templates ], }, @@ -165,11 +163,11 @@ class DataView(BaseView): def redirect(self, request, path, forward_querystring=True, remove_args=None): if request.query_string and "?" not in path and forward_querystring: - path = "{}?{}".format(path, request.query_string) + path = f"{path}?{request.query_string}" if remove_args: path = path_with_removed_args(request, remove_args, path=path) r = Response.redirect(path) - r.headers["Link"] = "<{}>; rel=preload".format(path) + r.headers["Link"] = f"<{path}>; rel=preload" if self.ds.cors: r.headers["Access-Control-Allow-Origin"] = "*" return r @@ -184,7 +182,7 @@ class DataView(BaseView): # No matching DB found, maybe it's a name-hash? name_bit, hash_bit = db_name.rsplit("-", 1) if name_bit not in self.ds.databases: - raise NotFound("Database not found: {}".format(name)) + raise NotFound(f"Database not found: {name}") else: name = name_bit hash = hash_bit @@ -194,7 +192,7 @@ class DataView(BaseView): try: db = self.ds.databases[name] except KeyError: - raise NotFound("Database not found: {}".format(name)) + raise NotFound(f"Database not found: {name}") # Verify the hash expected = "000" @@ -217,11 +215,11 @@ class DataView(BaseView): ) kwargs["table"] = table if _format: - kwargs["as_format"] = ".{}".format(_format) + kwargs["as_format"] = f".{_format}" elif kwargs.get("table"): kwargs["table"] = urllib.parse.unquote_plus(kwargs["table"]) - should_redirect = self.ds.urls.path("{}-{}".format(name, expected)) + should_redirect = self.ds.urls.path(f"{name}-{expected}") if kwargs.get("table"): should_redirect += "/" + urllib.parse.quote_plus(kwargs["table"]) if kwargs.get("pk_path"): @@ -294,7 +292,7 @@ class DataView(BaseView): for column in data["columns"]: headings.append(column) if column in expanded_columns: - headings.append("{}_label".format(column)) + headings.append(f"{column}_label") async def stream_fn(r): nonlocal data @@ -505,7 +503,7 @@ class DataView(BaseView): elif isinstance(result, Response): r = result else: - assert False, "{} should be dict or Response".format(result) + assert False, f"{result} should be dict or Response" else: extras = {} if callable(extra_template_data): @@ -581,7 +579,7 @@ class DataView(BaseView): if ttl == 0: ttl_header = "no-cache" else: - ttl_header = "max-age={}".format(ttl) + ttl_header = f"max-age={ttl}" response.headers["Cache-Control"] = ttl_header response.headers["Referrer-Policy"] = "no-referrer" if self.ds.cors: diff --git a/datasette/views/database.py b/datasette/views/database.py index 9a7b96fd..d4ed8570 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -140,7 +140,7 @@ class DatabaseView(DataView): and not db.is_mutable and database != ":memory:", }, - ("database-{}.html".format(to_css_class(database)), "database.html"), + (f"database-{to_css_class(database)}.html", "database.html"), ) @@ -233,7 +233,7 @@ class QueryView(DataView): if _size: extra_args["page_size"] = _size - templates = ["query-{}.html".format(to_css_class(database)), "query.html"] + templates = [f"query-{to_css_class(database)}.html", "query.html"] # 
Execute query - as write or as read if write: @@ -324,9 +324,7 @@ class QueryView(DataView): if canned_query: templates.insert( 0, - "query-{}-{}.html".format( - to_css_class(database), to_css_class(canned_query) - ), + f"query-{to_css_class(database)}-{to_css_class(canned_query)}.html", ) allow_execute_sql = await self.ds.permission_allowed( diff --git a/datasette/views/special.py b/datasette/views/special.py index 397dbc8c..9750dd06 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -111,13 +111,13 @@ class AllowDebugView(BaseView): actor = json.loads(actor_input) actor_input = json.dumps(actor, indent=4) except json.decoder.JSONDecodeError as ex: - errors.append("Actor JSON error: {}".format(ex)) + errors.append(f"Actor JSON error: {ex}") allow_input = request.args.get("allow") or '{"id": "*"}' try: allow = json.loads(allow_input) allow_input = json.dumps(allow, indent=4) except json.decoder.JSONDecodeError as ex: - errors.append("Allow JSON error: {}".format(ex)) + errors.append(f"Allow JSON error: {ex}") result = None if not errors: diff --git a/datasette/views/table.py b/datasette/views/table.py index 9ed45df1..09c2d740 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -212,13 +212,11 @@ class RowTableShared(DataView): # representation, which we have to round off to avoid ugliness. In the vast # majority of cases this rounding will be inconsequential. I hope. value = round(value.to_compact(), 6) - display_value = jinja2.Markup( - "{:~P}".format(value).replace(" ", " ") - ) + display_value = jinja2.Markup(f"{value:~P}".replace(" ", " ")) else: display_value = str(value) if truncate_cells and len(display_value) > truncate_cells: - display_value = display_value[:truncate_cells] + u"\u2026" + display_value = display_value[:truncate_cells] + "\u2026" cells.append( { @@ -307,7 +305,7 @@ class TableView(RowTableShared): is_view = bool(await db.get_view_definition(table)) table_exists = bool(await db.table_exists(table)) if not is_view and not table_exists: - raise NotFound("Table not found: {}".format(table)) + raise NotFound(f"Table not found: {table}") await self.check_permissions( request, @@ -330,7 +328,7 @@ class TableView(RowTableShared): use_rowid = not pks and not is_view if use_rowid: - select = "rowid, {}".format(select_columns) + select = f"rowid, {select_columns}" order_by = "rowid" order_by_pks = "rowid" else: @@ -424,7 +422,7 @@ class TableView(RowTableShared): raise DatasetteError( "Invalid _through - could not find corresponding foreign key" ) - param = "p{}".format(len(params)) + param = f"p{len(params)}" where_clauses.append( "{our_pk} in (select {our_column} from {through_table} where {other_column} = :{param})".format( through_table=escape_sqlite(through_table), @@ -436,7 +434,7 @@ class TableView(RowTableShared): ) params[param] = value extra_human_descriptions.append( - '{}.{} = "{}"'.format(through_table, other_column, value) + f'{through_table}.{other_column} = "{value}"' ) # _search support: @@ -462,7 +460,7 @@ class TableView(RowTableShared): else "escape_fts(:search)", ) ) - extra_human_descriptions.append('search matches "{}"'.format(search)) + extra_human_descriptions.append(f'search matches "{search}"') params["search"] = search else: # More complex: search against specific columns @@ -481,11 +479,9 @@ class TableView(RowTableShared): ) ) extra_human_descriptions.append( - 'search column "{}" matches "{}"'.format( - search_col, search_text - ) + f'search column "{search_col}" matches "{search_text}"' ) - 
params["search_{}".format(i)] = search_text + params[f"search_{i}"] = search_text sortable_columns = set() @@ -506,15 +502,15 @@ class TableView(RowTableShared): if sort: if sort not in sortable_columns: - raise DatasetteError("Cannot sort table by {}".format(sort)) + raise DatasetteError(f"Cannot sort table by {sort}") order_by = escape_sqlite(sort) if sort_desc: if sort_desc not in sortable_columns: - raise DatasetteError("Cannot sort table by {}".format(sort_desc)) + raise DatasetteError(f"Cannot sort table by {sort_desc}") - order_by = "{} desc".format(escape_sqlite(sort_desc)) + order_by = f"{escape_sqlite(sort_desc)} desc" from_sql = "from {table_name} {where}".format( table_name=escape_sqlite(table), @@ -525,14 +521,14 @@ class TableView(RowTableShared): # Copy of params so we can mutate them later: from_sql_params = dict(**params) - count_sql = "select count(*) {}".format(from_sql) + count_sql = f"select count(*) {from_sql}" _next = _next or special_args.get("_next") offset = "" if _next: if is_view: # _next is an offset - offset = " offset {}".format(int(_next)) + offset = f" offset {int(_next)}" else: components = urlsafe_components(_next) # If a sort order is applied, the first of these is the sort value @@ -546,8 +542,8 @@ class TableView(RowTableShared): # Figure out the SQL for next-based-on-primary-key first next_by_pk_clauses = [] if use_rowid: - next_by_pk_clauses.append("rowid > :p{}".format(len(params))) - params["p{}".format(len(params))] = components[0] + next_by_pk_clauses.append(f"rowid > :p{len(params)}") + params[f"p{len(params)}"] = components[0] else: # Apply the tie-breaker based on primary keys if len(components) == len(pks): @@ -556,7 +552,7 @@ class TableView(RowTableShared): compound_keys_after_sql(pks, param_len) ) for i, pk_value in enumerate(components): - params["p{}".format(param_len + i)] = pk_value + params[f"p{param_len + i}"] = pk_value # Now add the sort SQL, which may incorporate next_by_pk_clauses if sort or sort_desc: @@ -590,17 +586,17 @@ class TableView(RowTableShared): next_clauses=" and ".join(next_by_pk_clauses), ) ) - params["p{}".format(len(params))] = sort_value - order_by = "{}, {}".format(order_by, order_by_pks) + params[f"p{len(params)}"] = sort_value + order_by = f"{order_by}, {order_by_pks}" else: where_clauses.extend(next_by_pk_clauses) where_clause = "" if where_clauses: - where_clause = "where {} ".format(" and ".join(where_clauses)) + where_clause = f"where {' and '.join(where_clauses)} " if order_by: - order_by = "order by {} ".format(order_by) + order_by = f"order by {order_by} " extra_args = {} # Handle ?_size=500 @@ -617,9 +613,7 @@ class TableView(RowTableShared): raise BadRequest("_size must be a positive integer") if page_size > self.ds.max_returned_rows: - raise BadRequest( - "_size must be <= {}".format(self.ds.max_returned_rows) - ) + raise BadRequest(f"_size must be <= {self.ds.max_returned_rows}") extra_args["page_size"] = page_size else: @@ -631,9 +625,7 @@ class TableView(RowTableShared): where=where_clause, order_by=order_by, ) - sql = "{sql_no_limit} limit {limit}{offset}".format( - sql_no_limit=sql_no_limit.rstrip(), limit=page_size + 1, offset=offset - ) + sql = f"{sql_no_limit.rstrip()} limit {page_size + 1}{offset}" if request.args.get("_timelimit"): extra_args["custom_time_limit"] = int(request.args.get("_timelimit")) @@ -645,7 +637,7 @@ class TableView(RowTableShared): if ( not db.is_mutable and self.ds.inspect_data - and count_sql == "select count(*) from {} ".format(table) + and count_sql == f"select 
count(*) from {table} " ): try: filtered_table_rows_count = self.ds.inspect_data[database]["tables"][ @@ -763,7 +755,7 @@ class TableView(RowTableShared): prefix = "$null" else: prefix = urllib.parse.quote_plus(str(prefix)) - next_value = "{},{}".format(prefix, next_value) + next_value = f"{prefix},{next_value}" added_args = {"_next": next_value} if sort: added_args["_sort"] = sort @@ -879,12 +871,8 @@ class TableView(RowTableShared): "sort_desc": sort_desc, "disable_sort": is_view, "custom_table_templates": [ - "_table-{}-{}.html".format( - to_css_class(database), to_css_class(table) - ), - "_table-table-{}-{}.html".format( - to_css_class(database), to_css_class(table) - ), + f"_table-{to_css_class(database)}-{to_css_class(table)}.html", + f"_table-table-{to_css_class(database)}-{to_css_class(table)}.html", "_table.html", ], "metadata": metadata, @@ -918,7 +906,7 @@ class TableView(RowTableShared): }, extra_template, ( - "table-{}-{}.html".format(to_css_class(database), to_css_class(table)), + f"table-{to_css_class(database)}-{to_css_class(table)}.html", "table.html", ), ) @@ -931,13 +919,11 @@ async def _sql_params_pks(db, table, pk_values): if use_rowid: select = "rowid, *" pks = ["rowid"] - wheres = ['"{}"=:p{}'.format(pk, i) for i, pk in enumerate(pks)] - sql = "select {} from {} where {}".format( - select, escape_sqlite(table), " AND ".join(wheres) - ) + wheres = [f'"{pk}"=:p{i}' for i, pk in enumerate(pks)] + sql = f"select {select} from {escape_sqlite(table)} where {' AND '.join(wheres)}" params = {} for i, pk_value in enumerate(pk_values): - params["p{}".format(i)] = pk_value + params[f"p{i}"] = pk_value return sql, params, pks @@ -960,7 +946,7 @@ class RowView(RowTableShared): columns = [r[0] for r in results.description] rows = list(results.rows) if not rows: - raise NotFound("Record not found: {}".format(pk_values)) + raise NotFound(f"Record not found: {pk_values}") async def template_data(): display_columns, display_rows = await self.display_columns_and_rows( @@ -981,12 +967,8 @@ class RowView(RowTableShared): "display_columns": display_columns, "display_rows": display_rows, "custom_table_templates": [ - "_table-{}-{}.html".format( - to_css_class(database), to_css_class(table) - ), - "_table-row-{}-{}.html".format( - to_css_class(database), to_css_class(table) - ), + f"_table-{to_css_class(database)}-{to_css_class(table)}.html", + f"_table-row-{to_css_class(database)}-{to_css_class(table)}.html", "_table.html", ], "metadata": (self.ds.metadata("databases") or {}) @@ -1014,7 +996,7 @@ class RowView(RowTableShared): data, template_data, ( - "row-{}-{}.html".format(to_css_class(database), to_css_class(table)), + f"row-{to_css_class(database)}-{to_css_class(table)}.html", "row.html", ), ) diff --git a/tests/fixtures.py b/tests/fixtures.py index 183b8ca4..3abca821 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -247,7 +247,7 @@ def generate_compound_rows(num): for a, b, c in itertools.islice( itertools.product(string.ascii_lowercase, repeat=3), num ): - yield a, b, c, "{}-{}-{}".format(a, b, c) + yield a, b, c, f"{a}-{b}-{c}" def generate_sortable_rows(num): @@ -258,7 +258,7 @@ def generate_sortable_rows(num): yield { "pk1": a, "pk2": b, - "content": "{}-{}".format(a, b), + "content": f"{a}-{b}", "sortable": rand.randint(-100, 100), "sortable_with_nulls": rand.choice([None, rand.random(), rand.random()]), "sortable_with_nulls_2": rand.choice([None, rand.random(), rand.random()]), @@ -742,7 +742,7 @@ def cli(db_filename, metadata, plugins_path, recreate): if 
pathlib.Path(db_filename).exists(): if not recreate: raise click.ClickException( - "{} already exists, use --recreate to reset it".format(db_filename) + f"{db_filename} already exists, use --recreate to reset it" ) else: pathlib.Path(db_filename).unlink() @@ -751,10 +751,10 @@ def cli(db_filename, metadata, plugins_path, recreate): for sql, params in TABLE_PARAMETERIZED_SQL: with conn: conn.execute(sql, params) - print("Test tables written to {}".format(db_filename)) + print(f"Test tables written to {db_filename}") if metadata: open(metadata, "w").write(json.dumps(METADATA, indent=4)) - print("- metadata written to {}".format(metadata)) + print(f"- metadata written to {metadata}") if plugins_path: path = pathlib.Path(plugins_path) if not path.exists(): @@ -763,7 +763,7 @@ def cli(db_filename, metadata, plugins_path, recreate): for filepath in test_plugins.glob("*.py"): newpath = path / filepath.name newpath.write_text(filepath.open().read()) - print(" Wrote plugin: {}".format(newpath)) + print(f" Wrote plugin: {newpath}") if __name__ == "__main__": diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 4ac3953b..3f5ec832 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -130,7 +130,7 @@ def extra_template_vars( @hookimpl def prepare_jinja2_environment(env): - env.filters["format_numeric"] = lambda s: "{:,.0f}".format(float(s)) + env.filters["format_numeric"] = lambda s: f"{float(s):,.0f}" @hookimpl @@ -207,7 +207,7 @@ def register_routes(): async def two(request): name = request.url_vars["name"] greeting = request.args.get("greeting") - return Response.text("{} {}".format(greeting, name)) + return Response.text(f"{greeting} {name}") async def three(scope, send): await asgi_send_json( @@ -281,11 +281,7 @@ def startup(datasette): @hookimpl def canned_queries(datasette, database, actor): - return { - "from_hook": "select 1, '{}' as actor_id".format( - actor["id"] if actor else "null" - ) - } + return {"from_hook": f"select 1, '{actor['id'] if actor else 'null'}' as actor_id"} @hookimpl @@ -329,9 +325,9 @@ def table_actions(datasette, database, table, actor): return [ { "href": datasette.urls.instance(), - "label": "Database: {}".format(database), + "label": f"Database: {database}", }, - {"href": datasette.urls.instance(), "label": "Table: {}".format(table)}, + {"href": datasette.urls.instance(), "label": f"Table: {table}"}, ] @@ -341,6 +337,6 @@ def database_actions(datasette, database, actor): return [ { "href": datasette.urls.instance(), - "label": "Database: {}".format(database), + "label": f"Database: {database}", } ] diff --git a/tests/test_api.py b/tests/test_api.py index 93097574..3d48d350 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -918,7 +918,7 @@ def test_paginate_compound_keys_with_extra_filters(app_client): ], ) def test_sortable(app_client, query_string, sort_key, human_description_en): - path = "/fixtures/sortable.json?_shape=objects&{}".format(query_string) + path = f"/fixtures/sortable.json?_shape=objects&{query_string}" fetched = [] page = 0 while path: @@ -969,8 +969,8 @@ def test_sortable_columns_metadata(app_client): assert "Cannot sort table by content" == response.json["error"] # no_primary_key has ALL sort options disabled for column in ("content", "a", "b", "c"): - response = app_client.get("/fixtures/sortable.json?_sort={}".format(column)) - assert "Cannot sort table by {}".format(column) == response.json["error"] + response = app_client.get(f"/fixtures/sortable.json?_sort={column}") + assert f"Cannot sort 
table by {column}" == response.json["error"] @pytest.mark.parametrize( @@ -1877,7 +1877,7 @@ def test_binary_data_in_json(app_client, path, expected_json, expected_text): ], ) def test_paginate_using_link_header(app_client, qs): - path = "/fixtures/compound_three_primary_keys.json{}".format(qs) + path = f"/fixtures/compound_three_primary_keys.json{qs}" num_pages = 0 while path: response = app_client.get(path) diff --git a/tests/test_auth.py b/tests/test_auth.py index 34138aa6..5f3985db 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -7,7 +7,7 @@ import time def test_auth_token(app_client): "The /-/auth-token endpoint sets the correct cookie" assert app_client.ds._root_token is not None - path = "/-/auth-token?token={}".format(app_client.ds._root_token) + path = f"/-/auth-token?token={app_client.ds._root_token}" response = app_client.get( path, allow_redirects=False, diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index 9620c693..65f23cc7 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -82,7 +82,7 @@ def test_insert(canned_write_client): def test_canned_query_form_csrf_hidden_field( canned_write_client, query_name, expect_csrf_hidden_field ): - response = canned_write_client.get("/data/{}".format(query_name)) + response = canned_write_client.get(f"/data/{query_name}") html = response.text fragment = '".format(expected_considered) in response.text - ) + assert f"" in response.text def test_table_html_simple_primary_key(app_client): @@ -607,9 +605,7 @@ def test_table_html_simple_primary_key(app_client): for expected_col, th in zip(("content",), ths[1:]): a = th.find("a") assert expected_col == a.string - assert a["href"].endswith( - "/simple_primary_key?_size=3&_sort={}".format(expected_col) - ) + assert a["href"].endswith(f"/simple_primary_key?_size=3&_sort={expected_col}") assert ["nofollow"] == a["rel"] assert [ [ @@ -730,11 +726,11 @@ def test_table_html_no_primary_key(app_client): '{}'.format( i, i ), - '{}'.format(i), - '{}'.format(i), - 'a{}'.format(i), - 'b{}'.format(i), - 'c{}'.format(i), + f'{i}', + f'{i}', + f'a{i}', + f'b{i}', + f'c{i}', ] for i in range(1, 51) ] @@ -782,8 +778,8 @@ def test_table_html_compound_primary_key(app_client): for expected_col, th in zip(("pk1", "pk2", "content"), ths[1:]): a = th.find("a") assert expected_col == a.string - assert th["class"] == ["col-{}".format(expected_col)] - assert a["href"].endswith("/compound_primary_key?_sort={}".format(expected_col)) + assert th["class"] == [f"col-{expected_col}"] + assert a["href"].endswith(f"/compound_primary_key?_sort={expected_col}") expected = [ [ 'a,b', @@ -1100,9 +1096,7 @@ def test_404(app_client, path): response = app_client.get(path) assert 404 == response.status assert ( - 'Edit SQL' if expected: assert expected_link in response.text else: @@ -1555,10 +1548,10 @@ def test_navigation_menu_links( for link in should_have_links: assert ( details.find("a", {"href": link}) is not None - ), "{} expected but missing from nav menu".format(link) + ), f"{link} expected but missing from nav menu" if should_not_have_links: for link in should_not_have_links: assert ( details.find("a", {"href": link}) is None - ), "{} found but should not have been in nav menu".format(link) + ), f"{link} found but should not have been in nav menu" diff --git a/tests/test_internals_urls.py b/tests/test_internals_urls.py index a56d735b..89290911 100644 --- a/tests/test_internals_urls.py +++ b/tests/test_internals_urls.py @@ -157,7 +157,7 @@ def 
test_database_hashed(app_client_with_hash, base_url): ds._config["base_url"] = base_url db_hash = ds.get_database("fixtures").hash assert len(db_hash) == 64 - expected = "{}fixtures-{}".format(base_url, db_hash[:7]) + expected = f"{base_url}fixtures-{db_hash[:7]}" assert ds.urls.database("fixtures") == expected assert ds.urls.table("fixtures", "name") == expected + "/name" assert ds.urls.query("fixtures", "name") == expected + "/name" diff --git a/tests/test_messages.py b/tests/test_messages.py index 830244e1..3af5439a 100644 --- a/tests/test_messages.py +++ b/tests/test_messages.py @@ -11,7 +11,7 @@ import pytest ], ) def test_add_message_sets_cookie(app_client, qs, expected): - response = app_client.get("/fixtures.message?{}".format(qs)) + response = app_client.get(f"/fixtures.message?{qs}") signed = response.cookies["ds_messages"] decoded = app_client.ds.unsign(signed, "messages") assert expected == decoded diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 3819c872..51faeccb 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -34,7 +34,7 @@ def test_plugin_hooks_have_tests(plugin_hook): for test in tests_in_this_module: if plugin_hook in test: ok = True - assert ok, "Plugin hook is missing tests: {}".format(plugin_hook) + assert ok, f"Plugin hook is missing tests: {plugin_hook}" def test_hook_plugins_dir_plugin_prepare_connection(app_client): @@ -398,7 +398,7 @@ def view_names_client(tmp_path_factory): def test_view_names(view_names_client, path, view_name): response = view_names_client.get(path) assert response.status == 200 - assert "view_name:{}".format(view_name) == response.text + assert f"view_name:{view_name}" == response.text def test_hook_register_output_renderer_no_parameters(app_client): @@ -659,7 +659,7 @@ def test_hook_register_routes_csrftoken(restore_working_directory, tmpdir_factor with make_app_client(template_dir=templates) as client: response = client.get("/csrftoken-form/") expected_token = client.ds._last_request.scope["csrftoken"]() - assert "CSRFTOKEN: {}".format(expected_token) == response.text + assert f"CSRFTOKEN: {expected_token}" == response.text def test_hook_register_routes_asgi(app_client): @@ -793,14 +793,14 @@ def test_hook_table_actions(app_client, table_or_view): return [] return [{"label": a.text, "href": a["href"]} for a in details.select("a")] - response = app_client.get("/fixtures/{}".format(table_or_view)) + response = app_client.get(f"/fixtures/{table_or_view}") assert get_table_actions_links(response.text) == [] - response_2 = app_client.get("/fixtures/{}?_bot=1".format(table_or_view)) + response_2 = app_client.get(f"/fixtures/{table_or_view}?_bot=1") assert get_table_actions_links(response_2.text) == [ {"label": "From async", "href": "/"}, {"label": "Database: fixtures", "href": "/"}, - {"label": "Table: {}".format(table_or_view), "href": "/"}, + {"label": f"Table: {table_or_view}", "href": "/"}, ] diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index a4eca49f..e629bba0 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -62,7 +62,7 @@ Service name: input-service tag = "gcr.io/myproject/datasette" mock_call.assert_has_calls( [ - mock.call("gcloud builds submit --tag {}".format(tag), shell=True), + mock.call(f"gcloud builds submit --tag {tag}", shell=True), mock.call( "gcloud run deploy --allow-unauthenticated --platform=managed --image {} input-service".format( tag @@ -86,10 +86,10 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which): cli.cli, 
["publish", "cloudrun", "test.db", "--service", "test"] ) assert 0 == result.exit_code - tag = "gcr.io/{}/datasette".format(mock_output.return_value) + tag = f"gcr.io/{mock_output.return_value}/datasette" mock_call.assert_has_calls( [ - mock.call("gcloud builds submit --tag {}".format(tag), shell=True), + mock.call(f"gcloud builds submit --tag {tag}", shell=True), mock.call( "gcloud run deploy --allow-unauthenticated --platform=managed --image {} test".format( tag @@ -129,10 +129,10 @@ def test_publish_cloudrun_memory( assert 2 == result.exit_code return assert 0 == result.exit_code - tag = "gcr.io/{}/datasette".format(mock_output.return_value) + tag = f"gcr.io/{mock_output.return_value}/datasette" mock_call.assert_has_calls( [ - mock.call("gcloud builds submit --tag {}".format(tag), shell=True), + mock.call(f"gcloud builds submit --tag {tag}", shell=True), mock.call( "gcloud run deploy --allow-unauthenticated --platform=managed --image {} test --memory {}".format( tag, memory diff --git a/tests/test_utils.py b/tests/test_utils.py index 2d2ff52d..07e6f870 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -457,7 +457,7 @@ def test_check_connection_passes(): def test_call_with_supported_arguments(): def foo(a, b): - return "{}+{}".format(a, b) + return f"{a}+{b}" assert "1+2" == utils.call_with_supported_arguments(foo, a=1, b=2) assert "1+2" == utils.call_with_supported_arguments(foo, a=1, b=2, c=3) diff --git a/update-docs-help.py b/update-docs-help.py index c007e23c..3a192575 100644 --- a/update-docs-help.py +++ b/update-docs-help.py @@ -16,7 +16,7 @@ def update_help_includes(): for name, filename in includes: runner = CliRunner() result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88) - actual = "$ datasette {} --help\n\n{}".format(name, result.output) + actual = f"$ datasette {name} --help\n\n{result.output}" actual = actual.replace("Usage: cli ", "Usage: datasette ") open(docs_path / filename, "w").write(actual) From 4bac9f18f9d04e5ed10f072502bcc508e365438e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 21 Nov 2020 15:33:04 -0800 Subject: [PATCH 0038/1455] Fix off-screen action menu bug, refs #1084 --- datasette/static/app.css | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index 675285c1..b9378a9e 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -361,13 +361,13 @@ details .nav-menu-inner { } /* Table/database actions menu */ -.actions-menu-links { +.page-header { position: relative; } .actions-menu-links .dropdown-menu { position: absolute; - top: 2rem; - right: 0; + top: calc(100% + 10px); + left: -10px; } /* Components ============================================================== */ From 3159263f05ac4baf968929d59384d9223a539071 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 24 Nov 2020 12:01:47 -0800 Subject: [PATCH 0039/1455] New --setting to replace --config, closes #992 --- datasette/cli.py | 56 +++++++++++++++++++++++++++++++++-- docs/datasette-serve-help.txt | 5 ++-- tests/test_cli.py | 36 ++++++++++++++++++++++ 3 files changed, 93 insertions(+), 4 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 99075078..9e696aa8 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -2,6 +2,7 @@ import asyncio import uvicorn import click from click import formatting +from click.types import CompositeParamType from click_default_group import DefaultGroup import json import os @@ -29,6 +30,7 @@ from .version import __version__ 
class Config(click.ParamType): + # This will be removed in Datasette 1.0 in favour of class Setting name = "config" def convert(self, config, param, ctx): @@ -63,6 +65,39 @@ class Config(click.ParamType): self.fail("Invalid option") +class Setting(CompositeParamType): + name = "setting" + arity = 2 + + def convert(self, config, param, ctx): + name, value = config + if name not in DEFAULT_CONFIG: + self.fail( + f"{name} is not a valid option (--help-config to see all)", + param, + ctx, + ) + return + # Type checking + default = DEFAULT_CONFIG[name] + if isinstance(default, bool): + try: + return name, value_as_boolean(value) + except ValueAsBooleanError: + self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx) + return + elif isinstance(default, int): + if not value.isdigit(): + self.fail(f'"{name}" should be an integer', param, ctx) + return + return name, int(value) + elif isinstance(default, str): + return name, value + else: + # Should never happen: + self.fail("Invalid option") + + @click.group(cls=DefaultGroup, default="serve", default_if_no_args=True) @click.version_option(version=__version__) def cli(): @@ -330,7 +365,14 @@ def uninstall(packages, yes): @click.option( "--config", type=Config(), - help="Set config option using configname:value docs.datasette.io/en/stable/config.html", + help="Deprecated: set config option using configname:value. Use --setting instead.", + multiple=True, +) +@click.option( + "--setting", + "settings", + type=Setting(), + help="Setting, see docs.datasette.io/en/stable/config.html", multiple=True, ) @click.option( @@ -372,6 +414,7 @@ def serve( static, memory, config, + settings, secret, root, get, @@ -410,6 +453,15 @@ def serve( if metadata: metadata_data = parse_metadata(metadata.read()) + combined_config = {} + if config: + click.echo( + "--config name:value will be deprecated in Datasette 1.0, use --setting name value instead", + err=True, + ) + combined_config.update(config) + combined_config.update(settings) + kwargs = dict( immutables=immutable, cache_headers=not reload, @@ -420,7 +472,7 @@ def serve( template_dir=template_dir, plugins_dir=plugins_dir, static_mounts=static, - config=dict(config), + config=combined_config, memory=memory, secret=secret, version_note=version_note, diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index 5a63d4c4..bdaf0894 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -25,9 +25,10 @@ Options: --plugins-dir DIRECTORY Path to directory containing custom plugins --static MOUNT:DIRECTORY Serve static files from this directory at /MOUNT/... --memory Make :memory: database available - --config CONFIG Set config option using configname:value - docs.datasette.io/en/stable/config.html + --config CONFIG Deprecated: set config option using configname:value. Use + --setting instead. + --setting SETTING... 
Setting, see docs.datasette.io/en/stable/config.html --secret TEXT Secret used for signing secure values, such as signed cookies diff --git a/tests/test_cli.py b/tests/test_cli.py index aa39b0ee..99aea053 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -4,6 +4,7 @@ from .fixtures import ( TestClient as _TestClient, EXPECTED_PLUGINS, ) +import asyncio from datasette.plugins import DEFAULT_PLUGINS from datasette.cli import cli, serve from datasette.version import __version__ @@ -17,6 +18,13 @@ import textwrap from unittest import mock +@pytest.fixture +def ensure_eventloop(): + # Workaround for "Event loop is closed" error + if asyncio.get_event_loop().is_closed(): + asyncio.set_event_loop(asyncio.new_event_loop()) + + def test_inspect_cli(app_client): runner = CliRunner() result = runner.invoke(cli, ["inspect", "fixtures.db"]) @@ -115,6 +123,7 @@ def test_metadata_yaml(): static=[], memory=False, config=[], + settings=[], secret=None, root=False, version_note=None, @@ -163,3 +172,30 @@ def test_version(): runner = CliRunner() result = runner.invoke(cli, ["--version"]) assert result.output == f"cli, version {__version__}\n" + + +def test_setting(ensure_eventloop): + runner = CliRunner() + result = runner.invoke( + cli, ["--setting", "default_page_size", "5", "--get", "/-/config.json"] + ) + assert result.exit_code == 0, result.output + assert json.loads(result.output)["default_page_size"] == 5 + + +def test_setting_type_validation(ensure_eventloop): + runner = CliRunner(mix_stderr=False) + result = runner.invoke(cli, ["--setting", "default_page_size", "dog"]) + assert result.exit_code == 2 + assert '"default_page_size" should be an integer' in result.stderr + + +def test_config_deprecated(ensure_eventloop): + # The --config option should show a deprecation message + runner = CliRunner(mix_stderr=False) + result = runner.invoke( + cli, ["--config", "allow_download:off", "--get", "/-/config.json"] + ) + assert result.exit_code == 0 + assert not json.loads(result.output)["allow_download"] + assert "will be deprecated in" in result.stderr From 2a3d5b720b96d5ad79ccad655f6575bb71aae302 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 24 Nov 2020 12:19:14 -0800 Subject: [PATCH 0040/1455] Redirect /-/config to /-/settings, closes #1103 --- datasette/app.py | 20 ++++++++++++++++++-- datasette/default_menu_links.py | 4 ++-- docs/introspection.rst | 8 ++++---- tests/test_api.py | 17 +++++++++++++++-- tests/test_cli.py | 4 ++-- tests/test_config_dir.py | 2 +- tests/test_permissions.py | 2 +- 7 files changed, 43 insertions(+), 14 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index b2bdb746..36df6032 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -66,6 +66,7 @@ from .utils.asgi import ( Forbidden, NotFound, Request, + Response, asgi_static, asgi_send, asgi_send_html, @@ -884,8 +885,16 @@ class Datasette: r"/-/plugins(?P(\.json)?)$", ) add_route( - JsonDataView.as_view(self, "config.json", lambda: self._config), - r"/-/config(?P(\.json)?)$", + JsonDataView.as_view(self, "settings.json", lambda: self._config), + r"/-/settings(?P(\.json)?)$", + ) + add_route( + permanent_redirect("/-/settings.json"), + r"/-/config.json", + ) + add_route( + permanent_redirect("/-/settings"), + r"/-/config", ) add_route( JsonDataView.as_view(self, "threads.json", self._threads), @@ -1224,6 +1233,13 @@ def wrap_view(view_fn, datasette): return async_view_fn +def permanent_redirect(path): + return wrap_view( + lambda request, send: Response.redirect(path, status=301), + datasette=None, 
+ ) + + _curly_re = re.compile(r"(\{.*?\})") diff --git a/datasette/default_menu_links.py b/datasette/default_menu_links.py index 0b135410..56f481ef 100644 --- a/datasette/default_menu_links.py +++ b/datasette/default_menu_links.py @@ -22,8 +22,8 @@ def menu_links(datasette, actor): "label": "Metadata", }, { - "href": datasette.urls.path("/-/config"), - "label": "Config", + "href": datasette.urls.path("/-/settings"), + "label": "Settings", }, { "href": datasette.urls.path("/-/permissions"), diff --git a/docs/introspection.rst b/docs/introspection.rst index 698ba95f..a0402b9d 100644 --- a/docs/introspection.rst +++ b/docs/introspection.rst @@ -89,10 +89,10 @@ Add ``?all=1`` to include details of the default plugins baked into Datasette. .. _JsonDataView_config: -/-/config ---------- +/-/settings +----------- -Shows the :ref:`config` options for this instance of Datasette. `Config example `_: +Shows the :ref:`config` options for this instance of Datasette. `Settings example `_: .. code-block:: json @@ -110,7 +110,7 @@ Shows the :ref:`config` options for this instance of Datasette. `Config example /-/databases ------------ -Shows currently attached databases. `Databases example `_: +Shows currently attached databases. `Databases example `_: .. code-block:: json diff --git a/tests/test_api.py b/tests/test_api.py index 3d48d350..2bab6c30 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1324,8 +1324,8 @@ def test_versions_json(app_client): assert "compile_options" in response.json["sqlite"] -def test_config_json(app_client): - response = app_client.get("/-/config.json") +def test_settings_json(app_client): + response = app_client.get("/-/settings.json") assert { "default_page_size": 50, "default_facet_size": 30, @@ -1350,6 +1350,19 @@ def test_config_json(app_client): } == response.json +@pytest.mark.parametrize( + "path,expected_redirect", + ( + ("/-/config.json", "/-/settings.json"), + ("/-/config", "/-/settings"), + ), +) +def test_config_redirects_to_settings(app_client, path, expected_redirect): + response = app_client.get(path, allow_redirects=False) + assert response.status == 301 + assert response.headers["Location"] == expected_redirect + + def test_page_size_matching_max_returned_rows( app_client_returned_rows_matches_page_size, ): diff --git a/tests/test_cli.py b/tests/test_cli.py index 99aea053..36b9a092 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -177,7 +177,7 @@ def test_version(): def test_setting(ensure_eventloop): runner = CliRunner() result = runner.invoke( - cli, ["--setting", "default_page_size", "5", "--get", "/-/config.json"] + cli, ["--setting", "default_page_size", "5", "--get", "/-/settings.json"] ) assert result.exit_code == 0, result.output assert json.loads(result.output)["default_page_size"] == 5 @@ -194,7 +194,7 @@ def test_config_deprecated(ensure_eventloop): # The --config option should show a deprecation message runner = CliRunner(mix_stderr=False) result = runner.invoke( - cli, ["--config", "allow_download:off", "--get", "/-/config.json"] + cli, ["--config", "allow_download:off", "--get", "/-/settings.json"] ) assert result.exit_code == 0 assert not json.loads(result.output)["allow_download"] diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index 15c7a5c4..34bd1d7e 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -86,7 +86,7 @@ def test_metadata(config_dir_client): def test_config(config_dir_client): - response = config_dir_client.get("/-/config.json") + response = 
config_dir_client.get("/-/settings.json") assert 200 == response.status assert 60 == response.json["default_cache_ttl"] diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 60883eef..3b7e1654 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -378,7 +378,7 @@ def view_instance_client(): "/-/metadata", "/-/versions", "/-/plugins", - "/-/config", + "/-/settings", "/-/threads", "/-/databases", "/-/actor", From 33eadb8782d5b3e179df7dfa08f6d376ded2acd3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 24 Nov 2020 12:37:29 -0800 Subject: [PATCH 0041/1455] config.json is now settings.json, closes #1104 --- datasette/app.py | 7 +++++-- datasette/cli.py | 3 +++ datasette/utils/__init__.py | 4 ++++ docs/config.rst | 8 ++++---- tests/test_config_dir.py | 17 ++++++++++++++--- 5 files changed, 30 insertions(+), 9 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 36df6032..0e42b7c6 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -45,6 +45,7 @@ from .database import Database, QueryInterrupted from .utils import ( PrefixedUrlString, + StartupError, async_call_with_supported_arguments, await_me_maybe, call_with_supported_arguments, @@ -265,8 +266,10 @@ class Datasette: if config_dir and (config_dir / "static").is_dir() and not static_mounts: static_mounts = [("static", str((config_dir / "static").resolve()))] self.static_mounts = static_mounts or [] - if config_dir and (config_dir / "config.json").exists() and not config: - config = json.load((config_dir / "config.json").open()) + if config_dir and (config_dir / "config.json").exists(): + raise StartupError("config.json should be renamed to settings.json") + if config_dir and (config_dir / "settings.json").exists() and not config: + config = json.load((config_dir / "settings.json").open()) self._config = dict(DEFAULT_CONFIG, **(config or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note diff --git a/datasette/cli.py b/datasette/cli.py index 9e696aa8..95e1418c 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -14,6 +14,7 @@ from runpy import run_module import webbrowser from .app import Datasette, DEFAULT_CONFIG, CONFIG_OPTIONS, pm from .utils import ( + StartupError, check_connection, parse_metadata, ConnectionProblem, @@ -488,6 +489,8 @@ def serve( ds = Datasette(files, **kwargs) except SpatialiteNotFound: raise click.ClickException("Could not find SpatiaLite extension") + except StartupError as e: + raise click.ClickException(e.args[0]) if return_instance: # Private utility mechanism for writing unit tests diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 02b59b2b..d62302e9 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1027,3 +1027,7 @@ class PrefixedUrlString(str): return method.__get__(self) else: return super().__getattribute__(name) + + +class StartupError(Exception): + pass diff --git a/docs/config.rst b/docs/config.rst index 0883e532..27b73d44 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -50,15 +50,15 @@ The files that can be included in this directory are as follows. 
All are optiona * ``*.db`` - SQLite database files that will be served by Datasette * ``metadata.json`` - :ref:`metadata` for those databases - ``metadata.yaml`` or ``metadata.yml`` can be used as well * ``inspect-data.json`` - the result of running ``datasette inspect`` - any database files listed here will be treated as immutable, so they should not be changed while Datasette is running -* ``config.json`` - settings that would normally be passed using ``--config`` - here they should be stored as a JSON object of key/value pairs +* ``settings.json`` - settings that would normally be passed using ``--setting`` - here they should be stored as a JSON object of key/value pairs * ``templates/`` - a directory containing :ref:`customization_custom_templates` * ``plugins/`` - a directory containing plugins, see :ref:`writing_plugins_one_off` * ``static/`` - a directory containing static files - these will be served from ``/static/filename.txt``, see :ref:`customization_static_files` -Configuration options ---------------------- +Settings +-------- -The followig options can be set using ``--config name:value``, or by storing them in the ``config.json`` file for use with :ref:`config_dir`. +The following options can be set using ``--setting name value``, or by storing them in the ``settings.json`` file for use with :ref:`config_dir`. default_page_size ~~~~~~~~~~~~~~~~~ diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index 34bd1d7e..cd158474 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -3,7 +3,9 @@ import pytest import sqlite3 from datasette.app import Datasette +from datasette.cli import cli from .fixtures import TestClient as _TestClient +from click.testing import CliRunner PLUGIN = """ from datasette import hookimpl @@ -15,7 +17,7 @@ def extra_template_vars(): } """ METADATA = {"title": "This is from metadata"} -CONFIG = { +SETTINGS = { "default_cache_ttl": 60, } CSS = """ @@ -44,7 +46,7 @@ def config_dir_client(tmp_path_factory): (static_dir / "hello.css").write_text(CSS, "utf-8") (config_dir / "metadata.json").write_text(json.dumps(METADATA), "utf-8") - (config_dir / "config.json").write_text(json.dumps(CONFIG), "utf-8") + (config_dir / "settings.json").write_text(json.dumps(SETTINGS), "utf-8") for dbname in ("demo.db", "immutable.db"): db = sqlite3.connect(str(config_dir / dbname)) @@ -85,12 +87,21 @@ def test_metadata(config_dir_client): assert METADATA == response.json -def test_config(config_dir_client): +def test_settings(config_dir_client): response = config_dir_client.get("/-/settings.json") assert 200 == response.status assert 60 == response.json["default_cache_ttl"] +def test_error_on_config_json(tmp_path_factory): + config_dir = tmp_path_factory.mktemp("config-dir") + (config_dir / "config.json").write_text(json.dumps(SETTINGS), "utf-8") + runner = CliRunner(mix_stderr=False) + result = runner.invoke(cli, [str(config_dir), "--get", "/-/settings.json"]) + assert result.exit_code == 1 + assert "config.json should be renamed to settings.json" in result.stderr + + def test_plugins(config_dir_client): response = config_dir_client.get("/-/plugins.json") assert 200 == response.status From 5a77f7a6494c74372bedfef6185e1beed7bea5dc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 24 Nov 2020 13:22:33 -0800 Subject: [PATCH 0042/1455] Updated docs renaming config to settings - config.html is now settings.html - ConfigOption in app.py is now Setting - updated documentation unit tests Refs #1106 --- .github/workflows/deploy-latest.yml | 4 +- 
datasette/app.py | 56 +++++++-------- datasette/cli.py | 12 ++-- docs/changelog.rst | 18 ++--- docs/csv_export.rst | 6 +- docs/deploying.rst | 8 +-- docs/index.rst | 2 +- docs/internals.rst | 6 +- docs/introspection.rst | 2 +- docs/pages.rst | 2 +- docs/performance.rst | 6 +- docs/plugin_hooks.rst | 2 +- docs/publish.rst | 4 +- docs/{config.rst => settings.rst} | 108 ++++++++++++++++------------ docs/spatialite.rst | 2 +- docs/writing_plugins.rst | 2 +- tests/test_docs.py | 10 +-- 17 files changed, 131 insertions(+), 119 deletions(-) rename docs/{config.rst => settings.rst} (80%) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 73b97a19..7a41bda2 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -53,11 +53,11 @@ jobs: --plugins-dir=plugins \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ - --extra-options="--config template_debug:1" \ + --extra-options="--setting template_debug 1" \ --service=datasette-latest # Deploy docs.db to a different service datasette publish cloudrun docs.db \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ - --extra-options="--config template_debug:1" \ + --extra-options="--setting template_debug 1" \ --service=datasette-docs-latest diff --git a/datasette/app.py b/datasette/app.py index 0e42b7c6..3bb6ce79 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -82,91 +82,85 @@ app_root = Path(__file__).parent.parent MEMORY = object() -ConfigOption = collections.namedtuple("ConfigOption", ("name", "default", "help")) -CONFIG_OPTIONS = ( - ConfigOption("default_page_size", 100, "Default page size for the table view"), - ConfigOption( +Setting = collections.namedtuple("Setting", ("name", "default", "help")) +SETTINGS = ( + Setting("default_page_size", 100, "Default page size for the table view"), + Setting( "max_returned_rows", 1000, "Maximum rows that can be returned from a table or custom query", ), - ConfigOption( + Setting( "num_sql_threads", 3, "Number of threads in the thread pool for executing SQLite queries", ), - ConfigOption( - "sql_time_limit_ms", 1000, "Time limit for a SQL query in milliseconds" - ), - ConfigOption( + Setting("sql_time_limit_ms", 1000, "Time limit for a SQL query in milliseconds"), + Setting( "default_facet_size", 30, "Number of values to return for requested facets" ), - ConfigOption( - "facet_time_limit_ms", 200, "Time limit for calculating a requested facet" - ), - ConfigOption( + Setting("facet_time_limit_ms", 200, "Time limit for calculating a requested facet"), + Setting( "facet_suggest_time_limit_ms", 50, "Time limit for calculating a suggested facet", ), - ConfigOption( + Setting( "hash_urls", False, "Include DB file contents hash in URLs, for far-future caching", ), - ConfigOption( + Setting( "allow_facet", True, "Allow users to specify columns to facet using ?_facet= parameter", ), - ConfigOption( + Setting( "allow_download", True, "Allow users to download the original SQLite database files", ), - ConfigOption("suggest_facets", True, "Calculate and display suggested facets"), - ConfigOption( + Setting("suggest_facets", True, "Calculate and display suggested facets"), + Setting( "default_cache_ttl", 5, "Default HTTP cache TTL (used in Cache-Control: max-age= header)", ), - ConfigOption( + Setting( "default_cache_ttl_hashed", 365 * 24 * 60 * 60, "Default HTTP cache TTL for hashed URL pages", ), - ConfigOption( - "cache_size_kb", 0, "SQLite cache size in KB (0 == use SQLite default)" - ), - ConfigOption( + Setting("cache_size_kb", 0, 
"SQLite cache size in KB (0 == use SQLite default)"), + Setting( "allow_csv_stream", True, "Allow .csv?_stream=1 to download all rows (ignoring max_returned_rows)", ), - ConfigOption( + Setting( "max_csv_mb", 100, "Maximum size allowed for CSV export in MB - set 0 to disable this limit", ), - ConfigOption( + Setting( "truncate_cells_html", 2048, "Truncate cells longer than this in HTML table view - set 0 to disable", ), - ConfigOption( + Setting( "force_https_urls", False, "Force URLs in API output to always use https:// protocol", ), - ConfigOption( + Setting( "template_debug", False, "Allow display of template debug information with ?_context=1", ), - ConfigOption("base_url", "/", "Datasette URLs should use this base path"), + Setting("base_url", "/", "Datasette URLs should use this base path"), ) -DEFAULT_CONFIG = {option.name: option.default for option in CONFIG_OPTIONS} +DEFAULT_SETTINGS = {option.name: option.default for option in SETTINGS} async def favicon(request, send): @@ -270,7 +264,7 @@ class Datasette: raise StartupError("config.json should be renamed to settings.json") if config_dir and (config_dir / "settings.json").exists() and not config: config = json.load((config_dir / "settings.json").open()) - self._config = dict(DEFAULT_CONFIG, **(config or {})) + self._config = dict(DEFAULT_SETTINGS, **(config or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note self.executor = futures.ThreadPoolExecutor( @@ -358,7 +352,7 @@ class Datasette: def config_dict(self): # Returns a fully resolved config dictionary, useful for templates - return {option.name: self.config(option.name) for option in CONFIG_OPTIONS} + return {option.name: self.config(option.name) for option in SETTINGS} def metadata(self, key=None, database=None, table=None, fallback=True): """ diff --git a/datasette/cli.py b/datasette/cli.py index 95e1418c..5feab51e 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -12,7 +12,7 @@ from subprocess import call import sys from runpy import run_module import webbrowser -from .app import Datasette, DEFAULT_CONFIG, CONFIG_OPTIONS, pm +from .app import Datasette, DEFAULT_SETTINGS, SETTINGS, pm from .utils import ( StartupError, check_connection, @@ -39,7 +39,7 @@ class Config(click.ParamType): self.fail(f'"{config}" should be name:value', param, ctx) return name, value = config.split(":", 1) - if name not in DEFAULT_CONFIG: + if name not in DEFAULT_SETTINGS: self.fail( f"{name} is not a valid option (--help-config to see all)", param, @@ -47,7 +47,7 @@ class Config(click.ParamType): ) return # Type checking - default = DEFAULT_CONFIG[name] + default = DEFAULT_SETTINGS[name] if isinstance(default, bool): try: return name, value_as_boolean(value) @@ -72,7 +72,7 @@ class Setting(CompositeParamType): def convert(self, config, param, ctx): name, value = config - if name not in DEFAULT_CONFIG: + if name not in DEFAULT_SETTINGS: self.fail( f"{name} is not a valid option (--help-config to see all)", param, @@ -80,7 +80,7 @@ class Setting(CompositeParamType): ) return # Type checking - default = DEFAULT_CONFIG[name] + default = DEFAULT_SETTINGS[name] if isinstance(default, bool): try: return name, value_as_boolean(value) @@ -432,7 +432,7 @@ def serve( formatter.write_dl( [ (option.name, f"{option.help} (default={option.default})") - for option in CONFIG_OPTIONS + for option in SETTINGS ] ) click.echo(formatter.getvalue()) diff --git a/docs/changelog.rst b/docs/changelog.rst index 34bd95d4..2916b373 100644 --- 
a/docs/changelog.rst +++ b/docs/changelog.rst @@ -49,7 +49,7 @@ The new :ref:`internals_datasette_urls` family of methods can be used to generat Running Datasette behind a proxy ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The :ref:`config_base_url` configuration option is designed to help run Datasette on a specific path behind a proxy - for example if you want to run an instance of Datasette at ``/my-datasette/`` within your existing site's URL hierarchy, proxied behind nginx or Apache. +The :ref:`setting_base_url` configuration option is designed to help run Datasette on a specific path behind a proxy - for example if you want to run an instance of Datasette at ``/my-datasette/`` within your existing site's URL hierarchy, proxied behind nginx or Apache. Support for this configuration option has been greatly improved (`#1023 `__), and guidelines for using it are now available in a new documentation section on :ref:`deploying_proxy`. (`#1027 `__) @@ -353,9 +353,9 @@ Signed values and secrets Both flash messages and user authentication needed a way to sign values and set signed cookies. Two new methods are now available for plugins to take advantage of this mechanism: :ref:`datasette_sign` and :ref:`datasette_unsign`. -Datasette will generate a secret automatically when it starts up, but to avoid resetting the secret (and hence invalidating any cookies) every time the server restarts you should set your own secret. You can pass a secret to Datasette using the new ``--secret`` option or with a ``DATASETTE_SECRET`` environment variable. See :ref:`config_secret` for more details. +Datasette will generate a secret automatically when it starts up, but to avoid resetting the secret (and hence invalidating any cookies) every time the server restarts you should set your own secret. You can pass a secret to Datasette using the new ``--secret`` option or with a ``DATASETTE_SECRET`` environment variable. See :ref:`settings_secret` for more details. -You can also set a secret when you deploy Datasette using ``datasette publish`` or ``datasette package`` - see :ref:`config_publish_secrets`. +You can also set a secret when you deploy Datasette using ``datasette publish`` or ``datasette package`` - see :ref:`settings_publish_secrets`. Plugins can now sign values and verify their signatures using the :ref:`datasette.sign() ` and :ref:`datasette.unsign() ` methods. @@ -450,7 +450,7 @@ A small release which provides improved internal methods for use in plugins, alo You can now create :ref:`custom pages ` within your Datasette instance using a custom template file. For example, adding a template file called ``templates/pages/about.html`` will result in a new page being served at ``/about`` on your instance. See the :ref:`custom pages documentation ` for full details, including how to return custom HTTP headers, redirects and status codes. (`#648 `__) -:ref:`config_dir` (`#731 `__) allows you to define a custom Datasette instance as a directory. So instead of running the following:: +:ref:`settings_dir` (`#731 `__) allows you to define a custom Datasette instance as a directory. So instead of running the following:: $ datasette one.db two.db \ --metadata.json \ @@ -480,7 +480,7 @@ Also in this release: * Datasette :ref:`metadata` can now be provided as a YAML file as an optional alternative to JSON. See :ref:`metadata_yaml`. (`#713 `__) * Removed support for ``datasette publish now``, which used the the now-retired Zeit Now v1 hosting platform. 
A new plugin, `datasette-publish-now `__, can be installed to publish data to Zeit (`now Vercel `__) Now v2. (`#710 `__) * Fixed a bug where the ``extra_template_vars(request, view_name)`` plugin hook was not receiving the correct ``view_name``. (`#716 `__) -* Variables added to the template context by the ``extra_template_vars()`` plugin hook are now shown in the ``?_context=1`` debugging mode (see :ref:`config_template_debug`). (`#693 `__) +* Variables added to the template context by the ``extra_template_vars()`` plugin hook are now shown in the ``?_context=1`` debugging mode (see :ref:`settings_template_debug`). (`#693 `__) * Fixed a bug where the "templates considered" HTML comment was no longer being displayed. (`#689 `__) * Fixed a ``datasette publish`` bug where ``--plugin-secret`` would over-ride plugin configuration in the provided ``metadata.json`` file. (`#724 `__) * Added a new CSS class for customizing the canned query page. (`#727 `__) @@ -490,7 +490,7 @@ Also in this release: 0.39 (2020-03-24) ----------------- -* New :ref:`config_base_url` configuration setting for serving up the correct links while running Datasette under a different URL prefix. (`#394 `__) +* New :ref:`setting_base_url` configuration setting for serving up the correct links while running Datasette under a different URL prefix. (`#394 `__) * New metadata settings ``"sort"`` and ``"sort_desc"`` for setting the default sort order for a table. See :ref:`metadata_default_sort`. (`#702 `__) * Sort direction arrow now displays by default on the primary key. This means you only have to click once (not twice) to sort in reverse order. (`#677 `__) * New ``await Request(scope, receive).post_vars()`` method for accessing POST form variables. (`#700 `__) @@ -565,7 +565,7 @@ Also in this release: * asyncio task information is now included on the ``/-/threads`` debug page * Bumped Uvicorn dependency 0.11 * You can now use ``--port 0`` to listen on an available port -* New :ref:`config_template_debug` setting for debugging templates, e.g. https://latest.datasette.io/fixtures/roadside_attractions?_context=1 (`#654 `__) +* New :ref:`settings_template_debug` setting for debugging templates, e.g. https://latest.datasette.io/fixtures/roadside_attractions?_context=1 (`#654 `__) .. _v0_32: @@ -1000,7 +1000,7 @@ Check out the :ref:`CSV export documentation ` for more details, or try the feature out on https://fivethirtyeight.datasettes.com/fivethirtyeight/bechdel%2Fmovies -If your table has more than :ref:`config_max_returned_rows` (default 1,000) +If your table has more than :ref:`settings_max_returned_rows` (default 1,000) Datasette provides the option to *stream all rows*. This option takes advantage of async Python and Datasette's efficient :ref:`pagination ` to iterate through the entire matching result set and stream it back as a @@ -1020,7 +1020,7 @@ table, using the new ``_labels=on`` querystring option. See New configuration settings ~~~~~~~~~~~~~~~~~~~~~~~~~~ -Datasette's :ref:`config` now also supports boolean settings. A number of new +Datasette's :ref:`settings` now also supports boolean settings. A number of new configuration options have been added: * ``num_sql_threads`` - the number of threads used to execute SQLite queries. Defaults to 3. 
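The ``Setting`` namedtuple and ``DEFAULT_SETTINGS`` dictionary introduced in ``datasette/app.py`` earlier in this patch are what back these options. A minimal sketch of that pattern - defaults resolved from the registry, overrides supplied via ``--setting name value`` or ``settings.json`` - using plain dicts rather than a real Datasette instance (``resolve_settings`` is an illustrative name, not Datasette's actual API):

.. code-block:: python

    import collections

    # Same shape as the Setting namedtuple defined in datasette/app.py above
    Setting = collections.namedtuple("Setting", ("name", "default", "help"))

    SETTINGS = (
        Setting("default_page_size", 100, "Default page size for the table view"),
        Setting("sql_time_limit_ms", 1000, "Time limit for a SQL query in milliseconds"),
        Setting("num_sql_threads", 3, "Number of threads for executing SQLite queries"),
    )
    # Defaults keyed by name, as in DEFAULT_SETTINGS
    DEFAULT_SETTINGS = {option.name: option.default for option in SETTINGS}

    def resolve_settings(overrides=None):
        # Overrides come from --setting name value or settings.json;
        # the real code also validates names and coerces boolean/integer types
        resolved = dict(DEFAULT_SETTINGS)
        resolved.update(overrides or {})
        return resolved

    print(resolve_settings({"sql_time_limit_ms": 3500}))
    # {'default_page_size': 100, 'sql_time_limit_ms': 3500, 'num_sql_threads': 3}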
diff --git a/docs/csv_export.rst b/docs/csv_export.rst index b5cc599a..704cc19d 100644 --- a/docs/csv_export.rst +++ b/docs/csv_export.rst @@ -23,7 +23,7 @@ file, which looks like this and has the following options: the ``city_id`` column is accompanied by a ``city_id_label`` column. * **stream all rows** - by default CSV files only contain the first - :ref:`config_max_returned_rows` records. This option will cause Datasette to + :ref:`settings_max_returned_rows` records. This option will cause Datasette to loop through every matching record and return them as a single CSV file. You can try that out on https://latest.datasette.io/fixtures/facetable?_size=4 @@ -40,9 +40,9 @@ Since databases can get pretty large, by default this option is capped at 100MB if a table returns more than 100MB of data the last line of the CSV will be a truncation error message. -You can increase or remove this limit using the :ref:`config_max_csv_mb` config +You can increase or remove this limit using the :ref:`settings_max_csv_mb` config setting. You can also disable the CSV export feature entirely using -:ref:`config_allow_csv_stream`. +:ref:`settings_allow_csv_stream`. A note on URLs -------------- diff --git a/docs/deploying.rst b/docs/deploying.rst index 3eeaaad8..4ca0e82a 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -58,7 +58,7 @@ Add a random value for the ``DATASETTE_SECRET`` - this will be used to sign Data $ python3 -c 'import secrets; print(secrets.token_hex(32))' -This configuration will run Datasette against all database files contained in the ``/home/ubunt/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`config_dir` for details. +This configuration will run Datasette against all database files contained in the ``/home/ubunt/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`settings_dir` for details. You can start the Datasette process running using the following:: @@ -101,7 +101,7 @@ The ``Procfile`` lets the hosting platform know how to run the command that serv web: datasette . -h 0.0.0.0 -p $PORT --cors -The ``$PORT`` environment variable is provided by the hosting platform. ``--cors`` enables CORS requests from JavaScript running on other websites to your domain - omit this if you don't want to allow CORS. You can add additional Datasette :ref:`config` options here too. +The ``$PORT`` environment variable is provided by the hosting platform. ``--cors`` enables CORS requests from JavaScript running on other websites to your domain - omit this if you don't want to allow CORS. You can add additional Datasette :ref:`settings` options here too. These two files should be enough to deploy Datasette on any host that supports buildpacks. Datasette will serve any SQLite files that are included in the root directory of the application. @@ -118,9 +118,9 @@ Running Datasette behind a proxy You may wish to run Datasette behind an Apache or nginx proxy, using a path within your existing site. -You can use the :ref:`config_base_url` configuration setting to tell Datasette to serve traffic with a specific URL prefix. For example, you could run Datasette like this:: +You can use the :ref:`setting_base_url` configuration setting to tell Datasette to serve traffic with a specific URL prefix. 
For example, you could run Datasette like this:: - datasette my-database.db --config base_url:/my-datasette/ -p 8009 + datasette my-database.db --setting base_url /my-datasette/ -p 8009 This will run Datasette with the following URLs: diff --git a/docs/index.rst b/docs/index.rst index 6b55da8c..ff8db04b 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -51,7 +51,7 @@ Contents full_text_search spatialite metadata - config + settings introspection custom_templates plugins diff --git a/docs/internals.rst b/docs/internals.rst index 92496490..cec1268f 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -350,7 +350,7 @@ Returns the absolute URL for the given path, including the protocol and host. Fo absolute_url = datasette.absolute_url(request, "/dbname/table.json") # Would return "http://localhost:8001/dbname/table.json" -The current request object is used to determine the hostname and protocol that should be used for the returned URL. The :ref:`config_force_https_urls` configuration setting is taken into account. +The current request object is used to determine the hostname and protocol that should be used for the returned URL. The :ref:`settings_force_https_urls` configuration setting is taken into account. .. _internals_datasette_client: @@ -397,7 +397,7 @@ These methods can be used with :ref:`internals_datasette_urls` - for example: ) ).json() -``datasette.client`` methods automatically take the current :ref:`config_base_url` setting into account, whether or not you use the ``datasette.urls`` family of methods to construct the path. +``datasette.client`` methods automatically take the current :ref:`setting_base_url` setting into account, whether or not you use the ``datasette.urls`` family of methods to construct the path. For documentation on available ``**kwargs`` options and the shape of the HTTPX Response object refer to the `HTTPX Async documentation `__. @@ -406,7 +406,7 @@ For documentation on available ``**kwargs`` options and the shape of the HTTPX R datasette.urls -------------- -The ``datasette.urls`` object contains methods for building URLs to pages within Datasette. Plugins should use this to link to pages, since these methods take into account any :ref:`config_base_url` configuration setting that might be in effect. +The ``datasette.urls`` object contains methods for building URLs to pages within Datasette. Plugins should use this to link to pages, since these methods take into account any :ref:`setting_base_url` configuration setting that might be in effect. ``datasette.urls.instance(format=None)`` Returns the URL to the Datasette instance root page. This is usually ``"/"``. diff --git a/docs/introspection.rst b/docs/introspection.rst index a0402b9d..d1a0a854 100644 --- a/docs/introspection.rst +++ b/docs/introspection.rst @@ -92,7 +92,7 @@ Add ``?all=1`` to include details of the default plugins baked into Datasette. /-/settings ----------- -Shows the :ref:`config` options for this instance of Datasette. `Settings example `_: +Shows the :ref:`settings` for this instance of Datasette. `Settings example `_: .. code-block:: json diff --git a/docs/pages.rst b/docs/pages.rst index db970ead..5f77bec7 100644 --- a/docs/pages.rst +++ b/docs/pages.rst @@ -66,7 +66,7 @@ Row Every row in every Datasette table has its own URL. This means individual records can be linked to directly. -Table cells with extremely long text contents are truncated on the table view according to the :ref:`config_truncate_cells_html` setting. 
If a cell has been truncated the full length version of that cell will be available on the row page. +Table cells with extremely long text contents are truncated on the table view according to the :ref:`settings_truncate_cells_html` setting. If a cell has been truncated the full length version of that cell will be available on the row page. Rows which are the targets of foreign key references from other tables will show a link to a filtered search for all records that reference that row. Here's an example from the Registers of Members Interests database: diff --git a/docs/performance.rst b/docs/performance.rst index d7f852d5..1d24adce 100644 --- a/docs/performance.rst +++ b/docs/performance.rst @@ -56,7 +56,7 @@ Using a caching proxy in this way could enable a Datasette-backed visualization Datasette's integration with HTTP caches can be enabled using a combination of configuration options and querystring arguments. -The :ref:`config_default_cache_ttl` setting sets the default HTTP cache TTL for all Datasette pages. This is 5 seconds unless you change it - you can set it to 0 if you wish to disable HTTP caching entirely. +The :ref:`settings_default_cache_ttl` setting sets the default HTTP cache TTL for all Datasette pages. This is 5 seconds unless you change it - you can set it to 0 if you wish to disable HTTP caching entirely. You can also change the cache timeout on a per-request basis using the ``?_ttl=10`` querystring parameter. This can be useful when you are working with the Datasette JSON API - you may decide that a specific query can be cached for a longer time, or maybe you need to set ``?_ttl=0`` for some requests for example if you are running a SQL ``order by random()`` query. @@ -65,9 +65,9 @@ Hashed URL mode When you open a database file in immutable mode using the ``-i`` option, Datasette calculates a SHA-256 hash of the contents of that file on startup. This content hash can then optionally be used to create URLs that are guaranteed to change if the contents of the file changes in the future. This results in URLs that can then be cached indefinitely by both browsers and caching proxies - an enormous potential performance optimization. -You can enable these hashed URLs in two ways: using the :ref:`config_hash_urls` configuration setting (which affects all requests to Datasette) or via the ``?_hash=1`` querystring parameter (which only applies to the current request). +You can enable these hashed URLs in two ways: using the :ref:`settings_hash_urls` configuration setting (which affects all requests to Datasette) or via the ``?_hash=1`` querystring parameter (which only applies to the current request). -With hashed URLs enabled, any request to e.g. ``/mydatabase/mytable`` will 302 redirect to ``mydatabase-455fe3a/mytable``. The URL containing the hash will be served with a very long cache expire header - configured using :ref:`config_default_cache_ttl_hashed` which defaults to 365 days. +With hashed URLs enabled, any request to e.g. ``/mydatabase/mytable`` will 302 redirect to ``mydatabase-455fe3a/mytable``. The URL containing the hash will be served with a very long cache expire header - configured using :ref:`settings_default_cache_ttl_hashed` which defaults to 365 days. Since these responses are cached for a long time, you may wish to build API clients against the non-hashed version of these URLs. These 302 redirects are served extremely quickly, so this should still be a performant way to work against the Datasette API. 
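The TTL behaviour described here corresponds to the logic in ``datasette/views/base.py``. A simplified sketch of how the cache TTL for a response is chosen (``choose_cache_ttl`` is an illustrative name; this assumes a plain settings dict rather than a Datasette instance):

.. code-block:: python

    def choose_cache_ttl(query_args, settings, correct_hash_provided):
        # ?_ttl= overrides the defaults on a per-request basis
        ttl = query_args.get("_ttl")
        if ttl is not None and ttl.isdigit():
            return int(ttl)
        # Hashed URLs only change when the database contents change,
        # so they can safely be cached for much longer
        if correct_hash_provided:
            return settings["default_cache_ttl_hashed"]
        return settings["default_cache_ttl"]

    settings = {
        "default_cache_ttl": 5,
        "default_cache_ttl_hashed": 365 * 24 * 60 * 60,
    }
    assert choose_cache_ttl({}, settings, correct_hash_provided=False) == 5
    assert choose_cache_ttl({"_ttl": "0"}, settings, correct_hash_provided=False) == 0
    assert choose_cache_ttl({}, settings, correct_hash_provided=True) == 31536000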
diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 8407a259..72b09367 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1020,7 +1020,7 @@ This example adds a new menu item but only if the signed in user is ``"root"``: {"href": datasette.urls.path("/-/edit-schema"), "label": "Edit schema"}, ] -Using :ref:`internals_datasette_urls` here ensures that links in the menu will take the :ref:`config_base_url` setting into account. +Using :ref:`internals_datasette_urls` here ensures that links in the menu will take the :ref:`setting_base_url` setting into account. .. _plugin_hook_table_actions: diff --git a/docs/publish.rst b/docs/publish.rst index a905ac92..d5015e21 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -135,7 +135,7 @@ If you have docker installed (e.g. using `Docker for Mac 79e1dc9af1c1 @@ -154,7 +154,7 @@ Here's example output for the package command:: Step 6/7 : EXPOSE 8001 ---> Using cache ---> 8e83844b0fed - Step 7/7 : CMD datasette serve parlgov.db --port 8001 --inspect-file inspect-data.json --config sql_time_limit_ms:2500 + Step 7/7 : CMD datasette serve parlgov.db --port 8001 --inspect-file inspect-data.json --setting sql_time_limit_ms 2500 ---> Using cache ---> 1bd380ea8af3 Successfully built 1bd380ea8af3 diff --git a/docs/config.rst b/docs/settings.rst similarity index 80% rename from docs/config.rst rename to docs/settings.rst index 27b73d44..350fd048 100644 --- a/docs/config.rst +++ b/docs/settings.rst @@ -1,20 +1,19 @@ -.. _config: +.. _settings: -Configuration -============= +Settings +======== -Using \-\-config ----------------- +Using \-\-setting +----------------- -Datasette provides a number of configuration options. These can be set using the ``--config name:value`` option to ``datasette serve``. +Datasette supports a number of settings. These can be set using the ``--setting name value`` option to ``datasette serve``. -You can set multiple configuration options at once like this:: +You can set multiple settings at once like this:: datasette mydatabase.db \ - --config default_page_size:50 \ - --config sql_time_limit_ms:3500 \ - --config max_returned_rows:2000 - + --setting default_page_size 50 \ + --setting sql_time_limit_ms 3500 \ + --setting max_returned_rows 2000 .. _config_dir: @@ -60,12 +59,16 @@ Settings The following options can be set using ``--setting name value``, or by storing them in the ``settings.json`` file for use with :ref:`config_dir`. +.. _setting_default_page_size: + default_page_size ~~~~~~~~~~~~~~~~~ -The default number of rows returned by the table page. You can over-ride this on a per-page basis using the ``?_size=80`` querystring parameter, provided you do not specify a value higher than the ``max_returned_rows`` setting. You can set this default using ``--config`` like so:: +The default number of rows returned by the table page. You can over-ride this on a per-page basis using the ``?_size=80`` querystring parameter, provided you do not specify a value higher than the ``max_returned_rows`` setting. You can set this default using ``--setting`` like so:: - datasette mydatabase.db --config default_page_size:50 + datasette mydatabase.db --setting default_page_size 50 + +.. _setting_sql_time_limit_ms: sql_time_limit_ms ~~~~~~~~~~~~~~~~~ @@ -74,7 +77,7 @@ By default, queries have a time limit of one second. 
If a query takes longer tha If this time limit is too short for you, you can customize it using the ``sql_time_limit_ms`` limit - for example, to increase it to 3.5 seconds:: - datasette mydatabase.db --config sql_time_limit_ms:3500 + datasette mydatabase.db --setting sql_time_limit_ms 3500 You can optionally set a lower time limit for an individual query using the ``?_timelimit=100`` querystring argument:: @@ -82,7 +85,7 @@ You can optionally set a lower time limit for an individual query using the ``?_ This would set the time limit to 100ms for that specific query. This feature is useful if you are working with databases of unknown size and complexity - a query that might make perfect sense for a smaller table could take too long to execute on a table with millions of rows. By setting custom time limits you can execute queries "optimistically" - e.g. give me an exact count of rows matching this query but only if it takes less than 100ms to calculate. -.. _config_max_returned_rows: +.. _setting_max_returned_rows: max_returned_rows ~~~~~~~~~~~~~~~~~ @@ -91,7 +94,9 @@ Datasette returns a maximum of 1,000 rows of data at a time. If you execute a qu You can increase or decrease this limit like so:: - datasette mydatabase.db --config max_returned_rows:2000 + datasette mydatabase.db --setting max_returned_rows 2000 + +.. _setting_num_sql_threads: num_sql_threads ~~~~~~~~~~~~~~~ @@ -100,7 +105,9 @@ Maximum number of threads in the thread pool Datasette uses to execute SQLite qu :: - datasette mydatabase.db --config num_sql_threads:10 + datasette mydatabase.db --setting num_sql_threads 10 + +.. _setting_allow_facet: allow_facet ~~~~~~~~~~~ @@ -111,21 +118,27 @@ This is enabled by default. If disabled, facets will still be displayed if they Here's how to disable this feature:: - datasette mydatabase.db --config allow_facet:off + datasette mydatabase.db --setting allow_facet off + +.. _setting_default_facet_size: default_facet_size ~~~~~~~~~~~~~~~~~~ The default number of unique rows returned by :ref:`facets` is 30. You can customize it like this:: - datasette mydatabase.db --config default_facet_size:50 + datasette mydatabase.db --setting default_facet_size 50 + +.. _setting_facet_time_limit_ms: facet_time_limit_ms ~~~~~~~~~~~~~~~~~~~ This is the time limit Datasette allows for calculating a facet, which defaults to 200ms:: - datasette mydatabase.db --config facet_time_limit_ms:1000 + datasette mydatabase.db --setting facet_time_limit_ms 1000 + +.. _setting_facet_suggest_time_limit_ms: facet_suggest_time_limit_ms ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -134,23 +147,27 @@ When Datasette calculates suggested facets it needs to run a SQL query for every You can increase this time limit like so:: - datasette mydatabase.db --config facet_suggest_time_limit_ms:500 + datasette mydatabase.db --setting facet_suggest_time_limit_ms 500 + +.. _setting_suggest_facets: suggest_facets ~~~~~~~~~~~~~~ Should Datasette calculate suggested facets? On by default, turn this off like so:: - datasette mydatabase.db --config suggest_facets:off + datasette mydatabase.db --setting suggest_facets off + +.. _setting_allow_download: allow_download ~~~~~~~~~~~~~~ Should users be able to download the original SQLite database using a link on the database index page? This is turned on by default - to disable database downloads, use the following:: - datasette mydatabase.db --config allow_download:off + datasette mydatabase.db --setting allow_download off -.. _config_default_cache_ttl: +.. 
_setting_default_cache_ttl: default_cache_ttl ~~~~~~~~~~~~~~~~~ @@ -159,19 +176,20 @@ Default HTTP caching max-age header in seconds, used for ``Cache-Control: max-ag :: - datasette mydatabase.db --config default_cache_ttl:60 + datasette mydatabase.db --setting default_cache_ttl 60 -.. _config_default_cache_ttl_hashed: +.. _setting_default_cache_ttl_hashed: default_cache_ttl_hashed ~~~~~~~~~~~~~~~~~~~~~~~~ -Default HTTP caching max-age for responses served using using the :ref:`hashed-urls mechanism `. Defaults to 365 days (31536000 seconds). +Default HTTP caching max-age for responses served using using the :ref:`hashed-urls mechanism `. Defaults to 365 days (31536000 seconds). :: - datasette mydatabase.db --config default_cache_ttl_hashed:10000 + datasette mydatabase.db --setting default_cache_ttl_hashed 10000 +.. _setting_cache_size_kb: cache_size_kb ~~~~~~~~~~~~~ @@ -180,9 +198,9 @@ Sets the amount of memory SQLite uses for its `per-connection cache Date: Tue, 24 Nov 2020 14:06:32 -0800 Subject: [PATCH 0043/1455] Renamed datasette.config() to .setting(), closes #1107 --- datasette/app.py | 32 ++++++++++++------------ datasette/facets.py | 22 ++++++++-------- datasette/url_builder.py | 4 +-- datasette/views/base.py | 10 ++++---- datasette/views/database.py | 4 +-- datasette/views/table.py | 10 ++++---- docs/internals.rst | 16 +++++++++++- tests/test_internals_datasette.py | 12 +++++++++ tests/test_internals_datasette_client.py | 12 ++++----- tests/test_internals_urls.py | 24 +++++++++--------- 10 files changed, 86 insertions(+), 60 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 3bb6ce79..88d5ecc6 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -264,15 +264,15 @@ class Datasette: raise StartupError("config.json should be renamed to settings.json") if config_dir and (config_dir / "settings.json").exists() and not config: config = json.load((config_dir / "settings.json").open()) - self._config = dict(DEFAULT_SETTINGS, **(config or {})) + self._settings = dict(DEFAULT_SETTINGS, **(config or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note self.executor = futures.ThreadPoolExecutor( - max_workers=self.config("num_sql_threads") + max_workers=self.setting("num_sql_threads") ) - self.max_returned_rows = self.config("max_returned_rows") - self.sql_time_limit_ms = self.config("sql_time_limit_ms") - self.page_size = self.config("default_page_size") + self.max_returned_rows = self.setting("max_returned_rows") + self.sql_time_limit_ms = self.setting("sql_time_limit_ms") + self.page_size = self.setting("default_page_size") # Execute plugins in constructor, to ensure they are available # when the rest of `datasette inspect` executes if self.plugins_dir: @@ -347,12 +347,12 @@ class Datasette: def remove_database(self, name): self.databases.pop(name) - def config(self, key): - return self._config.get(key, None) + def setting(self, key): + return self._settings.get(key, None) def config_dict(self): # Returns a fully resolved config dictionary, useful for templates - return {option.name: self.config(option.name) for option in SETTINGS} + return {option.name: self.setting(option.name) for option in SETTINGS} def metadata(self, key=None, database=None, table=None, fallback=True): """ @@ -454,8 +454,8 @@ class Datasette: conn.enable_load_extension(True) for extension in self.sqlite_extensions: conn.execute(f"SELECT load_extension('{extension}')") - if self.config("cache_size_kb"): - conn.execute(f"PRAGMA 
cache_size=-{self.config('cache_size_kb')}") + if self.setting("cache_size_kb"): + conn.execute(f"PRAGMA cache_size=-{self.setting('cache_size_kb')}") # pylint: disable=no-member pm.hook.prepare_connection(conn=conn, database=database, datasette=self) @@ -567,7 +567,7 @@ class Datasette: def absolute_url(self, request, path): url = urllib.parse.urljoin(request.url, path) - if url.startswith("http://") and self.config("force_https_urls"): + if url.startswith("http://") and self.setting("force_https_urls"): url = "https://" + url[len("http://") :] return url @@ -781,12 +781,12 @@ class Datasette: "extra_js_urls": await self._asset_urls( "extra_js_urls", template, context, request, view_name ), - "base_url": self.config("base_url"), + "base_url": self.setting("base_url"), "csrftoken": request.scope["csrftoken"] if request else lambda: "", }, **extra_template_vars, } - if request and request.args.get("_context") and self.config("template_debug"): + if request and request.args.get("_context") and self.setting("template_debug"): return "
<pre>{}</pre>
    ".format( jinja2.escape(json.dumps(template_context, default=repr, indent=4)) ) @@ -882,7 +882,7 @@ class Datasette: r"/-/plugins(?P(\.json)?)$", ) add_route( - JsonDataView.as_view(self, "settings.json", lambda: self._config), + JsonDataView.as_view(self, "settings.json", lambda: self._settings), r"/-/settings(?P(\.json)?)$", ) add_route( @@ -1001,7 +1001,7 @@ class DatasetteRouter: async def route_path(self, scope, receive, send, path): # Strip off base_url if present before routing - base_url = self.ds.config("base_url") + base_url = self.ds.setting("base_url") if base_url != "/" and path.startswith(base_url): path = "/" + path[len(base_url) :] request = Request(scope, receive) @@ -1016,7 +1016,7 @@ class DatasetteRouter: scope_modifications = {} # Apply force_https_urls, if set if ( - self.ds.config("force_https_urls") + self.ds.setting("force_https_urls") and scope["type"] == "http" and scope.get("scheme") != "https" ): diff --git a/datasette/facets.py b/datasette/facets.py index a818a9e9..8ad5a423 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -136,7 +136,7 @@ class ColumnFacet(Facet): async def suggest(self): row_count = await self.get_row_count() columns = await self.get_columns(self.sql, self.params) - facet_size = self.ds.config("default_facet_size") + facet_size = self.ds.setting("default_facet_size") suggested_facets = [] already_enabled = [c["config"]["simple"] for c in self.get_configs()] for column in columns: @@ -158,7 +158,7 @@ class ColumnFacet(Facet): suggested_facet_sql, self.params, truncate=False, - custom_time_limit=self.ds.config("facet_suggest_time_limit_ms"), + custom_time_limit=self.ds.setting("facet_suggest_time_limit_ms"), ) num_distinct_values = len(distinct_values) if ( @@ -188,7 +188,7 @@ class ColumnFacet(Facet): qs_pairs = self.get_querystring_pairs() - facet_size = self.ds.config("default_facet_size") + facet_size = self.ds.setting("default_facet_size") for source_and_config in self.get_configs(): config = source_and_config["config"] source = source_and_config["source"] @@ -208,7 +208,7 @@ class ColumnFacet(Facet): facet_sql, self.params, truncate=False, - custom_time_limit=self.ds.config("facet_time_limit_ms"), + custom_time_limit=self.ds.setting("facet_time_limit_ms"), ) facet_results_values = [] facet_results[column] = { @@ -290,7 +290,7 @@ class ArrayFacet(Facet): suggested_facet_sql, self.params, truncate=False, - custom_time_limit=self.ds.config("facet_suggest_time_limit_ms"), + custom_time_limit=self.ds.setting("facet_suggest_time_limit_ms"), log_sql_errors=False, ) types = tuple(r[0] for r in results.rows) @@ -305,7 +305,7 @@ class ArrayFacet(Facet): ), self.params, truncate=False, - custom_time_limit=self.ds.config( + custom_time_limit=self.ds.setting( "facet_suggest_time_limit_ms" ), log_sql_errors=False, @@ -335,7 +335,7 @@ class ArrayFacet(Facet): facet_results = {} facets_timed_out = [] - facet_size = self.ds.config("default_facet_size") + facet_size = self.ds.setting("default_facet_size") for source_and_config in self.get_configs(): config = source_and_config["config"] source = source_and_config["source"] @@ -354,7 +354,7 @@ class ArrayFacet(Facet): facet_sql, self.params, truncate=False, - custom_time_limit=self.ds.config("facet_time_limit_ms"), + custom_time_limit=self.ds.setting("facet_time_limit_ms"), ) facet_results_values = [] facet_results[column] = { @@ -421,7 +421,7 @@ class DateFacet(Facet): suggested_facet_sql, self.params, truncate=False, - custom_time_limit=self.ds.config("facet_suggest_time_limit_ms"), + 
custom_time_limit=self.ds.setting("facet_suggest_time_limit_ms"), log_sql_errors=False, ) values = tuple(r[0] for r in results.rows) @@ -446,7 +446,7 @@ class DateFacet(Facet): facet_results = {} facets_timed_out = [] args = dict(self.get_querystring_pairs()) - facet_size = self.ds.config("default_facet_size") + facet_size = self.ds.setting("default_facet_size") for source_and_config in self.get_configs(): config = source_and_config["config"] source = source_and_config["source"] @@ -467,7 +467,7 @@ class DateFacet(Facet): facet_sql, self.params, truncate=False, - custom_time_limit=self.ds.config("facet_time_limit_ms"), + custom_time_limit=self.ds.setting("facet_time_limit_ms"), ) facet_results_values = [] facet_results[column] = { diff --git a/datasette/url_builder.py b/datasette/url_builder.py index 697f60ae..3034b664 100644 --- a/datasette/url_builder.py +++ b/datasette/url_builder.py @@ -10,7 +10,7 @@ class Urls: if not isinstance(path, PrefixedUrlString): if path.startswith("/"): path = path[1:] - path = self.ds.config("base_url") + path + path = self.ds.setting("base_url") + path if format is not None: path = path_with_format(path=path, format=format) return PrefixedUrlString(path) @@ -29,7 +29,7 @@ class Urls: def database(self, database, format=None): db = self.ds.databases[database] - if self.ds.config("hash_urls") and db.hash: + if self.ds.setting("hash_urls") and db.hash: path = self.path(f"{database}-{db.hash[:HASH_LENGTH]}", format=format) else: path = self.path(database, format=format) diff --git a/datasette/views/base.py b/datasette/views/base.py index b3a54bcc..bde8449f 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -230,7 +230,7 @@ class DataView(BaseView): should_redirect += kwargs["as_db"] if ( - (self.ds.config("hash_urls") or "_hash" in request.args) + (self.ds.setting("hash_urls") or "_hash" in request.args) and # Redirect only if database is immutable not self.ds.databases[name].is_mutable @@ -260,7 +260,7 @@ class DataView(BaseView): stream = request.args.get("_stream") if stream: # Some quick sanity checks - if not self.ds.config("allow_csv_stream"): + if not self.ds.setting("allow_csv_stream"): raise BadRequest("CSV streaming is disabled") if request.args.get("_next"): raise BadRequest("_next not allowed for CSV streaming") @@ -296,7 +296,7 @@ class DataView(BaseView): async def stream_fn(r): nonlocal data - writer = csv.writer(LimitedWriter(r, self.ds.config("max_csv_mb"))) + writer = csv.writer(LimitedWriter(r, self.ds.setting("max_csv_mb"))) first = True next = None while first or (next and stream): @@ -566,9 +566,9 @@ class DataView(BaseView): ttl = request.args.get("_ttl", None) if ttl is None or not ttl.isdigit(): if correct_hash_provided: - ttl = self.ds.config("default_cache_ttl_hashed") + ttl = self.ds.setting("default_cache_ttl_hashed") else: - ttl = self.ds.config("default_cache_ttl") + ttl = self.ds.setting("default_cache_ttl") return self.set_response_headers(r, ttl) diff --git a/datasette/views/database.py b/datasette/views/database.py index d4ed8570..17c78150 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -136,7 +136,7 @@ class DatabaseView(DataView): "show_hidden": request.args.get("_show_hidden"), "editable": True, "metadata": metadata, - "allow_download": self.ds.config("allow_download") + "allow_download": self.ds.setting("allow_download") and not db.is_mutable and database != ":memory:", }, @@ -161,7 +161,7 @@ class DatabaseDownload(DataView): db = self.ds.databases[database] if db.is_memory: 
raise DatasetteError("Cannot download :memory: database", status=404) - if not self.ds.config("allow_download") or db.is_mutable: + if not self.ds.setting("allow_download") or db.is_mutable: raise Forbidden("Database download is forbidden") if not db.path: raise DatasetteError("Cannot download database", status=404) diff --git a/datasette/views/table.py b/datasette/views/table.py index 09c2d740..a0de2a8e 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -121,7 +121,7 @@ class RowTableShared(DataView): } cell_rows = [] - base_url = self.ds.config("base_url") + base_url = self.ds.setting("base_url") for row in rows: cells = [] # Unless we are a view, the first column is a link - either to the rowid @@ -654,7 +654,7 @@ class TableView(RowTableShared): pass # facets support - if not self.ds.config("allow_facet") and any( + if not self.ds.setting("allow_facet") and any( arg.startswith("_facet") for arg in request.args ): raise BadRequest("_facet= is not allowed") @@ -772,8 +772,8 @@ class TableView(RowTableShared): suggested_facets = [] if ( - self.ds.config("suggest_facets") - and self.ds.config("allow_facet") + self.ds.setting("suggest_facets") + and self.ds.setting("allow_facet") and not _next ): for facet in facet_instances: @@ -801,7 +801,7 @@ class TableView(RowTableShared): results.description, rows, link_column=not is_view, - truncate_cells=self.ds.config("truncate_cells_html"), + truncate_cells=self.ds.setting("truncate_cells_html"), ) metadata = ( (self.ds.metadata("databases") or {}) diff --git a/docs/internals.rst b/docs/internals.rst index cec1268f..78d4e5d2 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -350,7 +350,21 @@ Returns the absolute URL for the given path, including the protocol and host. Fo absolute_url = datasette.absolute_url(request, "/dbname/table.json") # Would return "http://localhost:8001/dbname/table.json" -The current request object is used to determine the hostname and protocol that should be used for the returned URL. The :ref:`settings_force_https_urls` configuration setting is taken into account. +The current request object is used to determine the hostname and protocol that should be used for the returned URL. The :ref:`setting_force_https_urls` configuration setting is taken into account. + +.setting(key) +------------- + +``key`` - string + The name of the setting, e.g. ``base_url``. + +Returns the configured value for the specified :ref:`setting `. This can be a string, boolean or integer depending on the requested setting. + +For example: + +.. code-block:: python + + downloads_are_allowed = datasette.setting("allow_download") .. _internals_datasette_client: diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index 0be0b932..56bc2fb4 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -33,3 +33,15 @@ def test_sign_unsign(datasette, value, namespace): assert value == datasette.unsign(signed, *extra_args) with pytest.raises(BadSignature): datasette.unsign(signed[:-1] + ("!" if signed[-1] != "!" 
else ":")) + + +@pytest.mark.parametrize( + "setting,expected", + ( + ("base_url", "/"), + ("max_csv_mb", 100), + ("allow_csv_stream", True), + ), +) +def test_datasette_setting(datasette, setting, expected): + assert datasette.setting(setting) == expected diff --git a/tests/test_internals_datasette_client.py b/tests/test_internals_datasette_client.py index 0b1c5f0e..c538bef1 100644 --- a/tests/test_internals_datasette_client.py +++ b/tests/test_internals_datasette_client.py @@ -33,10 +33,10 @@ async def test_client_methods(datasette, method, path, expected_status): @pytest.mark.asyncio @pytest.mark.parametrize("prefix", [None, "/prefix/"]) async def test_client_post(datasette, prefix): - original_base_url = datasette._config["base_url"] + original_base_url = datasette._settings["base_url"] try: if prefix is not None: - datasette._config["base_url"] = prefix + datasette._settings["base_url"] = prefix response = await datasette.client.post( "/-/messages", data={ @@ -48,7 +48,7 @@ async def test_client_post(datasette, prefix): assert response.status_code == 302 assert "ds_messages" in response.cookies finally: - datasette._config["base_url"] = original_base_url + datasette._settings["base_url"] = original_base_url @pytest.mark.asyncio @@ -56,12 +56,12 @@ async def test_client_post(datasette, prefix): "prefix,expected_path", [(None, "/asgi-scope"), ("/prefix/", "/prefix/asgi-scope")] ) async def test_client_path(datasette, prefix, expected_path): - original_base_url = datasette._config["base_url"] + original_base_url = datasette._settings["base_url"] try: if prefix is not None: - datasette._config["base_url"] = prefix + datasette._settings["base_url"] = prefix response = await datasette.client.get("/asgi-scope") path = response.json()["path"] assert path == expected_path finally: - datasette._config["base_url"] = original_base_url + datasette._settings["base_url"] = original_base_url diff --git a/tests/test_internals_urls.py b/tests/test_internals_urls.py index 89290911..fd05c1b6 100644 --- a/tests/test_internals_urls.py +++ b/tests/test_internals_urls.py @@ -20,14 +20,14 @@ def ds(): ], ) def test_path(ds, base_url, path, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual = ds.urls.path(path) assert actual == expected assert isinstance(actual, PrefixedUrlString) def test_path_applied_twice_does_not_double_prefix(ds): - ds._config["base_url"] = "/prefix/" + ds._settings["base_url"] = "/prefix/" path = ds.urls.path("/") assert path == "/prefix/" path = ds.urls.path(path) @@ -42,7 +42,7 @@ def test_path_applied_twice_does_not_double_prefix(ds): ], ) def test_instance(ds, base_url, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual = ds.urls.instance() assert actual == expected assert isinstance(actual, PrefixedUrlString) @@ -56,7 +56,7 @@ def test_instance(ds, base_url, expected): ], ) def test_static(ds, base_url, file, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual = ds.urls.static(file) assert actual == expected assert isinstance(actual, PrefixedUrlString) @@ -80,7 +80,7 @@ def test_static(ds, base_url, file, expected): ], ) def test_static_plugins(ds, base_url, plugin, file, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual = ds.urls.static_plugins(plugin, file) assert actual == expected assert isinstance(actual, PrefixedUrlString) @@ -94,7 +94,7 @@ def test_static_plugins(ds, base_url, plugin, file, expected): ], ) def 
test_logout(ds, base_url, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual = ds.urls.logout() assert actual == expected assert isinstance(actual, PrefixedUrlString) @@ -109,7 +109,7 @@ def test_logout(ds, base_url, expected): ], ) def test_database(ds, base_url, format, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual = ds.urls.database(":memory:", format=format) assert actual == expected assert isinstance(actual, PrefixedUrlString) @@ -125,7 +125,7 @@ def test_database(ds, base_url, format, expected): ], ) def test_table_and_query(ds, base_url, name, format, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual1 = ds.urls.table(":memory:", name, format=format) assert actual1 == expected assert isinstance(actual1, PrefixedUrlString) @@ -143,7 +143,7 @@ def test_table_and_query(ds, base_url, name, format, expected): ], ) def test_row(ds, base_url, format, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual = ds.urls.row(":memory:", "facetable", "1", format=format) assert actual == expected assert isinstance(actual, PrefixedUrlString) @@ -152,9 +152,9 @@ def test_row(ds, base_url, format, expected): @pytest.mark.parametrize("base_url", ["/", "/prefix/"]) def test_database_hashed(app_client_with_hash, base_url): ds = app_client_with_hash.ds - original_base_url = ds._config["base_url"] + original_base_url = ds._settings["base_url"] try: - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url db_hash = ds.get_database("fixtures").hash assert len(db_hash) == 64 expected = f"{base_url}fixtures-{db_hash[:7]}" @@ -163,4 +163,4 @@ def test_database_hashed(app_client_with_hash, base_url): assert ds.urls.query("fixtures", "name") == expected + "/name" finally: # Reset this since fixture is shared with other tests - ds._config["base_url"] = original_base_url + ds._settings["base_url"] = original_base_url From 37d18a5bce08c9ee53c080f613bae84fc2ccc853 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 24 Nov 2020 19:05:35 -0800 Subject: [PATCH 0044/1455] datasette publish cloudrun --apt-get-install, closes #1110 --- datasette/publish/cloudrun.py | 8 +++ datasette/utils/__init__.py | 27 +++++--- docs/datasette-publish-cloudrun-help.txt | 1 + tests/test_publish_cloudrun.py | 78 +++++++++++++++++++++--- 4 files changed, 97 insertions(+), 17 deletions(-) diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 54f55fcb..54f06da0 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -36,6 +36,12 @@ def publish_subcommand(publish): callback=_validate_memory, help="Memory to allocate in Cloud Run, e.g. 
1Gi", ) + @click.option( + "--apt-get-install", + "apt_get_extras", + multiple=True, + help="Additional packages to apt-get install", + ) def cloudrun( files, metadata, @@ -60,6 +66,7 @@ def publish_subcommand(publish): spatialite, show_files, memory, + apt_get_extras, ): fail_if_publish_binary_not_installed( "gcloud", "Google Cloud", "https://cloud.google.com/sdk/" @@ -122,6 +129,7 @@ def publish_subcommand(publish): secret, extra_metadata, environment_variables, + apt_get_extras=apt_get_extras, ): if show_files: if os.path.exists("metadata.json"): diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index d62302e9..54a5b247 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -47,11 +47,10 @@ reserved_words = set( ).split() ) -SPATIALITE_DOCKERFILE_EXTRAS = r""" +APT_GET_DOCKERFILE_EXTRAS = r""" RUN apt-get update && \ - apt-get install -y python3-dev gcc libsqlite3-mod-spatialite && \ + apt-get install -y {} && \ rm -rf /var/lib/apt/lists/* -ENV SQLITE_EXTENSIONS /usr/lib/x86_64-linux-gnu/mod_spatialite.so """ # Can replace with sqlite-utils when I add that dependency @@ -308,10 +307,12 @@ def make_dockerfile( secret, environment_variables=None, port=8001, + apt_get_extras=None, ): cmd = ["datasette", "serve", "--host", "0.0.0.0"] environment_variables = environment_variables or {} environment_variables["DATASETTE_SECRET"] = secret + apt_get_extras = apt_get_extras or [] for filename in files: cmd.extend(["-i", filename]) cmd.extend(["--cors", "--inspect-file", "inspect-data.json"]) @@ -340,28 +341,38 @@ def make_dockerfile( else: install = ["datasette"] + list(install) + apt_get_extras_ = [] + apt_get_extras_.extend(apt_get_extras) + apt_get_extras = apt_get_extras_ + if spatialite: + apt_get_extras.extend(["python3-dev", "gcc", "libsqlite3-mod-spatialite"]) + environment_variables[ + "SQLITE_EXTENSIONS" + ] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so" return """ FROM python:3.8 COPY . /app WORKDIR /app -{spatialite_extras} +{apt_get_extras} {environment_variables} RUN pip install -U {install_from} RUN datasette inspect {files} --inspect-file inspect-data.json ENV PORT {port} EXPOSE {port} CMD {cmd}""".format( + apt_get_extras=APT_GET_DOCKERFILE_EXTRAS.format(" ".join(apt_get_extras)) + if apt_get_extras + else "", environment_variables="\n".join( [ "ENV {} '{}'".format(key, value) for key, value in environment_variables.items() ] ), - files=" ".join(files), - cmd=cmd, install_from=" ".join(install), - spatialite_extras=SPATIALITE_DOCKERFILE_EXTRAS if spatialite else "", + files=" ".join(files), port=port, + cmd=cmd, ).strip() @@ -382,6 +393,7 @@ def temporary_docker_directory( extra_metadata=None, environment_variables=None, port=8001, + apt_get_extras=None, ): extra_metadata = extra_metadata or {} tmp = tempfile.TemporaryDirectory() @@ -415,6 +427,7 @@ def temporary_docker_directory( secret, environment_variables, port=port, + apt_get_extras=apt_get_extras, ) os.chdir(datasette_dir) if metadata_content: diff --git a/docs/datasette-publish-cloudrun-help.txt b/docs/datasette-publish-cloudrun-help.txt index a625bd10..8cf293d9 100644 --- a/docs/datasette-publish-cloudrun-help.txt +++ b/docs/datasette-publish-cloudrun-help.txt @@ -30,4 +30,5 @@ Options: --spatialite Enable SpatialLite extension --show-files Output the generated Dockerfile and metadata.json --memory TEXT Memory to allocate in Cloud Run, e.g. 1Gi + --apt-get-install TEXT Additional packages to apt-get install --help Show this message and exit. 
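The extra packages reach the generated Dockerfile through the ``make_dockerfile()`` changes shown above: packages passed with ``--apt-get-install`` come first, and ``--spatialite`` appends its own dependencies to the same list. A condensed sketch of that assembly (``apt_get_block`` is an illustrative name, not a real helper in ``datasette.utils``; the real function also handles install sources, environment variables and ports):

.. code-block:: python

    APT_GET_DOCKERFILE_EXTRAS = r"""
    RUN apt-get update && \
        apt-get install -y {} && \
        rm -rf /var/lib/apt/lists/*
    """

    def apt_get_block(apt_get_extras=None, spatialite=False):
        packages = list(apt_get_extras or [])
        if spatialite:
            packages.extend(["python3-dev", "gcc", "libsqlite3-mod-spatialite"])
        if not packages:
            return ""  # omit the RUN apt-get line entirely
        return APT_GET_DOCKERFILE_EXTRAS.format(" ".join(packages))

    print(apt_get_block(["ripgrep"], spatialite=True))
    # RUN apt-get update && \
    #     apt-get install -y ripgrep python3-dev gcc libsqlite3-mod-spatialite && \
    #     rm -rf /var/lib/apt/lists/*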
diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index e629bba0..7adef39d 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -182,22 +182,26 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which): "x-secret", ], ) + assert result.exit_code == 0 dockerfile = ( result.output.split("==== Dockerfile ====\n")[1] .split("\n====================\n")[0] .strip() ) - expected = """FROM python:3.8 -COPY . /app -WORKDIR /app + expected = textwrap.dedent( + r""" + FROM python:3.8 + COPY . /app + WORKDIR /app -ENV DATASETTE_AUTH_GITHUB_CLIENT_ID 'x-client-id' -ENV DATASETTE_SECRET 'x-secret' -RUN pip install -U datasette -RUN datasette inspect test.db --inspect-file inspect-data.json -ENV PORT 8001 -EXPOSE 8001 -CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data.json --metadata metadata.json --port $PORT""".strip() + ENV DATASETTE_AUTH_GITHUB_CLIENT_ID 'x-client-id' + ENV DATASETTE_SECRET 'x-secret' + RUN pip install -U datasette + RUN datasette inspect test.db --inspect-file inspect-data.json + ENV PORT 8001 + EXPOSE 8001 + CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data.json --metadata metadata.json --port $PORT""" + ).strip() assert expected == dockerfile metadata = ( result.output.split("=== metadata.json ===\n")[1] @@ -213,3 +217,57 @@ CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data } }, } == json.loads(metadata) + + +@mock.patch("shutil.which") +@mock.patch("datasette.publish.cloudrun.check_output") +@mock.patch("datasette.publish.cloudrun.check_call") +def test_publish_cloudrun_apt_get_install(mock_call, mock_output, mock_which): + mock_which.return_value = True + mock_output.return_value = "myproject" + + runner = CliRunner() + with runner.isolated_filesystem(): + open("test.db", "w").write("data") + result = runner.invoke( + cli.cli, + [ + "publish", + "cloudrun", + "test.db", + "--service", + "datasette", + "--show-files", + "--secret", + "x-secret", + "--apt-get-install", + "ripgrep", + "--spatialite", + ], + ) + assert result.exit_code == 0 + dockerfile = ( + result.output.split("==== Dockerfile ====\n")[1] + .split("\n====================\n")[0] + .strip() + ) + expected = textwrap.dedent( + r""" + FROM python:3.8 + COPY . /app + WORKDIR /app + + RUN apt-get update && \ + apt-get install -y ripgrep python3-dev gcc libsqlite3-mod-spatialite && \ + rm -rf /var/lib/apt/lists/* + + ENV DATASETTE_SECRET 'x-secret' + ENV SQLITE_EXTENSIONS '/usr/lib/x86_64-linux-gnu/mod_spatialite.so' + RUN pip install -U datasette + RUN datasette inspect test.db --inspect-file inspect-data.json + ENV PORT 8001 + EXPOSE 8001 + CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data.json --port $PORT + """ + ).strip() + assert expected == dockerfile From bbde835a1fec01458e8d00929e7bab6d6a5ba948 Mon Sep 17 00:00:00 2001 From: Jeff Triplett Date: Sat, 28 Nov 2020 13:53:48 -0600 Subject: [PATCH 0045/1455] Fix --metadata doc usage (#1112) Thanks, @jefftriplett. --- docs/changelog.rst | 4 ++-- docs/settings.rst | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2916b373..20181ca9 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -453,7 +453,7 @@ You can now create :ref:`custom pages ` within your Datasette inst :ref:`settings_dir` (`#731 `__) allows you to define a custom Datasette instance as a directory. 
So instead of running the following:: $ datasette one.db two.db \ - --metadata.json \ + --metadata=metadata.json \ --template-dir=templates/ \ --plugins-dir=plugins \ --static css:css @@ -770,7 +770,7 @@ Small changes 0.28 (2019-05-19) ----------------- -A `salmagundi `__ of new features! +A `salmagundi `__ of new features! .. _v0_28_databases_that_change: diff --git a/docs/settings.rst b/docs/settings.rst index 350fd048..156893e0 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -23,7 +23,7 @@ Configuration directory mode Normally you configure Datasette using command-line options. For a Datasette instance with custom templates, custom plugins, a static directory and several databases this can get quite verbose:: $ datasette one.db two.db \ - --metadata.json \ + --metadata=metadata.json \ --template-dir=templates/ \ --plugins-dir=plugins \ --static css:css From 50cc6af01672526791900df7c8834a62fa094852 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 28 Nov 2020 15:34:56 -0800 Subject: [PATCH 0046/1455] Fixed some broken internal links, refs #1106 --- docs/changelog.rst | 14 +++++++------- docs/csv_export.rst | 6 +++--- docs/deploying.rst | 2 +- docs/pages.rst | 2 +- docs/performance.rst | 6 +++--- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 20181ca9..15992020 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -353,9 +353,9 @@ Signed values and secrets Both flash messages and user authentication needed a way to sign values and set signed cookies. Two new methods are now available for plugins to take advantage of this mechanism: :ref:`datasette_sign` and :ref:`datasette_unsign`. -Datasette will generate a secret automatically when it starts up, but to avoid resetting the secret (and hence invalidating any cookies) every time the server restarts you should set your own secret. You can pass a secret to Datasette using the new ``--secret`` option or with a ``DATASETTE_SECRET`` environment variable. See :ref:`settings_secret` for more details. +Datasette will generate a secret automatically when it starts up, but to avoid resetting the secret (and hence invalidating any cookies) every time the server restarts you should set your own secret. You can pass a secret to Datasette using the new ``--secret`` option or with a ``DATASETTE_SECRET`` environment variable. See :ref:`setting_secret` for more details. -You can also set a secret when you deploy Datasette using ``datasette publish`` or ``datasette package`` - see :ref:`settings_publish_secrets`. +You can also set a secret when you deploy Datasette using ``datasette publish`` or ``datasette package`` - see :ref:`setting_publish_secrets`. Plugins can now sign values and verify their signatures using the :ref:`datasette.sign() ` and :ref:`datasette.unsign() ` methods. @@ -450,7 +450,7 @@ A small release which provides improved internal methods for use in plugins, alo You can now create :ref:`custom pages ` within your Datasette instance using a custom template file. For example, adding a template file called ``templates/pages/about.html`` will result in a new page being served at ``/about`` on your instance. See the :ref:`custom pages documentation ` for full details, including how to return custom HTTP headers, redirects and status codes. (`#648 `__) -:ref:`settings_dir` (`#731 `__) allows you to define a custom Datasette instance as a directory. 
So instead of running the following:: +:ref:`config_dir` (`#731 `__) allows you to define a custom Datasette instance as a directory. So instead of running the following:: $ datasette one.db two.db \ --metadata=metadata.json \ @@ -480,7 +480,7 @@ Also in this release: * Datasette :ref:`metadata` can now be provided as a YAML file as an optional alternative to JSON. See :ref:`metadata_yaml`. (`#713 `__) * Removed support for ``datasette publish now``, which used the the now-retired Zeit Now v1 hosting platform. A new plugin, `datasette-publish-now `__, can be installed to publish data to Zeit (`now Vercel `__) Now v2. (`#710 `__) * Fixed a bug where the ``extra_template_vars(request, view_name)`` plugin hook was not receiving the correct ``view_name``. (`#716 `__) -* Variables added to the template context by the ``extra_template_vars()`` plugin hook are now shown in the ``?_context=1`` debugging mode (see :ref:`settings_template_debug`). (`#693 `__) +* Variables added to the template context by the ``extra_template_vars()`` plugin hook are now shown in the ``?_context=1`` debugging mode (see :ref:`setting_template_debug`). (`#693 `__) * Fixed a bug where the "templates considered" HTML comment was no longer being displayed. (`#689 `__) * Fixed a ``datasette publish`` bug where ``--plugin-secret`` would over-ride plugin configuration in the provided ``metadata.json`` file. (`#724 `__) * Added a new CSS class for customizing the canned query page. (`#727 `__) @@ -565,7 +565,7 @@ Also in this release: * asyncio task information is now included on the ``/-/threads`` debug page * Bumped Uvicorn dependency 0.11 * You can now use ``--port 0`` to listen on an available port -* New :ref:`settings_template_debug` setting for debugging templates, e.g. https://latest.datasette.io/fixtures/roadside_attractions?_context=1 (`#654 `__) +* New :ref:`setting_template_debug` setting for debugging templates, e.g. https://latest.datasette.io/fixtures/roadside_attractions?_context=1 (`#654 `__) .. _v0_32: @@ -941,7 +941,7 @@ A number of small new features: - ``datasette publish heroku`` now supports ``--extra-options``, fixes `#334 `_ - Custom error message if SpatiaLite is needed for specified database, closes `#331 `_ -- New config option: ``truncate_cells_html`` for :ref:`truncating long cell values ` in HTML view - closes `#330 `_ +- New config option: ``truncate_cells_html`` for :ref:`truncating long cell values ` in HTML view - closes `#330 `_ - Documentation for :ref:`datasette publish and datasette package `, closes `#337 `_ - Fixed compatibility with Python 3.7 - ``datasette publish heroku`` now supports app names via the ``-n`` option, which can also be used to overwrite an existing application [Russ Garrett] @@ -1000,7 +1000,7 @@ Check out the :ref:`CSV export documentation ` for more details, or try the feature out on https://fivethirtyeight.datasettes.com/fivethirtyeight/bechdel%2Fmovies -If your table has more than :ref:`settings_max_returned_rows` (default 1,000) +If your table has more than :ref:`setting_max_returned_rows` (default 1,000) Datasette provides the option to *stream all rows*. 
This option takes advantage of async Python and Datasette's efficient :ref:`pagination ` to iterate through the entire matching result set and stream it back as a diff --git a/docs/csv_export.rst b/docs/csv_export.rst index 704cc19d..0bda20ef 100644 --- a/docs/csv_export.rst +++ b/docs/csv_export.rst @@ -23,7 +23,7 @@ file, which looks like this and has the following options: the ``city_id`` column is accompanied by a ``city_id_label`` column. * **stream all rows** - by default CSV files only contain the first - :ref:`settings_max_returned_rows` records. This option will cause Datasette to + :ref:`setting_max_returned_rows` records. This option will cause Datasette to loop through every matching record and return them as a single CSV file. You can try that out on https://latest.datasette.io/fixtures/facetable?_size=4 @@ -40,9 +40,9 @@ Since databases can get pretty large, by default this option is capped at 100MB if a table returns more than 100MB of data the last line of the CSV will be a truncation error message. -You can increase or remove this limit using the :ref:`settings_max_csv_mb` config +You can increase or remove this limit using the :ref:`setting_max_csv_mb` config setting. You can also disable the CSV export feature entirely using -:ref:`settings_allow_csv_stream`. +:ref:`setting_allow_csv_stream`. A note on URLs -------------- diff --git a/docs/deploying.rst b/docs/deploying.rst index 4ca0e82a..d1abe6a3 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -58,7 +58,7 @@ Add a random value for the ``DATASETTE_SECRET`` - this will be used to sign Data $ python3 -c 'import secrets; print(secrets.token_hex(32))' -This configuration will run Datasette against all database files contained in the ``/home/ubunt/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`settings_dir` for details. +This configuration will run Datasette against all database files contained in the ``/home/ubunt/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`config_dir` for details. You can start the Datasette process running using the following:: diff --git a/docs/pages.rst b/docs/pages.rst index 5f77bec7..0941c960 100644 --- a/docs/pages.rst +++ b/docs/pages.rst @@ -66,7 +66,7 @@ Row Every row in every Datasette table has its own URL. This means individual records can be linked to directly. -Table cells with extremely long text contents are truncated on the table view according to the :ref:`settings_truncate_cells_html` setting. If a cell has been truncated the full length version of that cell will be available on the row page. +Table cells with extremely long text contents are truncated on the table view according to the :ref:`setting_truncate_cells_html` setting. If a cell has been truncated the full length version of that cell will be available on the row page. Rows which are the targets of foreign key references from other tables will show a link to a filtered search for all records that reference that row. 
Here's an example from the Registers of Members Interests database: diff --git a/docs/performance.rst b/docs/performance.rst index 1d24adce..2727416d 100644 --- a/docs/performance.rst +++ b/docs/performance.rst @@ -56,7 +56,7 @@ Using a caching proxy in this way could enable a Datasette-backed visualization Datasette's integration with HTTP caches can be enabled using a combination of configuration options and querystring arguments. -The :ref:`settings_default_cache_ttl` setting sets the default HTTP cache TTL for all Datasette pages. This is 5 seconds unless you change it - you can set it to 0 if you wish to disable HTTP caching entirely. +The :ref:`setting_default_cache_ttl` setting sets the default HTTP cache TTL for all Datasette pages. This is 5 seconds unless you change it - you can set it to 0 if you wish to disable HTTP caching entirely. You can also change the cache timeout on a per-request basis using the ``?_ttl=10`` querystring parameter. This can be useful when you are working with the Datasette JSON API - you may decide that a specific query can be cached for a longer time, or maybe you need to set ``?_ttl=0`` for some requests for example if you are running a SQL ``order by random()`` query. @@ -65,9 +65,9 @@ Hashed URL mode When you open a database file in immutable mode using the ``-i`` option, Datasette calculates a SHA-256 hash of the contents of that file on startup. This content hash can then optionally be used to create URLs that are guaranteed to change if the contents of the file changes in the future. This results in URLs that can then be cached indefinitely by both browsers and caching proxies - an enormous potential performance optimization. -You can enable these hashed URLs in two ways: using the :ref:`settings_hash_urls` configuration setting (which affects all requests to Datasette) or via the ``?_hash=1`` querystring parameter (which only applies to the current request). +You can enable these hashed URLs in two ways: using the :ref:`setting_hash_urls` configuration setting (which affects all requests to Datasette) or via the ``?_hash=1`` querystring parameter (which only applies to the current request). -With hashed URLs enabled, any request to e.g. ``/mydatabase/mytable`` will 302 redirect to ``mydatabase-455fe3a/mytable``. The URL containing the hash will be served with a very long cache expire header - configured using :ref:`settings_default_cache_ttl_hashed` which defaults to 365 days. +With hashed URLs enabled, any request to e.g. ``/mydatabase/mytable`` will 302 redirect to ``mydatabase-455fe3a/mytable``. The URL containing the hash will be served with a very long cache expire header - configured using :ref:`setting_default_cache_ttl_hashed` which defaults to 365 days. Since these responses are cached for a long time, you may wish to build API clients against the non-hashed version of these URLs. These 302 redirects are served extremely quickly, so this should still be a performant way to work against the Datasette API. 
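Hashed URL mode as described above means an API client only has to pay for the 302 redirect once: after that the content-hashed URL can be requested, and cached, until the database file itself changes. A rough sketch of that client pattern, assuming a reasonably recent ``httpx`` and a hypothetical host and database name:

    import httpx

    BASE_URL = "https://example.com"  # hypothetical Datasette deployment

    def resolve_hashed_url(path):
        # With hash_urls enabled, /mydatabase/mytable 302-redirects to
        # /mydatabase-<hash>/mytable, which is safe to cache long-term
        response = httpx.get(BASE_URL + path, follow_redirects=False)
        if response.status_code == 302:
            return response.headers["location"]
        return path

    hashed_path = resolve_hashed_url("/mydatabase/mytable.json")
    # Subsequent requests hit the hashed URL directly and benefit from the
    # long default_cache_ttl_hashed expiry header
    rows = httpx.get(BASE_URL + hashed_path).json()
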
From a8e66f9065fb55a3863cc05dfb2ce52f9618cdb7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 28 Nov 2020 15:54:35 -0800 Subject: [PATCH 0047/1455] Release 0.52 Refs #992, #1103, #1104, #1107, #1077, #1110, #1089, #1086, #1088, #1084 --- README.md | 1 + datasette/version.py | 2 +- docs/changelog.rst | 24 ++++++++++++++++++++++++ docs/internals.rst | 2 ++ 4 files changed, 28 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index a10ccfd3..c0019e9b 100644 --- a/README.md +++ b/README.md @@ -25,6 +25,7 @@ Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly new ## News + * 28th November 2020: [Datasette 0.52](https://docs.datasette.io/en/stable/changelog.html#v0-52) - `--config` is now `--setting`, new `database_actions` plugin hook, `datasette publish cloudrun --apt-get-install` option and several bug fixes. * 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. [Annotated release notes](https://simonwillison.net/2020/Nov/1/datasette-0-51/). * 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). * 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). diff --git a/datasette/version.py b/datasette/version.py index 2d949370..3b84c97b 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.51.1" +__version__ = "0.52" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 15992020..49772638 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,30 @@ Changelog ========= +.. _v0_52: + +0.52 (2020-11-28) +----------------- + +This release includes a number of changes relating to an internal rebranding effort: Datasette's **configuration** mechanism (things like ``datasette --config default_page_size:10``) has been renamed to **settings**. + +- New ``--setting default_page_size 10`` option as a replacement for ``--config default_page_size:10`` (note the lack of a colon). The ``--config`` option is deprecated but will continue working until Datasette 1.0. (`#992 `__) +- The ``/-/config`` introspection page is now ``/-/settings``, and the previous page redirects to the new one. (`#1103 `__) +- The ``config.json`` file in :ref:`config_dir` is now called ``settings.json``. (`#1104 `__) +- The undocumented ``datasette.config()`` internal method has been replaced by a documented :ref:`datasette_setting` method. (`#1107 `__) + +Also in this release: + +- New plugin hook: :ref:`plugin_hook_database_actions`, which adds menu items to a new cog menu shown at the top of the database page. (`#1077 `__) +- ``datasette publish cloudrun`` has a new ``--apt-get-install`` option that can be used to install additional Ubuntu packages as part of the deployment. This is useful for deploying the new `datasette-ripgrep plugin `__. 
(`#1110 `__) +- Swept the documentation to remove words that minimize involved difficulty. (`#1089 `__) + +And some bug fixes: + +- Foreign keys linking to rows with blank label columns now display as a hyphen, allowing those links to be clicked. (`#1086 `__) +- Fixed bug where row pages could sometimes 500 if the underlying queries exceeded a time limit. (`#1088 `__) +- Fixed a bug where the table action menu could appear partially obscured by the edge of the page. (`#1084 `__) + .. _v0_51_1: 0.51.1 (2020-10-31) diff --git a/docs/internals.rst b/docs/internals.rst index 78d4e5d2..ff566f69 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -352,6 +352,8 @@ Returns the absolute URL for the given path, including the protocol and host. Fo The current request object is used to determine the hostname and protocol that should be used for the returned URL. The :ref:`setting_force_https_urls` configuration setting is taken into account. +.. _datasette_setting: + .setting(key) ------------- From 12877d7a48e2aa28bb5e780f929a218f7265d849 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 28 Nov 2020 23:44:57 -0800 Subject: [PATCH 0048/1455] Plugin testing docs now recommend datasette.client, closes #1102 --- docs/testing_plugins.rst | 57 +++++++++++++++++++--------------------- 1 file changed, 27 insertions(+), 30 deletions(-) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 76f69a6a..d8ebdc77 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -11,24 +11,24 @@ If you use the template described in :ref:`writing_plugins_cookiecutter` your pl from datasette.app import Datasette import pytest - import httpx + @pytest.mark.asyncio async def test_plugin_is_installed(): - app = Datasette([], memory=True).app() - async with httpx.AsyncClient(app=app) as client: - response = await client.get("http://localhost/-/plugins.json") - assert 200 == response.status_code - installed_plugins = {p["name"] for p in response.json()} - assert "datasette-plugin-template-demo" in installed_plugins + datasette = Datasette([], memory=True) + response = await datasette.client.get("/-/plugins.json") + assert response.status_code == 200 + installed_plugins = {p["name"] for p in response.json()} + assert "datasette-plugin-template-demo" in installed_plugins -This test uses the `HTTPX `__ Python library to run mock HTTP requests through a fresh instance of Datasette. This is the recommended way to write tests against a Datasette instance. -It also uses the `pytest-asyncio `__ package to add support for ``async def`` test functions running under pytest. +This test uses the :ref:`internals_datasette_client` object to exercise a test instance of Datasette. ``datasette.client`` is a wrapper around the `HTTPX `__ Python library which can imitate HTTP requests using ASGI. This is the recommended way to write tests against a Datasette instance. + +This test also uses the `pytest-asyncio `__ package to add support for ``async def`` test functions running under pytest. You can install these packages like so:: - pip install pytest pytest-asyncio httpx + pip install pytest pytest-asyncio If you are building an installable package you can add them as test dependencies to your ``setup.py`` module like this: @@ -38,7 +38,7 @@ If you are building an installable package you can add them as test dependencies name="datasette-my-plugin", # ... 
extras_require={ - "test": ["pytest", "pytest-asyncio", "httpx"] + "test": ["pytest", "pytest-asyncio"] }, tests_require=["datasette-my-plugin[test]"], ) @@ -65,12 +65,11 @@ Here's an example that uses the `sqlite-utils library Some dogs" in response.text + async def test_example_table_html(datasette): + response = await datasette.client.get("/test/dogs") + assert ">Some dogs" in response.text -Here the ``ds()`` function defines the fixture, which is than automatically passed to the two test functions based on pytest automatically matching their ``ds`` function parameters. +Here the ``datasette()`` function defines the fixture, which is than automatically passed to the two test functions based on pytest automatically matching their ``datasette`` function parameters. The ``@pytest.fixture(scope="session")`` line here ensures the fixture is reused for the full ``pytest`` execution session. This means that the temporary database file will be created once and reused for each test. @@ -119,5 +116,5 @@ If you want to create that test database repeatedly for every individual test fu .. code-block:: python @pytest.fixture - def ds(tmp_path_factory): - # ... + def datasette(tmp_path_factory): + # This fixture will be executed repeatedly for every test From e800ffcf7cc6a915eb554b369c654f87162575e5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 29 Nov 2020 09:37:43 -0800 Subject: [PATCH 0049/1455] /usr/local/lib/mod_spatialite.so Closes #1114 --- datasette/utils/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 54a5b247..d326c773 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -57,6 +57,7 @@ RUN apt-get update && \ SPATIALITE_PATHS = ( "/usr/lib/x86_64-linux-gnu/mod_spatialite.so", "/usr/local/lib/mod_spatialite.dylib", + "/usr/local/lib/mod_spatialite.so", ) # Length of hash subset used in hashed URLs: HASH_LENGTH = 7 From deb0be4ae56f191f121239b29e83dd53b62d6305 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 29 Nov 2020 11:30:17 -0800 Subject: [PATCH 0050/1455] Fix bug where compound foreign keys produced broken links, closes #1098 --- datasette/utils/__init__.py | 51 +++++++++++++++++++++----------- tests/fixtures.py | 7 +++-- tests/test_api.py | 14 +++++++-- tests/test_csv.py | 6 ++-- tests/test_html.py | 6 ++++ tests/test_internals_database.py | 33 ++++++++++++++++++--- 6 files changed, 88 insertions(+), 29 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index d326c773..d467383d 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1,7 +1,7 @@ import asyncio from contextlib import contextmanager import click -from collections import OrderedDict, namedtuple +from collections import OrderedDict, namedtuple, Counter import base64 import hashlib import inspect @@ -474,9 +474,25 @@ def get_outbound_foreign_keys(conn, table): if info is not None: id, seq, table_name, from_, to_, on_update, on_delete, match = info fks.append( - {"column": from_, "other_table": table_name, "other_column": to_} + { + "column": from_, + "other_table": table_name, + "other_column": to_, + "id": id, + "seq": seq, + } ) - return fks + # Filter out compound foreign keys by removing any where "id" is not unique + id_counts = Counter(fk["id"] for fk in fks) + return [ + { + "column": fk["column"], + "other_table": fk["other_table"], + "other_column": fk["other_column"], + } + for fk in fks + if id_counts[fk["id"]] == 1 + ] def 
get_all_foreign_keys(conn): @@ -487,20 +503,21 @@ def get_all_foreign_keys(conn): for table in tables: table_to_foreign_keys[table] = {"incoming": [], "outgoing": []} for table in tables: - infos = conn.execute(f"PRAGMA foreign_key_list([{table}])").fetchall() - for info in infos: - if info is not None: - id, seq, table_name, from_, to_, on_update, on_delete, match = info - if table_name not in table_to_foreign_keys: - # Weird edge case where something refers to a table that does - # not actually exist - continue - table_to_foreign_keys[table_name]["incoming"].append( - {"other_table": table, "column": to_, "other_column": from_} - ) - table_to_foreign_keys[table]["outgoing"].append( - {"other_table": table_name, "column": from_, "other_column": to_} - ) + fks = get_outbound_foreign_keys(conn, table) + for fk in fks: + table_name = fk["other_table"] + from_ = fk["column"] + to_ = fk["other_column"] + if table_name not in table_to_foreign_keys: + # Weird edge case where something refers to a table that does + # not actually exist + continue + table_to_foreign_keys[table_name]["incoming"].append( + {"other_table": table, "column": to_, "other_column": from_} + ) + table_to_foreign_keys[table]["outgoing"].append( + {"other_table": table_name, "column": from_, "other_column": to_} + ) return table_to_foreign_keys diff --git a/tests/fixtures.py b/tests/fixtures.py index 3abca821..f95a2d6b 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -388,9 +388,12 @@ CREATE TABLE foreign_key_references ( foreign_key_with_label varchar(30), foreign_key_with_blank_label varchar(30), foreign_key_with_no_label varchar(30), + foreign_key_compound_pk1 varchar(30), + foreign_key_compound_pk2 varchar(30), FOREIGN KEY (foreign_key_with_label) REFERENCES simple_primary_key(id), FOREIGN KEY (foreign_key_with_blank_label) REFERENCES simple_primary_key(id), FOREIGN KEY (foreign_key_with_no_label) REFERENCES primary_key_multiple_columns(id) + FOREIGN KEY (foreign_key_compound_pk1, foreign_key_compound_pk2) REFERENCES compound_primary_key(pk1, pk2) ); CREATE TABLE sortable ( @@ -624,8 +627,8 @@ INSERT INTO simple_primary_key VALUES (4, 'RENDER_CELL_DEMO'); INSERT INTO primary_key_multiple_columns VALUES (1, 'hey', 'world'); INSERT INTO primary_key_multiple_columns_explicit_label VALUES (1, 'hey', 'world2'); -INSERT INTO foreign_key_references VALUES (1, 1, 3, 1); -INSERT INTO foreign_key_references VALUES (2, null, null, null); +INSERT INTO foreign_key_references VALUES (1, 1, 3, 1, 'a', 'b'); +INSERT INTO foreign_key_references VALUES (2, null, null, null, null, null); INSERT INTO complex_foreign_keys VALUES (1, 1, 2, 1); INSERT INTO custom_foreign_key_label VALUES (1, 1); diff --git a/tests/test_api.py b/tests/test_api.py index 2bab6c30..848daf9c 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -237,6 +237,8 @@ def test_database_page(app_client): "foreign_key_with_label", "foreign_key_with_blank_label", "foreign_key_with_no_label", + "foreign_key_compound_pk1", + "foreign_key_compound_pk2", ], "primary_keys": ["pk"], "count": 2, @@ -1637,6 +1639,8 @@ def test_expand_label(app_client): "foreign_key_with_label": {"value": "1", "label": "hello"}, "foreign_key_with_blank_label": "3", "foreign_key_with_no_label": "1", + "foreign_key_compound_pk1": "a", + "foreign_key_compound_pk2": "b", } } @@ -1821,24 +1825,28 @@ def test_common_prefix_database_names(app_client_conflicting_database_names): assert db_name == data["database"] -def test_null_foreign_keys_are_not_expanded(app_client): +def 
test_null_and_compound_foreign_keys_are_not_expanded(app_client): response = app_client.get( "/fixtures/foreign_key_references.json?_shape=array&_labels=on" ) - assert [ + assert response.json == [ { "pk": "1", "foreign_key_with_label": {"value": "1", "label": "hello"}, "foreign_key_with_blank_label": {"value": "3", "label": ""}, "foreign_key_with_no_label": {"value": "1", "label": "1"}, + "foreign_key_compound_pk1": "a", + "foreign_key_compound_pk2": "b", }, { "pk": "2", "foreign_key_with_label": None, "foreign_key_with_blank_label": None, "foreign_key_with_no_label": None, + "foreign_key_compound_pk1": None, + "foreign_key_compound_pk2": None, }, - ] == response.json + ] def test_inspect_file_used_for_count(app_client_immutable_and_inspect_file): diff --git a/tests/test_csv.py b/tests/test_csv.py index 209bce2b..0fd665a9 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -42,9 +42,9 @@ pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,com ) EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV = """ -pk,foreign_key_with_label,foreign_key_with_label_label,foreign_key_with_blank_label,foreign_key_with_blank_label_label,foreign_key_with_no_label,foreign_key_with_no_label_label -1,1,hello,3,,1,1 -2,,,,,, +pk,foreign_key_with_label,foreign_key_with_label_label,foreign_key_with_blank_label,foreign_key_with_blank_label_label,foreign_key_with_no_label,foreign_key_with_no_label_label,foreign_key_compound_pk1,foreign_key_compound_pk2 +1,1,hello,3,,1,1,a,b +2,,,,,,,, """.lstrip().replace( "\n", "\r\n" ) diff --git a/tests/test_html.py b/tests/test_html.py index d53dbabc..ecbf89b4 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -804,12 +804,16 @@ def test_table_html_foreign_key_links(app_client): 'hello\xa01', '-\xa03', '1', + 'a', + 'b', ], [ '2', '\xa0', '\xa0', '\xa0', + '\xa0', + '\xa0', ], ] @@ -836,6 +840,8 @@ def test_table_html_disable_foreign_key_links_with_labels(app_client): '1', '3', '1', + 'a', + 'b', ] ] diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index e5938f3b..7c8f478c 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -267,7 +267,7 @@ async def test_table_column_details(db, table, expected): @pytest.mark.asyncio async def test_get_all_foreign_keys(db): all_foreign_keys = await db.get_all_foreign_keys() - assert { + assert all_foreign_keys["roadside_attraction_characteristics"] == { "incoming": [], "outgoing": [ { @@ -281,8 +281,8 @@ async def test_get_all_foreign_keys(db): "other_column": "pk", }, ], - } == all_foreign_keys["roadside_attraction_characteristics"] - assert { + } + assert all_foreign_keys["attraction_characteristic"] == { "incoming": [ { "other_table": "roadside_attraction_characteristics", @@ -291,7 +291,32 @@ async def test_get_all_foreign_keys(db): } ], "outgoing": [], - } == all_foreign_keys["attraction_characteristic"] + } + assert all_foreign_keys["compound_primary_key"] == { + # No incoming because these are compound foreign keys, which we currently ignore + "incoming": [], + "outgoing": [], + } + assert all_foreign_keys["foreign_key_references"] == { + "incoming": [], + "outgoing": [ + { + "other_table": "primary_key_multiple_columns", + "column": "foreign_key_with_no_label", + "other_column": "id", + }, + { + "other_table": "simple_primary_key", + "column": "foreign_key_with_blank_label", + "other_column": "id", + }, + { + "other_table": "simple_primary_key", + "column": "foreign_key_with_label", + "other_column": "id", + }, + ], + } @pytest.mark.asyncio 
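The compound foreign key fix in this patch leans on how ``PRAGMA foreign_key_list`` reports such keys: one row per column, with all rows of the same key sharing an ``id`` value, so any ``id`` that occurs more than once marks a compound key that should be skipped rather than turned into a link. A standalone sketch of that behaviour (table names here are illustrative, not taken from the fixtures):

    import sqlite3
    from collections import Counter

    conn = sqlite3.connect(":memory:")
    conn.executescript("""
        CREATE TABLE parent (a TEXT, b TEXT, PRIMARY KEY (a, b));
        CREATE TABLE child (
            ref_a TEXT,
            ref_b TEXT,
            FOREIGN KEY (ref_a, ref_b) REFERENCES parent(a, b)
        );
    """)
    rows = conn.execute("PRAGMA foreign_key_list(child)").fetchall()
    # Each row is (id, seq, table, from, to, on_update, on_delete, match);
    # the compound key yields two rows - one per column - that share id 0
    id_counts = Counter(row[0] for row in rows)
    single_column_fks = [row for row in rows if id_counts[row[0]] == 1]
    assert single_column_fks == []  # the compound key is filtered out
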
From 242bc89fdf2e775e340d69a4e851b3a9accb31c6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 29 Nov 2020 11:38:29 -0800 Subject: [PATCH 0051/1455] Release 0.52.1 Refs #1098, #1102, #1114 --- datasette/version.py | 2 +- docs/changelog.rst | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 3b84c97b..119295b3 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52" +__version__ = "0.52.1" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 49772638..a77cf5a5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,15 @@ Changelog ========= +.. _v0_52_1: + +0.52.1 (2020-11-29) +------------------- + +- Documentation on :ref:`testing_plugins` now recommends using :ref:`internals_datasette_client`. (`#1102 `__) +- Fix bug where compound foreign keys produced broken links. (`#1098 `__) +- ``datasette --load-module=spatialite`` now also checks for ``/usr/local/lib/mod_spatialite.so``. Thanks, Dan Peterson. (`#1114 `__) + .. _v0_52: 0.52 (2020-11-28) From 09033c08bec8555e0e893e077afa10a7a75d7d35 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 29 Nov 2020 12:13:16 -0800 Subject: [PATCH 0052/1455] Suggest --load-extension=spatialite, closes #1115 --- datasette/cli.py | 12 ++++++++++-- tests/test_cli.py | 29 ++++++++++++++++++++++------- 2 files changed, 32 insertions(+), 9 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 5feab51e..e84695e3 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -16,6 +16,7 @@ from .app import Datasette, DEFAULT_SETTINGS, SETTINGS, pm from .utils import ( StartupError, check_connection, + find_spatialite, parse_metadata, ConnectionProblem, SpatialiteConnectionProblem, @@ -537,10 +538,17 @@ async def check_databases(ds): try: await database.execute_fn(check_connection) except SpatialiteConnectionProblem: + suggestion = "" + try: + find_spatialite() + suggestion = "\n\nTry adding the --load-extension=spatialite option." + except SpatialiteNotFound: + pass raise click.UsageError( "It looks like you're trying to load a SpatiaLite" - " database without first loading the SpatiaLite module." - "\n\nRead more: https://docs.datasette.io/en/stable/spatialite.html" + + " database without first loading the SpatiaLite module." 
+ + suggestion + + "\n\nRead more: https://docs.datasette.io/en/stable/spatialite.html" ) except ConnectionProblem as e: raise click.UsageError( diff --git a/tests/test_cli.py b/tests/test_cli.py index 36b9a092..409408ae 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -59,13 +59,28 @@ def test_serve_with_inspect_file_prepopulates_table_counts_cache(): assert {"hithere": 44} == db.cached_table_counts -def test_spatialite_error_if_attempt_to_open_spatialite(): - runner = CliRunner() - result = runner.invoke( - cli, ["serve", str(pathlib.Path(__file__).parent / "spatialite.db")] - ) - assert result.exit_code != 0 - assert "trying to load a SpatiaLite database" in result.output +@pytest.mark.parametrize( + "spatialite_paths,should_suggest_load_extension", + ( + ([], False), + (["/tmp"], True), + ), +) +def test_spatialite_error_if_attempt_to_open_spatialite( + spatialite_paths, should_suggest_load_extension +): + with mock.patch("datasette.utils.SPATIALITE_PATHS", spatialite_paths): + runner = CliRunner() + result = runner.invoke( + cli, ["serve", str(pathlib.Path(__file__).parent / "spatialite.db")] + ) + assert result.exit_code != 0 + assert "It looks like you're trying to load a SpatiaLite" in result.output + suggestion = "--load-extension=spatialite" + if should_suggest_load_extension: + assert suggestion in result.output + else: + assert suggestion not in result.output @mock.patch("datasette.utils.SPATIALITE_PATHS", ["/does/not/exist"]) From 4777362bf2692bc72b221ec47c3e6216151d1b89 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 29 Nov 2020 12:19:24 -0800 Subject: [PATCH 0053/1455] Work around CI bug with ensure_eventloop, refs #1115 --- tests/test_cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_cli.py b/tests/test_cli.py index 409408ae..c52960fb 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -67,7 +67,7 @@ def test_serve_with_inspect_file_prepopulates_table_counts_cache(): ), ) def test_spatialite_error_if_attempt_to_open_spatialite( - spatialite_paths, should_suggest_load_extension + ensure_eventloop, spatialite_paths, should_suggest_load_extension ): with mock.patch("datasette.utils.SPATIALITE_PATHS", spatialite_paths): runner = CliRunner() From c745c2715ab5933d7629a76bab4684632383f807 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 29 Nov 2020 12:27:34 -0800 Subject: [PATCH 0054/1455] Moved comment for clarity --- datasette/database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/database.py b/datasette/database.py index ea1424a5..71c45ba0 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -247,12 +247,12 @@ class Database: ) if explicit_label_column: return explicit_label_column - # If a table has two columns, one of which is ID, then label_column is the other one column_names = await self.execute_fn(lambda conn: table_columns(conn, table)) # Is there a name or title column? 
name_or_title = [c for c in column_names if c in ("name", "title")] if name_or_title: return name_or_title[0] + # If a table has two columns, one of which is ID, then label_column is the other one if ( column_names and len(column_names) == 2 From 37f87b5e52e7f8ddd1c4ffcf368bd7a62a406a6d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 12:01:15 -0800 Subject: [PATCH 0055/1455] Support for generated columns, closes #1116 --- datasette/utils/__init__.py | 12 +++---- tests/test_api.py | 55 ++++++++++++++++++++++++++++++-- tests/test_internals_database.py | 17 ++++++++++ 3 files changed, 76 insertions(+), 8 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index d467383d..28df2ef1 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -64,7 +64,7 @@ HASH_LENGTH = 7 # Can replace this with Column from sqlite_utils when I add that dependency Column = namedtuple( - "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk") + "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk", "hidden") ) @@ -460,11 +460,11 @@ def detect_primary_keys(conn, table): " Figure out primary keys for a table. " table_info_rows = [ row - for row in conn.execute(f'PRAGMA table_info("{table}")').fetchall() - if row[-1] + for row in conn.execute(f'PRAGMA table_xinfo("{table}")').fetchall() + if row["pk"] ] - table_info_rows.sort(key=lambda row: row[-1]) - return [str(r[1]) for r in table_info_rows] + table_info_rows.sort(key=lambda row: row["pk"]) + return [str(r["name"]) for r in table_info_rows] def get_outbound_foreign_keys(conn, table): @@ -572,7 +572,7 @@ def table_columns(conn, table): def table_column_details(conn, table): return [ Column(*r) - for r in conn.execute(f"PRAGMA table_info({escape_sqlite(table)});").fetchall() + for r in conn.execute(f"PRAGMA table_xinfo({escape_sqlite(table)});").fetchall() ] diff --git a/tests/test_api.py b/tests/test_api.py index 848daf9c..ebe50d10 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,5 +1,6 @@ +from datasette.app import Datasette from datasette.plugins import DEFAULT_PLUGINS -from datasette.utils import detect_json1 +from datasette.utils import detect_json1, sqlite3 from datasette.version import __version__ from .fixtures import ( # noqa app_client, @@ -514,7 +515,14 @@ def test_database_page(app_client): }, { "name": "searchable_fts", - "columns": ["text1", "text2", "name with . and spaces"], + "columns": [ + "text1", + "text2", + "name with . 
and spaces", + "searchable_fts", + "docid", + "__langid", + ], "primary_keys": [], "count": 2, "hidden": True, @@ -1913,3 +1921,46 @@ def test_paginate_using_link_header(app_client, qs): else: path = None assert num_pages == 21 + + +@pytest.mark.skipif( + tuple( + map( + int, + sqlite3.connect(":memory:") + .execute("select sqlite_version()") + .fetchone()[0] + .split("."), + ) + ) + < (3, 31, 0), + reason="generated columns were added in SQLite 3.31.0", +) +@pytest.mark.asyncio +async def test_generated_columns_are_visible_in_datasette(tmp_path_factory): + db_directory = tmp_path_factory.mktemp("dbs") + db_path = db_directory / "test.db" + conn = sqlite3.connect(str(db_path)) + conn.executescript( + """ + CREATE TABLE deeds ( + body TEXT, + id INT GENERATED ALWAYS AS (json_extract(body, '$.id')) STORED, + consideration INT GENERATED ALWAYS AS (json_extract(body, '$.consideration')) STORED + ); + INSERT INTO deeds (body) VALUES ('{ + "id": 1, + "consideration": "This is the consideration" + }'); + """ + ) + datasette = Datasette([db_path]) + response = await datasette.client.get("/test/deeds.json?_shape=array") + assert response.json() == [ + { + "rowid": 1, + "body": '{\n "id": 1,\n "consideration": "This is the consideration"\n }', + "id": 1, + "consideration": "This is the consideration", + } + ] diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 7c8f478c..56397dab 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -120,6 +120,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, + hidden=0, ), Column( cid=1, @@ -128,6 +129,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=2, @@ -136,6 +138,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=3, @@ -144,6 +147,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=4, @@ -152,6 +156,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=5, @@ -160,6 +165,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=6, @@ -168,6 +174,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=7, @@ -176,6 +183,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=8, @@ -184,6 +192,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=9, @@ -192,6 +201,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), ], ), @@ -205,6 +215,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, + hidden=0, ), Column( cid=1, @@ -213,6 +224,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=2, + hidden=0, ), Column( cid=2, @@ -221,6 +233,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=3, @@ -229,6 +242,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=4, @@ -237,6 +251,7 @@ async def test_table_columns(db, table, expected): 
notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=5, @@ -245,6 +260,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=6, @@ -253,6 +269,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), ], ), From dea3c508b39528e566d711c38a467b3d372d220b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 12:09:22 -0800 Subject: [PATCH 0056/1455] Revert "Support for generated columns, closes #1116" - it failed CI This reverts commit 37f87b5e52e7f8ddd1c4ffcf368bd7a62a406a6d. --- datasette/utils/__init__.py | 12 +++---- tests/test_api.py | 55 ++------------------------------ tests/test_internals_database.py | 17 ---------- 3 files changed, 8 insertions(+), 76 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 28df2ef1..d467383d 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -64,7 +64,7 @@ HASH_LENGTH = 7 # Can replace this with Column from sqlite_utils when I add that dependency Column = namedtuple( - "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk", "hidden") + "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk") ) @@ -460,11 +460,11 @@ def detect_primary_keys(conn, table): " Figure out primary keys for a table. " table_info_rows = [ row - for row in conn.execute(f'PRAGMA table_xinfo("{table}")').fetchall() - if row["pk"] + for row in conn.execute(f'PRAGMA table_info("{table}")').fetchall() + if row[-1] ] - table_info_rows.sort(key=lambda row: row["pk"]) - return [str(r["name"]) for r in table_info_rows] + table_info_rows.sort(key=lambda row: row[-1]) + return [str(r[1]) for r in table_info_rows] def get_outbound_foreign_keys(conn, table): @@ -572,7 +572,7 @@ def table_columns(conn, table): def table_column_details(conn, table): return [ Column(*r) - for r in conn.execute(f"PRAGMA table_xinfo({escape_sqlite(table)});").fetchall() + for r in conn.execute(f"PRAGMA table_info({escape_sqlite(table)});").fetchall() ] diff --git a/tests/test_api.py b/tests/test_api.py index ebe50d10..848daf9c 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,6 +1,5 @@ -from datasette.app import Datasette from datasette.plugins import DEFAULT_PLUGINS -from datasette.utils import detect_json1, sqlite3 +from datasette.utils import detect_json1 from datasette.version import __version__ from .fixtures import ( # noqa app_client, @@ -515,14 +514,7 @@ def test_database_page(app_client): }, { "name": "searchable_fts", - "columns": [ - "text1", - "text2", - "name with . and spaces", - "searchable_fts", - "docid", - "__langid", - ], + "columns": ["text1", "text2", "name with . 
and spaces"], "primary_keys": [], "count": 2, "hidden": True, @@ -1921,46 +1913,3 @@ def test_paginate_using_link_header(app_client, qs): else: path = None assert num_pages == 21 - - -@pytest.mark.skipif( - tuple( - map( - int, - sqlite3.connect(":memory:") - .execute("select sqlite_version()") - .fetchone()[0] - .split("."), - ) - ) - < (3, 31, 0), - reason="generated columns were added in SQLite 3.31.0", -) -@pytest.mark.asyncio -async def test_generated_columns_are_visible_in_datasette(tmp_path_factory): - db_directory = tmp_path_factory.mktemp("dbs") - db_path = db_directory / "test.db" - conn = sqlite3.connect(str(db_path)) - conn.executescript( - """ - CREATE TABLE deeds ( - body TEXT, - id INT GENERATED ALWAYS AS (json_extract(body, '$.id')) STORED, - consideration INT GENERATED ALWAYS AS (json_extract(body, '$.consideration')) STORED - ); - INSERT INTO deeds (body) VALUES ('{ - "id": 1, - "consideration": "This is the consideration" - }'); - """ - ) - datasette = Datasette([db_path]) - response = await datasette.client.get("/test/deeds.json?_shape=array") - assert response.json() == [ - { - "rowid": 1, - "body": '{\n "id": 1,\n "consideration": "This is the consideration"\n }', - "id": 1, - "consideration": "This is the consideration", - } - ] diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 56397dab..7c8f478c 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -120,7 +120,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, - hidden=0, ), Column( cid=1, @@ -129,7 +128,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=2, @@ -138,7 +136,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=3, @@ -147,7 +144,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=4, @@ -156,7 +152,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=5, @@ -165,7 +160,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=6, @@ -174,7 +168,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=7, @@ -183,7 +176,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=8, @@ -192,7 +184,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=9, @@ -201,7 +192,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), ], ), @@ -215,7 +205,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, - hidden=0, ), Column( cid=1, @@ -224,7 +213,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=2, - hidden=0, ), Column( cid=2, @@ -233,7 +221,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=3, @@ -242,7 +229,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=4, @@ -251,7 +237,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), 
Column( cid=5, @@ -260,7 +245,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=6, @@ -269,7 +253,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), ], ), From 49b6297fb7513291110d86688c688700e6f6d9cc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 13:24:23 -0800 Subject: [PATCH 0057/1455] Typo fix: messagge_is_html, closes #1118 --- datasette/app.py | 2 +- datasette/views/base.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 88d5ecc6..922046d5 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1154,7 +1154,7 @@ class DatasetteRouter: status = exception.status info = exception.error_dict message = exception.message - if exception.messagge_is_html: + if exception.message_is_html: message = Markup(message) title = exception.title else: diff --git a/datasette/views/base.py b/datasette/views/base.py index bde8449f..5ba8fcb1 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -42,13 +42,13 @@ class DatasetteError(Exception): error_dict=None, status=500, template=None, - messagge_is_html=False, + message_is_html=False, ): self.message = message self.title = title self.error_dict = error_dict or {} self.status = status - self.messagge_is_html = messagge_is_html + self.message_is_html = message_is_html class BaseView: @@ -441,7 +441,7 @@ class DataView(BaseView): """, title="SQL Interrupted", status=400, - messagge_is_html=True, + message_is_html=True, ) except (sqlite3.OperationalError, InvalidSql) as e: raise DatasetteError(str(e), title="Invalid SQL", status=400) From 461670a0b87efa953141b449a9a261919864ceb3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 13:29:57 -0800 Subject: [PATCH 0058/1455] Support for generated columns * Support for generated columns, closes #1116 * Show SQLite version in pytest report header * Use table_info() if SQLite < 3.26.0 * Cache sqlite_version() rather than re-calculate every time * Adjust test_database_page for SQLite 3.26.0 or higher --- datasette/utils/__init__.py | 41 +++++++++++++------------ datasette/utils/sqlite.py | 28 ++++++++++++++++++ tests/conftest.py | 11 +++++++ tests/fixtures.py | 2 +- tests/test_api.py | 51 +++++++++++++++++++++++++++++++- tests/test_config_dir.py | 2 +- tests/test_internals_database.py | 20 ++++++++++++- tests/test_plugins.py | 4 +-- tests/test_utils.py | 2 +- 9 files changed, 135 insertions(+), 26 deletions(-) create mode 100644 datasette/utils/sqlite.py diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index d467383d..b951539d 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -19,15 +19,9 @@ import urllib import numbers import yaml from .shutil_backport import copytree +from .sqlite import sqlite3, sqlite_version from ..plugins import pm -try: - import pysqlite3 as sqlite3 -except ImportError: - import sqlite3 - -if hasattr(sqlite3, "enable_callback_tracebacks"): - sqlite3.enable_callback_tracebacks(True) # From https://www.sqlite.org/lang_keywords.html reserved_words = set( @@ -64,7 +58,7 @@ HASH_LENGTH = 7 # Can replace this with Column from sqlite_utils when I add that dependency Column = namedtuple( - "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk") + "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk", "hidden") ) @@ -458,13 +452,10 @@ def temporary_docker_directory( def 
detect_primary_keys(conn, table): " Figure out primary keys for a table. " - table_info_rows = [ - row - for row in conn.execute(f'PRAGMA table_info("{table}")').fetchall() - if row[-1] - ] - table_info_rows.sort(key=lambda row: row[-1]) - return [str(r[1]) for r in table_info_rows] + columns = table_column_details(conn, table) + pks = [column for column in columns if column.is_pk] + pks.sort(key=lambda column: column.is_pk) + return [column.name for column in pks] def get_outbound_foreign_keys(conn, table): @@ -570,10 +561,22 @@ def table_columns(conn, table): def table_column_details(conn, table): - return [ - Column(*r) - for r in conn.execute(f"PRAGMA table_info({escape_sqlite(table)});").fetchall() - ] + if sqlite_version() >= (3, 26, 0): + # table_xinfo was added in 3.26.0 + return [ + Column(*r) + for r in conn.execute( + f"PRAGMA table_xinfo({escape_sqlite(table)});" + ).fetchall() + ] + else: + # Treat hidden as 0 for all columns + return [ + Column(*(list(r) + [0])) + for r in conn.execute( + f"PRAGMA table_info({escape_sqlite(table)});" + ).fetchall() + ] filter_column_re = re.compile(r"^_filter_column_\d+$") diff --git a/datasette/utils/sqlite.py b/datasette/utils/sqlite.py new file mode 100644 index 00000000..9a043ccd --- /dev/null +++ b/datasette/utils/sqlite.py @@ -0,0 +1,28 @@ +try: + import pysqlite3 as sqlite3 +except ImportError: + import sqlite3 + +if hasattr(sqlite3, "enable_callback_tracebacks"): + sqlite3.enable_callback_tracebacks(True) + +_cached_sqlite_version = None + + +def sqlite_version(): + global _cached_sqlite_version + if _cached_sqlite_version is None: + _cached_sqlite_version = _sqlite_version() + return _cached_sqlite_version + + +def _sqlite_version(): + return tuple( + map( + int, + sqlite3.connect(":memory:") + .execute("select sqlite_version()") + .fetchone()[0] + .split("."), + ) + ) diff --git a/tests/conftest.py b/tests/conftest.py index 91b811e2..a963a4fd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,6 +3,11 @@ import pathlib import pytest import re +try: + import pysqlite3 as sqlite3 +except ImportError: + import sqlite3 + UNDOCUMENTED_PERMISSIONS = { "this_is_allowed", "this_is_denied", @@ -12,6 +17,12 @@ UNDOCUMENTED_PERMISSIONS = { } +def pytest_report_header(config): + return "SQLite: {}".format( + sqlite3.connect(":memory:").execute("select sqlite_version()").fetchone()[0] + ) + + def pytest_configure(config): import sys diff --git a/tests/fixtures.py b/tests/fixtures.py index f95a2d6b..b0c98f39 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -1,5 +1,5 @@ from datasette.app import Datasette -from datasette.utils import sqlite3 +from datasette.utils.sqlite import sqlite3 from datasette.utils.testing import TestClient import click import contextlib diff --git a/tests/test_api.py b/tests/test_api.py index 848daf9c..5676622e 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,5 +1,7 @@ +from datasette.app import Datasette from datasette.plugins import DEFAULT_PLUGINS from datasette.utils import detect_json1 +from datasette.utils.sqlite import sqlite3, sqlite_version from datasette.version import __version__ from .fixtures import ( # noqa app_client, @@ -514,7 +516,20 @@ def test_database_page(app_client): }, { "name": "searchable_fts", - "columns": ["text1", "text2", "name with . and spaces"], + "columns": [ + "text1", + "text2", + "name with . 
and spaces", + ] + + ( + [ + "searchable_fts", + "docid", + "__langid", + ] + if sqlite_version() >= (3, 26, 0) + else [] + ), "primary_keys": [], "count": 2, "hidden": True, @@ -1913,3 +1928,37 @@ def test_paginate_using_link_header(app_client, qs): else: path = None assert num_pages == 21 + + +@pytest.mark.skipif( + sqlite_version() < (3, 31, 0), + reason="generated columns were added in SQLite 3.31.0", +) +@pytest.mark.asyncio +async def test_generated_columns_are_visible_in_datasette(tmp_path_factory): + db_directory = tmp_path_factory.mktemp("dbs") + db_path = db_directory / "test.db" + conn = sqlite3.connect(str(db_path)) + conn.executescript( + """ + CREATE TABLE deeds ( + body TEXT, + id INT GENERATED ALWAYS AS (json_extract(body, '$.id')) STORED, + consideration INT GENERATED ALWAYS AS (json_extract(body, '$.consideration')) STORED + ); + INSERT INTO deeds (body) VALUES ('{ + "id": 1, + "consideration": "This is the consideration" + }'); + """ + ) + datasette = Datasette([db_path]) + response = await datasette.client.get("/test/deeds.json?_shape=array") + assert response.json() == [ + { + "rowid": 1, + "body": '{\n "id": 1,\n "consideration": "This is the consideration"\n }', + "id": 1, + "consideration": "This is the consideration", + } + ] diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index cd158474..015c6ace 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -1,9 +1,9 @@ import json import pytest -import sqlite3 from datasette.app import Datasette from datasette.cli import cli +from datasette.utils.sqlite import sqlite3 from .fixtures import TestClient as _TestClient from click.testing import CliRunner diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 7c8f478c..e50cf20e 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -2,7 +2,8 @@ Tests for the datasette.database.Database class """ from datasette.database import Database, Results, MultipleValues -from datasette.utils import sqlite3, Column +from datasette.utils.sqlite import sqlite3 +from datasette.utils import Column from .fixtures import app_client import pytest import time @@ -120,6 +121,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, + hidden=0, ), Column( cid=1, @@ -128,6 +130,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=2, @@ -136,6 +139,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=3, @@ -144,6 +148,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=4, @@ -152,6 +157,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=5, @@ -160,6 +166,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=6, @@ -168,6 +175,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=7, @@ -176,6 +184,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=8, @@ -184,6 +193,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=9, @@ -192,6 +202,7 @@ async def test_table_columns(db, table, expected): notnull=0, 
default_value=None, is_pk=0, + hidden=0, ), ], ), @@ -205,6 +216,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, + hidden=0, ), Column( cid=1, @@ -213,6 +225,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=2, + hidden=0, ), Column( cid=2, @@ -221,6 +234,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=3, @@ -229,6 +243,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=4, @@ -237,6 +252,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=5, @@ -245,6 +261,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=6, @@ -253,6 +270,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), ], ), diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 51faeccb..4554cfd4 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -9,14 +9,14 @@ from .fixtures import ( from datasette.app import Datasette from datasette import cli from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm -from datasette.utils import sqlite3, CustomRow +from datasette.utils.sqlite import sqlite3 +from datasette.utils import CustomRow from jinja2.environment import Template import base64 import json import os import pathlib import re -import sqlite3 import textwrap import pytest import urllib diff --git a/tests/test_utils.py b/tests/test_utils.py index 07e6f870..56306339 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -4,11 +4,11 @@ Tests for various datasette helper functions. 
from datasette.app import Datasette from datasette import utils from datasette.utils.asgi import Request +from datasette.utils.sqlite import sqlite3 import json import os import pathlib import pytest -import sqlite3 import tempfile from unittest.mock import patch From 17cbbb1f7f230b39650afac62dd16476626001b5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 16:28:02 -0800 Subject: [PATCH 0059/1455] generated_columns table in fixtures.py, closes #1119 --- datasette/utils/__init__.py | 4 +- datasette/utils/sqlite.py | 8 ++++ tests/fixtures.py | 19 ++++++++- tests/test_api.py | 51 ++++++++++++----------- tests/test_internals_database.py | 70 +++++++++++++++++--------------- tests/test_plugins.py | 6 +-- 6 files changed, 93 insertions(+), 65 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index b951539d..2576090a 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -19,7 +19,7 @@ import urllib import numbers import yaml from .shutil_backport import copytree -from .sqlite import sqlite3, sqlite_version +from .sqlite import sqlite3, sqlite_version, supports_table_xinfo from ..plugins import pm @@ -561,7 +561,7 @@ def table_columns(conn, table): def table_column_details(conn, table): - if sqlite_version() >= (3, 26, 0): + if supports_table_xinfo(): # table_xinfo was added in 3.26.0 return [ Column(*r) diff --git a/datasette/utils/sqlite.py b/datasette/utils/sqlite.py index 9a043ccd..c8522f35 100644 --- a/datasette/utils/sqlite.py +++ b/datasette/utils/sqlite.py @@ -26,3 +26,11 @@ def _sqlite_version(): .split("."), ) ) + + +def supports_table_xinfo(): + return sqlite_version() >= (3, 26, 0) + + +def supports_generated_columns(): + return sqlite_version() >= (3, 31, 0) diff --git a/tests/fixtures.py b/tests/fixtures.py index b0c98f39..b52a531f 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -1,5 +1,5 @@ from datasette.app import Datasette -from datasette.utils.sqlite import sqlite3 +from datasette.utils.sqlite import sqlite3, sqlite_version, supports_generated_columns from datasette.utils.testing import TestClient import click import contextlib @@ -116,6 +116,8 @@ def make_app_client( immutables = [] conn = sqlite3.connect(filepath) conn.executescript(TABLES) + if supports_generated_columns(): + conn.executescript(GENERATED_COLUMNS_SQL) for sql, params in TABLE_PARAMETERIZED_SQL: with conn: conn.execute(sql, params) @@ -699,6 +701,18 @@ INSERT INTO "searchable_fts" (rowid, text1, text2) SELECT rowid, text1, text2 FROM searchable; """ +GENERATED_COLUMNS_SQL = """ +CREATE TABLE generated_columns ( + body TEXT, + id INT GENERATED ALWAYS AS (json_extract(body, '$.number')) STORED, + consideration INT GENERATED ALWAYS AS (json_extract(body, '$.string')) STORED +); +INSERT INTO generated_columns (body) VALUES ('{ + "number": 1, + "string": "This is a string" +}'); +""" + def assert_permissions_checked(datasette, actions): # actions is a list of "action" or (action, resource) tuples @@ -754,6 +768,9 @@ def cli(db_filename, metadata, plugins_path, recreate): for sql, params in TABLE_PARAMETERIZED_SQL: with conn: conn.execute(sql, params) + if supports_generated_columns(): + with conn: + conn.executescript(GENERATED_COLUMNS_SQL) print(f"Test tables written to {db_filename}") if metadata: open(metadata, "w").write(json.dumps(METADATA, indent=4)) diff --git a/tests/test_api.py b/tests/test_api.py index 5676622e..f82a8fe9 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,7 +1,7 @@ from datasette.app import 
Datasette from datasette.plugins import DEFAULT_PLUGINS from datasette.utils import detect_json1 -from datasette.utils.sqlite import sqlite3, sqlite_version +from datasette.utils.sqlite import sqlite3, sqlite_version, supports_table_xinfo from datasette.version import __version__ from .fixtures import ( # noqa app_client, @@ -19,6 +19,7 @@ from .fixtures import ( # noqa generate_compound_rows, generate_sortable_rows, make_app_client, + supports_generated_columns, EXPECTED_PLUGINS, METADATA, ) @@ -35,7 +36,7 @@ def test_homepage(app_client): assert response.json.keys() == {"fixtures": 0}.keys() d = response.json["fixtures"] assert d["name"] == "fixtures" - assert d["tables_count"] == 24 + assert d["tables_count"] == 25 if supports_generated_columns() else 24 assert len(d["tables_and_views_truncated"]) == 5 assert d["tables_and_views_more"] is True # 4 hidden FTS tables + no_primary_key (hidden in metadata) @@ -268,6 +269,22 @@ def test_database_page(app_client): }, "private": False, }, + ] + ( + [ + { + "columns": ["body", "id", "consideration"], + "count": 1, + "foreign_keys": {"incoming": [], "outgoing": []}, + "fts_table": None, + "hidden": False, + "name": "generated_columns", + "primary_keys": [], + "private": False, + } + ] + if supports_generated_columns() + else [] + ) + [ { "name": "infinity", "columns": ["value"], @@ -527,7 +544,7 @@ def test_database_page(app_client): "docid", "__langid", ] - if sqlite_version() >= (3, 26, 0) + if supports_table_xinfo() else [] ), "primary_keys": [], @@ -1934,31 +1951,13 @@ def test_paginate_using_link_header(app_client, qs): sqlite_version() < (3, 31, 0), reason="generated columns were added in SQLite 3.31.0", ) -@pytest.mark.asyncio -async def test_generated_columns_are_visible_in_datasette(tmp_path_factory): - db_directory = tmp_path_factory.mktemp("dbs") - db_path = db_directory / "test.db" - conn = sqlite3.connect(str(db_path)) - conn.executescript( - """ - CREATE TABLE deeds ( - body TEXT, - id INT GENERATED ALWAYS AS (json_extract(body, '$.id')) STORED, - consideration INT GENERATED ALWAYS AS (json_extract(body, '$.consideration')) STORED - ); - INSERT INTO deeds (body) VALUES ('{ - "id": 1, - "consideration": "This is the consideration" - }'); - """ - ) - datasette = Datasette([db_path]) - response = await datasette.client.get("/test/deeds.json?_shape=array") +async def test_generated_columns_are_visible_in_datasette(app_client): + response = app_client.get("/test/generated_columns.json?_shape=array") assert response.json() == [ { "rowid": 1, - "body": '{\n "id": 1,\n "consideration": "This is the consideration"\n }', - "id": 1, - "consideration": "This is the consideration", + "body": '{\n "number": 1,\n "string": "This is a string"\n }', + "number": 1, + "string": "This is a string", } ] diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index e50cf20e..49b8a1b3 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -2,7 +2,7 @@ Tests for the datasette.database.Database class """ from datasette.database import Database, Results, MultipleValues -from datasette.utils.sqlite import sqlite3 +from datasette.utils.sqlite import sqlite3, supports_generated_columns from datasette.utils import Column from .fixtures import app_client import pytest @@ -340,38 +340,42 @@ async def test_get_all_foreign_keys(db): @pytest.mark.asyncio async def test_table_names(db): table_names = await db.table_names() - assert table_names == [ - "simple_primary_key", - "primary_key_multiple_columns", - 
"primary_key_multiple_columns_explicit_label", - "compound_primary_key", - "compound_three_primary_keys", - "foreign_key_references", - "sortable", - "no_primary_key", - "123_starts_with_digits", - "Table With Space In Name", - "table/with/slashes.csv", - "complex_foreign_keys", - "custom_foreign_key_label", - "units", - "tags", - "searchable", - "searchable_tags", - "searchable_fts", - "searchable_fts_segments", - "searchable_fts_segdir", - "searchable_fts_docsize", - "searchable_fts_stat", - "select", - "infinity", - "facet_cities", - "facetable", - "binary_data", - "roadside_attractions", - "attraction_characteristic", - "roadside_attraction_characteristics", - ] + assert ( + table_names + == [ + "simple_primary_key", + "primary_key_multiple_columns", + "primary_key_multiple_columns_explicit_label", + "compound_primary_key", + "compound_three_primary_keys", + "foreign_key_references", + "sortable", + "no_primary_key", + "123_starts_with_digits", + "Table With Space In Name", + "table/with/slashes.csv", + "complex_foreign_keys", + "custom_foreign_key_label", + "units", + "tags", + "searchable", + "searchable_tags", + "searchable_fts", + "searchable_fts_segments", + "searchable_fts_segdir", + "searchable_fts_docsize", + "searchable_fts_stat", + "select", + "infinity", + "facet_cities", + "facetable", + "binary_data", + "roadside_attractions", + "attraction_characteristic", + "roadside_attraction_characteristics", + ] + + (["generated_columns"] if supports_generated_columns() else []) + ) @pytest.mark.asyncio diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 4554cfd4..dab5ef68 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -413,8 +413,7 @@ def test_hook_register_output_renderer_all_parameters(app_client): # Lots of 'at 0x103a4a690' in here - replace those so we can do # an easy comparison body = at_memory_re.sub(" at 0xXXX", response.text) - assert { - "1+1": 2, + assert json.loads(body) == { "datasette": "", "columns": [ "pk", @@ -451,7 +450,8 @@ def test_hook_register_output_renderer_all_parameters(app_client): "table": "facetable", "request": "", "view_name": "table", - } == json.loads(body) + "1+1": 2, + } # Test that query_name is set correctly query_response = app_client.get("/fixtures/pragma_cache_size.testall") assert "pragma_cache_size" == json.loads(query_response.body)["query_name"] From a970276b9999687b96c5e11ea1c817d814f5d267 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 17:19:09 -0800 Subject: [PATCH 0060/1455] Try pysqlite3 on latest.datasette.io --install=pysqlite3-binary to get a working demo of generated columns, refs #1119 --- .github/workflows/deploy-latest.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 7a41bda2..05f0bad1 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -54,6 +54,7 @@ jobs: --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1" \ + --install=pysqlite3-binary \ --service=datasette-latest # Deploy docs.db to a different service datasette publish cloudrun docs.db \ From 88ac538b41a4753c3de9b509c3a0e13077f66182 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 2 Dec 2020 15:47:37 -0800 Subject: [PATCH 0061/1455] transfer-encoding: chunked for DB downloads, refs #749 This should get >32MB downloads working on Cloud Run. 
--- datasette/views/database.py | 1 + tests/test_html.py | 1 + 2 files changed, 2 insertions(+) diff --git a/datasette/views/database.py b/datasette/views/database.py index 17c78150..f6fd579c 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -169,6 +169,7 @@ class DatabaseDownload(DataView): headers = {} if self.ds.cors: headers["Access-Control-Allow-Origin"] = "*" + headers["Transfer-Encoding"] = "chunked" return AsgiFileDownload( filepath, filename=os.path.basename(filepath), diff --git a/tests/test_html.py b/tests/test_html.py index ecbf89b4..b9d3afcd 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1028,6 +1028,7 @@ def test_database_download_for_immutable(): download_response.headers["content-disposition"] == 'attachment; filename="fixtures.db"' ) + assert download_response.headers["transfer-encoding"] == "chunked" def test_database_download_disallowed_for_mutable(app_client): From daae35be46ec5cb8a207aa20986a4fa62e94777e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Abdussamet=20Ko=C3=A7ak?= Date: Thu, 3 Dec 2020 03:33:36 +0300 Subject: [PATCH 0062/1455] Fix misaligned table actions cog Closes #1121. Thanks, @abdusco --- datasette/static/app.css | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index b9378a9e..9e498ab9 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -163,6 +163,8 @@ h6, } .page-header { + display: flex; + align-items: center; padding-left: 10px; border-left: 10px solid #666; margin-bottom: 0.75rem; @@ -175,11 +177,11 @@ h6, padding-right: 0.2em; } .page-header details { - display: inline; + display: inline-flex; } .page-header details > summary { list-style: none; - display: inline; + display: inline-flex; cursor: pointer; } .page-header details > summary::-webkit-details-marker { From a45a3dff3ea01a2382dcedae5923a7b821a12aec Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 2 Dec 2020 16:44:03 -0800 Subject: [PATCH 0063/1455] Fix for OPTIONS request against /db, closes #1100 --- datasette/utils/testing.py | 23 +++++++++++++++++++++++ datasette/views/base.py | 2 +- tests/test_api.py | 6 ++++++ 3 files changed, 30 insertions(+), 1 deletion(-) diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index bcbc1c7a..57b19ea5 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -99,6 +99,29 @@ class TestClient: content_type=content_type, ) + @async_to_sync + async def request( + self, + path, + allow_redirects=True, + redirect_count=0, + method="GET", + cookies=None, + headers=None, + post_body=None, + content_type=None, + ): + return await self._request( + path, + allow_redirects=allow_redirects, + redirect_count=redirect_count, + method=method, + cookies=cookies, + headers=headers, + post_body=post_body, + content_type=content_type, + ) + async def _request( self, path, diff --git a/datasette/views/base.py b/datasette/views/base.py index 5ba8fcb1..a93a6378 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -155,7 +155,7 @@ class DataView(BaseView): name = "" re_named_parameter = re.compile(":([a-zA-Z0-9_]+)") - def options(self, request, *args, **kwargs): + async def options(self, request, *args, **kwargs): r = Response.text("ok") if self.ds.cors: r.headers["Access-Control-Allow-Origin"] = "*" diff --git a/tests/test_api.py b/tests/test_api.py index f82a8fe9..016894b4 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1961,3 +1961,9 @@ async def 
test_generated_columns_are_visible_in_datasette(app_client): "string": "This is a string", } ] + + +def test_http_options_request(app_client): + response = app_client.request("/fixtures", method="OPTIONS") + assert response.status == 200 + assert response.text == "ok" From 13c960c03b46e35f3432063a19f3f528ca249e23 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 2 Dec 2020 16:49:43 -0800 Subject: [PATCH 0064/1455] Test is no longer order dependent, closes #1123 --- tests/test_plugins.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index dab5ef68..93b444ab 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -797,9 +797,11 @@ def test_hook_table_actions(app_client, table_or_view): assert get_table_actions_links(response.text) == [] response_2 = app_client.get(f"/fixtures/{table_or_view}?_bot=1") - assert get_table_actions_links(response_2.text) == [ - {"label": "From async", "href": "/"}, + assert sorted( + get_table_actions_links(response_2.text), key=lambda l: l["label"] + ) == [ {"label": "Database: fixtures", "href": "/"}, + {"label": "From async", "href": "/"}, {"label": f"Table: {table_or_view}", "href": "/"}, ] From e048791a9a2686f47d81a2c8aa88aa1966d82521 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 2 Dec 2020 16:57:40 -0800 Subject: [PATCH 0065/1455] Release 0.52.2 Refs #1116, #1115, #1100, #749, #1121 --- datasette/version.py | 2 +- docs/changelog.rst | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 119295b3..0353358a 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52.1" +__version__ = "0.52.2" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index a77cf5a5..6fb06beb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,17 @@ Changelog ========= +.. _v0_52_2: + +0.52.2 (2020-12-02) +------------------- + +- Generated columns from SQLite 3.31.0 or higher are now correctly displayed. (`#1116 `__) +- Error message if you attempt to open a SpatiaLite database now suggests using ``--load-extension=spatialite`` if it detects that the extension is available in a common location. (`#1115 `__) +- ``OPTIONS`` requests against the ``/database`` page no longer raise a 500 error. (`#1100 `__) +- Databases larger than 32MB that are published to Cloud Run can now be downloaded. (`#749 `__) +- Fix for misaligned cog icon on table and database pages. Thanks, Abdussamet Koçak. (`#1121 `__) + .. 
_v0_52_1: 0.52.1 (2020-11-29) From 6b4c55efea3e9d34d92cbe5f0066553ad9b14071 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 10:53:26 -0800 Subject: [PATCH 0066/1455] Fix for Amazon Linux static assets 404ing, refs #1124 --- datasette/utils/asgi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index ce78a597..31b0bdcd 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -294,7 +294,7 @@ def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None): return # Ensure full_path is within root_path to avoid weird "../" tricks try: - full_path.relative_to(root_path) + full_path.relative_to(root_path.resolve()) except ValueError: await asgi_send_html(send, "404", 404) return From 63efcb35ce879fe68ee02411c8dd2fd5f127cc32 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 11:02:53 -0800 Subject: [PATCH 0067/1455] More tweaks to root_path handling, refs #1124 --- datasette/utils/asgi.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 31b0bdcd..3b41c2d7 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -282,10 +282,12 @@ async def asgi_send_file( def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None): + root_path = Path(root_path) + async def inner_static(request, send): path = request.scope["url_route"]["kwargs"]["path"] try: - full_path = (Path(root_path) / path).resolve().absolute() + full_path = (root_path / path).resolve().absolute() except FileNotFoundError: await asgi_send_html(send, "404", 404) return From ca6e8e53dc9b094a5ce169d81a69d872546e595a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 11:05:12 -0800 Subject: [PATCH 0068/1455] More helpful 404 messages, refs #1124 --- datasette/utils/asgi.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 3b41c2d7..363f059f 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -289,7 +289,7 @@ def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None): try: full_path = (root_path / path).resolve().absolute() except FileNotFoundError: - await asgi_send_html(send, "404", 404) + await asgi_send_html(send, "404: Directory not found", 404) return if full_path.is_dir(): await asgi_send_html(send, "403: Directory listing is not allowed", 403) @@ -298,12 +298,12 @@ def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None): try: full_path.relative_to(root_path.resolve()) except ValueError: - await asgi_send_html(send, "404", 404) + await asgi_send_html(send, "404: Path not inside root path", 404) return try: await asgi_send_file(send, full_path, chunk_size=chunk_size) except FileNotFoundError: - await asgi_send_html(send, "404", 404) + await asgi_send_html(send, "404: File not found", 404) return return inner_static From 4cce5516661b24afeddaf35bee84b00fbf5c7f89 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 11:07:05 -0800 Subject: [PATCH 0069/1455] Release 0.52.3 Refs #1124 --- datasette/version.py | 2 +- docs/changelog.rst | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 0353358a..ab02947d 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52.2" +__version__ = "0.52.3" __version_info__ = tuple(__version__.split(".")) 
diff --git a/docs/changelog.rst b/docs/changelog.rst index 6fb06beb..4fa7609c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,13 @@ Changelog ========= +.. _v0_52_3: + +0.52.3 (2020-12-03) +------------------- + +- Fixed bug where static assets would 404 for Datasette installed on ARM Amazon Linux. (`#1124 `__) + .. _v0_52_2: 0.52.2 (2020-12-02) From 00185af74a91646d47aa54f2369c1a19a6f76a27 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 14:08:50 -0800 Subject: [PATCH 0070/1455] Show pysqlite3 version on /-/versions, if installed - #1125 --- datasette/app.py | 14 ++++++++++++-- datasette/utils/sqlite.py | 3 +++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 922046d5..b2f16257 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -9,6 +9,7 @@ import inspect from itsdangerous import BadSignature import json import os +import pkg_resources import re import secrets import sys @@ -57,7 +58,6 @@ from .utils import ( module_from_path, parse_metadata, resolve_env_secrets, - sqlite3, to_css_class, HASH_LENGTH, ) @@ -74,6 +74,10 @@ from .utils.asgi import ( asgi_send_json, asgi_send_redirect, ) +from .utils.sqlite import ( + sqlite3, + using_pysqlite3, +) from .tracer import AsgiTracer from .plugins import pm, DEFAULT_PLUGINS, get_plugins from .version import __version__ @@ -619,7 +623,7 @@ class Datasette: datasette_version = {"version": __version__} if self.version_note: datasette_version["note"] = self.version_note - return { + info = { "python": { "version": ".".join(map(str, sys.version_info[:3])), "full": sys.version, @@ -636,6 +640,12 @@ class Datasette: ], }, } + if using_pysqlite3: + try: + info["pysqlite3"] = pkg_resources.get_distribution("pysqlite3").version + except pkg_resources.DistributionNotFound: + pass + return info def _plugins(self, request=None, all=False): ps = list(get_plugins()) diff --git a/datasette/utils/sqlite.py b/datasette/utils/sqlite.py index c8522f35..342ff3fa 100644 --- a/datasette/utils/sqlite.py +++ b/datasette/utils/sqlite.py @@ -1,5 +1,8 @@ +using_pysqlite3 = False try: import pysqlite3 as sqlite3 + + using_pysqlite3 = True except ImportError: import sqlite3 From e2fea36540e952d8d72c1bd0af7144b85b7a4671 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 19:12:33 -0800 Subject: [PATCH 0071/1455] Switch to google-github-actions/setup-gcloud - refs #1126 --- .github/workflows/deploy-latest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 05f0bad1..2de0a8b6 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -39,7 +39,7 @@ jobs: sphinx-to-sqlite ../docs.db _build cd .. 
- name: Set up Cloud Run - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master + uses: google-github-actions/setup-gcloud@master with: version: '275.0.0' service_account_email: ${{ secrets.GCP_SA_EMAIL }} From 49d8fc056844d5a537d6cfd96dab0dd5686fe718 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 20:07:10 -0800 Subject: [PATCH 0072/1455] Try pysqlite3-binary version as well, refs #1125 --- datasette/app.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index b2f16257..9bc84df0 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -641,10 +641,12 @@ class Datasette: }, } if using_pysqlite3: - try: - info["pysqlite3"] = pkg_resources.get_distribution("pysqlite3").version - except pkg_resources.DistributionNotFound: - pass + for package in ("pysqlite3", "pysqlite3-binary"): + try: + info["pysqlite3"] = pkg_resources.get_distribution(package).version + break + except pkg_resources.DistributionNotFound: + pass return info def _plugins(self, request=None, all=False): From 42efb799ea9b362f0c7598f3ff3c4bf46c18e53f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 4 Dec 2020 21:20:12 -0800 Subject: [PATCH 0073/1455] Fixed invalid test for generated columns, refs #1119 --- tests/test_api.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index 016894b4..4339507c 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1951,14 +1951,14 @@ def test_paginate_using_link_header(app_client, qs): sqlite_version() < (3, 31, 0), reason="generated columns were added in SQLite 3.31.0", ) -async def test_generated_columns_are_visible_in_datasette(app_client): - response = app_client.get("/test/generated_columns.json?_shape=array") - assert response.json() == [ +def test_generated_columns_are_visible_in_datasette(app_client): + response = app_client.get("/fixtures/generated_columns.json?_shape=array") + assert response.json == [ { "rowid": 1, - "body": '{\n "number": 1,\n "string": "This is a string"\n }', - "number": 1, - "string": "This is a string", + "body": '{\n "number": 1,\n "string": "This is a string"\n}', + "id": 1, + "consideration": "This is a string", } ] From eae103a82b92949189cf718794d2ad0424005460 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 4 Dec 2020 21:21:11 -0800 Subject: [PATCH 0074/1455] Write errors to stderr, closes #1131 --- datasette/database.py | 10 ++++++---- datasette/renderer.py | 1 - datasette/views/base.py | 4 +++- tests/test_cli.py | 7 +++++++ 4 files changed, 16 insertions(+), 6 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 71c45ba0..412e0c59 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -1,8 +1,8 @@ import asyncio -import contextlib from pathlib import Path import janus import queue +import sys import threading import uuid @@ -104,7 +104,8 @@ class Database: try: result = task.fn(conn) except Exception as e: - print(e) + sys.stderr.write("{}\n".format(e)) + sys.stderr.flush() result = e task.reply_queue.sync_q.put(result) @@ -156,11 +157,12 @@ class Database: if e.args == ("interrupted",): raise QueryInterrupted(e, sql, params) if log_sql_errors: - print( - "ERROR: conn={}, sql = {}, params = {}: {}".format( + sys.stderr.write( + "ERROR: conn={}, sql = {}, params = {}: {}\n".format( conn, repr(sql), params, e ) ) + sys.stderr.flush() raise if truncate: diff --git a/datasette/renderer.py b/datasette/renderer.py index d779b44f..258199fc 100644 
--- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -20,7 +20,6 @@ def convert_specific_columns_to_json(rows, columns, json_cols): try: value = json.loads(value) except (TypeError, ValueError) as e: - print(e) pass new_row.append(value) new_rows.append(new_row) diff --git a/datasette/views/base.py b/datasette/views/base.py index a93a6378..b8860b74 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -2,6 +2,7 @@ import asyncio import csv import hashlib import re +import sys import time import urllib @@ -362,7 +363,8 @@ class DataView(BaseView): new_row.append(cell) await writer.writerow(new_row) except Exception as e: - print("caught this", e) + sys.stderr.write("Caught this error: {}\n".format(e)) + sys.stderr.flush() await r.write(str(e)) return diff --git a/tests/test_cli.py b/tests/test_cli.py index c52960fb..a0ac7d7a 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -214,3 +214,10 @@ def test_config_deprecated(ensure_eventloop): assert result.exit_code == 0 assert not json.loads(result.output)["allow_download"] assert "will be deprecated in" in result.stderr + + +def test_sql_errors_logged_to_stderr(ensure_eventloop): + runner = CliRunner(mix_stderr=False) + result = runner.invoke(cli, ["--get", "/:memory:.json?sql=select+blah"]) + assert result.exit_code == 1 + assert "sql = 'select blah', params = {}: no such column: blah\n" in result.stderr From 705d1a1555c4791e9be3b884285b047223ab184f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Abdussamet=20Ko=C3=A7ak?= Date: Sat, 5 Dec 2020 22:35:03 +0300 Subject: [PATCH 0075/1455] Fix startup error on windows (#1128) Fixes https://github.com/simonw/datasette/issues/1094 This import isn't used at all, and causes error on startup on Windows. --- datasette/utils/asgi.py | 1 - 1 file changed, 1 deletion(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 363f059f..fc9adcff 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -1,5 +1,4 @@ import json -from os import EX_CANTCREAT from datasette.utils import MultiParams from mimetypes import guess_type from urllib.parse import parse_qs, urlunparse, parse_qsl From 2dc281645a76c550789ede80c1bc6f733fa9a82e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 5 Dec 2020 11:41:40 -0800 Subject: [PATCH 0076/1455] Release 0.52.4 Refs #1125, #1131, #1094 --- datasette/version.py | 2 +- docs/changelog.rst | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index ab02947d..ce06fe1d 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52.3" +__version__ = "0.52.4" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 4fa7609c..a9922ab3 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,15 @@ Changelog ========= +.. _v0_52_4: + +0.52.4 (2020-12-05) +------------------- + +- Show `pysqlite3 `__ version on ``/-/versions``, if installed. (`#1125 `__) +- Errors output by Datasette (e.g. for invalid SQL queries) now go to ``stderr``, not ``stdout``. (`#1131 `__) +- Fix for a startup error on windows caused by unneccessary ``from os import EX_CANTCREAT`` - thanks, Abdussamet Koçak. (`#1094 `__) + .. 
_v0_52_3: 0.52.3 (2020-12-03) From e5930e6f889617320454ab53ecc1c438377d49e6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 5 Dec 2020 11:42:42 -0800 Subject: [PATCH 0077/1455] Typo fix in release notes --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index a9922ab3..86d844f7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -11,7 +11,7 @@ Changelog - Show `pysqlite3 `__ version on ``/-/versions``, if installed. (`#1125 `__) - Errors output by Datasette (e.g. for invalid SQL queries) now go to ``stderr``, not ``stdout``. (`#1131 `__) -- Fix for a startup error on windows caused by unneccessary ``from os import EX_CANTCREAT`` - thanks, Abdussamet Koçak. (`#1094 `__) +- Fix for a startup error on windows caused by unnecessary ``from os import EX_CANTCREAT`` - thanks, Abdussamet Koçak. (`#1094 `__) .. _v0_52_3: From e3143700a245d87bc532d44867b2e380b4225324 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 7 Dec 2020 11:00:10 -0800 Subject: [PATCH 0078/1455] Custom template for docs, linking to datasette.io --- docs/_templates/layout.html | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 docs/_templates/layout.html diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html new file mode 100644 index 00000000..b7b6f794 --- /dev/null +++ b/docs/_templates/layout.html @@ -0,0 +1,23 @@ +{%- extends "!layout.html" %} + +{% block sidebartitle %} + + + + + +{% if theme_display_version %} + {%- set nav_version = version %} + {% if READTHEDOCS and current_version %} + {%- set nav_version = current_version %} + {% endif %} + {% if nav_version %} +
+      {{ nav_version }}
    + {% endif %} +{% endif %} + +{% include "searchbox.html" %} + +{% endblock %} From 62a6f70c64e4d04c15d9f386dcdf9cd465bbb0f6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 7 Dec 2020 12:10:05 -0800 Subject: [PATCH 0079/1455] Fixed Markdown indentation of news To make it easier to programmatically extract. --- README.md | 76 +++++++++++++++++++++++++++---------------------------- 1 file changed, 38 insertions(+), 38 deletions(-) diff --git a/README.md b/README.md index c0019e9b..89245cf1 100644 --- a/README.md +++ b/README.md @@ -25,53 +25,53 @@ Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly new ## News - * 28th November 2020: [Datasette 0.52](https://docs.datasette.io/en/stable/changelog.html#v0-52) - `--config` is now `--setting`, new `database_actions` plugin hook, `datasette publish cloudrun --apt-get-install` option and several bug fixes. - * 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. [Annotated release notes](https://simonwillison.net/2020/Nov/1/datasette-0-51/). - * 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). - * 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). - * 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. - * 11th August 2020: [Datasette 0.47](https://docs.datasette.io/en/stable/changelog.html#v0-47) - Datasette can now be installed using Homebrew! `brew install simonw/datasette/datasette`. Also new: `datasette install name-of-plugin` and `datasette uninstall name-of-plugin` commands, and `datasette --get '/-/versions.json'` to output the result of Datasette HTTP calls on the command-line. - * 9th August 2020: [Datasette 0.46](https://docs.datasette.io/en/stable/changelog.html#v0-46) - security fix relating to CSRF protection for writable canned queries, a new logo, new debugging tools, improved file downloads and more. - * 6th August 2020: [GraphQL in Datasette with the new datasette-graphql plugin](https://simonwillison.net/2020/Aug/7/datasette-graphql/) - * 24th July 2020: Two new plugins: [datasette-copyable and datasette-insert-api](https://simonwillison.net/2020/Jul/23/datasette-copyable-datasette-insert-api/). `datasette-copyable` adds copy-and-paste export options, and `datasette-insert-api` lets you create tables and insert or update data by POSTing JSON directly to Datasette. 
- * 1st July 2020: [Datasette 0.45](https://docs.datasette.io/en/stable/changelog.html#v0-45) - [Magic parameters for canned queries](https://docs.datasette.io/en/stable/sql_queries.html#canned-queries-magic-parameters), a log out feature, improved plugin documentation and four new plugin hooks. See also [Datasette 0.45: The annotated release notes](https://simonwillison.net/2020/Jul/1/datasette-045/). - * 20th June 2020: [A cookiecutter template for writing Datasette plugins](https://simonwillison.net/2020/Jun/20/cookiecutter-plugins/) - * 11th June 2020: [Datasette 0.44](https://docs.datasette.io/en/stable/changelog.html#v0-44) - [Authentication and permissions](https://docs.datasette.io/en/stable/authentication.html), [writable canned queries](https://docs.datasette.io/en/stable/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more. - * 28th May 2020: [Datasette 0.43](https://docs.datasette.io/en/stable/changelog.html#v0-43) - Redesigned [register_output_renderer](https://docs.datasette.io/en/stable/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. - * 8th May 2020: [Datasette 0.42](https://docs.datasette.io/en/stable/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. - * 6th May 2020: [Datasette 0.41](https://docs.datasette.io/en/stable/changelog.html#v0-41) - New mechanism for [creating custom pages](https://docs.datasette.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://docs.datasette.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements. - * 21st April 2020: [Datasette 0.40](https://docs.datasette.io/en/stable/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. Various bug fixes. - * 24th March 2020: [Datasette 0.39](https://docs.datasette.io/en/stable/changelog.html#v0-39) - New `base_url` configuration option for running Datasette under a different URL prefix, `"sort"` and `"sort_desc"` metadata options for setting a default sort order for a table. - * 8th March 2020: [Datasette 0.38](https://docs.datasette.io/en/stable/changelog.html#v0-38) - New `--memory` option for `datasete publish cloudrun`, [Docker image](https://hub.docker.com/r/datasetteproject/datasette) upgraded to SQLite 3.31.1. - * 25th February 2020: [Datasette 0.37](https://docs.datasette.io/en/stable/changelog.html#v0-37) - new internal APIs enabling plugins to safely write to databases. Read more here: [Datasette Writes](https://simonwillison.net/2020/Feb/26/weeknotes-datasette-writes/). - * 21st February 2020: [Datasette 0.36](https://docs.datasette.io/en/stable/changelog.html#v0-36) - new internals documentation for plugins, `prepare_connection()` now accepts optional `database` and `datasette` arguments. - * 4th February 2020: [Datasette 0.35](https://docs.datasette.io/en/stable/changelog.html#v0-35) - new `.render_template()` method for plugins. - * 29th January 2020: [Datasette 0.34](https://docs.datasette.io/en/stable/changelog.html#v0-34) - improvements to search, `datasette publish cloudrun` and `datasette package`. 
- * 21st January 2020: [Deploying a data API using GitHub Actions and Cloud Run](https://simonwillison.net/2020/Jan/21/github-actions-cloud-run/) - how to use GitHub Actions and Google Cloud Run to automatically scrape data and deploy the result as an API with Datasette. - * 22nd December 2019: [Datasette 0.33](https://docs.datasette.io/en/stable/changelog.html#v0-33) - various small improvements. - * 19th December 2019: [Building tools to bring data-driven reporting to more newsrooms](https://medium.com/jsk-class-of-2020/building-tools-to-bring-data-driven-reporting-to-more-newsrooms-4520a0c9b3f2) - some notes on my JSK fellowship so far. - * 2nd December 2019: [Niche Museums](https://www.niche-museums.com/) is a new site entirely powered by Datasette, using custom templates and plugins. [niche-museums.com, powered by Datasette](https://simonwillison.net/2019/Nov/25/niche-museums/) describes how the site works, and [datasette-atom: Define an Atom feed using a custom SQL query](https://simonwillison.net/2019/Dec/3/datasette-atom/) describes how the new [datasette-atom plugin](https://github.com/simonw/datasette-atom) was used to add an Atom syndication feed to the site. - * 14th November 2019: [Datasette 0.32](https://docs.datasette.io/en/stable/changelog.html#v0-32) now uses asynchronous rendering in Jinja templates, which means template functions can perform asynchronous operations such as executing SQL queries. [datasette-template-sql](https://github.com/simonw/datasette-template-sql) is a new plugin uses this capability to add a new custom `sql(sql_query)` template function. - * 11th November 2019: [Datasette 0.31](https://docs.datasette.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5. - * 18th October 2019: [Datasette 0.30](https://docs.datasette.io/en/stable/changelog.html#v0-30) - * 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail. - * 7th July 2019: [Datasette 0.29](https://docs.datasette.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more... +* 28th November 2020: [Datasette 0.52](https://docs.datasette.io/en/stable/changelog.html#v0-52) - `--config` is now `--setting`, new `database_actions` plugin hook, `datasette publish cloudrun --apt-get-install` option and several bug fixes. +* 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. [Annotated release notes](https://simonwillison.net/2020/Nov/1/datasette-0-51/). +* 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). +* 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). 
+* 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. +* 11th August 2020: [Datasette 0.47](https://docs.datasette.io/en/stable/changelog.html#v0-47) - Datasette can now be installed using Homebrew! `brew install simonw/datasette/datasette`. Also new: `datasette install name-of-plugin` and `datasette uninstall name-of-plugin` commands, and `datasette --get '/-/versions.json'` to output the result of Datasette HTTP calls on the command-line. +* 9th August 2020: [Datasette 0.46](https://docs.datasette.io/en/stable/changelog.html#v0-46) - security fix relating to CSRF protection for writable canned queries, a new logo, new debugging tools, improved file downloads and more. +* 6th August 2020: [GraphQL in Datasette with the new datasette-graphql plugin](https://simonwillison.net/2020/Aug/7/datasette-graphql/) +* 24th July 2020: Two new plugins: [datasette-copyable and datasette-insert-api](https://simonwillison.net/2020/Jul/23/datasette-copyable-datasette-insert-api/). `datasette-copyable` adds copy-and-paste export options, and `datasette-insert-api` lets you create tables and insert or update data by POSTing JSON directly to Datasette. +* 1st July 2020: [Datasette 0.45](https://docs.datasette.io/en/stable/changelog.html#v0-45) - [Magic parameters for canned queries](https://docs.datasette.io/en/stable/sql_queries.html#canned-queries-magic-parameters), a log out feature, improved plugin documentation and four new plugin hooks. See also [Datasette 0.45: The annotated release notes](https://simonwillison.net/2020/Jul/1/datasette-045/). +* 20th June 2020: [A cookiecutter template for writing Datasette plugins](https://simonwillison.net/2020/Jun/20/cookiecutter-plugins/) +* 11th June 2020: [Datasette 0.44](https://docs.datasette.io/en/stable/changelog.html#v0-44) - [Authentication and permissions](https://docs.datasette.io/en/stable/authentication.html), [writable canned queries](https://docs.datasette.io/en/stable/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more. +* 28th May 2020: [Datasette 0.43](https://docs.datasette.io/en/stable/changelog.html#v0-43) - Redesigned [register_output_renderer](https://docs.datasette.io/en/stable/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. +* 8th May 2020: [Datasette 0.42](https://docs.datasette.io/en/stable/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. +* 6th May 2020: [Datasette 0.41](https://docs.datasette.io/en/stable/changelog.html#v0-41) - New mechanism for [creating custom pages](https://docs.datasette.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://docs.datasette.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements. +* 21st April 2020: [Datasette 0.40](https://docs.datasette.io/en/stable/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. Various bug fixes. 
+* 24th March 2020: [Datasette 0.39](https://docs.datasette.io/en/stable/changelog.html#v0-39) - New `base_url` configuration option for running Datasette under a different URL prefix, `"sort"` and `"sort_desc"` metadata options for setting a default sort order for a table. +* 8th March 2020: [Datasette 0.38](https://docs.datasette.io/en/stable/changelog.html#v0-38) - New `--memory` option for `datasete publish cloudrun`, [Docker image](https://hub.docker.com/r/datasetteproject/datasette) upgraded to SQLite 3.31.1. +* 25th February 2020: [Datasette 0.37](https://docs.datasette.io/en/stable/changelog.html#v0-37) - new internal APIs enabling plugins to safely write to databases. Read more here: [Datasette Writes](https://simonwillison.net/2020/Feb/26/weeknotes-datasette-writes/). +* 21st February 2020: [Datasette 0.36](https://docs.datasette.io/en/stable/changelog.html#v0-36) - new internals documentation for plugins, `prepare_connection()` now accepts optional `database` and `datasette` arguments. +* 4th February 2020: [Datasette 0.35](https://docs.datasette.io/en/stable/changelog.html#v0-35) - new `.render_template()` method for plugins. +* 29th January 2020: [Datasette 0.34](https://docs.datasette.io/en/stable/changelog.html#v0-34) - improvements to search, `datasette publish cloudrun` and `datasette package`. +* 21st January 2020: [Deploying a data API using GitHub Actions and Cloud Run](https://simonwillison.net/2020/Jan/21/github-actions-cloud-run/) - how to use GitHub Actions and Google Cloud Run to automatically scrape data and deploy the result as an API with Datasette. +* 22nd December 2019: [Datasette 0.33](https://docs.datasette.io/en/stable/changelog.html#v0-33) - various small improvements. +* 19th December 2019: [Building tools to bring data-driven reporting to more newsrooms](https://medium.com/jsk-class-of-2020/building-tools-to-bring-data-driven-reporting-to-more-newsrooms-4520a0c9b3f2) - some notes on my JSK fellowship so far. +* 2nd December 2019: [Niche Museums](https://www.niche-museums.com/) is a new site entirely powered by Datasette, using custom templates and plugins. [niche-museums.com, powered by Datasette](https://simonwillison.net/2019/Nov/25/niche-museums/) describes how the site works, and [datasette-atom: Define an Atom feed using a custom SQL query](https://simonwillison.net/2019/Dec/3/datasette-atom/) describes how the new [datasette-atom plugin](https://github.com/simonw/datasette-atom) was used to add an Atom syndication feed to the site. +* 14th November 2019: [Datasette 0.32](https://docs.datasette.io/en/stable/changelog.html#v0-32) now uses asynchronous rendering in Jinja templates, which means template functions can perform asynchronous operations such as executing SQL queries. [datasette-template-sql](https://github.com/simonw/datasette-template-sql) is a new plugin uses this capability to add a new custom `sql(sql_query)` template function. +* 11th November 2019: [Datasette 0.31](https://docs.datasette.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5. +* 18th October 2019: [Datasette 0.30](https://docs.datasette.io/en/stable/changelog.html#v0-30) +* 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail. 
+* 7th July 2019: [Datasette 0.29](https://docs.datasette.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more... * [datasette-auth-github](https://github.com/simonw/datasette-auth-github) - a new plugin for Datasette 0.29 that lets you require users to authenticate against GitHub before accessing your Datasette instance. You can whitelist specific users, or you can restrict access to members of specific GitHub organizations or teams. * [datasette-cors](https://github.com/simonw/datasette-cors) - a plugin that lets you configure CORS access from a list of domains (or a set of domain wildcards) so you can make JavaScript calls to a Datasette instance from a specific set of other hosts. - * 23rd June 2019: [Porting Datasette to ASGI, and Turtles all the way down](https://simonwillison.net/2019/Jun/23/datasette-asgi/) - * 21st May 2019: The anonymized raw data from [the Stack Overflow Developer Survey 2019](https://stackoverflow.blog/2019/05/21/public-data-release-of-stack-overflows-2019-developer-survey/) has been [published in partnership with Glitch](https://glitch.com/culture/discover-insights-explore-developer-survey-results-2019/), powered by Datasette. - * 19th May 2019: [Datasette 0.28](https://docs.datasette.io/en/stable/changelog.html#v0-28) - a salmagundi of new features! +* 23rd June 2019: [Porting Datasette to ASGI, and Turtles all the way down](https://simonwillison.net/2019/Jun/23/datasette-asgi/) +* 21st May 2019: The anonymized raw data from [the Stack Overflow Developer Survey 2019](https://stackoverflow.blog/2019/05/21/public-data-release-of-stack-overflows-2019-developer-survey/) has been [published in partnership with Glitch](https://glitch.com/culture/discover-insights-explore-developer-survey-results-2019/), powered by Datasette. +* 19th May 2019: [Datasette 0.28](https://docs.datasette.io/en/stable/changelog.html#v0-28) - a salmagundi of new features! * No longer immutable! Datasette now supports [databases that change](https://docs.datasette.io/en/stable/changelog.html#supporting-databases-that-change). * [Faceting improvements](https://docs.datasette.io/en/stable/changelog.html#faceting-improvements-and-faceting-plugins) including facet-by-JSON-array and the ability to define custom faceting using plugins. * [datasette publish cloudrun](https://docs.datasette.io/en/stable/changelog.html#datasette-publish-cloudrun) lets you publish databases to Google's new Cloud Run hosting service. * New [register_output_renderer](https://docs.datasette.io/en/stable/changelog.html#register-output-renderer-plugins) plugin hook for adding custom output extensions to Datasette in addition to the default `.json` and `.csv`. * Dozens of other smaller features and tweaks - see [the release notes](https://docs.datasette.io/en/stable/changelog.html#v0-28) for full details. * Read more about this release here: [Datasette 0.28—and why master should always be releasable](https://simonwillison.net/2019/May/19/datasette-0-28/) - * 24th February 2019: [ +* 24th February 2019: [ sqlite-utils: a Python library and CLI tool for building SQLite databases](https://simonwillison.net/2019/Feb/25/sqlite-utils/) - a partner tool for easily creating SQLite databases for use with Datasette. - * 31st Janary 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). 
- * 10th January 2019: [Datasette 0.26.1](https://docs.datasette.io/en/stable/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options. - * 2nd January 2019: [Datasette 0.26](https://docs.datasette.io/en/stable/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument. +* 31st Janary 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). +* 10th January 2019: [Datasette 0.26.1](https://docs.datasette.io/en/stable/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options. +* 2nd January 2019: [Datasette 0.26](https://docs.datasette.io/en/stable/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument. * 18th December 2018: [Fast Autocomplete Search for Your Website](https://24ways.org/2018/fast-autocomplete-search-for-your-website/) - a new tutorial on using Datasette to build a JavaScript autocomplete search engine. * 3rd October 2018: [The interesting ideas in Datasette](https://simonwillison.net/2018/Oct/4/datasette-ideas/) - a write-up of some of the less obvious interesting ideas embedded in the Datasette project. * 19th September 2018: [Datasette 0.25](https://docs.datasette.io/en/stable/changelog.html#v0-25) - New plugin hooks, improved database view support and an easier way to use more recent versions of SQLite. From 8ae0f9f7f0d644b0161165a1084f53acd2786f7c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 7 Dec 2020 12:16:13 -0800 Subject: [PATCH 0080/1455] Fixed spelling of Janary --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 89245cf1..7861abbd 100644 --- a/README.md +++ b/README.md @@ -69,7 +69,7 @@ Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly new * Read more about this release here: [Datasette 0.28—and why master should always be releasable](https://simonwillison.net/2019/May/19/datasette-0-28/) * 24th February 2019: [ sqlite-utils: a Python library and CLI tool for building SQLite databases](https://simonwillison.net/2019/Feb/25/sqlite-utils/) - a partner tool for easily creating SQLite databases for use with Datasette. -* 31st Janary 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). +* 31st January 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). * 10th January 2019: [Datasette 0.26.1](https://docs.datasette.io/en/stable/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options. * 2nd January 2019: [Datasette 0.26](https://docs.datasette.io/en/stable/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument. * 18th December 2018: [Fast Autocomplete Search for Your Website](https://24ways.org/2018/fast-autocomplete-search-for-your-website/) - a new tutorial on using Datasette to build a JavaScript autocomplete search engine. 
From 4c25b035b2370983c8dd5e0c8762e9154e379774 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 7 Dec 2020 14:41:03 -0800 Subject: [PATCH 0081/1455] arraynotcontains filter, closes #1132 --- datasette/filters.py | 11 ++++++++++- docs/json_api.rst | 7 ++++++- tests/test_api.py | 25 +++++++++++++++++++++++-- 3 files changed, 39 insertions(+), 4 deletions(-) diff --git a/datasette/filters.py b/datasette/filters.py index 1524b32a..edf2de99 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -154,7 +154,16 @@ class Filters: where j.value = :{p} )""", '{c} contains "{v}"', - ) + ), + TemplatedFilter( + "arraynotcontains", + "array does not contain", + """rowid not in ( + select {t}.rowid from {t}, json_each({t}.{c}) j + where j.value = :{p} + )""", + '{c} does not contain "{v}"', + ), ] if detect_json1() else [] diff --git a/docs/json_api.rst b/docs/json_api.rst index 8d45ac6f..582a6159 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -267,7 +267,12 @@ You can filter the data returned by the table based on column values using a que Rows where column does not match any of the provided values. The inverse of ``__in=``. Also supports JSON arrays. ``?column__arraycontains=value`` - Works against columns that contain JSON arrays - matches if any of the values in that array match. + Works against columns that contain JSON arrays - matches if any of the values in that array match the provided value. + + This is only available if the ``json1`` SQLite extension is enabled. + +``?column__arraynotcontains=value`` + Works against columns that contain JSON arrays - matches if none of the values in that array match the provided value. This is only available if the ``json1`` SQLite extension is enabled. diff --git a/tests/test_api.py b/tests/test_api.py index 4339507c..a4c30414 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1121,7 +1121,7 @@ def test_table_filter_queries_multiple_of_same_type(app_client): @pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module") def test_table_filter_json_arraycontains(app_client): response = app_client.get("/fixtures/facetable.json?tags__arraycontains=tag1") - assert [ + assert response.json["rows"] == [ [ 1, "2019-01-14 08:00:00", @@ -1146,7 +1146,28 @@ def test_table_filter_json_arraycontains(app_client): "[]", "two", ], - ] == response.json["rows"] + ] + + +@pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module") +def test_table_filter_json_arraynotcontains(app_client): + response = app_client.get( + "/fixtures/facetable.json?tags__arraynotcontains=tag3&tags__not=[]" + ) + assert response.json["rows"] == [ + [ + 1, + "2019-01-14 08:00:00", + 1, + 1, + "CA", + 1, + "Mission", + '["tag1", "tag2"]', + '[{"foo": "bar"}]', + "one", + ] + ] def test_table_filter_extra_where(app_client): From fe86d853089f324f92daa950cc56f4052bf78f98 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Dec 2020 11:45:45 -0800 Subject: [PATCH 0082/1455] datasette serve --create option, closes #1135 --- datasette/cli.py | 21 ++++++++++++++++++++- docs/datasette-serve-help.txt | 1 + tests/test_cli.py | 19 +++++++++++++++++++ 3 files changed, 40 insertions(+), 1 deletion(-) diff --git a/datasette/cli.py b/datasette/cli.py index e84695e3..32408d23 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -27,6 +27,7 @@ from .utils import ( StaticMount, ValueAsBooleanError, ) +from .utils.sqlite import sqlite3 from .utils.testing import TestClient from .version import __version__ @@ -299,7 +300,7 @@ def 
uninstall(packages, yes): @cli.command() -@click.argument("files", type=click.Path(exists=True), nargs=-1) +@click.argument("files", type=click.Path(), nargs=-1) @click.option( "-i", "--immutable", @@ -401,6 +402,11 @@ def uninstall(packages, yes): is_flag=True, help="Open Datasette in your web browser", ) +@click.option( + "--create", + is_flag=True, + help="Create database files if they do not exist", +) def serve( files, immutable, @@ -424,6 +430,7 @@ def serve( help_config, pdb, open_browser, + create, return_instance=False, ): """Serve up specified SQLite database files with a web UI""" @@ -486,6 +493,18 @@ def serve( kwargs["config_dir"] = pathlib.Path(files[0]) files = [] + # Verify list of files, create if needed (and --create) + for file in files: + if not pathlib.Path(file).exists(): + if create: + sqlite3.connect(file).execute("vacuum") + else: + raise click.ClickException( + "Invalid value for '[FILES]...': Path '{}' does not exist.".format( + file + ) + ) + try: ds = Datasette(files, **kwargs) except SpatialiteNotFound: diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index bdaf0894..079ec9f8 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -40,4 +40,5 @@ Options: --help-config Show available config options --pdb Launch debugger on any errors -o, --open Open Datasette in your web browser + --create Create database files if they do not exist --help Show this message and exit. diff --git a/tests/test_cli.py b/tests/test_cli.py index a0ac7d7a..3f6b1840 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -146,6 +146,7 @@ def test_metadata_yaml(): help_config=False, pdb=False, open_browser=False, + create=False, return_instance=True, ) client = _TestClient(ds) @@ -221,3 +222,21 @@ def test_sql_errors_logged_to_stderr(ensure_eventloop): result = runner.invoke(cli, ["--get", "/:memory:.json?sql=select+blah"]) assert result.exit_code == 1 assert "sql = 'select blah', params = {}: no such column: blah\n" in result.stderr + + +def test_serve_create(ensure_eventloop, tmpdir): + runner = CliRunner() + db_path = tmpdir / "does_not_exist_yet.db" + assert not db_path.exists() + result = runner.invoke( + cli, [str(db_path), "--create", "--get", "/-/databases.json"] + ) + assert result.exit_code == 0, result.output + databases = json.loads(result.output) + assert { + "name": "does_not_exist_yet", + "is_mutable": True, + "is_memory": False, + "hash": None, + }.items() <= databases[0].items() + assert db_path.exists() From 6000d1a724d0e28cdb102e7be83eac07a00b41e8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Dec 2020 11:56:44 -0800 Subject: [PATCH 0083/1455] Fix for combining ?_search_x and ?_searchmode=raw, closes #1134 --- datasette/views/table.py | 4 +++- tests/test_api.py | 7 +++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index a0de2a8e..3e9adf88 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -443,7 +443,9 @@ class TableView(RowTableShared): fts_table = fts_table or await db.fts_table(table) fts_pk = special_args.get("_fts_pk", table_metadata.get("fts_pk", "rowid")) search_args = dict( - pair for pair in special_args.items() if pair[0].startswith("_search") + pair + for pair in special_args.items() + if pair[0].startswith("_search") and pair[0] != "_searchmode" ) search = "" search_mode_raw = special_args.get("_searchmode") == "raw" diff --git a/tests/test_api.py b/tests/test_api.py index a4c30414..10755b95 100644 
--- a/tests/test_api.py +++ b/tests/test_api.py @@ -1035,6 +1035,13 @@ def test_sortable_columns_metadata(app_client): [2, "terry dog", "sara weasel", "puma"], ], ), + ( + # _searchmode=raw combined with _search_COLUMN + "/fixtures/searchable.json?_search_text2=te*&_searchmode=raw", + [ + [1, "barry cat", "terry dog", "panther"], + ], + ), ( "/fixtures/searchable.json?_search=weasel", [[2, "terry dog", "sara weasel", "puma"]], From 387b471b88788069191bc845224b7712d92e9c0b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Dec 2020 12:03:44 -0800 Subject: [PATCH 0084/1455] Release 0.52.5 Refs #1134 --- datasette/version.py | 2 +- docs/changelog.rst | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index ce06fe1d..b0a59018 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52.4" +__version__ = "0.52.5" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 86d844f7..c79e7c86 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,13 @@ Changelog ========= +.. _v0_52_5: + +0.52.5 (2020-12-09) +------------------- + +- Fix for error caused by combining the ``_searchmode=raw`` and ``?_search_COLUMN`` parameters. (`#1134 `__) + .. _v0_52_4: 0.52.4 (2020-12-05) From 4c6407cd74070237fdad0dd6df4d016740806fbd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Dec 2020 12:14:33 -0800 Subject: [PATCH 0085/1455] Releasing bug fixes from a branch, closes #1136 --- docs/contributing.rst | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/docs/contributing.rst b/docs/contributing.rst index ca194001..8cd9c210 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -204,6 +204,34 @@ You are welcome to try these out, but please be aware that details may change be Please join `discussions on the issue tracker `__ to share your thoughts and experiences with on alpha and beta features that you try out. +.. _contributing_bug_fix_branch: + +Releasing bug fixes from a branch +--------------------------------- + +If it's necessary to publish a bug fix release without shipping new features that have landed on ``main`` a release branch can be used. + +Create it from the relevant last tagged release like so:: + + git branch 0.52.x 0.52.4 + git checkout 0.52.x + +Next cherry-pick the commits containing the bug fixes:: + + git cherry-pick COMMIT + +Write the release notes in the branch, and update the version number in ``version.py``. Then push the branch:: + + git push -u origin 0.52.x + +Once the tests have completed, publish the release from that branch target using the GitHub `Draft a new release `__ form. + +Finally, cherry-pick the commit with the release notes and version number bump across to ``main``:: + + git checkout main + git cherry-pick COMMIT + git push + .. 
_contributing_upgrading_codemirror: Upgrading CodeMirror From e0b54d09115ded459e09e2e89e0962cfddcb0244 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 15:20:43 -0800 Subject: [PATCH 0086/1455] No longer using Wiki for examples --- README.md | 2 +- docs/index.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 7861abbd..71e488f7 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover [Explore a demo](https://fivethirtyeight.datasettes.com/fivethirtyeight), watch [a video about the project](https://www.youtube.com/watch?v=pTr1uLQTJNE) or try it out by [uploading and publishing your own CSV data](https://simonwillison.net/2019/Apr/23/datasette-glitch/). * Comprehensive documentation: https://docs.datasette.io/ -* Examples: https://github.com/simonw/datasette/wiki/Datasettes +* Examples: https://datasette.io/examples * Live demo of current main: https://latest.datasette.io/ * Support questions, feedback? Join our [GitHub Discussions forum](https://github.com/simonw/datasette/discussions) diff --git a/docs/index.rst b/docs/index.rst index ff8db04b..eafc5bdb 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -25,7 +25,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover `Explore a demo `__, watch `a presentation about the project `__ or :ref:`getting_started_glitch`. -More examples: https://github.com/simonw/datasette/wiki/Datasettes +More examples: https://datasette.io/examples Support questions, feedback? Join our `GitHub Discussions forum `__. From 7ef80d0145dc9a2a16c46823704517d7f35fbe45 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 15:24:16 -0800 Subject: [PATCH 0087/1455] News is now on datasette.io/news Closes #1137, closes #659 --- README.md | 83 ++++++------------------------------------------------- 1 file changed, 8 insertions(+), 75 deletions(-) diff --git a/README.md b/README.md index 71e488f7..16fc8f0e 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover [Explore a demo](https://fivethirtyeight.datasettes.com/fivethirtyeight), watch [a video about the project](https://www.youtube.com/watch?v=pTr1uLQTJNE) or try it out by [uploading and publishing your own CSV data](https://simonwillison.net/2019/Apr/23/datasette-glitch/). +* Latest [Datasette News](https://datasette.io/news) * Comprehensive documentation: https://docs.datasette.io/ * Examples: https://datasette.io/examples * Live demo of current main: https://latest.datasette.io/ @@ -23,83 +24,15 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly newsletter](https://datasette.substack.com/) for tips, tricks and news on what's new in the Datasette ecosystem. -## News - -* 28th November 2020: [Datasette 0.52](https://docs.datasette.io/en/stable/changelog.html#v0-52) - `--config` is now `--setting`, new `database_actions` plugin hook, `datasette publish cloudrun --apt-get-install` option and several bug fixes. -* 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. 
[Annotated release notes](https://simonwillison.net/2020/Nov/1/datasette-0-51/). -* 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). -* 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). -* 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. -* 11th August 2020: [Datasette 0.47](https://docs.datasette.io/en/stable/changelog.html#v0-47) - Datasette can now be installed using Homebrew! `brew install simonw/datasette/datasette`. Also new: `datasette install name-of-plugin` and `datasette uninstall name-of-plugin` commands, and `datasette --get '/-/versions.json'` to output the result of Datasette HTTP calls on the command-line. -* 9th August 2020: [Datasette 0.46](https://docs.datasette.io/en/stable/changelog.html#v0-46) - security fix relating to CSRF protection for writable canned queries, a new logo, new debugging tools, improved file downloads and more. -* 6th August 2020: [GraphQL in Datasette with the new datasette-graphql plugin](https://simonwillison.net/2020/Aug/7/datasette-graphql/) -* 24th July 2020: Two new plugins: [datasette-copyable and datasette-insert-api](https://simonwillison.net/2020/Jul/23/datasette-copyable-datasette-insert-api/). `datasette-copyable` adds copy-and-paste export options, and `datasette-insert-api` lets you create tables and insert or update data by POSTing JSON directly to Datasette. -* 1st July 2020: [Datasette 0.45](https://docs.datasette.io/en/stable/changelog.html#v0-45) - [Magic parameters for canned queries](https://docs.datasette.io/en/stable/sql_queries.html#canned-queries-magic-parameters), a log out feature, improved plugin documentation and four new plugin hooks. See also [Datasette 0.45: The annotated release notes](https://simonwillison.net/2020/Jul/1/datasette-045/). -* 20th June 2020: [A cookiecutter template for writing Datasette plugins](https://simonwillison.net/2020/Jun/20/cookiecutter-plugins/) -* 11th June 2020: [Datasette 0.44](https://docs.datasette.io/en/stable/changelog.html#v0-44) - [Authentication and permissions](https://docs.datasette.io/en/stable/authentication.html), [writable canned queries](https://docs.datasette.io/en/stable/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more. -* 28th May 2020: [Datasette 0.43](https://docs.datasette.io/en/stable/changelog.html#v0-43) - Redesigned [register_output_renderer](https://docs.datasette.io/en/stable/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. -* 8th May 2020: [Datasette 0.42](https://docs.datasette.io/en/stable/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. 
-* 6th May 2020: [Datasette 0.41](https://docs.datasette.io/en/stable/changelog.html#v0-41) - New mechanism for [creating custom pages](https://docs.datasette.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://docs.datasette.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements. -* 21st April 2020: [Datasette 0.40](https://docs.datasette.io/en/stable/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. Various bug fixes. -* 24th March 2020: [Datasette 0.39](https://docs.datasette.io/en/stable/changelog.html#v0-39) - New `base_url` configuration option for running Datasette under a different URL prefix, `"sort"` and `"sort_desc"` metadata options for setting a default sort order for a table. -* 8th March 2020: [Datasette 0.38](https://docs.datasette.io/en/stable/changelog.html#v0-38) - New `--memory` option for `datasete publish cloudrun`, [Docker image](https://hub.docker.com/r/datasetteproject/datasette) upgraded to SQLite 3.31.1. -* 25th February 2020: [Datasette 0.37](https://docs.datasette.io/en/stable/changelog.html#v0-37) - new internal APIs enabling plugins to safely write to databases. Read more here: [Datasette Writes](https://simonwillison.net/2020/Feb/26/weeknotes-datasette-writes/). -* 21st February 2020: [Datasette 0.36](https://docs.datasette.io/en/stable/changelog.html#v0-36) - new internals documentation for plugins, `prepare_connection()` now accepts optional `database` and `datasette` arguments. -* 4th February 2020: [Datasette 0.35](https://docs.datasette.io/en/stable/changelog.html#v0-35) - new `.render_template()` method for plugins. -* 29th January 2020: [Datasette 0.34](https://docs.datasette.io/en/stable/changelog.html#v0-34) - improvements to search, `datasette publish cloudrun` and `datasette package`. -* 21st January 2020: [Deploying a data API using GitHub Actions and Cloud Run](https://simonwillison.net/2020/Jan/21/github-actions-cloud-run/) - how to use GitHub Actions and Google Cloud Run to automatically scrape data and deploy the result as an API with Datasette. -* 22nd December 2019: [Datasette 0.33](https://docs.datasette.io/en/stable/changelog.html#v0-33) - various small improvements. -* 19th December 2019: [Building tools to bring data-driven reporting to more newsrooms](https://medium.com/jsk-class-of-2020/building-tools-to-bring-data-driven-reporting-to-more-newsrooms-4520a0c9b3f2) - some notes on my JSK fellowship so far. -* 2nd December 2019: [Niche Museums](https://www.niche-museums.com/) is a new site entirely powered by Datasette, using custom templates and plugins. [niche-museums.com, powered by Datasette](https://simonwillison.net/2019/Nov/25/niche-museums/) describes how the site works, and [datasette-atom: Define an Atom feed using a custom SQL query](https://simonwillison.net/2019/Dec/3/datasette-atom/) describes how the new [datasette-atom plugin](https://github.com/simonw/datasette-atom) was used to add an Atom syndication feed to the site. -* 14th November 2019: [Datasette 0.32](https://docs.datasette.io/en/stable/changelog.html#v0-32) now uses asynchronous rendering in Jinja templates, which means template functions can perform asynchronous operations such as executing SQL queries. 
[datasette-template-sql](https://github.com/simonw/datasette-template-sql) is a new plugin uses this capability to add a new custom `sql(sql_query)` template function. -* 11th November 2019: [Datasette 0.31](https://docs.datasette.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5. -* 18th October 2019: [Datasette 0.30](https://docs.datasette.io/en/stable/changelog.html#v0-30) -* 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail. -* 7th July 2019: [Datasette 0.29](https://docs.datasette.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more... - * [datasette-auth-github](https://github.com/simonw/datasette-auth-github) - a new plugin for Datasette 0.29 that lets you require users to authenticate against GitHub before accessing your Datasette instance. You can whitelist specific users, or you can restrict access to members of specific GitHub organizations or teams. - * [datasette-cors](https://github.com/simonw/datasette-cors) - a plugin that lets you configure CORS access from a list of domains (or a set of domain wildcards) so you can make JavaScript calls to a Datasette instance from a specific set of other hosts. -* 23rd June 2019: [Porting Datasette to ASGI, and Turtles all the way down](https://simonwillison.net/2019/Jun/23/datasette-asgi/) -* 21st May 2019: The anonymized raw data from [the Stack Overflow Developer Survey 2019](https://stackoverflow.blog/2019/05/21/public-data-release-of-stack-overflows-2019-developer-survey/) has been [published in partnership with Glitch](https://glitch.com/culture/discover-insights-explore-developer-survey-results-2019/), powered by Datasette. -* 19th May 2019: [Datasette 0.28](https://docs.datasette.io/en/stable/changelog.html#v0-28) - a salmagundi of new features! - * No longer immutable! Datasette now supports [databases that change](https://docs.datasette.io/en/stable/changelog.html#supporting-databases-that-change). - * [Faceting improvements](https://docs.datasette.io/en/stable/changelog.html#faceting-improvements-and-faceting-plugins) including facet-by-JSON-array and the ability to define custom faceting using plugins. - * [datasette publish cloudrun](https://docs.datasette.io/en/stable/changelog.html#datasette-publish-cloudrun) lets you publish databases to Google's new Cloud Run hosting service. - * New [register_output_renderer](https://docs.datasette.io/en/stable/changelog.html#register-output-renderer-plugins) plugin hook for adding custom output extensions to Datasette in addition to the default `.json` and `.csv`. - * Dozens of other smaller features and tweaks - see [the release notes](https://docs.datasette.io/en/stable/changelog.html#v0-28) for full details. - * Read more about this release here: [Datasette 0.28—and why master should always be releasable](https://simonwillison.net/2019/May/19/datasette-0-28/) -* 24th February 2019: [ -sqlite-utils: a Python library and CLI tool for building SQLite databases](https://simonwillison.net/2019/Feb/25/sqlite-utils/) - a partner tool for easily creating SQLite databases for use with Datasette. 
-* 31st January 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). -* 10th January 2019: [Datasette 0.26.1](https://docs.datasette.io/en/stable/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options. -* 2nd January 2019: [Datasette 0.26](https://docs.datasette.io/en/stable/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument. -* 18th December 2018: [Fast Autocomplete Search for Your Website](https://24ways.org/2018/fast-autocomplete-search-for-your-website/) - a new tutorial on using Datasette to build a JavaScript autocomplete search engine. -* 3rd October 2018: [The interesting ideas in Datasette](https://simonwillison.net/2018/Oct/4/datasette-ideas/) - a write-up of some of the less obvious interesting ideas embedded in the Datasette project. -* 19th September 2018: [Datasette 0.25](https://docs.datasette.io/en/stable/changelog.html#v0-25) - New plugin hooks, improved database view support and an easier way to use more recent versions of SQLite. -* 23rd July 2018: [Datasette 0.24](https://docs.datasette.io/en/stable/changelog.html#v0-24) - a number of small new features -* 29th June 2018: [datasette-vega](https://github.com/simonw/datasette-vega), a new plugin for visualizing data as bar, line or scatter charts -* 21st June 2018: [Datasette 0.23.1](https://docs.datasette.io/en/stable/changelog.html#v0-23-1) - minor bug fixes -* 18th June 2018: [Datasette 0.23: CSV, SpatiaLite and more](https://docs.datasette.io/en/stable/changelog.html#v0-23) - CSV export, foreign key expansion in JSON and CSV, new config options, improved support for SpatiaLite and a bunch of other improvements -* 23rd May 2018: [Datasette 0.22.1 bugfix](https://github.com/simonw/datasette/releases/tag/0.22.1) plus we now use [versioneer](https://github.com/warner/python-versioneer) -* 20th May 2018: [Datasette 0.22: Datasette Facets](https://simonwillison.net/2018/May/20/datasette-facets) -* 5th May 2018: [Datasette 0.21: New _shape=, new _size=, search within columns](https://github.com/simonw/datasette/releases/tag/0.21) -* 25th April 2018: [Exploring the UK Register of Members Interests with SQL and Datasette](https://simonwillison.net/2018/Apr/25/register-members-interests/) - a tutorial describing how [register-of-members-interests.datasettes.com](https://register-of-members-interests.datasettes.com/) was built ([source code here](https://github.com/simonw/register-of-members-interests)) -* 20th April 2018: [Datasette plugins, and building a clustered map visualization](https://simonwillison.net/2018/Apr/20/datasette-plugins/) - introducing Datasette's new plugin system and [datasette-cluster-map](https://pypi.org/project/datasette-cluster-map/), a plugin for visualizing data on a map -* 20th April 2018: [Datasette 0.20: static assets and templates for plugins](https://github.com/simonw/datasette/releases/tag/0.20) -* 16th April 2018: [Datasette 0.19: plugins preview](https://github.com/simonw/datasette/releases/tag/0.19) -* 14th April 2018: [Datasette 0.18: units](https://github.com/simonw/datasette/releases/tag/0.18) -* 9th April 2018: [Datasette 0.15: sort by column](https://github.com/simonw/datasette/releases/tag/0.15) -* 28th March 2018: [Baltimore Sun Public Salary 
Records](https://simonwillison.net/2018/Mar/28/datasette-in-the-wild/) - a data journalism project from the Baltimore Sun powered by Datasette - source code [is available here](https://github.com/baltimore-sun-data/salaries-datasette) -* 27th March 2018: [Cloud-first: Rapid webapp deployment using containers](https://wwwf.imperial.ac.uk/blog/research-software-engineering/2018/03/27/cloud-first-rapid-webapp-deployment-using-containers/) - a tutorial covering deploying Datasette using Microsoft Azure by the Research Software Engineering team at Imperial College London -* 28th January 2018: [Analyzing my Twitter followers with Datasette](https://simonwillison.net/2018/Jan/28/analyzing-my-twitter-followers/) - a tutorial on using Datasette to analyze follower data pulled from the Twitter API -* 17th January 2018: [Datasette Publish: a web app for publishing CSV files as an online database](https://simonwillison.net/2018/Jan/17/datasette-publish/) -* 12th December 2017: [Building a location to time zone API with SpatiaLite, OpenStreetMap and Datasette](https://simonwillison.net/2017/Dec/12/building-a-location-time-zone-api/) -* 9th December 2017: [Datasette 0.14: customization edition](https://github.com/simonw/datasette/releases/tag/0.14) -* 25th November 2017: [New in Datasette: filters, foreign keys and search](https://simonwillison.net/2017/Nov/25/new-in-datasette/) -* 13th November 2017: [Datasette: instantly create and publish an API for your SQLite databases](https://simonwillison.net/2017/Nov/13/datasette/) - ## Installation - pip3 install datasette +If you are on a Mac, [Homebrew](https://brew.sh/) is the easiest way to install Datasette: + + brew install datasette + +You can also install it using `pip` or `pipx`: + + pip install datasette Datasette requires Python 3.6 or higher. We also have [detailed installation instructions](https://docs.datasette.io/en/stable/installation.html) covering other options such as Docker. From 2c0aca4887ed65167606a5fd084f35d046e2a00a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 15:28:44 -0800 Subject: [PATCH 0088/1455] _header=off option for CSV export, closes #1133 --- datasette/views/base.py | 3 ++- docs/csv_export.rst | 16 ++++++++++++++++ tests/test_csv.py | 8 ++++++++ 3 files changed, 26 insertions(+), 1 deletion(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index b8860b74..76e03206 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -307,7 +307,8 @@ class DataView(BaseView): if not first: data, _, _ = await self.data(request, database, hash, **kwargs) if first: - await writer.writerow(headings) + if request.args.get("_header") != "off": + await writer.writerow(headings) first = False next = data.get("next") for row in data["rows"]: diff --git a/docs/csv_export.rst b/docs/csv_export.rst index 0bda20ef..7f0d8396 100644 --- a/docs/csv_export.rst +++ b/docs/csv_export.rst @@ -28,6 +28,22 @@ file, which looks like this and has the following options: You can try that out on https://latest.datasette.io/fixtures/facetable?_size=4 +.. _csv_export_url_parameters: + +URL parameters +-------------- + +The following options can be used to customize the CSVs returned by Datasette. + +``?_header=off`` + This removes the first row of the CSV file specifying the headings - only the row data will be returned. + +``?_stream=on`` + Stream all matching records, not just the first page of results. See below. 
+ +``?_dl=on`` + Causes Datasette to return a ``content-disposition: attachment; filename="filename.csv"`` header. + Streaming all records --------------------- diff --git a/tests/test_csv.py b/tests/test_csv.py index 0fd665a9..6b17033c 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -64,6 +64,14 @@ def test_table_csv_cors_headers(app_client_with_cors): assert "*" == response.headers["Access-Control-Allow-Origin"] +def test_table_csv_no_header(app_client): + response = app_client.get("/fixtures/simple_primary_key.csv?_header=off") + assert response.status == 200 + assert not response.headers.get("Access-Control-Allow-Origin") + assert "text/plain; charset=utf-8" == response.headers["content-type"] + assert EXPECTED_TABLE_CSV.split("\r\n", 1)[1] == response.text + + def test_table_csv_with_labels(app_client): response = app_client.get("/fixtures/facetable.csv?_labels=1") assert response.status == 200 From 967cc05545480f09d421a7bf8b6dbfc27609a181 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 15:37:08 -0800 Subject: [PATCH 0089/1455] Powered by links to datasette.io, closes #1138 --- datasette/templates/_footer.html | 2 +- datasette/templates/patterns.html | 2 +- setup.py | 2 +- tests/test_html.py | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/datasette/templates/_footer.html b/datasette/templates/_footer.html index f930f445..b1380ae9 100644 --- a/datasette/templates/_footer.html +++ b/datasette/templates/_footer.html @@ -1,4 +1,4 @@ -Powered by Datasette +Powered by Datasette {% if query_ms %}· Query took {{ query_ms|round(3) }}ms{% endif %} {% if metadata %} {% if metadata.license or metadata.license_url %}· Data license: diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html index 4ef2c29f..984c1bf6 100644 --- a/datasette/templates/patterns.html +++ b/datasette/templates/patterns.html @@ -476,7 +476,7 @@

    .ft

    -
    Powered by Datasette +
    Powered by Datasette · Data license: Apache License 2.0 · diff --git a/setup.py b/setup.py index 82696b38..e9eb1597 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,7 @@ setup( long_description_content_type="text/markdown", author="Simon Willison", license="Apache License, Version 2.0", - url="https://github.com/simonw/datasette", + url="https://datasette.io/", project_urls={ "Documentation": "https://docs.datasette.io/en/stable/", "Changelog": "https://docs.datasette.io/en/stable/changelog.html", diff --git a/tests/test_html.py b/tests/test_html.py index b9d3afcd..8b0b1c8d 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1079,7 +1079,7 @@ def assert_footer_links(soup): assert "tests/fixtures.py" == source_link.text.strip() assert "Apache License 2.0" == license_link.text.strip() assert "About Datasette" == about_link.text.strip() - assert "https://github.com/simonw/datasette" == datasette_link["href"] + assert "https://datasette.io/" == datasette_link["href"] assert ( "https://github.com/simonw/datasette/blob/master/tests/fixtures.py" == source_link["href"] @@ -1461,7 +1461,7 @@ def test_base_url_config(app_client_base_url_prefix, path): not href.startswith("#") and href not in { - "https://github.com/simonw/datasette", + "https://datasette.io/", "https://github.com/simonw/datasette/blob/master/LICENSE", "https://github.com/simonw/datasette/blob/master/tests/fixtures.py", "/login-as-root", # Only used for the latest.datasette.io demo From 02bb373194000d2b15f61914e7c5fdb124275bcd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 17:38:16 -0800 Subject: [PATCH 0090/1455] Updated release process --- docs/contributing.rst | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 8cd9c210..24d5c8f0 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -181,17 +181,9 @@ You can generate the list of issue references for a specific release by pasting ), ].sort().join(", "); -For non-bugfix releases you may want to update the news section of ``README.md`` as part of the same commit. +To create the tag for the release, create `a new release `__ on GitHub matching the new version number. You can convert the release notes to Markdown by copying and pasting the rendered HTML into this `Paste to Markdown tool `__. -To tag and push the releaes, run the following:: - - git tag 0.25.2 - git push --tags - -Final steps once the release has deployed to https://pypi.org/project/datasette/ - -* Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases - you can convert the release notes to Markdown by copying and pasting the rendered HTML into this tool: https://euangoddard.github.io/clipboard2markdown/ -* Manually kick off a build of the `stable` branch on Read The Docs: https://readthedocs.org/projects/datasette/builds/ +Finally, post a news item about the release on `datasette.io `__ by editing the `news.yaml `__ file in that site's repository. .. 
_contributing_alpha_beta: From 0c616f732cee79db80cad830917666f41b344262 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 17:44:36 -0800 Subject: [PATCH 0091/1455] Release 0.53 Refs #1132, #1135, #1133, #1138, #1137 --- datasette/version.py | 2 +- docs/changelog.rst | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index b0a59018..a5edecfa 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52.5" +__version__ = "0.53" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index c79e7c86..c570642f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,20 @@ Changelog ========= +.. _v0_53: + +0.53 (2020-12-10) +----------------- + +Datasette has an official project website now, at https://datasette.io/. This release mainly updates the documentation to reflect the new site. + +- New ``?column__arraynotcontains=`` table filter. (`#1132 `__) +- ``datasette serve`` has a new ``--create`` option, which will create blank database files if they do not already exist rather than exiting with an error. (`#1135 `__) +- New ``?_header=off`` option for CSV export which omits the CSV header row, :ref:`documented here `. (`#1133 `__) +- "Powered by Datasette" link in the footer now links to https://datasette.io/. (`#1138 `__) +- Project news no longer lives in the README - it can now be found at https://datasette.io/news. (`#1137 `__) + + .. _v0_52_5: 0.52.5 (2020-12-09) From 6119bd797366a899119f1bba51c1c8cba2efc8fc Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 16 Dec 2020 13:44:39 -0800 Subject: [PATCH 0092/1455] Update pytest requirement from <6.2.0,>=5.2.2 to >=5.2.2,<6.3.0 (#1145) Updates the requirements on [pytest](https://github.com/pytest-dev/pytest) to permit the latest version. 
- [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/5.2.2...6.2.0) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index e9eb1597..be94c1c6 100644 --- a/setup.py +++ b/setup.py @@ -68,7 +68,7 @@ setup( extras_require={ "docs": ["sphinx_rtd_theme", "sphinx-autobuild"], "test": [ - "pytest>=5.2.2,<6.2.0", + "pytest>=5.2.2,<6.3.0", "pytest-asyncio>=0.10,<0.15", "beautifulsoup4>=4.8.1,<4.10.0", "black==20.8b1", From 5e9895c67f08e9f42acedd3d6d29512ac446e15f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 17 Dec 2020 17:01:18 -0800 Subject: [PATCH 0093/1455] Database(memory_name=) for shared in-memory databases, closes #1151 --- datasette/database.py | 24 +++++++++++++++++++-- docs/internals.rst | 37 +++++++++++++++++++++++++++++--- tests/test_internals_database.py | 30 ++++++++++++++++++++++++++ 3 files changed, 86 insertions(+), 5 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 412e0c59..a977b362 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -24,11 +24,18 @@ connections = threading.local() class Database: - def __init__(self, ds, path=None, is_mutable=False, is_memory=False): + def __init__( + self, ds, path=None, is_mutable=False, is_memory=False, memory_name=None + ): self.ds = ds self.path = path self.is_mutable = is_mutable self.is_memory = is_memory + self.memory_name = memory_name + if memory_name is not None: + self.path = memory_name + self.is_memory = True + self.is_mutable = True self.hash = None self.cached_size = None self.cached_table_counts = None @@ -46,6 +53,16 @@ class Database: } def connect(self, write=False): + if self.memory_name: + uri = "file:{}?mode=memory&cache=shared".format(self.memory_name) + conn = sqlite3.connect( + uri, + uri=True, + check_same_thread=False, + ) + if not write: + conn.execute("PRAGMA query_only=1") + return conn if self.is_memory: return sqlite3.connect(":memory:") # mode=ro or immutable=1? @@ -215,7 +232,10 @@ class Database: @property def name(self): if self.is_memory: - return ":memory:" + if self.memory_name: + return ":memory:{}".format(self.memory_name) + else: + return ":memory:" else: return Path(self.path).stem diff --git a/docs/internals.rst b/docs/internals.rst index ff566f69..b68a1d8a 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -270,11 +270,16 @@ The ``db`` parameter should be an instance of the ``datasette.database.Database` This will add a mutable database from the provided file path. -The ``Database()`` constructor takes four arguments: the first is the ``datasette`` instance you are attaching to, the second is a ``path=``, then ``is_mutable`` and ``is_memory`` are both optional arguments. +To create a shared in-memory database named ``statistics``, use the following: -Use ``is_mutable`` if it is possible that updates will be made to that database - otherwise Datasette will open it in immutable mode and any changes could cause undesired behavior. +.. code-block:: python -Use ``is_memory`` if the connection is to an in-memory SQLite database. + from datasette.database import Database + + datasette.add_database("statistics", Database( + datasette, + memory_name="statistics" + )) .. 
_datasette_remove_database: @@ -480,6 +485,32 @@ Database class Instances of the ``Database`` class can be used to execute queries against attached SQLite databases, and to run introspection against their schemas. +.. _database_constructor: + +Database(ds, path=None, is_mutable=False, is_memory=False, memory_name=None) +---------------------------------------------------------------------------- + +The ``Database()`` constructor can be used by plugins, in conjunction with :ref:`datasette_add_database`, to create and register new databases. + +The arguments are as follows: + +``ds`` - :ref:`internals_datasette` (required) + The Datasette instance you are attaching this database to. + +``path`` - string + Path to a SQLite database file on disk. + +``is_mutable`` - boolean + Set this to ``True`` if it is possible that updates will be made to that database - otherwise Datasette will open it in immutable mode and any changes could cause undesired behavior. + +``is_memory`` - boolean + Use this to create non-shared memory connections. + +``memory_name`` - string or ``None`` + Use this to create a named in-memory database. Unlike regular memory databases these can be accessed by multiple threads and will persist an changes made to them for the lifetime of the Datasette server process. + +The first argument is the ``datasette`` instance you are attaching to, the second is a ``path=``, then ``is_mutable`` and ``is_memory`` are both optional arguments. + .. _database_execute: await db.execute(sql, ...) diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 49b8a1b3..dc1af48c 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -464,3 +464,33 @@ def test_mtime_ns_is_none_for_memory(app_client): def test_is_mutable(app_client): assert Database(app_client.ds, is_memory=True, is_mutable=True).is_mutable is True assert Database(app_client.ds, is_memory=True, is_mutable=False).is_mutable is False + + +@pytest.mark.asyncio +async def test_database_memory_name(app_client): + ds = app_client.ds + foo1 = Database(ds, memory_name="foo") + foo2 = Database(ds, memory_name="foo") + bar1 = Database(ds, memory_name="bar") + bar2 = Database(ds, memory_name="bar") + for db in (foo1, foo2, bar1, bar2): + table_names = await db.table_names() + assert table_names == [] + # Now create a table in foo + await foo1.execute_write("create table foo (t text)", block=True) + assert await foo1.table_names() == ["foo"] + assert await foo2.table_names() == ["foo"] + assert await bar1.table_names() == [] + assert await bar2.table_names() == [] + + +@pytest.mark.asyncio +async def test_in_memory_databases_forbid_writes(app_client): + ds = app_client.ds + db = Database(ds, memory_name="test") + with pytest.raises(sqlite3.OperationalError): + await db.execute("create table foo (t text)") + assert await db.table_names() == [] + # Using db.execute_write() should work: + await db.execute_write("create table foo (t text)", block=True) + assert await db.table_names() == ["foo"] From ebc7aa287c99fe6114b79aeab8efb8d4489a6182 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 18 Dec 2020 14:34:05 -0800 Subject: [PATCH 0094/1455] In-memory _schemas database tracking schemas of attached tables, closes #1150 --- datasette/app.py | 39 +++++++- datasette/cli.py | 3 + datasette/default_permissions.py | 2 + datasette/utils/__init__.py | 7 +- datasette/utils/schemas.py | 162 +++++++++++++++++++++++++++++++ datasette/views/base.py | 2 + tests/test_plugins.py | 2 +- 
tests/test_schemas.py | 68 +++++++++++++ 8 files changed, 279 insertions(+), 6 deletions(-) create mode 100644 datasette/utils/schemas.py create mode 100644 tests/test_schemas.py diff --git a/datasette/app.py b/datasette/app.py index 9bc84df0..cc8506e2 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -74,6 +74,7 @@ from .utils.asgi import ( asgi_send_json, asgi_send_redirect, ) +from .utils.schemas import init_schemas, populate_schema_tables from .utils.sqlite import ( sqlite3, using_pysqlite3, @@ -222,6 +223,11 @@ class Datasette: elif memory: self.files = (MEMORY,) + self.files self.databases = collections.OrderedDict() + # memory_name is a random string so that each Datasette instance gets its own + # unique in-memory named database - otherwise unit tests can fail with weird + # errors when different instances accidentally share an in-memory database + self.add_database("_schemas", Database(self, memory_name=secrets.token_hex())) + self._schemas_created = False for file in self.files: path = file is_memory = False @@ -326,6 +332,33 @@ class Datasette: self._root_token = secrets.token_hex(32) self.client = DatasetteClient(self) + async def refresh_schemas(self): + schema_db = self.databases["_schemas"] + if not self._schemas_created: + await init_schemas(schema_db) + self._schemas_created = True + + current_schema_versions = { + row["database_name"]: row["schema_version"] + for row in await schema_db.execute( + "select database_name, schema_version from databases" + ) + } + for database_name, db in self.databases.items(): + schema_version = (await db.execute("PRAGMA schema_version")).first()[0] + # Compare schema versions to see if we should skip it + if schema_version == current_schema_versions.get(database_name): + continue + await schema_db.execute_write( + """ + INSERT OR REPLACE INTO databases (database_name, path, is_memory, schema_version) + VALUES (?, ?, ?, ?) 
+ """, + [database_name, db.path, db.is_memory, schema_version], + block=True, + ) + await populate_schema_tables(schema_db, db) + @property def urls(self): return Urls(self) @@ -342,7 +375,8 @@ class Datasette: def get_database(self, name=None): if name is None: - return next(iter(self.databases.values())) + # Return first no-_schemas database + name = [key for key in self.databases.keys() if key != "_schemas"][0] return self.databases[name] def add_database(self, name, db): @@ -590,7 +624,8 @@ class Datasette: "is_memory": d.is_memory, "hash": d.hash, } - for d in sorted(self.databases.values(), key=lambda d: d.name) + for name, d in sorted(self.databases.items(), key=lambda p: p[1].name) + if name != "_schemas" ] def _versions(self): diff --git a/datasette/cli.py b/datasette/cli.py index 32408d23..50367fb3 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -134,6 +134,9 @@ async def inspect_(files, sqlite_extensions): app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions) data = {} for name, database in app.databases.items(): + if name == "_schemas": + # Don't include the in-memory _schemas database + continue counts = await database.table_counts(limit=3600 * 1000) data[name] = { "hash": database.hash, diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 9f1d9c62..62cab83a 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -13,6 +13,8 @@ def permission_allowed(datasette, actor, action, resource): if allow is not None: return actor_matches_allow(actor, allow) elif action == "view-database": + if resource == "_schemas" and (actor is None or actor.get("id") != "root"): + return False database_allow = datasette.metadata("allow", database=resource) if database_allow is None: return None diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 2576090a..ac1d82f7 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1024,11 +1024,12 @@ def find_spatialite(): async def initial_path_for_datasette(datasette): "Return suggested path for opening this Datasette, based on number of DBs and tables" - if len(datasette.databases) == 1: - db_name = next(iter(datasette.databases.keys())) + databases = dict([p for p in datasette.databases.items() if p[0] != "_schemas"]) + if len(databases) == 1: + db_name = next(iter(databases.keys())) path = datasette.urls.database(db_name) # Does this DB only have one table? 
- db = next(iter(datasette.databases.values())) + db = next(iter(databases.values())) tables = await db.table_names() if len(tables) == 1: path = datasette.urls.table(db_name, tables[0]) diff --git a/datasette/utils/schemas.py b/datasette/utils/schemas.py new file mode 100644 index 00000000..4612e236 --- /dev/null +++ b/datasette/utils/schemas.py @@ -0,0 +1,162 @@ +async def init_schemas(db): + await db.execute_write( + """ + CREATE TABLE databases ( + "database_name" TEXT PRIMARY KEY, + "path" TEXT, + "is_memory" INTEGER, + "schema_version" INTEGER + ) + """, + block=True, + ) + await db.execute_write( + """ + CREATE TABLE tables ( + "database_name" TEXT, + "table_name" TEXT, + "rootpage" INTEGER, + "sql" TEXT, + PRIMARY KEY (database_name, table_name) + ) + """, + block=True, + ) + await db.execute_write( + """ + CREATE TABLE columns ( + "database_name" TEXT, + "table_name" TEXT, + "cid" INTEGER, + "name" TEXT, + "type" TEXT, + "notnull" INTEGER, + "default_value" TEXT, -- renamed from dflt_value + "is_pk" INTEGER, -- renamed from pk + "hidden" INTEGER, + PRIMARY KEY (database_name, table_name, name) + ) + """, + block=True, + ) + await db.execute_write( + """ + CREATE TABLE indexes ( + "database_name" TEXT, + "table_name" TEXT, + "seq" INTEGER, + "name" TEXT, + "unique" INTEGER, + "origin" TEXT, + "partial" INTEGER, + PRIMARY KEY (database_name, table_name, name) + ) + """, + block=True, + ) + await db.execute_write( + """ + CREATE TABLE foreign_keys ( + "database_name" TEXT, + "table_name" TEXT, + "id" INTEGER, + "seq" INTEGER, + "table" TEXT, + "from" TEXT, + "to" TEXT, + "on_update" TEXT, + "on_delete" TEXT, + "match" TEXT + ) + """, + block=True, + ) + + +async def populate_schema_tables(schema_db, db): + database_name = db.name + await schema_db.execute_write( + "delete from tables where database_name = ?", [database_name], block=True + ) + tables = (await db.execute("select * from sqlite_master where type = 'table'")).rows + for table in tables: + table_name = table["name"] + await schema_db.execute_write( + """ + insert into tables (database_name, table_name, rootpage, sql) + values (?, ?, ?, ?) + """, + [database_name, table_name, table["rootpage"], table["sql"]], + block=True, + ) + # And the columns + await schema_db.execute_write( + "delete from columns where database_name = ? and table_name = ?", + [database_name, table_name], + block=True, + ) + columns = await db.table_column_details(table_name) + for column in columns: + params = { + **{"database_name": database_name, "table_name": table_name}, + **column._asdict(), + } + await schema_db.execute_write( + """ + insert into columns ( + database_name, table_name, cid, name, type, "notnull", default_value, is_pk, hidden + ) VALUES ( + :database_name, :table_name, :cid, :name, :type, :notnull, :default_value, :is_pk, :hidden + ) + """, + params, + block=True, + ) + # And the foreign_keys + await schema_db.execute_write( + "delete from foreign_keys where database_name = ? 
and table_name = ?", + [database_name, table_name], + block=True, + ) + foreign_keys = ( + await db.execute(f"PRAGMA foreign_key_list([{table_name}])") + ).rows + for foreign_key in foreign_keys: + params = { + **{"database_name": database_name, "table_name": table_name}, + **dict(foreign_key), + } + await schema_db.execute_write( + """ + insert into foreign_keys ( + database_name, table_name, "id", seq, "table", "from", "to", on_update, on_delete, match + ) VALUES ( + :database_name, :table_name, :id, :seq, :table, :from, :to, :on_update, :on_delete, :match + ) + """, + params, + block=True, + ) + # And the indexes + await schema_db.execute_write( + "delete from indexes where database_name = ? and table_name = ?", + [database_name, table_name], + block=True, + ) + indexes = (await db.execute(f"PRAGMA index_list([{table_name}])")).rows + for index in indexes: + params = { + **{"database_name": database_name, "table_name": table_name}, + **dict(index), + } + await schema_db.execute_write( + """ + insert into indexes ( + database_name, table_name, seq, name, "unique", origin, partial + ) VALUES ( + :database_name, :table_name, :seq, :name, :unique, :origin, :partial + ) + """, + params, + block=True, + ) diff --git a/datasette/views/base.py b/datasette/views/base.py index 76e03206..73bf9459 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -115,6 +115,8 @@ class BaseView: return Response.text("Method not allowed", status=405) async def dispatch_request(self, request, *args, **kwargs): + if self.ds: + await self.ds.refresh_schemas() handler = getattr(self, request.method.lower(), None) return await handler(request, *args, **kwargs) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 93b444ab..61e7d4b5 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -293,7 +293,7 @@ def test_hook_extra_body_script(app_client, path, expected_extra_body_script): def test_hook_asgi_wrapper(app_client): response = app_client.get("/fixtures") - assert "fixtures" == response.headers["x-databases"] + assert "_schemas, fixtures" == response.headers["x-databases"] def test_hook_extra_template_vars(restore_working_directory): diff --git a/tests/test_schemas.py b/tests/test_schemas.py new file mode 100644 index 00000000..87656784 --- /dev/null +++ b/tests/test_schemas.py @@ -0,0 +1,68 @@ +from .fixtures import app_client +import pytest + + +def test_schemas_only_available_to_root(app_client): + cookie = app_client.actor_cookie({"id": "root"}) + assert app_client.get("/_schemas").status == 403 + assert app_client.get("/_schemas", cookies={"ds_actor": cookie}).status == 200 + + +def test_schemas_databases(app_client): + cookie = app_client.actor_cookie({"id": "root"}) + databases = app_client.get( + "/_schemas/databases.json?_shape=array", cookies={"ds_actor": cookie} + ).json + assert len(databases) == 2 + assert databases[0]["database_name"] == "_schemas" + assert databases[1]["database_name"] == "fixtures" + + +def test_schemas_tables(app_client): + cookie = app_client.actor_cookie({"id": "root"}) + tables = app_client.get( + "/_schemas/tables.json?_shape=array", cookies={"ds_actor": cookie} + ).json + assert len(tables) > 5 + table = tables[0] + assert set(table.keys()) == {"rootpage", "table_name", "database_name", "sql"} + + +def test_schemas_indexes(app_client): + cookie = app_client.actor_cookie({"id": "root"}) + indexes = app_client.get( + "/_schemas/indexes.json?_shape=array", cookies={"ds_actor": cookie} + ).json + assert len(indexes) > 5 + index = indexes[0] + 
assert set(index.keys()) == { + "partial", + "name", + "table_name", + "unique", + "seq", + "database_name", + "origin", + } + + +def test_schemas_foreign_keys(app_client): + cookie = app_client.actor_cookie({"id": "root"}) + foreign_keys = app_client.get( + "/_schemas/foreign_keys.json?_shape=array", cookies={"ds_actor": cookie} + ).json + assert len(foreign_keys) > 5 + foreign_key = foreign_keys[0] + assert set(foreign_key.keys()) == { + "table", + "seq", + "on_update", + "on_delete", + "to", + "rowid", + "id", + "match", + "database_name", + "table_name", + "from", + } From dcdfb2c301341d45b66683e3e3be72f9c7585b2f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 21 Dec 2020 11:48:06 -0800 Subject: [PATCH 0095/1455] Rename _schemas to _internal, closes #1156 --- datasette/app.py | 39 +++++++------------ datasette/cli.py | 4 +- datasette/default_permissions.py | 2 +- datasette/utils/__init__.py | 2 +- .../utils/{schemas.py => internal_db.py} | 20 +++++----- .../{test_schemas.py => test_internal_db.py} | 24 ++++++------ tests/test_plugins.py | 2 +- 7 files changed, 42 insertions(+), 51 deletions(-) rename datasette/utils/{schemas.py => internal_db.py} (91%) rename tests/{test_schemas.py => test_internal_db.py} (63%) diff --git a/datasette/app.py b/datasette/app.py index cc8506e2..f995e79d 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -74,7 +74,7 @@ from .utils.asgi import ( asgi_send_json, asgi_send_redirect, ) -from .utils.schemas import init_schemas, populate_schema_tables +from .utils.internal_db import init_internal_db, populate_schema_tables from .utils.sqlite import ( sqlite3, using_pysqlite3, @@ -85,8 +85,6 @@ from .version import __version__ app_root = Path(__file__).parent.parent -MEMORY = object() - Setting = collections.namedtuple("Setting", ("name", "default", "help")) SETTINGS = ( Setting("default_page_size", 100, "Default page size for the table view"), @@ -218,24 +216,17 @@ class Datasette: ] self.inspect_data = inspect_data self.immutables = set(immutables or []) - if not self.files: - self.files = [MEMORY] - elif memory: - self.files = (MEMORY,) + self.files self.databases = collections.OrderedDict() + if memory or not self.files: + self.add_database(":memory:", Database(self, ":memory:", is_memory=True)) # memory_name is a random string so that each Datasette instance gets its own # unique in-memory named database - otherwise unit tests can fail with weird # errors when different instances accidentally share an in-memory database - self.add_database("_schemas", Database(self, memory_name=secrets.token_hex())) - self._schemas_created = False + self.add_database("_internal", Database(self, memory_name=secrets.token_hex())) + self._interna_db_created = False for file in self.files: path = file - is_memory = False - if file is MEMORY: - path = None - is_memory = True - is_mutable = path not in self.immutables - db = Database(self, path, is_mutable=is_mutable, is_memory=is_memory) + db = Database(self, path, is_mutable=path not in self.immutables) if db.name in self.databases: raise Exception(f"Multiple files with same stem: {db.name}") self.add_database(db.name, db) @@ -333,14 +324,14 @@ class Datasette: self.client = DatasetteClient(self) async def refresh_schemas(self): - schema_db = self.databases["_schemas"] - if not self._schemas_created: - await init_schemas(schema_db) - self._schemas_created = True + internal_db = self.databases["_internal"] + if not self._interna_db_created: + await init_internal_db(internal_db) + self._interna_db_created = True 
current_schema_versions = { row["database_name"]: row["schema_version"] - for row in await schema_db.execute( + for row in await internal_db.execute( "select database_name, schema_version from databases" ) } @@ -349,7 +340,7 @@ class Datasette: # Compare schema versions to see if we should skip it if schema_version == current_schema_versions.get(database_name): continue - await schema_db.execute_write( + await internal_db.execute_write( """ INSERT OR REPLACE INTO databases (database_name, path, is_memory, schema_version) VALUES (?, ?, ?, ?) @@ -357,7 +348,7 @@ class Datasette: [database_name, db.path, db.is_memory, schema_version], block=True, ) - await populate_schema_tables(schema_db, db) + await populate_schema_tables(internal_db, db) @property def urls(self): @@ -376,7 +367,7 @@ class Datasette: def get_database(self, name=None): if name is None: # Return first no-_schemas database - name = [key for key in self.databases.keys() if key != "_schemas"][0] + name = [key for key in self.databases.keys() if key != "_internal"][0] return self.databases[name] def add_database(self, name, db): @@ -625,7 +616,7 @@ class Datasette: "hash": d.hash, } for name, d in sorted(self.databases.items(), key=lambda p: p[1].name) - if name != "_schemas" + if name != "_internal" ] def _versions(self): diff --git a/datasette/cli.py b/datasette/cli.py index 50367fb3..c342a35a 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -134,8 +134,8 @@ async def inspect_(files, sqlite_extensions): app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions) data = {} for name, database in app.databases.items(): - if name == "_schemas": - # Don't include the in-memory _schemas database + if name == "_internal": + # Don't include the in-memory _internal database continue counts = await database.table_counts(limit=3600 * 1000) data[name] = { diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 62cab83a..b58d8d1b 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -13,7 +13,7 @@ def permission_allowed(datasette, actor, action, resource): if allow is not None: return actor_matches_allow(actor, allow) elif action == "view-database": - if resource == "_schemas" and (actor is None or actor.get("id") != "root"): + if resource == "_internal" and (actor is None or actor.get("id") != "root"): return False database_allow = datasette.metadata("allow", database=resource) if database_allow is None: diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index ac1d82f7..34ee4630 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1024,7 +1024,7 @@ def find_spatialite(): async def initial_path_for_datasette(datasette): "Return suggested path for opening this Datasette, based on number of DBs and tables" - databases = dict([p for p in datasette.databases.items() if p[0] != "_schemas"]) + databases = dict([p for p in datasette.databases.items() if p[0] != "_internal"]) if len(databases) == 1: db_name = next(iter(databases.keys())) path = datasette.urls.database(db_name) diff --git a/datasette/utils/schemas.py b/datasette/utils/internal_db.py similarity index 91% rename from datasette/utils/schemas.py rename to datasette/utils/internal_db.py index 4612e236..a60fe1fe 100644 --- a/datasette/utils/schemas.py +++ b/datasette/utils/internal_db.py @@ -1,4 +1,4 @@ -async def init_schemas(db): +async def init_internal_db(db): await db.execute_write( """ CREATE TABLE databases ( @@ -73,15 +73,15 @@ async def 
init_schemas(db): ) -async def populate_schema_tables(schema_db, db): +async def populate_schema_tables(internal_db, db): database_name = db.name - await schema_db.execute_write( + await internal_db.execute_write( "delete from tables where database_name = ?", [database_name], block=True ) tables = (await db.execute("select * from sqlite_master where type = 'table'")).rows for table in tables: table_name = table["name"] - await schema_db.execute_write( + await internal_db.execute_write( """ insert into tables (database_name, table_name, rootpage, sql) values (?, ?, ?, ?) @@ -90,7 +90,7 @@ async def populate_schema_tables(schema_db, db): block=True, ) # And the columns - await schema_db.execute_write( + await internal_db.execute_write( "delete from columns where database_name = ? and table_name = ?", [database_name, table_name], block=True, @@ -101,7 +101,7 @@ async def populate_schema_tables(schema_db, db): **{"database_name": database_name, "table_name": table_name}, **column._asdict(), } - await schema_db.execute_write( + await internal_db.execute_write( """ insert into columns ( database_name, table_name, cid, name, type, "notnull", default_value, is_pk, hidden @@ -113,7 +113,7 @@ async def populate_schema_tables(schema_db, db): block=True, ) # And the foreign_keys - await schema_db.execute_write( + await internal_db.execute_write( "delete from foreign_keys where database_name = ? and table_name = ?", [database_name, table_name], block=True, @@ -126,7 +126,7 @@ async def populate_schema_tables(schema_db, db): **{"database_name": database_name, "table_name": table_name}, **dict(foreign_key), } - await schema_db.execute_write( + await internal_db.execute_write( """ insert into foreign_keys ( database_name, table_name, "id", seq, "table", "from", "to", on_update, on_delete, match @@ -138,7 +138,7 @@ async def populate_schema_tables(schema_db, db): block=True, ) # And the indexes - await schema_db.execute_write( + await internal_db.execute_write( "delete from indexes where database_name = ? 
and table_name = ?", [database_name, table_name], block=True, @@ -149,7 +149,7 @@ async def populate_schema_tables(schema_db, db): **{"database_name": database_name, "table_name": table_name}, **dict(index), } - await schema_db.execute_write( + await internal_db.execute_write( """ insert into indexes ( database_name, table_name, seq, name, "unique", origin, partial diff --git a/tests/test_schemas.py b/tests/test_internal_db.py similarity index 63% rename from tests/test_schemas.py rename to tests/test_internal_db.py index 87656784..9349fa3c 100644 --- a/tests/test_schemas.py +++ b/tests/test_internal_db.py @@ -2,36 +2,36 @@ from .fixtures import app_client import pytest -def test_schemas_only_available_to_root(app_client): +def test_internal_only_available_to_root(app_client): cookie = app_client.actor_cookie({"id": "root"}) - assert app_client.get("/_schemas").status == 403 - assert app_client.get("/_schemas", cookies={"ds_actor": cookie}).status == 200 + assert app_client.get("/_internal").status == 403 + assert app_client.get("/_internal", cookies={"ds_actor": cookie}).status == 200 -def test_schemas_databases(app_client): +def test_internal_databases(app_client): cookie = app_client.actor_cookie({"id": "root"}) databases = app_client.get( - "/_schemas/databases.json?_shape=array", cookies={"ds_actor": cookie} + "/_internal/databases.json?_shape=array", cookies={"ds_actor": cookie} ).json assert len(databases) == 2 - assert databases[0]["database_name"] == "_schemas" + assert databases[0]["database_name"] == "_internal" assert databases[1]["database_name"] == "fixtures" -def test_schemas_tables(app_client): +def test_internal_tables(app_client): cookie = app_client.actor_cookie({"id": "root"}) tables = app_client.get( - "/_schemas/tables.json?_shape=array", cookies={"ds_actor": cookie} + "/_internal/tables.json?_shape=array", cookies={"ds_actor": cookie} ).json assert len(tables) > 5 table = tables[0] assert set(table.keys()) == {"rootpage", "table_name", "database_name", "sql"} -def test_schemas_indexes(app_client): +def test_internal_indexes(app_client): cookie = app_client.actor_cookie({"id": "root"}) indexes = app_client.get( - "/_schemas/indexes.json?_shape=array", cookies={"ds_actor": cookie} + "/_internal/indexes.json?_shape=array", cookies={"ds_actor": cookie} ).json assert len(indexes) > 5 index = indexes[0] @@ -46,10 +46,10 @@ def test_schemas_indexes(app_client): } -def test_schemas_foreign_keys(app_client): +def test_internal_foreign_keys(app_client): cookie = app_client.actor_cookie({"id": "root"}) foreign_keys = app_client.get( - "/_schemas/foreign_keys.json?_shape=array", cookies={"ds_actor": cookie} + "/_internal/foreign_keys.json?_shape=array", cookies={"ds_actor": cookie} ).json assert len(foreign_keys) > 5 foreign_key = foreign_keys[0] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 61e7d4b5..8063460b 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -293,7 +293,7 @@ def test_hook_extra_body_script(app_client, path, expected_extra_body_script): def test_hook_asgi_wrapper(app_client): response = app_client.get("/fixtures") - assert "_schemas, fixtures" == response.headers["x-databases"] + assert "_internal, fixtures" == response.headers["x-databases"] def test_hook_extra_template_vars(restore_working_directory): From 810853c5f2fa560c6d303331c037f6443c145930 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 21 Dec 2020 13:49:14 -0800 Subject: [PATCH 0096/1455] Use time.perf_counter() instead of time.time(), closes #1157 --- 
datasette/tracer.py | 8 ++++---- datasette/utils/__init__.py | 4 ++-- datasette/views/base.py | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/datasette/tracer.py b/datasette/tracer.py index 8f666767..772f0405 100644 --- a/datasette/tracer.py +++ b/datasette/tracer.py @@ -37,9 +37,9 @@ def trace(type, **kwargs): if tracer is None: yield return - start = time.time() + start = time.perf_counter() yield - end = time.time() + end = time.perf_counter() trace_info = { "type": type, "start": start, @@ -74,7 +74,7 @@ class AsgiTracer: if b"_trace=1" not in scope.get("query_string", b"").split(b"&"): await self.app(scope, receive, send) return - trace_start = time.time() + trace_start = time.perf_counter() traces = [] accumulated_body = b"" @@ -109,7 +109,7 @@ class AsgiTracer: # We have all the body - modify it and send the result # TODO: What to do about Content-Type or other cases? trace_info = { - "request_duration_ms": 1000 * (time.time() - trace_start), + "request_duration_ms": 1000 * (time.perf_counter() - trace_start), "sum_trace_duration_ms": sum(t["duration_ms"] for t in traces), "num_traces": len(traces), "traces": traces, diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 34ee4630..0d45e11a 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -138,7 +138,7 @@ class CustomJSONEncoder(json.JSONEncoder): @contextmanager def sqlite_timelimit(conn, ms): - deadline = time.time() + (ms / 1000) + deadline = time.perf_counter() + (ms / 1000) # n is the number of SQLite virtual machine instructions that will be # executed between each check. It's hard to know what to pick here. # After some experimentation, I've decided to go with 1000 by default and @@ -148,7 +148,7 @@ def sqlite_timelimit(conn, ms): n = 1 def handler(): - if time.time() >= deadline: + if time.perf_counter() >= deadline: return 1 conn.set_progress_handler(handler, n) diff --git a/datasette/views/base.py b/datasette/views/base.py index 73bf9459..8a64f88e 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -425,7 +425,7 @@ class DataView(BaseView): kwargs["default_labels"] = True extra_template_data = {} - start = time.time() + start = time.perf_counter() status_code = 200 templates = [] try: @@ -457,7 +457,7 @@ class DataView(BaseView): except DatasetteError: raise - end = time.time() + end = time.perf_counter() data["query_ms"] = (end - start) * 1000 for key in ("source", "source_url", "license", "license_url"): value = self.ds.metadata(key) From bc1f1e1ce8562872b7532a167873193e787cef20 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 22 Dec 2020 11:04:29 -0800 Subject: [PATCH 0097/1455] Compound primary key for foreign_keys table in _internal --- datasette/utils/internal_db.py | 3 ++- tests/test_internal_db.py | 1 - 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py index a60fe1fe..959f422e 100644 --- a/datasette/utils/internal_db.py +++ b/datasette/utils/internal_db.py @@ -66,7 +66,8 @@ async def init_internal_db(db): "to" TEXT, "on_update" TEXT, "on_delete" TEXT, - "match" TEXT + "match" TEXT, + PRIMARY KEY (database_name, table_name, id, seq) ) """, block=True, diff --git a/tests/test_internal_db.py b/tests/test_internal_db.py index 9349fa3c..755ddae5 100644 --- a/tests/test_internal_db.py +++ b/tests/test_internal_db.py @@ -59,7 +59,6 @@ def test_internal_foreign_keys(app_client): "on_update", "on_delete", "to", - "rowid", "id", "match", 
"database_name", From 270de6527bc2afb8c5996c400099321c320ded31 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 22 Dec 2020 11:48:54 -0800 Subject: [PATCH 0098/1455] Foreign keys for _internal database Refs #1099 - Datasette now uses compound foreign keys internally, so it would be great to link them correctly. --- datasette/utils/internal_db.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py index 959f422e..5cd32381 100644 --- a/datasette/utils/internal_db.py +++ b/datasette/utils/internal_db.py @@ -17,7 +17,8 @@ async def init_internal_db(db): "table_name" TEXT, "rootpage" INTEGER, "sql" TEXT, - PRIMARY KEY (database_name, table_name) + PRIMARY KEY (database_name, table_name), + FOREIGN KEY (database_name) REFERENCES databases(database_name) ) """, block=True, @@ -34,7 +35,9 @@ async def init_internal_db(db): "default_value" TEXT, -- renamed from dflt_value "is_pk" INTEGER, -- renamed from pk "hidden" INTEGER, - PRIMARY KEY (database_name, table_name, name) + PRIMARY KEY (database_name, table_name, name), + FOREIGN KEY (database_name) REFERENCES databases(database_name), + FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) ) """, block=True, @@ -49,7 +52,9 @@ async def init_internal_db(db): "unique" INTEGER, "origin" TEXT, "partial" INTEGER, - PRIMARY KEY (database_name, table_name, name) + PRIMARY KEY (database_name, table_name, name), + FOREIGN KEY (database_name) REFERENCES databases(database_name), + FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) ) """, block=True, @@ -67,7 +72,9 @@ async def init_internal_db(db): "on_update" TEXT, "on_delete" TEXT, "match" TEXT, - PRIMARY KEY (database_name, table_name, id, seq) + PRIMARY KEY (database_name, table_name, id, seq), + FOREIGN KEY (database_name) REFERENCES databases(database_name), + FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) ) """, block=True, From 8919f99c2f7f245aca7f94bd53d5ac9d04aa42b5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 22 Dec 2020 12:04:18 -0800 Subject: [PATCH 0099/1455] Improved .add_database() method design Closes #1155 - _internal now has a sensible name Closes #509 - Support opening multiple databases with the same stem --- datasette/app.py | 34 +++++++++++++++++--------- datasette/database.py | 42 +++++++++++++++++--------------- docs/internals.rst | 29 ++++++++++++++-------- tests/test_cli.py | 15 ++++++++++++ tests/test_internals_database.py | 12 ++++----- 5 files changed, 86 insertions(+), 46 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f995e79d..ad3ba07e 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -218,18 +218,18 @@ class Datasette: self.immutables = set(immutables or []) self.databases = collections.OrderedDict() if memory or not self.files: - self.add_database(":memory:", Database(self, ":memory:", is_memory=True)) + self.add_database(Database(self, is_memory=True), name=":memory:") # memory_name is a random string so that each Datasette instance gets its own # unique in-memory named database - otherwise unit tests can fail with weird # errors when different instances accidentally share an in-memory database - self.add_database("_internal", Database(self, memory_name=secrets.token_hex())) - self._interna_db_created = False + self.add_database( + Database(self, memory_name=secrets.token_hex()), name="_internal" + ) + self.internal_db_created = 
False for file in self.files: - path = file - db = Database(self, path, is_mutable=path not in self.immutables) - if db.name in self.databases: - raise Exception(f"Multiple files with same stem: {db.name}") - self.add_database(db.name, db) + self.add_database( + Database(self, file, is_mutable=file not in self.immutables) + ) self.cache_headers = cache_headers self.cors = cors metadata_files = [] @@ -325,9 +325,9 @@ class Datasette: async def refresh_schemas(self): internal_db = self.databases["_internal"] - if not self._interna_db_created: + if not self.internal_db_created: await init_internal_db(internal_db) - self._interna_db_created = True + self.internal_db_created = True current_schema_versions = { row["database_name"]: row["schema_version"] @@ -370,8 +370,20 @@ class Datasette: name = [key for key in self.databases.keys() if key != "_internal"][0] return self.databases[name] - def add_database(self, name, db): + def add_database(self, db, name=None): + if name is None: + # Pick a unique name for this database + suggestion = db.suggest_name() + name = suggestion + else: + suggestion = name + i = 2 + while name in self.databases: + name = "{}_{}".format(suggestion, i) + i += 1 + db.name = name self.databases[name] = db + return db def remove_database(self, name): self.databases.pop(name) diff --git a/datasette/database.py b/datasette/database.py index a977b362..cda36e6e 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -27,30 +27,44 @@ class Database: def __init__( self, ds, path=None, is_mutable=False, is_memory=False, memory_name=None ): + self.name = None self.ds = ds self.path = path self.is_mutable = is_mutable self.is_memory = is_memory self.memory_name = memory_name if memory_name is not None: - self.path = memory_name self.is_memory = True self.is_mutable = True self.hash = None self.cached_size = None - self.cached_table_counts = None + self._cached_table_counts = None self._write_thread = None self._write_queue = None if not self.is_mutable and not self.is_memory: p = Path(path) self.hash = inspect_hash(p) self.cached_size = p.stat().st_size - # Maybe use self.ds.inspect_data to populate cached_table_counts - if self.ds.inspect_data and self.ds.inspect_data.get(self.name): - self.cached_table_counts = { - key: value["count"] - for key, value in self.ds.inspect_data[self.name]["tables"].items() - } + + @property + def cached_table_counts(self): + if self._cached_table_counts is not None: + return self._cached_table_counts + # Maybe use self.ds.inspect_data to populate cached_table_counts + if self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self._cached_table_counts = { + key: value["count"] + for key, value in self.ds.inspect_data[self.name]["tables"].items() + } + return self._cached_table_counts + + def suggest_name(self): + if self.path: + return Path(self.path).stem + elif self.memory_name: + return self.memory_name + else: + return "db" def connect(self, write=False): if self.memory_name: @@ -220,7 +234,7 @@ class Database: except (QueryInterrupted, sqlite3.OperationalError, sqlite3.DatabaseError): counts[table] = None if not self.is_mutable: - self.cached_table_counts = counts + self._cached_table_counts = counts return counts @property @@ -229,16 +243,6 @@ class Database: return None return Path(self.path).stat().st_mtime_ns - @property - def name(self): - if self.is_memory: - if self.memory_name: - return ":memory:{}".format(self.memory_name) - else: - return ":memory:" - else: - return Path(self.path).stem - async def 
table_exists(self, table): results = await self.execute( "select 1 from sqlite_master where type='table' and name=?", params=(table,) diff --git a/docs/internals.rst b/docs/internals.rst index b68a1d8a..05cb8bd7 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -245,16 +245,16 @@ Returns the specified database object. Raises a ``KeyError`` if the database doe .. _datasette_add_database: -.add_database(name, db) ------------------------ - -``name`` - string - The unique name to use for this database. Also used in the URL. +.add_database(db, name=None) +---------------------------- ``db`` - datasette.database.Database instance The database to be attached. -The ``datasette.add_database(name, db)`` method lets you add a new database to the current Datasette instance. This database will then be served at URL path that matches the ``name`` parameter, e.g. ``/mynewdb/``. +``name`` - string, optional + The name to be used for this database - this will be used in the URL path, e.g. ``/dbname``. If not specified Datasette will pick one based on the filename or memory name. + +The ``datasette.add_database(db)`` method lets you add a new database to the current Datasette instance. The ``db`` parameter should be an instance of the ``datasette.database.Database`` class. For example: @@ -262,13 +262,13 @@ The ``db`` parameter should be an instance of the ``datasette.database.Database` from datasette.database import Database - datasette.add_database("my-new-database", Database( + datasette.add_database(Database( datasette, path="path/to/my-new-database.db", is_mutable=True )) -This will add a mutable database from the provided file path. +This will add a mutable database and serve it at ``/my-new-database``. To create a shared in-memory database named ``statistics``, use the following: @@ -276,11 +276,20 @@ To create a shared in-memory database named ``statistics``, use the following: from datasette.database import Database - datasette.add_database("statistics", Database( + datasette.add_database(Database( datasette, memory_name="statistics" )) +This database will be served at ``/statistics``. + +``.add_database()`` returns the Database instance, with its name set as the ``database.name`` attribute. Any time you are working with a newly added database you should use the return value of ``.add_database()``, for example: + +.. code-block:: python + + db = datasette.add_database(Database(datasette, memory_name="statistics")) + await db.execute_write("CREATE TABLE foo(id integer primary key)", block=True) + .. _datasette_remove_database: .remove_database(name) @@ -289,7 +298,7 @@ To create a shared in-memory database named ``statistics``, use the following: ``name`` - string The name of the database to be removed. -This removes a database that has been previously added. ``name=`` is the unique name of that database, also used in the URL for it. +This removes a database that has been previously added. ``name=`` is the unique name of that database, used in its URL path. .. 
_datasette_sign: diff --git a/tests/test_cli.py b/tests/test_cli.py index 3f6b1840..ff46d76f 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -8,6 +8,7 @@ import asyncio from datasette.plugins import DEFAULT_PLUGINS from datasette.cli import cli, serve from datasette.version import __version__ +from datasette.utils.sqlite import sqlite3 from click.testing import CliRunner import io import json @@ -240,3 +241,17 @@ def test_serve_create(ensure_eventloop, tmpdir): "hash": None, }.items() <= databases[0].items() assert db_path.exists() + + +def test_serve_duplicate_database_names(ensure_eventloop, tmpdir): + runner = CliRunner() + db_1_path = str(tmpdir / "db.db") + nested = tmpdir / "nested" + nested.mkdir() + db_2_path = str(tmpdir / "nested" / "db.db") + for path in (db_1_path, db_2_path): + sqlite3.connect(path).execute("vacuum") + result = runner.invoke(cli, [db_1_path, db_2_path, "--get", "/-/databases.json"]) + assert result.exit_code == 0, result.output + databases = json.loads(result.output) + assert {db["name"] for db in databases} == {"db", "db_2"} diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index dc1af48c..7eff9f7e 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -439,7 +439,7 @@ async def test_execute_write_fn_connection_exception(tmpdir, app_client): path = str(tmpdir / "immutable.db") sqlite3.connect(path).execute("vacuum") db = Database(app_client.ds, path=path, is_mutable=False) - app_client.ds.add_database("immutable-db", db) + app_client.ds.add_database(db, name="immutable-db") def write_fn(conn): assert False @@ -469,10 +469,10 @@ def test_is_mutable(app_client): @pytest.mark.asyncio async def test_database_memory_name(app_client): ds = app_client.ds - foo1 = Database(ds, memory_name="foo") - foo2 = Database(ds, memory_name="foo") - bar1 = Database(ds, memory_name="bar") - bar2 = Database(ds, memory_name="bar") + foo1 = ds.add_database(Database(ds, memory_name="foo")) + foo2 = ds.add_database(Database(ds, memory_name="foo")) + bar1 = ds.add_database(Database(ds, memory_name="bar")) + bar2 = ds.add_database(Database(ds, memory_name="bar")) for db in (foo1, foo2, bar1, bar2): table_names = await db.table_names() assert table_names == [] @@ -487,7 +487,7 @@ async def test_database_memory_name(app_client): @pytest.mark.asyncio async def test_in_memory_databases_forbid_writes(app_client): ds = app_client.ds - db = Database(ds, memory_name="test") + db = ds.add_database(Database(ds, memory_name="test")) with pytest.raises(sqlite3.OperationalError): await db.execute("create table foo (t text)") assert await db.table_names() == [] From 90eba4c3ca569c57e96bce314e7ac8caf67d884e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 22 Dec 2020 15:55:43 -0800 Subject: [PATCH 0100/1455] Prettier CREATE TABLE SQL for _internal --- datasette/utils/internal_db.py | 109 ++++++++++++++++++--------------- 1 file changed, 61 insertions(+), 48 deletions(-) diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py index 5cd32381..e92625d5 100644 --- a/datasette/utils/internal_db.py +++ b/datasette/utils/internal_db.py @@ -1,82 +1,95 @@ +import textwrap + + async def init_internal_db(db): await db.execute_write( - """ + textwrap.dedent( + """ CREATE TABLE databases ( - "database_name" TEXT PRIMARY KEY, - "path" TEXT, - "is_memory" INTEGER, - "schema_version" INTEGER + database_name TEXT PRIMARY KEY, + path TEXT, + is_memory INTEGER, + schema_version INTEGER ) - """, + """ + ), block=True, ) 
await db.execute_write( - """ + textwrap.dedent( + """ CREATE TABLE tables ( - "database_name" TEXT, - "table_name" TEXT, - "rootpage" INTEGER, - "sql" TEXT, + database_name TEXT, + table_name TEXT, + rootpage INTEGER, + sql TEXT, PRIMARY KEY (database_name, table_name), FOREIGN KEY (database_name) REFERENCES databases(database_name) ) - """, + """ + ), block=True, ) await db.execute_write( - """ + textwrap.dedent( + """ CREATE TABLE columns ( - "database_name" TEXT, - "table_name" TEXT, - "cid" INTEGER, - "name" TEXT, - "type" TEXT, + database_name TEXT, + table_name TEXT, + cid INTEGER, + name TEXT, + type TEXT, "notnull" INTEGER, - "default_value" TEXT, -- renamed from dflt_value - "is_pk" INTEGER, -- renamed from pk - "hidden" INTEGER, + default_value TEXT, -- renamed from dflt_value + is_pk INTEGER, -- renamed from pk + hidden INTEGER, PRIMARY KEY (database_name, table_name, name), FOREIGN KEY (database_name) REFERENCES databases(database_name), FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) ) - """, + """ + ), block=True, ) await db.execute_write( - """ + textwrap.dedent( + """ CREATE TABLE indexes ( - "database_name" TEXT, - "table_name" TEXT, - "seq" INTEGER, - "name" TEXT, + database_name TEXT, + table_name TEXT, + seq INTEGER, + name TEXT, "unique" INTEGER, - "origin" TEXT, - "partial" INTEGER, + origin TEXT, + partial INTEGER, PRIMARY KEY (database_name, table_name, name), FOREIGN KEY (database_name) REFERENCES databases(database_name), FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) ) - """, + """ + ), block=True, ) await db.execute_write( - """ + textwrap.dedent( + """ CREATE TABLE foreign_keys ( - "database_name" TEXT, - "table_name" TEXT, - "id" INTEGER, - "seq" INTEGER, + database_name TEXT, + table_name TEXT, + id INTEGER, + seq INTEGER, "table" TEXT, "from" TEXT, "to" TEXT, - "on_update" TEXT, - "on_delete" TEXT, - "match" TEXT, + on_update TEXT, + on_delete TEXT, + match TEXT, PRIMARY KEY (database_name, table_name, id, seq), FOREIGN KEY (database_name) REFERENCES databases(database_name), FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) ) - """, + """ + ), block=True, ) @@ -84,14 +97,14 @@ async def init_internal_db(db): async def populate_schema_tables(internal_db, db): database_name = db.name await internal_db.execute_write( - "delete from tables where database_name = ?", [database_name], block=True + "DELETE FROM tables WHERE database_name = ?", [database_name], block=True ) - tables = (await db.execute("select * from sqlite_master where type = 'table'")).rows + tables = (await db.execute("select * from sqlite_master WHERE type = 'table'")).rows for table in tables: table_name = table["name"] await internal_db.execute_write( """ - insert into tables (database_name, table_name, rootpage, sql) + INSERT INTO tables (database_name, table_name, rootpage, sql) values (?, ?, ?, ?) """, [database_name, table_name, table["rootpage"], table["sql"]], @@ -99,7 +112,7 @@ async def populate_schema_tables(internal_db, db): ) # And the columns await internal_db.execute_write( - "delete from columns where database_name = ? and table_name = ?", + "DELETE FROM columns WHERE database_name = ? 
and table_name = ?", [database_name, table_name], block=True, ) @@ -111,7 +124,7 @@ async def populate_schema_tables(internal_db, db): } await internal_db.execute_write( """ - insert into columns ( + INSERT INTO columns ( database_name, table_name, cid, name, type, "notnull", default_value, is_pk, hidden ) VALUES ( :database_name, :table_name, :cid, :name, :type, :notnull, :default_value, :is_pk, :hidden @@ -122,7 +135,7 @@ async def populate_schema_tables(internal_db, db): ) # And the foreign_keys await internal_db.execute_write( - "delete from foreign_keys where database_name = ? and table_name = ?", + "DELETE FROM foreign_keys WHERE database_name = ? and table_name = ?", [database_name, table_name], block=True, ) @@ -136,7 +149,7 @@ async def populate_schema_tables(internal_db, db): } await internal_db.execute_write( """ - insert into foreign_keys ( + INSERT INTO foreign_keys ( database_name, table_name, "id", seq, "table", "from", "to", on_update, on_delete, match ) VALUES ( :database_name, :table_name, :id, :seq, :table, :from, :to, :on_update, :on_delete, :match @@ -147,7 +160,7 @@ async def populate_schema_tables(internal_db, db): ) # And the indexes await internal_db.execute_write( - "delete from indexes where database_name = ? and table_name = ?", + "DELETE FROM indexes WHERE database_name = ? and table_name = ?", [database_name, table_name], block=True, ) @@ -159,7 +172,7 @@ async def populate_schema_tables(internal_db, db): } await internal_db.execute_write( """ - insert into indexes ( + INSERT INTO indexes ( database_name, table_name, seq, name, "unique", origin, partial ) VALUES ( :database_name, :table_name, :seq, :name, :unique, :origin, :partial From a882d679626438ba0d809944f06f239bcba8ee96 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miroslav=20=C5=A0ediv=C3=BD?= <6774676+eumiro@users.noreply.github.com> Date: Wed, 23 Dec 2020 18:04:32 +0100 Subject: [PATCH 0101/1455] Modernize code to Python 3.6+ (#1158) * Compact dict and set building * Remove redundant parentheses * Simplify chained conditions * Change method name to lowercase * Use triple double quotes for docstrings Thanks, @eumiro! 
--- datasette/app.py | 16 +++++++------- datasette/cli.py | 10 ++++----- datasette/facets.py | 4 +--- datasette/filters.py | 6 +++--- datasette/hookspecs.py | 42 ++++++++++++++++++------------------- datasette/inspect.py | 6 +++--- datasette/renderer.py | 2 +- datasette/utils/__init__.py | 20 +++++++++--------- datasette/utils/asgi.py | 18 +++++++--------- datasette/views/base.py | 6 +++--- datasette/views/table.py | 4 ++-- tests/fixtures.py | 2 +- tests/plugins/my_plugin.py | 2 +- tests/test_api.py | 4 ++-- tests/test_auth.py | 4 ++-- tests/test_cli.py | 2 +- tests/test_docs.py | 6 +++--- tests/test_permissions.py | 2 +- tests/test_plugins.py | 2 +- 19 files changed, 76 insertions(+), 82 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index ad3ba07e..bd62fd3b 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -429,7 +429,7 @@ class Datasette: return m def plugin_config(self, plugin_name, database=None, table=None, fallback=True): - "Return config for plugin, falling back from specified database/table" + """Return config for plugin, falling back from specified database/table""" plugins = self.metadata( "plugins", database=database, table=table, fallback=fallback ) @@ -523,7 +523,7 @@ class Datasette: return [] async def permission_allowed(self, actor, action, resource=None, default=False): - "Check permissions using the permissions_allowed plugin hook" + """Check permissions using the permissions_allowed plugin hook""" result = None for check in pm.hook.permission_allowed( datasette=self, @@ -570,7 +570,7 @@ class Datasette: ) async def expand_foreign_keys(self, database, table, column, values): - "Returns dict mapping (column, value) -> label" + """Returns dict mapping (column, value) -> label""" labeled_fks = {} db = self.databases[database] foreign_keys = await db.foreign_keys_for_table(table) @@ -613,7 +613,7 @@ class Datasette: return url def _register_custom_units(self): - "Register any custom units defined in the metadata.json with Pint" + """Register any custom units defined in the metadata.json with Pint""" for unit in self.metadata("custom_units") or []: ureg.define(unit) @@ -730,7 +730,7 @@ class Datasette: return {"actor": request.actor} def table_metadata(self, database, table): - "Fetch table-specific metadata." + """Fetch table-specific metadata.""" return ( (self.metadata("databases") or {}) .get(database, {}) @@ -739,7 +739,7 @@ class Datasette: ) def _register_renderers(self): - """ Register output renderers which output data in custom formats. 
""" + """Register output renderers which output data in custom formats.""" # Built-in renderers self.renderers["json"] = (json_renderer, lambda: True) @@ -880,7 +880,7 @@ class Datasette: return output def app(self): - "Returns an ASGI app function that serves the whole of Datasette" + """Returns an ASGI app function that serves the whole of Datasette""" routes = [] for routes_to_add in pm.hook.register_routes(): @@ -1287,7 +1287,7 @@ def permanent_redirect(path): ) -_curly_re = re.compile(r"(\{.*?\})") +_curly_re = re.compile(r"({.*?})") def route_pattern_from_filepath(filepath): diff --git a/datasette/cli.py b/datasette/cli.py index c342a35a..2a84bf30 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -152,7 +152,7 @@ async def inspect_(files, sqlite_extensions): @cli.group() def publish(): - "Publish specified SQLite database files to the internet along with a Datasette-powered interface and API" + """Publish specified SQLite database files to the internet along with a Datasette-powered interface and API""" pass @@ -168,7 +168,7 @@ pm.hook.publish_subcommand(publish=publish) help="Path to directory containing custom plugins", ) def plugins(all, plugins_dir): - "List currently available plugins" + """List currently available plugins""" app = Datasette([], plugins_dir=plugins_dir) click.echo(json.dumps(app._plugins(all=all), indent=4)) @@ -244,7 +244,7 @@ def package( port, **extra_metadata, ): - "Package specified SQLite files into a new datasette Docker container" + """Package specified SQLite files into a new datasette Docker container""" if not shutil.which("docker"): click.secho( ' The package command requires "docker" to be installed and configured ', @@ -284,7 +284,7 @@ def package( "-U", "--upgrade", is_flag=True, help="Upgrade packages to latest version" ) def install(packages, upgrade): - "Install Python packages - e.g. Datasette plugins - into the same environment as Datasette" + """Install Python packages - e.g. Datasette plugins - into the same environment as Datasette""" args = ["pip", "install"] if upgrade: args += ["--upgrade"] @@ -297,7 +297,7 @@ def install(packages, upgrade): @click.argument("packages", nargs=-1, required=True) @click.option("-y", "--yes", is_flag=True, help="Don't ask for confirmation") def uninstall(packages, yes): - "Uninstall Python packages (e.g. plugins) from the Datasette environment" + """Uninstall Python packages (e.g. 
plugins) from the Datasette environment""" sys.argv = ["pip", "uninstall"] + list(packages) + (["-y"] if yes else []) run_module("pip", run_name="__main__") diff --git a/datasette/facets.py b/datasette/facets.py index 8ad5a423..207d819d 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -162,10 +162,8 @@ class ColumnFacet(Facet): ) num_distinct_values = len(distinct_values) if ( - num_distinct_values - and num_distinct_values > 1 + 1 < num_distinct_values < row_count and num_distinct_values <= facet_size - and num_distinct_values < row_count # And at least one has n > 1 and any(r["n"] > 1 for r in distinct_values) ): diff --git a/datasette/filters.py b/datasette/filters.py index edf2de99..152a26b4 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -208,7 +208,7 @@ class Filters: self.ureg = ureg def lookups(self): - "Yields (lookup, display, no_argument) pairs" + """Yields (lookup, display, no_argument) pairs""" for filter in self._filters: yield filter.key, filter.display, filter.no_argument @@ -233,7 +233,7 @@ class Filters: return f"where {s}" def selections(self): - "Yields (column, lookup, value) tuples" + """Yields (column, lookup, value) tuples""" for key, value in self.pairs: if "__" in key: column, lookup = key.rsplit("__", 1) @@ -246,7 +246,7 @@ class Filters: return bool(self.pairs) def convert_unit(self, column, value): - "If the user has provided a unit in the query, convert it into the column unit, if present." + """If the user has provided a unit in the query, convert it into the column unit, if present.""" if column not in self.units: return value diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index a305ca6a..13a10680 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -7,108 +7,108 @@ hookimpl = HookimplMarker("datasette") @hookspec def startup(datasette): - "Fires directly after Datasette first starts running" + """Fires directly after Datasette first starts running""" @hookspec def asgi_wrapper(datasette): - "Returns an ASGI middleware callable to wrap our ASGI application with" + """Returns an ASGI middleware callable to wrap our ASGI application with""" @hookspec def prepare_connection(conn, database, datasette): - "Modify SQLite connection in some way e.g. register custom SQL functions" + """Modify SQLite connection in some way e.g. register custom SQL functions""" @hookspec def prepare_jinja2_environment(env): - "Modify Jinja2 template environment e.g. register custom template tags" + """Modify Jinja2 template environment e.g. register custom template tags""" @hookspec def extra_css_urls(template, database, table, columns, view_name, request, datasette): - "Extra CSS URLs added by this plugin" + """Extra CSS URLs added by this plugin""" @hookspec def extra_js_urls(template, database, table, columns, view_name, request, datasette): - "Extra JavaScript URLs added by this plugin" + """Extra JavaScript URLs added by this plugin""" @hookspec def extra_body_script( template, database, table, columns, view_name, request, datasette ): - "Extra JavaScript code to be included in + {% endfor %} {% block extra_head %}{% endblock %} diff --git a/docs/custom_templates.rst b/docs/custom_templates.rst index d37bb729..a7236873 100644 --- a/docs/custom_templates.rst +++ b/docs/custom_templates.rst @@ -5,6 +5,8 @@ Custom pages and templates Datasette provides a number of ways of customizing the way data is displayed. +.. 
_customization_css_and_javascript: + Custom CSS and JavaScript ------------------------- @@ -25,7 +27,12 @@ Your ``metadata.json`` file can include links that look like this: ] } -The extra CSS and JavaScript files will be linked in the ```` of every page. +The extra CSS and JavaScript files will be linked in the ```` of every page: + +.. code-block:: html + + + You can also specify a SRI (subresource integrity hash) for these assets: @@ -46,9 +53,39 @@ You can also specify a SRI (subresource integrity hash) for these assets: ] } +This will produce: + +.. code-block:: html + + + + Modern browsers will only execute the stylesheet or JavaScript if the SRI hash matches the content served. You can generate hashes using `www.srihash.org `_ +Items in ``"extra_js_urls"`` can specify ``"module": true`` if they reference JavaScript that uses `JavaScript modules `__. This configuration: + +.. code-block:: json + + { + "extra_js_urls": [ + { + "url": "https://example.datasette.io/module.js", + "module": true + } + ] + } + +Will produce this HTML: + +.. code-block:: html + + + CSS classes on the ~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 72b09367..d465307b 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -182,7 +182,7 @@ This can be a list of URLs: @hookimpl def extra_css_urls(): return [ - 'https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css' + "https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css" ] Or a list of dictionaries defining both a URL and an @@ -190,21 +190,17 @@ Or a list of dictionaries defining both a URL and an .. code-block:: python - from datasette import hookimpl - @hookimpl def extra_css_urls(): return [{ - 'url': 'https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css', - 'sri': 'sha384-9gVQ4dYFwwWSjIDZnLEWnxCjeSWFphJiwGPXr1jddIhOegiu1FwO5qRGvFXOdJZ4', + "url": "https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css", + "sri": "sha384-9gVQ4dYFwwWSjIDZnLEWnxCjeSWFphJiwGPXr1jddIhOegiu1FwO5qRGvFXOdJZ4", }] This function can also return an awaitable function, useful if it needs to run any async code: .. code-block:: python - from datasette import hookimpl - @hookimpl def extra_css_urls(datasette): async def inner(): @@ -233,8 +229,8 @@ return a list of URLs, a list of dictionaries or an awaitable function that retu @hookimpl def extra_js_urls(): return [{ - 'url': 'https://code.jquery.com/jquery-3.3.1.slim.min.js', - 'sri': 'sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo', + "url": "https://code.jquery.com/jquery-3.3.1.slim.min.js", + "sri": "sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo", }] You can also return URLs to files from your plugin's ``static/`` directory, if @@ -242,12 +238,21 @@ you have one: .. code-block:: python - from datasette import hookimpl - @hookimpl def extra_js_urls(): return [ - '/-/static-plugins/your-plugin/app.js' + "/-/static-plugins/your-plugin/app.js" + ] + +If your code uses `JavaScript modules `__ you should include the ``"module": True`` key. See :ref:`customization_css_and_javascript` for more details. + +.. 
code-block:: python + + @hookimpl + def extra_js_urls(): + return [{ + "url": "/-/static-plugins/your-plugin/app.js", + "module": True ] Examples: `datasette-cluster-map `_, `datasette-vega `_ diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 2e653e2b..1c86b4bc 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -61,6 +61,7 @@ def extra_js_urls(): "sri": "SRIHASH", }, "https://plugin-example.datasette.io/plugin1.js", + {"url": "https://plugin-example.datasette.io/plugin.module.js", "module": True}, ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 212de2b5..648e7abd 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -118,16 +118,19 @@ def test_hook_extra_css_urls(app_client, path, expected_decoded_object): def test_hook_extra_js_urls(app_client): response = app_client.get("/") scripts = Soup(response.body, "html.parser").findAll("script") - assert [ - s - for s in scripts - if s.attrs - == { + script_attrs = [s.attrs for s in scripts] + for attrs in [ + { "integrity": "SRIHASH", "crossorigin": "anonymous", "src": "https://plugin-example.datasette.io/jquery.js", - } - ] + }, + { + "src": "https://plugin-example.datasette.io/plugin.module.js", + "type": "module", + }, + ]: + assert any(s == attrs for s in script_attrs), "Expected: {}".format(attrs) def test_plugins_with_duplicate_js_urls(app_client): From c38c42948cbfddd587729413fd6082ba352eaece Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 13 Jan 2021 18:14:33 -0800 Subject: [PATCH 0123/1455] extra_body_script module support, closes #1187 --- datasette/app.py | 8 +++++++- datasette/templates/base.html | 2 +- docs/plugin_hooks.rst | 25 ++++++++++++++++++++----- tests/plugins/my_plugin.py | 3 ++- tests/test_plugins.py | 2 +- 5 files changed, 31 insertions(+), 9 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f8549fac..cfce8e0b 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -781,7 +781,13 @@ class Datasette: datasette=self, ): extra_script = await await_me_maybe(extra_script) - body_scripts.append(Markup(extra_script)) + if isinstance(extra_script, dict): + script = extra_script["script"] + module = bool(extra_script.get("module")) + else: + script = extra_script + module = False + body_scripts.append({"script": Markup(script), "module": module}) extra_template_vars = {} # pylint: disable=no-member diff --git a/datasette/templates/base.html b/datasette/templates/base.html index 3f3d4507..e61edc4f 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -62,7 +62,7 @@ {% include "_close_open_menus.html" %} {% for body_script in body_scripts %} - + {{ body_script.script }} {% endfor %} {% if select_templates %}{% endif %} diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index d465307b..0206daaa 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -168,7 +168,7 @@ Examples: `datasette-search-all extra_css_urls(template, database, table, columns, view_name, request, datasette) --------------------------------------------------------------------------------- -Same arguments as :ref:`extra_template_vars(...) ` +This takes the same arguments as :ref:`extra_template_vars(...) ` Return a list of extra CSS URLs that should be included on the page. These can take advantage of the CSS class hooks described in :ref:`customization`. @@ -217,7 +217,7 @@ Examples: `datasette-cluster-map ` +This takes the same arguments as :ref:`extra_template_vars(...) 
` This works in the same way as ``extra_css_urls()`` but for JavaScript. You can return a list of URLs, a list of dictionaries or an awaitable function that returns those things: @@ -264,15 +264,30 @@ extra_body_script(template, database, table, columns, view_name, request, datase Extra JavaScript to be added to a ```` element: + +.. code-block:: python + + @hookimpl + def extra_body_script(): + return { + "module": True, + "script": "console.log('Your JavaScript goes here...')" + } + +This will add the following to the end of your page: + +.. code-block:: html + + Example: `datasette-cluster-map `_ diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 1c86b4bc..8d192d28 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -70,7 +70,7 @@ def extra_body_script( template, database, table, view_name, columns, request, datasette ): async def inner(): - return "var extra_body_script = {};".format( + script = "var extra_body_script = {};".format( json.dumps( { "template": template, @@ -90,6 +90,7 @@ def extra_body_script( } ) ) + return {"script": script, "module": True} return inner diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 648e7abd..715c7c17 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -288,7 +288,7 @@ def test_plugin_config_file(app_client): ], ) def test_hook_extra_body_script(app_client, path, expected_extra_body_script): - r = re.compile(r"") + r = re.compile(r"") json_data = r.search(app_client.get(path).text).group(1) actual_data = json.loads(json_data) assert expected_extra_body_script == actual_data From 7e3cfd9cf7aeddf153d907bc3ee08ae0cd489370 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 19 Jan 2021 12:27:45 -0800 Subject: [PATCH 0124/1455] Clarify the name of plugin used in /-/static-plugins/ --- docs/plugin_hooks.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 0206daaa..23e57278 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -244,6 +244,8 @@ you have one: "/-/static-plugins/your-plugin/app.js" ] +Note that `your-plugin` here should be the hyphenated plugin name - the name that is displayed in the list on the `/-/plugins` debug page. + If your code uses `JavaScript modules `__ you should include the ``"module": True`` key. See :ref:`customization_css_and_javascript` for more details. .. code-block:: python From 57f4d7b82f9c74298c67c5640207241925b70c02 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 19 Jan 2021 12:47:30 -0800 Subject: [PATCH 0125/1455] Release 0.54a0 Refs #1091, #1145, #1151, #1156, #1157, #1158, #1166, #1170, #1178, #1182, #1184, #1185, #1186, #1187 --- datasette/version.py | 2 +- docs/changelog.rst | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index a5edecfa..b19423a9 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.53" +__version__ = "0.54a0" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 40b9c5a3..ac2ac8c9 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,17 @@ Changelog ========= +.. _v0_54_a0: + +0.54a0 (2020-12-19) +------------------- + +**Alpha release**. Release notes in progress. + +- Improved support for named in-memory databases. (`#1151 `__) +- New ``_internal`` in-memory database tracking attached databases, tables and columns. 
(`#1150 `__) +- Support for JavaScript modules. (`#1186 `__, `#1187 `__) + .. _v0_53: 0.53 (2020-12-10) From 5378f023529107ff7edbd6ee4ecab6ac170a83db Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 19 Jan 2021 12:50:12 -0800 Subject: [PATCH 0126/1455] Better tool for extracting issue numbers --- docs/contributing.rst | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 24d5c8f0..3a4b2caa 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -169,17 +169,7 @@ To release a new version, first create a commit that updates the version number Referencing the issues that are part of the release in the commit message ensures the name of the release shows up on those issue pages, e.g. `here `__. -You can generate the list of issue references for a specific release by pasting the following into the browser devtools while looking at the :ref:`changelog` page (replace ``v0-44`` with the most recent version): - -.. code-block:: javascript - - [ - ...new Set( - Array.from( - document.getElementById("v0-44").querySelectorAll("a[href*=issues]") - ).map((a) => "#" + a.href.split("/issues/")[1]) - ), - ].sort().join(", "); +You can generate the list of issue references for a specific release by copying and pasting text from the release notes or GitHub changes-since-last-release view into this `Extract issue numbers from pasted text `__ tool. To create the tag for the release, create `a new release `__ on GitHub matching the new version number. You can convert the release notes to Markdown by copying and pasting the rendered HTML into this `Paste to Markdown tool `__. From 25c2933667680db045851b2cedcf4666d737d352 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 22 Jan 2021 16:46:16 -0800 Subject: [PATCH 0127/1455] publish heroku now uses python-3.8.7 --- datasette/publish/heroku.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index c772b476..c0c70e12 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -173,7 +173,7 @@ def temporary_heroku_directory( if metadata_content: open("metadata.json", "w").write(json.dumps(metadata_content, indent=2)) - open("runtime.txt", "w").write("python-3.8.6") + open("runtime.txt", "w").write("python-3.8.7") if branch: install = [ From f78e956eca1f363e3a3f93c69fd9fc31bed14629 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 24 Jan 2021 12:38:29 -0800 Subject: [PATCH 0128/1455] Plugin testing documentation on using pytest-httpx Closes #1198 --- docs/testing_plugins.rst | 71 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 71 insertions(+) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index bacfd57b..4261f639 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -118,3 +118,74 @@ If you want to create that test database repeatedly for every individual test fu @pytest.fixture def datasette(tmp_path_factory): # This fixture will be executed repeatedly for every test + +.. _testing_plugins_pytest_httpx: + +Testing outbound HTTP calls with pytest-httpx +--------------------------------------------- + +If your plugin makes outbound HTTP calls - for example datasette-auth-github or datasette-import-table - you may need to mock those HTTP requests in your tests. + +The `pytest-httpx `__ package is a useful library for mocking calls. 
It can be tricky to use with Datasette though since it mocks all HTTPX requests, and Datasette's own testing mechanism uses HTTPX internally. + +To avoid breaking your tests, you can return ``["localhost"]`` from the ``non_mocked_hosts()`` fixture. + +As an example, here's a very simple plugin which executes an HTTP response and returns the resulting content: + +.. code-block:: python + + from datasette import hookimpl + from datasette.utils.asgi import Response + import httpx + + + @hookimpl + def register_routes(): + return [ + (r"^/-/fetch-url$", fetch_url), + ] + + + async def fetch_url(datasette, request): + if request.method == "GET": + return Response.html( + """ +
<form action="/-/fetch-url" method="POST"> + <input type="hidden" name="csrftoken" value="{}"> + <input name="url"><input type="submit"> + </form>
    """.format( + request.scope["csrftoken"]() + ) + ) + vars = await request.post_vars() + url = vars["url"] + return Response.text(httpx.get(url).text) + +Here's a test for that plugin that mocks the HTTPX outbound request: + +.. code-block:: python + + from datasette.app import Datasette + import pytest + + + @pytest.fixture + def non_mocked_hosts(): + # This ensures httpx-mock will not affect Datasette's own + # httpx calls made in the tests by datasette.client: + return ["localhost"] + + + async def test_outbound_http_call(httpx_mock): + httpx_mock.add_response( + url='https://www.example.com/', + data='Hello world', + ) + datasette = Datasette([], memory=True) + response = await datasette.client.post("/-/fetch-url", data={ + "url": "https://www.example.com/" + }) + asert response.text == "Hello world" + + outbound_request = httpx_mock.get_request() + assert outbound_request.url == "https://www.example.com/" From b6a7b58fa01af0cd5a5e94bd17d686d283a46819 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 24 Jan 2021 16:08:29 -0800 Subject: [PATCH 0129/1455] Initial docs for _internal database, closes #1154 --- docs/internals.rst | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/internals.rst b/docs/internals.rst index f7b0cc0b..4a2c0a8e 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -747,3 +747,19 @@ If your plugin implements a ``
    `` anywhere you will need to i .. code-block:: html + +.. _internals_internal: + +The _internal database +====================== + +.. warning:: + This API should be considered unstable - the structure of these tables may change prior to the release of Datasette 1.0. + +Datasette maintains an in-memory SQLite database with details of the the databases, tables and columns for all of the attached databases. + +By default all actors are denied access to the ``view-database`` permission for the ``_internal`` database, so the database is not visible to anyone unless they :ref:`sign in as root `. + +Plugins can access this database by calling ``db = datasette.get_database("_internal")`` and then executing queries using the :ref:`Database API `. + +You can explore an example of this database by `signing in as root `__ to the ``latest.datasette.io`` demo instance and then navigating to `latest.datasette.io/_internal `__. \ No newline at end of file From ffff3a4c5398a9f40b61d59736f386444da19289 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 24 Jan 2021 17:41:46 -0800 Subject: [PATCH 0130/1455] Easier way to run Prettier locally (#1203) Thanks, Ben Pickles - refs #1167 --- .github/workflows/prettier.yml | 2 +- package.json | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/prettier.yml b/.github/workflows/prettier.yml index d846cca7..9dfe7ee0 100644 --- a/.github/workflows/prettier.yml +++ b/.github/workflows/prettier.yml @@ -19,4 +19,4 @@ jobs: run: npm ci - name: Run prettier run: |- - npx --no-install prettier --check 'datasette/static/*[!.min].js' + npm run prettier -- --check diff --git a/package.json b/package.json index 67452d2f..5c6dfe61 100644 --- a/package.json +++ b/package.json @@ -3,5 +3,9 @@ "private": true, "devDependencies": { "prettier": "^2.2.1" + }, + "scripts": { + "fix": "npm run prettier -- --write", + "prettier": "prettier 'datasette/static/*[!.min].js'" } } From f3a155531807c586e62b8ff0e97b96a76e949c8d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 24 Jan 2021 17:58:15 -0800 Subject: [PATCH 0131/1455] Contributing docs for Black and Prettier, closes #1167 Refs #1203 --- docs/contributing.rst | 52 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/docs/contributing.rst b/docs/contributing.rst index 3a4b2caa..2cf641fd 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -97,6 +97,58 @@ You can tell Datasette to open an interactive ``pdb`` debugger session if an err datasette --pdb fixtures.db +.. _contributing_formatting: + +Code formatting +--------------- + +Datasette uses opinionated code formatters: `Black `__ for Python and `Prettier `__ for JavaScript. + +These formatters are enforced by Datasette's continuous integration: if a commit includes Python or JavaScript code that does not match the style enforced by those tools, the tests will fail. + +When developing locally, you can verify and correct the formatting of your code using these tools. + +.. _contributing_formatting_black: + +Running Black +~~~~~~~~~~~~~ + +Black will be installed when you run ``pip install -e '.[test]'``. To test that your code complies with Black, run the following in your root ``datasette`` repository checkout:: + + $ black . --check + All done! ✨ 🍰 ✨ + 95 files would be left unchanged. + +If any of your code does not conform to Black you can run this to automatically fix those problems:: + + $ black . + reformatted ../datasette/setup.py + All done! 
✨ 🍰 ✨ + 1 file reformatted, 94 files left unchanged. + +.. _contributing_formatting_prettier: + +Prettier +~~~~~~~~ + +To install Prettier, `install Node.js `__ and then run the following in the root of your ``datasette`` repository checkout:: + + $ npm install + +This will install Prettier in a ``node_modules`` directory. You can then check that your code matches the coding style like so:: + + $ npm run prettier -- --check + > prettier + > prettier 'datasette/static/*[!.min].js' "--check" + + Checking formatting... + [warn] datasette/static/plugins.js + [warn] Code style issues found in the above file(s). Forgot to run Prettier? + +You can fix any problems by running:: + + $ npm run fix + .. _contributing_documentation: Editing and building the documentation From 07e163561592c743e4117f72102fcd350a600909 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 24 Jan 2021 19:10:10 -0800 Subject: [PATCH 0132/1455] All ?_ parameters now copied to hidden form fields, closes #1194 --- datasette/views/table.py | 17 +++++------------ tests/test_html.py | 22 ++++++++++++++++++++++ 2 files changed, 27 insertions(+), 12 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index cc8ef9f1..0a3504b3 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -812,19 +812,12 @@ class TableView(RowTableShared): .get(table, {}) ) self.ds.update_with_inherited_metadata(metadata) + form_hidden_args = [] - # Add currently selected facets - for arg in special_args: - if arg == "_facet" or arg.startswith("_facet_"): - form_hidden_args.extend( - (arg, item) for item in request.args.getlist(arg) - ) - for arg in ("_fts_table", "_fts_pk"): - if arg in special_args: - form_hidden_args.append((arg, special_args[arg])) - if request.args.get("_where"): - for where_text in request.args.getlist("_where"): - form_hidden_args.append(("_where", where_text)) + for key in request.args: + if key.startswith("_"): + for value in request.args.getlist(key): + form_hidden_args.append((key, value)) # if no sort specified AND table has a single primary key, # set sort to that so arrow is displayed diff --git a/tests/test_html.py b/tests/test_html.py index c7dd9d97..08d17ca7 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1250,6 +1250,28 @@ def test_extra_where_clauses(app_client): ] +@pytest.mark.parametrize( + "path,expected_hidden", + [ + ("/fixtures/facetable?_size=10", [("_size", "10")]), + ( + "/fixtures/facetable?_size=10&_ignore=1&_ignore=2", + [ + ("_size", "10"), + ("_ignore", "1"), + ("_ignore", "2"), + ], + ), + ], +) +def test_other_hidden_form_fields(app_client, path, expected_hidden): + response = app_client.get(path) + soup = Soup(response.body, "html.parser") + inputs = soup.find("form").findAll("input") + hiddens = [i for i in inputs if i["type"] == "hidden"] + assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == expected_hidden + + def test_binary_data_display_in_table(app_client): response = app_client.get("/fixtures/binary_data") assert response.status == 200 From a5ede3cdd455e2bb1a1fb2f4e1b5a9855caf5179 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 24 Jan 2021 21:13:05 -0800 Subject: [PATCH 0133/1455] Fixed bug loading database called 'test-database (1).sqlite' Closes #1181. Also now ensures that database URLs have special characters URL-quoted. 
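A rough sketch of the encoding behaviour this fix relies on (standard library only; the awkward file name is the one from the bug report above):

```python
import urllib.parse

# A file called "test-database (1).sqlite" produces a database name containing
# a space and parentheses, which must be percent-encoded when building URLs:
name = "test-database (1)"
encoded = urllib.parse.quote(name)
print(encoded)  # test-database%20%281%29

# Incoming path components are decoded again before the database lookup:
print(urllib.parse.unquote_plus(encoded))  # test-database (1)
```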
--- datasette/url_builder.py | 6 ++++-- datasette/views/base.py | 3 ++- docs/changelog.rst | 10 ++++++---- tests/test_api.py | 14 +++++++------- tests/test_cli.py | 23 +++++++++++++++++++++++ tests/test_html.py | 6 +++--- tests/test_internals_urls.py | 20 ++++++++++---------- 7 files changed, 55 insertions(+), 27 deletions(-) diff --git a/datasette/url_builder.py b/datasette/url_builder.py index 3034b664..2bcda869 100644 --- a/datasette/url_builder.py +++ b/datasette/url_builder.py @@ -30,9 +30,11 @@ class Urls: def database(self, database, format=None): db = self.ds.databases[database] if self.ds.setting("hash_urls") and db.hash: - path = self.path(f"{database}-{db.hash[:HASH_LENGTH]}", format=format) + path = self.path( + f"{urllib.parse.quote(database)}-{db.hash[:HASH_LENGTH]}", format=format + ) else: - path = self.path(database, format=format) + path = self.path(urllib.parse.quote(database), format=format) return path def table(self, database, table, format=None): diff --git a/datasette/views/base.py b/datasette/views/base.py index a21b9298..ba0f7d4c 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -181,6 +181,7 @@ class DataView(BaseView): async def resolve_db_name(self, request, db_name, **kwargs): hash = None name = None + db_name = urllib.parse.unquote_plus(db_name) if db_name not in self.ds.databases and "-" in db_name: # No matching DB found, maybe it's a name-hash? name_bit, hash_bit = db_name.rsplit("-", 1) @@ -191,7 +192,7 @@ class DataView(BaseView): hash = hash_bit else: name = db_name - name = urllib.parse.unquote_plus(name) + try: db = self.ds.databases[name] except KeyError: diff --git a/docs/changelog.rst b/docs/changelog.rst index ac2ac8c9..abc2f4f9 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,12 +4,14 @@ Changelog ========= -.. _v0_54_a0: +.. _v0_54: + +0.54 (2021-01-24) +----------------- + + -0.54a0 (2020-12-19) -------------------- -**Alpha release**. Release notes in progress. - Improved support for named in-memory databases. (`#1151 `__) - New ``_internal`` in-memory database tracking attached databases, tables and columns. 
(`#1150 `__) diff --git a/tests/test_api.py b/tests/test_api.py index 3b4f3437..0d1bddd3 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -609,17 +609,17 @@ def test_no_files_uses_memory_database(app_client_no_files): assert response.status == 200 assert { ":memory:": { + "name": ":memory:", "hash": None, "color": "f7935d", + "path": "/%3Amemory%3A", + "tables_and_views_truncated": [], + "tables_and_views_more": False, + "tables_count": 0, + "table_rows_sum": 0, + "show_table_row_counts": False, "hidden_table_rows_sum": 0, "hidden_tables_count": 0, - "name": ":memory:", - "show_table_row_counts": False, - "path": "/:memory:", - "table_rows_sum": 0, - "tables_count": 0, - "tables_and_views_more": False, - "tables_and_views_truncated": [], "views_count": 0, "private": False, } diff --git a/tests/test_cli.py b/tests/test_cli.py index 1d806bff..c42c22ea 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -17,6 +17,7 @@ import pytest import sys import textwrap from unittest import mock +import urllib @pytest.fixture @@ -255,3 +256,25 @@ def test_serve_duplicate_database_names(ensure_eventloop, tmpdir): assert result.exit_code == 0, result.output databases = json.loads(result.output) assert {db["name"] for db in databases} == {"db", "db_2"} + + +@pytest.mark.parametrize( + "filename", ["test-database (1).sqlite", "database (1).sqlite"] +) +def test_weird_database_names(ensure_eventloop, tmpdir, filename): + # https://github.com/simonw/datasette/issues/1181 + runner = CliRunner() + db_path = str(tmpdir / filename) + sqlite3.connect(db_path).execute("vacuum") + result1 = runner.invoke(cli, [db_path, "--get", "/"]) + assert result1.exit_code == 0, result1.output + filename_no_stem = filename.rsplit(".", 1)[0] + expected_link = '{}'.format( + urllib.parse.quote(filename_no_stem), filename_no_stem + ) + assert expected_link in result1.output + # Now try hitting that database page + result2 = runner.invoke( + cli, [db_path, "--get", "/{}".format(urllib.parse.quote(filename_no_stem))] + ) + assert result2.exit_code == 0, result2.output diff --git a/tests/test_html.py b/tests/test_html.py index 08d17ca7..6c33fba7 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -30,7 +30,7 @@ def test_homepage(app_client_two_attached_databases): # Should be two attached databases assert [ {"href": "/fixtures", "text": "fixtures"}, - {"href": "/extra database", "text": "extra database"}, + {"href": r"/extra%20database", "text": "extra database"}, ] == [{"href": a["href"], "text": a.text.strip()} for a in soup.select("h2 a")] # The first attached database should show count text and attached tables h2 = soup.select("h2")[1] @@ -44,8 +44,8 @@ def test_homepage(app_client_two_attached_databases): {"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a") ] assert [ - {"href": "/extra database/searchable", "text": "searchable"}, - {"href": "/extra database/searchable_view", "text": "searchable_view"}, + {"href": r"/extra%20database/searchable", "text": "searchable"}, + {"href": r"/extra%20database/searchable_view", "text": "searchable_view"}, ] == table_links diff --git a/tests/test_internals_urls.py b/tests/test_internals_urls.py index fd05c1b6..e6f405b3 100644 --- a/tests/test_internals_urls.py +++ b/tests/test_internals_urls.py @@ -103,9 +103,9 @@ def test_logout(ds, base_url, expected): @pytest.mark.parametrize( "base_url,format,expected", [ - ("/", None, "/:memory:"), - ("/prefix/", None, "/prefix/:memory:"), - ("/", "json", "/:memory:.json"), + ("/", None, "/%3Amemory%3A"), + 
("/prefix/", None, "/prefix/%3Amemory%3A"), + ("/", "json", "/%3Amemory%3A.json"), ], ) def test_database(ds, base_url, format, expected): @@ -118,10 +118,10 @@ def test_database(ds, base_url, format, expected): @pytest.mark.parametrize( "base_url,name,format,expected", [ - ("/", "name", None, "/:memory:/name"), - ("/prefix/", "name", None, "/prefix/:memory:/name"), - ("/", "name", "json", "/:memory:/name.json"), - ("/", "name.json", "json", "/:memory:/name.json?_format=json"), + ("/", "name", None, "/%3Amemory%3A/name"), + ("/prefix/", "name", None, "/prefix/%3Amemory%3A/name"), + ("/", "name", "json", "/%3Amemory%3A/name.json"), + ("/", "name.json", "json", "/%3Amemory%3A/name.json?_format=json"), ], ) def test_table_and_query(ds, base_url, name, format, expected): @@ -137,9 +137,9 @@ def test_table_and_query(ds, base_url, name, format, expected): @pytest.mark.parametrize( "base_url,format,expected", [ - ("/", None, "/:memory:/facetable/1"), - ("/prefix/", None, "/prefix/:memory:/facetable/1"), - ("/", "json", "/:memory:/facetable/1.json"), + ("/", None, "/%3Amemory%3A/facetable/1"), + ("/prefix/", None, "/prefix/%3Amemory%3A/facetable/1"), + ("/", "json", "/%3Amemory%3A/facetable/1.json"), ], ) def test_row(ds, base_url, format, expected): From 0b9ac1b2e9c855f1b823a06a898891da87c720ef Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 25 Jan 2021 09:33:29 -0800 Subject: [PATCH 0134/1455] Release 0.54 Refs #509, #1091, #1150, #1151, #1166, #1167, #1178, #1181, #1182, #1184, #1185, #1186, #1187, #1194, #1198 --- datasette/version.py | 2 +- docs/changelog.rst | 54 ++++++++++++++++++++++++++++++++++++++++---- 2 files changed, 50 insertions(+), 6 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index b19423a9..8fb7217d 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.54a0" +__version__ = "0.54" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index abc2f4f9..8fca312d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,16 +6,61 @@ Changelog .. _v0_54: -0.54 (2021-01-24) +0.54 (2021-01-25) ----------------- +The two big new features in this release are the ``_internal`` SQLite in-memory database storing details of all connected databases and tables, and support for JavaScript modules in plugins and additional scripts. +For additional commentary on this release, see `Datasette 0.54, the annotated release notes `__. +The _internal database +~~~~~~~~~~~~~~~~~~~~~~ +As part of ongoing work to help Datasette handle much larger numbers of connected databases and tables (see `Datasette Library `__) Datasette now maintains an in-memory SQLite database with details of all of the attached databases, tables, columns, indexes and foreign keys. (`#1150 `__) + +This will support future improvements such as a searchable, paginated homepage of all available tables. + +You can explore an example of this database by `signing in as root `__ to the ``latest.datasette.io`` demo instance and then navigating to `latest.datasette.io/_internal `__. + +Plugins can use these tables to introspect attached data in an efficient way. Plugin authors should note that this is not yet considered a stable interface, so any plugins that use this may need to make changes prior to Datasette 1.0 if the ``_internal`` table schemas change. 
+ +Named in-memory database support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +As part of the work building the ``_internal`` database, Datasette now supports named in-memory databases that can be shared across multiple connections. This allows plugins to create in-memory databases which will persist data for the lifetime of the Datasette server process. (`#1151 `__) + +The new ``memory_name=`` parameter to the :ref:`internals_database` can be used to create named, shared in-memory databases. + +JavaScript modules +~~~~~~~~~~~~~~~~~~ + +`JavaScript modules `__ were introduced in ECMAScript 2015 and provide native browser support for the ``import`` and ``export`` keywords. + +To use modules, JavaScript needs to be included in `` + diff --git a/datasette/templates/_codemirror_foot.html b/datasette/templates/_codemirror_foot.html index 4019d448..ee09cff1 100644 --- a/datasette/templates/_codemirror_foot.html +++ b/datasette/templates/_codemirror_foot.html @@ -23,6 +23,7 @@ window.onload = () => { editor.setValue(sqlFormatter.format(editor.getValue())); }) } + cmResize(editor, {resizableWidth: false}); } if (sqlFormat && readOnly) { const formatted = sqlFormatter.format(readOnly.innerHTML); From 42caabf7e9e6e4d69ef6dd7de16f2cd96bc79d5b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 22 Feb 2021 09:35:41 -0800 Subject: [PATCH 0153/1455] Fixed typo --- docs/testing_plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 8ea5e79b..1291a875 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -79,7 +79,7 @@ Using pytest fixtures A common pattern for Datasette plugins is to create a fixture which sets up a temporary test database and wraps it in a Datasette instance. -Here's an example that uses the `sqlite-utils library `__ to populate a temporary test database. It also sets the title of that table using a simulated ``metadata.json`` congiguration: +Here's an example that uses the `sqlite-utils library `__ to populate a temporary test database. It also sets the title of that table using a simulated ``metadata.json`` configuration: .. 
code-block:: python From 726f781c50e88f557437f6490b8479c3d6fabfc2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 22 Feb 2021 16:22:47 -0800 Subject: [PATCH 0154/1455] Fix for arraycontains bug, closes #1239 --- datasette/filters.py | 4 ++-- tests/test_filters.py | 8 ++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/datasette/filters.py b/datasette/filters.py index 152a26b4..2b859d99 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -150,7 +150,7 @@ class Filters: "arraycontains", "array contains", """rowid in ( - select {t}.rowid from {t}, json_each({t}.{c}) j + select {t}.rowid from {t}, json_each([{t}].[{c}]) j where j.value = :{p} )""", '{c} contains "{v}"', @@ -159,7 +159,7 @@ class Filters: "arraynotcontains", "array does not contain", """rowid not in ( - select {t}.rowid from {t}, json_each({t}.{c}) j + select {t}.rowid from {t}, json_each([{t}].[{c}]) j where j.value = :{p} )""", '{c} does not contain "{v}"', diff --git a/tests/test_filters.py b/tests/test_filters.py index 75a779b9..f22b7b5c 100644 --- a/tests/test_filters.py +++ b/tests/test_filters.py @@ -56,6 +56,14 @@ import pytest # Not in, and JSON array not in ((("foo__notin", "1,2,3"),), ["foo not in (:p0, :p1, :p2)"], ["1", "2", "3"]), ((("foo__notin", "[1,2,3]"),), ["foo not in (:p0, :p1, :p2)"], [1, 2, 3]), + # JSON arraycontains + ( + (("Availability+Info__arraycontains", "yes"),), + [ + "rowid in (\n select table.rowid from table, json_each([table].[Availability+Info]) j\n where j.value = :p0\n )" + ], + ["yes"], + ), ], ) def test_build_where(args, expected_where, expected_params): From afed51b1e36cf275c39e71c7cb262d6c5bdbaa31 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 26 Feb 2021 09:27:09 -0800 Subject: [PATCH 0155/1455] Note about where to find plugin examples, closes #1244 --- docs/writing_plugins.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index b43ecb27..6afee1c3 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -5,6 +5,8 @@ Writing plugins You can write one-off plugins that apply to just one Datasette instance, or you can write plugins which can be installed using ``pip`` and can be shipped to the Python Package Index (`PyPI `__) for other people to install. +Want to start by looking at an example? The `Datasette plugins directory `__ lists more than 50 open source plugins with code you can explore. The :ref:`plugin hooks ` page includes links to example plugins for each of the documented hooks. + .. 
_writing_plugins_one_off: Writing one-off plugins From cc6774cbaaba2359e0a92cfcc41ad988680075d6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Feb 2021 14:34:44 -0800 Subject: [PATCH 0156/1455] Upgrade httpx and remove xfail from tests, refs #1005 --- setup.py | 2 +- tests/test_api.py | 2 -- tests/test_html.py | 3 --- 3 files changed, 1 insertion(+), 6 deletions(-) diff --git a/setup.py b/setup.py index 34b6b396..15ee63fe 100644 --- a/setup.py +++ b/setup.py @@ -48,7 +48,7 @@ setup( "click-default-group~=1.2.2", "Jinja2>=2.10.3,<2.12.0", "hupper~=1.9", - "httpx>=0.15", + "httpx>=0.17", "pint~=0.9", "pluggy~=0.13.0", "uvicorn~=0.11", diff --git a/tests/test_api.py b/tests/test_api.py index 0b5401d6..caf23329 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -810,7 +810,6 @@ def test_table_shape_object_compound_primary_key(app_client): assert {"a,b": {"pk1": "a", "pk2": "b", "content": "c"}} == response.json -@pytest.mark.xfail def test_table_with_slashes_in_name(app_client): response = app_client.get( "/fixtures/table%2Fwith%2Fslashes.csv?_shape=objects&_format=json" @@ -1286,7 +1285,6 @@ def test_row_format_in_querystring(app_client): assert [{"id": "1", "content": "hello"}] == response.json["rows"] -@pytest.mark.xfail def test_row_strange_table_name(app_client): response = app_client.get( "/fixtures/table%2Fwith%2Fslashes.csv/3.json?_shape=objects" diff --git a/tests/test_html.py b/tests/test_html.py index e21bd64d..3482ec35 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -158,7 +158,6 @@ def test_row_redirects_with_url_hash(app_client_with_hash): assert response.status == 200 -@pytest.mark.xfail def test_row_strange_table_name_with_url_hash(app_client_with_hash): response = app_client_with_hash.get( "/fixtures/table%2Fwith%2Fslashes.csv/3", allow_redirects=False @@ -552,7 +551,6 @@ def test_facets_persist_through_filter_form(app_client): ] -@pytest.mark.xfail @pytest.mark.parametrize( "path,expected_classes", [ @@ -584,7 +582,6 @@ def test_css_classes_on_body(app_client, path, expected_classes): assert classes == expected_classes -@pytest.mark.xfail @pytest.mark.parametrize( "path,expected_considered", [ From 47eb885cc2c3aafa03645c330c6f597bee9b3b25 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Feb 2021 19:44:04 -0800 Subject: [PATCH 0157/1455] JSON faceting now suggested even if column has blank strings, closes #1246 --- datasette/facets.py | 11 ++++++++--- tests/test_facets.py | 22 ++++++++++++++++++++++ 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index 207d819d..01628760 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -279,6 +279,7 @@ class ArrayFacet(Facet): suggested_facet_sql = """ select distinct json_type({column}) from ({sql}) + where {column} is not null and {column} != '' """.format( column=escape_sqlite(column), sql=self.sql ) @@ -298,9 +299,13 @@ class ArrayFacet(Facet): v[0] for v in await self.ds.execute( self.database, - "select {column} from ({sql}) where {column} is not null and json_array_length({column}) > 0 limit 100".format( - column=escape_sqlite(column), sql=self.sql - ), + ( + "select {column} from ({sql}) " + "where {column} is not null " + "and {column} != '' " + "and json_array_length({column}) > 0 " + "limit 100" + ).format(column=escape_sqlite(column), sql=self.sql), self.params, truncate=False, custom_time_limit=self.ds.setting( diff --git a/tests/test_facets.py b/tests/test_facets.py index 1e19dc3a..31518682 100644 --- 
a/tests/test_facets.py +++ b/tests/test_facets.py @@ -1,3 +1,5 @@ +from datasette.app import Datasette +from datasette.database import Database from datasette.facets import ColumnFacet, ArrayFacet, DateFacet from datasette.utils.asgi import Request from datasette.utils import detect_json1 @@ -325,3 +327,23 @@ async def test_date_facet_results(app_client): "truncated": False, } } == buckets + + +@pytest.mark.asyncio +async def test_json_array_with_blanks_and_nulls(): + ds = Datasette([], memory=True) + db = ds.add_database(Database(ds, memory_name="test_json_array")) + await db.execute_write("create table foo(json_column text)", block=True) + for value in ('["a", "b", "c"]', '["a", "b"]', "", None): + await db.execute_write( + "insert into foo (json_column) values (?)", [value], block=True + ) + response = await ds.client.get("/test_json_array/foo.json") + data = response.json() + assert data["suggested_facets"] == [ + { + "name": "json_column", + "type": "array", + "toggle_url": "http://localhost/test_json_array/foo.json?_facet_array=json_column", + } + ] From 7c87532acc4e9d92caa1c4ee29a3446200928018 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Feb 2021 20:02:18 -0800 Subject: [PATCH 0158/1455] New .add_memory_database() method, closes #1247 --- datasette/app.py | 3 +++ docs/internals.rst | 29 ++++++++++++++++++++--------- tests/test_internals_database.py | 4 ++-- 3 files changed, 25 insertions(+), 11 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index e3272c6e..02d432df 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -390,6 +390,9 @@ class Datasette: self.databases[name] = db return db + def add_memory_database(self, memory_name): + return self.add_database(Database(self, memory_name=memory_name)) + def remove_database(self, name): self.databases.pop(name) diff --git a/docs/internals.rst b/docs/internals.rst index 713f5d7d..e3bb83fd 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -273,7 +273,25 @@ The ``db`` parameter should be an instance of the ``datasette.database.Database` This will add a mutable database and serve it at ``/my-new-database``. -To create a shared in-memory database named ``statistics``, use the following: +``.add_database()`` returns the Database instance, with its name set as the ``database.name`` attribute. Any time you are working with a newly added database you should use the return value of ``.add_database()``, for example: + +.. code-block:: python + + db = datasette.add_database(Database(datasette, memory_name="statistics")) + await db.execute_write("CREATE TABLE foo(id integer primary key)", block=True) + +.. _datasette_add_memory_database: + +.add_memory_database(name) +-------------------------- + +Adds a shared in-memory database with the specified name: + +.. code-block:: python + + datasette.add_memory_database("statistics") + +This is a shortcut for the following: .. code-block:: python @@ -284,14 +302,7 @@ To create a shared in-memory database named ``statistics``, use the following: memory_name="statistics" )) -This database will be served at ``/statistics``. - -``.add_database()`` returns the Database instance, with its name set as the ``database.name`` attribute. Any time you are working with a newly added database you should use the return value of ``.add_database()``, for example: - -.. 
code-block:: python - - db = datasette.add_database(Database(datasette, memory_name="statistics")) - await db.execute_write("CREATE TABLE foo(id integer primary key)", block=True) +Using either of these pattern will result in the in-memory database being served at ``/statistics``. .. _datasette_remove_database: diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 086f1a48..b60aaa8e 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -479,9 +479,9 @@ async def test_attached_databases(app_client_two_attached_databases_crossdb_enab async def test_database_memory_name(app_client): ds = app_client.ds foo1 = ds.add_database(Database(ds, memory_name="foo")) - foo2 = ds.add_database(Database(ds, memory_name="foo")) + foo2 = ds.add_memory_database("foo") bar1 = ds.add_database(Database(ds, memory_name="bar")) - bar2 = ds.add_database(Database(ds, memory_name="bar")) + bar2 = ds.add_memory_database("bar") for db in (foo1, foo2, bar1, bar2): table_names = await db.table_names() assert table_names == [] From 4f9a2f1f47dcf7e8561d68a8a07f5009a13cfdb3 Mon Sep 17 00:00:00 2001 From: David Boucha Date: Wed, 3 Mar 2021 22:46:10 -0700 Subject: [PATCH 0159/1455] Fix small typo (#1243) Thanks, @UtahDave --- docs/deploying.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/deploying.rst b/docs/deploying.rst index 4e04ea1d..0f892f83 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -58,7 +58,7 @@ Add a random value for the ``DATASETTE_SECRET`` - this will be used to sign Data $ python3 -c 'import secrets; print(secrets.token_hex(32))' -This configuration will run Datasette against all database files contained in the ``/home/ubunt/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`config_dir` for details. +This configuration will run Datasette against all database files contained in the ``/home/ubuntu/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`config_dir` for details. You can start the Datasette process running using the following:: From d0fd833b8cdd97e1b91d0f97a69b494895d82bee Mon Sep 17 00:00:00 2001 From: Bob Whitelock Date: Sun, 7 Mar 2021 07:41:17 +0000 Subject: [PATCH 0160/1455] Add compile option to Dockerfile to fix failing test (fixes #696) (#1223) This test was failing when run inside the Docker container: `test_searchable[/fixtures/searchable.json?_search=te*+AND+do*&_searchmode=raw-expected_rows3]`, with this error: ``` def test_searchable(app_client, path, expected_rows): response = app_client.get(path) > assert expected_rows == response.json["rows"] E AssertionError: assert [[1, 'barry c...sel', 'puma']] == [] E Left contains 2 more items, first extra item: [1, 'barry cat', 'terry dog', 'panther'] E Full diff: E + [] E - [[1, 'barry cat', 'terry dog', 'panther'], E - [2, 'terry dog', 'sara weasel', 'puma']] ``` The issue was that the version of sqlite3 built inside the Docker container was built with FTS3 and FTS4 enabled, but without the `SQLITE_ENABLE_FTS3_PARENTHESIS` compile option passed, which adds support for using `AND` and `NOT` within `match` expressions (see https://sqlite.org/fts3.html#compiling_and_enabling_fts3_and_fts4 and https://www.sqlite.org/compile.html). 
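A quick way to check which FTS options a given SQLite build was compiled with (illustrative; not part of this patch) is to read `PRAGMA compile_options` from Python:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
options = [row[0] for row in conn.execute("PRAGMA compile_options")]
# A build produced by the updated Dockerfile should list ENABLE_FTS3_PARENTHESIS
# alongside ENABLE_FTS3 and ENABLE_FTS4.
print([o for o in options if "FTS" in o])
```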
Without this, the `AND` used in the search in this test was being interpreted as a literal string, and so no matches were found. Adding this compile option fixes this. Thanks, @bobwhitelock --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index aba701ab..f4b14146 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,7 +7,7 @@ RUN apt update \ RUN wget "https://www.sqlite.org/2020/sqlite-autoconf-3310100.tar.gz" && tar xzf sqlite-autoconf-3310100.tar.gz \ - && cd sqlite-autoconf-3310100 && ./configure --disable-static --enable-fts5 --enable-json1 CFLAGS="-g -O2 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_JSON1" \ + && cd sqlite-autoconf-3310100 && ./configure --disable-static --enable-fts5 --enable-json1 CFLAGS="-g -O2 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS -DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_JSON1" \ && make && make install RUN wget "http://www.gaia-gis.it/gaia-sins/freexl-sources/freexl-1.0.5.tar.gz" && tar zxf freexl-1.0.5.tar.gz \ From a1bcd2fbe5e47bb431045f65eeceb5eb3a6718d5 Mon Sep 17 00:00:00 2001 From: Jean-Baptiste Pressac Date: Wed, 10 Mar 2021 19:26:39 +0100 Subject: [PATCH 0161/1455] Minor typo in IP adress (#1256) 127.0.01 replaced by 127.0.0.1 --- docs/deploying.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/deploying.rst b/docs/deploying.rst index 0f892f83..48261b59 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -67,7 +67,7 @@ You can start the Datasette process running using the following:: You can confirm that Datasette is running on port 8000 like so:: - curl 127.0.01:8000/-/versions.json + curl 127.0.0.1:8000/-/versions.json # Should output JSON showing the installed version Datasette will not be accessible from outside the server because it is listening on ``127.0.0.1``. You can expose it by instead listening on ``0.0.0.0``, but a better way is to set up a proxy such as ``nginx``. From 8e18c7943181f228ce5ebcea48deb59ce50bee1f Mon Sep 17 00:00:00 2001 From: Konstantin Baikov <4488943+kbaikov@users.noreply.github.com> Date: Thu, 11 Mar 2021 17:15:49 +0100 Subject: [PATCH 0162/1455] Use context manager instead of plain open (#1211) Context manager with open closes the files after usage. When the object is already a pathlib.Path i used read_text write_text functions In some cases pathlib.Path.open were used in context manager, it is basically the same as builtin open. Thanks, Konstantin Baikov! 
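Roughly, these are the two patterns this commit standardises on (the file name is just an example):

```python
from pathlib import Path

# Plain string paths: open via a context manager so the file is always closed.
with open("metadata.json", "w") as fp:
    fp.write("{}")

# Existing pathlib.Path objects: read_text() / write_text() manage the file handle.
content = Path("metadata.json").read_text()
```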
--- datasette/app.py | 13 ++++++------- datasette/cli.py | 13 +++++++------ datasette/publish/cloudrun.py | 6 ++++-- datasette/publish/heroku.py | 17 ++++++++++------- datasette/utils/__init__.py | 6 ++++-- setup.py | 3 ++- tests/conftest.py | 6 ++---- tests/fixtures.py | 5 +++-- tests/test_cli.py | 3 ++- tests/test_cli_serve_get.py | 3 ++- tests/test_docs.py | 8 ++++---- tests/test_package.py | 6 ++++-- tests/test_plugins.py | 3 ++- tests/test_publish_cloudrun.py | 32 ++++++++++++++++++++------------ tests/test_publish_heroku.py | 12 ++++++++---- tests/test_utils.py | 18 ++++++++++++------ update-docs-help.py | 2 +- 17 files changed, 93 insertions(+), 63 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 02d432df..f43ec205 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -212,7 +212,7 @@ class Datasette: and (config_dir / "inspect-data.json").exists() and not inspect_data ): - inspect_data = json.load((config_dir / "inspect-data.json").open()) + inspect_data = json.loads((config_dir / "inspect-data.json").read_text()) if immutables is None: immutable_filenames = [i["file"] for i in inspect_data.values()] immutables = [ @@ -269,7 +269,7 @@ class Datasette: if config_dir and (config_dir / "config.json").exists(): raise StartupError("config.json should be renamed to settings.json") if config_dir and (config_dir / "settings.json").exists() and not config: - config = json.load((config_dir / "settings.json").open()) + config = json.loads((config_dir / "settings.json").read_text()) self._settings = dict(DEFAULT_SETTINGS, **(config or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note @@ -450,11 +450,10 @@ class Datasette: def app_css_hash(self): if not hasattr(self, "_app_css_hash"): - self._app_css_hash = hashlib.sha1( - open(os.path.join(str(app_root), "datasette/static/app.css")) - .read() - .encode("utf8") - ).hexdigest()[:6] + with open(os.path.join(str(app_root), "datasette/static/app.css")) as fp: + self._app_css_hash = hashlib.sha1(fp.read().encode("utf8")).hexdigest()[ + :6 + ] return self._app_css_hash async def get_canned_queries(self, database_name, actor): diff --git a/datasette/cli.py b/datasette/cli.py index 96a41740..2fa039a0 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -125,13 +125,13 @@ def cli(): @sqlite_extensions def inspect(files, inspect_file, sqlite_extensions): app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions) - if inspect_file == "-": - out = sys.stdout - else: - out = open(inspect_file, "w") loop = asyncio.get_event_loop() inspect_data = loop.run_until_complete(inspect_(files, sqlite_extensions)) - out.write(json.dumps(inspect_data, indent=2)) + if inspect_file == "-": + sys.stdout.write(json.dumps(inspect_data, indent=2)) + else: + with open(inspect_file, "w") as fp: + fp.write(json.dumps(inspect_data, indent=2)) async def inspect_(files, sqlite_extensions): @@ -475,7 +475,8 @@ def serve( inspect_data = None if inspect_file: - inspect_data = json.load(open(inspect_file)) + with open(inspect_file) as fp: + inspect_data = json.load(fp) metadata_data = None if metadata: diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 7f9e89e2..bad223a1 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -141,9 +141,11 @@ def publish_subcommand(publish): if show_files: if os.path.exists("metadata.json"): print("=== metadata.json ===\n") - print(open("metadata.json").read()) + with open("metadata.json") as fp: 
+ print(fp.read()) print("\n==== Dockerfile ====\n") - print(open("Dockerfile").read()) + with open("Dockerfile") as fp: + print(fp.read()) print("\n====================\n") image_id = f"gcr.io/{project}/{name}" diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index c0c70e12..19fe3fbe 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -171,9 +171,11 @@ def temporary_heroku_directory( os.chdir(tmp.name) if metadata_content: - open("metadata.json", "w").write(json.dumps(metadata_content, indent=2)) + with open("metadata.json", "w") as fp: + fp.write(json.dumps(metadata_content, indent=2)) - open("runtime.txt", "w").write("python-3.8.7") + with open("runtime.txt", "w") as fp: + fp.write("python-3.8.7") if branch: install = [ @@ -182,11 +184,11 @@ def temporary_heroku_directory( else: install = ["datasette"] + list(install) - open("requirements.txt", "w").write("\n".join(install)) + with open("requirements.txt", "w") as fp: + fp.write("\n".join(install)) os.mkdir("bin") - open("bin/post_compile", "w").write( - "datasette inspect --inspect-file inspect-data.json" - ) + with open("bin/post_compile", "w") as fp: + fp.write("datasette inspect --inspect-file inspect-data.json") extras = [] if template_dir: @@ -218,7 +220,8 @@ def temporary_heroku_directory( procfile_cmd = "web: datasette serve --host 0.0.0.0 {quoted_files} --cors --port $PORT --inspect-file inspect-data.json {extras}".format( quoted_files=quoted_files, extras=" ".join(extras) ) - open("Procfile", "w").write(procfile_cmd) + with open("Procfile", "w") as fp: + fp.write(procfile_cmd) for path, filename in zip(file_paths, file_names): link_or_copy(path, os.path.join(tmp.name, filename)) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 47ca0551..1fedb69c 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -428,8 +428,10 @@ def temporary_docker_directory( ) os.chdir(datasette_dir) if metadata_content: - open("metadata.json", "w").write(json.dumps(metadata_content, indent=2)) - open("Dockerfile", "w").write(dockerfile) + with open("metadata.json", "w") as fp: + fp.write(json.dumps(metadata_content, indent=2)) + with open("Dockerfile", "w") as fp: + fp.write(dockerfile) for path, filename in zip(file_paths, file_names): link_or_copy(path, os.path.join(datasette_dir, filename)) if template_dir: diff --git a/setup.py b/setup.py index 15ee63fe..3540e30a 100644 --- a/setup.py +++ b/setup.py @@ -17,7 +17,8 @@ def get_version(): os.path.dirname(os.path.abspath(__file__)), "datasette", "version.py" ) g = {} - exec(open(path).read(), g) + with open(path) as fp: + exec(fp.read(), g) return g["__version__"] diff --git a/tests/conftest.py b/tests/conftest.py index b00ea006..ad3eb9f1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -75,10 +75,8 @@ def check_permission_actions_are_documented(): from datasette.plugins import pm content = ( - (pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst") - .open() - .read() - ) + pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst" + ).read_text() permissions_re = re.compile(r"\.\. 
_permissions_([^\s:]+):") documented_permission_actions = set(permissions_re.findall(content)).union( UNDOCUMENTED_PERMISSIONS diff --git a/tests/fixtures.py b/tests/fixtures.py index 30113ff2..2fd8e9cb 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -789,7 +789,8 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename): conn.executescript(GENERATED_COLUMNS_SQL) print(f"Test tables written to {db_filename}") if metadata: - open(metadata, "w").write(json.dumps(METADATA, indent=4)) + with open(metadata, "w") as fp: + fp.write(json.dumps(METADATA, indent=4)) print(f"- metadata written to {metadata}") if plugins_path: path = pathlib.Path(plugins_path) @@ -798,7 +799,7 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename): test_plugins = pathlib.Path(__file__).parent / "plugins" for filepath in test_plugins.glob("*.py"): newpath = path / filepath.name - newpath.write_text(filepath.open().read()) + newpath.write_text(filepath.read_text()) print(f" Wrote plugin: {newpath}") if extra_db_filename: if pathlib.Path(extra_db_filename).exists(): diff --git a/tests/test_cli.py b/tests/test_cli.py index 8ddd32f6..e094ccb6 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -49,7 +49,8 @@ def test_inspect_cli_writes_to_file(app_client): cli, ["inspect", "fixtures.db", "--inspect-file", "foo.json"] ) assert 0 == result.exit_code, result.output - data = json.load(open("foo.json")) + with open("foo.json") as fp: + data = json.load(fp) assert ["fixtures"] == list(data.keys()) diff --git a/tests/test_cli_serve_get.py b/tests/test_cli_serve_get.py index aaa692e5..90fbfe3b 100644 --- a/tests/test_cli_serve_get.py +++ b/tests/test_cli_serve_get.py @@ -14,7 +14,8 @@ def test_serve_with_get(tmp_path_factory): @hookimpl def startup(datasette): - open("{}", "w").write("hello") + with open("{}", "w") as fp: + fp.write("hello") """.format( str(plugins_dir / "hello.txt") ), diff --git a/tests/test_docs.py b/tests/test_docs.py index 44b0810a..efd267b9 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -19,13 +19,13 @@ def get_headings(content, underline="-"): def get_labels(filename): - content = (docs_path / filename).open().read() + content = (docs_path / filename).read_text() return set(label_re.findall(content)) @pytest.fixture(scope="session") def settings_headings(): - return get_headings((docs_path / "settings.rst").open().read(), "~") + return get_headings((docs_path / "settings.rst").read_text(), "~") @pytest.mark.parametrize("setting", app.SETTINGS) @@ -43,7 +43,7 @@ def test_settings_are_documented(settings_headings, setting): ), ) def test_help_includes(name, filename): - expected = open(str(docs_path / filename)).read() + expected = (docs_path / filename).read_text() runner = CliRunner() result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88) actual = f"$ datasette {name} --help\n\n{result.output}" @@ -55,7 +55,7 @@ def test_help_includes(name, filename): @pytest.fixture(scope="session") def plugin_hooks_content(): - return (docs_path / "plugin_hooks.rst").open().read() + return (docs_path / "plugin_hooks.rst").read_text() @pytest.mark.parametrize( diff --git a/tests/test_package.py b/tests/test_package.py index 3248b3a4..bb939643 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -32,7 +32,8 @@ def test_package(mock_call, mock_which): capture = CaptureDockerfile() mock_call.side_effect = capture with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + 
fp.write("data") result = runner.invoke(cli.cli, ["package", "test.db", "--secret", "sekrit"]) assert 0 == result.exit_code mock_call.assert_has_calls([mock.call(["docker", "build", "."])]) @@ -47,7 +48,8 @@ def test_package_with_port(mock_call, mock_which): mock_call.side_effect = capture runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["package", "test.db", "-p", "8080", "--secret", "sekrit"] ) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 715c7c17..ee6f1efa 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -225,7 +225,8 @@ def test_plugin_config_env_from_list(app_client): def test_plugin_config_file(app_client): - open(TEMP_PLUGIN_SECRET_FILE, "w").write("FROM_FILE") + with open(TEMP_PLUGIN_SECRET_FILE, "w") as fp: + fp.write("FROM_FILE") assert {"foo": "FROM_FILE"} == app_client.ds.plugin_config("file-plugin") # Ensure secrets aren't visible in /-/metadata.json metadata = app_client.get("/-/metadata.json") diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index 2ef90705..7881ebae 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -11,7 +11,8 @@ def test_publish_cloudrun_requires_gcloud(mock_which): mock_which.return_value = False runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"]) assert result.exit_code == 1 assert "Publishing to Google Cloud requires gcloud" in result.output @@ -40,7 +41,8 @@ def test_publish_cloudrun_prompts_for_service( mock_which.return_value = True runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["publish", "cloudrun", "test.db"], input="input-service" ) @@ -81,7 +83,8 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which): mock_which.return_value = True runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"] ) @@ -120,7 +123,8 @@ def test_publish_cloudrun_memory( mock_which.return_value = True runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["publish", "cloudrun", "test.db", "--service", "test", "--memory", memory], @@ -152,17 +156,19 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which): runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") - open("metadata.yml", "w").write( - textwrap.dedent( - """ + with open("test.db", "w") as fp: + fp.write("data") + with open("metadata.yml", "w") as fp: + fp.write( + textwrap.dedent( + """ title: Hello from metadata YAML plugins: datasette-auth-github: foo: bar """ - ).strip() - ) + ).strip() + ) result = runner.invoke( cli.cli, [ @@ -228,7 +234,8 @@ def test_publish_cloudrun_apt_get_install(mock_call, mock_output, mock_which): runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, [ @@ -295,7 +302,8 @@ def 
test_publish_cloudrun_extra_options( runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, [ diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py index c7a38031..c011ab43 100644 --- a/tests/test_publish_heroku.py +++ b/tests/test_publish_heroku.py @@ -8,7 +8,8 @@ def test_publish_heroku_requires_heroku(mock_which): mock_which.return_value = False runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"]) assert result.exit_code == 1 assert "Publishing to Heroku requires heroku" in result.output @@ -22,7 +23,8 @@ def test_publish_heroku_installs_plugin(mock_call, mock_check_output, mock_which mock_check_output.side_effect = lambda s: {"['heroku', 'plugins']": b""}[repr(s)] runner = CliRunner() with runner.isolated_filesystem(): - open("t.db", "w").write("data") + with open("t.db", "w") as fp: + fp.write("data") result = runner.invoke(cli.cli, ["publish", "heroku", "t.db"], input="y\n") assert 0 != result.exit_code mock_check_output.assert_has_calls( @@ -54,7 +56,8 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which): }[repr(s)] runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["publish", "heroku", "test.db", "--tar", "gtar"] ) @@ -88,7 +91,8 @@ def test_publish_heroku_plugin_secrets(mock_call, mock_check_output, mock_which) }[repr(s)] runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, [ diff --git a/tests/test_utils.py b/tests/test_utils.py index 56306339..ecef6f7a 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -232,7 +232,8 @@ def test_to_css_class(s, expected): def test_temporary_docker_directory_uses_hard_link(): with tempfile.TemporaryDirectory() as td: os.chdir(td) - open("hello", "w").write("world") + with open("hello", "w") as fp: + fp.write("world") # Default usage of this should use symlink with utils.temporary_docker_directory( files=["hello"], @@ -249,7 +250,8 @@ def test_temporary_docker_directory_uses_hard_link(): secret="secret", ) as temp_docker: hello = os.path.join(temp_docker, "hello") - assert "world" == open(hello).read() + with open(hello) as fp: + assert "world" == fp.read() # It should be a hard link assert 2 == os.stat(hello).st_nlink @@ -260,7 +262,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link): mock_link.side_effect = OSError with tempfile.TemporaryDirectory() as td: os.chdir(td) - open("hello", "w").write("world") + with open("hello", "w") as fp: + fp.write("world") # Default usage of this should use symlink with utils.temporary_docker_directory( files=["hello"], @@ -277,7 +280,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link): secret=None, ) as temp_docker: hello = os.path.join(temp_docker, "hello") - assert "world" == open(hello).read() + with open(hello) as fp: + assert "world" == fp.read() # It should be a copy, not a hard link assert 1 == os.stat(hello).st_nlink @@ -285,7 +289,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link): def test_temporary_docker_directory_quotes_args(): with 
tempfile.TemporaryDirectory() as td: os.chdir(td) - open("hello", "w").write("world") + with open("hello", "w") as fp: + fp.write("world") with utils.temporary_docker_directory( files=["hello"], name="t", @@ -301,7 +306,8 @@ def test_temporary_docker_directory_quotes_args(): secret="secret", ) as temp_docker: df = os.path.join(temp_docker, "Dockerfile") - df_contents = open(df).read() + with open(df) as fp: + df_contents = fp.read() assert "'$PWD'" in df_contents assert "'--$HOME'" in df_contents assert "ENV DATASETTE_SECRET 'secret'" in df_contents diff --git a/update-docs-help.py b/update-docs-help.py index 3a192575..292d1dcd 100644 --- a/update-docs-help.py +++ b/update-docs-help.py @@ -18,7 +18,7 @@ def update_help_includes(): result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88) actual = f"$ datasette {name} --help\n\n{result.output}" actual = actual.replace("Usage: cli ", "Usage: datasette ") - open(docs_path / filename, "w").write(actual) + (docs_path / filename).write_text(actual) if __name__ == "__main__": From c4f1ec7f33fd7d5b93f0f895dafb5351cc3bfc5b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 20 Mar 2021 14:32:23 -0700 Subject: [PATCH 0163/1455] Documentation for Response.asgi_send(), closes #1266 --- docs/internals.rst | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/docs/internals.rst b/docs/internals.rst index e3bb83fd..18032406 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -138,6 +138,28 @@ Each of these responses will use the correct corresponding content-type - ``text Each of the helper methods take optional ``status=`` and ``headers=`` arguments, documented above. +.. _internals_response_asgi_send: + +Returning a response with .asgi_send(send) +------------------------------------------ + + +In most cases you will return ``Response`` objects from your own view functions. You can also use a ``Response`` instance to respond at a lower level via ASGI, for example if you are writing code that uses the :ref:`plugin_asgi_wrapper` hook. + +Create a ``Response`` object and then use ``await response.asgi_send(send)``, passing the ASGI ``send`` function. For example: + +.. code-block:: python + + async def require_authorization(scope, recieve, send): + response = Response.text( + "401 Authorization Required", + headers={ + "www-authenticate": 'Basic realm="Datasette", charset="UTF-8"' + }, + status=401, + ) + await response.asgi_send(send) + .. 
_internals_response_set_cookie: Setting cookies with response.set_cookie() From 6ad544df5e6bd027a8e27317041e6168aee07459 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 23 Mar 2021 09:19:41 -0700 Subject: [PATCH 0164/1455] Fixed master -> main in a bunch of places, mainly docs --- datasette/cli.py | 2 +- datasette/publish/common.py | 2 +- datasette/templates/patterns.html | 16 ++++++++-------- docs/contributing.rst | 2 +- docs/custom_templates.rst | 2 +- docs/datasette-package-help.txt | 2 +- docs/datasette-publish-cloudrun-help.txt | 2 +- docs/datasette-publish-heroku-help.txt | 2 +- docs/plugin_hooks.rst | 4 ++-- docs/publish.rst | 4 ++-- docs/spatialite.rst | 2 +- tests/fixtures.py | 4 ++-- tests/test_html.py | 9 ++++----- 13 files changed, 26 insertions(+), 27 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 2fa039a0..42b5c115 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -191,7 +191,7 @@ def plugins(all, plugins_dir): help="Path to JSON/YAML file containing metadata to publish", ) @click.option("--extra-options", help="Extra options to pass to datasette serve") -@click.option("--branch", help="Install datasette from a GitHub branch e.g. master") +@click.option("--branch", help="Install datasette from a GitHub branch e.g. main") @click.option( "--template-dir", type=click.Path(exists=True, file_okay=False, dir_okay=True), diff --git a/datasette/publish/common.py b/datasette/publish/common.py index b6570290..29665eb3 100644 --- a/datasette/publish/common.py +++ b/datasette/publish/common.py @@ -19,7 +19,7 @@ def add_common_publish_arguments_and_options(subcommand): "--extra-options", help="Extra options to pass to datasette serve" ), click.option( - "--branch", help="Install datasette from a GitHub branch e.g. master" + "--branch", help="Install datasette from a GitHub branch e.g. main" ), click.option( "--template-dir", diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html index 984c1bf6..3f9b5a16 100644 --- a/datasette/templates/patterns.html +++ b/datasette/templates/patterns.html @@ -70,10 +70,10 @@

Data license: - Apache License 2.0 + Apache License 2.0 · Data source: - + tests/fixtures.py · About: @@ -118,10 +118,10 @@

Data license: - Apache License 2.0 + Apache License 2.0 · Data source: - + tests/fixtures.py · About: @@ -177,10 +177,10 @@

Data license: - Apache License 2.0 + Apache License 2.0 · Data source: - + tests/fixtures.py · About: @@ -478,10 +478,10 @@