From b28657672f1023d32c4afbecd0e7232ab417bc88 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 27 Jun 2020 20:29:24 -0700 Subject: [PATCH 0001/1705] Added register_magic_plugins hook to changelog, refs #842 --- docs/changelog.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index d9f48ecf..db51423e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -17,7 +17,9 @@ Changelog (user_id, timestamp) values (:_actor_id, :_timestamp_datetime_utc) - This inserts the currently authenticated actor ID and the current datetime. (`#842 `__) + + This inserts the currently authenticated actor ID and the current datetime. (`#842 `__) +- New :ref:`plugin_hook_register_magic_parameters` plugin hook. - New :ref:`plugin_hook_startup` plugin hook. (`#834 `__) - New :ref:`plugin_hook_canned_queries` plugin hook. See `datasette-saved-queries `__ for an example of this hook in action. (`#852 `__) - Workaround for "Too many open files" error in test runs. (`#846 `__) From 8b25b14de17e50edca4f5a5fe8001587d9e4006c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Jun 2020 09:09:43 -0700 Subject: [PATCH 0002/1705] Added note about unit testing the startup() hook --- docs/plugin_hooks.rst | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 8683bee8..ccd4ca3d 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -705,6 +705,18 @@ Potential use-cases: * Create database tables that a plugin needs on startup * Validate the metadata configuration for a plugin on startup, and raise an error if it is invalid +.. note:: + + If you are writing :ref:`unit tests ` for a plugin that uses this hook you will need to explicitly call ``await ds.invoke_startup()`` in your tests. An example: + + .. code-block:: python + + @pytest.mark.asyncio + async def test_my_plugin(): + ds = Datasette([], metadata={}) + await ds.invoke_startup() + # Rest of test goes here + Example: `datasette-saved-queries `__ .. _plugin_hook_canned_queries: From 99fba0fad35ab81f39e68568ea4afa94b88ab232 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Jun 2020 12:37:50 -0700 Subject: [PATCH 0003/1705] Link to datasette-init plugin hook, refs #834 --- docs/ecosystem.rst | 5 +++++ docs/plugin_hooks.rst | 4 ++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst index 7c8959dd..72a2845e 100644 --- a/docs/ecosystem.rst +++ b/docs/ecosystem.rst @@ -107,6 +107,11 @@ datasette-json-html `datasette-json-html `__ renders HTML in Datasette's table view driven by JSON returned from your SQL queries. This provides a way to embed images, links and lists of links directly in Datasette's main interface, defined using custom SQL statements. +datasette-init +-------------- + +`datasette-init `__ allows you to define tables and views in your metadata file that should be created on startup if they do not already exist. + datasette-media --------------- diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index ccd4ca3d..de10a551 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -53,7 +53,7 @@ arguments and can be called like this:: select random_integer(1, 10); -Examples: `datasette-jellyfish `_, `datasette-jq `_, `datasette-haversine `__, `datasette-rure `__ +Examples: `datasette-jellyfish `__, `datasette-jq `__, `datasette-haversine `__, `datasette-rure `__ .. 
_plugin_hook_prepare_jinja2_environment: @@ -717,7 +717,7 @@ Potential use-cases: await ds.invoke_startup() # Rest of test goes here -Example: `datasette-saved-queries `__ +Examples: `datasette-saved-queries `__, `datasette-init `__ .. _plugin_hook_canned_queries: From 0991ea75cc7b265389aa8362414a305ba532d31a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Jun 2020 12:45:34 -0700 Subject: [PATCH 0004/1705] Renamed _timestamp to _now, refs #842, closes #871 --- datasette/default_magic_parameters.py | 4 ++-- docs/changelog.rst | 2 +- docs/sql_queries.rst | 8 ++++---- tests/fixtures.py | 2 +- tests/test_canned_queries.py | 6 +++--- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/datasette/default_magic_parameters.py b/datasette/default_magic_parameters.py index ac7c5eac..b788fee8 100644 --- a/datasette/default_magic_parameters.py +++ b/datasette/default_magic_parameters.py @@ -21,7 +21,7 @@ def cookie(key, request): return request.cookies[key] -def timestamp(key, request): +def now(key, request): if key == "epoch": return int(time.time()) elif key == "date_utc": @@ -50,6 +50,6 @@ def register_magic_parameters(): ("header", header), ("actor", actor), ("cookie", cookie), - ("timestamp", timestamp), + ("now", now), ("random", random), ] diff --git a/docs/changelog.rst b/docs/changelog.rst index db51423e..55f274a8 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -16,7 +16,7 @@ Changelog insert into logs (user_id, timestamp) values - (:_actor_id, :_timestamp_datetime_utc) + (:_actor_id, :_now_datetime_utc) This inserts the currently authenticated actor ID and the current datetime. (`#842 `__) - New :ref:`plugin_hook_register_magic_parameters` plugin hook. diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index aff16c1a..dd7743cf 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -292,13 +292,13 @@ Available magic parameters are: ``_cookie_*`` - e.g. ``_cookie_lang`` The value of the incoming cookie of that name. -``_timestamp_epoch`` +``_now_epoch`` The number of seconds since the Unix epoch. -``_timestamp_date_utc`` +``_now_date_utc`` The date in UTC, e.g. ``2020-06-01`` -``_timestamp_datetime_utc`` +``_now_datetime_utc`` The ISO 8601 datetime in UTC, e.g. ``2020-06-24T18:01:07Z`` ``_random_chars_*`` - e.g. 
``_random_chars_128`` @@ -318,7 +318,7 @@ Here's an example configuration (this time using ``metadata.yaml`` since that pr INSERT INTO messages ( user_id, ip, message, datetime ) VALUES ( - :_actor_id, :_request_ip, :message, :_timestamp_datetime_utc + :_actor_id, :_request_ip, :message, :_now_datetime_utc ) write: true diff --git a/tests/fixtures.py b/tests/fixtures.py index f3fdc468..d103fa35 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -461,7 +461,7 @@ METADATA = { "queries": { "𝐜𝐢𝐭𝐢𝐞𝐬": "select id, name from facet_cities order by id limit 1;", "pragma_cache_size": "PRAGMA cache_size;", - "magic_parameters": "select :_header_user_agent as user_agent, :_timestamp_datetime_utc as datetime", + "magic_parameters": "select :_header_user_agent as user_agent, :_now_datetime_utc as datetime", "neighborhood_search": { "sql": textwrap.dedent( """ diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index 3942dc98..2e064db1 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -187,9 +187,9 @@ def magic_parameters_client(): ("_actor_id", "root"), ("_header_host", "localhost"), ("_cookie_foo", "bar"), - ("_timestamp_epoch", r"^\d+$"), - ("_timestamp_date_utc", r"^\d{4}-\d{2}-\d{2}$"), - ("_timestamp_datetime_utc", r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z$"), + ("_now_epoch", r"^\d+$"), + ("_now_date_utc", r"^\d{4}-\d{2}-\d{2}$"), + ("_now_datetime_utc", r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z$"), ("_random_chars_1", r"^\w$"), ("_random_chars_10", r"^\w{10}$"), ], From a8bcafc1775c8a8655b365ae22a3d64f6361c74a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Jun 2020 13:45:17 -0700 Subject: [PATCH 0005/1705] Refactored out AsgiRouter, refs #870 --- datasette/app.py | 34 ++++++++++++++++++++++----- datasette/utils/asgi.py | 52 ----------------------------------------- datasette/views/base.py | 1 - 3 files changed, 28 insertions(+), 59 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 0437a75b..bff01bc1 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -25,7 +25,7 @@ from jinja2.environment import Template from jinja2.exceptions import TemplateNotFound import uvicorn -from .views.base import DatasetteError, ureg, AsgiRouter +from .views.base import DatasetteError, ureg from .views.database import DatabaseDownload, DatabaseView from .views.index import IndexView from .views.special import ( @@ -902,10 +902,23 @@ class Datasette: return asgi -class DatasetteRouter(AsgiRouter): +class DatasetteRouter: def __init__(self, datasette, routes): self.ds = datasette - super().__init__(routes) + routes = routes or [] + self.routes = [ + # Compile any strings to regular expressions + ((re.compile(pattern) if isinstance(pattern, str) else pattern), view) + for pattern, view in routes + ] + + async def __call__(self, scope, receive, send): + # Because we care about "foo/bar" v.s. 
"foo%2Fbar" we decode raw_path ourselves + path = scope["path"] + raw_path = scope.get("raw_path") + if raw_path: + path = raw_path.decode("ascii") + return await self.route_path(scope, receive, send, path) async def route_path(self, scope, receive, send, path): # Strip off base_url if present before routing @@ -933,9 +946,18 @@ class DatasetteRouter(AsgiRouter): if actor: break scope_modifications["actor"] = actor or default_actor - return await super().route_path( - dict(scope, **scope_modifications), receive, send, path - ) + scope = dict(scope, **scope_modifications) + for regex, view in self.routes: + match = regex.match(path) + if match is not None: + new_scope = dict(scope, url_route={"kwargs": match.groupdict()}) + try: + return await view(new_scope, receive, send) + except NotFound as exception: + return await self.handle_404(scope, receive, send, exception) + except Exception as exception: + return await self.handle_500(scope, receive, send, exception) + return await self.handle_404(scope, receive, send) async def handle_404(self, scope, receive, send, exception=None): # If URL has a trailing slash, redirect to URL without it diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 5a152570..615bc0ab 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -118,58 +118,6 @@ class Request: return cls(scope, None) -class AsgiRouter: - def __init__(self, routes=None): - routes = routes or [] - self.routes = [ - # Compile any strings to regular expressions - ((re.compile(pattern) if isinstance(pattern, str) else pattern), view) - for pattern, view in routes - ] - - async def __call__(self, scope, receive, send): - # Because we care about "foo/bar" v.s. "foo%2Fbar" we decode raw_path ourselves - path = scope["path"] - raw_path = scope.get("raw_path") - if raw_path: - path = raw_path.decode("ascii") - return await self.route_path(scope, receive, send, path) - - async def route_path(self, scope, receive, send, path): - for regex, view in self.routes: - match = regex.match(path) - if match is not None: - new_scope = dict(scope, url_route={"kwargs": match.groupdict()}) - try: - return await view(new_scope, receive, send) - except NotFound as exception: - return await self.handle_404(scope, receive, send, exception) - except Exception as exception: - return await self.handle_500(scope, receive, send, exception) - return await self.handle_404(scope, receive, send) - - async def handle_404(self, scope, receive, send, exception=None): - await send( - { - "type": "http.response.start", - "status": 404, - "headers": [[b"content-type", b"text/html; charset=utf-8"]], - } - ) - await send({"type": "http.response.body", "body": b"
<h1>404</h1>
"}) - - async def handle_500(self, scope, receive, send, exception): - await send( - { - "type": "http.response.start", - "status": 404, - "headers": [[b"content-type", b"text/html; charset=utf-8"]], - } - ) - html = "
<h1>500</h1><pre>{}</pre>
".format(escape(repr(exception))) - await send({"type": "http.response.body", "body": html.encode("utf-8")}) - - class AsgiLifespan: def __init__(self, app, on_startup=None, on_shutdown=None): self.app = app diff --git a/datasette/views/base.py b/datasette/views/base.py index f14e6d3a..821a6f0e 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -27,7 +27,6 @@ from datasette.utils import ( from datasette.utils.asgi import ( AsgiStream, AsgiWriter, - AsgiRouter, AsgiView, Forbidden, NotFound, From 3bc2461c77ecba3e1a95301dd440a9bef56b1283 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Jun 2020 16:06:30 -0700 Subject: [PATCH 0006/1705] Refactored AsgiView into BaseView, refs #870 --- datasette/utils/asgi.py | 27 --------------------------- datasette/views/base.py | 28 +++++++++++++++++++++++++--- 2 files changed, 25 insertions(+), 30 deletions(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 615bc0ab..e1ccd17b 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -147,33 +147,6 @@ class AsgiLifespan: await self.app(scope, receive, send) -class AsgiView: - async def dispatch_request(self, request, *args, **kwargs): - handler = getattr(self, request.method.lower(), None) - return await handler(request, *args, **kwargs) - - @classmethod - def as_asgi(cls, *class_args, **class_kwargs): - async def view(scope, receive, send): - # Uses scope to create a request object, then dispatches that to - # self.get(...) or self.options(...) along with keyword arguments - # that were already tucked into scope["url_route"]["kwargs"] by - # the router, similar to how Django Channels works: - # https://channels.readthedocs.io/en/latest/topics/routing.html#urlrouter - request = Request(scope, receive) - self = view.view_class(*class_args, **class_kwargs) - response = await self.dispatch_request( - request, **scope["url_route"]["kwargs"] - ) - await response.asgi_send(send) - - view.view_class = cls - view.__doc__ = cls.__doc__ - view.__module__ = cls.__module__ - view.__name__ = cls.__name__ - return view - - class AsgiStream: def __init__(self, stream_fn, status=200, headers=None, content_type="text/plain"): self.stream_fn = stream_fn diff --git a/datasette/views/base.py b/datasette/views/base.py index 821a6f0e..72fb6b6b 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -27,9 +27,9 @@ from datasette.utils import ( from datasette.utils.asgi import ( AsgiStream, AsgiWriter, - AsgiView, Forbidden, NotFound, + Request, Response, ) @@ -55,7 +55,7 @@ class DatasetteError(Exception): self.messagge_is_html = messagge_is_html -class BaseView(AsgiView): +class BaseView: ds = None async def head(self, *args, **kwargs): @@ -90,7 +90,8 @@ class BaseView(AsgiView): ) except BadSignature: pass - response = await super().dispatch_request(request, *args, **kwargs) + handler = getattr(self, request.method.lower(), None) + response = await handler(request, *args, **kwargs) if self.ds: self.ds._write_messages_to_response(request, response) return response @@ -118,6 +119,27 @@ class BaseView(AsgiView): ) ) + @classmethod + def as_asgi(cls, *class_args, **class_kwargs): + async def view(scope, receive, send): + # Uses scope to create a request object, then dispatches that to + # self.get(...) or self.options(...) 
along with keyword arguments + # that were already tucked into scope["url_route"]["kwargs"] by + # the router, similar to how Django Channels works: + # https://channels.readthedocs.io/en/latest/topics/routing.html#urlrouter + request = Request(scope, receive) + self = view.view_class(*class_args, **class_kwargs) + response = await self.dispatch_request( + request, **scope["url_route"]["kwargs"] + ) + await response.asgi_send(send) + + view.view_class = cls + view.__doc__ = cls.__doc__ + view.__module__ = cls.__module__ + view.__name__ = cls.__name__ + return view + class DataView(BaseView): name = "" From 4dad0284327738acd88316d0e35129a78a1a1b47 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Jun 2020 16:47:40 -0700 Subject: [PATCH 0007/1705] BaseView.as_asgi is now .as_view, refs #870 --- datasette/app.py | 34 +++++++++++++++++----------------- datasette/utils/asgi.py | 4 ++-- datasette/views/base.py | 12 +++--------- 3 files changed, 22 insertions(+), 28 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index bff01bc1..f4be24cd 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -794,7 +794,7 @@ class Datasette: # Generate a regex snippet to match all registered renderer file extensions renderer_regex = "|".join(r"\." + key for key in self.renderers.keys()) - add_route(IndexView.as_asgi(self), r"/(?P(\.jsono?)?$)") + add_route(IndexView.as_view(self), r"/(?P(\.jsono?)?$)") # TODO: /favicon.ico and /-/static/ deserve far-future cache expires add_route(favicon, "/favicon.ico") @@ -819,62 +819,62 @@ class Datasette: ), ) add_route( - JsonDataView.as_asgi(self, "metadata.json", lambda: self._metadata), + JsonDataView.as_view(self, "metadata.json", lambda: self._metadata), r"/-/metadata(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "versions.json", self._versions), + JsonDataView.as_view(self, "versions.json", self._versions), r"/-/versions(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi( + JsonDataView.as_view( self, "plugins.json", self._plugins, needs_request=True ), r"/-/plugins(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "config.json", lambda: self._config), + JsonDataView.as_view(self, "config.json", lambda: self._config), r"/-/config(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "threads.json", self._threads), + JsonDataView.as_view(self, "threads.json", self._threads), r"/-/threads(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "databases.json", self._connected_databases), + JsonDataView.as_view(self, "databases.json", self._connected_databases), r"/-/databases(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "actor.json", self._actor, needs_request=True), + JsonDataView.as_view(self, "actor.json", self._actor, needs_request=True), r"/-/actor(?P(\.json)?)$", ) add_route( - AuthTokenView.as_asgi(self), r"/-/auth-token$", + AuthTokenView.as_view(self), r"/-/auth-token$", ) add_route( - PermissionsDebugView.as_asgi(self), r"/-/permissions$", + PermissionsDebugView.as_view(self), r"/-/permissions$", ) add_route( - MessagesDebugView.as_asgi(self), r"/-/messages$", + MessagesDebugView.as_view(self), r"/-/messages$", ) add_route( - PatternPortfolioView.as_asgi(self), r"/-/patterns$", + PatternPortfolioView.as_view(self), r"/-/patterns$", ) add_route( - DatabaseDownload.as_asgi(self), r"/(?P[^/]+?)(?P\.db)$" + DatabaseDownload.as_view(self), r"/(?P[^/]+?)(?P\.db)$" ) add_route( - DatabaseView.as_asgi(self), + DatabaseView.as_view(self), r"/(?P[^/]+?)(?P" + renderer_regex + r"|.jsono|\.csv)?$", ) 
add_route( - TableView.as_asgi(self), + TableView.as_view(self), r"/(?P[^/]+)/(?P[^/]+?$)", ) add_route( - RowView.as_asgi(self), + RowView.as_view(self), r"/(?P[^/]+)/(?P[^/]+?)/(?P[^/]+?)(?P" + renderer_regex + r")?$", @@ -952,7 +952,7 @@ class DatasetteRouter: if match is not None: new_scope = dict(scope, url_route={"kwargs": match.groupdict()}) try: - return await view(new_scope, receive, send) + return await view(Request(new_scope, receive), send) except NotFound as exception: return await self.handle_404(scope, receive, send, exception) except Exception as exception: diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index e1ccd17b..08c57b26 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -269,8 +269,8 @@ async def asgi_send_file( def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None): - async def inner_static(scope, receive, send): - path = scope["url_route"]["kwargs"]["path"] + async def inner_static(request, send): + path = request.scope["url_route"]["kwargs"]["path"] try: full_path = (Path(root_path) / path).resolve().absolute() except FileNotFoundError: diff --git a/datasette/views/base.py b/datasette/views/base.py index 72fb6b6b..280ae49d 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -120,17 +120,11 @@ class BaseView: ) @classmethod - def as_asgi(cls, *class_args, **class_kwargs): - async def view(scope, receive, send): - # Uses scope to create a request object, then dispatches that to - # self.get(...) or self.options(...) along with keyword arguments - # that were already tucked into scope["url_route"]["kwargs"] by - # the router, similar to how Django Channels works: - # https://channels.readthedocs.io/en/latest/topics/routing.html#urlrouter - request = Request(scope, receive) + def as_view(cls, *class_args, **class_kwargs): + async def view(request, send): self = view.view_class(*class_args, **class_kwargs) response = await self.dispatch_request( - request, **scope["url_route"]["kwargs"] + request, **request.scope["url_route"]["kwargs"] ) await response.asgi_send(send) From af350ba4571b8e3f9708c40f2ddb48fea7ac1084 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Jun 2020 17:01:33 -0700 Subject: [PATCH 0008/1705] Use single Request created in DatasetteRouter, refs #870 --- datasette/app.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f4be24cd..d4276af1 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -925,6 +925,7 @@ class DatasetteRouter: base_url = self.ds.config("base_url") if base_url != "/" and path.startswith(base_url): path = "/" + path[len(base_url) :] + request = Request(scope, receive) scope_modifications = {} # Apply force_https_urls, if set if ( @@ -936,9 +937,7 @@ class DatasetteRouter: # Handle authentication default_actor = scope.get("actor") or None actor = None - for actor in pm.hook.actor_from_request( - datasette=self.ds, request=Request(scope, receive) - ): + for actor in pm.hook.actor_from_request(datasette=self.ds, request=request): if callable(actor): actor = actor() if asyncio.iscoroutine(actor): @@ -951,8 +950,9 @@ class DatasetteRouter: match = regex.match(path) if match is not None: new_scope = dict(scope, url_route={"kwargs": match.groupdict()}) + request.scope = new_scope try: - return await view(Request(new_scope, receive), send) + return await view(request, send) except NotFound as exception: return await self.handle_404(scope, receive, send, exception) except 
Exception as exception: @@ -1079,26 +1079,26 @@ def _cleaner_task_str(task): def wrap_view(view_fn, datasette): - async def asgi_view_fn(scope, receive, send): + async def async_view_fn(request, send): if inspect.iscoroutinefunction(view_fn): response = await async_call_with_supported_arguments( view_fn, - scope=scope, - receive=receive, + scope=request.scope, + receive=request.receive, send=send, - request=Request(scope, receive), + request=request, datasette=datasette, ) else: response = call_with_supported_arguments( view_fn, - scope=scope, - receive=receive, + scope=request.scope, + receive=request.receive, send=send, - request=Request(scope, receive), + request=request, datasette=datasette, ) if response is not None: await response.asgi_send(send) - return asgi_view_fn + return async_view_fn From 7ac4936cec87f5a591e5d2680f0acefc3d35a705 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Jun 2020 17:25:35 -0700 Subject: [PATCH 0009/1705] .add_message() now works inside plugins, closes #864 Refs #870 --- datasette/app.py | 17 +++++++++++++++-- datasette/views/base.py | 17 ++--------------- tests/plugins/my_plugin.py | 7 +++++++ tests/test_plugins.py | 8 ++++++++ 4 files changed, 32 insertions(+), 17 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index d4276af1..af3dcc8b 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -5,6 +5,7 @@ import datetime import hashlib import inspect import itertools +from itsdangerous import BadSignature import json import os import re @@ -926,6 +927,14 @@ class DatasetteRouter: if base_url != "/" and path.startswith(base_url): path = "/" + path[len(base_url) :] request = Request(scope, receive) + # Populate request_messages if ds_messages cookie is present + try: + request._messages = self.ds.unsign( + request.cookies.get("ds_messages", ""), "messages" + ) + except BadSignature: + pass + scope_modifications = {} # Apply force_https_urls, if set if ( @@ -952,7 +961,11 @@ class DatasetteRouter: new_scope = dict(scope, url_route={"kwargs": match.groupdict()}) request.scope = new_scope try: - return await view(request, send) + response = await view(request, send) + if response: + self.ds._write_messages_to_response(request, response) + await response.asgi_send(send) + return except NotFound as exception: return await self.handle_404(scope, receive, send, exception) except Exception as exception: @@ -1099,6 +1112,6 @@ def wrap_view(view_fn, datasette): datasette=datasette, ) if response is not None: - await response.asgi_send(send) + return response return async_view_fn diff --git a/datasette/views/base.py b/datasette/views/base.py index 280ae49d..208c3c96 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -1,7 +1,6 @@ import asyncio import csv import itertools -from itsdangerous import BadSignature import json import re import time @@ -82,19 +81,8 @@ class BaseView: return "ff0000" async def dispatch_request(self, request, *args, **kwargs): - # Populate request_messages if ds_messages cookie is present - if self.ds: - try: - request._messages = self.ds.unsign( - request.cookies.get("ds_messages", ""), "messages" - ) - except BadSignature: - pass handler = getattr(self, request.method.lower(), None) - response = await handler(request, *args, **kwargs) - if self.ds: - self.ds._write_messages_to_response(request, response) - return response + return await handler(request, *args, **kwargs) async def render(self, templates, request, context=None): context = context or {} @@ -123,10 +111,9 @@ class BaseView: def 
as_view(cls, *class_args, **class_kwargs): async def view(request, send): self = view.view_class(*class_args, **class_kwargs) - response = await self.dispatch_request( + return await self.dispatch_request( request, **request.scope["url_route"]["kwargs"] ) - await response.asgi_send(send) view.view_class = cls view.__doc__ = cls.__doc__ diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index e4e4153c..bf6340ce 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -190,6 +190,12 @@ def register_routes(): def not_async(): return Response.html("This was not async") + def add_message(datasette, request): + datasette.add_message(request, "Hello from messages") + print("Adding message") + print(request._messages) + return Response.html("Added message") + return [ (r"/one/$", one), (r"/two/(?P.*)$", two), @@ -197,6 +203,7 @@ def register_routes(): (r"/post/$", post), (r"/csrftoken-form/$", csrftoken_form), (r"/not-async/$", not_async), + (r"/add-message/$", add_message), ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index b798e52d..9468fde9 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -602,6 +602,14 @@ def test_register_routes_asgi(app_client): assert "1" == response.headers["x-three"] +def test_register_routes_add_message(app_client): + response = app_client.get("/add-message/") + assert 200 == response.status + assert "Added message" == response.text + decoded = app_client.ds.unsign(response.cookies["ds_messages"], "messages") + assert [["Hello from messages", 1]] == decoded + + @pytest.mark.asyncio async def test_startup(app_client): await app_client.ds.invoke_startup() From a8a5f813722f72703a7aae41135ccc40635cc02f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Jun 2020 17:50:47 -0700 Subject: [PATCH 0010/1705] Made show_messages available to plugins, closes #864 --- datasette/app.py | 1 + datasette/views/base.py | 1 - tests/plugins/my_plugin.py | 8 ++++++-- tests/test_plugins.py | 10 ++++++++++ 4 files changed, 17 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index af3dcc8b..90abc373 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -739,6 +739,7 @@ class Datasette: "zip": zip, "body_scripts": body_scripts, "format_bytes": format_bytes, + "show_messages": lambda: self._show_messages(request), "extra_css_urls": self._asset_urls("extra_css_urls", template, context), "extra_js_urls": self._asset_urls("extra_js_urls", template, context), "base_url": self.config("base_url"), diff --git a/datasette/views/base.py b/datasette/views/base.py index 208c3c96..6346a3f5 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -92,7 +92,6 @@ class BaseView: **{ "database_url": self.database_url, "database_color": self.database_color, - "show_messages": lambda: self.ds._show_messages(request), "select_templates": [ "{}{}".format( "*" if template_name == template.name else "", template_name diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index bf6340ce..8701c6db 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -192,10 +192,13 @@ def register_routes(): def add_message(datasette, request): datasette.add_message(request, "Hello from messages") - print("Adding message") - print(request._messages) return Response.html("Added message") + async def render_message(datasette, request): + return Response.html( + await datasette.render_template("render_message.html", request=request) + ) + return [ (r"/one/$", one), (r"/two/(?P.*)$", 
two), @@ -204,6 +207,7 @@ def register_routes(): (r"/csrftoken-form/$", csrftoken_form), (r"/not-async/$", not_async), (r"/add-message/$", add_message), + (r"/render-message/$", render_message), ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 9468fde9..9a2ee2a3 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -610,6 +610,16 @@ def test_register_routes_add_message(app_client): assert [["Hello from messages", 1]] == decoded +def test_register_routes_render_message(restore_working_directory, tmpdir_factory): + templates = tmpdir_factory.mktemp("templates") + (templates / "render_message.html").write_text('{% extends "base.html" %}', "utf-8") + with make_app_client(template_dir=templates) as client: + response1 = client.get("/add-message/") + response2 = client.get("/render-message/", cookies=response1.cookies) + assert 200 == response2.status + assert "Hello from messages" in response2.text + + @pytest.mark.asyncio async def test_startup(app_client): await app_client.ds.invoke_startup() From 265483173bc8341dc02c8b782b9b59d2ce8bbedb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Jun 2020 19:31:03 -0700 Subject: [PATCH 0011/1705] Release 0.45a4 Refs #864, #871 --- docs/changelog.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 55f274a8..96a53c52 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,9 +4,9 @@ Changelog ========= -.. _v0_45a3: +.. _v0_45a4: -0.45a3 (2020-06-27) +0.45a4 (2020-06-28) ------------------- .. warning:: This is an **alpha** release. See :ref:`contributing_alpha_beta`. @@ -22,6 +22,7 @@ Changelog - New :ref:`plugin_hook_register_magic_parameters` plugin hook. - New :ref:`plugin_hook_startup` plugin hook. (`#834 `__) - New :ref:`plugin_hook_canned_queries` plugin hook. See `datasette-saved-queries `__ for an example of this hook in action. (`#852 `__) +- ``datasette.add_message()`` now works inside plugins. (`#864 `__) - Workaround for "Too many open files" error in test runs. (`#846 `__) - Respect existing ``scope["actor"]`` if already set by ASGI middleware. (`#854 `__) - New process for shipping :ref:`contributing_alpha_beta`. (`#807 `__) From 968ce53689b088748e1587f5a57e5cc8150f8ea1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Jun 2020 20:49:45 -0700 Subject: [PATCH 0012/1705] Added datasette-write to plugins list on Ecosystem --- docs/ecosystem.rst | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst index 72a2845e..04a14453 100644 --- a/docs/ecosystem.rst +++ b/docs/ecosystem.rst @@ -112,6 +112,11 @@ datasette-init `datasette-init `__ allows you to define tables and views in your metadata file that should be created on startup if they do not already exist. +datasette-write +--------------- + +`datasette-write `__ provides an interface at ``/-/write`` allowing users to execute SQL write queries against a selected database. + datasette-media --------------- @@ -219,4 +224,4 @@ datasette-sentry datasette-publish-fly --------------------- -`datasette-publish-fly `__ lets you publish Datasette instances using the `Fly `__ hosting platform. See also :ref:`publish_fly`. \ No newline at end of file +`datasette-publish-fly `__ lets you publish Datasette instances using the `Fly `__ hosting platform. See also :ref:`publish_fly`. 
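Patches 0009 and 0010 above combine to give plugins end-to-end access to Datasette's messages mechanism: a route registered via ``register_routes()`` can call ``datasette.add_message()``, and any template that extends ``base.html`` will display the queued message via ``show_messages()``. A minimal sketch of such a plugin, using only the APIs exercised in the tests above (the ``/save/`` route path and the message text are hypothetical):

    from datasette import hookimpl
    from datasette.utils.asgi import Response


    @hookimpl
    def register_routes():
        async def save(datasette, request):
            # Queues a message in the signed ds_messages cookie; it is
            # rendered on whichever page the user visits next.
            datasette.add_message(request, "Saved!")
            return Response.redirect("/")

        return [(r"/save/$", save)]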
From 22d932fafc3fa9af5a8f5eeab908688eaeb177ea Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Jun 2020 21:17:30 -0700 Subject: [PATCH 0013/1705] /-/logout page for logging out of ds_actor cookie Refs #840 --- datasette/app.py | 4 ++++ datasette/templates/logout.html | 25 +++++++++++++++++++++++++ datasette/views/special.py | 17 +++++++++++++++++ docs/authentication.rst | 8 ++++++++ tests/test_auth.py | 29 +++++++++++++++++++++++++++++ 5 files changed, 83 insertions(+) create mode 100644 datasette/templates/logout.html diff --git a/datasette/app.py b/datasette/app.py index 90abc373..ceaf36f2 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -33,6 +33,7 @@ from .views.special import ( JsonDataView, PatternPortfolioView, AuthTokenView, + LogoutView, PermissionsDebugView, MessagesDebugView, ) @@ -853,6 +854,9 @@ class Datasette: add_route( AuthTokenView.as_view(self), r"/-/auth-token$", ) + add_route( + LogoutView.as_view(self), r"/-/logout$", + ) add_route( PermissionsDebugView.as_view(self), r"/-/permissions$", ) diff --git a/datasette/templates/logout.html b/datasette/templates/logout.html new file mode 100644 index 00000000..08141962 --- /dev/null +++ b/datasette/templates/logout.html @@ -0,0 +1,25 @@ +{% extends "base.html" %} + +{% block title %}Log out{% endblock %} + +{% block nav %} +
+    <p class="crumbs">
+        <a href="{{ base_url }}">home</a>
+    </p>
+    {{ super() }}
+{% endblock %}
+
+{% block content %}
+
+<h1>Log out</h1>
+
+<p>You are logged in as <strong>{{ actor.id or actor }}</strong></p>
+
+<form action="{{ base_url }}-/logout" method="post">
+    <input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
+    <input type="submit" value="Log out">
+</form>
+ + +{% endblock %} diff --git a/datasette/views/special.py b/datasette/views/special.py index 6c378995..374ca9f2 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -72,6 +72,23 @@ class AuthTokenView(BaseView): return Response("Invalid token", status=403) +class LogoutView(BaseView): + name = "logout" + + def __init__(self, datasette): + self.ds = datasette + + async def get(self, request): + if not request.actor: + return Response.redirect("/") + return await self.render(["logout.html"], request, {"actor": request.actor},) + + async def post(self, request): + response = Response.redirect("/") + response.set_cookie("ds_actor", "", expires=0, max_age=0) + return response + + class PermissionsDebugView(BaseView): name = "permissions_debug" diff --git a/docs/authentication.rst b/docs/authentication.rst index 2a6fa9bc..a2b1276b 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -404,6 +404,14 @@ The resulting cookie will encode data that looks something like this: "e": "1jjSji" } + +.. _LogoutView: + +The /-/logout page +------------------ + +The page at ``/-/logout`` provides the ability to log out of a ``ds_actor`` cookie authentication session. + .. _permissions: Built-in permissions diff --git a/tests/test_auth.py b/tests/test_auth.py index bb4bee4b..96a8bef9 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -47,3 +47,32 @@ def test_actor_cookie_that_expires(app_client, offset, expected): ) response = app_client.get("/", cookies={"ds_actor": cookie}) assert expected == app_client.ds._last_request.scope["actor"] + + +def test_logout(app_client): + response = app_client.get( + "/-/logout", cookies={"ds_actor": app_client.actor_cookie({"id": "test"})} + ) + assert 200 == response.status + assert "
<p>You are logged in as <strong>test</strong></p>
" in response.text + # Actors without an id get full serialization + response2 = app_client.get( + "/-/logout", cookies={"ds_actor": app_client.actor_cookie({"name2": "bob"})} + ) + assert 200 == response2.status + assert ( + "
<p>You are logged in as <strong>{&#39;name2&#39;: &#39;bob&#39;}</strong></p>
" + in response2.text + ) + # If logged out you get a redirect to / + response3 = app_client.get("/-/logout", allow_redirects=False) + assert 302 == response3.status + # A POST to that page should log the user out + response4 = app_client.post( + "/-/logout", + csrftoken_from=True, + cookies={"ds_actor": app_client.actor_cookie({"id": "test"})}, + allow_redirects=False, + ) + assert {"ds_actor": ""} == response4.cookies + assert 302 == response4.status From 35aee82c60b2c9a0185b934db5528c8bd11830f2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Jun 2020 21:27:11 -0700 Subject: [PATCH 0014/1705] Fixed 500 error with /favicon.ico, closes #874 --- datasette/app.py | 2 +- tests/test_html.py | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index ceaf36f2..d4c959b7 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -160,7 +160,7 @@ CONFIG_OPTIONS = ( DEFAULT_CONFIG = {option.name: option.default for option in CONFIG_OPTIONS} -async def favicon(scope, receive, send): +async def favicon(request, send): await asgi_send(send, "", 200) diff --git a/tests/test_html.py b/tests/test_html.py index c80a7685..d1411afd 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -52,6 +52,12 @@ def test_http_head(app_client): assert response.status == 200 +def test_favicon(app_client): + response = app_client.get("/favicon.ico") + assert response.status == 200 + assert "" == response.text + + def test_static(app_client): response = app_client.get("/-/static/app2.css") assert response.status == 404 From 16f592247a2a0e140ada487e9972645406dcae69 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 29 Jun 2020 08:42:50 -0700 Subject: [PATCH 0015/1705] Use explicit lifestyle=on for Uvicorn, refs #873 --- datasette/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/cli.py b/datasette/cli.py index bba72484..287195a5 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -406,7 +406,7 @@ def serve( # Start the server if root: print("http://{}:{}/-/auth-token?token={}".format(host, port, ds._root_token)) - uvicorn.run(ds.app(), host=host, port=port, log_level="info") + uvicorn.run(ds.app(), host=host, port=port, log_level="info", lifespan="on") async def check_databases(ds): From 51427323e68c6fef19a72fad48dd44f933207811 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 29 Jun 2020 11:31:35 -0700 Subject: [PATCH 0016/1705] Add message when user logs out, refs #840 --- datasette/views/special.py | 1 + tests/fixtures.py | 3 ++- tests/test_auth.py | 6 ++++-- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/datasette/views/special.py b/datasette/views/special.py index 374ca9f2..51688f36 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -86,6 +86,7 @@ class LogoutView(BaseView): async def post(self, request): response = Response.redirect("/") response.set_cookie("ds_actor", "", expires=0, max_age=0) + self.ds.add_message(request, "You are now logged out", self.ds.WARNING) return response diff --git a/tests/fixtures.py b/tests/fixtures.py index d103fa35..94a3cce5 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -97,7 +97,8 @@ class TestResponse: @property def cookies(self): cookie = SimpleCookie() - cookie.load(self.headers.get("set-cookie") or "") + for header in self.headers.getlist("set-cookie"): + cookie.load(header) return {key: value.value for key, value in cookie.items()} @property diff --git a/tests/test_auth.py b/tests/test_auth.py index 
96a8bef9..145a9a89 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -74,5 +74,7 @@ def test_logout(app_client): cookies={"ds_actor": app_client.actor_cookie({"id": "test"})}, allow_redirects=False, ) - assert {"ds_actor": ""} == response4.cookies - assert 302 == response4.status + assert "" == response4.cookies["ds_actor"] + # Should also have set a message + messages = app_client.ds.unsign(response4.cookies["ds_messages"], "messages") + assert [["You are now logged out", 2]] == messages From 2115d7e3457b48b3cf9c81551b9fed2d0e9cd111 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 29 Jun 2020 11:40:40 -0700 Subject: [PATCH 0017/1705] Logout link in nav, refs #875 --- datasette/app.py | 3 +++ datasette/static/app.css | 18 ++++++++++++++++++ datasette/templates/base.html | 12 +++++++++++- datasette/templates/logout.html | 2 +- datasette/utils/__init__.py | 7 +++++++ tests/test_auth.py | 14 ++++++++++++++ tests/test_utils.py | 19 +++++++++++++++++++ 7 files changed, 73 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index d4c959b7..43249eaa 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -44,6 +44,7 @@ from .database import Database, QueryInterrupted from .utils import ( async_call_with_supported_arguments, call_with_supported_arguments, + display_actor, escape_css_string, escape_sqlite, format_bytes, @@ -736,6 +737,8 @@ class Datasette: template_context = { **context, **{ + "actor": request.actor if request else None, + "display_actor": display_actor, "app_css_hash": self.app_css_hash(), "zip": zip, "body_scripts": body_scripts, diff --git a/datasette/static/app.css b/datasette/static/app.css index 774a2235..ed98b13e 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -100,6 +100,14 @@ table a:visited { .hd .crumbs { float: left; } +.hd .logout { + float: right; + text-align: right; + padding-left: 1em; +} +.hd .logout form { + display: inline; +} .ft { margin: 1em 0; padding: 0.5em 1em 0 1em; @@ -367,3 +375,13 @@ p.zero-results { border: 1px solid red; background-color: pink; } + +button.button-as-link { + background: none; + border: none; + padding: 0; + color: blue; + text-decoration: none; + cursor: pointer; + font-size: 1em; +} diff --git a/datasette/templates/base.html b/datasette/templates/base.html index 9b871d03..e739d804 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -14,7 +14,17 @@ - +
{% block messages %} diff --git a/datasette/templates/logout.html b/datasette/templates/logout.html index 08141962..3c8eb17a 100644 --- a/datasette/templates/logout.html +++ b/datasette/templates/logout.html @@ -13,7 +13,7 @@
 
 <h1>Log out</h1>
 
-<p>You are logged in as <strong>{{ actor.id or actor }}</strong></p>
+<p>You are logged in as <strong>{{ display_actor(actor) }}</strong></p>
 
 <form action="{{ base_url }}-/logout" method="post">
     <input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 30121cf2..6e3fd0db 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -924,3 +924,10 @@ def resolve_env_secrets(config, environ): return [resolve_env_secrets(value, environ) for value in config] else: return config + + +def display_actor(actor): + for key in ("display", "name", "username", "login", "id"): + if actor.get(key): + return actor[key] + return str(actor) diff --git a/tests/test_auth.py b/tests/test_auth.py index 145a9a89..1d8148f9 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -78,3 +78,17 @@ def test_logout(app_client): # Should also have set a message messages = app_client.ds.unsign(response4.cookies["ds_messages"], "messages") assert [["You are now logged out", 2]] == messages + + +@pytest.mark.parametrize("path", ["/", "/fixtures", "/fixtures/facetable"]) +def test_logout_button_in_navigation(app_client, path): + response = app_client.get( + path, cookies={"ds_actor": app_client.actor_cookie({"id": "test"})} + ) + anon_response = app_client.get(path) + for fragment in ( + "test ·", + '', + ): + assert fragment in response.text + assert fragment not in anon_response.text diff --git a/tests/test_utils.py b/tests/test_utils.py index 80c6f223..fb2d71f9 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -517,3 +517,22 @@ def test_actor_matches_allow(actor, allow, expected): ) def test_resolve_env_secrets(config, expected): assert expected == utils.resolve_env_secrets(config, {"FOO": "x"}) + + +@pytest.mark.parametrize( + "actor,expected", + [ + ({"id": "blah"}, "blah"), + ({"id": "blah", "login": "l"}, "l"), + ({"id": "blah", "login": "l"}, "l"), + ({"id": "blah", "login": "l", "username": "u"}, "u"), + ({"login": "l", "name": "n"}, "n"), + ( + {"id": "blah", "login": "l", "username": "u", "name": "n", "display": "d"}, + "d", + ), + ({"weird": "shape"}, "{'weird': 'shape'}"), + ], +) +def test_display_actor(actor, expected): + assert expected == utils.display_actor(actor) From 9ac6292614a332ddc5d0868abe4a72f8f48bc7ac Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 30 Jun 2020 15:00:17 -0700 Subject: [PATCH 0018/1705] _header_x now defaults to empty string Prior to this a request to e.g. https://latest.datasette.io/fixtures/magic_parameters which did not include a User-Agent header would trigger a 500 error. 
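The root cause: ASGI delivers request headers as a list of ``(name, value)`` byte-string pairs, so a header the client never sent is simply absent from the dict and a direct lookup raises ``KeyError``. A rough illustration of the behavior before and after this patch (header values hypothetical):

    headers_dict = dict([(b"host", b"localhost")])

    headers_dict[b"user-agent"]           # KeyError - the 500 described above
    headers_dict.get(b"user-agent", b"")  # b"" - the magic parameter is now empty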
--- datasette/default_magic_parameters.py | 2 +- tests/test_canned_queries.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/datasette/default_magic_parameters.py b/datasette/default_magic_parameters.py index b788fee8..0f8f397e 100644 --- a/datasette/default_magic_parameters.py +++ b/datasette/default_magic_parameters.py @@ -8,7 +8,7 @@ import time def header(key, request): key = key.replace("_", "-").encode("utf-8") headers_dict = dict(request.scope["headers"]) - return headers_dict[key].decode("utf-8") + return headers_dict.get(key, b"").decode("utf-8") def actor(key, request): diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index 2e064db1..c0219cb1 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -186,6 +186,7 @@ def magic_parameters_client(): [ ("_actor_id", "root"), ("_header_host", "localhost"), + ("_header_not_a_thing", ""), ("_cookie_foo", "bar"), ("_now_epoch", r"^\d+$"), ("_now_date_utc", r"^\d{4}-\d{2}-\d{2}$"), From ab76eddf31bef99630cc78f462a0b67624db60ac Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 30 Jun 2020 15:49:06 -0700 Subject: [PATCH 0019/1705] Express no opinion if allow block is missing Default permission policy was returning True by default for permission checks - which means that if allow was not defined for a level it would be treated as a passing check. This is better: we now return None of the allow block is not defined, which means 'I have no opinion on this' and allows other code to make its own decisions. Added while working on #832 --- datasette/default_permissions.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 0929a17a..ddd45940 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -15,14 +15,14 @@ def permission_allowed(datasette, actor, action, resource): elif action == "view-database": database_allow = datasette.metadata("allow", database=resource) if database_allow is None: - return True + return None return actor_matches_allow(actor, database_allow) elif action == "view-table": database, table = resource tables = datasette.metadata("tables", database=database) or {} table_allow = (tables.get(table) or {}).get("allow") if table_allow is None: - return True + return None return actor_matches_allow(actor, table_allow) elif action == "view-query": # Check if this query has a "allow" block in metadata @@ -31,7 +31,7 @@ def permission_allowed(datasette, actor, action, resource): assert query is not None allow = query.get("allow") if allow is None: - return True + return None return actor_matches_allow(actor, allow) elif action == "execute-sql": # Use allow_sql block from database block, or from top-level @@ -39,7 +39,7 @@ def permission_allowed(datasette, actor, action, resource): if database_allow_sql is None: database_allow_sql = datasette.metadata("allow_sql") if database_allow_sql is None: - return True + return None return actor_matches_allow(actor, database_allow_sql) return inner From d6e03b04302a0852e7133dc030eab50177c37be7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 30 Jun 2020 16:40:50 -0700 Subject: [PATCH 0020/1705] Cascading view permissions, closes #832 - If you have table permission but not database permission you can now view the table page - New BaseView.check_permissions() method --- datasette/views/base.py | 23 ++++++++++++++ datasette/views/database.py | 21 ++++++++----- datasette/views/table.py | 11 
+++++-- tests/fixtures.py | 4 ++- tests/test_permissions.py | 61 ++++++++++++++++++++++++++++++++++++- 5 files changed, 108 insertions(+), 12 deletions(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index 6346a3f5..399b1a1f 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -69,6 +69,29 @@ class BaseView: if not ok: raise Forbidden(action) + async def check_permissions(self, request, permissions): + "permissions is a list of (action, resource) tuples or 'action' strings" + for permission in permissions: + if isinstance(permission, str): + action = permission + resource = None + elif isinstance(permission, (tuple, list)) and len(permission) == 2: + action, resource = permission + else: + assert ( + False + ), "permission should be string or tuple of two items: {}".format( + repr(permission) + ) + ok = await self.ds.permission_allowed( + request.actor, action, resource=resource, default=None, + ) + if ok is not None: + if ok: + return + else: + raise Forbidden(action) + def database_url(self, database): db = self.ds.databases[database] base_url = self.ds.config("base_url") diff --git a/datasette/views/database.py b/datasette/views/database.py index 44750f5b..257305fd 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -20,8 +20,9 @@ class DatabaseView(DataView): name = "database" async def data(self, request, database, hash, default_labels=False, _size=None): - await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", database) + await self.check_permissions( + request, [("view-database", database), "view-instance",], + ) metadata = (self.ds.metadata("databases") or {}).get(database, {}) self.ds.update_with_inherited_metadata(metadata) @@ -88,7 +89,7 @@ class DatabaseView(DataView): "views": views, "queries": canned_queries, "private": not await self.ds.permission_allowed( - None, "view-database", database + None, "view-database", database, default=True ), "allow_execute_sql": await self.ds.permission_allowed( request.actor, "execute-sql", database, default=True @@ -150,17 +151,23 @@ class QueryView(DataView): if "_shape" in params: params.pop("_shape") - # Respect canned query permissions - await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", database) private = False if canned_query: - await self.check_permission(request, "view-query", (database, canned_query)) + # Respect canned query permissions + await self.check_permissions( + request, + [ + ("view-query", (database, canned_query)), + ("view-database", database), + "view-instance", + ], + ) private = not await self.ds.permission_allowed( None, "view-query", (database, canned_query), default=True ) else: await self.check_permission(request, "execute-sql", database) + # Extract any :named parameters named_parameters = named_parameters or self.re_named_parameter.findall(sql) named_parameter_values = { diff --git a/datasette/views/table.py b/datasette/views/table.py index 1a55a495..e0a52e20 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -269,9 +269,14 @@ class TableView(RowTableShared): if not is_view and not table_exists: raise NotFound("Table not found: {}".format(table)) - await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", database) - await self.check_permission(request, "view-table", (database, table)) + await self.check_permissions( + request, + [ + ("view-table", (database, 
table)), + ("view-database", database), + "view-instance", + ], + ) private = not await self.ds.permission_allowed( None, "view-table", (database, table), default=True diff --git a/tests/fixtures.py b/tests/fixtures.py index 94a3cce5..a9b9a396 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -462,7 +462,9 @@ METADATA = { "queries": { "𝐜𝐢𝐭𝐢𝐞𝐬": "select id, name from facet_cities order by id limit 1;", "pragma_cache_size": "PRAGMA cache_size;", - "magic_parameters": "select :_header_user_agent as user_agent, :_now_datetime_utc as datetime", + "magic_parameters": { + "sql": "select :_header_user_agent as user_agent, :_now_datetime_utc as datetime", + }, "neighborhood_search": { "sql": textwrap.dedent( """ diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 241dd2e5..2d57b5e3 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -1,5 +1,6 @@ from .fixtures import app_client, assert_permissions_checked, make_app_client from bs4 import BeautifulSoup as Soup +import copy import pytest @@ -43,7 +44,7 @@ def test_view_database(allow, expected_anon, expected_auth): "/fixtures/compound_three_primary_keys/a,a,a", ): anon_response = client.get(path) - assert expected_anon == anon_response.status + assert expected_anon == anon_response.status, path if allow and path == "/fixtures" and anon_response.status == 200: # Should be no padlock assert ">fixtures 🔒" not in anon_response.text @@ -348,3 +349,61 @@ def test_view_instance(path, view_instance_client): assert 403 == view_instance_client.get(path).status if path not in ("/-/permissions", "/-/messages", "/-/patterns"): assert 403 == view_instance_client.get(path + ".json").status + + +@pytest.fixture(scope="session") +def cascade_app_client(): + with make_app_client() as client: + yield client + + +@pytest.mark.parametrize( + "path,expected_status,permissions", + [ + ("/", 403, []), + ("/", 200, ["instance"]), + # Can view table even if not allowed database or instance + ("/fixtures/facet_cities", 403, []), + ("/fixtures/facet_cities", 403, ["database"]), + ("/fixtures/facet_cities", 403, ["instance"]), + ("/fixtures/facet_cities", 200, ["table"]), + ("/fixtures/facet_cities", 200, ["table", "database"]), + ("/fixtures/facet_cities", 200, ["table", "database", "instance"]), + # Can view query even if not allowed database or instance + ("/fixtures/magic_parameters", 403, []), + ("/fixtures/magic_parameters", 403, ["database"]), + ("/fixtures/magic_parameters", 403, ["instance"]), + ("/fixtures/magic_parameters", 200, ["query"]), + ("/fixtures/magic_parameters", 200, ["query", "database"]), + ("/fixtures/magic_parameters", 200, ["query", "database", "instance"]), + # Can view database even if not allowed instance + ("/fixtures", 403, []), + ("/fixtures", 403, ["instance"]), + ("/fixtures", 200, ["database"]), + ], +) +def test_permissions_cascade(cascade_app_client, path, expected_status, permissions): + "Test that e.g. 
having view-table but NOT view-database lets you view table page, etc" + allow = {"id": "*"} + deny = {} + previous_metadata = cascade_app_client.ds._metadata + updated_metadata = copy.deepcopy(previous_metadata) + try: + # Set up the different allow blocks + updated_metadata["allow"] = allow if "instance" in permissions else deny + updated_metadata["databases"]["fixtures"]["allow"] = ( + allow if "database" in permissions else deny + ) + updated_metadata["databases"]["fixtures"]["tables"]["facet_cities"]["allow"] = ( + allow if "table" in permissions else deny + ) + updated_metadata["databases"]["fixtures"]["queries"]["magic_parameters"][ + "allow" + ] = (allow if "query" in permissions else deny) + cascade_app_client.ds._metadata = updated_metadata + response = cascade_app_client.get( + path, cookies={"ds_actor": cascade_app_client.actor_cookie({"id": "test"})}, + ) + assert expected_status == response.status + finally: + cascade_app_client.ds._metadata = previous_metadata From cfd69593f75295c63642a38af6801704c1bddc60 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 30 Jun 2020 16:45:34 -0700 Subject: [PATCH 0021/1705] Removed hashes from examples on docs/pages - closes #879 --- docs/pages.rst | 25 +++++++++---------------- 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/docs/pages.rst b/docs/pages.rst index ce8f5d06..db970ead 100644 --- a/docs/pages.rst +++ b/docs/pages.rst @@ -28,24 +28,17 @@ Add ``/.json`` to the end of the URL for the JSON version of the underlying data Database ======== -Each database has a page listing the tables, views and canned queries -available for that database. If the :ref:`permissions_execute_sql` permission is enabled (it's on by default) there will also be an interface for executing arbitrary SQL select queries against the data. +Each database has a page listing the tables, views and canned queries available for that database. If the :ref:`permissions_execute_sql` permission is enabled (it's on by default) there will also be an interface for executing arbitrary SQL select queries against the data. Examples: -* `fivethirtyeight.datasettes.com/fivethirtyeight-ac35616 `_ -* `global-power-plants.datasettes.com/global-power-plants-9e55be2 `_ +* `fivethirtyeight.datasettes.com/fivethirtyeight `_ +* `global-power-plants.datasettes.com/global-power-plants `_ The JSON version of this page provides programmatic access to the underlying data: -* `fivethirtyeight.datasettes.com/fivethirtyeight-ac35616.json `_ -* `global-power-plants.datasettes.com/global-power-plants-9e55be2.json `_ - -Note that these URLs end in a 7 character hash. This hash is derived from the contents of the database, and ensures that each URL is immutable: the data returned from a URL containing the hash will always be the same, since if the contents of the database file changes by even a single byte a new hash will be generated. - -If you access one of these URLs with an incorrect hash (say because a new version of the underlying database has been published) Datasette will 302 redirect you to the correct URL. This happens for all URLs below the database page as well. - -Thanks to this hashing scheme, Datasette URLs can all be returned with far-future cache expiry headers. This means browsers will cache the data (including data from the JSON APIs) for a long time, and CDNs such as `Cloudflare `_ or `Fastly `_ can be used to dramatically improve the performance of a Datasette hosted API. 
+* `fivethirtyeight.datasettes.com/fivethirtyeight.json `_ +* `global-power-plants.datasettes.com/global-power-plants.json `_ .. _TableView: @@ -62,8 +55,8 @@ You can also use the table page to interactively construct a SQL query - by appl Some examples: -* `../items `_ lists all of the line-items registered by UK MPs as potential conflicts of interest. It demonstrates Datasette's support for :ref:`full_text_search`. -* `../antiquities-act%2Factions_under_antiquities_act `_ is an interface for exploring the "actions under the antiquities act" data table published by FiveThirtyEight. +* `../items `_ lists all of the line-items registered by UK MPs as potential conflicts of interest. It demonstrates Datasette's support for :ref:`full_text_search`. +* `../antiquities-act%2Factions_under_antiquities_act `_ is an interface for exploring the "actions under the antiquities act" data table published by FiveThirtyEight. * `../global-power-plants?country_long=United+Kingdom&primary_fuel=Gas `_ is a filtered table page showing every Gas power plant in the United Kingdom. It includes some default facets (configured using `its metadata.json `_) and uses the `datasette-cluster-map `_ plugin to show a map of the results. .. _RowView: @@ -77,10 +70,10 @@ Table cells with extremely long text contents are truncated on the table view ac Rows which are the targets of foreign key references from other tables will show a link to a filtered search for all records that reference that row. Here's an example from the Registers of Members Interests database: -`../people/uk.org.publicwhip%2Fperson%2F10001 `_ +`../people/uk.org.publicwhip%2Fperson%2F10001 `_ Note that this URL includes the encoded primary key of the record. Here's that same page as JSON: -`../people/uk.org.publicwhip%2Fperson%2F10001.json `_ +`../people/uk.org.publicwhip%2Fperson%2F10001.json `_ From 2b85bbdd45a2da51a735f4aaca940c86e104e18a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 30 Jun 2020 16:47:23 -0700 Subject: [PATCH 0022/1705] Added logout button to pattern portfolio, closes #876 Refs #875 --- datasette/templates/patterns.html | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html index 73443ac2..4dda7300 100644 --- a/datasette/templates/patterns.html +++ b/datasette/templates/patterns.html @@ -19,6 +19,12 @@ fixtures / attraction_characteristic

+
+ testuser · + + + +

Messages

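The logout form added above posts to the ``/-/logout`` page with a CSRF token. A minimal sketch of exercising it in a test - assuming the ``make_app_client`` fixture and the ``csrftoken_from``/``cookies`` keyword arguments that ``TestClient.post()`` supports elsewhere in this series:

.. code-block:: python

    from .fixtures import make_app_client


    def test_logout_form_clears_actor_cookie():
        with make_app_client() as client:
            response = client.post(
                "/-/logout",
                csrftoken_from=True,
                cookies={"ds_actor": client.actor_cookie({"id": "test"})},
                allow_redirects=False,
            )
            # A redirect is expected once the ds_actor cookie has been cleared
            assert 302 == response.status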
From 08b4928a75faa1f2ba1c5b9908bcf2df4975a9d4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 30 Jun 2020 18:18:19 -0700 Subject: [PATCH 0023/1705] asgi-csrf>=0.6, refs #835 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 9660be61..5958d60b 100644 --- a/setup.py +++ b/setup.py @@ -53,7 +53,7 @@ setup( "uvicorn~=0.11", "aiofiles>=0.4,<0.6", "janus>=0.4,<0.6", - "asgi-csrf>=0.5.1", + "asgi-csrf>=0.6", "PyYAML~=5.3", "mergedeep>=1.1.1,<1.4.0", "itsdangerous~=1.1", From 3ec5b1abf6afa2d22a3378092809a1a8c0249d26 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 30 Jun 2020 20:08:00 -0700 Subject: [PATCH 0024/1705] CSRF tests for canned query POST, closes #835 --- tests/test_canned_queries.py | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index c0219cb1..365bcdfa 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -55,7 +55,11 @@ def test_canned_query_with_named_parameter(app_client): def test_insert(canned_write_client): response = canned_write_client.post( - "/data/add_name", {"name": "Hello"}, allow_redirects=False, csrftoken_from=True, + "/data/add_name", + {"name": "Hello"}, + allow_redirects=False, + csrftoken_from=True, + cookies={"foo": "bar"}, ) assert 302 == response.status assert "/data/add_name?success" == response.headers["Location"] @@ -65,6 +69,24 @@ def test_insert(canned_write_client): assert [["Query executed, 1 row affected", 1]] == messages +def test_insert_with_cookies_requires_csrf(canned_write_client): + response = canned_write_client.post( + "/data/add_name", + {"name": "Hello"}, + allow_redirects=False, + cookies={"foo": "bar"}, + ) + assert 403 == response.status + + +def test_insert_no_cookies_no_csrf(canned_write_client): + response = canned_write_client.post( + "/data/add_name", {"name": "Hello"}, allow_redirects=False + ) + assert 302 == response.status + assert "/data/add_name?success" == response.headers["Location"] + + def test_custom_success_message(canned_write_client): response = canned_write_client.post( "/data/delete_name", From 549b1c2063db48c4622ee5c7b478a1e3cbc1ac07 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 30 Jun 2020 21:17:38 -0700 Subject: [PATCH 0025/1705] New forbidden() plugin hook, closes #812 --- datasette/app.py | 39 +++++++++++++++++++++------------ datasette/hookspecs.py | 5 +++++ datasette/views/database.py | 4 ++-- datasette/views/special.py | 8 +++---- docs/plugin_hooks.rst | 43 +++++++++++++++++++++++++++++++++++++ tests/fixtures.py | 1 + tests/plugins/my_plugin.py | 7 ++++++ tests/test_plugins.py | 13 +++++++++++ 8 files changed, 100 insertions(+), 20 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 43249eaa..1473cce8 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -975,22 +975,24 @@ class DatasetteRouter: await response.asgi_send(send) return except NotFound as exception: - return await self.handle_404(scope, receive, send, exception) + return await self.handle_404(request, send, exception) except Exception as exception: - return await self.handle_500(scope, receive, send, exception) - return await self.handle_404(scope, receive, send) + return await self.handle_500(request, send, exception) + return await self.handle_404(request, send) - async def handle_404(self, scope, receive, send, exception=None): + async def handle_404(self, request, send, exception=None): # If URL has a trailing slash, 
redirect to URL without it - path = scope.get("raw_path", scope["path"].encode("utf8")) + path = request.scope.get("raw_path", request.scope["path"].encode("utf8")) if path.endswith(b"/"): path = path.rstrip(b"/") - if scope["query_string"]: - path += b"?" + scope["query_string"] + if request.scope["query_string"]: + path += b"?" + request.scope["query_string"] await asgi_send_redirect(send, path.decode("latin1")) else: # Is there a pages/* template matching this path? - template_path = os.path.join("pages", *scope["path"].split("/")) + ".html" + template_path = ( + os.path.join("pages", *request.scope["path"].split("/")) + ".html" + ) try: template = self.ds.jinja_env.select_template([template_path]) except TemplateNotFound: @@ -1019,7 +1021,7 @@ class DatasetteRouter: "custom_status": custom_status, "custom_redirect": custom_redirect, }, - request=Request(scope, receive), + request=request, view_name="page", ) # Pull content-type out into separate parameter @@ -1035,11 +1037,9 @@ class DatasetteRouter: content_type=content_type, ) else: - await self.handle_500( - scope, receive, send, exception or NotFound("404") - ) + await self.handle_500(request, send, exception or NotFound("404")) - async def handle_500(self, scope, receive, send, exception): + async def handle_500(self, request, send, exception): title = None if isinstance(exception, NotFound): status = 404 @@ -1049,6 +1049,17 @@ class DatasetteRouter: status = 403 info = {} message = exception.args[0] + # Try the forbidden() plugin hook + for custom_response in pm.hook.forbidden( + datasette=self.ds, request=request, message=message + ): + if callable(custom_response): + custom_response = custom_response() + if asyncio.iscoroutine(custom_response): + custom_response = await custom_response + if custom_response is not None: + await custom_response.asgi_send(send) + return elif isinstance(exception, DatasetteError): status = exception.status info = exception.error_dict @@ -1070,7 +1081,7 @@ class DatasetteRouter: headers = {} if self.ds.cors: headers["Access-Control-Allow-Origin"] = "*" - if scope["path"].split("?")[0].endswith(".json"): + if request.path.split("?")[0].endswith(".json"): await asgi_send_json(send, info, status=status, headers=headers) else: template = self.ds.jinja_env.select_template(templates) diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 020e84b9..92d321b6 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -88,3 +88,8 @@ def canned_queries(datasette, database, actor): @hookspec def register_magic_parameters(datasette): "Return a list of (name, function) magic parameter functions" + + +@hookspec +def forbidden(datasette, request, message): + "Custom response for a 403 forbidden error" diff --git a/datasette/views/database.py b/datasette/views/database.py index 257305fd..9d639170 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -10,7 +10,7 @@ from datasette.utils import ( path_with_added_args, path_with_removed_args, ) -from datasette.utils.asgi import AsgiFileDownload, Response +from datasette.utils.asgi import AsgiFileDownload, Response, Forbidden from datasette.plugins import pm from .base import DatasetteError, DataView @@ -120,7 +120,7 @@ class DatabaseDownload(DataView): if db.is_memory: raise DatasetteError("Cannot download :memory: database", status=404) if not self.ds.config("allow_download") or db.is_mutable: - raise DatasetteError("Database download is forbidden", status=403) + raise Forbidden("Database download is forbidden") if not 
db.path: raise DatasetteError("Cannot download database", status=404) filepath = db.path diff --git a/datasette/views/special.py b/datasette/views/special.py index 51688f36..ed5a36f7 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -1,5 +1,5 @@ import json -from datasette.utils.asgi import Response +from datasette.utils.asgi import Response, Forbidden from .base import BaseView import secrets @@ -60,7 +60,7 @@ class AuthTokenView(BaseView): async def get(self, request): token = request.args.get("token") or "" if not self.ds._root_token: - return Response("Root token has already been used", status=403) + raise Forbidden("Root token has already been used") if secrets.compare_digest(token, self.ds._root_token): self.ds._root_token = None response = Response.redirect("/") @@ -69,7 +69,7 @@ ) return response else: - return Response("Invalid token", status=403) + raise Forbidden("Invalid token") class LogoutView(BaseView): @@ -99,7 +99,7 @@ class PermissionsDebugView(BaseView): async def get(self, request): await self.check_permission(request, "view-instance") if not await self.ds.permission_allowed(request.actor, "permissions-debug"): - return Response("Permission denied", status=403) + raise Forbidden("Permission denied") return await self.render( ["permissions_debug.html"], request, diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index de10a551..fc14bba0 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -946,3 +946,46 @@ This example registers two new magic parameters: ``:_request_http_version`` retu ("request", request), ("uuid", uuid), ] + +.. _plugin_hook_forbidden: + +forbidden(datasette, request, message) +-------------------------------------- + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + +``request`` - object + The current HTTP :ref:`internals_request`. + +``message`` - string + A message hinting at why the request was forbidden. + +Plugins can use this to customize how Datasette responds when a 403 Forbidden error occurs - usually because a page failed a permission check, see :ref:`authentication_permissions`. + +If a plugin hook wishes to react to the error, it should return a :ref:`Response object `. + +This example returns a redirect to a ``/-/login`` page: + +.. code-block:: python + + from datasette import hookimpl + from datasette.utils.asgi import Response + from urllib.parse import urlencode + + @hookimpl + def forbidden(request, message): + return Response.redirect("/-/login?" + urlencode({"message": message})) + +The function can alternatively return an awaitable function if it needs to make any asynchronous method calls. This example renders a template: + +.. 
code-block:: python + + from datasette import hookimpl + from datasette.utils.asgi import Response + + @hookimpl + def forbidden(datasette): + async def inner(): + return Response.html(await datasette.render_template("forbidden.html")) + + return inner diff --git a/tests/fixtures.py b/tests/fixtures.py index a9b9a396..e29ea45d 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -45,6 +45,7 @@ EXPECTED_PLUGINS = [ "extra_css_urls", "extra_js_urls", "extra_template_vars", + "forbidden", "permission_allowed", "prepare_connection", "prepare_jinja2_environment", diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 8701c6db..1870824f 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -245,3 +245,10 @@ def register_magic_parameters(): ("request", request), ("uuid", uuid), ] + + +@hookimpl +def forbidden(datasette, request, message): + datasette._last_forbidden_message = message + if request.path == "/data2": + return Response.redirect("/login?message=" + message) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 9a2ee2a3..c9fdf2e8 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -684,3 +684,16 @@ def test_register_magic_parameters(restore_working_directory): assert 200 == response_get.status new_uuid = response_get.json[0][":_uuid_new"] assert 4 == new_uuid.count("-") + + +def test_forbidden(restore_working_directory): + with make_app_client( + extra_databases={"data2.db": "create table logs (line text)"}, + metadata={"allow": {}}, + ) as client: + response = client.get("/") + assert 403 == response.status + response2 = client.get("/data2", allow_redirects=False) + assert 302 == response2.status + assert "/login?message=view-database" == response2.headers["Location"] + assert "view-database" == client.ds._last_forbidden_message From 676bb64c877d73f8ff496cef4632f5a8a5a9283c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 30 Jun 2020 21:25:35 -0700 Subject: [PATCH 0026/1705] Release 0.45a5 Refs #840, #832, #835, #812 --- docs/changelog.rst | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 96a53c52..adfd7c5a 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,9 +4,9 @@ Changelog ========= -.. _v0_45a4: +.. _v0_45a5: -0.45a4 (2020-06-28) +0.45a5 (2020-06-30) ------------------- .. warning:: This is an **alpha** release. See :ref:`contributing_alpha_beta`. @@ -19,6 +19,10 @@ Changelog (:_actor_id, :_now_datetime_utc) This inserts the currently authenticated actor ID and the current datetime. (`#842 `__) +- New ``/-/logout`` page, linked to from the navigation if you are logged in with a ``ds_actor`` cookie. (`#840 `__) +- Cascading view permissions - so if a user has ``view-table`` they can view the table page even if they do not have ``view-database`` or ``view-instance``. (`#832 `__) +- CSRF protection no longer applies to ``Authentication: Bearer token`` requests or requests without cookies. (`#835 `__) +- New :ref:`plugin_hook_forbidden` plugin hook for customizing how Datasette responds to a 403 forbidden error. (`#812 `__) - New :ref:`plugin_hook_register_magic_parameters` plugin hook. - New :ref:`plugin_hook_startup` plugin hook. (`#834 `__) - New :ref:`plugin_hook_canned_queries` plugin hook. See `datasette-saved-queries `__ for an example of this hook in action. 
(`#852 `__) From f7c3fc978ca787e9bbd49357c649d93042396b44 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 1 Jul 2020 12:26:30 -0700 Subject: [PATCH 0027/1705] datasette-auth-tokens improved description Refs https://github.com/simonw/datasette-auth-tokens/issues/1 --- docs/ecosystem.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst index 04a14453..d74f280d 100644 --- a/docs/ecosystem.rst +++ b/docs/ecosystem.rst @@ -90,7 +90,7 @@ datasette-auth-github datasette-auth-tokens --------------------- -`datasette-auth-tokens `__ provides a mechanism for creating secret API tokens that can then be used with Datasette's :ref:`authentication` system. +`datasette-auth-tokens `__ provides a mechanism for creating secret API tokens that can then be used with Datasette's :ref:`authentication` system. These tokens can be hard-coded into the plugin configuration or the plugin can be configured to access tokens stored in a SQLite database table. datasette-permissions-sql ------------------------- From 1bae24691f1e9f87daa32b09827c2e3a6af075c3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 1 Jul 2020 14:25:59 -0700 Subject: [PATCH 0028/1705] Only show 'log out' if ds_actor cookie present, closes #884 --- datasette/app.py | 1 + datasette/templates/base.html | 4 ++-- tests/test_auth.py | 8 ++++++++ 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 1473cce8..dd443af9 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -739,6 +739,7 @@ class Datasette: **{ "actor": request.actor if request else None, "display_actor": display_actor, + "show_logout": "ds_actor" in request.cookies, "app_css_hash": self.app_css_hash(), "zip": zip, "body_scripts": body_scripts, diff --git a/datasette/templates/base.html b/datasette/templates/base.html index e739d804..fd4cf504 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -17,11 +17,11 @@ diff --git a/tests/test_auth.py b/tests/test_auth.py index 1d8148f9..d14af873 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -92,3 +92,11 @@ def test_logout_button_in_navigation(app_client, path): ): assert fragment in response.text assert fragment not in anon_response.text + + +@pytest.mark.parametrize("path", ["/", "/fixtures", "/fixtures/facetable"]) +def test_no_logout_button_in_navigation_if_no_ds_actor_cookie(app_client, path): + response = app_client.get(path + "?_bot=1") + assert "bot" in response.text + assert "bot ·" not in response.text + assert '
' not in response.text From c7e8a4aaac712519d3ffab5b0c774ea57907ddc1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 1 Jul 2020 14:36:36 -0700 Subject: [PATCH 0029/1705] Handle missing request object, refs #884 --- datasette/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index dd443af9..254d7872 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -739,7 +739,7 @@ class Datasette: **{ "actor": request.actor if request else None, "display_actor": display_actor, - "show_logout": "ds_actor" in request.cookies, + "show_logout": request is not None and "ds_actor" in request.cookies, "app_css_hash": self.app_css_hash(), "zip": zip, "body_scripts": body_scripts, From f1f581b7ffcd5d8f3ae6c1c654d813a6641410eb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 1 Jul 2020 14:43:07 -0700 Subject: [PATCH 0030/1705] Release notes for 0.45 Refs #687, #807, #812, #832, #834, #835, #840, #842, #846, #852, #854, #863, #864, #870 --- README.md | 1 + docs/changelog.rst | 59 +++++++++++++++++++++++++++++++++------------- 2 files changed, 44 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index 84d1dcd4..839bad9e 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover ## News + * 1st July 2020: [Datasette 0.45](http://datasette.readthedocs.io/en/latest/changelog.html#v0-45) - [Magic parameters for canned queries](https://datasette.readthedocs.io/en/latest/sql_queries.html#canned-queries-magic-parameters), a log out feature, improved plugin documentation and four new plugin hooks. * 20th June 2020: [A cookiecutter template for writing Datasette plugins](https://simonwillison.net/2020/Jun/20/cookiecutter-plugins/) * 11th June 2020: [Datasette 0.44](http://datasette.readthedocs.io/en/latest/changelog.html#v0-44) - [Authentication and permissions](https://datasette.readthedocs.io/en/latest/authentication.html), [writable canned queries](https://datasette.readthedocs.io/en/latest/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more. * 28th May 2020: [Datasette 0.43](http://datasette.readthedocs.io/en/latest/changelog.html#v0-43) - Redesigned [register_output_renderer](https://datasette.readthedocs.io/en/latest/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. diff --git a/docs/changelog.rst b/docs/changelog.rst index adfd7c5a..62a6350a 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,34 +4,61 @@ Changelog ========= -.. _v0_45a5: +.. _v0_45: -0.45a5 (2020-06-30) -------------------- +0.45 (2020-07-01) +----------------- -.. warning:: This is an **alpha** release. See :ref:`contributing_alpha_beta`. +Magic parameters for canned queries, a log out feature, improved plugin documentation and four new plugin hooks. -- Canned queries now support :ref:`canned_queries_magic_parameters`, which can be used to insert or select automatically generated values. For example:: +Magic parameters for canned queries +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - insert into logs - (user_id, timestamp) - values - (:_actor_id, :_now_datetime_utc) +Canned queries now support :ref:`canned_queries_magic_parameters`, which can be used to insert or select automatically generated values. For example:: + + insert into logs + (user_id, timestamp) + values + (:_actor_id, :_now_datetime_utc) + +This inserts the currently authenticated actor ID and the current datetime. 
(`#842 `__) + +Log out +~~~~~~~ + +The :ref:`ds_actor cookie ` can be used by plugins (or by Datasette's :ref:`--root mechanism`) to authenticate users. The new ``/-/logout`` page provides a way to clear that cookie. + +A "Log out" button now shows in the global navigation provided the user is authenticated using the ``ds_actor`` cookie. (`#840 `__) + +Better plugin documentation +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The plugin documentation has been re-arranged into four sections, including a brand new section on testing plugins. (`#687 `__) + +- :ref:`plugins` introduces Datasette's plugin system and describes how to install and configure plugins. +- :ref:`writing_plugins` describes how to author plugins, from simple one-off plugins to packaged plugins that can be published to PyPI. It also describes how to start a plugin using the new `datasette-plugin `__ cookiecutter template. +- :ref:`plugin_hooks` is a full list of detailed documentation for every Datasette plugin hook. +- :ref:`testing_plugins` describes how to write tests for Datasette plugins, using `pytest `__ and `HTTPX `__. + +New plugin hooks +~~~~~~~~~~~~~~~~ + +- :ref:`plugin_hook_register_magic_parameters` can be used to define new types of magic canned query parameters. +- :ref:`plugin_hook_startup` can run custom code when Datasette first starts up. `datasette-init `__ is a new plugin that uses this hook to create database tables and views on startup if they have not yet been created. (`#834 `__) +- :ref:`plugin_hook_canned_queries` lets plugins provide additional canned queries beyond those defined in Datasette's metadata. See `datasette-saved-queries `__ for an example of this hook in action. (`#852 `__) +- :ref:`plugin_hook_forbidden` is a hook for customizing how Datasette responds to 403 forbidden errors. (`#812 `__) + +Smaller changes +~~~~~~~~~~~~~~~ - This inserts the currently authenticated actor ID and the current datetime. (`#842 `__) -- New ``/-/logout`` page, linked to from the navigation if you are logged in with a ``ds_actor`` cookie. (`#840 `__) - Cascading view permissions - so if a user has ``view-table`` they can view the table page even if they do not have ``view-database`` or ``view-instance``. (`#832 `__) - CSRF protection no longer applies to ``Authentication: Bearer token`` requests or requests without cookies. (`#835 `__) -- New :ref:`plugin_hook_forbidden` plugin hook for customizing how Datasette responds to a 403 forbidden error. (`#812 `__) -- New :ref:`plugin_hook_register_magic_parameters` plugin hook. -- New :ref:`plugin_hook_startup` plugin hook. (`#834 `__) -- New :ref:`plugin_hook_canned_queries` plugin hook. See `datasette-saved-queries `__ for an example of this hook in action. (`#852 `__) - ``datasette.add_message()`` now works inside plugins. (`#864 `__) - Workaround for "Too many open files" error in test runs. (`#846 `__) - Respect existing ``scope["actor"]`` if already set by ASGI middleware. (`#854 `__) - New process for shipping :ref:`contributing_alpha_beta`. (`#807 `__) -- Re-arranged plugins documentation into :ref:`plugins`, :ref:`writing_plugins`, :ref:`plugin_hooks` and a new section on :ref:`testing_plugins`. (`#687 `__) - ``{{ csrftoken() }}`` now works when plugins render a template using ``datasette.render_template(..., request=request)``. (`#863 `__) +- Datasette now creates a single :ref:`internals_request` and uses it throughout the lifetime of the current HTTP request. (`#870 `__) .. 
_v0_44: From 57879dc8b346a435804a9e45ffaacbf2a0228bc6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 1 Jul 2020 17:23:37 -0700 Subject: [PATCH 0031/1705] Better titles for canned query pages, closes #887 --- datasette/templates/query.html | 2 +- tests/test_html.py | 7 +++++++ tests/test_permissions.py | 4 ++-- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/datasette/templates/query.html b/datasette/templates/query.html index c65953fb..0882e142 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -28,7 +28,7 @@ {% block content %} -

{{ metadata.title or database }}{% if private %} 🔒{% endif %}

+

{{ metadata.title or database }}{% if canned_query and not metadata.title %}: {{ canned_query }}{% endif %}{% if private %} 🔒{% endif %}

{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} diff --git a/tests/test_html.py b/tests/test_html.py index d1411afd..ebd91cf1 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1004,6 +1004,13 @@ def test_404_content_type(app_client): assert "text/html; charset=utf-8" == response.headers["content-type"] +def test_canned_query_default_title(app_client): + response = app_client.get("/fixtures/magic_parameters") + assert response.status == 200 + soup = Soup(response.body, "html.parser") + assert "fixtures: magic_parameters" == soup.find("h1").text + + def test_canned_query_with_custom_metadata(app_client): response = app_client.get("/fixtures/neighborhood_search?text=town") assert response.status == 200 diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 2d57b5e3..90e58a27 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -179,13 +179,13 @@ def test_view_query(allow, expected_anon, expected_auth): assert expected_anon == anon_response.status if allow and anon_response.status == 200: # Should be no padlock - assert ">fixtures 🔒" not in anon_response.text + assert "🔒" not in anon_response.text auth_response = client.get( "/fixtures/q", cookies={"ds_actor": client.actor_cookie({"id": "root"})} ) assert expected_auth == auth_response.status if allow and expected_anon == 403 and expected_auth == 200: - assert ">fixtures 🔒" in auth_response.text + assert ">fixtures: q 🔒" in auth_response.text @pytest.mark.parametrize( From ea99a4431ce5bc2d65a3496da5b38e1986550a96 Mon Sep 17 00:00:00 2001 From: Amjith Ramanujam Date: Thu, 2 Jul 2020 20:08:32 -0700 Subject: [PATCH 0032/1705] Only load Python files from plugins-dir Pull request #890. Thanks, @amjith! * Load only python files from plugins-dir * Add a test to verify non-python files are not loaded as plugins --- datasette/app.py | 8 +++++--- tests/test_config_dir.py | 4 ++++ 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 254d7872..bafee857 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -2,6 +2,7 @@ import asyncio import asgi_csrf import collections import datetime +import glob import hashlib import inspect import itertools @@ -263,9 +264,10 @@ class Datasette: # Execute plugins in constructor, to ensure they are available # when the rest of `datasette inspect` executes if self.plugins_dir: - for filename in os.listdir(self.plugins_dir): - filepath = os.path.join(self.plugins_dir, filename) - mod = module_from_path(filepath, name=filename) + for filepath in glob.glob(os.path.join(self.plugins_dir, "*.py")): + if not os.path.isfile(filepath): + continue + mod = module_from_path(filepath, name=os.path.basename(filepath)) try: pm.register(mod) except ValueError: diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index b1f6994f..430eba16 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -30,6 +30,8 @@ def config_dir_client(tmp_path_factory): plugins_dir = config_dir / "plugins" plugins_dir.mkdir() (plugins_dir / "hooray.py").write_text(PLUGIN, "utf-8") + (plugins_dir / "non_py_file.txt").write_text(PLUGIN, "utf-8") + (plugins_dir / ".mypy_cache").mkdir() templates_dir = config_dir / "templates" templates_dir.mkdir() @@ -95,6 +97,8 @@ def test_plugins(config_dir_client): response = config_dir_client.get("/-/plugins.json") assert 200 == response.status assert "hooray.py" in {p["name"] for p in response.json} + assert "non_py_file.txt" not in {p["name"] 
for p in response.json} + assert "mypy_cache" not in {p["name"] for p in response.json} def test_templates_and_plugin(config_dir_client): From bcb59ca466421f199444e2132c220f6ffaa9c655 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 2 Jul 2020 21:29:32 -0700 Subject: [PATCH 0033/1705] codecov should not be blocking From https://docs.codecov.io/docs/common-recipe-list --- codecov.yml | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 codecov.yml diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 00000000..bfdc9877 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,8 @@ +coverage: + status: + project: + default: + informational: true + patch: + default: + informational: true From ba739b2457306dbc14ae4a87003e8784e5ed715d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 7 Jul 2020 12:54:54 -0700 Subject: [PATCH 0034/1705] An open source multi-tool for exploring and publishing data --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index a579fdd6..ec5e9bd7 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -17,7 +17,7 @@ datasette| .. |docker: datasette| image:: https://img.shields.io/badge/docker-datasette-blue :target: https://hub.docker.com/r/datasetteproject/datasette -*A tool for exploring and publishing data* +*An open source multi-tool for exploring and publishing data* Datasette is a tool for exploring and publishing data. It helps people take data of any shape or size and publish that as an interactive, explorable website and accompanying API. From cd231e97cdff914c0371ac2f4dabf10ae93f485e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 7 Jul 2020 19:01:13 -0700 Subject: [PATCH 0035/1705] Updated example for asgi_wrapper --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index fc14bba0..a0a746b0 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -669,7 +669,7 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att return add_x_databases_header return wrap_with_databases_header -Examples: `datasette-search-all `_, `datasette-media `_ +Example: `datasette-cors `_ .. _plugin_hook_startup: From ee0ef016523a765b6ef6eaa43cad9ad568f78ae4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 12 Jul 2020 12:53:29 -0700 Subject: [PATCH 0036/1705] Added new logo to the documentation --- docs/conf.py | 10 ++++++++-- docs/datasette-logo.svg | 38 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 46 insertions(+), 2 deletions(-) create mode 100644 docs/datasette-logo.svg diff --git a/docs/conf.py b/docs/conf.py index b273afca..86bd8b4e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -91,14 +91,20 @@ html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -# -# html_theme_options = {} +html_theme_options = { + "logo_only": True, + "style_nav_header_background": "white", + "prev_next_buttons_location": "both", +} + # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
html_static_path = ["_static"] +html_logo = "datasette-logo.svg" + html_css_files = [ "css/custom.css", ] diff --git a/docs/datasette-logo.svg b/docs/datasette-logo.svg new file mode 100644 index 00000000..b2fd18c6 --- /dev/null +++ b/docs/datasette-logo.svg @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From 4691228a81550818fe7d4b43f67c76da2640687f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 12 Jul 2020 13:00:16 -0700 Subject: [PATCH 0037/1705] Fix for version color in nav, refs #892 --- docs/_static/css/custom.css | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/_static/css/custom.css b/docs/_static/css/custom.css index d7c2f164..4dabb725 100644 --- a/docs/_static/css/custom.css +++ b/docs/_static/css/custom.css @@ -1,3 +1,7 @@ a.external { overflow-wrap: anywhere; } + +div .wy-side-nav-search > div.version { + color: rgba(0,0,0,0.75); +} From c5f06bc356fb5917ef7fbb6fe4693f30d711cdb3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 16 Jul 2020 12:06:45 -0700 Subject: [PATCH 0038/1705] "white-space: pre-wrap" for all table cells, refs #896 --- datasette/static/app.css | 1 + 1 file changed, 1 insertion(+) diff --git a/datasette/static/app.css b/datasette/static/app.css index ed98b13e..8428a933 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -20,6 +20,7 @@ td { border-right: 1px solid #eee; padding: 4px; vertical-align: top; + white-space: pre-wrap; } td.col-link { font-weight: bold; From 1f6a134369e6a7efaae9db469f15b1dd2b7f3709 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 17 Jul 2020 13:12:35 -0700 Subject: [PATCH 0039/1705] await request.post_body() method, closes #897 --- datasette/utils/asgi.py | 6 ++++-- docs/internals.rst | 5 ++++- tests/test_internals_request.py | 29 +++++++++++++++++++++++++++++ 3 files changed, 37 insertions(+), 3 deletions(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 08c57b26..bf8461f8 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -90,8 +90,7 @@ class Request: def actor(self): return self.scope.get("actor", None) - async def post_vars(self): - body = [] + async def post_body(self): body = b"" more_body = True while more_body: @@ -99,7 +98,10 @@ class Request: assert message["type"] == "http.request", message body += message.get("body", b"") more_body = message.get("more_body", False) + return body + async def post_vars(self): + body = await self.post_body() return dict(parse_qsl(body.decode("utf-8"), keep_blank_values=True)) @classmethod diff --git a/docs/internals.rst b/docs/internals.rst index e4d0ea50..7ae836e9 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -48,11 +48,14 @@ The request object is passed to various plugin hooks. It represents an incoming ``.actor`` - dictionary (str -> Any) or None The currently authenticated actor (see :ref:`actors `), or ``None`` if the request is unauthenticated. -The object also has one awaitable method: +The object also has two awaitable methods: ``await request.post_vars()`` - dictionary Returns a dictionary of form variables that were submitted in the request body via ``POST``. Don't forget to read about :ref:`internals_csrf`! +``await request.post_body()`` - bytes + Returns the un-parsed body of a request submitted by ``POST`` - useful for things like incoming JSON data. + .. 
_internals_multiparams: The MultiParams class diff --git a/tests/test_internals_request.py b/tests/test_internals_request.py index 8367a693..a659262b 100644 --- a/tests/test_internals_request.py +++ b/tests/test_internals_request.py @@ -1,4 +1,5 @@ from datasette.utils.asgi import Request +import json import pytest @@ -26,6 +27,34 @@ async def test_request_post_vars(): assert {"foo": "bar", "baz": "1", "empty": ""} == await request.post_vars() +@pytest.mark.asyncio +async def test_request_post_body(): + scope = { + "http_version": "1.1", + "method": "POST", + "path": "/", + "raw_path": b"/", + "query_string": b"", + "scheme": "http", + "type": "http", + "headers": [[b"content-type", b"application/json"]], + } + + data = {"hello": "world"} + + async def receive(): + return { + "type": "http.request", + "body": json.dumps(data, indent=4).encode("utf-8"), + "more_body": False, + } + + request = Request(scope, receive) + body = await request.post_body() + assert isinstance(body, bytes) + assert data == json.loads(body) + + def test_request_args(): request = Request.fake("/foo?multi=1&multi=2&single=3") assert "1" == request.args.get("multi") From d9a5ef1c32a4390e398653ebfd570f8e1a03d93e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 19 Jul 2020 17:49:32 -0700 Subject: [PATCH 0040/1705] Don't need this, we're not using GitHub pages --- _config.yml | 1 - 1 file changed, 1 deletion(-) delete mode 100644 _config.yml diff --git a/_config.yml b/_config.yml deleted file mode 100644 index 3397c9a4..00000000 --- a/_config.yml +++ /dev/null @@ -1 +0,0 @@ -theme: jekyll-theme-architect \ No newline at end of file From 02dc6298bdbfb1d63e0d2a39ff597b5fcc60e06b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 21 Jul 2020 08:22:31 -0700 Subject: [PATCH 0041/1705] permission_allowed resource can be a tuple --- docs/internals.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/internals.rst b/docs/internals.rst index 7ae836e9..6dc2b60c 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -214,8 +214,8 @@ await .permission_allowed(actor, action, resource=None, default=False) ``action`` - string The name of the action that is being permission checked. -``resource`` - string, optional - The resource, e.g. the name of the table. Only some permissions apply to a resource. +``resource`` - string or tuple, optional + The resource, e.g. the name of the database, or a tuple of two strings containing the name of the database and the name of the table. Only some permissions apply to a resource. ``default`` - optional, True or False Should this permission check be default allow or default deny. 
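The tuple form means a single call can check a table-level permission. A short sketch, assuming a ``Datasette`` instance named ``datasette`` and the ``fixtures`` database used throughout the test suite:

.. code-block:: python

    # "view-table" takes a (database, table) tuple as its resource,
    # mirroring the permission_allowed() call made by the table view
    allowed = await datasette.permission_allowed(
        {"id": "root"},
        "view-table",
        resource=("fixtures", "facetable"),
        default=True,
    )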
From 213e6a892636408d289dd75511327c4312c438e4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 21 Jul 2020 21:52:35 -0700 Subject: [PATCH 0042/1705] content-length for DB downloads, closes #905 --- datasette/utils/asgi.py | 2 ++ tests/test_html.py | 10 ++++++++-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index bf8461f8..7caa3469 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -7,6 +7,7 @@ from html import escape from http.cookies import SimpleCookie, Morsel import re import aiofiles +import aiofiles.os # Workaround for adding samesite support to pre 3.8 python Morsel._reserved["samesite"] = "SameSite" @@ -252,6 +253,7 @@ async def asgi_send_file( if filename: headers["Content-Disposition"] = 'attachment; filename="{}"'.format(filename) first = True + headers["content-length"] = str((await aiofiles.os.stat(str(filepath))).st_size) async with aiofiles.open(str(filepath), mode="rb") as fp: if first: await asgi_start( diff --git a/tests/test_html.py b/tests/test_html.py index ebd91cf1..e3d3c2fc 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -898,7 +898,7 @@ def test_table_metadata(app_client): assert_footer_links(soup) -def test_database_download_allowed_for_immutable(): +def test_database_download_for_immutable(): with make_app_client(is_immutable=True) as client: assert not client.ds.databases["fixtures"].is_mutable # Regular page should have a download link @@ -906,7 +906,13 @@ def test_database_download_allowed_for_immutable(): soup = Soup(response.body, "html.parser") assert len(soup.findAll("a", {"href": re.compile(r"\.db$")})) # Check we can actually download it - assert 200 == client.get("/fixtures.db").status + download_response = client.get("/fixtures.db") + assert 200 == download_response.status + # Check the content-length header exists + assert "content-length" in download_response.headers + content_length = download_response.headers["content-length"] + assert content_length.isdigit() + assert int(content_length) > 100 def test_database_download_disallowed_for_mutable(app_client): From 028f193dd6233fa116262ab4b07b13df7dcec9be Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 22 Jul 2020 11:17:05 -0700 Subject: [PATCH 0043/1705] How to use a custom domain with Cloud Run --- docs/publish.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/publish.rst b/docs/publish.rst index ebaf826a..64868078 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -54,6 +54,8 @@ You may need to interact with prompts from the tool. Once it has finished it wil Service [my-service] revision [my-service-00001] has been deployed and is serving traffic at https://my-service-j7hipcg4aq-uc.a.run.app +Cloud Run provides a URL on the ``.run.app`` domain, but you can also point your own domain or subdomain at your Cloud Run service - see `mapping custom domains `__ in the Cloud Run documentation for details. + .. literalinclude:: datasette-publish-cloudrun-help.txt .. _publish_fly: From 6be5654ffab282e8cf39cc138ba2d4496ebc7407 Mon Sep 17 00:00:00 2001 From: abeyerpath Date: Fri, 24 Jul 2020 13:39:53 -0700 Subject: [PATCH 0044/1705] Exclude tests from package, properly this time The `exclude` argument to `find_packages` needs an iterable of package names. Closes #456 - thanks, @abeyerpath! 
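The difference between the two calls is easy to miss - a brief illustration (the package layout here is hypothetical):

.. code-block:: python

    from setuptools import find_packages

    # A bare string is unpacked character by character, producing filters
    # for packages named "t", "e", "s" - so "tests" still slips through
    find_packages(exclude="tests")

    # An iterable of names excludes the tests package as intended
    find_packages(exclude=("tests",))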
--- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 5958d60b..b785ec8b 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,7 @@ setup( "Issues": "https://github.com/simonw/datasette/issues", "CI": "https://travis-ci.org/simonw/datasette", }, - packages=find_packages(exclude="tests"), + packages=find_packages(exclude=("tests",)), package_data={"datasette": ["templates/*.html"]}, include_package_data=True, install_requires=[ From 12c0bc09cc4bf9addde20f3d46613de11f27c641 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 24 Jul 2020 15:54:41 -0700 Subject: [PATCH 0045/1705] /-/allow-debug tool, closes #908 --- datasette/app.py | 4 ++ datasette/templates/allow_debug.html | 58 ++++++++++++++++++++++++++++ datasette/views/special.py | 38 ++++++++++++++++++ docs/authentication.rst | 7 ++++ tests/test_permissions.py | 18 +++++++++ 5 files changed, 125 insertions(+) create mode 100644 datasette/templates/allow_debug.html diff --git a/datasette/app.py b/datasette/app.py index bafee857..fa273df0 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -35,6 +35,7 @@ from .views.special import ( PatternPortfolioView, AuthTokenView, LogoutView, + AllowDebugView, PermissionsDebugView, MessagesDebugView, ) @@ -869,6 +870,9 @@ class Datasette: add_route( MessagesDebugView.as_view(self), r"/-/messages$", ) + add_route( + AllowDebugView.as_view(self), r"/-/allow-debug$", + ) add_route( PatternPortfolioView.as_view(self), r"/-/patterns$", ) diff --git a/datasette/templates/allow_debug.html b/datasette/templates/allow_debug.html new file mode 100644 index 00000000..05e3dd90 --- /dev/null +++ b/datasette/templates/allow_debug.html @@ -0,0 +1,58 @@ +{% extends "base.html" %} + +{% block title %}Debug allow rules{% endblock %} + +{% block extra_head %} + +{% endblock %} + +{% block content %} + +

Debug allow rules

+ +

Use this tool to try out different actor and allow combinations. See Defining permissions with "allow" blocks for documentation.

+ + +
+

+ +
+
+

+ +
+
+ +
+ + +{% if error %}

{{ error }}

{% endif %} + +{% if result == "True" %}

Result: allow

{% endif %} + +{% if result == "False" %}

Result: deny

{% endif %} + +{% endblock %} diff --git a/datasette/views/special.py b/datasette/views/special.py index ed5a36f7..3067b0d1 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -1,5 +1,6 @@ import json from datasette.utils.asgi import Response, Forbidden +from datasette.utils import actor_matches_allow from .base import BaseView import secrets @@ -107,6 +108,43 @@ class PermissionsDebugView(BaseView): ) +class AllowDebugView(BaseView): + name = "allow_debug" + + def __init__(self, datasette): + self.ds = datasette + + async def get(self, request): + errors = [] + actor_input = request.args.get("actor") or '{"id": "root"}' + try: + actor = json.loads(actor_input) + actor_input = json.dumps(actor, indent=4) + except json.decoder.JSONDecodeError as ex: + errors.append("Actor JSON error: {}".format(ex)) + allow_input = request.args.get("allow") or '{"id": "*"}' + try: + allow = json.loads(allow_input) + allow_input = json.dumps(allow, indent=4) + except json.decoder.JSONDecodeError as ex: + errors.append("Allow JSON error: {}".format(ex)) + + result = None + if not errors: + result = str(actor_matches_allow(actor, allow)) + + return await self.render( + ["allow_debug.html"], + request, + { + "result": result, + "error": "\n\n".join(errors) if errors else "", + "actor_input": actor_input, + "allow_input": allow_input, + }, + ) + + class MessagesDebugView(BaseView): name = "messages_debug" diff --git a/docs/authentication.rst b/docs/authentication.rst index a2b1276b..648d40f8 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -157,6 +157,13 @@ You can specify that unauthenticated actors (from anonymous HTTP requests) shoul Allow keys act as an "or" mechanism. An actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block. +.. _AllowDebugView: + +The /-/allow-debug tool +----------------------- + +The ``/-/allow-debug`` tool lets you try out different ``"allow"`` blocks against different ``"actor"`` JSON objects. You can try that out here: https://latest.datasette.io/-/allow-debug + .. _authentication_permissions_metadata: Configuring permissions in metadata.json ---------------------------------------- diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 90e58a27..e66ba53b 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -2,6 +2,7 @@ from .fixtures import app_client, assert_permissions_checked, make_app_client from bs4 import BeautifulSoup as Soup import copy import pytest +import urllib @pytest.mark.parametrize( @@ -312,6 +313,23 @@ def test_permissions_debug(app_client): ] == checks + +@pytest.mark.parametrize( + "actor,allow,expected_fragment", + [ + ('{"id":"root"}', "{}", "Result: deny"), + ('{"id":"root"}', '{"id": "*"}', "Result: allow"), + ('{"', '{"id": "*"}', "Actor JSON error"), + ('{"id":"root"}', '"*"}', "Allow JSON error"), + ], +) +def test_allow_debug(app_client, actor, allow, expected_fragment): + response = app_client.get( + "/-/allow-debug?" 
+ urllib.parse.urlencode({"actor": actor, "allow": allow}) + ) + assert 200 == response.status + assert expected_fragment in response.text + + @pytest.mark.parametrize( "allow,expected", [({"id": "root"}, 403), ({"id": "root", "unauthenticated": True}, 200),], From 88065fb74fb2ca66ebb0c1a0a5a75ca13c25405f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 24 Jul 2020 16:52:16 -0700 Subject: [PATCH 0046/1705] Increase size of allow/actor fields, refs #908 --- datasette/templates/allow_debug.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/templates/allow_debug.html b/datasette/templates/allow_debug.html index 05e3dd90..6f2a8122 100644 --- a/datasette/templates/allow_debug.html +++ b/datasette/templates/allow_debug.html @@ -5,7 +5,7 @@ {% block extra_head %} {% endblock %} @@ -111,7 +110,7 @@

View and edit SQL

{% endif %} - + {% if suggested_facets %}

@@ -160,10 +159,10 @@

Advanced export

JSON shape: - default, - array, - newline-delimited{% if primary_keys %}, - object + default, + array, + newline-delimited{% if primary_keys %}, + object {% endif %}

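The four links above map onto the ``_shape`` parameter. A sketch of fetching each supported shape through the ``datasette.client`` interface introduced by the rest of this patch - assuming the ``datasette`` fixture and the ``fixtures`` database from the test suite:

.. code-block:: python

    import pytest


    @pytest.mark.asyncio
    async def test_advanced_export_shapes(datasette):
        # _shape accepts objects, arrays, array and object
        # (newline-delimited adds _nl=on to _shape=array)
        for shape in ("objects", "arrays", "array", "object"):
            response = await datasette.client.get(
                "/fixtures/facetable.json?_shape=" + shape
            )
            assert response.status_code == 200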
diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index 6fc4c633..8a8810e7 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -1,23 +1,39 @@ -from datasette.utils import MultiParams -from asgiref.testing import ApplicationCommunicator from asgiref.sync import async_to_sync -from urllib.parse import unquote, quote, urlencode -from http.cookies import SimpleCookie +from urllib.parse import urlencode import json +# These wrapper classes pre-date the introduction of +# datasette.client and httpx to Datasette. They could +# be removed if the Datasette tests are modified to +# call datasette.client directly. + class TestResponse: - def __init__(self, status, headers, body): - self.status = status - self.headers = headers - self.body = body + def __init__(self, httpx_response): + self.httpx_response = httpx_response + + @property + def status(self): + return self.httpx_response.status_code + + @property + def headers(self): + return self.httpx_response.headers + + @property + def body(self): + return self.httpx_response.content @property def cookies(self): - cookie = SimpleCookie() - for header in self.headers.getlist("set-cookie"): - cookie.load(header) - return {key: value.value for key, value in cookie.items()} + return dict(self.httpx_response.cookies) + + def cookie_was_deleted(self, cookie): + return any( + h + for h in self.httpx_response.headers.get_list("set-cookie") + if h.startswith('{}="";'.format(cookie)) + ) @property def json(self): @@ -31,8 +47,8 @@ class TestResponse: class TestClient: max_redirects = 5 - def __init__(self, asgi_app): - self.asgi_app = asgi_app + def __init__(self, ds): + self.ds = ds def actor_cookie(self, actor): return self.ds.sign({"a": actor}, "actor") @@ -94,61 +110,18 @@ class TestClient: post_body=None, content_type=None, ): - query_string = b"" - if "?" 
in path: - path, _, query_string = path.partition("?") - query_string = query_string.encode("utf8") - if "%" in path: - raw_path = path.encode("latin-1") - else: - raw_path = quote(path, safe="/:,").encode("latin-1") - asgi_headers = [[b"host", b"localhost"]] - if headers: - for key, value in headers.items(): - asgi_headers.append([key.encode("utf-8"), value.encode("utf-8")]) + headers = headers or {} if content_type: - asgi_headers.append((b"content-type", content_type.encode("utf-8"))) - if cookies: - sc = SimpleCookie() - for key, value in cookies.items(): - sc[key] = value - asgi_headers.append([b"cookie", sc.output(header="").encode("utf-8")]) - scope = { - "type": "http", - "http_version": "1.0", - "method": method, - "path": unquote(path), - "raw_path": raw_path, - "query_string": query_string, - "headers": asgi_headers, - } - instance = ApplicationCommunicator(self.asgi_app, scope) - - if post_body: - body = post_body.encode("utf-8") - await instance.send_input({"type": "http.request", "body": body}) - else: - await instance.send_input({"type": "http.request"}) - - # First message back should be response.start with headers and status - messages = [] - start = await instance.receive_output(2) - messages.append(start) - assert start["type"] == "http.response.start" - response_headers = MultiParams( - [(k.decode("utf8"), v.decode("utf8")) for k, v in start["headers"]] + headers["content-type"] = content_type + httpx_response = await self.ds.client.request( + method, + path, + allow_redirects=allow_redirects, + cookies=cookies, + headers=headers, + content=post_body, ) - status = start["status"] - # Now loop until we run out of response.body - body = b"" - while True: - message = await instance.receive_output(2) - messages.append(message) - assert message["type"] == "http.response.body" - body += message["body"] - if not message.get("more_body"): - break - response = TestResponse(status, response_headers, body) + response = TestResponse(httpx_response) if allow_redirects and response.status in (301, 302): assert ( redirect_count < self.max_redirects diff --git a/datasette/views/base.py b/datasette/views/base.py index 3fe2abd5..6cf0e8d9 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -113,6 +113,15 @@ class BaseView: async def options(self, request, *args, **kwargs): return Response.text("Method not allowed", status=405) + async def put(self, request, *args, **kwargs): + return Response.text("Method not allowed", status=405) + + async def patch(self, request, *args, **kwargs): + return Response.text("Method not allowed", status=405) + + async def delete(self, request, *args, **kwargs): + return Response.text("Method not allowed", status=405) + async def dispatch_request(self, request, *args, **kwargs): handler = getattr(self, request.method.lower(), None) return await handler(request, *args, **kwargs) diff --git a/setup.py b/setup.py index ddcd8106..8443fb41 100644 --- a/setup.py +++ b/setup.py @@ -49,6 +49,7 @@ setup( "click-default-group~=1.2.2", "Jinja2>=2.10.3,<2.12.0", "hupper~=1.9", + "httpx>=0.15", "pint~=0.9", "pluggy~=0.13.0", "uvicorn~=0.11", diff --git a/tests/fixtures.py b/tests/fixtures.py index 2f990490..e2a0ae1e 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -144,9 +144,7 @@ def make_app_client( template_dir=template_dir, ) ds.sqlite_functions.append(("sleep", 1, lambda n: time.sleep(float(n)))) - client = TestClient(ds.app()) - client.ds = ds - yield client + yield TestClient(ds) @pytest.fixture(scope="session") @@ -158,9 +156,7 @@ def 
app_client(): @pytest.fixture(scope="session") def app_client_no_files(): ds = Datasette([]) - client = TestClient(ds.app()) - client.ds = ds - yield client + yield TestClient(ds) @pytest.fixture(scope="session") diff --git a/tests/test_api.py b/tests/test_api.py index c797a1ad..4aa9811c 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -739,6 +739,7 @@ def test_table_shape_object_compound_primary_Key(app_client): assert {"a,b": {"pk1": "a", "pk2": "b", "content": "c"}} == response.json +@pytest.mark.xfail def test_table_with_slashes_in_name(app_client): response = app_client.get( "/fixtures/table%2Fwith%2Fslashes.csv?_shape=objects&_format=json" @@ -1186,6 +1187,7 @@ def test_row_format_in_querystring(app_client): assert [{"id": "1", "content": "hello"}] == response.json["rows"] +@pytest.mark.xfail def test_row_strange_table_name(app_client): response = app_client.get( "/fixtures/table%2Fwith%2Fslashes.csv/3.json?_shape=objects" diff --git a/tests/test_auth.py b/tests/test_auth.py index a4c5cf45..f244f268 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -87,7 +87,8 @@ def test_logout(app_client): cookies={"ds_actor": app_client.actor_cookie({"id": "test"})}, allow_redirects=False, ) - assert "" == response4.cookies["ds_actor"] + # The ds_actor cookie should have been unset + assert response4.cookie_was_deleted("ds_actor") # Should also have set a message messages = app_client.ds.unsign(response4.cookies["ds_messages"], "messages") assert [["You are now logged out", 2]] == messages diff --git a/tests/test_cli.py b/tests/test_cli.py index 7ae9d6e7..09864602 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -108,8 +108,7 @@ def test_metadata_yaml(): open_browser=False, return_instance=True, ) - client = _TestClient(ds.app()) - client.ds = ds + client = _TestClient(ds) response = client.get("/-/metadata.json") assert {"title": "Hello from YAML"} == response.json diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index 430eba16..15c7a5c4 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -76,9 +76,7 @@ def config_dir_client(tmp_path_factory): ) ds = Datasette([], config_dir=config_dir) - client = _TestClient(ds.app()) - client.ds = ds - yield client + yield _TestClient(ds) def test_metadata(config_dir_client): @@ -137,8 +135,7 @@ def test_metadata_yaml(tmp_path_factory, filename): config_dir = tmp_path_factory.mktemp("yaml-config-dir") (config_dir / filename).write_text("title: Title from metadata", "utf-8") ds = Datasette([], config_dir=config_dir) - client = _TestClient(ds.app()) - client.ds = ds + client = _TestClient(ds) response = client.get("/-/metadata.json") assert 200 == response.status assert {"title": "Title from metadata"} == response.json diff --git a/tests/test_html.py b/tests/test_html.py index 02d49b52..c0e3625e 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -142,6 +142,7 @@ def test_row_redirects_with_url_hash(app_client_with_hash): assert response.status == 200 +@pytest.mark.xfail def test_row_strange_table_name_with_url_hash(app_client_with_hash): response = app_client_with_hash.get( "/fixtures/table%2Fwith%2Fslashes.csv/3", allow_redirects=False @@ -535,6 +536,7 @@ def test_facets_persist_through_filter_form(app_client): ] +@pytest.mark.xfail @pytest.mark.parametrize( "path,expected_classes", [ @@ -566,6 +568,7 @@ def test_css_classes_on_body(app_client, path, expected_classes): assert classes == expected_classes +@pytest.mark.xfail @pytest.mark.parametrize( "path,expected_considered", [ diff --git 
a/tests/test_internals_datasette_client.py b/tests/test_internals_datasette_client.py new file mode 100644 index 00000000..d73fbb06 --- /dev/null +++ b/tests/test_internals_datasette_client.py @@ -0,0 +1,44 @@ +from .fixtures import app_client +import httpx +import pytest + + +@pytest.fixture +def datasette(app_client): + return app_client.ds + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "method,path,expected_status", + [ + ("get", "/", 200), + ("options", "/", 405), + ("head", "/", 200), + ("put", "/", 405), + ("patch", "/", 405), + ("delete", "/", 405), + ], +) +async def test_client_methods(datasette, method, path, expected_status): + client_method = getattr(datasette.client, method) + response = await client_method(path) + assert isinstance(response, httpx.Response) + assert response.status_code == expected_status + # Try that again using datasette.client.request + response2 = await datasette.client.request(method, path) + assert response2.status_code == expected_status + + +@pytest.mark.asyncio +async def test_client_post(datasette): + response = await datasette.client.post( + "/-/messages", + data={ + "message": "A message", + }, + allow_redirects=False, + ) + assert isinstance(response, httpx.Response) + assert response.status_code == 302 + assert "ds_messages" in response.cookies diff --git a/tests/test_messages.py b/tests/test_messages.py index d17e015c..830244e1 100644 --- a/tests/test_messages.py +++ b/tests/test_messages.py @@ -25,4 +25,4 @@ def test_messages_are_displayed_and_cleared(app_client): # Messages should be in that HTML assert "xmessagex" in response.text # Cookie should have been set that clears messages - assert "" == response.cookies["ds_messages"] + assert response.cookie_was_deleted("ds_messages") diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 00bedb03..4b3634ab 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -380,9 +380,7 @@ def view_names_client(tmp_path_factory): conn = sqlite3.connect(db_path) conn.executescript(TABLES) return _TestClient( - Datasette( - [db_path], template_dir=str(templates), plugins_dir=str(plugins) - ).app() + Datasette([db_path], template_dir=str(templates), plugins_dir=str(plugins)) ) @@ -748,7 +746,7 @@ def test_hook_register_magic_parameters(restore_working_directory): response = client.post("/data/runme", {}, csrftoken_from=True) assert 200 == response.status actual = client.get("/data/logs.json?_sort_desc=rowid&_shape=array").json - assert [{"rowid": 1, "line": "1.0"}] == actual + assert [{"rowid": 1, "line": "1.1"}] == actual # Now try the GET request against get_uuid response_get = client.get("/data/get_uuid.json?_shape=array") assert 200 == response_get.status From 6e091b14b651d67e0ff41a353d36bbeb1d8ba235 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Oct 2020 09:22:49 -0700 Subject: [PATCH 0169/1705] Run tests against Python 3.9 --- .github/workflows/deploy-latest.yml | 2 +- .github/workflows/publish.yml | 4 ++-- .github/workflows/test-coverage.yml | 4 ++-- .github/workflows/test.yml | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 6c4b4334..625d16b9 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -14,7 +14,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v1 with: - python-version: 3.8 + python-version: 3.9 - uses: actions/cache@v2 name: Configure pip caching with: diff --git a/.github/workflows/publish.yml 
b/.github/workflows/publish.yml index 84a8be6c..c1909bbe 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.6, 3.7, 3.8] + python-version: [3.6, 3.7, 3.8, 3.9] steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} @@ -37,7 +37,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: '3.8' + python-version: '3.9' - uses: actions/cache@v2 name: Configure pip caching with: diff --git a/.github/workflows/test-coverage.yml b/.github/workflows/test-coverage.yml index 0b964c75..1d1cf332 100644 --- a/.github/workflows/test-coverage.yml +++ b/.github/workflows/test-coverage.yml @@ -14,9 +14,9 @@ jobs: - name: Check out datasette uses: actions/checkout@v2 - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: 3.9 - uses: actions/cache@v2 name: Configure pip caching with: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 74e56e13..a1774213 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,7 +7,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.6, 3.7, 3.8] + python-version: [3.6, 3.7, 3.8, 3.9] steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} From 896cc2c6acfefa65c54a162831e7f09159603988 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Oct 2020 09:26:17 -0700 Subject: [PATCH 0170/1705] Replace MockRequest with Request.fake() Close #1004 --- tests/test_facets.py | 20 ++++++++++---------- tests/utils.py | 8 -------- 2 files changed, 10 insertions(+), 18 deletions(-) delete mode 100644 tests/utils.py diff --git a/tests/test_facets.py b/tests/test_facets.py index e3dc3df3..1e19dc3a 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -1,7 +1,7 @@ from datasette.facets import ColumnFacet, ArrayFacet, DateFacet +from datasette.utils.asgi import Request from datasette.utils import detect_json1 from .fixtures import app_client # noqa -from .utils import MockRequest import pytest @@ -9,7 +9,7 @@ import pytest async def test_column_facet_suggest(app_client): facet = ColumnFacet( app_client.ds, - MockRequest("http://localhost/"), + Request.fake("/"), database="fixtures", sql="select * from facetable", table="facetable", @@ -34,7 +34,7 @@ async def test_column_facet_suggest(app_client): async def test_column_facet_suggest_skip_if_already_selected(app_client): facet = ColumnFacet( app_client.ds, - MockRequest("http://localhost/?_facet=planet_int&_facet=on_earth"), + Request.fake("/?_facet=planet_int&_facet=on_earth"), database="fixtures", sql="select * from facetable", table="facetable", @@ -72,7 +72,7 @@ async def test_column_facet_suggest_skip_if_already_selected(app_client): async def test_column_facet_suggest_skip_if_enabled_by_metadata(app_client): facet = ColumnFacet( app_client.ds, - MockRequest("http://localhost/"), + Request.fake("/"), database="fixtures", sql="select * from facetable", table="facetable", @@ -94,7 +94,7 @@ async def test_column_facet_suggest_skip_if_enabled_by_metadata(app_client): async def test_column_facet_results(app_client): facet = ColumnFacet( app_client.ds, - MockRequest("http://localhost/?_facet=city_id"), + Request.fake("/?_facet=city_id"), database="fixtures", sql="select * from facetable", table="facetable", @@ -146,7 +146,7 @@ async def test_column_facet_results(app_client): async def 
test_column_facet_from_metadata_cannot_be_hidden(app_client): facet = ColumnFacet( app_client.ds, - MockRequest("http://localhost/"), + Request.fake("/"), database="fixtures", sql="select * from facetable", table="facetable", @@ -200,7 +200,7 @@ async def test_column_facet_from_metadata_cannot_be_hidden(app_client): async def test_array_facet_suggest(app_client): facet = ArrayFacet( app_client.ds, - MockRequest("http://localhost/"), + Request.fake("/"), database="fixtures", sql="select * from facetable", table="facetable", @@ -220,7 +220,7 @@ async def test_array_facet_suggest(app_client): async def test_array_facet_suggest_not_if_all_empty_arrays(app_client): facet = ArrayFacet( app_client.ds, - MockRequest("http://localhost/"), + Request.fake("/"), database="fixtures", sql="select * from facetable where tags = '[]'", table="facetable", @@ -234,7 +234,7 @@ async def test_array_facet_suggest_not_if_all_empty_arrays(app_client): async def test_array_facet_results(app_client): facet = ArrayFacet( app_client.ds, - MockRequest("http://localhost/?_facet_array=tags"), + Request.fake("/?_facet_array=tags"), database="fixtures", sql="select * from facetable", table="facetable", @@ -279,7 +279,7 @@ async def test_array_facet_results(app_client): async def test_date_facet_results(app_client): facet = DateFacet( app_client.ds, - MockRequest("http://localhost/?_facet_date=created"), + Request.fake("/?_facet_date=created"), database="fixtures", sql="select * from facetable", table="facetable", diff --git a/tests/utils.py b/tests/utils.py deleted file mode 100644 index 8947956b..00000000 --- a/tests/utils.py +++ /dev/null @@ -1,8 +0,0 @@ -class MockRequest: - def __init__(self, url): - self.url = url - self.path = "/" + url.split("://")[1].split("/", 1)[1] - self.query_string = "" - if "?" in url: - self.query_string = url.split("?", 1)[1] - self.path = self.path.split("?")[0] From 6421ca2b22a8ebd801ca17b2ea38a98d353f1faa Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Oct 2020 09:28:17 -0700 Subject: [PATCH 0171/1705] Use actions/setup-python@v2 to deploy latest This should fix an error with Python 3.9. --- .github/workflows/deploy-latest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 625d16b9..55aabb76 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -12,7 +12,7 @@ jobs: - name: Check out datasette uses: actions/checkout@v2 - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: 3.9 - uses: actions/cache@v2 From c12b7a5def7028845a54a9fdac4052a87a0a8bb8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Oct 2020 10:19:50 -0700 Subject: [PATCH 0172/1705] Documentation for datasette.client, closes #1006 Refs #1000 --- docs/internals.rst | 55 +++++++++++++++++++++++++++++++++++++++------- 1 file changed, 47 insertions(+), 8 deletions(-) diff --git a/docs/internals.rst b/docs/internals.rst index bffda3f7..94c142c2 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -1,14 +1,15 @@ .. _internals: -Internals for plugins -===================== +======================= + Internals for plugins +======================= Many :ref:`plugin_hooks` are passed objects that provide access to internal Datasette functionality. The interface to these objects should not be considered stable with the exception of methods that are documented here. .. 
_internals_request:

Request object
-~~~~~~~~~~~~~~
+==============

The request object is passed to various plugin hooks. It represents an incoming HTTP request. It has the following properties:

@@ -59,7 +60,7 @@ The object also has two awaitable methods:

.. _internals_multiparams:

The MultiParams class
----------------------
+=====================

``request.args`` is a ``MultiParams`` object - a dictionary-like object which provides access to querystring parameters that may have multiple values.

@@ -89,7 +90,7 @@ Consider the querystring ``?foo=1&foo=2&bar=3`` - with two values for ``foo`` an

.. _internals_response:

Response class
-~~~~~~~~~~~~~~
+==============

The ``Response`` class can be returned from view functions that have been registered using the :ref:`plugin_register_routes` hook.

@@ -167,7 +168,7 @@ You can use this with :ref:`datasette.sign() ` to set signed coo

.. _internals_datasette:

Datasette class
-~~~~~~~~~~~~~~~
+===============

This object is an instance of the ``Datasette`` class, passed to many plugin hooks as an argument called ``datasette``.

@@ -327,10 +328,48 @@ Datasette's flash messaging mechanism allows you to add a message that will be d

You can try out these messages (including the different visual styling of the three message types) using the ``/-/messages`` debugging tool.

+.. _internals_datasette_client:
+
+.client
+-------
+
+Plugins can make internal HTTP requests to the Datasette instance within which they are running. This ensures that all of Datasette's external JSON APIs are also available to plugins.
+
+The ``datasette.client`` object is a wrapper around the `HTTPX Python library `__, providing an async-friendly API that is similar to the widely used `Requests library `__.
+
+It offers the following methods:
+
+``await datasette.client.get(path, **kwargs)`` - returns HTTPX Response
+    Execute an internal GET request against that path.
+
+``await datasette.client.post(path, **kwargs)`` - returns HTTPX Response
+    Execute an internal POST request. Use ``data={"name": "value"}`` to pass form parameters.
+
+``await datasette.client.options(path, **kwargs)`` - returns HTTPX Response
+    Execute an internal OPTIONS request.
+
+``await datasette.client.head(path, **kwargs)`` - returns HTTPX Response
+    Execute an internal HEAD request.
+
+``await datasette.client.put(path, **kwargs)`` - returns HTTPX Response
+    Execute an internal PUT request.
+
+``await datasette.client.patch(path, **kwargs)`` - returns HTTPX Response
+    Execute an internal PATCH request.
+
+``await datasette.client.delete(path, **kwargs)`` - returns HTTPX Response
+    Execute an internal DELETE request.
+
+``await datasette.client.request(method, path, **kwargs)`` - returns HTTPX Response
+    Execute an internal request with the given HTTP method against that path.
+
+For documentation on available ``**kwargs`` options and the shape of the HTTPX Response object refer to the `HTTPX Async documentation `__.
+
+
.. _internals_database:

Database class
-~~~~~~~~~~~~~~
+==============

Instances of the ``Database`` class can be used to execute queries against attached SQLite databases, and to run introspection against their schemas.

@@ -549,7 +588,7 @@ The ``Database`` class also provides properties and methods for introspecting th

.. _internals_csrf:

CSRF protection
-~~~~~~~~~~~~~~~
+===============

Datasette uses `asgi-csrf `__ to guard against CSRF attacks on form POST submissions.
Users receive a ``ds_csrftoken`` cookie which is compared against the ``csrftoken`` form field (or ``x-csrftoken`` HTTP header) for every incoming request. From a61f0e4e1588083c9fe4636b8fb7178477c4c4a1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Oct 2020 10:51:03 -0700 Subject: [PATCH 0173/1705] Release 0.50 Refs #1001, #514, #891, #943, #969, #970, #978, #980, #996, #997 Closes #1002 --- README.md | 1 + docs/changelog.rst | 30 +++++++++++++++++------------- docs/internals.rst | 5 ++--- 3 files changed, 20 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index cc912a13..5c65d17c 100644 --- a/README.md +++ b/README.md @@ -23,6 +23,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover ## News + * 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. * 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). * 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. * 11th August 2020: [Datasette 0.47](https://docs.datasette.io/en/stable/changelog.html#v0-47) - Datasette can now be installed using Homebrew! `brew install simonw/datasette/datasette`. Also new: `datasette install name-of-plugin` and `datasette uninstall name-of-plugin` commands, and `datasette --get '/-/versions.json'` to output the result of Datasette HTTP calls on the command-line. diff --git a/docs/changelog.rst b/docs/changelog.rst index aad86e7b..046f5b4d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,24 +4,28 @@ Changelog ========= -.. _v0_50_a1: +.. _v0_50: -0.50a1 (2020-10-06) -------------------- +0.50 (2020-10-09) +----------------- + +The key new feature in this release is the **column actions** menu on the table page (`#891 `__). This can be used to sort a column in ascending or descending order, facet data by that column or filter the table to just rows that have a value for that column. + +Plugin authors can use the new :ref:`internals_datasette_client` object to make internal HTTP requests from their plugins, allowing them to make use of Datasette's JSON API. (`#943 `__) + +New :ref:`deploying` documentation with guides for deploying Datasette on a Linux server :ref:`using systemd ` or to hosting providers :ref:`that support buildpacks `. (`#514 `__, `#997 `__) + +Other improvements in this release: -- Column action menu now shows the column type. (`#993 `__) -- Column action sort links now correctly link to the first page of sorted results. (`#989 `__) - :ref:`publish_cloud_run` documentation now covers Google Cloud SDK options. Thanks, Geoffrey Hing. (`#995 `__) - -.. _v0_50_a0: - -0.50a0 (2020-10-01) -------------------- - -- New column action menu - table columns now show a cog icon which provides a contextual menu for that column. (`#981 `__) - New ``datasette -o`` option which opens your browser as soon as Datasette starts up. 
(`#970 `__) -- ``sqlite3.enable_callback_tracebacks(True)`` so errors in custom SQL functions will now display tracebacks. (`#891 `__) +- Datasette now sets ``sqlite3.enable_callback_tracebacks(True)`` so that errors in custom SQL functions will display tracebacks. (`#891 `__) - Fixed two rendering bugs with column headers in portrait mobile view. (`#978 `__, `#980 `__) +- New ``db.table_column_details(table)`` introspection method for retrieving full details of the columns in a specific table, see :ref:`internals_database_introspection`. +- Fixed a routing bug with custom page wildcard templates. (`#996 `__) +- ``datasette publish heroku`` now deploys using Python 3.8.6. +- New ``datasette publish heroku --tar=`` option. (`#969 `__) +- ``OPTIONS`` requests against HTML pages no longer return a 500 error. (`#1001 `__) .. _v0_49_1: diff --git a/docs/internals.rst b/docs/internals.rst index 94c142c2..0fdd943c 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -330,8 +330,8 @@ You can try out these messages (including the different visual styling of the th .. _internals_datasette_client: -.client -------- +datasette.client +---------------- Plugins can make internal HTTP requests to the Datasette instance within which they are running. This ensures that all of Datasette's external JSON APIs are also available to plugins. @@ -365,7 +365,6 @@ It offers the following methods: For documentation on available ``**kwargs`` options and the shape of the HTTPX Response object refer to the `HTTPX Async documentation `__. - .. _internals_database: Database class From 1bdbc8aa7f4fd7a768d456146e44da86cb1b36d1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Oct 2020 10:57:55 -0700 Subject: [PATCH 0174/1705] Datasette now supports Python 3.9 --- docs/changelog.rst | 1 + setup.py | 1 + 2 files changed, 2 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 046f5b4d..a6d74914 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -26,6 +26,7 @@ Other improvements in this release: - ``datasette publish heroku`` now deploys using Python 3.8.6. - New ``datasette publish heroku --tar=`` option. (`#969 `__) - ``OPTIONS`` requests against HTML pages no longer return a 500 error. (`#1001 `__) +- Datasette now supports Python 3.9. .. _v0_49_1: diff --git a/setup.py b/setup.py index 8443fb41..22d164b0 100644 --- a/setup.py +++ b/setup.py @@ -84,6 +84,7 @@ setup( "Intended Audience :: End Users/Desktop", "Topic :: Database", "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.6", From ef76c9ea571eeefe136a18202f87ea8c4ef80ace Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Oct 2020 14:49:13 -0700 Subject: [PATCH 0175/1705] Link to annotated release notes --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5c65d17c..92c898af 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover ## News - * 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. + * 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. 
`datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). * 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). * 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. * 11th August 2020: [Datasette 0.47](https://docs.datasette.io/en/stable/changelog.html#v0-47) - Datasette can now be installed using Homebrew! `brew install simonw/datasette/datasette`. Also new: `datasette install name-of-plugin` and `datasette uninstall name-of-plugin` commands, and `datasette --get '/-/versions.json'` to output the result of Datasette HTTP calls on the command-line. From 99488de329fa252f54db3166e46da468aa512388 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Oct 2020 14:50:19 -0700 Subject: [PATCH 0176/1705] Link to 0.50 annotated release notes --- docs/changelog.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index a6d74914..f5758126 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -28,6 +28,8 @@ Other improvements in this release: - ``OPTIONS`` requests against HTML pages no longer return a 500 error. (`#1001 `__) - Datasette now supports Python 3.9. +See also `Datasette 0.50: The annotated release notes `__. + .. _v0_49_1: 0.49.1 (2020-09-15) From 549a007683e38fd13da72be7b2f5ee1adb1484c5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Oct 2020 16:13:41 -0700 Subject: [PATCH 0177/1705] Clarify that datasette.client HTTP calls are simulated --- docs/internals.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/internals.rst b/docs/internals.rst index 0fdd943c..a04de9fe 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -333,7 +333,7 @@ You can try out these messages (including the different visual styling of the th datasette.client ---------------- -Plugins can make internal HTTP requests to the Datasette instance within which they are running. This ensures that all of Datasette's external JSON APIs are also available to plugins. +Plugins can make internal simulated HTTP requests to the Datasette instance within which they are running. This ensures that all of Datasette's external JSON APIs are also available to plugins, while avoiding the overhead of making an external HTTP call to access those APIs. The ``datasette.client`` object is a wrapper around the `HTTPX Python library `__, providing an async-friendly API that is similar to the widely used `Requests library `__. 
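The ``datasette.client`` API documented in the patches above is easiest to see with a concrete illustration. What follows is a minimal sketch, not part of any patch in this series: the route path, database and table names are hypothetical, ``register_routes`` is Datasette's existing plugin hook for registering URL patterns, and the ``Response`` constructor signature is the one used by ``datasette/renderer.py`` later in this log:

.. code-block:: python

    from datasette import hookimpl
    from datasette.utils.asgi import Response
    import json


    async def row_count(datasette, request):
        # Simulated internal HTTP request: handled in-process by
        # datasette.client rather than going over the network
        response = await datasette.client.get(
            "/fixtures/facetable.json?_shape=array"
        )
        rows = response.json()  # datasette.client returns an httpx.Response
        return Response(
            json.dumps({"count": len(rows)}),
            content_type="application/json; charset=utf-8",
        )


    @hookimpl
    def register_routes():
        return [(r"^/-/row-count$", row_count)]

Because the request never leaves the process, the plugin sees exactly the same JSON that an external API client would receive.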
From c13d184704a74654befe061500f55ca61f29ef1b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Oct 2020 17:33:13 -0700 Subject: [PATCH 0178/1705] Emergency fix for broken links in 0.50, closes #1010 --- datasette/templates/row.html | 2 +- datasette/templates/table.html | 10 ++++----- tests/test_html.py | 41 ++++++++++++++++++++-------------- 3 files changed, 30 insertions(+), 23 deletions(-) diff --git a/datasette/templates/row.html b/datasette/templates/row.html index 6812b2d4..cd49a497 100644 --- a/datasette/templates/row.html +++ b/datasette/templates/row.html @@ -29,7 +29,7 @@ {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} -

This data as {% for name, url in renderers.items() %}{{ name }}{{ ", " if not loop.last }}{% endfor %}

+

This data as {% for name, url in renderers.items() %}{{ name }}{{ ", " if not loop.last }}{% endfor %}

{% include custom_table_templates %} diff --git a/datasette/templates/table.html b/datasette/templates/table.html index a5a3a180..ab2331c3 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -110,7 +110,7 @@

View and edit SQL

{% endif %} - + {% if suggested_facets %}

@@ -159,10 +159,10 @@

Advanced export

JSON shape: - default, - array, - newline-delimited{% if primary_keys %}, - object + default, + array, + newline-delimited{% if primary_keys %}, + object {% endif %}

diff --git a/tests/test_html.py b/tests/test_html.py index c0e3625e..aca4eedd 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -635,24 +635,24 @@ def test_table_csv_json_export_interface(app_client): .find("p", {"class": "export-links"}) .findAll("a") ) - actual = [l["href"].split("/")[-1] for l in links] + actual = [l["href"] for l in links] expected = [ - "simple_primary_key.json?id__gt=2", - "simple_primary_key.testall?id__gt=2", - "simple_primary_key.testnone?id__gt=2", - "simple_primary_key.testresponse?id__gt=2", - "simple_primary_key.csv?id__gt=2&_size=max", + "/fixtures/simple_primary_key.json?id__gt=2", + "/fixtures/simple_primary_key.testall?id__gt=2", + "/fixtures/simple_primary_key.testnone?id__gt=2", + "/fixtures/simple_primary_key.testresponse?id__gt=2", + "/fixtures/simple_primary_key.csv?id__gt=2&_size=max", "#export", ] assert expected == actual # And the advaced export box at the bottom: div = Soup(response.body, "html.parser").find("div", {"class": "advanced-export"}) - json_links = [a["href"].split("/")[-1] for a in div.find("p").findAll("a")] + json_links = [a["href"] for a in div.find("p").findAll("a")] assert [ - "simple_primary_key.json?id__gt=2", - "simple_primary_key.json?id__gt=2&_shape=array", - "simple_primary_key.json?id__gt=2&_shape=array&_nl=on", - "simple_primary_key.json?id__gt=2&_shape=object", + "/fixtures/simple_primary_key.json?id__gt=2", + "/fixtures/simple_primary_key.json?id__gt=2&_shape=array", + "/fixtures/simple_primary_key.json?id__gt=2&_shape=array&_nl=on", + "/fixtures/simple_primary_key.json?id__gt=2&_shape=object", ] == json_links # And the CSV form form = div.find("form") @@ -666,6 +666,12 @@ def test_table_csv_json_export_interface(app_client): ] == inputs +def test_row_json_export_link(app_client): + response = app_client.get("/fixtures/simple_primary_key/1") + assert response.status == 200 + assert 'json' in response.text + + def test_csv_json_export_links_include_labels_if_foreign_keys(app_client): response = app_client.get("/fixtures/facetable") assert response.status == 200 @@ -674,13 +680,13 @@ def test_csv_json_export_links_include_labels_if_foreign_keys(app_client): .find("p", {"class": "export-links"}) .findAll("a") ) - actual = [l["href"].split("/")[-1] for l in links] + actual = [l["href"] for l in links] expected = [ - "facetable.json?_labels=on", - "facetable.testall?_labels=on", - "facetable.testnone?_labels=on", - "facetable.testresponse?_labels=on", - "facetable.csv?_labels=on&_size=max", + "/fixtures/facetable.json?_labels=on", + "/fixtures/facetable.testall?_labels=on", + "/fixtures/facetable.testnone?_labels=on", + "/fixtures/facetable.testresponse?_labels=on", + "/fixtures/facetable.csv?_labels=on&_size=max", "#export", ] assert expected == actual @@ -1347,6 +1353,7 @@ def test_metadata_sort_desc(app_client): assert list(reversed(expected)) == rows +@pytest.mark.xfail @pytest.mark.parametrize("base_url", ["/prefix/", "https://example.com/"]) @pytest.mark.parametrize( "path", From 9f6dd985bc0eff70f8a9ce65c6578bc43d2e172b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Oct 2020 17:39:45 -0700 Subject: [PATCH 0179/1705] Fix broken CSV/JSON export on query page, refs #1010 --- datasette/templates/query.html | 2 +- tests/test_html.py | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 0add74a8..c6574f31 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -58,7 +58,7 @@ {% if 
display_rows %} - +
diff --git a/tests/test_html.py b/tests/test_html.py index aca4eedd..3f8cb178 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -672,6 +672,13 @@ def test_row_json_export_link(app_client): assert 'json' in response.text +def test_query_json_csv_export_links(app_client): + response = app_client.get("/fixtures?sql=select+1") + assert response.status == 200 + assert 'json' in response.text + assert 'CSV' in response.text + + def test_csv_json_export_links_include_labels_if_foreign_keys(app_client): response = app_client.get("/fixtures/facetable") assert response.status == 200 From 6fe30c348c58a0bc312552fd7a889731427b86e5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Oct 2020 17:41:22 -0700 Subject: [PATCH 0180/1705] Release 0.50.1 Refs #1010 --- docs/changelog.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index f5758126..f0e825b3 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,13 @@ Changelog ========= +.. _v0_50.1: + +0.50.1 (2020-10-09) +------------------- + +- Fixed a bug introduced in 0.50 where the export as JSON/CSV links on the table, row and query pages were broken. (`#1010 `__) + .. _v0_50: 0.50 (2020-10-09) From 7239175f63d150356a7f795cc4cabf7764d2cf68 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Oct 2020 20:51:56 -0700 Subject: [PATCH 0181/1705] Fixed broken column header links, closes #1011 --- datasette/templates/_table.html | 4 ++-- tests/test_html.py | 10 +++++----- tests/test_plugins.py | 4 ++-- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/datasette/templates/_table.html b/datasette/templates/_table.html index 65789045..1dd94212 100644 --- a/datasette/templates/_table.html +++ b/datasette/templates/_table.html @@ -8,9 +8,9 @@ {{ column.name }} {% else %} {% if column.name == sort %} - {{ column.name }} ▼ + {{ column.name }} ▼ {% else %} - {{ column.name }}{% if column.name == sort_desc %} ▲{% endif %} + {{ column.name }}{% if column.name == sort_desc %} ▲{% endif %} {% endif %} {% endif %} diff --git a/tests/test_html.py b/tests/test_html.py index 3f8cb178..5691b6c4 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -345,7 +345,7 @@ def test_sort_links(app_client): attrs_and_link_attrs = [ { "attrs": th.attrs, - "a_href": (th.find("a")["href"].split("/")[-1] if th.find("a") else None), + "a_href": (th.find("a")["href"] if th.find("a") else None), } for th in ths ] @@ -403,7 +403,7 @@ def test_sort_links(app_client): "data-column-not-null": "0", "data-is-pk": "0", }, - "a_href": "sortable?_sort_desc=sortable", + "a_href": "/fixtures/sortable?_sort_desc=sortable", }, { "attrs": { @@ -414,7 +414,7 @@ def test_sort_links(app_client): "data-column-not-null": "0", "data-is-pk": "0", }, - "a_href": "sortable?_sort=sortable_with_nulls", + "a_href": "/fixtures/sortable?_sort=sortable_with_nulls", }, { "attrs": { @@ -425,7 +425,7 @@ def test_sort_links(app_client): "data-column-not-null": "0", "data-is-pk": "0", }, - "a_href": "sortable?_sort=sortable_with_nulls_2", + "a_href": "/fixtures/sortable?_sort=sortable_with_nulls_2", }, { "attrs": { @@ -436,7 +436,7 @@ def test_sort_links(app_client): "data-column-not-null": "0", "data-is-pk": "0", }, - "a_href": "sortable?_sort=text", + "a_href": "/fixtures/sortable?_sort=text", }, ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 4b3634ab..08ed2e6b 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -497,9 +497,9 @@ def 
test_hook_register_output_renderer_can_render(app_client): .find("p", {"class": "export-links"}) .findAll("a") ) - actual = [l["href"].split("/")[-1] for l in links] + actual = [l["href"] for l in links] # Should not be present because we sent ?_no_can_render=1 - assert "facetable.testall?_labels=on" not in actual + assert "/fixtures/facetable.testall?_labels=on" not in actual # Check that it was passed the values we expected assert hasattr(app_client.ds, "_can_render_saw") assert { From 0e58ae7600212c075f5b8ae4b52d2af0e1acd4f1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Oct 2020 20:53:47 -0700 Subject: [PATCH 0182/1705] Release 0.50.2 Refs #1011 --- docs/changelog.rst | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index f0e825b3..1d654485 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,7 +4,15 @@ Changelog ========= -.. _v0_50.1: +.. _v0_50_2: + +0.50.2 (2020-10-09) +------------------- + +- Fixed another bug introduced in 0.50 where column header links on the table page were broken. (`#1011 `__) + + +.. _v0_50_1: 0.50.1 (2020-10-09) ------------------- From a67cb536f1fde4b3cf38032b61bcc6d38c30d762 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 10 Oct 2020 13:54:27 -0700 Subject: [PATCH 0183/1705] Promote the Datasette Weekly newsletter --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 92c898af..66ddf803 100644 --- a/README.md +++ b/README.md @@ -21,6 +21,8 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover * Live demo of current main: https://latest.datasette.io/ * Support questions, feedback? Join our [GitHub Discussions forum](https://github.com/simonw/datasette/discussions) +Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly newsletter](https://datasette.substack.com/) for tips, tricks and news on what's new in the Datasette ecosystem. + ## News * 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). From 822260fb30c9a6726a36975c9b8b26148bd66818 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 10 Oct 2020 16:19:39 -0700 Subject: [PATCH 0184/1705] Improved homebrew instructions --- docs/installation.rst | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/docs/installation.rst b/docs/installation.rst index 1a45c594..dcae738a 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -27,12 +27,24 @@ Using Homebrew If you have a Mac and use `Homebrew `__, you can install Datasette by running this command in your terminal:: - brew install simonw/datasette/datasette + brew install datasette + +This should install the latest version. You can confirm by running:: + + datasette --version + +You can upgrade to the latest Homebrew packaged version using:: + + brew upgrade datasette Once you have installed Datasette you can install plugins using the following:: datasette install datasette-vega +If the latest packaged release of Datasette has not yet been made available through Homebrew, you can upgrade your Homebrew installation in-place using:: + + datasette install -U datasette + .. 
_installation_pip: Using pip From 7e7064385270dda09dc2aa396d290369a667a03f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 10 Oct 2020 16:39:38 -0700 Subject: [PATCH 0185/1705] Removed --debug option, which didn't do anything - closes #814 --- README.md | 1 - datasette/cli.py | 6 +----- docs/changelog.rst | 1 - docs/datasette-serve-help.txt | 1 - tests/test_cli.py | 1 - 5 files changed, 1 insertion(+), 9 deletions(-) diff --git a/README.md b/README.md index 66ddf803..8670936c 100644 --- a/README.md +++ b/README.md @@ -130,7 +130,6 @@ Now visiting http://localhost:8001/History/downloads will show you a web interfa allowed. Use 0.0.0.0 to listen to all IPs and allow access from other machines. -p, --port INTEGER Port for server, defaults to 8001 - --debug Enable debug mode - useful for development --reload Automatically reload if database or code change detected - useful for development --cors Enable CORS by serving Access-Control-Allow- diff --git a/datasette/cli.py b/datasette/cli.py index 43e03f0a..55576013 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -286,9 +286,6 @@ def uninstall(packages, yes): default=8001, help="Port for server, defaults to 8001. Use -p 0 to automatically assign an available port.", ) -@click.option( - "--debug", is_flag=True, help="Enable debug mode - useful for development" -) @click.option( "--reload", is_flag=True, @@ -366,7 +363,6 @@ def serve( immutable, host, port, - debug, reload, cors, sqlite_extensions, @@ -417,7 +413,7 @@ def serve( kwargs = dict( immutables=immutable, - cache_headers=not debug and not reload, + cache_headers=not reload, cors=cors, inspect_data=inspect_data, metadata=metadata_data, diff --git a/docs/changelog.rst b/docs/changelog.rst index 1d654485..3c56328c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -11,7 +11,6 @@ Changelog - Fixed another bug introduced in 0.50 where column header links on the table page were broken. (`#1011 `__) - .. _v0_50_1: 0.50.1 (2020-10-09) diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index ac3ca49f..0457a321 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -14,7 +14,6 @@ Options: -p, --port INTEGER Port for server, defaults to 8001. Use -p 0 to automatically assign an available port. 
- --debug Enable debug mode - useful for development --reload Automatically reload if database or code change detected - useful for development diff --git a/tests/test_cli.py b/tests/test_cli.py index 09864602..0e1745c2 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -89,7 +89,6 @@ def test_metadata_yaml(): immutable=[], host="127.0.0.1", port=8001, - debug=False, reload=False, cors=False, sqlite_extensions=[], From e34e84901d084ba3aaccecea020c5f9811865c8f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 10 Oct 2020 17:18:45 -0700 Subject: [PATCH 0186/1705] Link: HTTP header pagination, closes #1014 --- datasette/renderer.py | 12 +++++++++++- docs/json_api.rst | 32 ++++++++++++++++++++++++++++++++ tests/test_api.py | 28 ++++++++++++++++++++++++++++ 3 files changed, 71 insertions(+), 1 deletion(-) diff --git a/datasette/renderer.py b/datasette/renderer.py index 27a5092f..bcde8516 100644 --- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -5,6 +5,7 @@ from datasette.utils import ( CustomJSONEncoder, path_from_row_pks, ) +from datasette.utils.asgi import Response def convert_specific_columns_to_json(rows, columns, json_cols): @@ -44,6 +45,9 @@ def json_renderer(args, data, view_name): # Deal with the _shape option shape = args.get("_shape", "arrays") + + next_url = data.get("next_url") + if shape == "arrayfirst": data = [row[0] for row in data["rows"]] elif shape in ("objects", "object", "array"): @@ -71,6 +75,7 @@ def json_renderer(args, data, view_name): data = {"ok": False, "error": error} elif shape == "array": data = data["rows"] + elif shape == "arrays": pass else: @@ -89,4 +94,9 @@ def json_renderer(args, data, view_name): else: body = json.dumps(data, cls=CustomJSONEncoder) content_type = "application/json; charset=utf-8" - return {"body": body, "status_code": status_code, "content_type": content_type} + headers = {} + if next_url: + headers["link"] = '<{}>; rel="next"'.format(next_url) + return Response( + body, status=status_code, headers=headers, content_type=content_type + ) diff --git a/docs/json_api.rst b/docs/json_api.rst index af98eecd..8d45ac6f 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -1,3 +1,5 @@ +.. _json_api: + JSON API ======== @@ -18,6 +20,8 @@ requests to fetch the data. If you start Datasette without the ``--cors`` option only JavaScript running on the same domain as Datasette will be able to access the API. +.. _json_api_shapes: + Different shapes ---------------- @@ -138,6 +142,34 @@ this format. The ``object`` keys are always strings. If your table has a compound primary key, the ``object`` keys will be a comma-separated string. +.. _json_api_pagination: + +Pagination +---------- + +The default JSON representation includes a ``"next_url"`` key which can be used to access the next page of results. If that key is null or missing then it means you have reached the final page of results. + +Other representations include pagination information in the ``link`` HTTP header. That header will look something like this:: + + link: ; rel="next" + +Here is an example Python function built using `requests `__ that returns a list of all of the paginated items from one of these API endpoints: + +.. code-block:: python + + def paginate(url): + items = [] + while url: + response = requests.get(url) + try: + url = response.links.get("next").get("url") + except AttributeError: + url = None + items.extend(response.json()) + return items + +.. 
_json_api_special: + Special JSON arguments ---------------------- diff --git a/tests/test_api.py b/tests/test_api.py index 4aa9811c..1d454ea1 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1828,3 +1828,31 @@ def test_binary_data_in_json(app_client, path, expected_json, expected_text): assert response.json == expected_json else: assert response.text == expected_text + + +@pytest.mark.parametrize( + "qs", + [ + "", + "?_shape=arrays", + "?_shape=arrayfirst", + "?_shape=object", + "?_shape=objects", + "?_shape=array", + "?_shape=array&_nl=on", + ], +) +def test_paginate_using_link_header(app_client, qs): + path = "/fixtures/compound_three_primary_keys.json{}".format(qs) + num_pages = 0 + while path: + response = app_client.get(path) + num_pages += 1 + link = response.headers.get("link") + if link: + assert link.startswith("<") + assert link.endswith('>; rel="next"') + path = link[1:].split(">")[0] + else: + path = None + assert num_pages == 21 From acf07a67722aa74828744726187690b59d342494 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 11 Oct 2020 19:53:26 -0700 Subject: [PATCH 0187/1705] x button for clearing filters, refs #1016 --- datasette/static/table.js | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/datasette/static/table.js b/datasette/static/table.js index 7e839b9c..08c560d6 100644 --- a/datasette/static/table.js +++ b/datasette/static/table.js @@ -152,3 +152,33 @@ var DROPDOWN_ICON_SVG = ` el.querySelector('.filter-op') + ); + rows.forEach(row => { + var a = document.createElement('a'); + a.setAttribute('href', '#'); + a.setAttribute('aria-label', 'Remove this filter'); + a.style.textDecoration = 'none'; + a.innerText = x; + a.addEventListener('click', (ev) => { + ev.preventDefault(); + let row = ev.target.closest('div'); + row.querySelector('select').value = ''; + row.querySelector('.filter-op select').value = 'exact'; + row.querySelector('input.filter-value').value = ''; + ev.target.closest('a').style.display = 'none'; + }); + row.appendChild(a); + var column = row.querySelector('select'); + if (!column.value) { + a.style.display = 'none'; + } + }); +})(); From f3a087a578ae2c418103ad144b08c2fc8ad9c31d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 13 Oct 2020 20:44:18 -0700 Subject: [PATCH 0188/1705] Edit SQL button on canned queries, closes #1019 --- datasette/static/app.css | 6 +++++ datasette/templates/query.html | 1 + datasette/views/database.py | 37 +++++++++++++++++++++++++--- tests/test_html.py | 45 ++++++++++++++++++++++++++++++++++ 4 files changed, 85 insertions(+), 4 deletions(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index da8ed2ab..d2494a34 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -447,3 +447,9 @@ svg.dropdown-menu-icon { border-right: 5px solid transparent; border-bottom: 5px solid #666; } + +.canned-query-edit-sql { + padding-left: 0.5em; + position: relative; + top: 1px; +} diff --git a/datasette/templates/query.html b/datasette/templates/query.html index c6574f31..be180f33 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -54,6 +54,7 @@ {% if canned_write %}

- home + home

{{ super() }} {% endblock %} @@ -23,7 +23,7 @@ {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} {% if allow_execute_sql %} -
+

Custom SQL query

@@ -36,7 +36,7 @@ {% for table in tables %} {% if show_hidden or not table.hidden %}

-

{{ table.name }}{% if table.private %} 🔒{% endif %}{% if table.hidden %} (hidden){% endif %}

+

{{ table.name }}{% if table.private %} 🔒{% endif %}{% if table.hidden %} (hidden){% endif %}

{% for column in table.columns[:9] %}{{ column }}{% if not loop.last %}, {% endif %}{% endfor %}{% if table.columns|length > 9 %}...{% endif %}

{% if table.count is none %}Many rows{% else %}{{ "{:,}".format(table.count) }} row{% if table.count == 1 %}{% else %}s{% endif %}{% endif %}

@@ -44,14 +44,14 @@ {% endfor %} {% if hidden_count and not show_hidden %} -

... and {{ "{:,}".format(hidden_count) }} hidden table{% if hidden_count == 1 %}{% else %}s{% endif %}

+

... and {{ "{:,}".format(hidden_count) }} hidden table{% if hidden_count == 1 %}{% else %}s{% endif %}

{% endif %} {% if views %}

Views

{% endif %} @@ -60,13 +60,13 @@

Queries

{% endif %} {% if allow_download %} -

Download SQLite DB: {{ database }}.db {{ format_bytes(size) }}

+

Download SQLite DB: {{ database }}.db {{ format_bytes(size) }}

{% endif %} {% include "_codemirror_foot.html" %} diff --git a/datasette/templates/index.html b/datasette/templates/index.html index c1adfc59..06e09635 100644 --- a/datasette/templates/index.html +++ b/datasette/templates/index.html @@ -10,7 +10,7 @@ {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} {% for database in databases %} -

{{ database.name }}{% if database.private %} 🔒{% endif %}

+

{{ database.name }}{% if database.private %} 🔒{% endif %}

{% if database.show_table_row_counts %}{{ "{:,}".format(database.table_rows_sum) }} rows in {% endif %}{{ database.tables_count }} table{% if database.tables_count != 1 %}s{% endif %}{% if database.tables_count and database.hidden_tables_count %}, {% endif -%} {% if database.hidden_tables_count -%} @@ -21,8 +21,7 @@ {{ "{:,}".format(database.views_count) }} view{% if database.views_count != 1 %}s{% endif %} {% endif %}

-

{% for table in database.tables_and_views_truncated %}{{ table.name }}{% if table.private %} 🔒{% endif %}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}

+

{% for table in database.tables_and_views_truncated %}{{ table.name }}{% if table.private %} 🔒{% endif %}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}

{% endfor %} {% endblock %} diff --git a/datasette/templates/query.html b/datasette/templates/query.html index be180f33..911119bb 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -20,8 +20,8 @@ {% block nav %}

- home / - {{ database }} + home / + {{ database }}

{{ super() }} {% endblock %} @@ -32,7 +32,7 @@ {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} - +

Custom SQL query{% if display_rows %} returning {% if truncated %}more than {% endif %}{{ "{:,}".format(display_rows|length) }} row{% if display_rows|length == 1 %}{% else %}s{% endif %}{% endif %} {% if hide_sql %}(show){% else %}(hide){% endif %}

{% if not hide_sql %} {% if editable and allow_execute_sql %} diff --git a/datasette/templates/row.html b/datasette/templates/row.html index cd49a497..916980b6 100644 --- a/datasette/templates/row.html +++ b/datasette/templates/row.html @@ -17,9 +17,9 @@ {% block nav %}

- home / - {{ database }} / - {{ table }} + home / + {{ database }} / + {{ table }}

{{ super() }} {% endblock %} @@ -38,7 +38,7 @@
    {% for other in foreign_key_tables %}
  • - + {{ "{:,}".format(other.count) }} row{% if other.count == 1 %}{% else %}s{% endif %} from {{ other.other_column }} in {{ other.other_table }}
  • diff --git a/datasette/templates/table.html b/datasette/templates/table.html index ab2331c3..3f8c2fee 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -5,7 +5,7 @@ {% block extra_head %} {{ super() }} - + + - -
    + + +
    +
    + +
    + + +
    + +
    + + + +

[The rest of this patch is a markup-heavy template diff whose HTML was lost in extraction. The surviving text content, in order:
"Pattern Portfolio"; ".hd for /database/table/row"; "Messages" with three example messages; "Header for /database/table/row and Messages" with three more example messages;
".bd for /" ("Datasette Fixtures"); ".bd for /database" ("fixtures"); ".bd for /database/table" ("roadside_attraction_characteristics", "Data license:");
hunk @@ -203,9 +244,109 @@: "2 extra where clauses", "View and edit SQL", "Suggested facets: tags, created (date), tags (array)", plus facet panels for tags (array), created and city_id;
hunk @@ -266,9 +407,20 @@ (context: attraction_id INTEGER REFERENCES roadside_attractions(pk), characteristic_id INTEGER REFERENCES attraction_characteristic(pk) );): ".bd for /database/table/row", "roadside_attractions: 2", "This data as json";
hunk @@ -309,9 +461,21 @@ (context: from attraction_id in roadside_attraction_characteristics): ".ft", "Powered by Datasette".]
+ From 6dff22eff8a52253a6c2bdf3e32f082fbf81b921 Mon Sep 17 00:00:00 2001 From: Natalie Downe Date: Tue, 27 Oct 2020 11:39:35 -0700 Subject: [PATCH 0226/1705] Visited link colours --- datasette/static/app.css | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index 87ec5f01..1ad04618 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -164,11 +164,14 @@ footer, box-sizing: border-box; } -a:link, -a:visited { +a:link { color: #276890; text-decoration: underline; } +a:visited { + color: #54AC8E; + text-decoration: underline; +} a:hover, a:focus, a:active { @@ -363,10 +366,6 @@ th { } table a:link { text-decoration: none; - color: #445ac8; -} -table a:visited { - color: #8f54c4; } .rows-and-columns td:before { display: block; From df19a48a3b72a51feb4203c44903451cc9e6c1bf Mon Sep 17 00:00:00 2001 From: Natalie Downe Date: Tue, 27 Oct 2020 11:40:08 -0700 Subject: [PATCH 0227/1705] Implemented new Natalie design --- datasette/static/app.css | 7 +++++-- datasette/templates/base.html | 10 +++++----- datasette/templates/database.html | 4 ++-- 3 files changed, 12 insertions(+), 9 deletions(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index 1ad04618..dff882af 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -210,7 +210,7 @@ ol.spaced { margin-bottom: 0.8rem; } ul.bullets { - padding-left: 0.9rem; + padding-left: 1.25rem; } ul.bullets li, ul.spaced li, @@ -290,7 +290,9 @@ section.content { } /* Footer */ - +footer { + margin-top: 1rem; +} /* Components ============================================================== */ @@ -568,6 +570,7 @@ form button[type=button] { width: auto; display: inline-block; box-shadow: 1px 2px 8px 2px rgba(0,0,0,0.08); + background-color: white; } .download-sqlite em { diff --git a/datasette/templates/base.html b/datasette/templates/base.html index 84708325..03de2115 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -14,7 +14,7 @@ - -
+
{% block messages %} {% if show_messages %} {% for message, message_type in show_messages() %} @@ -37,9 +37,9 @@ {% block content %} {% endblock %} -
+ -
{% block footer %}{% include "_footer.html" %}{% endblock %}
+
{% block footer %}{% include "_footer.html" %}{% endblock %}
{% for body_script in body_scripts %} diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 2f844b6a..3b89d68b 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -49,7 +49,7 @@ {% if views %}

Views

-
    +
      {% for view in views %}
    • {{ view.name }}{% if view.private %} 🔒{% endif %}
    • {% endfor %} @@ -58,7 +58,7 @@ {% if queries %}

      Queries

      -
        +
          {% for query in queries %}
        • {{ query.title or query.name }}{% if query.private %} 🔒{% endif %}
        • {% endfor %} From fe5e813f068abd2ee63994b2baf530c7abe34de1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 27 Oct 2020 11:57:34 -0700 Subject: [PATCH 0228/1705] Styled facets with different bullets --- datasette/static/app.css | 15 +++++++++++++-- datasette/templates/table.html | 2 +- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index dff882af..2dfc6b15 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -55,7 +55,12 @@ th { padding-right: 1em; white-space: nowrap; } - +strong { + font-weight: bold; +} +em { + font-style: italic; +} /* end reset */ @@ -205,11 +210,13 @@ pre { } ul.bullets, +ul.tight-bullets, ul.spaced, ol.spaced { margin-bottom: 0.8rem; } -ul.bullets { +ul.bullets, +ul.tight-bullets { padding-left: 1.25rem; } ul.bullets li, @@ -220,6 +227,10 @@ ol.spaced li { ul.bullets li { list-style-type: circle; } +ul.tight-bullets li { + list-style-type: disc; + margin-bottom: 0; +} a.not-underlined { text-decoration: none; } diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 3f8c2fee..bc8cfc0a 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -132,7 +132,7 @@ {% endif %}

          -
            +
              {% for facet_value in facet_info.results %} {% if not facet_value.selected %}
            • {{ (facet_value.label if facet_value.label is not none else "_") }} {{ "{:,}".format(facet_value.count) }}
            • From 62286b46a9b434467ab7dee37ec2f8619ca0d1b3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 27 Oct 2020 12:01:44 -0700 Subject: [PATCH 0229/1705] Tighten up table column CSS --- datasette/templates/table.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/datasette/templates/table.html b/datasette/templates/table.html index bc8cfc0a..6c27beee 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -8,9 +8,9 @@ {% endblock %} From dab4b73f7d76f43e67ae1e2b74921f62db71925c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 27 Oct 2020 12:07:25 -0700 Subject: [PATCH 0230/1705] White cards on mobile --- datasette/static/app.css | 1 + 1 file changed, 1 insertion(+) diff --git a/datasette/static/app.css b/datasette/static/app.css index 2dfc6b15..c89f412b 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -654,6 +654,7 @@ p.zero-results { border-bottom: 1px solid #eee; padding: 0; padding-left: 10%; + background-color: white; } .rows-and-columns td:before { From f49d15a7583fafb94e7a7fcfe504d333812139f3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 27 Oct 2020 12:20:28 -0700 Subject: [PATCH 0231/1705] word-break: break-word; --- datasette/static/app.css | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index c89f412b..7988252e 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -119,7 +119,8 @@ h6, font-weight: 700; font-size: 1rem; margin: 0; - padding: 0 + padding: 0; + word-break: break-word; } h1, .header1 { From c069d481af736f43e82598752f30ddc98bcb4b29 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 27 Oct 2020 12:27:14 -0700 Subject: [PATCH 0232/1705] Mobile view cards now have rounded corners --- datasette/static/app.css | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index 7988252e..adbfccab 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -647,6 +647,9 @@ p.zero-results { .rows-and-columns tr { border: 1px solid #ccc; margin-bottom: 1em; + border-radius: 10px; + background-color: white; + padding: 0.2rem; } .rows-and-columns td { @@ -655,7 +658,6 @@ p.zero-results { border-bottom: 1px solid #eee; padding: 0; padding-left: 10%; - background-color: white; } .rows-and-columns td:before { From 18977ce8026e71bc29c51eef2b46dbaa288042d5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 27 Oct 2020 12:28:50 -0700 Subject: [PATCH 0233/1705] Off-white yellow is now off-white blue --- datasette/static/app.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index adbfccab..085d829c 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -73,7 +73,7 @@ body { line-height: 1.5; color: #111A35; text-align: left; - background-color: #fefdf4; + background-color: #F8FAFB; } /* Helper Styles ===========================================================*/ From e7dd3434e1f3f20129798bcea1a629717eec1649 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 27 Oct 2020 12:30:40 -0700 Subject: [PATCH 0234/1705] No underline on nav links in header --- datasette/static/app.css | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index 085d829c..b6dfd7f3 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -272,7 +272,9 @@ footer 
a:visited, footer a:hover, footer a:focus, footer a:active, -footer button.button-as-link, +footer button.button-as-link { + color: rgba(255,255,244,0.8); +} header a:link, header a:visited, header a:hover, @@ -280,6 +282,7 @@ header a:focus, header a:active, header button.button-as-link { color: rgba(255,255,244,0.8); + text-decoration: none; } footer a:hover, From e5f5034bcdc71e4bc62a6a155ca60eb41910c335 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 27 Oct 2020 12:34:35 -0700 Subject: [PATCH 0235/1705] Fixed broken footer test --- tests/test_html.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_html.py b/tests/test_html.py index 3af9816f..06b11de5 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1058,7 +1058,7 @@ def assert_querystring_equal(expected, actual): def assert_footer_links(soup): - footer_links = soup.find("div", {"class": "ft"}).findAll("a") + footer_links = soup.find("footer").findAll("a") assert 4 == len(footer_links) datasette_link, license_link, source_link, about_link = footer_links assert "Datasette" == datasette_link.text.strip() From c3aba4aa986fdba39705a35de02d446db80a26b8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 27 Oct 2020 13:39:07 -0700 Subject: [PATCH 0236/1705] --cors for /name.db downloads, refs #1057 --- datasette/utils/asgi.py | 17 +++++++++++++---- datasette/views/database.py | 4 ++++ tests/fixtures.py | 2 +- tests/test_api.py | 1 + 4 files changed, 19 insertions(+), 5 deletions(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 911038ab..bd388390 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -247,9 +247,9 @@ async def asgi_start(send, status, headers=None, content_type="text/plain"): async def asgi_send_file( - send, filepath, filename=None, content_type=None, chunk_size=4096 + send, filepath, filename=None, content_type=None, chunk_size=4096, headers=None ): - headers = {} + headers = headers or {} if filename: headers["content-disposition"] = 'attachment; filename="{}"'.format(filename) first = True @@ -395,13 +395,22 @@ class Response: class AsgiFileDownload: def __init__( - self, filepath, filename=None, content_type="application/octet-stream" + self, + filepath, + filename=None, + content_type="application/octet-stream", + headers=None, ): + self.headers = headers or {} self.filepath = filepath self.filename = filename self.content_type = content_type async def asgi_send(self, send): return await asgi_send_file( - send, self.filepath, filename=self.filename, content_type=self.content_type + send, + self.filepath, + filename=self.filename, + content_type=self.content_type, + headers=self.headers, ) diff --git a/datasette/views/database.py b/datasette/views/database.py index 74509278..025e853d 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -144,10 +144,14 @@ class DatabaseDownload(DataView): if not db.path: raise DatasetteError("Cannot download database", status=404) filepath = db.path + headers = {} + if self.ds.cors: + headers["Access-Control-Allow-Origin"] = "*" return AsgiFileDownload( filepath, filename=os.path.basename(filepath), content_type="application/octet-stream", + headers=headers, ) diff --git a/tests/fixtures.py b/tests/fixtures.py index d8c92561..7786ca8c 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -227,7 +227,7 @@ def app_client_with_dot(): @pytest.fixture(scope="session") def app_client_with_cors(): - with make_app_client(cors=True) as client: + with 
make_app_client(is_immutable=True, cors=True) as client: yield client diff --git a/tests/test_api.py b/tests/test_api.py index 1d454ea1..461d3f81 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1739,6 +1739,7 @@ def test_trace(app_client): @pytest.mark.parametrize( "path,status_code", [ + ("/fixtures.db", 200), ("/fixtures.json", 200), ("/fixtures/no_primary_key.json", 200), # A 400 invalid SQL query should still have the header: From 7d9fedc176717a7e3d22a96575ae0aada5a65440 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 27 Oct 2020 20:15:41 -0700 Subject: [PATCH 0237/1705] Cascading permissions for .db download, closes #1058 --- datasette/views/database.py | 11 ++++++++--- tests/plugins/my_plugin.py | 2 ++ tests/test_permissions.py | 12 ++++++++++-- 3 files changed, 20 insertions(+), 5 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index 025e853d..00fbc0b0 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -131,9 +131,14 @@ class DatabaseDownload(DataView): name = "database_download" async def view_get(self, request, database, hash, correct_hash_present, **kwargs): - await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", database) - await self.check_permission(request, "view-database-download", database) + await self.check_permissions( + request, + [ + ("view-database-download", database), + ("view-database", database), + "view-instance", + ], + ) if database not in self.ds.databases: raise DatasetteError("Invalid database", status=404) db = self.ds.databases[database] diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 0870eb19..0dd0ad26 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -193,6 +193,8 @@ def permission_allowed(actor, action): return True elif action == "this_is_denied": return False + elif action == "view-database-download": + return (actor and actor.get("can_download")) or None @hookimpl diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 3c11985c..a935a495 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -394,7 +394,7 @@ def test_view_instance(path, view_instance_client): @pytest.fixture(scope="session") def cascade_app_client(): - with make_app_client() as client: + with make_app_client(is_immutable=True) as client: yield client @@ -439,6 +439,11 @@ def cascade_app_client(): ("/fixtures", [], 403), ("/fixtures", ["instance"], 403), ("/fixtures", ["database"], 200), + # Downloading the fixtures.db file + ("/fixtures.db", [], 403), + ("/fixtures.db", ["instance"], 403), + ("/fixtures.db", ["database"], 200), + ("/fixtures.db", ["download"], 200), ], ) def test_permissions_cascade(cascade_app_client, path, permissions, expected_status): @@ -447,6 +452,9 @@ def test_permissions_cascade(cascade_app_client, path, permissions, expected_sta deny = {} previous_metadata = cascade_app_client.ds._metadata updated_metadata = copy.deepcopy(previous_metadata) + actor = {"id": "test"} + if "download" in permissions: + actor["can_download"] = 1 try: # Set up the different allow blocks updated_metadata["allow"] = allow if "instance" in permissions else deny @@ -462,7 +470,7 @@ def test_permissions_cascade(cascade_app_client, path, permissions, expected_sta cascade_app_client.ds._metadata = updated_metadata response = cascade_app_client.get( path, - cookies={"ds_actor": cascade_app_client.actor_cookie({"id": "test"})}, + cookies={"ds_actor": 
cascade_app_client.actor_cookie(actor)}, ) assert expected_status == response.status finally: From 879617265262024edd93722adcdcb6c21e57f5f7 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 28 Oct 2020 10:08:27 -0700 Subject: [PATCH 0238/1705] Update aiofiles requirement from <0.6,>=0.4 to >=0.4,<0.7 (#1059) Updates the requirements on [aiofiles](https://github.com/Tinche/aiofiles) to permit the latest version. - [Release notes](https://github.com/Tinche/aiofiles/releases) - [Commits](https://github.com/Tinche/aiofiles/compare/v0.4.0...v0.6.0) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 0de7f92d..c4dea142 100644 --- a/setup.py +++ b/setup.py @@ -54,7 +54,7 @@ setup( "pint~=0.9", "pluggy~=0.13.0", "uvicorn~=0.11", - "aiofiles>=0.4,<0.6", + "aiofiles>=0.4,<0.7", "janus>=0.4,<0.7", "asgi-csrf>=0.6", "PyYAML~=5.3", From abcf0222496d8148b2e585ffa0ff192270a04b06 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 28 Oct 2020 10:11:07 -0700 Subject: [PATCH 0239/1705] Margin bottom on metadata description --- datasette/static/app.css | 3 +++ 1 file changed, 3 insertions(+) diff --git a/datasette/static/app.css b/datasette/static/app.css index b6dfd7f3..8b462b35 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -81,6 +81,9 @@ body { .intro { font-size: 1rem; } +.metadata-description { + margin-bottom: 1em; +} p { margin: 0 0 0.75rem 0; padding: 0; From cefd058c1c216a184bb63c79abba66893977c18e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 28 Oct 2020 20:38:15 -0700 Subject: [PATCH 0240/1705] New explicit versioning mechanism Closes #1054 --- .gitattributes | 1 - datasette/_version.py | 556 ------------ datasette/cli.py | 3 +- datasette/version.py | 6 +- setup.cfg | 8 - setup.py | 6 +- tests/test_api.py | 2 + tests/test_cli.py | 7 + versioneer.py | 1885 ----------------------------------------- 9 files changed, 14 insertions(+), 2460 deletions(-) delete mode 100644 datasette/_version.py delete mode 100644 versioneer.py diff --git a/.gitattributes b/.gitattributes index e5e5865f..744258eb 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1 @@ -datasette/_version.py export-subst datasette/static/codemirror-* linguist-vendored diff --git a/datasette/_version.py b/datasette/_version.py deleted file mode 100644 index 5783f30f..00000000 --- a/datasette/_version.py +++ /dev/null @@ -1,556 +0,0 @@ -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. Generated by -# versioneer-0.18 (https://github.com/warner/python-versioneer) - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. 
_version.py will just call - # get_keywords(). - git_refnames = "$Format:%d$" - git_full = "$Format:%H$" - git_date = "$Format:%ci$" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "pep440" - cfg.tag_prefix = "" - cfg.parentdir_prefix = "datasette-" - cfg.versionfile_source = "datasette/_version.py" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY = {} -HANDLERS = {} - - -def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - p = None - for c in commands: - try: - dispcmd = str([c] + args) - # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen( - [c] + args, - cwd=cwd, - env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr else None), - ) - break - except EnvironmentError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, p.returncode - return stdout, p.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for i in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return { - "version": dirname[len(parentdir_prefix) :], - "full-revisionid": None, - "dirty": False, - "error": None, - "date": None, - } - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print( - "Tried directories %s but none started with prefix %s" - % (str(rootdirs), parentdir_prefix) - ) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
- keywords = {} - try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") - date = keywords.get("date") - if date is not None: - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r"\d", r)]) - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix) :] - if verbose: - print("picking %s" % r) - return { - "version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": None, - "date": date, - } - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return { - "version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": "no suitable tags", - "date": None, - } - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. 
- """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command( - GITS, - [ - "describe", - "--tags", - "--dirty", - "--always", - "--long", - "--match", - "%s*" % tag_prefix, - ], - cwd=root, - ) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[: git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) - if not mo: - # unparseable. Maybe git-describe is misbehaving? - pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( - full_tag, - tag_prefix, - ) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix) :] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ - 0 - ].strip() - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post.devDISTANCE - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] - else: - # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return { - "version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None, - } - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return { - "version": rendered, - "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], - "error": None, - "date": pieces.get("date"), - } - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. - for i in cfg.versionfile_source.split("/"): - root = os.path.dirname(root) - except NameError: - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None, - } - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", - "date": None, - } diff --git a/datasette/cli.py b/datasette/cli.py index ece03636..04d2950b 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -25,6 +25,7 @@ from .utils import ( ValueAsBooleanError, ) from .utils.testing import TestClient +from .version import __version__ class Config(click.ParamType): @@ -65,7 +66,7 @@ class Config(click.ParamType): @click.group(cls=DefaultGroup, default="serve", default_if_no_args=True) -@click.version_option() +@click.version_option(version=__version__) def cli(): """ Datasette! 
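The cli.py hunk above wires the CLI's --version flag to an explicit version string; the version.py and setup.py hunks that follow pin datasette/version.py to a literal string instead of deriving it from versioneer. The body of setup.py's get_version() is not included in this patch, so the following is only a sketch of one common pattern for reading that string without importing the package (everything here besides the get_version() name is an assumption):

    import os

    # Hypothetical sketch: exec datasette/version.py in an empty namespace so
    # setup.py can read __version__ without importing the datasette package
    # (which would pull in its dependencies at build time).
    def get_version():
        path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "datasette", "version.py"
        )
        about = {}
        with open(path) as fp:
            exec(fp.read(), about)
        return about["__version__"]

Per the new test_version test further below, invoking the CLI with --version then prints "cli, version " followed by exactly this string.
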
diff --git a/datasette/version.py b/datasette/version.py index e1fed2c4..b57d7a12 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,6 +1,2 @@ -from ._version import get_versions - -__version__ = get_versions()["version"] -del get_versions - +__version__ = "0.51.a0" __version_info__ = tuple(__version__.split(".")) diff --git a/setup.cfg b/setup.cfg index 1617b3eb..ebf43062 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,13 +1,5 @@ [aliases] test=pytest -[versioneer] -VCS = git -style = pep440 -versionfile_source = datasette/_version.py -versionfile_build = datasette/_version.py -tag_prefix = -parentdir_prefix = datasette- - [flake8] max-line-length = 160 diff --git a/setup.py b/setup.py index c4dea142..82696b38 100644 --- a/setup.py +++ b/setup.py @@ -1,9 +1,8 @@ +from re import VERBOSE from setuptools import setup, find_packages import os import sys -import versioneer - def get_long_description(): with open( @@ -24,8 +23,7 @@ def get_version(): setup( name="datasette", - version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), + version=get_version(), description="An open source multi-tool for exploring and publishing data", long_description=get_long_description(), long_description_content_type="text/markdown", diff --git a/tests/test_api.py b/tests/test_api.py index 461d3f81..53f33a9c 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,5 +1,6 @@ from datasette.plugins import DEFAULT_PLUGINS from datasette.utils import detect_json1 +from datasette.version import __version__ from .fixtures import ( # noqa app_client, app_client_no_files, @@ -1290,6 +1291,7 @@ def test_versions_json(app_client): assert "full" in response.json["python"] assert "datasette" in response.json assert "version" in response.json["datasette"] + assert response.json["datasette"]["version"] == __version__ assert "sqlite" in response.json assert "version" in response.json["sqlite"] assert "fts_versions" in response.json["sqlite"] diff --git a/tests/test_cli.py b/tests/test_cli.py index 1aff8cd1..b27cd5a8 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -6,6 +6,7 @@ from .fixtures import ( ) from datasette.plugins import DEFAULT_PLUGINS from datasette.cli import cli, serve +from datasette.version import __version__ from click.testing import CliRunner import io import json @@ -156,3 +157,9 @@ def test_uninstall(run_module): runner.invoke(cli, ["uninstall", "datasette-mock-plugin", "-y"]) run_module.assert_called_once_with("pip", run_name="__main__") assert sys.argv == ["pip", "uninstall", "datasette-mock-plugin", "-y"] + + +def test_version(): + runner = CliRunner() + result = runner.invoke(cli, ["--version"]) + assert result.output == "cli, version {}\n".format(__version__) diff --git a/versioneer.py b/versioneer.py deleted file mode 100644 index 858fc0bd..00000000 --- a/versioneer.py +++ /dev/null @@ -1,1885 +0,0 @@ -# Version: 0.18 - -"""The Versioneer - like a rocketeer, but for versions. - -The Versioneer -============== - -* like a rocketeer, but for versions! -* https://github.com/warner/python-versioneer -* Brian Warner -* License: Public Domain -* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy -* [![Latest Version] -(https://pypip.in/version/versioneer/badge.svg?style=flat) -](https://pypi.python.org/pypi/versioneer/) -* [![Build Status] -(https://travis-ci.org/warner/python-versioneer.png?branch=master) -](https://travis-ci.org/warner/python-versioneer) - -This is a tool for managing a recorded version number in distutils-based -python projects. 
The goal is to remove the tedious and error-prone "update -the embedded version string" step from your release process. Making a new -release should be as easy as recording a new tag in your version-control -system, and maybe making new tarballs. - - -## Quick Install - -* `pip install versioneer` to somewhere to your $PATH -* add a `[versioneer]` section to your setup.cfg (see below) -* run `versioneer install` in your source tree, commit the results - -## Version Identifiers - -Source trees come from a variety of places: - -* a version-control system checkout (mostly used by developers) -* a nightly tarball, produced by build automation -* a snapshot tarball, produced by a web-based VCS browser, like github's - "tarball from tag" feature -* a release tarball, produced by "setup.py sdist", distributed through PyPI - -Within each source tree, the version identifier (either a string or a number, -this tool is format-agnostic) can come from a variety of places: - -* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows - about recent "tags" and an absolute revision-id -* the name of the directory into which the tarball was unpacked -* an expanded VCS keyword ($Id$, etc) -* a `_version.py` created by some earlier build step - -For released software, the version identifier is closely related to a VCS -tag. Some projects use tag names that include more than just the version -string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool -needs to strip the tag prefix to extract the version identifier. For -unreleased software (between tags), the version identifier should provide -enough information to help developers recreate the same tree, while also -giving them an idea of roughly how old the tree is (after version 1.2, before -version 1.3). Many VCS systems can report a description that captures this, -for example `git describe --tags --dirty --always` reports things like -"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the -0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes. - -The version identifier is used for multiple purposes: - -* to allow the module to self-identify its version: `myproject.__version__` -* to choose a name and prefix for a 'setup.py sdist' tarball - -## Theory of Operation - -Versioneer works by adding a special `_version.py` file into your source -tree, where your `__init__.py` can import it. This `_version.py` knows how to -dynamically ask the VCS tool for version information at import time. - -`_version.py` also contains `$Revision$` markers, and the installation -process marks `_version.py` to have this marker rewritten with a tag name -during the `git archive` command. As a result, generated tarballs will -contain enough information to get the proper version. - -To allow `setup.py` to compute a version too, a `versioneer.py` is added to -the top level of your source tree, next to `setup.py` and the `setup.cfg` -that configures it. This overrides several distutils/setuptools commands to -compute the version when invoked, and changes `setup.py build` and `setup.py -sdist` to replace `_version.py` with a small static file that contains just -the generated version data. - -## Installation - -See [INSTALL.md](./INSTALL.md) for detailed installation instructions. - -## Version-String Flavors - -Code which uses Versioneer can learn about its version string at runtime by -importing `_version` from your main `__init__.py` file and running the -`get_versions()` function. 
From the "outside" (e.g. in `setup.py`), you can -import the top-level `versioneer.py` and run `get_versions()`. - -Both functions return a dictionary with different flavors of version -information: - -* `['version']`: A condensed version string, rendered using the selected - style. This is the most commonly used value for the project's version - string. The default "pep440" style yields strings like `0.11`, - `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section - below for alternative styles. - -* `['full-revisionid']`: detailed revision identifier. For Git, this is the - full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". - -* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the - commit date in ISO 8601 format. This will be None if the date is not - available. - -* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that - this is only accurate if run in a VCS checkout, otherwise it is likely to - be False or None - -* `['error']`: if the version string could not be computed, this will be set - to a string describing the problem, otherwise it will be None. It may be - useful to throw an exception in setup.py if this is set, to avoid e.g. - creating tarballs with a version string of "unknown". - -Some variants are more useful than others. Including `full-revisionid` in a -bug report should allow developers to reconstruct the exact code being tested -(or indicate the presence of local changes that should be shared with the -developers). `version` is suitable for display in an "about" box or a CLI -`--version` output: it can be easily compared against release notes and lists -of bugs fixed in various releases. - -The installer adds the following text to your `__init__.py` to place a basic -version in `YOURPROJECT.__version__`: - - from ._version import get_versions - __version__ = get_versions()['version'] - del get_versions - -## Styles - -The setup.cfg `style=` configuration controls how the VCS information is -rendered into a version string. - -The default style, "pep440", produces a PEP440-compliant string, equal to the -un-prefixed tag name for actual releases, and containing an additional "local -version" section with more detail for in-between builds. For Git, this is -TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags ---dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the -tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and -that this commit is two revisions ("+2") beyond the "0.11" tag. For released -software (exactly equal to a known tag), the identifier will only contain the -stripped tag, e.g. "0.11". - -Other styles are available. See [details.md](details.md) in the Versioneer -source tree for descriptions. - -## Debugging - -Versioneer tries to avoid fatal errors: if something goes wrong, it will tend -to return a version of "0+unknown". To investigate the problem, run `setup.py -version`, which will run the version-lookup code in a verbose mode, and will -display the full contents of `get_versions()` (including the `error` string, -which may help identify what went wrong). - -## Known Limitations - -Some situations are known to cause problems for Versioneer. This details the -most significant ones. More can be found on Github -[issues page](https://github.com/warner/python-versioneer/issues). - -### Subprojects - -Versioneer has limited support for source trees in which `setup.py` is not in -the root directory (e.g. 
`setup.py` and `.git/` are *not* siblings). The are -two common reasons why `setup.py` might not be in the root: - -* Source trees which contain multiple subprojects, such as - [Buildbot](https://github.com/buildbot/buildbot), which contains both - "master" and "slave" subprojects, each with their own `setup.py`, - `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI - distributions (and upload multiple independently-installable tarballs). -* Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other languages) in subdirectories. - -Versioneer will look for `.git` in parent directories, and most operations -should get the right version string. However `pip` and `setuptools` have bugs -and implementation details which frequently cause `pip install .` from a -subproject directory to fail to find a correct version string (so it usually -defaults to `0+unknown`). - -`pip install --editable .` should work correctly. `setup.py install` might -work too. - -Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in -some later version. - -[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking -this issue. The discussion in -[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the -issue from the Versioneer side in more detail. -[pip PR#3176](https://github.com/pypa/pip/pull/3176) and -[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve -pip to let Versioneer work correctly. - -Versioneer-0.16 and earlier only looked for a `.git` directory next to the -`setup.cfg`, so subprojects were completely unsupported with those releases. - -### Editable installs with setuptools <= 18.5 - -`setup.py develop` and `pip install --editable .` allow you to install a -project into a virtualenv once, then continue editing the source code (and -test) without re-installing after every change. - -"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a -convenient way to specify executable scripts that should be installed along -with the python package. - -These both work as expected when using modern setuptools. When using -setuptools-18.5 or earlier, however, certain operations will cause -`pkg_resources.DistributionNotFound` errors when running the entrypoint -script, which must be resolved by re-installing the package. This happens -when the install happens with one version, then the egg_info data is -regenerated while a different version is checked out. Many setup.py commands -cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into -a different virtualenv), so this can be surprising. - -[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes -this one, but upgrading to a newer version of setuptools should probably -resolve it. - -### Unicode version strings - -While Versioneer works (and is continually tested) with both Python 2 and -Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. -Newer releases probably generate unicode version strings on py2. It's not -clear that this is wrong, but it may be surprising for applications when then -write these strings to a network connection or include them in bytes-oriented -APIs like cryptographic checksums. - -[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates -this question. 
- - -## Updating Versioneer - -To upgrade your project to a new release of Versioneer, do the following: - -* install the new Versioneer (`pip install -U versioneer` or equivalent) -* edit `setup.cfg`, if necessary, to include any new configuration settings - indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. -* re-run `versioneer install` in your source tree, to replace - `SRC/_version.py` -* commit any changed files - -## Future Directions - -This tool is designed to make it easily extended to other version-control -systems: all VCS-specific components are in separate directories like -src/git/ . The top-level `versioneer.py` script is assembled from these -components by running make-versioneer.py . In the future, make-versioneer.py -will take a VCS name as an argument, and will construct a version of -`versioneer.py` that is specific to the given VCS. It might also take the -configuration arguments that are currently provided manually during -installation by editing setup.py . Alternatively, it might go the other -direction and include code from all supported VCS systems, reducing the -number of intermediate scripts. - - -## License - -To make Versioneer easier to embed, all its code is dedicated to the public -domain. The `_version.py` that it creates is also in the public domain. -Specifically, both are released under the Creative Commons "Public Domain -Dedication" license (CC0-1.0), as described in -https://creativecommons.org/publicdomain/zero/1.0/ . - -""" - -from __future__ import print_function - -try: - import configparser -except ImportError: - import ConfigParser as configparser -import errno -import json -import os -import re -import subprocess -import sys - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_root(): - """Get the project root directory. - - We require that all commands are run from the project root, i.e. the - directory that contains setup.py, setup.cfg, and versioneer.py . - """ - root = os.path.realpath(os.path.abspath(os.getcwd())) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - # allow 'python path/to/setup.py COMMAND' - root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ( - "Versioneer was unable to run the project root directory. " - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND')." - ) - raise VersioneerBadRootError(err) - try: - # Certain runtime workflows (setup.py install/develop in a setuptools - # tree) execute all dependencies in a single python process, so - # "versioneer" may be imported multiple times, and python's shared - # module-import table will cache the first one. So we can't use - # os.path.dirname(__file__), as that will find whichever - # versioneer.py was first imported, even in later projects. 
- me = os.path.realpath(os.path.abspath(__file__)) - me_dir = os.path.normcase(os.path.splitext(me)[0]) - vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) - if me_dir != vsr_dir: - print( - "Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(me), versioneer_py) - ) - except NameError: - pass - return root - - -def get_config_from_root(root): - """Read the project setup.cfg file to determine Versioneer config.""" - # This might raise EnvironmentError (if setup.cfg is missing), or - # configparser.NoSectionError (if it lacks a [versioneer] section), or - # configparser.NoOptionError (if it lacks "VCS="). See the docstring at - # the top of versioneer.py for instructions on writing your setup.cfg . - setup_cfg = os.path.join(root, "setup.cfg") - parser = configparser.SafeConfigParser() - with open(setup_cfg, "r") as f: - parser.readfp(f) - VCS = parser.get("versioneer", "VCS") # mandatory - - def get(parser, name): - if parser.has_option("versioneer", name): - return parser.get("versioneer", name) - return None - - cfg = VersioneerConfig() - cfg.VCS = VCS - cfg.style = get(parser, "style") or "" - cfg.versionfile_source = get(parser, "versionfile_source") - cfg.versionfile_build = get(parser, "versionfile_build") - cfg.tag_prefix = get(parser, "tag_prefix") - if cfg.tag_prefix in ("''", '""'): - cfg.tag_prefix = "" - cfg.parentdir_prefix = get(parser, "parentdir_prefix") - cfg.verbose = get(parser, "verbose") - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -# these dictionaries contain VCS-specific tools -LONG_VERSION_PY = {} -HANDLERS = {} - - -def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - p = None - for c in commands: - try: - dispcmd = str([c] + args) - # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen( - [c] + args, - cwd=cwd, - env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr else None), - ) - break - except EnvironmentError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, p.returncode - return stdout, p.returncode - - -LONG_VERSION_PY[ - "git" -] = ''' -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. 
Generated by -# versioneer-0.18 (https://github.com/warner/python-versioneer) - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). - git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" - git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" - git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "%(STYLE)s" - cfg.tag_prefix = "%(TAG_PREFIX)s" - cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" - cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY = {} -HANDLERS = {} - - -def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - p = None - for c in commands: - try: - dispcmd = str([c] + args) - # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen([c] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None)) - break - except EnvironmentError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %%s" %% dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %%s" %% (commands,)) - return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: - if verbose: - print("unable to run %%s (error)" %% dispcmd) - print("stdout was %%s" %% stdout) - return None, p.returncode - return stdout, p.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. 
We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for i in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %%s but none started with prefix %%s" %% - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") - date = keywords.get("date") - if date is not None: - # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %%d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r'\d', r)]) - if verbose: - print("discarding '%%s', no digits" %% ",".join(refs - tags)) - if verbose: - print("likely tags: %%s" %% ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. 
"2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - if verbose: - print("picking %%s" %% r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %%s not under git control" %% root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", - "--match", "%%s*" %% tag_prefix], - cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparseable. Maybe git-describe is misbehaving? 
- pieces["error"] = ("unable to parse git-describe output: '%%s'" - %% describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%%s' doesn't start with prefix '%%s'" - print(fmt %% (full_tag, tag_prefix)) - pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" - %% (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], - cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], - cwd=root)[0].strip() - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post.devDISTANCE - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += ".post.dev%%d" %% pieces["distance"] - else: - # exception #1 - rendered = "0.post.dev%%d" %% pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%%s'" %% style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
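-        # (Illustrative: with versionfile_source "src/myproject/_version.py"
-        # this strips three path components, walking up from _version.py to
-        # the project root.)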
- for i in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} -''' - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") - date = keywords.get("date") - if date is not None: - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r"\d", r)]) - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. 
"2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix) :] - if verbose: - print("picking %s" % r) - return { - "version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": None, - "date": date, - } - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return { - "version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": "no suitable tags", - "date": None, - } - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command( - GITS, - [ - "describe", - "--tags", - "--dirty", - "--always", - "--long", - "--match", - "%s*" % tag_prefix, - ], - cwd=root, - ) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[: git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) - if not mo: - # unparseable. Maybe git-describe is misbehaving? 
- pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( - full_tag, - tag_prefix, - ) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix) :] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ - 0 - ].strip() - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def do_vcs_install(manifest_in, versionfile_source, ipy): - """Git-specific installation logic for Versioneer. - - For Git, this means creating/changing .gitattributes to mark _version.py - for export-subst keyword substitution. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - files = [manifest_in, versionfile_source] - if ipy: - files.append(ipy) - try: - me = __file__ - if me.endswith(".pyc") or me.endswith(".pyo"): - me = os.path.splitext(me)[0] + ".py" - versioneer_file = os.path.relpath(me) - except NameError: - versioneer_file = "versioneer.py" - files.append(versioneer_file) - present = False - try: - f = open(".gitattributes", "r") - for line in f.readlines(): - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - f.close() - except EnvironmentError: - pass - if not present: - f = open(".gitattributes", "a+") - f.write("%s export-subst\n" % versionfile_source) - f.close() - files.append(".gitattributes") - run_command(GITS, ["add", "--"] + files) - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for i in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return { - "version": dirname[len(parentdir_prefix) :], - "full-revisionid": None, - "dirty": False, - "error": None, - "date": None, - } - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print( - "Tried directories %s but none started with prefix %s" - % (str(rootdirs), parentdir_prefix) - ) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.18) from -# revision-control system data, or from the parent directory name of an -# unpacked source archive. Distribution tarballs contain a pre-generated copy -# of this file. 
- -import json - -version_json = ''' -%s -''' # END VERSION_JSON - - -def get_versions(): - return json.loads(version_json) -""" - - -def versions_from_file(filename): - """Try to determine the version from _version.py if present.""" - try: - with open(filename) as f: - contents = f.read() - except EnvironmentError: - raise NotThisMethod("unable to read _version.py") - mo = re.search( - r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S - ) - if not mo: - mo = re.search( - r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S - ) - if not mo: - raise NotThisMethod("no version_json in _version.py") - return json.loads(mo.group(1)) - - -def write_to_version_file(filename, versions): - """Write the given version number to the given _version.py file.""" - os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) - with open(filename, "w") as f: - f.write(SHORT_VERSION_PY % contents) - - print("set %s to '%s'" % (filename, versions["version"])) - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post.devDISTANCE - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] - else: - # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return { - "version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None, - } - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return { - "version": rendered, - "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], - "error": None, - "date": pieces.get("date"), - } - - -class VersioneerBadRootError(Exception): - """The project root directory is unknown or missing key files.""" - - -def get_versions(verbose=False): - """Get the project version from whatever source is available. - - Returns dict with two keys: 'version' and 'full'. - """ - if "versioneer" in sys.modules: - # see the discussion in cmdclass.py:get_cmdclass() - del sys.modules["versioneer"] - - root = get_root() - cfg = get_config_from_root(root) - - assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" - handlers = HANDLERS.get(cfg.VCS) - assert handlers, "unrecognized VCS '%s'" % cfg.VCS - verbose = verbose or cfg.verbose - assert ( - cfg.versionfile_source is not None - ), "please set versioneer.versionfile_source" - assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" - - versionfile_abs = os.path.join(root, cfg.versionfile_source) - - # extract version from first of: _version.py, VCS command (e.g. 'git - # describe'), parentdir. This is meant to work for developers using a - # source checkout, for users of a tarball created by 'setup.py sdist', - # and for users of a tarball/zipball created by 'git archive' or github's - # download-from-tag feature or the equivalent in other VCSes. 
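-    # (In practice: git-archive tarballs are served by the expanded-keyword
-    # handler, sdist tarballs by versions_from_file, and development
-    # checkouts by the 'git describe' handler.)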
- - get_keywords_f = handlers.get("get_keywords") - from_keywords_f = handlers.get("keywords") - if get_keywords_f and from_keywords_f: - try: - keywords = get_keywords_f(versionfile_abs) - ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) - if verbose: - print("got version from expanded keyword %s" % ver) - return ver - except NotThisMethod: - pass - - try: - ver = versions_from_file(versionfile_abs) - if verbose: - print("got version from file %s %s" % (versionfile_abs, ver)) - return ver - except NotThisMethod: - pass - - from_vcs_f = handlers.get("pieces_from_vcs") - if from_vcs_f: - try: - pieces = from_vcs_f(cfg.tag_prefix, root, verbose) - ver = render(pieces, cfg.style) - if verbose: - print("got version from VCS %s" % ver) - return ver - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - if verbose: - print("got version from parentdir %s" % ver) - return ver - except NotThisMethod: - pass - - if verbose: - print("unable to compute version") - - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", - "date": None, - } - - -def get_version(): - """Get the short version string for this project.""" - return get_versions()["version"] - - -def get_cmdclass(): - """Get the custom setuptools/distutils subclasses used by Versioneer.""" - if "versioneer" in sys.modules: - del sys.modules["versioneer"] - # this fixes the "python setup.py develop" case (also 'install' and - # 'easy_install .'), in which subdependencies of the main project are - # built (using setup.py bdist_egg) in the same python process. Assume - # a main project A and a dependency B, which use different versions - # of Versioneer. A's setup.py imports A's Versioneer, leaving it in - # sys.modules by the time B's setup.py is executed, causing B to run - # with the wrong versioneer. Setuptools wraps the sub-dep builds in a - # sandbox that restores sys.modules to it's pre-build state, so the - # parent is protected against the child's "import versioneer". By - # removing ourselves from sys.modules here, before the child build - # happens, we protect the child from the parent's versioneer too. - # Also see https://github.com/warner/python-versioneer/issues/52 - - cmds = {} - - # we add "version" to both distutils and setuptools - from distutils.core import Command - - class cmd_version(Command): - description = "report generated version string" - user_options = [] - boolean_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - vers = get_versions(verbose=True) - print("Version: %s" % vers["version"]) - print(" full-revisionid: %s" % vers.get("full-revisionid")) - print(" dirty: %s" % vers.get("dirty")) - print(" date: %s" % vers.get("date")) - if vers["error"]: - print(" error: %s" % vers["error"]) - - cmds["version"] = cmd_version - - # we override "build_py" in both distutils and setuptools - # - # most invocation pathways end up running build_py: - # distutils/build -> build_py - # distutils/install -> distutils/build ->.. - # setuptools/bdist_wheel -> distutils/install ->.. - # setuptools/bdist_egg -> distutils/install_lib -> build_py - # setuptools/install -> bdist_egg ->.. - # setuptools/develop -> ? 
- # pip install: - # copies source tree to a tempdir before running egg_info/etc - # if .git isn't copied too, 'git describe' will fail - # then does setup.py bdist_wheel, or sometimes setup.py install - # setup.py egg_info -> ? - - # we override different "build_py" commands for both environments - if "setuptools" in sys.modules: - from setuptools.command.build_py import build_py as _build_py - else: - from distutils.command.build_py import build_py as _build_py - - class cmd_build_py(_build_py): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_py.run(self) - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - cmds["build_py"] = cmd_build_py - - if "cx_Freeze" in sys.modules: # cx_freeze enabled? - from cx_Freeze.dist import build_exe as _build_exe - - # nczeczulin reports that py2exe won't like the pep440-style string - # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. - # setup(console=[{ - # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION - # "product_version": versioneer.get_version(), - # ... - - class cmd_build_exe(_build_exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _build_exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - - cmds["build_exe"] = cmd_build_exe - del cmds["build_py"] - - if "py2exe" in sys.modules: # py2exe enabled? 
- try: - from py2exe.distutils_buildexe import py2exe as _py2exe # py3 - except ImportError: - from py2exe.build_exe import py2exe as _py2exe # py2 - - class cmd_py2exe(_py2exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _py2exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - - cmds["py2exe"] = cmd_py2exe - - # we override different "sdist" commands for both environments - if "setuptools" in sys.modules: - from setuptools.command.sdist import sdist as _sdist - else: - from distutils.command.sdist import sdist as _sdist - - class cmd_sdist(_sdist): - def run(self): - versions = get_versions() - self._versioneer_generated_versions = versions - # unless we update this, the command will keep using the old - # version - self.distribution.metadata.version = versions["version"] - return _sdist.run(self) - - def make_release_tree(self, base_dir, files): - root = get_root() - cfg = get_config_from_root(root) - _sdist.make_release_tree(self, base_dir, files) - # now locate _version.py in the new base_dir directory - # (remembering that it may be a hardlink) and replace it with an - # updated value - target_versionfile = os.path.join(base_dir, cfg.versionfile_source) - print("UPDATING %s" % target_versionfile) - write_to_version_file( - target_versionfile, self._versioneer_generated_versions - ) - - cmds["sdist"] = cmd_sdist - - return cmds - - -CONFIG_ERROR = """ -setup.cfg is missing the necessary Versioneer configuration. You need -a section like: - - [versioneer] - VCS = git - style = pep440 - versionfile_source = src/myproject/_version.py - versionfile_build = myproject/_version.py - tag_prefix = - parentdir_prefix = myproject- - -You will also need to edit your setup.py to use the results: - - import versioneer - setup(version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), ...) - -Please read the docstring in ./versioneer.py for configuration instructions, -edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. -""" - -SAMPLE_CONFIG = """ -# See the docstring in versioneer.py for instructions. Note that you must -# re-run 'versioneer.py setup' after changing this section, and commit the -# resulting files. 
- -[versioneer] -#VCS = git -#style = pep440 -#versionfile_source = -#versionfile_build = -#tag_prefix = -#parentdir_prefix = - -""" - -INIT_PY_SNIPPET = """ -from ._version import get_versions -__version__ = get_versions()['version'] -del get_versions -""" - - -def do_setup(): - """Main VCS-independent setup function for installing Versioneer.""" - root = get_root() - try: - cfg = get_config_from_root(root) - except ( - EnvironmentError, - configparser.NoSectionError, - configparser.NoOptionError, - ) as e: - if isinstance(e, (EnvironmentError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", file=sys.stderr) - with open(os.path.join(root, "setup.cfg"), "a") as f: - f.write(SAMPLE_CONFIG) - print(CONFIG_ERROR, file=sys.stderr) - return 1 - - print(" creating %s" % cfg.versionfile_source) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") - if os.path.exists(ipy): - try: - with open(ipy, "r") as f: - old = f.read() - except EnvironmentError: - old = "" - if INIT_PY_SNIPPET not in old: - print(" appending to %s" % ipy) - with open(ipy, "a") as f: - f.write(INIT_PY_SNIPPET) - else: - print(" %s unmodified" % ipy) - else: - print(" %s doesn't exist, ok" % ipy) - ipy = None - - # Make sure both the top-level "versioneer.py" and versionfile_source - # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so - # they'll be copied into source distributions. Pip won't be able to - # install the package without this. - manifest_in = os.path.join(root, "MANIFEST.in") - simple_includes = set() - try: - with open(manifest_in, "r") as f: - for line in f: - if line.startswith("include "): - for include in line.split()[1:]: - simple_includes.add(include) - except EnvironmentError: - pass - # That doesn't cover everything MANIFEST.in can do - # (http://docs.python.org/2/distutils/sourcedist.html#commands), so - # it might give some false negatives. Appending redundant 'include' - # lines is safe, though. - if "versioneer.py" not in simple_includes: - print(" appending 'versioneer.py' to MANIFEST.in") - with open(manifest_in, "a") as f: - f.write("include versioneer.py\n") - else: - print(" 'versioneer.py' already in MANIFEST.in") - if cfg.versionfile_source not in simple_includes: - print( - " appending versionfile_source ('%s') to MANIFEST.in" - % cfg.versionfile_source - ) - with open(manifest_in, "a") as f: - f.write("include %s\n" % cfg.versionfile_source) - else: - print(" versionfile_source already in MANIFEST.in") - - # Make VCS-specific changes. For git, this means creating/changing - # .gitattributes to mark _version.py for export-subst keyword - # substitution. 
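-    # (export-subst tells "git archive" to expand the $Format:...$
-    # placeholders in _version.py, so tarballs built by git archive or
-    # GitHub's download-from-tag feature still carry version keywords.)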
- do_vcs_install(manifest_in, cfg.versionfile_source, ipy) - return 0 - - -def scan_setup_py(): - """Validate the contents of setup.py against Versioneer's expectations.""" - found = set() - setters = False - errors = 0 - with open("setup.py", "r") as f: - for line in f.readlines(): - if "import versioneer" in line: - found.add("import") - if "versioneer.get_cmdclass()" in line: - found.add("cmdclass") - if "versioneer.get_version()" in line: - found.add("get_version") - if "versioneer.VCS" in line: - setters = True - if "versioneer.versionfile_source" in line: - setters = True - if len(found) != 3: - print("") - print("Your setup.py appears to be missing some important items") - print("(but I might be wrong). Please make sure it has something") - print("roughly like the following:") - print("") - print(" import versioneer") - print(" setup( version=versioneer.get_version(),") - print(" cmdclass=versioneer.get_cmdclass(), ...)") - print("") - errors += 1 - if setters: - print("You should remove lines like 'versioneer.VCS = ' and") - print("'versioneer.versionfile_source = ' . This configuration") - print("now lives in setup.cfg, and should be removed from setup.py") - print("") - errors += 1 - return errors - - -if __name__ == "__main__": - cmd = sys.argv[1] - if cmd == "setup": - errors = do_setup() - errors += scan_setup_py() - if errors: - sys.exit(1) From 89519f9a3765bce7544d83d872db987c75757d9a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 28 Oct 2020 21:05:40 -0700 Subject: [PATCH 0241/1705] Fixed bug with download of BLOB null, refs #1050 --- datasette/views/table.py | 2 +- tests/fixtures.py | 1 + tests/test_api.py | 6 ++++-- tests/test_html.py | 29 ++++++++++++++++++++++------- 4 files changed, 28 insertions(+), 10 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 717341ae..d190b6af 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -1083,7 +1083,7 @@ class BlobView(BaseView): "Content-Disposition": 'attachment; filename="{}"'.format(filename), } return Response( - body=rows[0][column], + body=rows[0][column] or b"", status=200, headers=headers, content_type="application/binary", diff --git a/tests/fixtures.py b/tests/fixtures.py index 7786ca8c..31638fc8 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -667,6 +667,7 @@ CREATE VIEW searchable_view_configured_by_metadata AS TABLE_PARAMETERIZED_SQL = [ ("insert into binary_data (data) values (?);", [b"\x15\x1c\x02\xc7\xad\x05\xfe"]), ("insert into binary_data (data) values (?);", [b"\x15\x1c\x03\xc7\xad\x05\xfe"]), + ("insert into binary_data (data) values (null);", []), ] EXTRA_DATABASE_SQL = """ diff --git a/tests/test_api.py b/tests/test_api.py index 53f33a9c..5e9c1a0a 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -106,7 +106,7 @@ def test_database_page(app_client): "name": "binary_data", "columns": ["data"], "primary_keys": [], - "count": 2, + "count": 3, "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, @@ -1812,6 +1812,7 @@ def test_inspect_file_used_for_count(app_client_immutable_and_inspect_file): [ {"rowid": 1, "data": {"$base64": True, "encoded": "FRwCx60F/g=="}}, {"rowid": 2, "data": {"$base64": True, "encoded": "FRwDx60F/g=="}}, + {"rowid": 3, "data": None}, ], None, ), @@ -1820,7 +1821,8 @@ def test_inspect_file_used_for_count(app_client_immutable_and_inspect_file): None, ( '{"rowid": 1, "data": {"$base64": true, "encoded": "FRwCx60F/g=="}}\n' - '{"rowid": 2, "data": {"$base64": true, "encoded": 
"FRwDx60F/g=="}}' + '{"rowid": 2, "data": {"$base64": true, "encoded": "FRwDx60F/g=="}}\n' + '{"rowid": 3, "data": null}' ), ), ], diff --git a/tests/test_html.py b/tests/test_html.py index 06b11de5..3c1101d2 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1238,21 +1238,36 @@ def test_binary_data_display(app_client): '
', '', ], + [ + '', + '', + '', + ], ] assert expected_tds == [ [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") ] -def test_blob_download(app_client): - response = app_client.get("/fixtures/binary_data/-/blob/1/data.blob") +@pytest.mark.parametrize( + "path,expected_body,expected_filename", + [ + ( + "/fixtures/binary_data/-/blob/1/data.blob", + b"\x15\x1c\x02\xc7\xad\x05\xfe", + "binary_data-1-data.blob", + ), + ("/fixtures/binary_data/-/blob/3/data.blob", b"", "binary_data-3-data.blob"), + ], +) +def test_blob_download(app_client, path, expected_body, expected_filename): + response = app_client.get(path) assert response.status == 200 - assert response.body == b"\x15\x1c\x02\xc7\xad\x05\xfe" + assert response.body == expected_body assert response.headers["x-content-type-options"] == "nosniff" - assert ( - response.headers["content-disposition"] - == 'attachment; filename="binary_data-1-data.blob"' - ) + assert response.headers[ + "content-disposition" + ] == 'attachment; filename="{}"'.format(expected_filename) assert response.headers["content-type"] == "application/binary" From d6f9ff71378c4eab34dad181c23cfc143a4aef2d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Oct 2020 12:35:25 -0700 Subject: [PATCH 0242/1705] Docs on Designing URLs for your plugin - closes #1053 --- docs/introspection.rst | 2 ++ docs/plugin_hooks.rst | 2 ++ docs/writing_plugins.rst | 30 ++++++++++++++++++++++++++++++ 3 files changed, 34 insertions(+) diff --git a/docs/introspection.rst b/docs/introspection.rst index 08006529..698ba95f 100644 --- a/docs/introspection.rst +++ b/docs/introspection.rst @@ -1,3 +1,5 @@ +.. _introspection: + Introspection ============= diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 5cfae22a..b2c62ccd 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -556,6 +556,8 @@ The view function can be a regular function or an ``async def`` function, depend The function can either return a :ref:`internals_response` or it can return nothing and instead respond directly to the request using the ASGI ``send`` function (for advanced uses only). +See :ref:`writing_plugins_designing_urls` for tips on designing the URL routes used by your plugin. + Examples: `datasette-auth-github `__, `datasette-psutil `__ .. _plugin_register_facet_classes: diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index f763f617..29fcca13 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -202,6 +202,36 @@ The plugin configuration could also be set at the top level of ``metadata.json`` Now that ``datasette-cluster-map`` plugin configuration will apply to every table in every database. +.. _writing_plugins_designing_urls: + +Designing URLs for your plugin +------------------------------ + +You can register new URL routes within Datasette using the :ref:`plugin_register_routes` plugin hook. + +Datasette's default URLs include these: + +- ``/dbname`` - database page +- ``/dbname/tablename`` - table page +- ``/dbname/tablename/pk`` - row page + +See :ref:`pages` and :ref:`introspection` for more default URL routes. + +To avoid accidentally conflicting with a database file that may be loaded into Datasette, plugins should register URLs using a ``/-/`` prefix. For example, if your plugin adds a new interface for uploading Excel files you might register a URL route like this one: + +- ``/-/upload-excel`` + +Try to avoid registering URLs that clash with other plugins that your users might have installed. 
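+
+A minimal sketch of registering the ``/-/upload-excel`` route above with the :ref:`plugin_register_routes` hook might look like this (the ``upload_excel`` view is a hypothetical example, not something Datasette provides):
+
+.. code-block:: python
+
+    from datasette import hookimpl
+    from datasette.utils.asgi import Response
+
+
+    async def upload_excel(request):
+        # Hypothetical view - a real plugin would process the upload here
+        return Response.html("Upload form would go here")
+
+
+    @hookimpl
+    def register_routes():
+        # The /-/ prefix avoids clashing with attached database names
+        return [(r"^/-/upload-excel$", upload_excel)]
+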
There is no central repository of reserved URL paths (yet) but you can review existing plugins by browsing the `datasette-plugin topic `__ on GitHub. + +If your plugin includes functionality that relates to a specific database you could also register a URL route like this: + +- ``/dbname/-/upload-excel`` + +Reserving routes under ``/dbname/tablename/-/...`` is not a good idea because a table could conceivably include a row with a primary key value of ``-``. Instead, you could use a pattern like this: + +- ``/dbname/-/upload-excel/tablename`` + + .. _writing_plugins_building_urls: Building URLs within plugins From 78b3eeaad9189eb737014f53212082684f4bb0d4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Oct 2020 15:01:38 -0700 Subject: [PATCH 0243/1705] .blob output renderer * _blob_hash= checking plus refactored to use new BadRequest class, refs #1050 * Replace BlobView with new .blob renderer, closes #1050 * .blob downloads on arbitrary queries, closes #1051 --- datasette/app.py | 19 +++++------ datasette/blob_renderer.py | 61 ++++++++++++++++++++++++++++++++++ datasette/plugins.py | 1 + datasette/templates/query.html | 2 +- datasette/utils/asgi.py | 17 +++++++--- datasette/views/base.py | 5 +-- datasette/views/database.py | 20 +++++++++++ datasette/views/table.py | 61 ++++------------------------------ docs/pages.rst | 11 ------ tests/test_csv.py | 18 ++++++++++ tests/test_html.py | 60 ++++++++++++++++++--------------- tests/test_permissions.py | 11 ------ 12 files changed, 165 insertions(+), 121 deletions(-) create mode 100644 datasette/blob_renderer.py diff --git a/datasette/app.py b/datasette/app.py index 5b50294f..3016043a 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -38,7 +38,7 @@ from .views.special import ( PermissionsDebugView, MessagesDebugView, ) -from .views.table import RowView, TableView, BlobView +from .views.table import RowView, TableView from .renderer import json_renderer from .database import Database, QueryInterrupted @@ -60,6 +60,7 @@ from .utils import ( ) from .utils.asgi import ( AsgiLifespan, + Base400, Forbidden, NotFound, Request, @@ -923,10 +924,6 @@ class Datasette: + renderer_regex + r")?$", ) - add_route( - BlobView.as_view(self), - r"/(?P[^/]+)/(?P
[^/]+?)/\-/blob/(?P[^/]+?)/(?P[^/]+)\.blob$", - ) self._register_custom_units() async def setup_db(): @@ -1113,11 +1110,7 @@ class DatasetteRouter: pdb.post_mortem(exception.__traceback__) title = None - if isinstance(exception, NotFound): - status = 404 - info = {} - message = exception.args[0] - elif isinstance(exception, Forbidden): + if isinstance(exception, Forbidden): status = 403 info = {} message = exception.args[0] @@ -1129,6 +1122,10 @@ class DatasetteRouter: if custom_response is not None: await custom_response.asgi_send(send) return + elif isinstance(exception, Base400): + status = exception.status + info = {} + message = exception.args[0] elif isinstance(exception, DatasetteError): status = exception.status info = exception.error_dict @@ -1308,6 +1305,6 @@ class Urls: return "{}/{}".format(self.table(database, table), row_path) def row_blob(self, database, table, row_path, column): - return self.table(database, table) + "/-/blob/{}/{}.blob".format( + return self.table(database, table) + "/{}.blob?_blob_column={}".format( row_path, urllib.parse.quote_plus(column) ) diff --git a/datasette/blob_renderer.py b/datasette/blob_renderer.py new file mode 100644 index 00000000..794b153e --- /dev/null +++ b/datasette/blob_renderer.py @@ -0,0 +1,61 @@ +from datasette import hookimpl +from datasette.utils.asgi import Response, BadRequest +from datasette.utils import to_css_class +import hashlib + +_BLOB_COLUMN = "_blob_column" +_BLOB_HASH = "_blob_hash" + + +async def render_blob(datasette, database, rows, columns, request, table, view_name): + if _BLOB_COLUMN not in request.args: + raise BadRequest("?{}= is required".format(_BLOB_COLUMN)) + blob_column = request.args[_BLOB_COLUMN] + if blob_column not in columns: + raise BadRequest("{} is not a valid column".format(blob_column)) + + # If ?_blob_hash= provided, use that to select the row - otherwise use first row + blob_hash = None + if _BLOB_HASH in request.args: + blob_hash = request.args[_BLOB_HASH] + for row in rows: + value = row[blob_column] + if hashlib.sha256(value).hexdigest() == blob_hash: + break + else: + # Loop did not break + raise BadRequest( + "Link has expired - the requested binary content has changed or could not be found." 
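+                # (Comparing SHA-256 digests means a .blob link only ever serves
+                # the exact bytes it was generated for - if the row content
+                # changes, the link stops working rather than returning
+                # different data.)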
+ ) + else: + row = rows[0] + + value = row[blob_column] + filename_bits = [] + if table: + filename_bits.append(to_css_class(table)) + if "pk_path" in request.url_vars: + filename_bits.append(request.url_vars["pk_path"]) + filename_bits.append(to_css_class(blob_column)) + if blob_hash: + filename_bits.append(blob_hash[:6]) + filename = "-".join(filename_bits) + ".blob" + headers = { + "X-Content-Type-Options": "nosniff", + "Content-Disposition": 'attachment; filename="{}"'.format(filename), + } + return Response( + body=value or b"", + status=200, + headers=headers, + content_type="application/binary", + ) + + +@hookimpl +def register_output_renderer(): + return { + "extension": "blob", + "render": render_blob, + "can_render": lambda: False, + } diff --git a/datasette/plugins.py b/datasette/plugins.py index cb3d2c34..1c2f392f 100644 --- a/datasette/plugins.py +++ b/datasette/plugins.py @@ -12,6 +12,7 @@ DEFAULT_PLUGINS = ( "datasette.actor_auth_cookie", "datasette.default_permissions", "datasette.default_magic_parameters", + "datasette.blob_renderer", ) pm = pluggy.PluginManager("datasette") diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 78a1c123..9b3fff25 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -70,7 +70,7 @@ {% for row in display_rows %} {% for column, td in zip(columns, row) %} - + {% endfor %} {% endfor %} diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index bd388390..f438f829 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -1,4 +1,5 @@ import json +from os import EX_CANTCREAT from datasette.utils import MultiParams from mimetypes import guess_type from urllib.parse import parse_qs, urlunparse, parse_qsl @@ -15,12 +16,20 @@ Morsel._reserved["samesite"] = "SameSite" # https://github.com/encode/starlette/blob/519f575/starlette/responses.py#L17 -class NotFound(Exception): - pass +class Base400(Exception): + status = 400 -class Forbidden(Exception): - pass +class NotFound(Base400): + status = 404 + + +class Forbidden(Base400): + status = 403 + + +class BadRequest(Base400): + status = 400 SAMESITE_VALUES = ("strict", "lax", "none") diff --git a/datasette/views/base.py b/datasette/views/base.py index f9bbe45d..4432ddca 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -26,6 +26,7 @@ from datasette.utils.asgi import ( Forbidden, NotFound, Response, + BadRequest, ) ureg = pint.UnitRegistry() @@ -260,9 +261,9 @@ class DataView(BaseView): if stream: # Some quick sanity checks if not self.ds.config("allow_csv_stream"): - raise DatasetteError("CSV streaming is disabled", status=400) + raise BadRequest("CSV streaming is disabled") if request.args.get("_next"): - raise DatasetteError("_next not allowed for CSV streaming", status=400) + raise BadRequest("_next not allowed for CSV streaming") kwargs["_size"] = "max" # Fetch the first page try: diff --git a/datasette/views/database.py b/datasette/views/database.py index 00fbc0b0..8b9e8833 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -1,4 +1,5 @@ import os +import hashlib import itertools import jinja2 import json @@ -10,6 +11,7 @@ from datasette.utils import ( validate_sql_select, is_url, path_with_added_args, + path_with_format, path_with_removed_args, InvalidSql, ) @@ -342,6 +344,24 @@ class QueryView(DataView): url=jinja2.escape(value.strip()) ) ) + elif isinstance(display_value, bytes): + blob_url = path_with_format( + request, + "blob", + extra_qs={ + "_blob_column": column, + 
"_blob_hash": hashlib.sha256( + display_value + ).hexdigest(), + }, + ) + display_value = jinja2.Markup( + '<Binary: {} byte{}>'.format( + blob_url, + len(display_value), + "" if len(value) == 1 else "s", + ) + ) display_row.append(display_value) display_rows.append(display_row) diff --git a/datasette/views/table.py b/datasette/views/table.py index d190b6af..079e0b0a 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -23,9 +23,9 @@ from datasette.utils import ( urlsafe_components, value_as_boolean, ) -from datasette.utils.asgi import NotFound, Response +from datasette.utils.asgi import BadRequest, NotFound from datasette.filters import Filters -from .base import BaseView, DataView, DatasetteError, ureg +from .base import DataView, DatasetteError, ureg from .database import QueryView LINK_WITH_LABEL = ( @@ -469,7 +469,7 @@ class TableView(RowTableShared): for i, (key, search_text) in enumerate(search_args.items()): search_col = key.split("_search_", 1)[1] if search_col not in await db.table_columns(fts_table): - raise DatasetteError("Cannot search by that column", status=400) + raise BadRequest("Cannot search by that column") where_clauses.append( "rowid in (select rowid from {fts_table} where {search_col} match {match_clause})".format( @@ -614,11 +614,11 @@ class TableView(RowTableShared): raise ValueError except ValueError: - raise DatasetteError("_size must be a positive integer", status=400) + raise BadRequest("_size must be a positive integer") if page_size > self.ds.max_returned_rows: - raise DatasetteError( - "_size must be <= {}".format(self.ds.max_returned_rows), status=400 + raise BadRequest( + "_size must be <= {}".format(self.ds.max_returned_rows) ) extra_args["page_size"] = page_size @@ -665,7 +665,7 @@ class TableView(RowTableShared): if not self.ds.config("allow_facet") and any( arg.startswith("_facet") for arg in request.args ): - raise DatasetteError("_facet= is not allowed", status=400) + raise BadRequest("_facet= is not allowed") # pylint: disable=no-member facet_classes = list( @@ -1041,50 +1041,3 @@ class RowView(RowTableShared): ) foreign_key_tables.append({**fk, **{"count": count}}) return foreign_key_tables - - -class BlobView(BaseView): - async def get(self, request, db_name, table, pk_path, column): - await self.check_permissions( - request, - [ - ("view-table", (db_name, table)), - ("view-database", db_name), - "view-instance", - ], - ) - try: - db = self.ds.get_database(db_name) - except KeyError: - raise NotFound("Database {} does not exist".format(db_name)) - if not await db.table_exists(table): - raise NotFound("Table {} does not exist".format(table)) - # Ensure the column exists and is of type BLOB - column_types = {c.name: c.type for c in await db.table_column_details(table)} - if column not in column_types: - raise NotFound("Table {} does not have column {}".format(table, column)) - if column_types[column].upper() not in ("BLOB", ""): - raise NotFound( - "Table {} does not have column {} of type BLOB".format(table, column) - ) - # Ensure the row exists for the pk_path - pk_values = urlsafe_components(pk_path) - sql, params, _ = await _sql_params_pks(db, table, pk_values) - results = await db.execute(sql, params, truncate=True) - rows = list(results.rows) - if not rows: - raise NotFound("Record not found: {}".format(pk_values)) - - # Serve back the binary data - filename_bits = [to_css_class(table), pk_path, to_css_class(column)] - filename = "-".join(filename_bits) + ".blob" - headers = { - "X-Content-Type-Options": "nosniff", - 
"Content-Disposition": 'attachment; filename="{}"'.format(filename), - } - return Response( - body=rows[0][column] or b"", - status=200, - headers=headers, - content_type="application/binary", - ) diff --git a/docs/pages.rst b/docs/pages.rst index 3ad58565..db970ead 100644 --- a/docs/pages.rst +++ b/docs/pages.rst @@ -77,14 +77,3 @@ Note that this URL includes the encoded primary key of the record. Here's that same page as JSON: `../people/uk.org.publicwhip%2Fperson%2F10001.json `_ - -.. _BlobView: - -Blob -==== - -SQLite databases can contain binary data, stored in a ``BLOB`` column. Datasette makes the content of these columns available to download directly, at URLs that look like the following:: - - /database-name/table-name/-/blob/row-identifier/column-name.blob - -Binary content is also made available as a base64 encoded string in the ``.json`` representation of the row. diff --git a/tests/test_csv.py b/tests/test_csv.py index 86e402b5..863659f7 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -1,3 +1,5 @@ +import textwrap +import pytest from .fixtures import ( # noqa app_client, app_client_csv_max_mb_one, @@ -78,6 +80,22 @@ def test_table_csv_with_nullable_labels(app_client): assert EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV == response.text +@pytest.mark.xfail +def test_table_csv_blob_columns(app_client): + response = app_client.get("/fixtures/binary_data.csv") + assert response.status == 200 + assert "text/plain; charset=utf-8" == response.headers["content-type"] + assert EXPECTED_TABLE_CSV == textwrap.dedent( + """ + rowid,data + 1,/fixtures/binary_data/-/blob/1/data.blob + 2,/fixtures/binary_data/-/blob/1/data.blob + """.strip().replace( + "\n", "\r\n" + ) + ) + + def test_custom_sql_csv(app_client): response = app_client.get( "/fixtures.csv?sql=select+content+from+simple_primary_key+limit+2" diff --git a/tests/test_html.py b/tests/test_html.py index 3c1101d2..95b5128a 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1223,7 +1223,7 @@ def test_extra_where_clauses(app_client): ] -def test_binary_data_display(app_client): +def test_binary_data_display_in_table(app_client): response = app_client.get("/fixtures/binary_data") assert response.status == 200 table = Soup(response.body, "html.parser").find("table") @@ -1231,12 +1231,12 @@ def test_binary_data_display(app_client): [ '', '', - '', + '', ], [ '', '', - '', + '', ], [ '', @@ -1249,21 +1249,38 @@ def test_binary_data_display(app_client): ] +def test_binary_data_display_in_query(app_client): + response = app_client.get("/fixtures?sql=select+*+from+binary_data") + assert response.status == 200 + table = Soup(response.body, "html.parser").find("table") + expected_tds = [ + [ + '' + ], + [ + '' + ], + [''], + ] + assert expected_tds == [ + [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") + ] + + @pytest.mark.parametrize( - "path,expected_body,expected_filename", + "path,expected_filename", [ + ("/fixtures/binary_data/1.blob?_blob_column=data", "binary_data-1-data.blob"), ( - "/fixtures/binary_data/-/blob/1/data.blob", - b"\x15\x1c\x02\xc7\xad\x05\xfe", - "binary_data-1-data.blob", + "/fixtures.blob?sql=select+*+from+binary_data&_blob_column=data&_blob_hash=f3088978da8f9aea479ffc7f631370b968d2e855eeb172bea7f6c7a04262bb6d", + "data-f30889.blob", ), - ("/fixtures/binary_data/-/blob/3/data.blob", b"", "binary_data-3-data.blob"), ], ) -def test_blob_download(app_client, path, expected_body, expected_filename): +def test_blob_download(app_client, path, expected_filename): response = 
app_client.get(path) assert response.status == 200 - assert response.body == expected_body + assert response.body == b"\x15\x1c\x02\xc7\xad\x05\xfe" assert response.headers["x-content-type-options"] == "nosniff" assert response.headers[ "content-disposition" @@ -1274,28 +1291,17 @@ def test_blob_download(app_client, path, expected_body, expected_filename): @pytest.mark.parametrize( "path,expected_message", [ - ("/baddb/binary_data/-/blob/1/data.blob", "Database baddb does not exist"), + ("/fixtures/binary_data/1.blob", "?_blob_column= is required"), + ("/fixtures/binary_data/1.blob?_blob_column=foo", "foo is not a valid column"), ( - "/fixtures/binary_data_bad/-/blob/1/data.blob", - "Table binary_data_bad does not exist", - ), - ( - "/fixtures/binary_data/-/blob/1/bad.blob", - "Table binary_data does not have column bad", - ), - ( - "/fixtures/facetable/-/blob/1/state.blob", - "Table facetable does not have column state of type BLOB", - ), - ( - "/fixtures/binary_data/-/blob/101/data.blob", - "Record not found: ['101']", + "/fixtures/binary_data/1.blob?_blob_column=data&_blob_hash=x", + "Link has expired - the requested binary content has changed or could not be found.", ), ], ) -def test_blob_download_not_found_messages(app_client, path, expected_message): +def test_blob_download_invalid_messages(app_client, path, expected_message): response = app_client.get(path) - assert response.status == 404 + assert response.status == 400 assert expected_message in response.text diff --git a/tests/test_permissions.py b/tests/test_permissions.py index a935a495..4d1b09b8 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -417,17 +417,6 @@ def cascade_app_client(): ("/fixtures/binary_data/1", ["table"], 200), ("/fixtures/binary_data/1", ["table", "database"], 200), ("/fixtures/binary_data/1", ["table", "database", "instance"], 200), - # ... and for binary blob - ("/fixtures/binary_data/-/blob/1/data.blob", [], 403), - ("/fixtures/binary_data/-/blob/1/data.blob", ["database"], 403), - ("/fixtures/binary_data/-/blob/1/data.blob", ["instance"], 403), - ("/fixtures/binary_data/-/blob/1/data.blob", ["table"], 200), - ("/fixtures/binary_data/-/blob/1/data.blob", ["table", "database"], 200), - ( - "/fixtures/binary_data/-/blob/1/data.blob", - ["table", "database", "instance"], - 200, - ), # Can view query even if not allowed database or instance ("/fixtures/magic_parameters", [], 403), ("/fixtures/magic_parameters", ["database"], 403), From 178b7e8749f14300363af5961e9f8964595264d9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Oct 2020 15:47:32 -0700 Subject: [PATCH 0244/1705] .csv now links to .blob downloads Closes #1063, closes #1034 --- datasette/utils/__init__.py | 6 ++++-- datasette/views/base.py | 36 ++++++++++++++++++++++++++++++++++++ tests/test_csv.py | 28 +++++++++++++++++----------- tests/test_utils.py | 8 ++++++++ 4 files changed, 65 insertions(+), 13 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index f819aa82..33decbfc 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -678,10 +678,12 @@ async def resolve_table_and_format( return table_and_format, None -def path_with_format(request, format, extra_qs=None): +def path_with_format(request, format, extra_qs=None, replace_format=None): qs = extra_qs or {} path = request.path - if "." in request.path: + if replace_format and path.endswith(".{}".format(replace_format)): + path = path[: -(1 + len(replace_format))] + if "." 
in path: qs["_format"] = format else: path = "{}.{}".format(path, format) diff --git a/datasette/views/base.py b/datasette/views/base.py index 4432ddca..6ca78934 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -1,5 +1,6 @@ import asyncio import csv +import hashlib import re import time import urllib @@ -14,6 +15,7 @@ from datasette.utils import ( InvalidSql, LimitedWriter, call_with_supported_arguments, + path_from_row_pks, path_with_added_args, path_with_removed_args, path_with_format, @@ -310,6 +312,40 @@ class DataView(BaseView): first = False next = data.get("next") for row in data["rows"]: + if any(isinstance(r, bytes) for r in row): + new_row = [] + for column, cell in zip(headings, row): + if isinstance(cell, bytes): + # If this is a table page, use .urls.row_blob() + if data.get("table"): + pks = data.get("primary_keys") or [] + cell = self.ds.absolute_url( + request, + self.ds.urls.row_blob( + database, + data["table"], + path_from_row_pks(row, pks, not pks), + column, + ), + ) + else: + # Otherwise generate URL for this query + cell = self.ds.absolute_url( + request, + path_with_format( + request, + "blob", + extra_qs={ + "_blob_column": column, + "_blob_hash": hashlib.sha256( + cell + ).hexdigest(), + }, + replace_format="csv", + ), + ) + new_row.append(cell) + row = new_row if not expanded_columns: # Simple path await writer.writerow(row) diff --git a/tests/test_csv.py b/tests/test_csv.py index 863659f7..1a701828 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -1,5 +1,3 @@ -import textwrap -import pytest from .fixtures import ( # noqa app_client, app_client_csv_max_mb_one, @@ -80,19 +78,27 @@ def test_table_csv_with_nullable_labels(app_client): assert EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV == response.text -@pytest.mark.xfail def test_table_csv_blob_columns(app_client): response = app_client.get("/fixtures/binary_data.csv") assert response.status == 200 assert "text/plain; charset=utf-8" == response.headers["content-type"] - assert EXPECTED_TABLE_CSV == textwrap.dedent( - """ - rowid,data - 1,/fixtures/binary_data/-/blob/1/data.blob - 2,/fixtures/binary_data/-/blob/1/data.blob - """.strip().replace( - "\n", "\r\n" - ) + assert response.text == ( + "rowid,data\r\n" + "1,http://localhost/fixtures/binary_data/1.blob?_blob_column=data\r\n" + "2,http://localhost/fixtures/binary_data/2.blob?_blob_column=data\r\n" + "3,\r\n" + ) + + +def test_custom_sql_csv_blob_columns(app_client): + response = app_client.get("/fixtures.csv?sql=select+rowid,+data+from+binary_data") + assert response.status == 200 + assert "text/plain; charset=utf-8" == response.headers["content-type"] + assert response.text == ( + "rowid,data\r\n" + '1,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_blob_column=data&_blob_hash=f3088978da8f9aea479ffc7f631370b968d2e855eeb172bea7f6c7a04262bb6d"\r\n' + '2,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_blob_column=data&_blob_hash=b835b0483cedb86130b9a2c280880bf5fadc5318ddf8c18d0df5204d40df1724"\r\n' + "3,\r\n" ) diff --git a/tests/test_utils.py b/tests/test_utils.py index 0e2af098..bae3b685 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -386,6 +386,14 @@ def test_path_with_format(path, format, extra_qs, expected): assert expected == actual +def test_path_with_format_replace_format(): + request = Request.fake("/foo/bar.csv") + assert utils.path_with_format(request, "blob") == "/foo/bar.csv?_format=blob" + assert ( + utils.path_with_format(request, "blob", 
replace_format="csv") == "/foo/bar.blob" + ) + + @pytest.mark.parametrize( "bytes,expected", [ From 1a861be19e326e0c88230a711a1b6536366697d7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Oct 2020 15:58:40 -0700 Subject: [PATCH 0245/1705] Fixed test_max_csv_mb test that I just broke, refs #1063 --- tests/test_csv.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/test_csv.py b/tests/test_csv.py index 1a701828..3e91fb04 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -130,8 +130,10 @@ def test_csv_with_non_ascii_characters(app_client): def test_max_csv_mb(app_client_csv_max_mb_one): response = app_client_csv_max_mb_one.get( - "/fixtures.csv?sql=select+randomblob(10000)+" - "from+compound_three_primary_keys&_stream=1&_size=max" + ( + "/fixtures.csv?sql=select+'{}'+" + "from+compound_three_primary_keys&_stream=1&_size=max" + ).format("abcdefg" * 10000) ) # It's a 200 because we started streaming before we knew the error assert response.status == 200 From 18a64fbb29271ce607937110bbdb55488c43f4e0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Oct 2020 20:45:15 -0700 Subject: [PATCH 0246/1705] Navigation menu plus menu_links() hook Closes #1064, refs #690. --- datasette/app.py | 12 +++++++++ datasette/default_menu_links.py | 40 +++++++++++++++++++++++++++ datasette/hookspecs.py | 5 ++++ datasette/plugins.py | 1 + datasette/static/app.css | 31 ++++++++++++++++++--- datasette/templates/base.html | 48 ++++++++++++++++++++++++++++----- docs/plugin_hooks.rst | 32 ++++++++++++++++++++++ tests/fixtures.py | 2 ++ tests/plugins/my_plugin.py | 6 +++++ tests/plugins/my_plugin_2.py | 9 +++++++ tests/test_auth.py | 3 +-- tests/test_plugins.py | 17 ++++++++++++ 12 files changed, 193 insertions(+), 13 deletions(-) create mode 100644 datasette/default_menu_links.py diff --git a/datasette/app.py b/datasette/app.py index 3016043a..fb5c34a4 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -750,11 +750,22 @@ class Datasette: ) extra_template_vars.update(extra_vars) + async def menu_links(): + links = [] + for hook in pm.hook.menu_links( + datasette=self, actor=request.actor if request else None + ): + extra_links = await await_me_maybe(hook) + if extra_links: + links.extend(extra_links) + return links + template_context = { **context, **{ "urls": self.urls, "actor": request.actor if request else None, + "menu_links": menu_links, "display_actor": display_actor, "show_logout": request is not None and "ds_actor" in request.cookies, "app_css_hash": self.app_css_hash(), @@ -1161,6 +1172,7 @@ class DatasetteRouter: info, urls=self.ds.urls, app_css_hash=self.ds.app_css_hash(), + menu_links=lambda: [], ) ), status=status, diff --git a/datasette/default_menu_links.py b/datasette/default_menu_links.py new file mode 100644 index 00000000..11374fb5 --- /dev/null +++ b/datasette/default_menu_links.py @@ -0,0 +1,40 @@ +from datasette import hookimpl + + +@hookimpl +def menu_links(datasette, actor): + if actor and actor.get("id") == "root": + return [ + {"href": datasette.urls.path("/-/databases"), "label": "Databases"}, + { + "href": datasette.urls.path("/-/plugins"), + "label": "Installed plugins", + }, + { + "href": datasette.urls.path("/-/versions"), + "label": "Version info", + }, + { + "href": datasette.urls.path("/-/metadata"), + "label": "Metadata", + }, + { + "href": datasette.urls.path("/-/config"), + "label": "Config", + }, + { + "href": datasette.urls.path("/-/permissions"), + "label": "Debug permissions", + }, + { + "href": 
datasette.urls.path("/-/messages"), + "label": "Debug messages", + }, + { + "href": datasette.urls.path("/-/allow-debug"), + "label": "Debug allow rules", + }, + {"href": datasette.urls.path("/-/threads"), "label": "Debug threads"}, + {"href": datasette.urls.path("/-/actor"), "label": "Debug actor"}, + {"href": datasette.urls.path("/-/patterns"), "label": "Pattern portfolio"}, + ] diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index f7e90e4e..7bad262a 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -97,3 +97,8 @@ def register_magic_parameters(datasette): @hookspec def forbidden(datasette, request, message): "Custom response for a 403 forbidden error" + + +@hookspec +def menu_links(datasette, actor): + "Links for the navigation menu" diff --git a/datasette/plugins.py b/datasette/plugins.py index 1c2f392f..50791988 100644 --- a/datasette/plugins.py +++ b/datasette/plugins.py @@ -13,6 +13,7 @@ DEFAULT_PLUGINS = ( "datasette.default_permissions", "datasette.default_magic_parameters", "datasette.blob_renderer", + "datasette.default_menu_links", ) pm = pluggy.PluginManager("datasette") diff --git a/datasette/static/app.css b/datasette/static/app.css index 8b462b35..2fd5371b 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -261,13 +261,13 @@ footer p { header .crumbs { float: left; } -header .logout { +header .actor { float: right; text-align: right; padding-left: 1rem; -} -header .logout form { - display: inline; + padding-right: 1rem; + position: relative; + top: -3px; } footer a:link, @@ -312,6 +312,29 @@ footer { margin-top: 1rem; } +/* Navigation menu */ +details.nav-menu > summary { + list-style: none; + display: inline; + float: right; + position: relative; +} +details.nav-menu > summary::-webkit-details-marker { + display: none; +} +details .nav-menu-inner { + position: absolute; + top: 2rem; + right: 10px; + width: 180px; + background-color: #276890; + padding: 1rem; + z-index: 1000; +} +.nav-menu-inner a { + display: block; +} + /* Components ============================================================== */ diff --git a/datasette/templates/base.html b/datasette/templates/base.html index 03de2115..ec1fd00e 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -13,15 +13,33 @@ {% block extra_head %}{% endblock %} -
@@ -41,6 +59,22 @@
{% block footer %}{% include "_footer.html" %}{% endblock %}
+ {% for body_script in body_scripts %} {% endfor %} diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index b2c62ccd..82bc56a9 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -989,3 +989,35 @@ The function can alternatively return an awaitable function if it needs to make return Response.html(await datasette.render_template("forbidden.html")) return inner + +.. _plugin_hook_menu_links: + +menu_links(datasette, actor) +---------------------------- + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + +``request`` - object + The current HTTP :ref:`internals_request`. + +This hook provides items to be included in the menu displayed by Datasette's top right menu icon. + +The hook should return a list of ``{"href": "...", "label": "..."}`` menu items. These will be added to the menu. + +It can alternatively return an ``async def`` awaitable function which returns a list of menu items. + +This example adds a new menu item but only if the signed in user is ``"root"``: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def menu_links(datasette, actor): + if actor and actor.get("id") == "root": + return [ + {"href": datasette.urls.path("/-/edit-schema"), "label": "Edit schema"}, + ] + +Using :ref:`internals_datasette_urls` here ensures that links in the menu will take the :ref:`config_base_url` setting into account. diff --git a/tests/fixtures.py b/tests/fixtures.py index 31638fc8..69853b7d 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -43,6 +43,7 @@ EXPECTED_PLUGINS = [ "extra_js_urls", "extra_template_vars", "forbidden", + "menu_links", "permission_allowed", "prepare_connection", "prepare_jinja2_environment", @@ -64,6 +65,7 @@ EXPECTED_PLUGINS = [ "canned_queries", "extra_js_urls", "extra_template_vars", + "menu_links", "permission_allowed", "render_cell", "startup", diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 0dd0ad26..7f8a4871 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -290,3 +290,9 @@ def forbidden(datasette, request, message): datasette._last_forbidden_message = message if request.path == "/data2": return Response.redirect("/login?message=" + message) + + +@hookimpl +def menu_links(datasette, actor): + if actor: + return [{"href": datasette.urls.instance(), "label": "Hello"}] diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index ae0f338a..981b24cc 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -146,3 +146,12 @@ def canned_queries(datasette, database): } return inner + + +@hookimpl(trylast=True) +def menu_links(datasette, actor): + async def inner(): + if actor: + return [{"href": datasette.urls.instance(), "label": "Hello 2"}] + + return inner diff --git a/tests/test_auth.py b/tests/test_auth.py index f244f268..34138aa6 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -101,7 +101,7 @@ def test_logout_button_in_navigation(app_client, path): ) anon_response = app_client.get(path) for fragment in ( - "test ·", + "test", '
', ): assert fragment in response.text @@ -112,5 +112,4 @@ def test_logout_button_in_navigation(app_client, path): def test_no_logout_button_in_navigation_if_no_ds_actor_cookie(app_client, path): response = app_client.get(path + "?_bot=1") assert "bot" in response.text - assert "bot ·" not in response.text assert '' not in response.text diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 08ed2e6b..191d943d 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -765,3 +765,20 @@ def test_hook_forbidden(restore_working_directory): assert 302 == response2.status assert "/login?message=view-database" == response2.headers["Location"] assert "view-database" == client.ds._last_forbidden_message + + +def test_hook_menu_links(app_client): + def get_menu_links(html): + soup = Soup(html, "html.parser") + return [ + {"label": a.text, "href": a["href"]} for a in soup.find("nav").select("a") + ] + + response = app_client.get("/") + assert get_menu_links(response.text) == [] + + response_2 = app_client.get("/?_bot=1") + assert get_menu_links(response_2.text) == [ + {"label": "Hello", "href": "/"}, + {"label": "Hello 2", "href": "/"}, + ] From 561c1d2d36a89675764e4410a2a127323402eaa3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Oct 2020 20:51:37 -0700 Subject: [PATCH 0247/1705] Show logout link if they are logged in AND have ds_actor cookie Otherwise an expired cookie will still cause the logout link to show. --- datasette/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index fb5c34a4..efe5a812 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -767,7 +767,7 @@ class Datasette: "actor": request.actor if request else None, "menu_links": menu_links, "display_actor": display_actor, - "show_logout": request is not None and "ds_actor" in request.cookies, + "show_logout": request is not None and "ds_actor" in request.cookies and request.actor, "app_css_hash": self.app_css_hash(), "zip": zip, "body_scripts": body_scripts, From 8a4639bc43a016a1f8fae6a07d5b5f7abe0074e8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Oct 2020 22:14:33 -0700 Subject: [PATCH 0248/1705] Applied Black --- datasette/app.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index efe5a812..8cff6577 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -767,7 +767,9 @@ class Datasette: "actor": request.actor if request else None, "menu_links": menu_links, "display_actor": display_actor, - "show_logout": request is not None and "ds_actor" in request.cookies and request.actor, + "show_logout": request is not None + and "ds_actor" in request.cookies + and request.actor, "app_css_hash": self.app_css_hash(), "zip": zip, "body_scripts": body_scripts, From 2f7731e9e5ff9b324beb5039fbe2be55d704a184 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Oct 2020 22:16:41 -0700 Subject: [PATCH 0249/1705] table_actions() plugin hook plus menu, closes #1066 Refs #690 --- datasette/hookspecs.py | 5 ++++ datasette/static/app.css | 34 ++++++++++++++++++++++++++- datasette/templates/base.html | 16 ++++++------- datasette/templates/table.html | 25 ++++++++++++++++++-- datasette/views/table.py | 16 +++++++++++++ docs/plugin_hooks.rst | 42 +++++++++++++++++++++++++++++++--- tests/fixtures.py | 2 ++ tests/plugins/my_plugin.py | 12 ++++++++++ tests/plugins/my_plugin_2.py | 9 ++++++++ tests/test_plugins.py | 19 +++++++++++++++ 10 files changed, 166 insertions(+), 14 deletions(-) diff 
--git a/datasette/hookspecs.py b/datasette/hookspecs.py index 7bad262a..78070e67 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -102,3 +102,8 @@ def forbidden(datasette, request, message): @hookspec def menu_links(datasette, actor): "Links for the navigation menu" + + +@hookspec +def table_actions(datasette, actor, database, table): + "Links for the table actions menu" diff --git a/datasette/static/app.css b/datasette/static/app.css index 2fd5371b..95457766 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -118,7 +118,7 @@ h6, .header3, .header4, .header5, -.header6 { +.header6 { font-weight: 700; font-size: 1rem; margin: 0; @@ -162,6 +162,29 @@ h6, text-decoration: underline; } +.page-header { + padding-left: 10px; + border-left: 10px solid #666; + margin-bottom: 0.75rem; + margin-top: 1rem; +} +.page-header h1 { + display: inline; + margin: 0; + font-size: 2rem; + padding-right: 0.2em; +} +.page-header details { + display: inline; +} +.page-header details > summary { + list-style: none; + display: inline; +} +.page-header details > summary::-webkit-details-marker { + display: none; +} + div, section, article, @@ -335,6 +358,15 @@ details .nav-menu-inner { display: block; } +/* Table actions menu */ +.table-menu-links { + position: relative; +} +.table-menu-links .dropdown-menu { + position: absolute; + top: 2rem; + right: 0; +} /* Components ============================================================== */ diff --git a/datasette/templates/base.html b/datasette/templates/base.html index ec1fd00e..d860df37 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -60,19 +60,19 @@
{% block footer %}{% include "_footer.html" %}{% endblock %}
{% for body_script in body_scripts %} diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 6c27beee..13f6a832 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -25,8 +25,29 @@ {% endblock %} {% block content %} - -

{{ metadata.title or table }}{% if is_view %} (view){% endif %}{% if private %} 🔒{% endif %}

+ {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} diff --git a/datasette/views/table.py b/datasette/views/table.py index 079e0b0a..65fe7f8b 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -7,6 +7,7 @@ import jinja2 from datasette.plugins import pm from datasette.database import QueryInterrupted from datasette.utils import ( + await_me_maybe, CustomRow, MultiParams, append_querystring, @@ -840,7 +841,21 @@ class TableView(RowTableShared): elif use_rowid: sort = "rowid" + async def table_actions(): + links = [] + for hook in pm.hook.table_actions( + datasette=self.ds, + table=table, + database=database, + actor=request.actor, + ): + extra_links = await await_me_maybe(hook) + if extra_links: + links.extend(extra_links) + return links + return { + "table_actions": table_actions, "supports_search": bool(fts_table), "search": search or "", "use_rowid": use_rowid, @@ -959,6 +974,7 @@ class RowView(RowTableShared): ) for column in display_columns: column["sortable"] = False + return { "foreign_key_tables": await self.foreign_key_tables( database, table, pk_values diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 82bc56a9..1c28c72e 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -998,10 +998,10 @@ menu_links(datasette, actor) ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. -``request`` - object - The current HTTP :ref:`internals_request`. +``actor`` - dictionary or None + The currently authenticated :ref:`actor `. -This hook provides items to be included in the menu displayed by Datasette's top right menu icon. +This hook allows additional items to be included in the menu displayed by Datasette's top right menu icon. The hook should return a list of ``{"href": "...", "label": "..."}`` menu items. These will be added to the menu. @@ -1021,3 +1021,39 @@ This example adds a new menu item but only if the signed in user is ``"root"``: ] Using :ref:`internals_datasette_urls` here ensures that links in the menu will take the :ref:`config_base_url` setting into account. + + +.. _plugin_hook_table_actions: + +table_actions(datasette, actor, database, table) +------------------------------------------------ + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + +``actor`` - dictionary or None + The currently authenticated :ref:`actor `. + +``database`` - string + The name of the database. + +``table`` - string + The name of the table. + +This hook allows table actions to be displayed in a menu accessed via an action icon at the top of the table page. It should return a list of ``{"href": "...", "label": "..."}`` menu items. + +It can alternatively return an ``async def`` awaitable function which returns a list of menu items. + +This example adds a new table action if the signed in user is ``"root"``: + +.. 
code-block:: python
+
+    from datasette import hookimpl
+
+    @hookimpl
+    def table_actions(datasette, actor, database, table):
+        if actor and actor.get("id") == "root":
+            return [{
+                "href": datasette.urls.path("/-/edit-schema/{}/{}".format(database, table)),
+                "label": "Edit schema for this table",
+            }]
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 69853b7d..2f8383ef 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -52,6 +52,7 @@ EXPECTED_PLUGINS = [
             "register_routes",
             "render_cell",
             "startup",
+            "table_actions",
         ],
     },
     {
@@ -69,6 +70,7 @@ EXPECTED_PLUGINS = [
             "permission_allowed",
             "render_cell",
             "startup",
+            "table_actions",
         ],
     },
     {
diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py
index 7f8a4871..8fc6a1b4 100644
--- a/tests/plugins/my_plugin.py
+++ b/tests/plugins/my_plugin.py
@@ -296,3 +296,15 @@ def menu_links(datasette, actor):
     if actor:
         return [{"href": datasette.urls.instance(), "label": "Hello"}]
+
+
+@hookimpl
+def table_actions(datasette, database, table, actor):
+    if actor:
+        return [
+            {
+                "href": datasette.urls.instance(),
+                "label": "Database: {}".format(database),
+            },
+            {"href": datasette.urls.instance(), "label": "Table: {}".format(table)},
+        ]
diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py
index 981b24cc..7d8095ed 100644
--- a/tests/plugins/my_plugin_2.py
+++ b/tests/plugins/my_plugin_2.py
@@ -155,3 +155,12 @@ def menu_links(datasette, actor):
             return [{"href": datasette.urls.instance(), "label": "Hello 2"}]
 
     return inner
+
+
+@hookimpl
+def table_actions(datasette, database, table, actor):
+    async def inner():
+        if actor:
+            return [{"href": datasette.urls.instance(), "label": "From async"}]
+
+    return inner
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 191d943d..be36a517 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -782,3 +782,22 @@ def test_hook_menu_links(app_client):
         {"label": "Hello", "href": "/"},
         {"label": "Hello 2", "href": "/"},
     ]
+
+
+def test_hook_table_actions(app_client):
+    def get_table_actions_links(html):
+        soup = Soup(html, "html.parser")
+        details = soup.find("details", {"class": "table-menu-links"})
+        if details is None:
+            return []
+        return [{"label": a.text, "href": a["href"]} for a in details.select("a")]
+
+    response = app_client.get("/fixtures/facetable")
+    assert get_table_actions_links(response.text) == []
+
+    response_2 = app_client.get("/fixtures/facetable?_bot=1")
+    assert get_table_actions_links(response_2.text) == [
+        {"label": "From async", "href": "/"},
+        {"label": "Database: fixtures", "href": "/"},
+        {"label": "Table: facetable", "href": "/"},
+    ]
From 0e1e89c6ba3d0fbdb0823272952cf356f3016def Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 29 Oct 2020 22:34:52 -0700
Subject: [PATCH 0250/1705] Release 0.51a1

Refs #1056, #1039, #998, #1045, #1033, #1036, #1034, #976, #1057, #1058, #1053, #1064, #1066
---
 datasette/version.py | 2 +-
 docs/changelog.rst   | 19 +++++++++++++++
 2 files changed, 20 insertions(+), 1 deletion(-)

diff --git a/datasette/version.py b/datasette/version.py
index b57d7a12..9a89c8e6 100644
--- a/datasette/version.py
+++ b/datasette/version.py
@@ -1,2 +1,2 @@
-__version__ = "0.51.a0"
+__version__ = "0.51a1"
 __version_info__ = tuple(__version__.split("."))
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 17309155..893a0ee5 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,25 @@
 Changelog
 =========
+..
_v0_51_a1: + +0.51a1 (2020-10-29) +------------------- + +- New colour scheme and improved visual design, courtesy of Natalie Downe. (`#1056 `__) +- scale-in animation for column action menu. (`#1039 `__) +- Wide tables now scroll horizontally. (`#998 `__) +- Option to pass a list of templates to ``.render_template()`` is now documented. (`#1045 `__) +- New ``datasette.urls.static_plugins()`` method. (`#1033 `__) +- ``BLOB`` column values can now be downloaded directly from the Datasette UI. (`#1036 `__) +- ``.csv`` exports now link to direct ``BLOB`` downloads. (`#1034 `__) +- ``datasette -o`` option now opens the most relevant page. (`#976 `__) +- ``datasette --cors`` option now enables access to ``/database.db`` downloads. (`#1057 `__) +- Database file downloads now implement cascading permissions, so you can download a database if you have ``view-database-download`` permission even if you do not have permission to access the Datasette instance. (`#1058 `__) +- New documentation on :ref:`writing_plugins_designing_urls`. (`#1053 `__) +- New navigation menu plus a :ref:`plugin_hook_menu_links` plugin hook to customize it. (`#1064 `__) +- :ref:`plugin_hook_table_actions` plugin hook for the new table actions menu. (`#1066 `__) + .. _v0_51_a0: 0.51a0 (2020-10-19) From 9f0987cb57a82a7d2fe0c679fc909e5b39593ee4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Oct 2020 22:55:10 -0700 Subject: [PATCH 0251/1705] cursor: pointer; on the new menu icons Refs #1064, #1066 --- datasette/static/app.css | 2 ++ 1 file changed, 2 insertions(+) diff --git a/datasette/static/app.css b/datasette/static/app.css index 95457766..a1eb2099 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -180,6 +180,7 @@ h6, .page-header details > summary { list-style: none; display: inline; + cursor: pointer; } .page-header details > summary::-webkit-details-marker { display: none; @@ -341,6 +342,7 @@ details.nav-menu > summary { display: inline; float: right; position: relative; + cursor: pointer; } details.nav-menu > summary::-webkit-details-marker { display: none; From 222f79bb4c6e2aa5426cc5ff25f1b2461e18a300 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 30 Oct 2020 08:41:57 -0700 Subject: [PATCH 0252/1705] debug-menu permission, closes #1068 Also added tests for navigation menu logic. 
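
The default links are now returned from an async inner function so that the new permission can be awaited. A plugin can gate its own menu items on the same permission - a minimal sketch, not part of this patch (the link target and label just echo entries from the default menu):

    from datasette import hookimpl


    @hookimpl
    def menu_links(datasette, actor):
        async def inner():
            # Only offer the link if the new debug-menu permission allows it
            if await datasette.permission_allowed(actor, "debug-menu"):
                return [
                    {"href": datasette.urls.path("/-/threads"), "label": "Debug threads"}
                ]
            return []

        return inner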
--- datasette/default_menu_links.py | 7 +++++- datasette/default_permissions.py | 2 +- datasette/views/special.py | 3 ++- docs/authentication.rst | 9 ++++++++ tests/test_html.py | 38 ++++++++++++++++++++++++++++++++ tests/test_permissions.py | 3 ++- 6 files changed, 58 insertions(+), 4 deletions(-) diff --git a/datasette/default_menu_links.py b/datasette/default_menu_links.py index 11374fb5..0b135410 100644 --- a/datasette/default_menu_links.py +++ b/datasette/default_menu_links.py @@ -3,7 +3,10 @@ from datasette import hookimpl @hookimpl def menu_links(datasette, actor): - if actor and actor.get("id") == "root": + async def inner(): + if not await datasette.permission_allowed(actor, "debug-menu"): + return [] + return [ {"href": datasette.urls.path("/-/databases"), "label": "Databases"}, { @@ -38,3 +41,5 @@ def menu_links(datasette, actor): {"href": datasette.urls.path("/-/actor"), "label": "Debug actor"}, {"href": datasette.urls.path("/-/patterns"), "label": "Pattern portfolio"}, ] + + return inner diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index ddd45940..9f1d9c62 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -5,7 +5,7 @@ from datasette.utils import actor_matches_allow @hookimpl(tryfirst=True) def permission_allowed(datasette, actor, action, resource): async def inner(): - if action == "permissions-debug": + if action in ("permissions-debug", "debug-menu"): if actor and actor.get("id") == "root": return True elif action == "view-instance": diff --git a/datasette/views/special.py b/datasette/views/special.py index a9fc59b7..397dbc8c 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -96,7 +96,8 @@ class PermissionsDebugView(BaseView): return await self.render( ["permissions_debug.html"], request, - {"permission_checks": reversed(self.ds._permission_checks)}, + # list() avoids error if check is performed during template render: + {"permission_checks": list(reversed(self.ds._permission_checks))}, ) diff --git a/docs/authentication.rst b/docs/authentication.rst index f6c5d801..62ed7e8b 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -522,3 +522,12 @@ permissions-debug Actor is allowed to view the ``/-/permissions`` debug page. Default *deny*. + +.. _permissions_debug_menu: + +debug-menu +---------- + +Controls if the various debug pages are displayed in the navigation menu. + +Default *deny*. 
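
Since debug-menu defaults to deny, only the root actor sees these links out of the box. A deployment that wants to widen access can grant the permission from a plugin - a minimal sketch, assuming a hypothetical actor id of "staff":

    from datasette import hookimpl


    @hookimpl
    def permission_allowed(actor, action):
        # Hypothetical: also grant the new debug-menu permission to "staff" actors
        if action == "debug-menu" and actor and actor.get("id") == "staff":
            return True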
diff --git a/tests/test_html.py b/tests/test_html.py
index 95b5128a..fed643a9 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -1507,3 +1507,41 @@ def test_edit_sql_link_not_shown_if_user_lacks_permission(permission_allowed):
         assert "Edit SQL" in response.text
     else:
         assert "Edit SQL" not in response.text
+
+
+@pytest.mark.parametrize(
+    "actor_id,should_have_links,should_not_have_links",
+    [
+        (None, None, None),
+        ("test", None, ["/-/permissions"]),
+        ("root", ["/-/permissions", "/-/allow-debug", "/-/metadata"], None),
+    ],
+)
+def test_navigation_menu_links(
+    app_client, actor_id, should_have_links, should_not_have_links
+):
+    cookies = {}
+    if actor_id:
+        cookies = {"ds_actor": app_client.actor_cookie({"id": actor_id})}
+    html = app_client.get("/", cookies=cookies).text
+    soup = Soup(html, "html.parser")
+    details = soup.find("nav").find("details")
+    if not actor_id:
+        # Should not show a menu
+        assert details is None
+        return
+    # They are logged in: should show a menu
+    assert details is not None
+    # And a logout form
+    assert details.find("form") is not None
+    if should_have_links:
+        for link in should_have_links:
+            assert (
+                details.find("a", {"href": link}) is not None
+            ), "{} expected but missing from nav menu".format(link)
+
+    if should_not_have_links:
+        for link in should_not_have_links:
+            assert (
+                details.find("a", {"href": link}) is None
+            ), "{} found but should not have been in nav menu".format(link)
diff --git a/tests/test_permissions.py b/tests/test_permissions.py
index 4d1b09b8..60883eef 100644
--- a/tests/test_permissions.py
+++ b/tests/test_permissions.py
@@ -310,10 +310,11 @@ def test_permissions_checked(app_client, path, permissions):
 
 def test_permissions_debug(app_client):
     app_client.ds._permission_checks.clear()
-    assert 403 == app_client.get("/-/permissions").status
+    assert app_client.get("/-/permissions").status == 403
     # With the cookie it should work
     cookie = app_client.actor_cookie({"id": "root"})
     response = app_client.get("/-/permissions", cookies={"ds_actor": cookie})
+    assert response.status == 200
     # Should show one failure and one success
     soup = Soup(response.body, "html.parser")
     check_divs = soup.findAll("div", {"class": "check"})
From fcf43589eb6a1f1d0432772a639fd35711c48e0c Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 30 Oct 2020 08:53:44 -0700
Subject: [PATCH 0253/1705] Link to homepage in nav on show-json page

---
 datasette/templates/show_json.html | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/datasette/templates/show_json.html b/datasette/templates/show_json.html
index b9e49eb2..fd88756f 100644
--- a/datasette/templates/show_json.html
+++ b/datasette/templates/show_json.html
@@ -4,6 +4,13 @@
 {% block body_class %}show-json{% endblock %}
 
+{% block nav %}
+

+ home +

+ {{ super() }} +{% endblock %} + {% block content %}

{{ filename }}
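
A quick way to exercise the new nav block, in the style of the existing test suite - a sketch, not part of this patch; /-/databases is one of the pages rendered with the show_json.html template:

    def test_show_json_nav_links_home(app_client):
        # The nav block should now include a link back to the instance homepage
        response = app_client.get("/-/databases")
        assert response.status == 200
        assert "home" in response.text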

From 81dea4b07ab2b6f4eaaf248307d2b588472054a1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 30 Oct 2020 10:47:18 -0700 Subject: [PATCH 0254/1705] load_template() plugin hook Closes #1042 --- datasette/app.py | 34 ++++++++++++++++++++++++++++++++-- datasette/hookspecs.py | 5 +++++ datasette/templates/base.html | 6 +++++- datasette/views/base.py | 10 +--------- docs/plugin_hooks.rst | 18 ++++++++++++++++++ tests/fixtures.py | 1 + tests/plugins/my_plugin.py | 6 ++++++ tests/test_plugins.py | 5 +++++ 8 files changed, 73 insertions(+), 12 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 8cff6577..4b28e715 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -21,7 +21,7 @@ from pathlib import Path from markupsafe import Markup from itsdangerous import URLSafeSerializer import jinja2 -from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader, escape +from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader from jinja2.environment import Template from jinja2.exceptions import TemplateNotFound import uvicorn @@ -713,12 +713,41 @@ class Datasette: self, templates, context=None, request=None, view_name=None ): context = context or {} + templates_considered = [] if isinstance(templates, Template): template = templates else: if isinstance(templates, str): templates = [templates] - template = self.jinja_env.select_template(templates) + + # Give plugins first chance at loading the template + break_outer = False + plugin_template_source = None + plugin_template_name = None + template_name = None + for template_name in templates: + if break_outer: + break + plugin_template_source = pm.hook.load_template( + template=template_name, + request=request, + datasette=self, + ) + plugin_template_source = await await_me_maybe(plugin_template_source) + if plugin_template_source: + break_outer = True + plugin_template_name = template_name + break + if plugin_template_source is not None: + template = self.jinja_env.from_string(plugin_template_source) + else: + template = self.jinja_env.select_template(templates) + for template_name in templates: + from_plugin = template_name == plugin_template_name + used = from_plugin or template_name == template.name + templates_considered.append( + {"name": template_name, "used": used, "from_plugin": from_plugin} + ) body_scripts = [] # pylint: disable=no-member for extra_script in pm.hook.extra_body_script( @@ -783,6 +812,7 @@ class Datasette: ), "base_url": self.config("base_url"), "csrftoken": request.scope["csrftoken"] if request else lambda: "", + "templates_considered": templates_considered, }, **extra_template_vars, } diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 78070e67..ca84b355 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -49,6 +49,11 @@ def extra_template_vars( "Extra template variables to be made available to the template - can return dict or callable or awaitable" +@hookspec(firstresult=True) +def load_template(template, request, datasette): + "Load the specified template, returning the template code as a string" + + @hookspec def publish_subcommand(publish): "Subcommands for 'datasette publish'" diff --git a/datasette/templates/base.html b/datasette/templates/base.html index d860df37..e29c2ea5 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -79,6 +79,10 @@ document.body.addEventListener('click', (ev) => { {% endfor %} -{% if select_templates %}{% endif %} +{% if templates_considered %} + +{% endif %} diff --git 
a/datasette/views/base.py b/datasette/views/base.py index 6ca78934..ed2631c5 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -8,7 +8,6 @@ import urllib import pint from datasette import __version__ -from datasette.plugins import pm from datasette.database import QueryInterrupted from datasette.utils import ( await_me_maybe, @@ -119,22 +118,15 @@ class BaseView: async def render(self, templates, request, context=None): context = context or {} - template = self.ds.jinja_env.select_template(templates) template_context = { **context, **{ "database_color": self.database_color, - "select_templates": [ - "{}{}".format( - "*" if template_name == template.name else "", template_name - ) - for template_name in templates - ], }, } return Response.html( await self.ds.render_template( - template, template_context, request=request, view_name=self.name + templates, template_context, request=request, view_name=self.name ) ) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 1c28c72e..3c57b6a8 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -271,6 +271,24 @@ You can also return an awaitable function that returns a string. Example: `datasette-cluster-map `_ +.. _plugin_hook_load_template: + +load_template(template, request, datasette) +------------------------------------------- + +``template`` - string + The template that is being rendered, e.g. ``database.html`` + +``request`` - object or None + The current HTTP :ref:`internals_request`. This can be ``None`` if the request object is not available. + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` + +Load the source code for a template from a custom location. Hooks should return a string, or ``None`` if the template is not found. + +Datasette will fall back to serving templates from files on disk if the requested template cannot be loaded by any plugins. + .. _plugin_hook_publish_subcommand: publish_subcommand(publish) diff --git a/tests/fixtures.py b/tests/fixtures.py index 2f8383ef..9f3052b7 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -43,6 +43,7 @@ EXPECTED_PLUGINS = [ "extra_js_urls", "extra_template_vars", "forbidden", + "load_template", "menu_links", "permission_allowed", "prepare_connection", diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 8fc6a1b4..9dbb3f40 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -308,3 +308,9 @@ def table_actions(datasette, database, table, actor): }, {"href": datasette.urls.instance(), "label": "Table: {}".format(table)}, ] + + +@hookimpl +def load_template(template, request): + if template == "show_json.html" and request.args.get("_special"): + return "

Special show_json: {{ filename }}

" diff --git a/tests/test_plugins.py b/tests/test_plugins.py index be36a517..f8888798 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -801,3 +801,8 @@ def test_hook_table_actions(app_client): {"label": "Database: fixtures", "href": "/"}, {"label": "Table: facetable", "href": "/"}, ] + + +def test_hook_load_template(app_client): + response = app_client.get("/-/databases?_special=1") + assert response.text == "

Special show_json: databases.json
"
From a7d9e24ece665eef7c6dfc5f32855c98bd45d335 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 30 Oct 2020 10:52:45 -0700
Subject: [PATCH 0255/1705] Update release process with explicit version, refs #1054

---
 docs/contributing.rst | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/docs/contributing.rst b/docs/contributing.rst
index 39d4c3a2..375f6b89 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -159,12 +159,12 @@ We increment ``patch`` for bugfix releases.
 
 :ref:`contributing_alpha_beta` may have an additional ``a0`` or ``b0`` prefix - the integer component will be incremented with each subsequent alpha or beta.
 
-To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__::
+To release a new version, first create a commit that updates the version number in ``datasette/version.py`` and :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__::
 
     # Update changelog
-    git commit -m "Release notes for 0.43
-
-    Refs #581, #770, #729, #706, #751, #706, #744, #771, #773" -a
+    git commit -m " Release 0.51a1
+
+    Refs #1056, #1039, #998, #1045, #1033, #1036, #1034, #976, #1057, #1058, #1053, #1064, #1066" -a
     git push
 
 Referencing the issues that are part of the release in the commit message ensures the name of the release shows up on those issue pages, e.g. `here `__.
From 0cb29498c796267c5e4a5545ede8058b7ca03a94 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 30 Oct 2020 10:54:47 -0700
Subject: [PATCH 0256/1705] Fixed bug with python tests/fixtures.py

https://github.com/simonw/datasette/runs/1333357885?check_suite_focus=true
---
 datasette/views/base.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/datasette/views/base.py b/datasette/views/base.py
index ed2631c5..813ee452 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -8,6 +8,7 @@ import urllib
 import pint
 
 from datasette import __version__
+from datasette.plugins import pm
 from datasette.database import QueryInterrupted
 from datasette.utils import (
     await_me_maybe,
From 59ab24af6bd9b517b53162fbffac1d0116100e0d Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 30 Oct 2020 10:56:02 -0700
Subject: [PATCH 0257/1705] Release 0.51a2

Refs #1068, #1042, #1054
---
 datasette/version.py | 2 +-
 docs/changelog.rst   | 8 ++++++++
 2 files changed, 9 insertions(+), 1 deletion(-)

diff --git a/datasette/version.py b/datasette/version.py
index 9a89c8e6..2f4bc37e 100644
--- a/datasette/version.py
+++ b/datasette/version.py
@@ -1,2 +1,2 @@
-__version__ = "0.51a1"
+__version__ = "0.51a2"
 __version_info__ = tuple(__version__.split("."))
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 893a0ee5..262400c8 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,14 @@
 Changelog
 =========
 
+.. _v0_51_a2:
+
+0.51a2 (2020-10-30)
+-------------------
+
+- New :ref:`plugin_hook_load_template` plugin hook. (`#1042 `__)
+- New :ref:`permissions_debug_menu` permission. (`#1068 `__)
+
 ..
_v0_51_a1: 0.51a1 (2020-10-29) From 393f1b49d70e9f58bc193c6a28afff4ec9459a2e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 30 Oct 2020 13:12:01 -0700 Subject: [PATCH 0258/1705] Updated nav in pattern portfolio --- datasette/templates/patterns.html | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html index ac9e2e46..62ef1322 100644 --- a/datasette/templates/patterns.html +++ b/datasette/templates/patterns.html @@ -38,11 +38,8 @@ fixtures / attraction_characteristic

-
- testuser · - - - +
+ testuser
From a2a709072059c6b3da365df9a332ca744c2079e9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 30 Oct 2020 13:12:57 -0700 Subject: [PATCH 0259/1705] Display messages in right place, closes #1071 --- datasette/templates/base.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/templates/base.html b/datasette/templates/base.html index e29c2ea5..7e9c6c05 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -44,7 +44,6 @@ {% endif %} {% endblock %} -
{% block messages %} {% if show_messages %} {% for message, message_type in show_messages() %} @@ -53,6 +52,7 @@ {% endif %} {% endblock %} +
{% block content %} {% endblock %}
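
The relocated messages block is fed by Datasette's message mechanism. A plugin view can queue one like this - a sketch modelled on the test plugin's /add-message/ route that appears later in this series, assuming the datasette.add_message() API; the /greet path is made up:

    from datasette import hookimpl
    from datasette.utils.asgi import Response


    @hookimpl
    def register_routes():
        async def greet(datasette, request):
            # Queue a message; base.html renders it via show_messages()
            datasette.add_message(request, "Hello from a plugin!")
            return Response.redirect("/")

        return [(r"/greet$", greet)]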
From f0a740ac21cba11ded8717f49d664f9549cd2f83 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 09:21:22 -0700 Subject: [PATCH 0260/1705] Remove load_plugin hook - closes #1073 Refs #1042 This reverts commit 81dea4b07ab2b6f4eaaf248307d2b588472054a1. --- datasette/app.py | 34 ++-------------------------------- datasette/hookspecs.py | 5 ----- datasette/templates/base.html | 6 +----- datasette/views/base.py | 9 ++++++++- docs/plugin_hooks.rst | 18 ------------------ tests/fixtures.py | 1 - tests/plugins/my_plugin.py | 6 ------ tests/test_plugins.py | 5 ----- 8 files changed, 11 insertions(+), 73 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 4b28e715..8cff6577 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -21,7 +21,7 @@ from pathlib import Path from markupsafe import Markup from itsdangerous import URLSafeSerializer import jinja2 -from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader +from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader, escape from jinja2.environment import Template from jinja2.exceptions import TemplateNotFound import uvicorn @@ -713,41 +713,12 @@ class Datasette: self, templates, context=None, request=None, view_name=None ): context = context or {} - templates_considered = [] if isinstance(templates, Template): template = templates else: if isinstance(templates, str): templates = [templates] - - # Give plugins first chance at loading the template - break_outer = False - plugin_template_source = None - plugin_template_name = None - template_name = None - for template_name in templates: - if break_outer: - break - plugin_template_source = pm.hook.load_template( - template=template_name, - request=request, - datasette=self, - ) - plugin_template_source = await await_me_maybe(plugin_template_source) - if plugin_template_source: - break_outer = True - plugin_template_name = template_name - break - if plugin_template_source is not None: - template = self.jinja_env.from_string(plugin_template_source) - else: - template = self.jinja_env.select_template(templates) - for template_name in templates: - from_plugin = template_name == plugin_template_name - used = from_plugin or template_name == template.name - templates_considered.append( - {"name": template_name, "used": used, "from_plugin": from_plugin} - ) + template = self.jinja_env.select_template(templates) body_scripts = [] # pylint: disable=no-member for extra_script in pm.hook.extra_body_script( @@ -812,7 +783,6 @@ class Datasette: ), "base_url": self.config("base_url"), "csrftoken": request.scope["csrftoken"] if request else lambda: "", - "templates_considered": templates_considered, }, **extra_template_vars, } diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index ca84b355..78070e67 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -49,11 +49,6 @@ def extra_template_vars( "Extra template variables to be made available to the template - can return dict or callable or awaitable" -@hookspec(firstresult=True) -def load_template(template, request, datasette): - "Load the specified template, returning the template code as a string" - - @hookspec def publish_subcommand(publish): "Subcommands for 'datasette publish'" diff --git a/datasette/templates/base.html b/datasette/templates/base.html index 7e9c6c05..611ba9f6 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -79,10 +79,6 @@ document.body.addEventListener('click', (ev) => { {% endfor %} -{% if templates_considered %} - 
-{% endif %} +{% if select_templates %}{% endif %} diff --git a/datasette/views/base.py b/datasette/views/base.py index 813ee452..6ca78934 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -119,15 +119,22 @@ class BaseView: async def render(self, templates, request, context=None): context = context or {} + template = self.ds.jinja_env.select_template(templates) template_context = { **context, **{ "database_color": self.database_color, + "select_templates": [ + "{}{}".format( + "*" if template_name == template.name else "", template_name + ) + for template_name in templates + ], }, } return Response.html( await self.ds.render_template( - templates, template_context, request=request, view_name=self.name + template, template_context, request=request, view_name=self.name ) ) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 3c57b6a8..1c28c72e 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -271,24 +271,6 @@ You can also return an awaitable function that returns a string. Example: `datasette-cluster-map `_ -.. _plugin_hook_load_template: - -load_template(template, request, datasette) -------------------------------------------- - -``template`` - string - The template that is being rendered, e.g. ``database.html`` - -``request`` - object or None - The current HTTP :ref:`internals_request`. This can be ``None`` if the request object is not available. - -``datasette`` - :ref:`internals_datasette` - You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` - -Load the source code for a template from a custom location. Hooks should return a string, or ``None`` if the template is not found. - -Datasette will fall back to serving templates from files on disk if the requested template cannot be loaded by any plugins. - .. _plugin_hook_publish_subcommand: publish_subcommand(publish) diff --git a/tests/fixtures.py b/tests/fixtures.py index 9f3052b7..2f8383ef 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -43,7 +43,6 @@ EXPECTED_PLUGINS = [ "extra_js_urls", "extra_template_vars", "forbidden", - "load_template", "menu_links", "permission_allowed", "prepare_connection", diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 9dbb3f40..8fc6a1b4 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -308,9 +308,3 @@ def table_actions(datasette, database, table, actor): }, {"href": datasette.urls.instance(), "label": "Table: {}".format(table)}, ] - - -@hookimpl -def load_template(template, request): - if template == "show_json.html" and request.args.get("_special"): - return "

Special show_json: {{ filename }}

" diff --git a/tests/test_plugins.py b/tests/test_plugins.py index f8888798..be36a517 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -801,8 +801,3 @@ def test_hook_table_actions(app_client): {"label": "Database: fixtures", "href": "/"}, {"label": "Table: facetable", "href": "/"}, ] - - -def test_hook_load_template(app_client): - response = app_client.get("/-/databases?_special=1") - assert response.text == "

Special show_json: databases.json

" From d6db47f5c19f77e735279762d99720dc644bff48 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 10:25:32 -0700 Subject: [PATCH 0261/1705] Deploy demo plugins to latest.datasette.io, refs #1074 --- .github/workflows/deploy-latest.yml | 3 ++- tests/fixtures.py | 2 +- tests/plugins/my_plugin.py | 23 ++++++++++++++++++++++- tests/test_html.py | 8 +++++--- 4 files changed, 30 insertions(+), 6 deletions(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 8445f1d8..73b97a19 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -31,7 +31,7 @@ jobs: - name: Run tests run: pytest - name: Build fixtures.db - run: python tests/fixtures.py fixtures.db fixtures.json + run: python tests/fixtures.py fixtures.db fixtures.json plugins - name: Build docs.db run: |- cd docs @@ -50,6 +50,7 @@ jobs: gcloud config set project datasette-222320 datasette publish cloudrun fixtures.db \ -m fixtures.json \ + --plugins-dir=plugins \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--config template_debug:1" \ diff --git a/tests/fixtures.py b/tests/fixtures.py index 2f8383ef..5cbfc72f 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -267,7 +267,7 @@ def generate_sortable_rows(num): METADATA = { "title": "Datasette Fixtures", - "description": "An example SQLite database demonstrating Datasette", + "description_html": 'An example SQLite database demonstrating Datasette. Sign in as root user', "license": "Apache License 2.0", "license_url": "https://github.com/simonw/datasette/blob/master/LICENSE", "source": "tests/fixtures.py", diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 8fc6a1b4..b487cdf0 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -177,7 +177,7 @@ def actor_from_request(datasette, request): def asgi_wrapper(): def wrap(app): async def maybe_set_actor_in_scope(scope, recieve, send): - if b"_actor_in_scope" in scope["query_string"]: + if b"_actor_in_scope" in scope.get("query_string", b""): scope = dict(scope, actor={"id": "from-scope"}) print(scope) await app(scope, recieve, send) @@ -237,12 +237,33 @@ def register_routes(): await datasette.render_template("render_message.html", request=request) ) + def login_as_root(datasette, request): + # Mainly for the latest.datasette.io demo + if request.method == "POST": + response = Response.redirect("/") + response.set_cookie( + "ds_actor", datasette.sign({"a": {"id": "root"}}, "actor") + ) + return response + return Response.html( + """ +
+

+ +

+ + """.format( + request.path, request.scope["csrftoken"]() + ) + ) + return [ (r"/one/$", one), (r"/two/(?P.*)$", two), (r"/three/$", three), (r"/post/$", post), (r"/csrftoken-form/$", csrftoken_form), + (r"/login-as-root$", login_as_root), (r"/not-async/$", not_async), (r"/add-message/$", add_message), (r"/render-message/$", render_message), diff --git a/tests/test_html.py b/tests/test_html.py index fed643a9..7c068085 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -23,7 +23,7 @@ def test_homepage(app_client_two_attached_databases): soup = Soup(response.body, "html.parser") assert "Datasette Fixtures" == soup.find("h1").text assert ( - "An example SQLite database demonstrating Datasette" + "An example SQLite database demonstrating Datasette. Sign in as root user" == soup.select(".metadata-description")[0].text.strip() ) # Should be two attached databases @@ -949,8 +949,9 @@ def test_index_metadata(app_client): assert response.status == 200 soup = Soup(response.body, "html.parser") assert "Datasette Fixtures" == soup.find("h1").text - assert "An example SQLite database demonstrating Datasette" == inner_html( - soup.find("div", {"class": "metadata-description"}) + assert ( + 'An example SQLite database demonstrating Datasette. Sign in as root user' + == inner_html(soup.find("div", {"class": "metadata-description"})) ) assert_footer_links(soup) @@ -1451,6 +1452,7 @@ def test_base_url_config(app_client_base_url_prefix, path): "https://github.com/simonw/datasette", "https://github.com/simonw/datasette/blob/master/LICENSE", "https://github.com/simonw/datasette/blob/master/tests/fixtures.py", + "/login-as-root", # Only used for the latest.datasette.io demo } and not href.startswith("https://plugin-example.com/") ): From b84cfe1b08ec3a881767e30122b7d4c0fa03f9e4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 10:40:09 -0700 Subject: [PATCH 0262/1705] Confirm table actions work on views, closes #1067 --- tests/test_plugins.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index be36a517..6a4ea60a 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -784,7 +784,8 @@ def test_hook_menu_links(app_client): ] -def test_hook_table_actions(app_client): +@pytest.mark.parametrize("table_or_view", ["facetable", "simple_view"]) +def test_hook_table_actions(app_client, table_or_view): def get_table_actions_links(html): soup = Soup(html, "html.parser") details = soup.find("details", {"class": "table-menu-links"}) @@ -792,12 +793,12 @@ def test_hook_table_actions(app_client): return [] return [{"label": a.text, "href": a["href"]} for a in details.select("a")] - response = app_client.get("/fixtures/facetable") + response = app_client.get("/fixtures/{}".format(table_or_view)) assert get_table_actions_links(response.text) == [] - response_2 = app_client.get("/fixtures/facetable?_bot=1") + response_2 = app_client.get("/fixtures/{}?_bot=1".format(table_or_view)) assert get_table_actions_links(response_2.text) == [ {"label": "From async", "href": "/"}, {"label": "Database: fixtures", "href": "/"}, - {"label": "Table: facetable", "href": "/"}, + {"label": "Table: {}".format(table_or_view), "href": "/"}, ] From 11eb1e026f3d84cb771f8d6e204939cbaee130cd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 11:16:28 -0700 Subject: [PATCH 0263/1705] datasette.urls.table(..., format="json"), closes #1035 Also improved tests for datasette.urls and added format= to some other methods --- 
datasette/app.py | 42 +++++++++++++++++++++++++----------- datasette/utils/__init__.py | 10 +++++---- datasette/views/base.py | 10 +++++---- datasette/views/database.py | 4 ++-- docs/internals.rst | 14 ++++++------ tests/test_internals_urls.py | 42 +++++++++++++++++++++++++++++------- tests/test_utils.py | 10 ++++++--- 7 files changed, 92 insertions(+), 40 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 8cff6577..3a06d911 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -53,6 +53,7 @@ from .utils import ( format_bytes, module_from_path, parse_metadata, + path_with_format, resolve_env_secrets, sqlite3, to_css_class, @@ -1285,13 +1286,16 @@ class Urls: def __init__(self, ds): self.ds = ds - def path(self, path): + def path(self, path, format=None): if path.startswith("/"): path = path[1:] - return self.ds.config("base_url") + path + path = self.ds.config("base_url") + path + if format is not None: + path = path_with_format(path=path, format=format) + return path - def instance(self): - return self.path("") + def instance(self, format=None): + return self.path("", format=format) def static(self, path): return self.path("-/static/{}".format(path)) @@ -1302,21 +1306,33 @@ class Urls: def logout(self): return self.path("-/logout") - def database(self, database): + def database(self, database, format=None): db = self.ds.databases[database] if self.ds.config("hash_urls") and db.hash: - return self.path("{}-{}".format(database, db.hash[:HASH_LENGTH])) + path = self.path( + "{}-{}".format(database, db.hash[:HASH_LENGTH]), format=format + ) else: - return self.path(database) + path = self.path(database, format=format) + return path - def table(self, database, table): - return "{}/{}".format(self.database(database), urllib.parse.quote_plus(table)) + def table(self, database, table, format=None): + path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(table)) + if format is not None: + path = path_with_format(path=path, format=format) + return path - def query(self, database, query): - return "{}/{}".format(self.database(database), urllib.parse.quote_plus(query)) + def query(self, database, query, format=None): + path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(query)) + if format is not None: + path = path_with_format(path=path, format=format) + return path - def row(self, database, table, row_path): - return "{}/{}".format(self.table(database, table), row_path) + def row(self, database, table, row_path, format=None): + path = "{}/{}".format(self.table(database, table), row_path) + if format is not None: + path = path_with_format(path=path, format=format) + return path def row_blob(self, database, table, row_path, column): return self.table(database, table) + "/{}.blob?_blob_column={}".format( diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 33decbfc..bf361784 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -678,9 +678,11 @@ async def resolve_table_and_format( return table_and_format, None -def path_with_format(request, format, extra_qs=None, replace_format=None): +def path_with_format( + *, request=None, path=None, format=None, extra_qs=None, replace_format=None +): qs = extra_qs or {} - path = request.path + path = request.path if request else path if replace_format and path.endswith(".{}".format(replace_format)): path = path[: -(1 + len(replace_format))] if "." 
in path: @@ -689,11 +691,11 @@ def path_with_format(request, format, extra_qs=None, replace_format=None): path = "{}.{}".format(path, format) if qs: extra = urllib.parse.urlencode(sorted(qs.items())) - if request.query_string: + if request and request.query_string: path = "{}?{}&{}".format(path, request.query_string, extra) else: path = "{}?{}".format(path, extra) - elif request.query_string: + elif request and request.query_string: path = "{}?{}".format(path, request.query_string) return path diff --git a/datasette/views/base.py b/datasette/views/base.py index 6ca78934..430489c1 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -333,8 +333,8 @@ class DataView(BaseView): cell = self.ds.absolute_url( request, path_with_format( - request, - "blob", + request=request, + format="blob", extra_qs={ "_blob_column": column, "_blob_hash": hashlib.sha256( @@ -535,11 +535,13 @@ class DataView(BaseView): it_can_render = await await_me_maybe(it_can_render) if it_can_render: renderers[key] = path_with_format( - request, key, {**url_labels_extra} + request=request, format=key, extra_qs={**url_labels_extra} ) url_csv_args = {"_size": "max", **url_labels_extra} - url_csv = path_with_format(request, "csv", url_csv_args) + url_csv = path_with_format( + request=request, format="csv", extra_qs=url_csv_args + ) url_csv_path = url_csv.split("?")[0] context = { **data, diff --git a/datasette/views/database.py b/datasette/views/database.py index 8b9e8833..3ed60f4e 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -346,8 +346,8 @@ class QueryView(DataView): ) elif isinstance(display_value, bytes): blob_url = path_with_format( - request, - "blob", + request=request, + format="blob", extra_qs={ "_blob_column": column, "_blob_hash": hashlib.sha256( diff --git a/docs/internals.rst b/docs/internals.rst index 4ebeb983..ee7fe6e4 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -396,10 +396,10 @@ datasette.urls The ``datasette.urls`` object contains methods for building URLs to pages within Datasette. Plugins should use this to link to pages, since these methods take into account any :ref:`config_base_url` configuration setting that might be in effect. -``datasette.urls.instance()`` - Returns the URL to the Datasette instance root page. This is usually ``"/"`` +``datasette.urls.instance(format=None)`` + Returns the URL to the Datasette instance root page. This is usually ``"/"``. -``datasette.urls.path(path)`` +``datasette.urls.path(path, format=None)`` Takes a path and returns the full path, taking ``base_url`` into account. 
For example, ``datasette.urls.path("-/logout")`` will return the path to the logout page, which will be ``"/-/logout"`` by default or ``/prefix-path/-/logout`` if ``base_url`` is set to ``/prefix-path/`` @@ -423,13 +423,13 @@ The ``datasette.urls`` object contains methods for building URLs to pages within ``datasette.url.static_plugins("datasette_cluster_map", "datasette-cluster-map.js")`` would return ``"/-/static-plugins/datasette_cluster_map/datasette-cluster-map.js"`` -``datasette.urls.database(database_name)`` +``datasette.urls.database(database_name, format=None)`` Returns the URL to a database page, for example ``"/fixtures"`` -``datasette.urls.table(database_name, table_name)`` +``datasette.urls.table(database_name, table_name, format=None)`` Returns the URL to a table page, for example ``"/fixtures/facetable"`` -``datasette.urls.query(database_name, query_name)`` +``datasette.urls.query(database_name, query_name, format=None)`` Returns the URL to a query page, for example ``"/fixtures/pragma_cache_size"`` These functions can be accessed via the ``{{ urls }}`` object in Datasette templates, for example: @@ -441,6 +441,8 @@ These functions can be accessed via the ``{{ urls }}`` object in Datasette templ facetable table pragma_cache_size query +Use the ``format="json"`` (or ``"csv"`` or other formats supported by plugins) arguments to get back URLs to the JSON representation. This is usually the path with ``.json`` added on the end, but it may use ``?_format=json`` in cases where the path already includes ``.json``, for example a URL to a table named ``table.json``. + .. _internals_database: Database class diff --git a/tests/test_internals_urls.py b/tests/test_internals_urls.py index 6498ee43..005903df 100644 --- a/tests/test_internals_urls.py +++ b/tests/test_internals_urls.py @@ -82,18 +82,44 @@ def test_logout(ds, base_url, expected): @pytest.mark.parametrize( - "base_url,expected", + "base_url,format,expected", [ - ("/", "/:memory:"), - ("/prefix/", "/prefix/:memory:"), + ("/", None, "/:memory:"), + ("/prefix/", None, "/prefix/:memory:"), + ("/", "json", "/:memory:.json"), ], ) -def test_database(ds, base_url, expected): +def test_database(ds, base_url, format, expected): ds._config["base_url"] = base_url - assert ds.urls.database(":memory:") == expected - # Do table and query while we are here - assert ds.urls.table(":memory:", "name") == expected + "/name" - assert ds.urls.query(":memory:", "name") == expected + "/name" + assert ds.urls.database(":memory:", format=format) == expected + + +@pytest.mark.parametrize( + "base_url,name,format,expected", + [ + ("/", "name", None, "/:memory:/name"), + ("/prefix/", "name", None, "/prefix/:memory:/name"), + ("/", "name", "json", "/:memory:/name.json"), + ("/", "name.json", "json", "/:memory:/name.json?_format=json"), + ], +) +def test_table_and_query(ds, base_url, name, format, expected): + ds._config["base_url"] = base_url + assert ds.urls.table(":memory:", name, format=format) == expected + assert ds.urls.query(":memory:", name, format=format) == expected + + +@pytest.mark.parametrize( + "base_url,format,expected", + [ + ("/", None, "/:memory:/facetable/1"), + ("/prefix/", None, "/prefix/:memory:/facetable/1"), + ("/", "json", "/:memory:/facetable/1.json"), + ], +) +def test_row(ds, base_url, format, expected): + ds._config["base_url"] = base_url + assert ds.urls.row(":memory:", "facetable", "1", format=format) == expected @pytest.mark.parametrize("base_url", ["/", "/prefix/"]) diff --git a/tests/test_utils.py b/tests/test_utils.py 
index bae3b685..2d2ff52d 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -382,15 +382,19 @@ def test_table_columns(): ) def test_path_with_format(path, format, extra_qs, expected): request = Request.fake(path) - actual = utils.path_with_format(request, format, extra_qs) + actual = utils.path_with_format(request=request, format=format, extra_qs=extra_qs) assert expected == actual def test_path_with_format_replace_format(): request = Request.fake("/foo/bar.csv") - assert utils.path_with_format(request, "blob") == "/foo/bar.csv?_format=blob" assert ( - utils.path_with_format(request, "blob", replace_format="csv") == "/foo/bar.blob" + utils.path_with_format(request=request, format="blob") + == "/foo/bar.csv?_format=blob" + ) + assert ( + utils.path_with_format(request=request, format="blob", replace_format="csv") + == "/foo/bar.blob" ) From c1d386ef67786f07d69e566b8e054e92949a844f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 11:43:36 -0700 Subject: [PATCH 0264/1705] Refactor Urls into url_builder.py Refs #1026 --- datasette/app.py | 60 +--------------------------------------- datasette/url_builder.py | 60 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+), 59 deletions(-) create mode 100644 datasette/url_builder.py diff --git a/datasette/app.py b/datasette/app.py index 3a06d911..860f4563 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -40,6 +40,7 @@ from .views.special import ( ) from .views.table import RowView, TableView from .renderer import json_renderer +from .url_builder import Urls from .database import Database, QueryInterrupted from .utils import ( @@ -53,7 +54,6 @@ from .utils import ( format_bytes, module_from_path, parse_metadata, - path_with_format, resolve_env_secrets, sqlite3, to_css_class, @@ -1280,61 +1280,3 @@ class DatasetteClient: async def request(self, method, path, **kwargs): async with httpx.AsyncClient(app=self.app) as client: return await client.request(method, self._fix(path), **kwargs) - - -class Urls: - def __init__(self, ds): - self.ds = ds - - def path(self, path, format=None): - if path.startswith("/"): - path = path[1:] - path = self.ds.config("base_url") + path - if format is not None: - path = path_with_format(path=path, format=format) - return path - - def instance(self, format=None): - return self.path("", format=format) - - def static(self, path): - return self.path("-/static/{}".format(path)) - - def static_plugins(self, plugin, path): - return self.path("-/static-plugins/{}/{}".format(plugin, path)) - - def logout(self): - return self.path("-/logout") - - def database(self, database, format=None): - db = self.ds.databases[database] - if self.ds.config("hash_urls") and db.hash: - path = self.path( - "{}-{}".format(database, db.hash[:HASH_LENGTH]), format=format - ) - else: - path = self.path(database, format=format) - return path - - def table(self, database, table, format=None): - path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(table)) - if format is not None: - path = path_with_format(path=path, format=format) - return path - - def query(self, database, query, format=None): - path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(query)) - if format is not None: - path = path_with_format(path=path, format=format) - return path - - def row(self, database, table, row_path, format=None): - path = "{}/{}".format(self.table(database, table), row_path) - if format is not None: - path = path_with_format(path=path, format=format) - return path - - def 
row_blob(self, database, table, row_path, column): - return self.table(database, table) + "/{}.blob?_blob_column={}".format( - row_path, urllib.parse.quote_plus(column) - ) diff --git a/datasette/url_builder.py b/datasette/url_builder.py new file mode 100644 index 00000000..c1bf629b --- /dev/null +++ b/datasette/url_builder.py @@ -0,0 +1,60 @@ +from .utils import path_with_format, HASH_LENGTH +import urllib + + +class Urls: + def __init__(self, ds): + self.ds = ds + + def path(self, path, format=None): + if path.startswith("/"): + path = path[1:] + path = self.ds.config("base_url") + path + if format is not None: + path = path_with_format(path=path, format=format) + return path + + def instance(self, format=None): + return self.path("", format=format) + + def static(self, path): + return self.path("-/static/{}".format(path)) + + def static_plugins(self, plugin, path): + return self.path("-/static-plugins/{}/{}".format(plugin, path)) + + def logout(self): + return self.path("-/logout") + + def database(self, database, format=None): + db = self.ds.databases[database] + if self.ds.config("hash_urls") and db.hash: + path = self.path( + "{}-{}".format(database, db.hash[:HASH_LENGTH]), format=format + ) + else: + path = self.path(database, format=format) + return path + + def table(self, database, table, format=None): + path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(table)) + if format is not None: + path = path_with_format(path=path, format=format) + return path + + def query(self, database, query, format=None): + path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(query)) + if format is not None: + path = path_with_format(path=path, format=format) + return path + + def row(self, database, table, row_path, format=None): + path = "{}/{}".format(self.table(database, table), row_path) + if format is not None: + path = path_with_format(path=path, format=format) + return path + + def row_blob(self, database, table, row_path, column): + return self.table(database, table) + "/{}.blob?_blob_column={}".format( + row_path, urllib.parse.quote_plus(column) + ) From 7a67bc7a569509d65b3a8661e0ad2c65f0b09166 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 12:11:40 -0700 Subject: [PATCH 0265/1705] datasette.urls methods will not apply base_url prefix twice, refs #1026 --- datasette/url_builder.py | 17 +++++++------- datasette/utils/__init__.py | 23 ++++++++++++++++++ docs/internals.rst | 2 ++ tests/test_internals_urls.py | 45 ++++++++++++++++++++++++++++-------- 4 files changed, 70 insertions(+), 17 deletions(-) diff --git a/datasette/url_builder.py b/datasette/url_builder.py index c1bf629b..bcc4f39d 100644 --- a/datasette/url_builder.py +++ b/datasette/url_builder.py @@ -1,4 +1,4 @@ -from .utils import path_with_format, HASH_LENGTH +from .utils import path_with_format, HASH_LENGTH, PrefixedUrlString import urllib @@ -7,12 +7,13 @@ class Urls: self.ds = ds def path(self, path, format=None): - if path.startswith("/"): - path = path[1:] - path = self.ds.config("base_url") + path + if not isinstance(path, PrefixedUrlString): + if path.startswith("/"): + path = path[1:] + path = self.ds.config("base_url") + path if format is not None: path = path_with_format(path=path, format=format) - return path + return PrefixedUrlString(path) def instance(self, format=None): return self.path("", format=format) @@ -40,19 +41,19 @@ class Urls: path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(table)) if format is not None: path = 
path_with_format(path=path, format=format) - return path + return PrefixedUrlString(path) def query(self, database, query, format=None): path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(query)) if format is not None: path = path_with_format(path=path, format=format) - return path + return PrefixedUrlString(path) def row(self, database, table, row_path, format=None): path = "{}/{}".format(self.table(database, table), row_path) if format is not None: path = path_with_format(path=path, format=format) - return path + return PrefixedUrlString(path) def row_blob(self, database, table, row_path, column): return self.table(database, table) + "/{}.blob?_blob_column={}".format( diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index bf361784..21fa944c 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1010,3 +1010,26 @@ async def initial_path_for_datasette(datasette): else: path = datasette.urls.instance() return path + + +class PrefixedUrlString(str): + def __add__(self, other): + return type(self)(super().__add__(other)) + + def __getattribute__(self, name): + if name in dir(str): + + def method(self, *args, **kwargs): + value = getattr(super(), name)(*args, **kwargs) + if isinstance(value, str): + return type(self)(value) + elif isinstance(value, list): + return [type(self)(i) for i in value] + elif isinstance(value, tuple): + return tuple(type(self)(i) for i in value) + else: + return value + + return method.__get__(self) + else: + return super().__getattribute__(name) diff --git a/docs/internals.rst b/docs/internals.rst index ee7fe6e4..8594e36a 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -443,6 +443,8 @@ These functions can be accessed via the ``{{ urls }}`` object in Datasette templ Use the ``format="json"`` (or ``"csv"`` or other formats supported by plugins) arguments to get back URLs to the JSON representation. This is usually the path with ``.json`` added on the end, but it may use ``?_format=json`` in cases where the path already includes ``.json``, for example a URL to a table named ``table.json``. +These methods each return a ``datasette.utils.PrefixedUrlString`` object, which is a subclass of the Python ``str`` type. This allows the logic that considers the ``base_url`` setting to detect if that prefix has already been applied to the path. + .. 
_internals_database: Database class diff --git a/tests/test_internals_urls.py b/tests/test_internals_urls.py index 005903df..a56d735b 100644 --- a/tests/test_internals_urls.py +++ b/tests/test_internals_urls.py @@ -1,4 +1,5 @@ from datasette.app import Datasette +from datasette.utils import PrefixedUrlString from .fixtures import app_client_with_hash import pytest @@ -20,7 +21,17 @@ def ds(): ) def test_path(ds, base_url, path, expected): ds._config["base_url"] = base_url - assert ds.urls.path(path) == expected + actual = ds.urls.path(path) + assert actual == expected + assert isinstance(actual, PrefixedUrlString) + + +def test_path_applied_twice_does_not_double_prefix(ds): + ds._config["base_url"] = "/prefix/" + path = ds.urls.path("/") + assert path == "/prefix/" + path = ds.urls.path(path) + assert path == "/prefix/" @pytest.mark.parametrize( @@ -32,7 +43,9 @@ def test_path(ds, base_url, path, expected): ) def test_instance(ds, base_url, expected): ds._config["base_url"] = base_url - assert ds.urls.instance() == expected + actual = ds.urls.instance() + assert actual == expected + assert isinstance(actual, PrefixedUrlString) @pytest.mark.parametrize( @@ -44,7 +57,9 @@ def test_instance(ds, base_url, expected): ) def test_static(ds, base_url, file, expected): ds._config["base_url"] = base_url - assert ds.urls.static(file) == expected + actual = ds.urls.static(file) + assert actual == expected + assert isinstance(actual, PrefixedUrlString) @pytest.mark.parametrize( @@ -66,7 +81,9 @@ def test_static(ds, base_url, file, expected): ) def test_static_plugins(ds, base_url, plugin, file, expected): ds._config["base_url"] = base_url - assert ds.urls.static_plugins(plugin, file) == expected + actual = ds.urls.static_plugins(plugin, file) + assert actual == expected + assert isinstance(actual, PrefixedUrlString) @pytest.mark.parametrize( @@ -78,7 +95,9 @@ def test_static_plugins(ds, base_url, plugin, file, expected): ) def test_logout(ds, base_url, expected): ds._config["base_url"] = base_url - assert ds.urls.logout() == expected + actual = ds.urls.logout() + assert actual == expected + assert isinstance(actual, PrefixedUrlString) @pytest.mark.parametrize( @@ -91,7 +110,9 @@ def test_logout(ds, base_url, expected): ) def test_database(ds, base_url, format, expected): ds._config["base_url"] = base_url - assert ds.urls.database(":memory:", format=format) == expected + actual = ds.urls.database(":memory:", format=format) + assert actual == expected + assert isinstance(actual, PrefixedUrlString) @pytest.mark.parametrize( @@ -105,8 +126,12 @@ def test_database(ds, base_url, format, expected): ) def test_table_and_query(ds, base_url, name, format, expected): ds._config["base_url"] = base_url - assert ds.urls.table(":memory:", name, format=format) == expected - assert ds.urls.query(":memory:", name, format=format) == expected + actual1 = ds.urls.table(":memory:", name, format=format) + assert actual1 == expected + assert isinstance(actual1, PrefixedUrlString) + actual2 = ds.urls.query(":memory:", name, format=format) + assert actual2 == expected + assert isinstance(actual2, PrefixedUrlString) @pytest.mark.parametrize( @@ -119,7 +144,9 @@ def test_table_and_query(ds, base_url, name, format, expected): ) def test_row(ds, base_url, format, expected): ds._config["base_url"] = base_url - assert ds.urls.row(":memory:", "facetable", "1", format=format) == expected + actual = ds.urls.row(":memory:", "facetable", "1", format=format) + assert actual == expected + assert isinstance(actual, PrefixedUrlString) 
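An aside on what these tests pin down: every ``datasette.urls`` method now returns a ``PrefixedUrlString``, so a URL that has already been built can safely be passed back through ``Urls.path()`` without the ``base_url`` prefix being applied twice. A minimal sketch of that behaviour, based on the ``PrefixedUrlString`` implementation shown in this patch — constructing the instance with ``memory=True`` is assumed here as a stand-in for the ``ds`` fixture:

.. code-block:: python

    from datasette.app import Datasette
    from datasette.utils import PrefixedUrlString

    ds = Datasette([], memory=True)
    ds._config["base_url"] = "/prefix/"

    path = ds.urls.table(":memory:", "facetable", format="json")
    assert path == "/prefix/:memory:/facetable.json"
    # str operations preserve the subclass...
    assert isinstance(path + "?_shape=array", PrefixedUrlString)
    # ...so feeding the result back through path() is a no-op rather
    # than producing "/prefix/prefix/...":
    assert ds.urls.path(path) == path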
@pytest.mark.parametrize("base_url", ["/", "/prefix/"]) From 84bc7244c106ab6175b8315a2d917cf29ea53c4d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 12:29:42 -0700 Subject: [PATCH 0266/1705] datasette.client now applies base_url, closes #1026 --- datasette/app.py | 4 +++ datasette/utils/asgi.py | 4 +-- docs/internals.rst | 12 +++++++ tests/plugins/my_plugin.py | 4 +++ tests/test_internals_datasette_client.py | 45 ++++++++++++++++++------ 5 files changed, 56 insertions(+), 13 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 860f4563..8db650e9 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -44,6 +44,7 @@ from .url_builder import Urls from .database import Database, QueryInterrupted from .utils import ( + PrefixedUrlString, async_call_with_supported_arguments, await_me_maybe, call_with_supported_arguments, @@ -1242,9 +1243,12 @@ class NotFoundExplicit(NotFound): class DatasetteClient: def __init__(self, ds): + self.ds = ds self.app = ds.app() def _fix(self, path): + if not isinstance(path, PrefixedUrlString): + path = self.ds.urls.path(path) if path.startswith("/"): path = "http://localhost{}".format(path) return path diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index f438f829..e4c8ce5c 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -387,9 +387,9 @@ class Response: ) @classmethod - def json(cls, body, status=200, headers=None): + def json(cls, body, status=200, headers=None, default=None): return cls( - json.dumps(body), + json.dumps(body, default=default), status=status, headers=headers, content_type="application/json; charset=utf-8", diff --git a/docs/internals.rst b/docs/internals.rst index 8594e36a..d3d0be8e 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -387,6 +387,18 @@ It offers the following methods: ``await datasette.client.request(method, path, **kwargs)`` - returns HTTPX Response Execute an internal request with the given HTTP method against that path. +These methods can be used with :ref:`internals_datasette_urls` - for example: + +.. code-block:: python + + table_json = ( + await datasette.client.get( + datasette.urls.table("fixtures", "facetable", format="json") + ) + ).json() + +``datasette.client`` methods automatically take the current :ref:`config_base_url` setting into account, whether or not you use the ``datasette.urls`` family of methods to construct the path. + For documentation on available ``**kwargs`` options and the shape of the HTTPX Response object refer to the `HTTPX Async documentation `__. .. 
_internals_datasette_urls: diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index b487cdf0..767c363d 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -257,6 +257,9 @@ def register_routes(): ) ) + def asgi_scope(scope): + return Response.json(scope, default=repr) + return [ (r"/one/$", one), (r"/two/(?P.*)$", two), @@ -267,6 +270,7 @@ def register_routes(): (r"/not-async/$", not_async), (r"/add-message/$", add_message), (r"/render-message/$", render_message), + (r"/asgi-scope$", asgi_scope), ] diff --git a/tests/test_internals_datasette_client.py b/tests/test_internals_datasette_client.py index d73fbb06..0b1c5f0e 100644 --- a/tests/test_internals_datasette_client.py +++ b/tests/test_internals_datasette_client.py @@ -31,14 +31,37 @@ async def test_client_methods(datasette, method, path, expected_status): @pytest.mark.asyncio -async def test_client_post(datasette): - response = await datasette.client.post( - "/-/messages", - data={ - "message": "A message", - }, - allow_redirects=False, - ) - assert isinstance(response, httpx.Response) - assert response.status_code == 302 - assert "ds_messages" in response.cookies +@pytest.mark.parametrize("prefix", [None, "/prefix/"]) +async def test_client_post(datasette, prefix): + original_base_url = datasette._config["base_url"] + try: + if prefix is not None: + datasette._config["base_url"] = prefix + response = await datasette.client.post( + "/-/messages", + data={ + "message": "A message", + }, + allow_redirects=False, + ) + assert isinstance(response, httpx.Response) + assert response.status_code == 302 + assert "ds_messages" in response.cookies + finally: + datasette._config["base_url"] = original_base_url + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "prefix,expected_path", [(None, "/asgi-scope"), ("/prefix/", "/prefix/asgi-scope")] +) +async def test_client_path(datasette, prefix, expected_path): + original_base_url = datasette._config["base_url"] + try: + if prefix is not None: + datasette._config["base_url"] = prefix + response = await datasette.client.get("/asgi-scope") + path = response.json()["path"] + assert path == expected_path + finally: + datasette._config["base_url"] = original_base_url From bf18b9ba175a7b25fb8b765847397dd6efb8bb7b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 12:47:42 -0700 Subject: [PATCH 0267/1705] Stop using plugin-example.com, closes #1074 --- tests/plugins/my_plugin.py | 6 +++--- tests/plugins/my_plugin_2.py | 4 ++-- tests/test_html.py | 2 +- tests/test_plugins.py | 18 +++++++++--------- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 767c363d..cd2c8e23 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -29,7 +29,7 @@ def prepare_connection(conn, database, datasette): def extra_css_urls(template, database, table, view_name, columns, request, datasette): async def inner(): return [ - "https://plugin-example.com/{}/extra-css-urls-demo.css".format( + "https://plugin-example.datasette.io/{}/extra-css-urls-demo.css".format( base64.b64encode( json.dumps( { @@ -57,10 +57,10 @@ def extra_css_urls(template, database, table, view_name, columns, request, datas def extra_js_urls(): return [ { - "url": "https://plugin-example.com/jquery.js", + "url": "https://plugin-example.datasette.io/jquery.js", "sri": "SRIHASH", }, - "https://plugin-example.com/plugin1.js", + "https://plugin-example.datasette.io/plugin1.js", ] diff --git 
a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index 7d8095ed..6cd222e6 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -8,10 +8,10 @@ import json def extra_js_urls(): return [ { - "url": "https://plugin-example.com/jquery.js", + "url": "https://plugin-example.datasette.io/jquery.js", "sri": "SRIHASH", }, - "https://plugin-example.com/plugin2.js", + "https://plugin-example.datasette.io/plugin2.js", ] diff --git a/tests/test_html.py b/tests/test_html.py index 7c068085..79b6138d 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1454,7 +1454,7 @@ def test_base_url_config(app_client_base_url_prefix, path): "https://github.com/simonw/datasette/blob/master/tests/fixtures.py", "/login-as-root", # Only used for the latest.datasette.io demo } - and not href.startswith("https://plugin-example.com/") + and not href.startswith("https://plugin-example.datasette.io/") ): # If this has been made absolute it may start http://localhost/ if href.startswith("http://localhost/"): diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 6a4ea60a..5e3d6dc3 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -125,7 +125,7 @@ def test_hook_extra_js_urls(app_client): == { "integrity": "SRIHASH", "crossorigin": "anonymous", - "src": "https://plugin-example.com/jquery.js", + "src": "https://plugin-example.datasette.io/jquery.js", } ] @@ -135,7 +135,7 @@ def test_plugins_with_duplicate_js_urls(app_client): response = app_client.get("/fixtures") # This test is a little tricky, as if the user has any other plugins in # their current virtual environment those may affect what comes back too. - # What matters is that https://plugin-example.com/jquery.js is only there once + # What matters is that https://plugin-example.datasette.io/jquery.js is only there once # and it comes before plugin1.js and plugin2.js which could be in either # order scripts = Soup(response.body, "html.parser").findAll("script") @@ -143,16 +143,16 @@ def test_plugins_with_duplicate_js_urls(app_client): # No duplicates allowed: assert len(srcs) == len(set(srcs)) # jquery.js loaded once: - assert 1 == srcs.count("https://plugin-example.com/jquery.js") + assert 1 == srcs.count("https://plugin-example.datasette.io/jquery.js") # plugin1.js and plugin2.js are both there: - assert 1 == srcs.count("https://plugin-example.com/plugin1.js") - assert 1 == srcs.count("https://plugin-example.com/plugin2.js") + assert 1 == srcs.count("https://plugin-example.datasette.io/plugin1.js") + assert 1 == srcs.count("https://plugin-example.datasette.io/plugin2.js") # jquery comes before them both - assert srcs.index("https://plugin-example.com/jquery.js") < srcs.index( - "https://plugin-example.com/plugin1.js" + assert srcs.index("https://plugin-example.datasette.io/jquery.js") < srcs.index( + "https://plugin-example.datasette.io/plugin1.js" ) - assert srcs.index("https://plugin-example.com/jquery.js") < srcs.index( - "https://plugin-example.com/plugin2.js" + assert srcs.index("https://plugin-example.datasette.io/jquery.js") < srcs.index( + "https://plugin-example.datasette.io/plugin2.js" ) From a4ca26a2659d21779adf625183061d8879954c15 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 13:35:47 -0700 Subject: [PATCH 0268/1705] Address PrefixedUrlString bug in #1075 --- datasette/app.py | 3 +++ datasette/utils/__init__.py | 7 +++++-- tests/fixtures.py | 1 + tests/test_api.py | 1 + tests/test_cli_serve_get.py | 1 + tests/test_html.py | 5 +++++ 6 files changed, 16 insertions(+), 
2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 8db650e9..1271e52f 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -822,6 +822,9 @@ class Datasette: if url in seen_urls: continue seen_urls.add(url) + if url.startswith("/"): + # Take base_url into account: + url = self.urls.path(url) if sri: output.append({"url": url, "sri": sri}) else: diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 21fa944c..a7d96401 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1,8 +1,8 @@ import asyncio from contextlib import contextmanager +import click from collections import OrderedDict, namedtuple import base64 -import click import hashlib import inspect import itertools @@ -1016,8 +1016,11 @@ class PrefixedUrlString(str): def __add__(self, other): return type(self)(super().__add__(other)) + def __str__(self): + return super().__str__() + def __getattribute__(self, name): - if name in dir(str): + if not name.startswith("__") and name in dir(str): def method(self, *args, **kwargs): value = getattr(super(), name)(*args, **kwargs) diff --git a/tests/fixtures.py b/tests/fixtures.py index 5cbfc72f..d2ac661d 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -274,6 +274,7 @@ METADATA = { "source_url": "https://github.com/simonw/datasette/blob/master/tests/fixtures.py", "about": "About Datasette", "about_url": "https://github.com/simonw/datasette", + "extra_css_urls": ["/static/extra-css-urls.css"], "plugins": { "name-of-plugin": {"depth": "root"}, "env-plugin": {"foo": {"$env": "FOO_ENV"}}, diff --git a/tests/test_api.py b/tests/test_api.py index 5e9c1a0a..18e4b9e4 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1852,6 +1852,7 @@ def test_paginate_using_link_header(app_client, qs): num_pages = 0 while path: response = app_client.get(path) + assert response.status == 200 num_pages += 1 link = response.headers.get("link") if link: diff --git a/tests/test_cli_serve_get.py b/tests/test_cli_serve_get.py index 8f1665a9..39236dd8 100644 --- a/tests/test_cli_serve_get.py +++ b/tests/test_cli_serve_get.py @@ -61,6 +61,7 @@ def test_serve_with_get_exit_code_for_error(tmp_path_factory): "--get", "/this-is-404", ], + catch_exceptions=False, ) assert result.exit_code == 1 assert "404" in result.output diff --git a/tests/test_html.py b/tests/test_html.py index 79b6138d..006c223d 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1466,6 +1466,11 @@ def test_base_url_config(app_client_base_url_prefix, path): } +def test_base_url_affects_metadata_extra_css_urls(app_client_base_url_prefix): + html = app_client_base_url_prefix.get("/").text + assert '<link rel="stylesheet" href="/prefix/static/extra-css-urls.css">' in html + + @pytest.mark.parametrize( "path,expected", [ From 6bb41c4b33dbd1015c181cd43465b645298c3c88 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 13:48:39 -0700 Subject: [PATCH 0269/1705] Fix for test_paginate_using_link_header --- tests/test_api.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_api.py b/tests/test_api.py index 18e4b9e4..3365bf57 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1859,6 +1859,7 @@ def test_paginate_using_link_header(app_client, qs): assert link.startswith("<") assert link.endswith('>; rel="next"') path = link[1:].split(">")[0] + path = path.replace("http://localhost", "") else: path = None assert num_pages == 21 From 1fe15f4dc110622754d9dbeafe0f93c79fde9022 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 14:13:57 -0700 Subject: [PATCH 0270/1705] Docs: Running Datasette
behind a proxy, closes #1027 --- docs/deploying.rst | 54 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/docs/deploying.rst b/docs/deploying.rst index b0647b2f..e777f296 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -110,3 +110,57 @@ If you want to build SQLite files or download them as part of the deployment pro wget https://fivethirtyeight.datasettes.com/fivethirtyeight.db `simonw/buildpack-datasette-demo `__ is an example GitHub repository showing a simple Datasette configuration that can be deployed to a buildpack-supporting host. + +.. _deploying_proxy: + +Running Datasette behind a proxy +================================ + +You may wish to run Datasette behind an Apache or nginx proxy, using a path within your existing site. + +You can use the :ref:`config_base_url` configuration setting to tell Datasette to serve traffic with a specific URL prefix. For example, you could run Datasette like this:: + + datasette my-database.db --config base_url:/my-datasette/ -p 8009 + +This will run Datasette with the following URLs: + +- ``http://127.0.0.1:8009/my-datasette/`` - the Datasette homepage +- ``http://127.0.0.1:8009/my-datasette/my-database`` - the page for the ``my-database.db`` database +- ``http://127.0.0.1:8009/my-datasette/my-database/some_table`` - the page for the ``some_table`` table + +You can now set your nginx or Apache server to proxy the ``/my-datasette/`` path to this Datasette instance. + +Nginx proxy configuration +------------------------- + +Here is an example of an `nginx `__ configuration file that will proxy traffic to Datasette:: + + daemon off; + + events { + worker_connections 1024; + } + + http { + server { + listen 80; + + location /my-datasette { + proxy_pass http://127.0.0.1:8009; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + } + } + } + +Apache proxy configuration +-------------------------- + +For `Apache `__, you can use the ``ProxyPass`` directive. First make sure the following lines are uncommented:: + + LoadModule proxy_module lib/httpd/modules/mod_proxy.so + LoadModule proxy_http_module lib/httpd/modules/mod_proxy_http.so + +Then add this directive to proxy traffic:: + + ProxyPass /datasette-prefix/ http://127.0.0.1:8009/datasette-prefix/ From fa4de7551cbaf5e08f022d106605252d2a4332ec Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 14:37:58 -0700 Subject: [PATCH 0271/1705] Binary data documentation, closes #1047 --- docs/binary_data.png | Bin 0 -> 5572 bytes docs/binary_data.rst | 68 +++++++++++++++++++++++++++++++++++++++++++ docs/changelog.rst | 2 +- docs/csv_export.rst | 2 +- docs/index.rst | 1 + 5 files changed, 71 insertions(+), 2 deletions(-) create mode 100644 docs/binary_data.png create mode 100644 docs/binary_data.rst diff --git a/docs/binary_data.png b/docs/binary_data.png new file mode 100644 index 0000000000000000000000000000000000000000..2c5d0bdda89fc03e1632c9843ac9a11321e367bb GIT binary patch literal 5572 [base85-encoded binary PNG data omitted] diff --git a/docs/binary_data.rst b/docs/binary_data.rst new file mode 100644 --- /dev/null +++ b/docs/binary_data.rst @@ -0,0 +1,68 @@ +.. _binary: + +Binary data +=========== + +SQLite tables can contain binary data in ``BLOB`` columns. Datasette includes special handling for these values: binary data is detected and offered as a download rather than rendered inline, as demonstrated by the `binary_data table in the fixtures demo database `__. + +That page links to the binary value downloads. Those links look like this: + +https://latest.datasette.io/fixtures.blob?sql=select+data+from+binary_data&_blob_column=data&_blob_hash=f3088978da8f9aea479ffc7f631370b968d2e855eeb172bea7f6c7a04262bb6d + +These ``.blob`` links are also returned in the ``.csv`` exports Datasette provides for binary tables and queries, since the CSV format does not have a mechanism for representing binary data. + +Binary plugins +-------------- + +Several Datasette plugins are available that change the way Datasette treats binary data. + +- `datasette-render-binary `__ modifies Datasette's default interface to render binary values as a readable mix of characters and octets, rather than just showing their size. +- `datasette-render-images `__ detects images stored in ``BLOB`` columns and displays them directly in the Datasette interface. +- `datasette-media `__ can serve binary content from SQLite columns directly, with a configurable content type. \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 262400c8..fc566a37 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -9,7 +9,7 @@ Changelog 0.51a2 (2020-10-30) ------------------- -- New :ref:`plugin_hook_load_template` plugin hook. (`#1042 `__) +- New ``load_template`` plugin hook. (`#1042 `__) - New :ref:`permissions_debug_menu` permission. (`#1068 `__) .. _v0_51_a1: diff --git a/docs/csv_export.rst b/docs/csv_export.rst index 9b7f8188..b5cc599a 100644 --- a/docs/csv_export.rst +++ b/docs/csv_export.rst @@ -1,6 +1,6 @@ .. _csv_export: -CSV Export +CSV export ========== Any Datasette table, view or custom SQL query can be exported as CSV.
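As an aside: the ``.blob`` URLs documented in this new page are built with the keyword-argument form of ``path_with_format()`` introduced earlier in this series. A rough sketch of that construction — the column name and BLOB value here are invented for illustration:

.. code-block:: python

    import hashlib

    from datasette.utils import path_with_format

    data = b"\x00\x01\x02"  # hypothetical BLOB cell value
    url = path_with_format(
        path="/fixtures/binary_data/1",
        format="blob",
        extra_qs={
            "_blob_column": "data",
            # The hash pins the download link to this exact content:
            "_blob_hash": hashlib.sha256(data).hexdigest(),
        },
    )
    # url == "/fixtures/binary_data/1.blob?_blob_column=data&_blob_hash=<sha256 hex>"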
diff --git a/docs/index.rst b/docs/index.rst index 9096efd9..6b55da8c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -46,6 +46,7 @@ Contents authentication performance csv_export + binary_data facets full_text_search spatialite From d53d747e6a9dbc294c0565bc5eefe9aa16989316 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 15:13:39 -0700 Subject: [PATCH 0272/1705] Release 0.51 Refs #1014, #1016, #1019, #1023, #1027, #1028, #1033, #1034, #1036, #1039 Closes #1076 --- README.md | 1 + datasette/version.py | 2 +- docs/changelog.rst | 82 +++++++++++++++++++++++++--------------- docs/datasette-0.51.png | Bin 0 -> 47637 bytes 4 files changed, 53 insertions(+), 32 deletions(-) create mode 100644 docs/datasette-0.51.png diff --git a/README.md b/README.md index 8670936c..c101a4ed 100644 --- a/README.md +++ b/README.md @@ -25,6 +25,7 @@ Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly new ## News + * 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. * 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). * 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). * 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. diff --git a/datasette/version.py b/datasette/version.py index 2f4bc37e..f6e9ce97 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.51a2" +__version__ = "0.51" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index fc566a37..b9120c52 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,45 +4,65 @@ Changelog ========= -.. _v0_51_a2: +.. _v0_51: -0.51a2 (2020-10-30) ------------------- +0.51 (2020-10-31) +----------------- -- New ``load_template`` plugin hook. (`#1042 `__) +A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. + +New visual design +~~~~~~~~~~~~~~~~~ + +Datasette is no longer white and grey with blue and purple links! `Natalie Downe `__ has been working on a visual refresh, the first iteration of which is included in this release. (`#1056 `__) + +.. image:: datasette-0.51.png :width: 740px :alt: Screenshot showing Datasette's new visual look + +Plugins can now add links within Datasette ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A number of existing Datasette plugins add new pages to the Datasette interface, providing tools for things like `uploading CSVs `__, `editing table schemas `__ or `configuring full-text search `__.
+ +Plugins like this can now link to themselves from other parts of the Datasette interface. The :ref:`plugin_hook_menu_links` hook (`#1064 `__) lets plugins add links to Datasette's new top-right application menu, and the :ref:`plugin_hook_table_actions` hook (`#1066 `__) adds links to a new "table actions" menu on the table page. A sketch of both hooks appears after these release notes. + +The demo at `latest.datasette.io `__ now includes some example plugins. To see the new table actions menu, first `sign into that demo as root `__ and then visit the `facetable `__ table; the new cog icon menu appears at the top of the page. + +Binary data +~~~~~~~~~~~ + +SQLite tables can contain binary data in ``BLOB`` columns. Datasette now provides links for users to download this data directly from Datasette, and uses those links to make binary data available from CSV exports. See :ref:`binary` for more details. (`#1036 `__ and `#1034 `__). + +URL building +~~~~~~~~~~~~ + +The new :ref:`internals_datasette_urls` family of methods can be used to generate URLs to key pages within the Datasette interface, both within custom templates and Datasette plugins. See :ref:`writing_plugins_building_urls` for more details. (`#904 `__) + +Running Datasette behind a proxy +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The :ref:`config_base_url` configuration option is designed to help run Datasette on a specific path behind a proxy - for example if you want to run an instance of Datasette at ``/my-datasette/`` within your existing site's URL hierarchy, proxied behind nginx or Apache. + +Support for this configuration option has been greatly improved (`#1023 `__), and guidelines for using it are now available in a new documentation section on :ref:`deploying_proxy`. (`#1027 `__) + +Smaller changes +~~~~~~~~~~~~~~~ + +- Wide tables shown within Datasette now scroll horizontally (`#998 `__). This is achieved using a new ``
<div class="table-wrapper">`` element which may impact the implementation of some plugins (for example `this change to datasette-cluster-map `__). - New :ref:`permissions_debug_menu` permission. (`#1068 `__) - Removed ``--debug`` option, which didn't do anything. (`#814 `__) - ``Link:`` HTTP header pagination. (`#1014 `__) - ``x`` button for clearing filters. (`#1016 `__) - Edit SQL button on canned queries. (`#1019 `__) - ``--load-extension=spatialite`` shortcut. (`#1028 `__) - scale-in animation for column action menu. (`#1039 `__) - Option to pass a list of templates to ``.render_template()`` is now documented. (`#1045 `__) - New ``datasette.urls.static_plugins()`` method. (`#1033 `__) - ``datasette -o`` option now opens the most relevant page. (`#976 `__) - ``datasette --cors`` option now enables access to ``/database.db`` downloads. (`#1057 `__) - Database file downloads now implement cascading permissions, so you can download a database if you have ``view-database-download`` permission even if you do not have permission to access the Datasette instance. (`#1058 `__) - New documentation on :ref:`writing_plugins_designing_urls`. (`#1053 `__)
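To make the two navigation hooks in these release notes concrete, here is a rough sketch of a plugin that uses both. The hook names come from the notes above; the argument lists and the ``{"href", "label"}`` return format follow Datasette's plugin hook documentation rather than anything in these patches, and the specific links are invented for illustration:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def menu_links(datasette, actor):
        # Adds an entry to the new top-right application menu,
        # only for signed-in users (the target page is hypothetical):
        if actor:
            return [
                {"href": datasette.urls.path("/-/upload-csvs"), "label": "Upload CSVs"}
            ]


    @hookimpl
    def table_actions(datasette, actor, database, table):
        # Adds an entry to the new cog icon "table actions" menu:
        return [
            {
                "href": datasette.urls.table(database, table, format="json"),
                "label": "View this table as JSON",
            }
        ]

Both hooks use the ``datasette.urls`` methods introduced in this release, so the links they return respect any ``base_url`` prefix automatically.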
.. _v0_50_2: diff --git a/docs/datasette-0.51.png b/docs/datasette-0.51.png new file mode 100644 index 0000000000000000000000000000000000000000..eef83b9f6de49114baad50cfaac899f050559849 GIT binary patch literal 47637 [base85-encoded binary PNG data omitted]
z+gCvN6Xol?T1Ec5t~$b!nhK(d8Ce&nND?Yb=*3l~1)Q3O2qET}Q%N;WFElc0A+h#O z?M7(M&h%AnQ23r}f+GctR)2>WW=8h&)rBj-+YMX>N~Q>KtzUt<*S^WnSSzP;vsF$x z7ly7>zARR=WP-{jJ#WgdC8gG>Vp*Q%9r%uKf$d+F*LZG~IdF^L`4$lD_1vd?Mvz2Z zg(P9Th~Vq#(Rm-WXpO~eKc9JlE3wBI1KpJ0IAyY5KY8x%HCax~2ZKMm#BBpWKdbh=`SuJKs&gI8m}aAjV6 zMfN&d)+IxiL3V=SjT>3!cF9r5&P(44KyT!8;0K4iM~ANXdrnnj7`-_KK%5JT0@y%7 zW)J}QFaY3*5}9ZS9;ZbTg(GP&0DmcvPrkbS&k|Rn0f4Xe|FbgxpIGPrQszo@R<$8l z*C?=+@Iz31q}jLgH&rn`umTEbHu+Eb-Vp5+TXT5I_zGU>2Zg1u`yV3P-1FT3nlOZ44 z65h!BRz^`!K6DI>Y%Bj-p!5<$z3ttOEenq>k-5d`+kQzfq%S_H#?`ky9>wP@LJXr5 zS^DFqI*VAN=}}%8v5a${Tn_Q?tVXY_k83!WsbDCjKDU@<9BZYS65rSt#*O z`Pj{1AoddBY&yS?)Ae+wy#Dc6p9rSGeX|%K!Tt-}xwrU*MlIRM3es|;!PATk=41Nf zte??wp#)>UO9uE4m4F8HiQGI7HR_1{yMc5&*p?8_LyH;i zN%LimJdTV%EX}jSg8Wkdb-`u5i23bEuUj$bdl1?r6fS8%nbKC#;;5+>#0y#Zyv0LYtg!X3#mYaN_zQ z;Y8aAeR6b+Q})63xSeUxLn4kxM_hoEdO$&4+h^fB#CGRdb&5vG@Jh9PgzdNNffn6T z9vldLCmZ}xfrpQ`h6KN9Qb;xjmO=gTWX&ickHq!FMjIyC*^;UvO$FA;dL zoz>%WwdComG)4QQ<%6R;DN~XhVn^d=)pwHOMoitMgN>>BLz#?440EtqyAQ3#gRKF1 z4HS{m7=_){jkpN@pPZ&~?NOD2k+G0%qm+2a!>1NI7-*<+qzMX7(q%X@>eFIpAp=Ia{cW!FQ7&nh9iGL*sNcYoO}FryAU z>W%__vIk|q7CefS%TGDKrL!IqypJGv8m{Iw(Gu}uBO8suT-dp8HXBnJm){Q{RsBpu zD+Ydx4jmI+-QQ6ll6bUci;DUUQc(nm<@AGcSX^Q{6>e65_Cq(2RE-OjmgZmZn>Ue-1hZS3Crpa zQO9;`XcqPp4(}w##XtcATVsTd`NL8gLao^i@_DfpszW_YFjw9|ZhPn1J5y02+q1tf zVyCp2U`pcO<_h?oM7Rj>w(?s=i>!#mq9ifyIQ*J`ETbHZ5vn8{O;%fbZtXw|aJY3_3!@QAQn&(c1}+r~ z=i=0YdvbZ@s5(%?eGas1Jbc^({p6`ql$e=YHd*vTP+Ru6-JG`e#L)Ns)j;h8&|e`IZCSUJmRs|2wmcb1oQWQ`wHaOoLchvn<3gW+R)WtK-Xogoz!rp zh?DT40Roz=phV9LvrxHdW+BbWTh{y{B9e{sLelT=EAh9E8|~DP;w`4nW^&AyiAK$P zO+0|J*VB%Rjqm>Gt@{AhwC&i)eF*29eAZrZ?Z@#&e0S~)&B`V>JBA3Bsj~`2@@lSM zR$jtAHRI?9pG!#@bKJ;FiBBl;?S6MpJX;Zr<~E-DLIjEe5iEGbzcV9`rl-In!t%DX zJ&gK&%|>;NgEw0|Y0ap<{<4kAr-p|H{^bhd`67nho$Bvd?TLfD5&j&! zZTc0L{l*}n6LSnd^+&T+Rq7BrGO!m1E$B&{xq;iDKWo)s3~`ADBP zjHc;qYm(lyfJm@yNPv5ZWw%<$ix{Cp^7QfUb^7xZIhkKgl<3iOE8m{KcDK^d@9upr zIrv5R4C{5GD9F^VGPf`xOH3sXGb^IpjY&LKHsa^9n$V=K(-$WaQPB4}#e;=NB9I`> zdvAa4E1+L&p-ldePa;J_V_OPo1+!As*R-_nyu-E&lA%~9vu*v7zslVIV(dfqiTUrSPmwh0nY%h-{Z2mPn?*tI2O??? 
z)Ges1!FMPq25jBog)WrA$ci0c|=W zMQK_A>_`;KRejhn%glau+67jMbiPzy94l35!u2|*r-6&+(mU7c3=o!u8FLXB;jjCA za@YA2?te*nYMf}aLakMOlNQ1@6X^yk9qI%dvbNa>ZiaW>y_M>=`Fcz|GTUm#M+G#W z9T4~>ok$^-P`E%A_m+0uFHng&*7^jrF9t%Z^r-h?f?Xxaiyz`s_|d^Wc{9@!ih@>@ zN~QeMC<#9Q0BjAd0r9y9MG{4e=jTb#(&`h(%)?jkFqNu|ThLRn`+*^c0-xi+%k3pI z!QH2NiT=;}E?dP{ThMtnG)~HACLs21J6*^CFjg`QfG@P%lKmN_a$mds_A5y~aQ+nr z6_CY>dEx#@6p^?C?MRu+X3i3|nVuAw;qPGfgI#{5W`Vup1XPf7y-73*eYttIfli@Z zbG}8|coC9K0(#4^`b|IKJ|g4800t+{}eIEw{_&?PjWB3EM42c|eV zB=PxFIcl)}0DjkfVZ{_8Di6Jj1uL1o*JFsYXOS%-3mu6|HW34fK~&*RVXhRRc{dq0 zR7NO1fijLv&|sx6%6H|djCm3V*yHpPRoLHn$D-P@Tzq^&?Odo+b?zLjnUmgJRMU@( zA#9!XfFXkmD|*j%pz6>?3gGy>FK>UD4JKEfRqkSZHod%QsBcYfW${`_UCh@&CySLDhTnU1P*`_WIXCdeG`bKh$ zO50i@cQ7fBIO2OJoYMI2`VMyqC0oAyrl@AY*u5dv;1u;)oLd(8gqib_EBTM-N~0%G zN7-|VmTR9(PMMl_GkpDr5`51-_m;Rr_e4SQ(_Vkdf4~1rKfBe$vvCyqs7(Sn!R4>c z_J2r*<&hNXZ0gJB_`%9Qdq(yztPI$CaM*1Jx+WNDFJ)K6i`=HID$ozAvzlcET3wq> zpDt#&bK!{`j-7yDkS(GlM>pz>w6LEq+x9U0kvj4IXdK>Bk)ZQ+`F*vHrfc^z!>&u7 zrzA+vTZrA6l}uRcK)0Ca*6woed3u=D>GXycRT2qiuqoxS*)=G*U-umufrx;7Kp zkIN27obZ8p*Pnfa_GP{NacDY>b#_73W~J_TsejLUJ0LrEG?<-Bs)%_t$KZPWsT~NC zd4}QEC3Nx+gZd$btlqGwPxdou6G^V88Yffg3!e$;#!y_sq&xni@}8MnD@H1JAT>MV z=2_}&!Z6X$Kbm zRzQ75b(1>$weBRa*r&^-9l@)+=j^=kQ`kPe!uWD5HGxa2I|EcR0Z3n*Gs4L8Y2-DB zu&xT9L+JyF2eK;;e?)0;E~-dEU44!MmtIN&s`iV^Od1(EPTN3=^qe08P!BwM7Q!`& zo3&ruX1ME+Oin;r4bxmlwi<6Qu;`H zRkFmJz5@*f(zUZWOMjx1D>5Y6!S~TiQ&F6nw`KiX>%oD8`L^1y)^*6FH{`h+KCF7A zUN6yT=KL=M4=E~hfk&*>A2pT7Z;p<44+)d81@+9H90qr|4=848A|DlJ3R2R6h_him zT6t@gEQvkW$nqW>;()BMewXv$m&k9gzyMj(Ldw&mHzj&!8}h6PQ^p}%e;}wTteMP9 zfxaKG$~@_(E%$$=8IP3|Qd2*rf%`gnx*`h0@0T^ia0S*f2|!^VDg>E43pzDs6`eVl zIbsPwTY{e=Xgk@a89ht*a*z8yf=r}c1Nd!wMQFWBD$h#8Hf=X4-Nj$N3se1J?{F1# z`T{T5no4gTM5&#+d}wd`{`022B+6jzcgXtWh81QTP`GE$dBlGG#SOThV3n@Kpmi=JKGMlDx${p_XWiX|!v8j2%Qi}JJNZpsKm`p%T+n}Ox*)%!c(cC#lyyug zV1w1p{tEuIiLf;v=kXAK2`&WF6WWK^Aj8Ao3$VOmj$2TLzP)u_KZ+SBlON`HeNJvR z6p(>6BLwo}8^)eLzKXT_pjD=ujmyq7t+Ml$LS25mG($c&!vHc%y;Mlb*P%g!Iy3+0 z1ggerA%?)+QvVSmjseYy`}jq6)RZ`MZm`Y+4ST#M@SU8e`VDBT2GUE=-MS=77fJ+T zeM+`s%V1ujsv&nDc>Lw`(TK}%TKi`Cp>>C#I68p3^>0O;nzVz5uFD6y8~TEh;PYiE zl<^v-U)|31G2irI8ryR`TU;^+DZg=y6^5?WAx(JjXTM3>3u1S`$-ZB;C^}PJ{rs{{ zQny(rY||PES{K?$$lrHo8JM;>-?_NHS9pUTz&AM9jZ?Sk)Bqi$df-v?&3*#ekLU)K zq`5&kN^;l8>oXkH6v{@T{6A(gXzhL^4Bo%x_b+*#2CASJEsw?cP~PU45&#T=ylaPl zyJfPnQ$CHYjV<%EP^&U*CyV5Wh%v{bh2_&+-*-m!;2N=&bzeJO=F*f6=A_^L(67y# zwY+_zy}qA`T0T{`zpH~YQ(qjyQzdC4KHQN6k)4CcQygR-wf<}~+)y8sR$iZu*4zYy zwNqJuP36ouI@@i85P~yRNZ{uEKecry%uUYA4VOg?r zT{x4Wp60*=Y@A8fI;_zre%K|}M)Og~{x&+!>rmZxBS&PH=)c&dQk-t7hOUj_F=JNSG-+FEOK*Oz-ZehEF{VU1L zKukcQ4(!6wz^!gLM)dD|(QU%=Wq?Gs?zPp66;2XB-XUbdl?e8YkjRG$mP&Qw<06zN z<`l~F4gU6H5^Apn(G6c&e5&y~UzjX7G7;-spW69;(P^Q^{LsOgzUu8E8sNk(apiM! z{oP$PQ9fQ#0v|+Lxb4i+eSO;`M+8|Lp{juL`-ip>4L6q4K@UBG(Lvy`B-NdRAAbv$ zrjTWnjUgWx2c=YbpzQko^{vUoYra-pH=WQ(_iqg!vbkcTO5||?6JrLa7ui;+l1@p( z$w4t!Z~TYy)I!AupaID8GIBiacHHL}8j^Gn?Xt)AxyQKzf#z{Yk==XG9!VA2$g z?wP4osG8el$9oZ05rQFPA+ABb)+!%&N>MV@i2Zb{NBUJAd|tye<>mdDD!`i-YH!tg z-T&rBbpJefg4Z*E*1FA-81U|b5fEJP3I^a3LMnzXk33|$fh^LHQXuQ5p~&N{pInd! 
zs<8hZcKz{xhsiHl*dpi!FVuq^B{=8>V(5{s$fy<2D}i>SgQD8=rG8!&)#&O$)Gu%g zuqIkiYYZm0FEi~k;Qi%cXdmJIAH+g4u%UgasfEh%t6C8eHG3G2vGAHG3VY|A2#iN1 zpoKJ#X@0G)WBKL68Q*+&frY#7$EgOo{L(;zL&=6KHrS-2^;-!w`!RB1t9L`{Q{U+F zJrOfcHAh4~)YjU-i7meM$@*vxAj=yaSWf|k=@2QAPhg(Aj;}npsL(@}V}D|zb|f~Y zb4F>L1r*Zhg$86X(1CR%NZj2OBB^*7<+$khex3gK-oVz*gC&l=SB1LEEz_REi+ItYSbjLRa70^gYa8c<) zb*dZls%OHJbHzfcE-Y6dIZU5bnii@)7f5v@cgwE%WKjXDBWnAc{uHnk`w@Z{Mk8;0 z7h^Od6R6W_R61OpdY=pRN(}dbSs({Z1yZ%8)lb6(sd^5MA}zNc9W8bsc#$;nvRg5o z_r02tg^7rICur*x&*S|nUNhtTR*yB?r~w(aWJ1j48FYK=J&AF4VyeF=_?WE6{YpAB zrHM)tk{CrJuYKnOv;3|=c3@fGjiCK2-hSILaI4v|pxAA}37@D=t^cRDA;g+JLiBfx z|HwdD#&msYqG*bPV~IRR|2R+cP1xDiIyG`G9OPU(VhG>$;@4hpVEvEnUdgVRI~RU6 zpAiZVOR9=(>X9oCE?h{1y_ql=nyITEDlw?D1M6N+3?lfqG;h9DLodD-DsUil*AkI$ z7`cv`z4wG-WTA9a-tR-6p61BQN;TU-Eg(M@rL;>OC9>ro;Kqr1NTHg)XFWGMu&tF7;X z)yIijMf=`%_H~Bh98-ntfiw3l>k``{gVNj3%t#Jq*bLdLaUJv!n z{#%v`zy4zlU}xa_fUbqD%jd9>=dd~Ny7+CPJ-GFc7KhY*p9fReXWBtGAvahjdTKGo z#rl5a0>%)$6Hm7$?rrGG>%CoQ^z=en(+@A5kW9#8NGxm*coTF*+(QH zX-Y$c{`6{QRw(qD#9G)Yp5%&ET5BE4d%oAI?bOcBNjZU{2T=KBNv9VV9xzFk*K$`( z`Q%9pSQqo{4|9-ofBimxm73Y|S)E=W0)8lj3ij{*n3izQ?+V|l!V+f>wx7y}Q}9nE z>V~_)q=#r)X)2#gSYP})tnhpJl!M)!YFkN$@J81R@s~{Kbai#4G0mgNpJ^7XF`PqK zNQ@s~Js`DnpE)G~5AGil8ioa0PVg!uP2{hRLlT+(shCG!Cc-|bxzFUJrrOGB~@=0g46A0KlSk&wFnO`&)ao=os(AcLiSysAtE z1eT)RG}=02tK!1RK(H4eAq3*$nJdc81h!A-In3?TI}q$xJ*Om^4|V|8ZOr*}n@4>W z2dQ59DmnHr(T#uPE(hVLz7Pk%~m7|V6!-M)7&0%mCDj)4fD>_tlphs zeNTp-wJC*Q7IFTwiZ_TfEc1owA{l4YIXulkO?Bb^dOp}_x2u#s2sc9&T(<;~fDc7) zxRdSI;q6YVzk~{dbn;dp^pVGQhZ4((1ji?^~# z_`5sQ?0U~Y+-mW;+593%751hfXPGY$F9@A5I$w8FmTjtbEex0QK-mk{fCxbPSFFD^ zE2Ukp+hz}ZR)%j;U|j`wdgdmHt!}Us4q>znkF0FNWH$*A-S^R}o5vee=(D{Hw69DD zKIwl_b1jhgrz!8CXVN`6ghz6lCTN~9L0jh$RtY%?`gd-G&+L?_gi7e)1e(gblYSWG z$}jUJF|#78bOw_CVXKxN?^d(&Kz>fgjtj2CiUVUBN|EISuL2{7m|+8DysU z)lT+R{$>f#@|zgU5>$O_I7p(5<6BNoCz}B0imBYtd;F=@Nn`xPU3S1t3PBS~pn=Nc^A5)G1`xkP@Q=?(qQF6__94E)UNcN82*u43|q~ z=CA-ST|Wl*ke=UC!fLj^y37rG|1;D$5Tw4d5Dw4Td2QpTB!R8OQKfSo$!qQ+z>9@F zW-9)<#ajY_@^3qGt8}1yle;$Tbg{Z}d#ICrSbnz9x`(EvQAjIpLTOl!PC>1kr1_8s z>_0+>uLuGS`$kF}W#DAESg$+obI~(YfG?M+-X_aI^D}9j_8or*%!nUO-JzXuC~n;W zg(`Ym$!yS-G>VJy_TWz3vnKRg1Hd}YdTpzLB*rwQT^$ko8fTdmC=wvAd4kfX&HB&# z!rFCfdLedKqzh3xsa?)+u3GsB$!nBiz{3cI&u8n6$PFCJyGCt2&vbLfysyF^X*FP>86B-lXR_RPit2X#S|M$5pt=zZlx}Tw&M_qS+qW;;aHbL~g zPHs0%$+OElP#U*zI2m5nvFI0O#s^6cA;f|%wvfY4Fu&steCbh0WYeO58^^xcDt5${S!iRsoh27?SvpU`pwyLasYnWo z*M_Vvzdq|_F?5sTklKi1Qvk&k08U0c*|UKrq!hjU6Z=0B{6+t{y0yHjv}!vHU%-{k zji}fpN)4bMA7cAkDp^%tH4m{Sq||8-m8~@G%&j#bQ~3uiL3xg9=2JNd<-wTPzlmtZ z@O0i!CM#_$nql-LAMoX_{c_1(Wnnu*fr8C2iL2`gSRR zO9u(It$6rHGr8}EmP6c=Kt#bm$Wl@Qg|4kKXJ>bb@L6}v#Mg;^1Z(BVPT&LYnDion{e=C59x z;~Ob);%}$^1R{R?nV+4WhxTs94ISMxh_9e_2@!z zV-%M2^DdFT&R$HIz;;-5#!mZAVImbzpaOo>X;EwH16y@_l(DzlYvnA>XZ=73ccq{k z^t%U%BVN1!hPSH0fGj@3Y8qem=uP$r0P`fcW7<@p1~t!|%b?77f(}ez_|*}#Dz}CpbC4A9Qe~J&i4=5{z_k1n)Z{x01BTUYt?HcbQjMr>Gm%xT~D#c1s7wy`7Alg(@ zcc>4tbC`m5Wvl&cn-bjg$B)wyv3%RJlA z2-oltcTg|p;@Olvp`G{ebxZYC(z@;_@qx0_0Fps+Vor{+E#N>&c?*1^&W(BkSVJW| z?(!b`lup0C%`AyZ8N)y?cwq8QsQ#rzv;w-!~I6rL7 z{5WnNns-d8HV`mu9kr>Jy6`0SGXq1W<~cwU*{m`20@w=+qxb}p!sPvlyW24ye3lBeuh3et7dnn5I)-Qs#{eDzsDg*#|`*`=?nD`jS$Fx=o*QFGEwYG5R zVQ-IYWwFNcv3Fd2M4`pFb}0mN9)?C!&{X8UgE@NfLr@t?&cgg1JDlh+6cphve4HKQ z>kakOESJ;PHurHR_XGQyTj%Q%32xfi_C0$d!Kf4@hzn)5H@ire1DZ>aFu~S3iUFA_Nj16!L-O{e~|G zA*d9@h!U}P(&7m&i*nFq$3{9BATqpDti8LzfQ;Z~?ZV@!LCy0SQi522z`X#nd0kDV!MbI0xg7Y4yuYcfJ18bzwpGgnifT_zD!W%xgOrxOCSt(7 z0Bmp!X761^u(1tZxJ6+$Jsdt8a+}A`aNLY7IS3baY{afhfRUYM%hQvo zm<8C7)R5}9U*GjYT-&hky?yawrT;F?V(h>CgDW!s!1gcy(2vYNeEXNAApV!6*hD5N z^!_C&{!f_%+N22qz1oh5%e%5|vOYo9|s&lsx 
zz~~1fT#BP#@dKh(FR#nTS)=pOaL)a~{Pc#qty3dkMciIJ>*G4+fACf7)oUrHc%^PC ziM=>`Em>QV!j&$RtU5BK@+O3$*U|x`s1)5+o<=twp0=Wr_|tov2Jf)M_&dd1iFKNG zjaK`FPHEQiyI4$QBdwwUaO!MWvB8{Y-;edr^|;I^+pyPNHf7r-&Iu4&BC7N{Bn&*# zeb2*Fkt|uA`d>p{HDwSdYZRukHFjY*OF6|mU?LtKO~6<7FWuy+&cE?eIjA`}|75jC zQ3IoXPdcMX*KlgHVSC&1DI435b>7_RYo&TO-56fjuU6j5TQhK3M@$*bRZmjoY`4yn zuh}~dWE#t8h|yZg*LGm$u!ua5mg;p)VnCuO4DnNgn(1_)OOai4bxhI1ujE7aoA&MD zX$qL+2vdJsck%SiP}hq(slVx5^9mvw$bHgn7y`1t&09Ue-cet8wiGIAuvkKTre7)S zSXXrFz;XGzc{=kt@^+aJ$#GrdwECFgAZv2I`bNe(|PZVl;ccpCmx4(zCMd z`W}c7Zf(pq9iW+){m-El$f4b>=8-kYvwmw-Gc~YD!aD`rQQzRB4!-W@Df^kghAtEm z62p33wSG9I4<U=pzy-egXBNJs2tU=pHwHsT~cD>b?IoW*}QDq zajflUxq+SjxDo~>yS#>ZIN}d(CMWop9%ku8~i&H7t! z|As6}{x!4n;J<>zEW52V3>^5uek{Od5>{|G(}fXyR-P{p__p~a@XTPo*d_ag)7W)W zBdgJY&W1DGe?y?(D0zZ3Fm*iJ*n4)NeY?&Q6bqX|r!Dk}Cj2>TZ*4Wm^a=k1p`ado zhD#cX@PorvOeowow8N#FlY}ya7nNn6n!VTXe+49bxBU%+)e5ds?dK3 zhU^HCFE#E0T;67{j1ZhuUHoy@j+|-WtPZ#dfmbMI85IJM?)D6zx5W6A9HQ~t5#?n0 z6%6BFPqUXp4dA~S`2SmPhySmbmSMjfllF{KB+oO|-qt9wv&{mY5UtdcDfOrUS9^(^au&gW_J5WsQLFs5qKB(+d zNrus=DIT6r(2sl~4Ij%7?VqM*3q&^glcWaj1+yhGVr7 z3Zu>!j5$g^18*nF92M@1Bm=90Xuhb0h(6j;;a3U+3tLt5K1BV4$Z>B8-En_+t{ojLgKARga)3?*TX9G`l?SY?nCAA2UFz^a8ZkNQ~6C8<*p>sHSPE5BnBr2mK#FMO3jtV;o7C^*2jCvz0iovgc)FJ_y~TTRfPTZ zp;=|v;WxykAQR&qB^Y3@0y%U z!vs+`1ks`xA4R;nMaf4AvxfIdv=jCjFEP&#N&TCi?+URsY-bQNz%Gr zpiTueWe4CzoJIg%7%71p9aB2R2m_|N>AI7ff2v>q4_l* zYc>v8{nBmtmKPdOC!hkBE4Io-mTssg9CG(3TN^9ylitvvIN?4m6wsH7*0BU|Ik zD#HnUQi-S_O|~@I)6*LrQ;!nq>vAz>M)piqykt4pFj$7iq(pH%mN|(3+q)9(avcqC zZX4E5pB9bSc5g^0(g0|9&>Yesv2&TNq~VA@Eq<)%Nvwf(Hf;NqY}`c7XfHF|Z)a;s z#P62Jei|e>EJLNHzua5<=BP>>x2w_~i;x*R{L1ltUEu1~j%t;>Ucb+%@^0UFH2-iO z=8EJ&d2wwq-BN!p$p3ZCvFH{H$+EX2M-qk1K@XoKdC>92DxO_s> zeV0B1m>_@KWW3gyEAQWtOsqhk-s3LwMUX44?vqxu#ySPT$7!?j<3iQ)nEf^*o$C*8 zI#C2Y9cHJQj?T2z6b-M995I6j65Q?RerNOL1seE?{x-1B?%Y{d85NRtJ+r4es6U*2 zNU_d0?aV=yzpY=4>-X>Iw(Uj5-qTh(v+CG8s>}nYFjhLbcA|f$nl2w+dDxv6=dT)|R`u4zD5u~UTRIn{k}1hIGW9TdS*zLBavoWhp^c`PbM?8O7rR#1 zK=hYzG(ZfSCooG@w@B8bppp z%hpwK6749~3%1!hh?vk}G@^n`Z&*GE0k;&>QHI@1XV@sA zGnnOadcIoWuE8%0oTUaAJl#7`Wi;8$(FqE+Y2Zdy1#FY>*d>%o1xNkb%O|?dFf3qe54y)N zwxYm;H_*;lIEFkdc?3OB;8{lH(N#zA$FGP5YF$UK@)7C?OyR)I03IL)H~^l)LhDU z6|!JIM*P!NW9LXY3jC_>1Or=u6a$riA?XDeP?aQoE*A+{mc_LktvO2vYbG-Ky{XZP zCm6wrdCgHP@O^>&koHKQl}i;|k`QU|Tp{C7XGliPK@P);{W#{|tln5BY-D*Yk0J7) zU@8AwS0K<7%AbYgwdG#KIDtp6xq0_jyhX9^6uOfgjo>B%cW0*EP?%1#@$a=e(fMO9 zvc~HuZfyt{0Odo~Ct5V!jWxyK7)=tODMuV0yQww}x%g*tB}qgTY9N33Wr&jG3%3&2 zqlY@QooLYFBR923R8XO`HsEhM;Gly~Fr{WxrNlx*l|o$?g>H`hFu?xJ&-M$$Jll02 z*dL-^IUP;3Q)nyrxNnJJ>6T~;LOFA0^2-ONMmDSn>{L8sQ%y&;Y7Kl<6Lsqb3_flM$vqQ-Fc21YLa z_%H4-A_`CYau`MDf-~$3ZtEG%0*$wNAO*X-ugo&&ynofYZjF6$&h?R>6;VR<~)1{5`3*9QZVuA zO`)3jty~AedZ8(w`pHY3yVNc$4D7NaaMyc;;^Bz`UBG~WsCt*Tgdi%z$BS%$8(}n0 zHA8*AnK#Ts#>Qm+CukjjLn($+bJsXj&i!Y#4ZzK)Q7`tYqtOu>A?X7d*x}~}(YS@~ zaWj&1&j*$!nG~gLSIJ1*<&^u~_}EFDSf|7oUHq!N4&?yRPfF7SWQH3;PsQN{g}tW$ zp+2wWaliOKtoZ|J1w8_rV`yT|RQa8LLAvOjpz!(F7PSPlh#}j69bov|F8B7IP|d}F z{`qJYu}iNlHN&@@cKvC4`0eUS&m~;aJ^dBbZ`f&NA%D>48`41y7t)$VI&p8|YL0Xq zH4D|UOe47NqCf8XA-1s+j$diM&{-Ch(tbAEtprr5<#W_DuTrf9MSVI*tf4LbNo*pn z4dSo}lfvJt7{N$c^VBjCj+3SDae93N8evpT#V>ss6Hj?3V;XyTRQ{z!43C%cS%m1J zy?{5)u!EU$Ki?8d&w@bOWt)f0I~4yp-s8EI+cLj*i6P7555}wr=O%0+3y`9W%KbLw z2wyqPK{b`ZgTb)lgj8m!MtVUTdo7UJA&&h{6+gtk|GvURN-M6d?%k#MWF6Zoq`l^2 zjg?{*;;aAdbSBmkA3+1<4}Lt8`w1YqXn?2;2x~4(I_lkOaaLRg)bTU{MK9Efkq`0& zN3X$!)KzpI1L9QtH&&%P_`tZ&3hJ*PVlcruWG=YHx1NmK2*Bbym0yOodm3Bbfr$7I zEn~j13ZSpJh{UVvjh#@EFWvjEsZh*9+dARZpSd}igtb$=ZN&rAmBdmgMKwCQiUB{c z@GY=S)BAI*eoZCHQrD54ew%{PwNQ$gA(DT6I-^P02;pZ$ zHQ0tj@EZj{=&3)FGf4VF7~_~{H#&#og&l-yPMv3LbP 
z1vXKr+~b7gv;65-iDY295O+HNu0v==bY8JLD+zbY$Nm1B_Z_eC;sP5C%HR2*S(gpR z*I{}u80P7?R~AJjlrC}z`SA!9c|*iJr@kc7fVCAyQ`^J@%SM+i;kTnI32MK^3^C`o z6KlT>E@_d&pIO>7Qb#j5$K33S7^`>~`9`1U{pm!XBL*MN4H#-roeMnh(p!Gx;aKq& zd`INbd~EaYx;e4k#^$Jt5fl6fwdG<$`xf_-fi*2?*Fy}oWMGnh^Qkfp>fF^|lo6Q9&PR>#`eUb#2d=LplZ9GF1hThwi=j4w_qx)R_d6>?lSf5* zm%dLaR9PZKJw+oQ8?+&x??}AJRGu;j#YsJ*MF`G}9$M~sjI;-Qr6>r&;DiNIuAH4d zI|RS@yyJU*Bc%&n3q=u?lo9k|(O!k))EU%}wmzv^%sFCU-D9EN+=cEU8pHRK(cSKZ zA};anAhI+m8FzKyl(EQjqSqe2XpA=V3D}b5LcQUL&Nmnv-B12=JkuC+Nff=VaH8&q zbP!74c zK5sx7bLo5^FY8Jcb6LwnFN8g~BEk%du_hB6jauiywmrxOQ^r`IP!RzKd6Eij|8rfl zry3MzkjDpv(P>RKGR^z{(@oRVvNVsEUZTkbp?IS)Mm|5Fl<>5_cUfflRcxFdjVRp@xRx0Cqd9POn*o_j~=(xA@T|p0wQa^QOf? z63ktk=BZ$ELg&elpK}7XMJmKOak3TN!ItaYy0vP=IlO@)lFFuE&e1+U*vqenO(#Jb6h%?tOybOUG}0e639L)ySWr2(flo9 z-lE@J_L*HNWS?OHWai*6QN8|8nEoXh6vRvaqT`Bnk6|Dba{e%UB`YaeG0b|}E(#w* z2FI58O?9K+2a3Rj)gnVbK&ALmUS|VEjg~EF^xV(M_x`fDq#9>ES6k3K+M9aC18wTq zO>hw^ADvd``Abdrm(REja^tV=|4e0h9$TQfUO6_u{|{ zw{{=23V-T*dT-NZu=j!tewl!N=Ln`tIXdw_XZRR*S>QaR#6v?nzyY{-2^SaN#>WZU ztjxi~n;gV;q~Zs>@(YlOM(7Z1?7|d{AJgT3MPPKzx+5FP{-zfI+|t{VQ`KC4MiE$7 zurC7gF#qc`e(X!ukV^(U+&-Bo76P}LQ`p|51&(EKrYj5oym(H3l-~3u4oIS6I|1SK zLff%3($!t9=x#qgJ}3_HqM^czm0}~$x)z#z$vb~_`^?ttNI6`2T8+Y*CDg1CfWm!D z?`Z@SqIM6NKPI8ydrC{ty_D8A$??_2@Hg zLF)IyJ9agY{iDe7il=^?3$l)KJ-yRu7p6QH@0}`S?0ia$^096Zx+kTV$?izhmpJ5$ z(qj4YV}^L>k`d=EG<=nXQQ zg5qVKk4W7k+*G!X1k967<4X$lY5z(578O3Ee1!%kEzgqY>r_1XTH>@%3MNIVW$wMJ z^;Vi7T$P@tk6-&hTXMv=w%n;OE^>3W6k%&~+V3P#r&pu&soN~Aqe&vnOex<{fpLpV zhI~G*ifeSj^^0z?;~S?@SuXK>?9=bKus)=&NVwLx2S4;l+q57vj0R z&_$C**2_gl_`s0>$zpk3JeHCv59oiNkMpW8skny((cNf^-ASI#CNp^-kP^AVk z770nR`&bZoZ)Gr5(ijWFs@&ZV$NOZ`(W`Q-JUTI*6x8bJr29!1wO`r67b~Tq@<<;G zjjYH=m0*283`gltwDMrMvG3B?wq{oeP9Be-9lD`b4aI{e%&|lr$2SIYq9y_yW9;+A z@Z7011))9>N+LW&{}QX(Xf=;$6Na)R>b8&~NcX?!@MbJnMDf}rG-&%y(oIauE3|d8 zF>&-D>(tiogj=8CpJ(q{H3!XIw{GM|n9ch%!sLGT=cUn*kpW>`o1RDO-@l+<1#f^M z&xigj6cW6Qc3ntuEETl_uyqm*P@wdgoiC@8X*ehM#Q02|X$gDxt_vmCAHny8=uhkx zk4futIRZ(+M8qXvjWn|h@E)SAuP;@1F=5bj@5BLDC=!s<7;kKO0ecx0j3({+N$PMt z_f5@*x3&75rXWW+yjERMf1#gs;9Fg^_61*Mbr3N=mf3t1_~a2!MzFu|*_ z?oa57KjIc9j&Tv7&{9Q=%55&=>J&>Ve=A&N-SNIR30pviKsp6(le4qOiD2Um*?T0ZkAtIc1R^7r;+v{(&c;$wQpBp0SA zLI&9VS2nZC;RdNc)0cWt*G9UqdjE=_9b^?KoFQ=pmS>xDeeK2iYjnx}bsqbq{|-2m z@gd%e=cA0pKLU$W{Jab`wNF!in6%<=?z!0hnwkC7j^OYb7#m*>UoV)JY8!n6Cb~hq z(~nLp{=%~UmgiP{aqpq(C^nSJ#D)y&pK}(ss9b+v3;I`5tvlrghVy? 
zZt(9A+M01m>s4!Ff7k&79fRS|>hYY(ZNCmw;4fQg@sBV+cB)n}gJDyqP=O}})87;s z)MTBkb-^cDI4NrmgPWK<+&U=xpP*rEe8byCR1;3Zoq6EE_1O(C19F5dL=zvJr?3M( zumi~mR2>a)P}HiMJL@%aP_3W4Ip@Y+uyo0B99Rnq0Vrg@KcPa5i7z%omk2- zr2cF|!<``La*UjBJEzA>2a~d-?LE&VR*pyh^wWC>Zhvnq-))Ri=-8JGA8|qsPztp7 zPF-LEyM-HBWRox9q!|N-SS_KqkGv|=82(euXOtc@nKmeiAP$>#zb*NYFMf+VJg879 zdOjts=<^os&*qOXvA=UJNhpe+#s3lQ_`gB)+9#zccg(dyr6Y}Y8uA(UIN^iy7gKdJ zb50-?Fl^vbCWaG;3X%J3xt(&ULqm0LM>gXe-kfZHW&} zpLW#}4^t)u8E3|Y`EBNIG{gd3^!@+D?|Yl)g>lfKU`#FewSNZ7gP9hALFqjfAD3l6HGf6x$3(q8b z|Lx&*=DeLPDP@ofrU4$WiO@5cQJ@VD0Fa{ zGQ_X--ZKZp!5?8pDH*e5EwDflEVjjfQB!`N8?5=d@b%+SPWoN>)G?g>io~`!v{37B z3p={jh8n-arY`L1Vux}j#5lnzEdAIhN0W2WG{4E>3o#I^4PD_{3=7y_%tR-33G- z42fDOY`tCo^DC0y(ODwEVj#6GJlTj(oZv_ZiwUE^o9?rOvhIC;fRdVU9j|*);C^LO z_)0$*&mlaY>o-*8T*>=43i1a4&1J0ng)_njzF3DXGu;l=S9wila zhuXrfl!NGIKO>1uBF#Ns62xrq!7KNK+_aA<_lC!69dj<=SB#Ee%noAI31}-EkKCUz z{VkQHDrj)k8irN=q^;Pu=>VHh?n&3qB>uq!_NA7~ze2k-;Li0o22wE(LjQifqvZki zV`eYil69*8qP2$YM?gmLl7g0!o<=mUwKa3nMh~@lOzzTJG8V57$U3KmL=dVec`vIK zh6Iq$ucQA^yfaOYT9u>xToOTWXKn#*uo4_)Qf83t;MSYm$CftQW}6Ytfr%6 z82T>bi4Wj=$`6;^G?`BLns8yOi0c8jU5i`qRy*{b?l~D^mkwCuPS?Q(BZo9(lX(A; zTklRMgK4hli7Np+bIDvmpEM9TWCe;3WhN{;%4Eykc0dUfgAAykqJ$({d0e zn|@mA#N>Jf-D@&4s6X+tM}tgISqqwu3cO42DZUs{VHA)K&$9|^Fu7b1;8p8Tyf2Y- zo*CM-rhd@ps2nQ(Bej{EWahTi!+D>VMD>@XKhy4nTl4+pTPZ8wgDqRwg1{Sn$MRQS!@(t5h%byBAICpC77Dsfk!4pxJOene? z|Cqsh?P4q+XtW#EeJO2gN4bBOsIb-_LzQ1Veos(_ViZ~;>L7dX(lw9WAKOL*y20#{yJ5>>67^4Ssuna7O}nB>7RABT|i_>p1 z>wDK_a8tzz6P&F4%RorW3e(!*^@|gbp}pTJUHSEE66nBUx((Z?f$a_Bev?$X2H!v_ zd72@yKjV*`JRMl{y2ieY9#56&4)8?f8?+4hES4CHCw95^V|Q#_k)WfcwR50ydV+nQIfNn{KYFMF4Ng%{Tfr9)CxHMG>!{9*$4$3aWkb||baL`# zmsv=h$nUH+HB2kJl;ZF`DGJFz=}*%=E9TKZHGkG#zc>m#xbw4ha7Z+V`^y^$F#tF0 zoE%t$UOvim(a_kWk+;>c)02j#@)bW6e5XKqpOg83R^rNmi$zAa<^Q@T!`;WkniAk| z@afWO^CN+XFUObKHjgDF+ip|$DTJQnf{IiO?uk<&DNLP` z&h``zs16^K7XoZD%Jz-zMs;^V%+Hg~i*mSaRj$oK$!5Q^ZBP6=n?#hRZ$_BZTO^-{ za^8(MrI=j}M>jZjo%|j#=BUy&Sfhf}mgz&gYs*Fc2E-9zh{5r*KGV;{8eXqFB#FR1 zw{AobNuu(nRZIC@>EB=Z$%8n%fG(k*I&tw(r;!g1ud)wPex;PWCS=?{8ySu=%{@HV zoN6vxHn|qf$}kvuptw)XCc*2{!p|Bt2Myf3nAf(Y(O5P0)4srGCVk{V$dX6->NsE8 zs33b}#2OaIA%+h1E|~XCi}d4?=KxY9!zNIE!P%Ua;dQC;H``kP@Ef*o%60ZXtj$*R zO8h$HQ%z$i+Iq5I1m$eZ!j9~;wovumYW^6nfQ0)4z0adme%s+zI)no=(93_w`JTn*V`d08}={5MbeMDZ{Y~Bef-j!YnDVn$;!OL2)$e0 zA@AHWiYiq*O2<=yBVk~b>*rf4EkiS}KOg;r`~>TRH|ZMQ+G{lL@_Eaj?Pg!0w53H; zF37F&IFt@+=d~Ft z;lB*o)d2YFqt^0RR?@p1-Q?17h0Vc@-iUF>M_kdTY-Q?!Fl58AQ zyVC96vf83ly}?5pIggeTcWmeK?QFP_>Z0fKr>~-ZHEDPMM*G*^!5$sExZ>Wha#;z| z-YkE|zYWaXD60@H&sO_cyPnn5acc40V%NR?;TMKIa@%8?bPj-*(AZuws!5 zfDe*Sr&=rR-;j9unNs6zYTwn>N-&zc*XDZUjuw0$)pDcQl+U=i(d5v?+}zN5jDGqX zN%2}8;TNj_n-KTyXE;BvV1u#2MTeyNmya(tCmJSOvJN8IaE7m5CsQ|Xz0kYragU=s zX#RPIJj^m%Bm~{R@}A#)7V7dZbx=XhZ)4JouR5POAJQGP50-l+P_s}H)XPzTT6*55 zU-qf@(8WmEY)e43Wi|G&cL{SN=U(3Q^QG1DMd=+3sX&qY zeniX4TbPf|>nWtlYLp~&YB9l@F;V!-xB=1P<>?H{v1!*rmBipuJ)byzYzurFqbtLj zvs~9gN;k3?k8j}a*`n{{M(JFos#&U8Depp;&}N37C$FDrZ>ANLJgDMIJ5Gf0R<%og zo?i^J+&?d8Bdp;CwDoo=b*Bxzugr$vR+OTD_!gdl7VA(R8WV+`=f5Q&v zbv68nd$bmBdgsa2pZ?(_rK5eaZ(7663Mh1SR{AJmvUf$uO=9jp=ph0o9(_i7==dzN z`*!%O^ z8cADnCa@`=)s2nfrNbUQ4-W$EWV^(yE%qm(ieI zhC&fKk@Es*8`0=AjV&#K1?HlS4T&te{}*U)rtHV^pVfK&Q;tVoBa5ZZ^cM`kG@3d* zN|n|pn43Z$^UmLkJCG`aq2LWndBXHe(Q03@$;>^Gv&X-U+DXp&WT`7Tb<*EK|S%1-)W z;EOP)G~2e0#8bbL;4Co|RUqjT5$n7yGNNUf}UB{HzoCdk-%&nU$FY>arSqe=-sZrDBQ<|L>hSP89 z+MCnz@!=*AXXg51Mx;>=s<%VGKh-eOc=%!9(z{&+psn@nu7#VT0B~iPA8V zFG&`OW+^0ctsYBC4QGP{`7VV1q(bg_kRl%KxYE*~e2Cnk5W_7}Ds&$<5iyof%7AAo z%MaI=#=LSpVYWhp?Ns=^GAl`O0a;SzB{>dYm-aM061054x~U*xKrB=c;QMErGLYpm 
z8!}p)1TRSn3M)c8U*~;|3DAXzsT~CrSCIGx;RHJU(^o^`WSTl;tO}K{ueE6&v7s&%KJzK8PEMXEMoD zWCTL{!`^Bm86(8xo{Au-K5TU5Z$*QM${>XZJq)eeKnvlr23F&2pM9_+ihw0?YSMhg z7h31cMAdDuczR>JJl+vwAvxc=`z;lCVnR*_cgzK9mv`hmWiEDDqYe5VyVX-HJMf&2%zz(krfy_f7-hOvCJ zq`(HFjF2i?AKAHwDUE*#b7CMP^LR7Q?q$=yI4B@6^F(&<^@6^eeIXZRbsi zQFwiovRgtlTbLx=Y3lMe{DSSb=x-iFmeHiAkQ0$lzjk^JUI79Cl9Bn1F`#Kwef-je z7Yp-AR5d*>6ZCO61uSd6JiKd>g=wJ4@@;3IhkOFJAE^G_)rO6Hgy7Y<_&vJc4tB{k zuU4w!;=V?(FRB@qlb&2p;kx2n zYiFQsLDO_qKXg6>%pmW6#g^mzjZ4tddjgx>q>?XOaH-9_;bVkQ3d!vGbXEQtxjV|- zPe^v$0(*M+-G@Auh>Tc->Nxq;{`WZz9oFtZ+GDiy>zWCUjr~t&r?$t4Zu{tU1@6Iy zQkAO6Z$~Ne9FDtJWHS?ZnTz7DB4_44jKu%RicB&pvaCW1PjO7^^ZlfwrsPQ&{B^Xs z_R83pJ}Sr@_8naOZ%YyP&!p#1IAr%uctO85?|4pqAw6bpzSP-CM^Nf$32m=tx%d#Z zDe=`W_=nU&H#oY4%$iT`;?)dKlf}}dSfmFnD_n)C?l~T!z>ODXu=7v??WFrQI#pRv zi#~#1;44Kqu0fsB{!^1ILeo~jD7n}Z8EVrLL9i@3!skRY6v0HdImZx6QSHJ+vmwJQ zO@-HaXco6kdx6ns1$6o%SbL@c>^iu%-t-%sWDyoAj^^l<-iQcM%4)o`r<09iJxw$# zKR@*qa)&b|-8ly2F`vVbJNnYNZRF6)-WdkZ{J^A#T|L=AUSi48Ku@(SQ38mcWzp%A zlK1ibm7~&!(V^MUzWlOZVfDw|8D+s zdsYa$+HAeX>EIO<|L9%wSTc<@!_H{fUo59w@l zZmFW4dh{#>Y1)*)Z7HZcWM;>Xg*C#SO6f}qWv+7Ljz(IYo#i+-KaB^Oh}}sF=kZ6c zD&-~X#|(xo0Q|9XgK#s;vCE?BP$zWiB&jms*#2YVIpWBMb13B+uYvmYA-F{k7T4)c z7_2=}6o#QQvkIjBkye2mCohL3NwPqIqglZTY| zX~>!5EwuJw(}p_J_uX0`OsvuS}kgTDC(52@`Yg7c%U-W zdo0c`eUL<2%30q#2^wv&g^=F}eI1d|#ABI={V$1NME?GqIY*!<;4`pD7vfOPUi*>a z2WV8ALFi|0Knm2+G@>WKJ2!lZxrb|V%gni_s~>jPSJHh+RWPed9ybdpsD~m`3qoPm1h0vMMgLd8WxqQ8cfF@aDccwI)9JRrMJiZ zB;itQVCqVti;(tyJJtRHu_-)7s0F`s=f|)Lqxpw!6|G1%m`LI2 zpONUN4+q2vk@0T;Dc@3r7AB%v-kFiP(+yL1Zy<_5A=ES_M}Cv{g~vKVHpbl`{sFK* zJ#>x3ofyl&2w&H0{`l|_fLDjBK^dk}TEG|o)^jWLeOaz&S& z@i@%I*k<08JyJ7>i`;QHejOMmKaHKirrqOvn@TtFW%IQuT`gcU53ky9cx&ix3yk&Y zSt%774TbE>A}myj8D!n+M%?`+goy8Z{s$XK;71zX;+1+rfr~P*z12ce{14CijSp#t^R-atwC@k2Bma_U? zduI`D<^j@+Zy{7k0p_&Bz|x;Mh(!(wgyR- z*7^*g-)w=pR&{DrbHZa1f!7?e+tDuT`3~ShEj9DJ;^EeV$Lxh>%PFN_m(%sjY2x9R zM%?^qx+)BI9=%h;0ugJnudv<31TNgsl0tc=-p9R&zt|kz-stVV^?UM}&Rw;}TyYJ< zK8Zk_M39reftz(;CRt!My|-M~5*zGZ-us^IUb4#Nr4V^m<#)dM@<30uvxA=qwjl}s z)H*8||1LvtM}HZ0A4`dIUKg`_-$_0EztO44NJ zP`97<7}PXKP3Xaji>AuNao_S9t`L9ND!Ke(lnsU|XLphEqdt|UarZ)KA;oasXN9n+Y?6oBcY|E${J-uab~ z3rxZpr6z2|wwEB{3kNpJ7SF);urJ17l4<<2PFQPnCS&k@_h^?PS))P2Cxk1i zJ3LZRbr=bWzxyA-+XGs#I%ZF4+dAk_`OxQIqAh3=q?KCrx@9lQx_03fuT0Pn9VJ55 z4nIy63{L7MloE0R5h-4esB|q@5F9*7EZu_0GvPIQq=U91{^iy6^kr!sHcjDwd~rC( zOb6KKY^TsEU1u`vzdX-8*z+g86H{52=1LOv_HNZS>vAY2Com?a*u%-NaHH+%P#{Mp zEz5K|#pK$S=*GgUu}V%5%1z@c1ZYa8fxD{<*Y$1{WJ4bNy^kG_MuVPT4yF%op7%SD z6+n({^z=f!YoJyLJKo^z9^YTqNF+Nim+VFZ%-4;X9r)oD=%wfW=LdY~?XtH?Dv^AT zd_bZUx}7>A0$9u4WUXB#?Hf9!vRmALIQMI^P?0PZY)%YBpU1Gso?8i0U$RSpJi)&mFRWJcvf}BbH85CGOg?IuDQt>@%`(YC7m&)eNTle zf^}YT7vnm9pS#2)D%n0(msIXP+Ak>Cd8L&|7*@y7KvFOSEh>+ZPkzMnn0l?^U-Q0hn2f({0&4aFRq!GMJjLg1~pimSo&<{7sV zd6)U2Wv}@FHIf+ocC4_Knz(5ZU#}mnZUWKG=Nltwi5zbxdiyFqa0Qek4uI{W(8KN` z^*isSi|P^m3E}H`1$RaxVU34TkzwgrB0JHYKu#?#E|^(^K!H_E=C1LX7A}ul&oEqz z=qa6z4}K;Bz*9~IZ@$b~A#}n{BS|%bd548)GJ8jsJxHFW1KZHFW;L=}fap>X2%ac^ zXK9z2?B-!B8af3YzQqi5Dj$M}rMn?bm%rruhkXpX@KqeJ936J+0F!9FCp+co>ch%O zS|1$wk<^nm1c69a2?N>4Q<(-_8O!U*-q4Kp?k`_AvBdf6jr#TNl0f%);{1J(F)v zQxJ|xNVk&4qrOU?hQap@3(NVz184>%Y8s|UK?KdhZU&yJLA_NNlFjgM=metYpy%;7 zw%z-5C6?`b^~tZ+73BURGW!Mx(7`NE*b|1kruJG9#1NO@o!rRw2crnZ;C`h0>R){e zIeQ*df)h|%)8hA6t5p6X{jW;VI7upPx_yMX%*!>-K=P84%p4A&$_M`Myv?Gs*?zWs zSwMIj*zAiV8?DcQuU+1c3^a26@XB_Q$@z7w9Y#H&88$^o)s~V#iTda-&64${Ri7h( z43)jPt{ipysfNU^spq_Y5^@#GxY@n?YRzK$I-|JJnn}$!rst#kxrJH4 zu*lghc&))VygtIWN0;s>Gf)7hXm{w63R}OMJxqMjs5JuNVV$Z|b=9~B+fj}SV99Y- z)J1FQtIv`dM}GXPnlKWZ|Dtl;H;v8mv2vL3#N?#hPr}}_W 
z|6w|P(5xf7bk+N1$N~a_4-__maIkgarQBleJLuyL$`?W~EQDkc6-o0(0DwCH}ok70#RSduvVOzbEMA*q}@}Mtmo|JbIzpndCY#pCW zbsz%%TQ#jlKoRed3Hypy6#b#noO3nwtF$7B0CTHs-ca$y05LzbCC#KM{fkH-=lV%&K5x7Uu2zi&AlW7^cB4W=6*FpWWDp6^G)IW!TejjzR zQS50jAWM>C)UFyGB>#z*BqO+mi0E4z)wU4dDBx9SXM- zC#^bX>_bVnE~0O4jFcoF)X^(saXkwnfWKJ`MHyk8$+wS&Le7Rx2sDWvZ^C#azOx>o zhrr4fD&m7jvlx#E`yYL!t`Ak+=OoLVvcW9{4^3=iXGv69iwxBgJb&+pa=7pI#4vPX zW5F|^9a)R`@Ej|ySALaCO_%;3H*B24NDlA~ykCD3g?6sGX@9tg^t;);0{wmi&tGwx zElIiQE4c_a=+BRWu((LU|C)T`aQ!UST0dAN+Dz;Zbvpm^=5$krh?~k&PuPJcZ{>ru zf1veYTJC-T4@SOlRfH*8Tx)#F$Bbn5`t;#jE2`{egSSZ2GZ3-F%GN)DBQt;-VxeA2 zDX0XqH>7Dh)d^)IO3$`J<)c@OOVaYtrl>4*0jFN@aaR<3`;KjeiC$QY zY+l0(k<*3Fd^8XD)#eJUH1=K=swE^eSB?7!S0kpJc&(#R%~hP~!hG~BW67L!uEE)n zNO<)^;ucgOnQ!#B17Xr%YVr9uTblp*H4O>UgcXrR+0dVh?yXaJWlZFMKKpi`eRJ>m zU7;kaVkIT9?;4B!N$I_4#jfQI$ZQS|UM<$}W@zSou0W-aEYuKXvM7w~;DqAgkK@B7 zm}<09%^tMKvLaS{8)nocRbicfd}%9ZnNu?%O?hnAXsB^b3|WaNMf^6m)SHvSUCF}N z$FzXbhhX*PQmc0H+`LMFIB1E}b^s+lXPk67llIF|jk(0yP{HkGn#=`Jq?QbnQt7sr|!Fy#mZ9Tw{VI+X*T0<^sN-d_B)8w6kln%%vbz@sS)ngL>QKkiw@s z`9<%QrbKXA==g{=h;-#92oJZ20%B=LzK;0d-mYRFO9O0^@T-_&MCvw}nJ7g_G zb)b}au05~ET-`3_Q11Vn;UfB+1-A70WuJx7uKK@4QKkmaEeihnt*E>pRHeNI!pO{~ ze1g!mzK*CK`m~wtPeCR&vxJ%ayKZZw_O%8S4^^V|Ytryw%w2NM7jps;u1wFHnQ;k# z+d%=q(Bz>O8BR`>tf`7^@&l;&R2odL3>B-dXmrl{KU&HM6951U}zf&7&lYIBQxCb}YhpB_5am^1hoNx?#vO zZTSp?(I7`%ZeE!i*WG`ZQGtfb1nVyuhR60V0CO716$cQFpPio9Z?E&C*29O+ku>(4 zJ&ND@9fkq8qt|1L|5tbK8P??Xw2RY2k=`-1M2d7okkEUiNK-&U2a(=FSE_W7K%^>F zI?@HCi1ZSQfFK>IL6A=9f^g#dx_|H9`~T&9_+RH-*U87^d7imvX02H>nfqC*r)nuf zY2g}*LUwsm>-I~hs0z?{iIEC6zQC~{Ynpkj_>+}Wc)f3)LJFgOUIg^#krn0?W-2Hf zFmZ8Ck7F+ncUpC5dKnj_qby$zFrpabj{sM(a88e%#d-je`|DRw@_7^d;lS<UZtTLt&4uQI;WOUe(!n}6krIfn~4E7JWZ5XDj^z9NVJmMK&2eEazTYEEZQ9$ zYArbq32{H(sB6~?Iq-xVf4#X$>?H64mk-KDV7&lr@)?f)7&1PchkeNujQ+O2h$n?; z5gXlG=A=zg4$F^3ZKqsi2%_I???s|gFY`25vw;kBh%2pq~(rzcd5`u#h?Vsjp z@^NoT#3Jp4N|9NDYRb={)Uo4mkoo&>lR?w{eU?MOQ&;RTIGT)p5&^s!=tI9$2;RZ0 zF*mgj#A@NEv@MqYd~1{!vJIf&x<;eoTGyuz1T=VElFR$V3nmRrZ_sl#hl| zYv1WRNc9qy@A|bziSIsnGe@?%$|v){QqWbP;#Ot5-tz*AYEDEKP{E0f2Cg!NoULD3 z9idu-SgO#seT{ge7Qhbt#$at1DGQD~TGkuE}JOwr_ zHsVbLL{o1EeYE`WOJol|+rDRDU+{$~t?|=fB~FtJxH<+ zJ(MTWv719ybVwTQsL7M0@f`X z6lArFNyuD*rcJ%azI!q%yenH~{9~}&FU^RALe?lKg!pr_lY8+y){==LfXzTWBBI&! zp`fC|8~LLwkDeIQ+uKFDNy5p0EhE6tyJ!^ntP%;GGsEX%tE{(syr|=naMF@W2 z_+sf%bXVE;sxpY_J*`9o7++MNK-vbX_>2koiK-r)K$feXTw6$=Q0XX3di;30WWDqx z7cJ3>Zn#fHr?&?yrKYKgw_?n-49YKr(P64Cb7FoqAB{K8nmNAjJ8pQVAT0*ORw|W@ z+RyEZm$=ZVrSF|2mHnZU89W9L5saqP7Kd-R6TPS~8z`Gzu!?!~EV4=}PT!ys>ExbO z4OTT;^?E~W-@IDGcl!2Az_EfL1xb_Fuoa4dAi_--<#f#>yK#oNk#D2@OVE#fY=?A~ zs{~)kJk#QH0vUfFtN%kM^mV&73W?hq_v*4SPeUT-|tl* zSH*BTdKcGehC;M&xnwb zg`DNH$`n@%l+H4$iNxRAuDD|vu&wta)s3&C6B3*mN3b>=m9OiUG>Ym*hE-53CB}-w z174AD8vR>-8(3JdYg?B6G)N>9Zlb0wt946;=TIU(*|uL+$}nvB%V! 
z@MG8k_680((VZC=;Ljr>yVII0Ku6mM;R!wWQCT9T-}p#(4^nvX!PmSoqzS-jdFg(j zcx#hWL=Je77NhU%J{9`_NfaTo2Pg8zc2z$0QW!~Lo{(4j0iH^3qo{v;9O^{) zg8`a|nTv@VD}|AI=Tl8yjCE)amllh*&c`0c;3#+x66nBd;>F z^<&+OIDB1BGUSHjgRKixXh3LPI;Zmafbv__n|ry|(5I4{DC*+2+9YIR+KpSSheS@k ztuP2c{CvxF>DOmXjp1t>+y*QeOfCxtncgLeS9!r(mVMtab% z>v1aAfkLa|Q8YcfT?XHw)=K8m7(xmbt2I>IRCQ@zV^a@<2b(7UsZRxmFwYazu4`y> ze{I0ex1|v>7jylFw;myk{hgl^x(@IApzn?5fk&>lABre_Wlth`hR#lmQ~AcWiZ2S+ zTfuV+cHR6&?S&Q@(;F)bM6>gircC9rtY?@c_w~XgG1?wi<@AkTfk9LslRyQI%#;-C(mC6|^5fime_J;=7dB&Z&+rC!EbR-Iski zF1Z1^6T?_8sNl)Ts&nFh zU6Y$U$b5J$zAcGSRydY8M6TbVg=_5oD8B()RgS?eG#yUA-g~=%WwIEqh&Tu2D+mG< z>$uV6UIABsyTW3kGw`q~F7KL%UHHb`9)XcJSWI>v9IRh)Z764}>2e zoNl@Q{OujXfPn7e^H%Q#>9QVb5nA{aYmR)_ZF>WfBd%{}L#_4s)mZ z*Oep944L6Ap@@;3|IusRluY~IXpE;`BqiP(JvbM$kEi4!N`^yht}>lE)KTZYf7eNV z3o8OcBqbM2RB`aOJ49c|ZYal7P?7|4{SOg#W>^sm3VrJ%x#DukOu7SWFV8(I33)iz z7ylum{I`gk8)uiyb-htE_!>=_qQS%UOSjOM(*F?QjTPaK{|_6CG+{N%4E~3R+TSAn zWlP9!%?|heA!72kh^XewdF-j#_YZhkjsz~xb*&TE$I=p4^fp@4c;2m%=}jp)3WiT^ zR&__cP7L0KPe%DuEJPFh=)4qONiNBqjB4p(zAH0F+CAJY*WOK;t2ok`gSDKh&=gI+ zAF;?=8hPA7SSyXU;I@pW=!o_u!EC5*Js549_RtEkPXpp6+>D)pfYK#X=0Kz%Y1%D8 z>mGdj(c1lW<(Q6CG96AHgTX00Cq1&J8=TRr)8QKPL}Hx6A59+{NG>fFPl~DVbv&S< zgqKf7cij&$cGI4}EqSWP<@VnXP+UU^M=Q#NH-Lk=Pgt7>^yG*s9ls_gGMWvC+7Mkk ztI%0yT-Nes-|K>m38@-`q+lLD_^-`%TC%}5(upg5e{d*flQr&JbW6tXiDT?=oDLcj z^L3jd!&^4s8PsV9_g#aGL-D{2zb1Q-i??Q!jeWTdGbXp91pLFYBjmBeTqNxln&6AN z3)5Fj;h8LSA;P7n9cuyqwDAlHGO|49QK2Xup*bEGhC8u$0tR`SWox=(WSM@l=lwAy z6xg&4Xk;>wKZ3>L{mPB4VR^>Ici&95mR*kUQD$l6hg4-ji?~&UDPR8JEPsVZ$fQS= z`W;$K`Cy=A@*|O@`oyp~LsfAC8J#Zt^M01PPi5S_5+T~H@Fc#Z&ql(ehAR`l{O zy*KaOTnuFr>Y2+kqg>s(H9kHSM%Q`-vM$GCB)*XsSvPs7WKFXM`yc%coCf>V(JJtK zICUM&0BW1Jh)7&F8|Lzkn4Ny$qZjhlp0t-mQ_8o~GB8iOit~D+d!IhnvhzP`Nn@pM zHY<>LtMYPA9wg#1)>sSZP#;``SdBV(&*0HMK@t}5<|fyi?4a5L&6t=n;&*f&DtV)j? 
z-V(4j3=^(TM&FlWyQB$8yxUs&wj4Iz1BL^C`5t|xak=Mr6r#z0 z+)eZAGE8Y9*KTjKz*WR+ zhStg4pbHGxA?CBYt%V$X)=O;7y2hmH*Fg-Wb-kG|MiJ^D?E&k;z=kH)abs$&5m`af z7(gXI9ef|bG}cHD=JV(LCg>vHvW!X+;Fsqq=&R=4@9t7U&?+P2puw?3vSh*7uw^OK z5Du*K&n!}GrzZ7(c|@fMVdz@ZCp~YBG+2a2RT5Z!+9O5-6A{#VX8cj^LrAA}fp@R8 z=g>HB-O*zWUdp-)Ynz4iBxKsXKlS^`pbM;Y@XWK*@Rt9?it6MnDsU2t^Wt`FixOm# zN__wk^6=SXsXZ=&jx;cQ@amWi)c|fwP(}dXy5Pk3+xD-$e=^&wdYuIJ#lv|Px1~8z z#f4q$%3>Cs(8Wv2F^Zp@3gfP0MrLK&Gmx(nm4Os`Gt^Wap9PZ4^U_RFWeOA03y4eJ z6IKYrcocJPQX;N^MzU#zgYrEBTeHfthybpG$reXzVS=pv&+k7kPhOy4P_-N?< z8DE#gACAT+pi)P9utNkIEi_7n@JlwM6R-&5@)4#y)Bq@g`~7|zchIzjv{JS&B;h^z z=>6A>I0`^spns@9goW}n>CekkK6Ze&DJ}RN4jlaz}ypxu8)0vMr&C1%r zyzO3U1slzMp|AAz?m`X@wGMg!x#U=Lxzlf1fcN@pf~%0%cob6boEAGCg_b)z=lciK zJ>R|xl0bYQ$<{IgK!1l!#n=OphHX+xH?7Q+sZa7wg}>J@pa*)5EoC_p95vx01MscT zO6sJAiaz~mPWbb*5_@_`Iv7fJ8(TwbP>%|bFYblBPP9o=&hrBoLwk3VJbW^$=Xg)8 zq`SJl40jV1u(`4P@MjEXdVBb_UieiZ z**is{h{@VGyh2@z)cf~)YsEQ*n=^GVn=7GVg#{&(a#{I#H~^B9n@{G7UNK8Dsy1go z9`ayeW-|f(Sra-upcD!HKb{RDx16K`39hq&=TF~9m25lQp;RKnbkFiWbeUR z4^NfltNoVm}P`a2-(BdV%< zn=$0tm(iSm$-?gKCTues;>XXgj0Jp`=FJyKjs8QAGBU})hp81oba?yUlJH`)xTsV^DW&UXZ4H*!muKw3L^EyN|DE7_w#o$`q#T%odFpnL zo{q}=RWkf{lD=Ol`Ts!n{eQzV|C^;toEu3KU6ty$H8c`<>h$yM2IfpG5jRrO;tB=m zh|~G3z_R|y*D!_#=5#!7`qQJ?w5WzrD{)QOJDg@Da`H_(efqV4+2f_j`2yWwy`)to z>~2rT>_V^>BJZ@M-&KUURd)ro5yQYxZNJ+7Eq>DNk)`j(v82cUo6U<6i_bS-`05xO zij2ToSpL?xICH@sS9JA8MEwAij8Kh$bdrQDfFiUmwCoFWz#8>4S&F-@FFdX=JUpjN z94}9^_Xey#L!TKcQQX)pd)G=TLy(<1==E8IlmdmB^2Fuo*;9(7YK?tJ4KE=yms&z; za-MQt>gQpP5ZbDWR&OD)O&cHG(fiZ7@Z_$uk|k`_Kw8-m{qPNM$z=DG-atvOWx&x{ zM=jwkmWh>oZuK=*LP6CumGQ-EcPfSJ-?C4J@kgztj38vrf{DMR@*l*7u=m9aJat4| zf?=POP>L_J1<@<-8}V*;?>AcZ@QbAeVd> zdI)}!j@<%vt;G%dFYXp2ZwCOyQCiQ4^Pd(*(-r<#YICm_xNs<nkpWz=%|%iya;nI(q%MXxnU9DyVgB)z=o} zqPWMiqZCr^f_fqn*(Fl($mbjWj#`OB(|@FPiEV*)LNDKe~(f;&ippDP4KO%{RPS|$fkdX1W>XdBU(%;N;4lAS_=a#hiV zw|P>6!|cW6Z^G_Q6dt$9&VP^2Jk`lPU?0O%bzP-=rjI@V1ZPR*Qw$ok< zAfs!Nm^3y65C)bo3vk0x`{RZx!*)$L9_WoswHr#%w9iy4#8ef1?T2j$e!~Js;t7uDw0}@nppu^r&$@?`pqd3!^mqhfcrRM@ zo#P;vYO3V{dQAzGIj4)JR^}V`REq1sDIgaD&}os^6UyvZX-B^e_Hv%UQtDBWtqJV=Z*wcz?)vl_S}{@vv00Y*=EX z$oD~fFq{_-Bg#Gz(LrzIEERj!lpr#rTNqU#{$!Uk~q=7)@&zjS}TOkmF2 z9~9@`w|Zn?KXZ4xHn!!)SST?hGP-!6?tWe~!RxPn1oZ1&N}zX|4yN~S&zRaL!_!WL zrXJ4t0OB~;PAe1Q3ue-;9JWetJPM)~l??xs3J0?KL(j4IqYpmHUTzw;XbY`H-4 zt`dhfsjDLAsMWxk)*}vj?;Akg3`{|79aDR=6e~GK{(`S#%Q3p39 zc1}KPbH$;SJG52^AP}-It(AgjyuIODPo~zClC@Q`>HR;PX$j6*nImbD(skc$Jy-LS zBu1x|TLOtoj2g)~;Hg65x4F7#o_HzU!?CUjL3tC0exV>6AXRa#oWuB4TzlPO%Xw>6 zluo`jZp~NqV@B7KfeJF$w#goof<3jo40ul2O%0K0KPwHDZX!@WkKelRy8Xm;)KPm% zUKeenc~eC$9~I;F!>*%yibBR4HF!S+RIEBGF`o`vmj+@ZO9}2_do?Q(%uuDzAL0?? 
zY^QiGgUpc#GHRR*-p!_X{uXPz6!mtE)`x_Cm4TbE((5`Y<$67v&20oDNjPL`Cs8aM89ON^pUg59Wbe;x2 z)|GpJ{@%R9SnJr62aZjD>ZqNY?lCIpsPR$Mx>qkQCi9dX>ZzvnQhO5b8jdE%V@nJn zbDpax5Mb{7`N16+cdM<|&k*fGeVz&8Sq4(K41K}P6n~_Gh_y1Zuhd5$o6(&eD5yE( znAhLYM5}0o7zyy^aKmexfK_cl_u(zMsM1Y7Yn5yZX8sxHRk9;3N)Gr3`MsRf5E%J( zk)tXVQoaftRetv#qOMgilINjz^4KkhAPDQRaXN_I=oi1LYtC(qe$IM@L;-3&`f@T8 z6pV!Ri@#oArpNv!uZb=Sy+_e#EgIvL&uOq{OOu<9n{AS@XXQUJfStx4b1 zU`&}SZwEq9L0cRHFQQw1vO!!| zXlp12Gi{nJW>Uc#WXY{B`ALO+VCn|^vptw_CNVDaS+ByL#j}ho+f$qfDON(zJyM(c z5@9O?{%nb)k7)lT#d87VxD5v5cCm+~T`!V;(Hs>TY#qw~y<;hfoKD)ZD5}i>{XD4l zAq#TqTA50Sy5-K$-LC1@RzV&X&os}$>sMRz}8m5eNQ+cdD~8^Sdk6>SX8^yZ6Yo#X2& z1cxQ7UilV^gnn-5Xg@9X(t@?=75*-66+s-4zgZA+y55EJ7dcbUJ4&86^7qux39-QY zYPRWTl6c40fxkqS@5?#k?|YDp?hZn>eRTm{2)W`tFP2!?4hOBH8m_`f zdT;}HBMH~URLpHIgkwG0Ib5AUOg~kTtriqnpz*->ZY!y6$$r1NSwCp?)Y&z;Jt|ls}HS?bdiy*37jVLEs@u z`YrX^hq>5{0>V_sQ94x$_;f!03Q#W?Pj&RB=7@9u`f*5f00Um$BclodHP+;XXj6x8 z^CWXw|9zd{42J$f@zJHtym}0@5Vc4)AQMjQues?^#dh>vN0Y7Y$za2Fab|uK!1=q_Y(tQl`Gx)JD^tDWPf5X{6}md zfb!X4F&8E5{aV}!z;LHDQYbpJ2XrNfJk0fiqF3lODgPsgac4@do2f;xY}lBKtB(_dDlHR3U#Vo6N@NoKzY6{#44DMKF2bjwxGne6Cl|SlC~ByoX+J^(oOHHH`#h z{YEq71^q`~2|lm0@DjL1?GF%Z4fqR)ZNiBrBH|V(O@4LoH1}RG43o=ca$?`M@%TOn zL-91={6B!$cG7@k>MBCPy=i-Y>AC$S=u1ta3YvB33?us&CfQkG2*ayr)2o-li(w0w z4*g-N&NKPH@@zO`P}1oKYtR3N2ad(lbjfVd`ZwUJzk&OT{1XoviwA!GPvCLCf$v%V z6HotdJQ%xw0B2;y0$=F(C!SZo@vM#f6Sx!>c)%Bd;&q8n_!SGPQEyaRv9{aCX3|lH zd!1Ka@ADtK-e4nhsnDNfsiz62K3^4Bw`u;S@AXb0%Im^`M9n~CyohNutT6u*-cu<= zj8qUlt0TiXc2wZju8jomn8m^KWhez(-I;rDWT&47Ph3~of`L(jJymS$oi5P(OSHAudSkB%=^IzR3~nZ zjtk@dqwtK|Y#Y-m3CcPlB_Bg?;(LzVX;S#y$xg{dkOIp2^2{5%L)n$ZY#V(Ok-nTg z+w^@p~dHiK~R!I*Ds39i4j#dS&nr0Hws15$KX^Vg3<;my84@~DNuVhobd8&<@ z7P4QV@x;l3Ws<2ZgdAzrMn+k3G1hLO|R`{|rKL@V}D z1}hOo{bf0;5qj3^L;0r~hDDB>BtNS?$Jbhc`h3LuMk>4GEQ4zIyZj~Cc)3lMPKroh zy5L;LH)!QPT~3Iic_FKcU@PQ#yeYAY?~F*RIlopo_B^PuA0HO69XmggMC~>i^!E;k zhUFX8|FB}hcBr~ORH3?F4fW#xs7Tmzr&X)|p5e_BRPrLlkKle~0oezRJ}`txe<)8;EM* z6x+A<@u%jjv`=b0>`sk12T7Lh#!^GI)Cd!-Picx*Gdc%mLUA53tSVj~6b>QK8oLc%9irL#^MDG9F50 zhHN=SDgDbm1!C*llXGHyc75fe3h|T;pzB}WwyjPQHx`d6IJ;eVJBlhiJDN81UiXD1 zAU2eOb_oKfW6UZO``bS`0AhXFP9yP=`B&O+VENlb@oer6g%ss_BRRL=e>|9dicFY! zwy$DQW{_cQKGI&VFv5*!&nHu3Hv;W-kcWw~W;(HlbEf4}Pkn5rNr>D7OwRc&8=~Lu>niOhNK4n+A;i=; zwp9A=cOEdk^aRssQbqJ`p2L__+KsV&tv zU-;1Z7ul|+IiE8z5NgY4p_!OsO@V=b`KsOYBpTR=?e3U&5XZ>)!}d8?%Yw2sLG1Z+ zbuRiFYN0(Bh>+n94Aag+J^Tv$bYE8m!}ksuz&r728t4d6RjT1#0ZH#L;n&!*>a!xK zuRR$0*W8fd$N0kp{fn$yU5iyPL_elq@Q6vXXgOH@Xqi|dFhhJu*{(WOSctt0Vbw$1 z`w$~-NMZ1su=i%`FE~KK`e+KZ_J+pR@V3+tR#R{l9=5rY)~-zQySbwt{8jAU>An>@ z64gPe{KV(({?gOb+vZS{G-~pNp#QtI4 zWDoX5^<$R@>)Ap*h!q=3_*3i({uBCnql+X3t`lX7oMSa2^#WIs8UgP$3z&XU}akGt>o=1+W*-vdWc@1NcM z(7VlxVc?l6-U`ytx_X1M3-IK}G;F7X?o~6%c&SBbAiMHHBV^;`nZ-bTLJbpN6qX75 zBnXlBnwx8D%XYWtDd)p4hP9ip^XkkPnc;m@?eWBiu4w_~D~rhQ9q$;T3!`&F??`?) zWjSY`R#e4qB;@?)mN1%aT9h$lc8wa_(J8P7GQ_sf4I^Vc9^dy#ihLd#hiS$(vkI26 z@h=)Ad|Gl;MGvWTT?I-8zTpm|g^IhLeS+6v8#bDsvmYuvjk+T3mGm554MED!8N z9-XY@0r&~=0Z}WO4cZjL=XV*IKb7M9GM&BTVEm(%krVnb{T|2gFEZ6i?sNj-ck@s= zoge6-#RP|!dnT8aDH|cW_E=9?m>TU5vF8V%b6aplZ)i9$h2p2;)7*?{R&CpWKLFT! 
z4tCnPq&mB-#I*Wl+CiAp9$ap<54>F;D&+g4*MS-TjM~x<7%~jDczvRL)UXz&s2HrZE z;WdtntV&`~)a3V06Em|LVE#~BSnMDXb#yfYz;p5*{uA)eRE3K6Tv#60JMC=o5Yz{r z*wh*FPdFFX?YF)!veFx*oL+`mT0JPDC$C!in%n&?HNNgAROYFQxp&i4n!&O^?Q_<% zmOqKXev@e{%fk71Ex5i!eGrnyDjampsEo$LvTX>)u+b1p{o((9eE{Ql{?S4-vEewQ){mXTDMeqHQ@d$gnVGkN%SsfrMmPz2>5$4>ENc$4)NdsPIRl6|qFH>e7E1bn5c@JnC9(R~)Xi krF{?Y;4k=gW^vcJJx7aub-H(_aIhbBRc)0@CCeB83qAjf)&Kwi literal 0 HcmV?d00001 From f0bd2d05f5f7832df4879822afb99d2096c00d48 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 15:24:54 -0700 Subject: [PATCH 0273/1705] Link to global-power-plants demo instead of sf-trees --- docs/getting_started.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 2f0a7962..52434fdc 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -6,8 +6,8 @@ Play with a live demo The best way to experience Datasette for the first time is with a demo: +* `global-power-plants.datasettes.com `__ provides a searchable database of power plants around the world, using data from the `World Resources Institude `__ rendered using the `datasette-cluster-map `__ plugin. * `fivethirtyeight.datasettes.com `__ shows Datasette running against over 400 datasets imported from the `FiveThirtyEight GitHub repository `__. -* `sf-trees.datasettes.com `__ demonstrates the `datasette-cluster-map `__ plugin running against 190,000 trees imported from `data.sfgov.org `__. .. _getting_started_glitch: From 7788d62fa679fa87d3f34a3466295b0ae06598dd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 20:28:16 -0700 Subject: [PATCH 0274/1705] Expanded the Binary plugins section --- docs/binary_data.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/binary_data.rst b/docs/binary_data.rst index 593cf78d..6902af00 100644 --- a/docs/binary_data.rst +++ b/docs/binary_data.rst @@ -63,6 +63,6 @@ Binary plugins Several Datasette plugins are available that change the way Datasette treats binary data. -- `datasette-render-binary `__ modifies -- https://github.com/simonw/datasette-render-images -- https://github.com/simonw/datasette-media \ No newline at end of file +- `datasette-render-binary `__ modifies Datasette's default interface to show an automatic guess at what type of binary data is being stored, along with a visual representation of the binary value that displays ASCII strings directly in the interface. +- `datasette-render-images `__ detects common image formats and renders them as images directly in the Datasette interface. +- `datasette-media `__ allows Datasette interfaces to be configured to serve binary files from configured SQL queries, and includes the ability to resize images directly before serving them. From 4785172bbcb9edd22b6955b415cd18cd4d83f0aa Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 20:33:47 -0700 Subject: [PATCH 0275/1705] Release 0.51.1 --- datasette/version.py | 2 +- docs/changelog.rst | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index f6e9ce97..2d949370 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.51" +__version__ = "0.51.1" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index b9120c52..97d5d251 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,13 @@ Changelog ========= +.. 
_v0_51_1: + +0.51.1 (2020-10-31) +------------------- + +- Improvements to the new :ref:`binary` documentation page. + .. _v0_51: 0.51 (2020-10-31) From 59b252a0c020d687259ab85e06f0636feefa0dd0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Oct 2020 21:45:42 -0700 Subject: [PATCH 0276/1705] Link to annotated release notes for 0.51 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index c101a4ed..a10ccfd3 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly new ## News - * 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. + * 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. [Annotated release notes](https://simonwillison.net/2020/Nov/1/datasette-0-51/). * 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). * 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). * 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. From b61f6cceb5682f9154ba72259c0c9c7503a605bf Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 1 Nov 2020 09:22:13 -0800 Subject: [PATCH 0277/1705] Add nav menu to pattern portfolio --- datasette/templates/patterns.html | 38 +++++++++++++++++++++---------- 1 file changed, 26 insertions(+), 12 deletions(-) diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html index 62ef1322..25fb6008 100644 --- a/datasette/templates/patterns.html +++ b/datasette/templates/patterns.html @@ -9,19 +9,33 @@ +
-
-
- -
- - -
- -
- - -

Pattern Portfolio
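The nav menu added to the pattern portfolio here is the navigation menu introduced in Datasette 0.51, which plugins populate through the ``menu_links`` hook. A minimal sketch of such a plugin, assuming a hypothetical ``/-/upload`` page that is not part of this patch:

from datasette import hookimpl


@hookimpl
def menu_links(datasette, actor):
    # Contribute an entry to Datasette's navigation menu; returning None
    # (the implicit default) adds nothing. Only show the link to
    # authenticated actors. The /-/upload path is hypothetical.
    if actor is not None:
        return [
            {"href": datasette.urls.path("/-/upload"), "label": "Upload data"}
        ]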

From 7b194920702358b65739a6e8bd3adb765ffa346a Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 2 Nov 2020 10:27:25 -0800
Subject: [PATCH 0278/1705] database_actions() plugin hook, closes #1077

---
 datasette/hookspecs.py            |  5 +++++
 datasette/static/app.css          |  6 +++---
 datasette/templates/database.html | 25 ++++++++++++++++++++++++-
 datasette/templates/table.html    |  6 +++---
 datasette/views/database.py       | 15 +++++++++++++++
 docs/plugin_hooks.rst             | 16 ++++++++++++++++
 tests/fixtures.py                 |  1 +
 tests/plugins/my_plugin.py        | 11 +++++++++++
 tests/test_api.py                 |  1 +
 tests/test_plugins.py             | 19 ++++++++++++++++++-
 10 files changed, 97 insertions(+), 8 deletions(-)

diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py
index 78070e67..a305ca6a 100644
--- a/datasette/hookspecs.py
+++ b/datasette/hookspecs.py
@@ -107,3 +107,8 @@ def menu_links(datasette, actor):
 @hookspec
 def table_actions(datasette, actor, database, table):
     "Links for the table actions menu"
+
+
+@hookspec
+def database_actions(datasette, actor, database):
+    "Links for the database actions menu"

diff --git a/datasette/static/app.css b/datasette/static/app.css
index a1eb2099..675285c1 100644
--- a/datasette/static/app.css
+++ b/datasette/static/app.css
@@ -360,11 +360,11 @@ details .nav-menu-inner {
   display: block;
 }

-/* Table actions menu */
-.table-menu-links {
+/* Table/database actions menu */
+.actions-menu-links {
   position: relative;
 }
-.table-menu-links .dropdown-menu {
+.actions-menu-links .dropdown-menu {
   position: absolute;
   top: 2rem;
   right: 0;

diff --git a/datasette/templates/database.html b/datasette/templates/database.html
index 3b89d68b..7065f2c2 100644
--- a/datasette/templates/database.html
+++ b/datasette/templates/database.html
@@ -18,7 +18,30 @@

 {% block content %}
-    [heading markup lost] {{ metadata.title or database }}{% if private %} 🔒{% endif %}
+    [replacement header and database actions menu markup lost during extraction]

 {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}

[the remaining diffs in this patch (datasette/templates/table.html, datasette/views/database.py, docs/plugin_hooks.rst and the tests), plus several pattern portfolio hunks that followed, are not recoverable: their HTML markup was stripped during extraction, taking surrounding text with it; only fragments such as hunk headers, ".bd for /database/table", "roadside_attractions: 2", "Suggested facets: tags, created (date), tags (array)" and ".ft" survived]
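For illustration, a plugin using the new ``database_actions()`` hook could look like the following sketch. The ``/-/upload-excel`` path and the root-only check are hypothetical, not part of this patch:

from datasette import hookimpl


@hookimpl
def database_actions(datasette, actor, database):
    # Add an item to the new database actions menu on the database page.
    # The /-/upload-excel path is hypothetical, used here for illustration.
    if actor and actor.get("id") == "root":
        return [
            {
                "href": datasette.urls.database(database) + "/-/upload-excel",
                "label": "Upload Excel file",
            }
        ]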

From 13d1228d80c91d382a05b1a9549ed02c300ef851 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 2 Nov 2020 12:02:50 -0800
Subject: [PATCH 0280/1705] /dbname/tablename/-/modify-table-schema is OK after all

Refs #1053, #296
---
 docs/writing_plugins.rst | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst
index 29fcca13..dfcda8a9 100644
--- a/docs/writing_plugins.rst
+++ b/docs/writing_plugins.rst
@@ -227,10 +227,11 @@ If your plugin includes functionality that relates to a specific database you co

 - ``/dbname/-/upload-excel``

-Reserving routes under ``/dbname/tablename/-/...`` is not a good idea because a table could conceivably include a row with a primary key value of ``-``. Instead, you could use a pattern like this:
+Or for a specific table like this:

-- ``/dbname/-/upload-excel/tablename``
+- ``/dbname/tablename/-/modify-table-schema``

+Note that a row could have a primary key of ``-`` and this URL scheme will still work, because Datasette row pages do not ever have a trailing slash followed by additional path components.

 .. _writing_plugins_building_urls:
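A plugin claiming a ``/dbname/tablename/-/...`` route would typically do so with the ``register_routes`` hook. A sketch under that assumption, with a placeholder handler body:

import html

from datasette import hookimpl
from datasette.utils.asgi import Response


def modify_table_schema(request):
    database = request.url_vars["database"]
    table = request.url_vars["table"]
    # Placeholder only: a real plugin would render a schema editing UI here.
    return Response.html(
        "Schema editor for {}/{} would go here".format(
            html.escape(database), html.escape(table)
        )
    )


@hookimpl
def register_routes():
    # Row pages never have a trailing slash followed by extra path
    # components, so this pattern cannot collide with a row URL.
    return [
        (
            r"^/(?P<database>[^/]+)/(?P<table>[^/]+)/-/modify-table-schema$",
            modify_table_schema,
        )
    ]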
From 2a981e2ac1d13125973904b777d00ea75e8df4e6 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 11 Nov 2020 15:37:37 -0800
Subject: [PATCH 0281/1705] Blank foreign key labels now show as hyphens, closes #1086

---
 datasette/templates/table.html |  4 +-
 datasette/views/table.py       |  2 +-
 tests/fixtures.py              |  6 ++-
 tests/test_api.py              | 76 ++++++++++++++++++++++------------
 tests/test_csv.py              |  6 +--
 tests/test_html.py             | 30 +++++++++-----
 6 files changed, 80 insertions(+), 44 deletions(-)

diff --git a/datasette/templates/table.html b/datasette/templates/table.html
index 5034b62e..077332dc 100644
--- a/datasette/templates/table.html
+++ b/datasette/templates/table.html
@@ -156,9 +156,9 @@
 [the list item and link markup around these expressions was lost during extraction]
     {% for facet_value in facet_info.results %}
       {% if not facet_value.selected %}
-        {{ (facet_value.label if facet_value.label is not none else "_") }} {{ "{:,}".format(facet_value.count) }}
+        {{ (facet_value.label | string()) or "-" }} {{ "{:,}".format(facet_value.count) }}
       {% else %}
-        {{ facet_value.label }} · {{ "{:,}".format(facet_value.count) }}
+        {{ facet_value.label or "-" }} · {{ "{:,}".format(facet_value.count) }}
  • {% endif %} {% endfor %} {% if facet_info.truncated %} diff --git a/datasette/views/table.py b/datasette/views/table.py index 65fe7f8b..d29ef201 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -195,7 +195,7 @@ class RowTableShared(DataView): table=urllib.parse.quote_plus(other_table), link_id=urllib.parse.quote_plus(str(value)), id=str(jinja2.escape(value)), - label=str(jinja2.escape(label)), + label=str(jinja2.escape(label)) or "-", ) ) elif value in ("", None): diff --git a/tests/fixtures.py b/tests/fixtures.py index a48cfb46..bd530398 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -386,8 +386,10 @@ CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_k CREATE TABLE foreign_key_references ( pk varchar(30) primary key, foreign_key_with_label varchar(30), + foreign_key_with_blank_label varchar(30), foreign_key_with_no_label varchar(30), FOREIGN KEY (foreign_key_with_label) REFERENCES simple_primary_key(id), + FOREIGN KEY (foreign_key_with_blank_label) REFERENCES simple_primary_key(id), FOREIGN KEY (foreign_key_with_no_label) REFERENCES primary_key_multiple_columns(id) ); @@ -622,8 +624,8 @@ INSERT INTO simple_primary_key VALUES (4, 'RENDER_CELL_DEMO'); INSERT INTO primary_key_multiple_columns VALUES (1, 'hey', 'world'); INSERT INTO primary_key_multiple_columns_explicit_label VALUES (1, 'hey', 'world2'); -INSERT INTO foreign_key_references VALUES (1, 1, 1); -INSERT INTO foreign_key_references VALUES (2, null, null); +INSERT INTO foreign_key_references VALUES (1, 1, 3, 1); +INSERT INTO foreign_key_references VALUES (2, null, null, null); INSERT INTO complex_foreign_keys VALUES (1, 1, 2, 1); INSERT INTO custom_foreign_key_label VALUES (1, 1); diff --git a/tests/test_api.py b/tests/test_api.py index 1a43e7f4..d6d683b7 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -49,21 +49,21 @@ def test_homepage_sort_by_relationships(app_client): tables = [ t["name"] for t in response.json["fixtures"]["tables_and_views_truncated"] ] - assert [ + assert tables == [ "simple_primary_key", + "foreign_key_references", "complex_foreign_keys", "roadside_attraction_characteristics", "searchable_tags", - "foreign_key_references", - ] == tables + ] def test_database_page(app_client): response = app_client.get("/fixtures.json") assert response.status == 200 data = response.json - assert "fixtures" == data["database"] - assert [ + assert data["database"] == "fixtures" + assert data["tables"] == [ { "name": "123_starts_with_digits", "columns": ["content"], @@ -232,7 +232,12 @@ def test_database_page(app_client): }, { "name": "foreign_key_references", - "columns": ["pk", "foreign_key_with_label", "foreign_key_with_no_label"], + "columns": [ + "pk", + "foreign_key_with_label", + "foreign_key_with_blank_label", + "foreign_key_with_no_label", + ], "primary_keys": ["pk"], "count": 2, "hidden": False, @@ -245,6 +250,11 @@ def test_database_page(app_client): "column": "foreign_key_with_no_label", "other_column": "id", }, + { + "other_table": "simple_primary_key", + "column": "foreign_key_with_blank_label", + "other_column": "id", + }, { "other_table": "simple_primary_key", "column": "foreign_key_with_label", @@ -403,6 +413,11 @@ def test_database_page(app_client): "fts_table": None, "foreign_keys": { "incoming": [ + { + "other_table": "foreign_key_references", + "column": "id", + "other_column": "foreign_key_with_blank_label", + }, { "other_table": "foreign_key_references", "column": "id", @@ -548,7 +563,7 @@ def test_database_page(app_client): 
"foreign_keys": {"incoming": [], "outgoing": []}, "private": False, }, - ] == data["tables"] + ] def test_no_files_uses_memory_database(app_client_no_files): @@ -1203,32 +1218,38 @@ def test_row_foreign_key_tables(app_client): "/fixtures/simple_primary_key/1.json?_extras=foreign_key_tables" ) assert response.status == 200 - assert [ + assert response.json["foreign_key_tables"] == [ { - "column": "id", - "count": 1, - "other_column": "foreign_key_with_label", "other_table": "foreign_key_references", - }, - { - "column": "id", - "count": 1, - "other_column": "f3", - "other_table": "complex_foreign_keys", - }, - { "column": "id", + "other_column": "foreign_key_with_blank_label", "count": 0, - "other_column": "f2", - "other_table": "complex_foreign_keys", }, { + "other_table": "foreign_key_references", "column": "id", + "other_column": "foreign_key_with_label", "count": 1, - "other_column": "f1", - "other_table": "complex_foreign_keys", }, - ] == response.json["foreign_key_tables"] + { + "other_table": "complex_foreign_keys", + "column": "id", + "other_column": "f3", + "count": 1, + }, + { + "other_table": "complex_foreign_keys", + "column": "id", + "other_column": "f2", + "count": 0, + }, + { + "other_table": "complex_foreign_keys", + "column": "id", + "other_column": "f1", + "count": 1, + }, + ] def test_unit_filters(app_client): @@ -1593,13 +1614,14 @@ def test_expand_label(app_client): "/fixtures/foreign_key_references.json?_shape=object" "&_label=foreign_key_with_label&_size=1" ) - assert { + assert response.json == { "1": { "pk": "1", "foreign_key_with_label": {"value": "1", "label": "hello"}, + "foreign_key_with_blank_label": "3", "foreign_key_with_no_label": "1", } - } == response.json + } @pytest.mark.parametrize( @@ -1790,11 +1812,13 @@ def test_null_foreign_keys_are_not_expanded(app_client): { "pk": "1", "foreign_key_with_label": {"value": "1", "label": "hello"}, + "foreign_key_with_blank_label": {"value": "3", "label": ""}, "foreign_key_with_no_label": {"value": "1", "label": "1"}, }, { "pk": "2", "foreign_key_with_label": None, + "foreign_key_with_blank_label": None, "foreign_key_with_no_label": None, }, ] == response.json diff --git a/tests/test_csv.py b/tests/test_csv.py index 3e91fb04..209bce2b 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -42,9 +42,9 @@ pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,com ) EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV = """ -pk,foreign_key_with_label,foreign_key_with_label_label,foreign_key_with_no_label,foreign_key_with_no_label_label -1,1,hello,1,1 -2,,,, +pk,foreign_key_with_label,foreign_key_with_label_label,foreign_key_with_blank_label,foreign_key_with_blank_label_label,foreign_key_with_no_label,foreign_key_with_no_label_label +1,1,hello,3,,1,1 +2,,,,,, """.lstrip().replace( "\n", "\r\n" ) diff --git a/tests/test_html.py b/tests/test_html.py index 006c223d..7fca8a68 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -464,7 +464,7 @@ def test_facet_display(app_client): ], } ) - assert [ + assert actual == [ { "name": "city_id", "items": [ @@ -520,7 +520,7 @@ def test_facet_display(app_client): }, ], }, - ] == actual + ] def test_facets_persist_through_filter_form(app_client): @@ -801,37 +801,47 @@ def test_table_html_foreign_key_links(app_client): response = app_client.get("/fixtures/foreign_key_references") assert response.status == 200 table = Soup(response.body, "html.parser").find("table") - expected = [ + actual = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] + 
    assert actual == [
        [...],  # the four expected td HTML strings for row 1 were lost during extraction
        [...],  # the four expected td HTML strings for row 2 were lost during extraction
    ]
-    assert expected == [
-        [str(td) for td in tr.select("td")] for tr in table.select("tbody tr")
-    ]
+
+
+def test_table_html_foreign_key_facets(app_client):
+    response = app_client.get(
+        "/fixtures/foreign_key_references?_facet=foreign_key_with_blank_label"
+    )
+    assert response.status == 200
+    assert (
+        "- 1"  # the surrounding list item and link markup was lost during extraction
+    ) in response.text


 def test_table_html_disable_foreign_key_links_with_labels(app_client):
     response = app_client.get("/fixtures/foreign_key_references?_labels=off&_size=1")
     assert response.status == 200
     table = Soup(response.body, "html.parser").find("table")
-    expected = [
+    actual = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")]
+    assert actual == [
+        [...],  # the four expected td HTML strings were lost during extraction
+    ]
-    assert expected == [
-        [str(td) for td in tr.select("td")] for tr in table.select("tbody tr")
-    ]


 def test_table_html_foreign_key_custom_label_column(app_client):

From e8e0a6f284ca953b2980186c4356594c07bd1929 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 11 Nov 2020 16:02:58 -0800
Subject: [PATCH 0282/1705] Use FTS4 in fixtures

Closes #1081
---
 tests/fixtures.py                |  2 +-
 tests/test_api.py                | 28 ++++++++++++++++------------
 tests/test_internals_database.py |  7 ++++---
 3 files changed, 21 insertions(+), 16 deletions(-)

diff --git a/tests/fixtures.py b/tests/fixtures.py
index bd530398..183b8ca4 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -488,7 +488,7 @@ INSERT INTO searchable_tags (searchable_id, tag) VALUES
 ;

 CREATE VIRTUAL TABLE "searchable_fts"
-    USING FTS3 (text1, text2, [name with . and spaces], content="searchable");
+    USING FTS4 (text1, text2, [name with . and spaces], content="searchable");
 INSERT INTO "searchable_fts" (rowid, text1, text2, [name with . and spaces])
     SELECT rowid, text1, text2, [name with . and spaces] FROM searchable;

diff --git a/tests/test_api.py b/tests/test_api.py
index d6d683b7..93097574 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -37,9 +37,9 @@ def test_homepage(app_client):
     assert len(d["tables_and_views_truncated"]) == 5
     assert d["tables_and_views_more"] is True
     # 4 hidden FTS tables + no_primary_key (hidden in metadata)
-    assert d["hidden_tables_count"] == 5
-    # 201 in no_primary_key, plus 5 in other hidden tables:
-    assert d["hidden_table_rows_sum"] == 206
+    assert d["hidden_tables_count"] == 6
+    # 201 in no_primary_key, plus 6 in other hidden tables:
+    assert d["hidden_table_rows_sum"] == 207
     assert d["views_count"] == 4

@@ -512,7 +512,7 @@
         {
             "name": "searchable_fts",
-            "columns": ["text1", "text2", "name with . and spaces", "content"],
+            "columns": ["text1", "text2", "name with . and spaces"],
             "primary_keys": [],
             "count": 2,
             "hidden": True,
@@ -521,14 +521,8 @@
         },
         {
-            "name": "searchable_fts_content",
-            "columns": [
-                "docid",
-                "c0text1",
-                "c1text2",
-                "c2name with . 
and spaces", - "c3content", - ], + "name": "searchable_fts_docsize", + "columns": ["docid", "size"], "primary_keys": ["docid"], "count": 2, "hidden": True, @@ -563,6 +557,16 @@ def test_database_page(app_client): "foreign_keys": {"incoming": [], "outgoing": []}, "private": False, }, + { + "name": "searchable_fts_stat", + "columns": ["id", "value"], + "primary_keys": ["id"], + "count": 1, + "hidden": True, + "fts_table": None, + "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, + }, ] diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 8042cf53..e5938f3b 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -297,7 +297,7 @@ async def test_get_all_foreign_keys(db): @pytest.mark.asyncio async def test_table_names(db): table_names = await db.table_names() - assert [ + assert table_names == [ "simple_primary_key", "primary_key_multiple_columns", "primary_key_multiple_columns_explicit_label", @@ -316,9 +316,10 @@ async def test_table_names(db): "searchable", "searchable_tags", "searchable_fts", - "searchable_fts_content", "searchable_fts_segments", "searchable_fts_segdir", + "searchable_fts_docsize", + "searchable_fts_stat", "select", "infinity", "facet_cities", @@ -327,7 +328,7 @@ async def test_table_names(db): "roadside_attractions", "attraction_characteristic", "roadside_attraction_characteristics", - ] == table_names + ] @pytest.mark.asyncio From 253f2d9a3cc96edcb47b33c6971300d0ff15d4dc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 11 Nov 2020 20:36:44 -0800 Subject: [PATCH 0283/1705] Use correct QueryInterrupted exception on row page, closes #1088 --- datasette/views/table.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index d29ef201..9ed45df1 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -19,7 +19,6 @@ from datasette.utils import ( path_with_added_args, path_with_removed_args, path_with_replaced_args, - sqlite3, to_css_class, urlsafe_components, value_as_boolean, @@ -1040,7 +1039,7 @@ class RowView(RowTableShared): ) try: rows = list(await db.execute(sql, {"id": pk_values[0]})) - except sqlite3.OperationalError: + except QueryInterrupted: # Almost certainly hit the timeout return [] From 5eb8e9bf250b26e30b017d39a392c33973997656 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 12 Nov 2020 12:07:19 -0800 Subject: [PATCH 0284/1705] Removed words that minimize involved difficulty, closes #1089 --- docs/changelog.rst | 8 ++++---- docs/contributing.rst | 8 ++++---- docs/deploying.rst | 4 ++-- docs/ecosystem.rst | 2 +- docs/internals.rst | 2 +- docs/metadata.rst | 2 +- docs/plugin_hooks.rst | 2 +- docs/publish.rst | 4 ++-- docs/sql_queries.rst | 2 +- docs/writing_plugins.rst | 4 ++-- 10 files changed, 19 insertions(+), 19 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 97d5d251..34bd95d4 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -239,7 +239,7 @@ Better plugin documentation The plugin documentation has been re-arranged into four sections, including a brand new section on testing plugins. (`#687 `__) - :ref:`plugins` introduces Datasette's plugin system and describes how to install and configure plugins. -- :ref:`writing_plugins` describes how to author plugins, from simple one-off plugins to packaged plugins that can be published to PyPI. It also describes how to start a plugin using the new `datasette-plugin `__ cookiecutter template. 
+- :ref:`writing_plugins` describes how to author plugins, from one-off single file plugins to packaged plugins that can be published to PyPI. It also describes how to start a plugin using the new `datasette-plugin `__ cookiecutter template. - :ref:`plugin_hooks` is a full list of detailed documentation for every Datasette plugin hook. - :ref:`testing_plugins` describes how to write tests for Datasette plugins, using `pytest `__ and `HTTPX `__. @@ -277,7 +277,7 @@ Authentication Prior to this release the Datasette ecosystem has treated authentication as exclusively the realm of plugins, most notably through `datasette-auth-github `__. -0.44 introduces :ref:`authentication` as core Datasette concepts (`#699 `__). This makes it easier for different plugins can share responsibility for authenticating requests - you might have one plugin that handles user accounts and another one that allows automated access via API keys, for example. +0.44 introduces :ref:`authentication` as core Datasette concepts (`#699 `__). This enables different plugins to share responsibility for authenticating requests - you might have one plugin that handles user accounts and another one that allows automated access via API keys, for example. You'll need to install plugins if you want full user accounts, but default Datasette can now authenticate a single root user with the new ``--root`` command-line option, which outputs a one-time use URL to :ref:`authenticate as a root actor ` (`#784 `__):: @@ -572,7 +572,7 @@ Also in this release: 0.32 (2019-11-14) ----------------- -Datasette now renders templates using `Jinja async mode `__. This makes it easy for plugins to provide custom template functions that perform asynchronous actions, for example the new `datasette-template-sql `__ plugin which allows custom templates to directly execute SQL queries and render their results. (`#628 `__) +Datasette now renders templates using `Jinja async mode `__. This means plugins can provide custom template functions that perform asynchronous actions, for example the new `datasette-template-sql `__ plugin which allows custom templates to directly execute SQL queries and render their results. (`#628 `__) .. _v0_31_2: @@ -1881,7 +1881,7 @@ as a more powerful alternative to SQL views. This will write those values into the metadata.json that is packaged with the app. If you also pass ``--metadata=metadata.json`` that file will be updated with the extra values before being written into the Docker image. -- Added simple production-ready Dockerfile (`#94`_) [Andrew +- Added production-ready Dockerfile (`#94`_) [Andrew Cutler] - New ``?_sql_time_limit_ms=10`` argument to database and table page (`#95`_) - SQL syntax highlighting with Codemirror (`#89`_) [Tom Dyson] diff --git a/docs/contributing.rst b/docs/contributing.rst index 375f6b89..ca194001 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -19,7 +19,7 @@ General guidelines Setting up a development environment ------------------------------------ -If you have Python 3.6 or higher installed on your computer (on OS X the easiest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps. +If you have Python 3.6 or higher installed on your computer (on OS X the quickest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps. If you want to use GitHub to publish your changes, first `create a fork of datasette `__ under your own GitHub account. 
@@ -27,7 +27,7 @@ Now clone that repository somewhere on your computer:: git clone git@github.com:YOURNAME/datasette -If you just want to get started without creating your own fork, you can do this instead:: +If you want to get started without creating your own fork, you can do this instead:: git clone git@github.com:simonw/datasette @@ -47,9 +47,9 @@ Once you have done this, you can run the Datasette unit tests from inside your ` pytest -To run Datasette itself, just type ``datasette``. +To run Datasette itself, type ``datasette``. -You're going to need at least one SQLite database. An easy way to get started is to use the fixtures database that Datasette uses for its own tests. +You're going to need at least one SQLite database. A quick way to get started is to use the fixtures database that Datasette uses for its own tests. You can create a copy of that database by running this command:: diff --git a/docs/deploying.rst b/docs/deploying.rst index e777f296..3eeaaad8 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -4,7 +4,7 @@ Deploying Datasette ===================== -The easiest way to deploy a Datasette instance on the internet is to use the ``datasette publish`` command, described in :ref:`publishing`. This can be used to quickly deploy Datasette to a number of hosting providers including Heroku, Google Cloud Run and Vercel. +The quickest way to deploy a Datasette instance on the internet is to use the ``datasette publish`` command, described in :ref:`publishing`. This can be used to quickly deploy Datasette to a number of hosting providers including Heroku, Google Cloud Run and Vercel. You can deploy Datasette to other hosting providers using the instructions on this page. @@ -109,7 +109,7 @@ If you want to build SQLite files or download them as part of the deployment pro wget https://fivethirtyeight.datasettes.com/fivethirtyeight.db -`simonw/buildpack-datasette-demo `__ is an example GitHub repository showing a simple Datasette configuration that can be deployed to a buildpack-supporting host. +`simonw/buildpack-datasette-demo `__ is an example GitHub repository showing a Datasette configuration that can be deployed to a buildpack-supporting host. .. _deploying_proxy: diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst index 4b80e71e..2ab4224a 100644 --- a/docs/ecosystem.rst +++ b/docs/ecosystem.rst @@ -68,7 +68,7 @@ For example, to create a SQLite database of the `City of Dallas Payment Register Datasette Plugins ================= -Datasette's :ref:`plugin system ` makes it easy to enhance Datasette with additional functionality. +Datasette's :ref:`plugin system ` allows developers to enhance Datasette with additional functionality. datasette-graphql ----------------- diff --git a/docs/internals.rst b/docs/internals.rst index d3d0be8e..92496490 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -119,7 +119,7 @@ For example: content_type="application/xml; charset=utf-8" ) -The easiest way to create responses is using the ``Response.text(...)``, ``Response.html(...)``, ``Response.json(...)`` or ``Response.redirect(...)`` helper methods: +The quickest way to create responses is using the ``Response.text(...)``, ``Response.html(...)``, ``Response.json(...)`` or ``Response.redirect(...)`` helper methods: .. 
code-block:: python diff --git a/docs/metadata.rst b/docs/metadata.rst index 471a52e3..87c81ff6 100644 --- a/docs/metadata.rst +++ b/docs/metadata.rst @@ -310,7 +310,7 @@ Here's an example of a ``metadata.yml`` file, re-using an example from :ref:`can where neighborhood like '%' || :text || '%' order by neighborhood; title: Search neighborhoods description_html: |- -

    This demonstrates simple LIKE search +

    This demonstrates basic LIKE search The ``metadata.yml`` file is passed to Datasette using the same ``--metadata`` option:: diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 6f8d269d..8407a259 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -471,7 +471,7 @@ It can also return a dictionary with the following keys. This format is **deprec ``headers`` - dictionary, optional Extra HTTP headers to be returned in the response. -A simple example of an output renderer callback function: +An example of an output renderer callback function: .. code-block:: python diff --git a/docs/publish.rst b/docs/publish.rst index 45048ce1..a905ac92 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -71,7 +71,7 @@ You can specify a custom app name by passing ``-n my-app-name`` to the publish c Publishing to Vercel -------------------- -`Vercel `__ - previously known as Zeit Now - provides a layer over AWS Lambda to allow for easy, scale-to-zero deployment. You can deploy Datasette instances to Vercel using the `datasette-publish-vercel `__ plugin. +`Vercel `__ - previously known as Zeit Now - provides a layer over AWS Lambda to allow for quick, scale-to-zero deployment. You can deploy Datasette instances to Vercel using the `datasette-publish-vercel `__ plugin. :: @@ -85,7 +85,7 @@ Not every feature is supported: consult the `datasette-publish-vercel README `__ is a `competitively priced `__ Docker-compatible hosting platform that makes it easy to run applications in globally distributed data centers close to your end users. You can deploy Datasette instances to Fly using the `datasette-publish-fly `__ plugin. +`Fly `__ is a `competitively priced `__ Docker-compatible hosting platform that supports running applications in globally distributed data centers close to your end users. You can deploy Datasette instances to Fly using the `datasette-publish-fly `__ plugin. :: diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index 0ce506cb..ec4c860e 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -64,7 +64,7 @@ If you want to bundle some pre-written SQL queries with your Datasette-hosted database you can do so in two ways. The first is to include SQL views in your database - Datasette will then list those views on your database index page. -The easiest way to create views is with the SQLite command-line interface:: +The quickest way to create views is with the SQLite command-line interface:: $ sqlite3 sf-trees.db SQLite version 3.19.3 2017-06-27 16:48:08 diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index dfcda8a9..60d5056a 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -10,7 +10,7 @@ You can write one-off plugins that apply to just one Datasette instance, or you Writing one-off plugins ----------------------- -The easiest way to write a plugin is to create a ``my_plugin.py`` file and drop it into your ``plugins/`` directory. Here is an example plugin, which adds a new custom SQL function called ``hello_world()`` which takes no arguments and returns the string ``Hello world!``. +The quickest way to start writing a plugin is to create a ``my_plugin.py`` file and drop it into your ``plugins/`` directory. Here is an example plugin, which adds a new custom SQL function called ``hello_world()`` which takes no arguments and returns the string ``Hello world!``. .. 
code-block:: python @@ -37,7 +37,7 @@ Starting an installable plugin using cookiecutter Plugins that can be installed should be written as Python packages using a ``setup.py`` file. -The easiest way to start writing one an installable plugin is to use the `datasette-plugin `__ cookiecutter template. This creates a new plugin structure for you complete with an example test and GitHub Actions workflows for testing and publishing your plugin. +The quickest way to start writing one an installable plugin is to use the `datasette-plugin `__ cookiecutter template. This creates a new plugin structure for you complete with an example test and GitHub Actions workflows for testing and publishing your plugin. `Install cookiecutter `__ and then run this command to start building a plugin using the template:: From 200284e1a7541af62c7df5467acfb7edd0ee934a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 15 Nov 2020 08:43:13 -0800 Subject: [PATCH 0285/1705] Clarified how --plugin-secret works --- docs/plugins.rst | 30 ++++++++++++++++++++++++++---- 1 file changed, 26 insertions(+), 4 deletions(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index 1c0dd588..06e2ec00 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -123,7 +123,6 @@ If you run ``datasette plugins --all`` it will include default plugins that ship You can add the ``--plugins-dir=`` option to include any plugins found in that directory. - .. _plugins_configuration: Plugin configuration @@ -131,7 +130,9 @@ Plugin configuration Plugins can have their own configuration, embedded in a :ref:`metadata` file. Configuration options for plugins live within a ``"plugins"`` key in that file, which can be included at the root, database or table level. -Here is an example of some plugin configuration for a specific table:: +Here is an example of some plugin configuration for a specific table: + +.. code-block:: json { "databases: { @@ -159,7 +160,9 @@ Secret configuration values Any values embedded in ``metadata.json`` will be visible to anyone who views the ``/-/metadata`` page of your Datasette instance. Some plugins may need configuration that should stay secret - API keys for example. There are two ways in which you can store secret configuration values. -**As environment variables**. If your secret lives in an environment variable that is available to the Datasette process, you can indicate that the configuration value should be read from that environment variable like so:: +**As environment variables**. If your secret lives in an environment variable that is available to the Datasette process, you can indicate that the configuration value should be read from that environment variable like so: + +.. code-block:: json { "plugins": { @@ -171,7 +174,9 @@ Any values embedded in ``metadata.json`` will be visible to anyone who views the } } -**As values in separate files**. Your secrets can also live in files on disk. To specify a secret should be read from a file, provide the full file path like this:: +**As values in separate files**. Your secrets can also live in files on disk. To specify a secret should be read from a file, provide the full file path like this: + +.. 
code-block:: json { "plugins": { @@ -190,3 +195,20 @@ If you are publishing your data using the :ref:`datasette publish ` --install=datasette-auth-github \ --plugin-secret datasette-auth-github client_id your_client_id \ --plugin-secret datasette-auth-github client_secret your_client_secret + +This will set the necessary environment variables and add the following to the deployed ``metadata.json``: + +.. code-block:: json + + { + "plugins": { + "datasette-auth-github": { + "client_id": { + "$env": "DATASETTE_AUTH_GITHUB_CLIENT_ID" + }, + "client_secret": { + "$env": "DATASETTE_AUTH_GITHUB_CLIENT_SECRET" + } + } + } + } From 6fd35be64de221eba4945ca24e8e1678f6142a73 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 15 Nov 2020 08:45:26 -0800 Subject: [PATCH 0286/1705] Fixed invalid JSON in exampl --- docs/plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index 06e2ec00..3e756a9e 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -135,7 +135,7 @@ Here is an example of some plugin configuration for a specific table: .. code-block:: json { - "databases: { + "databases": { "sf-trees": { "tables": { "Street_Tree_List": { From 30e64c8d3b3728a86c3ca42a75322cc3feb5b0c8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 15 Nov 2020 15:24:22 -0800 Subject: [PATCH 0287/1705] Use f-strings in place of .format() Code transformed like so: pip install flynt flynt . black . --- datasette/app.py | 14 +++--- datasette/blob_renderer.py | 6 +-- datasette/cli.py | 20 +++----- datasette/database.py | 12 ++--- datasette/facets.py | 20 ++++---- datasette/filters.py | 18 +++---- datasette/inspect.py | 2 +- datasette/publish/cloudrun.py | 8 ++- datasette/publish/common.py | 4 +- datasette/publish/heroku.py | 14 ++---- datasette/renderer.py | 4 +- datasette/tracer.py | 4 +- datasette/url_builder.py | 14 +++--- datasette/utils/__init__.py | 91 +++++++++++++++------------------- datasette/utils/asgi.py | 2 +- datasette/utils/testing.py | 6 +-- datasette/views/base.py | 22 ++++---- datasette/views/database.py | 8 ++- datasette/views/special.py | 4 +- datasette/views/table.py | 88 +++++++++++++------------------- tests/fixtures.py | 12 ++--- tests/plugins/my_plugin.py | 16 +++--- tests/test_api.py | 8 +-- tests/test_auth.py | 2 +- tests/test_canned_queries.py | 8 +-- tests/test_cli.py | 2 +- tests/test_docs.py | 6 +-- tests/test_filters.py | 4 +- tests/test_html.py | 41 +++++++-------- tests/test_internals_urls.py | 2 +- tests/test_messages.py | 2 +- tests/test_plugins.py | 12 ++--- tests/test_publish_cloudrun.py | 10 ++-- tests/test_utils.py | 2 +- update-docs-help.py | 2 +- 35 files changed, 213 insertions(+), 277 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 1271e52f..b2bdb746 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -231,7 +231,7 @@ class Datasette: is_mutable = path not in self.immutables db = Database(self, path, is_mutable=is_mutable, is_memory=is_memory) if db.name in self.databases: - raise Exception("Multiple files with same stem: {}".format(db.name)) + raise Exception(f"Multiple files with same stem: {db.name}") self.add_database(db.name, db) self.cache_headers = cache_headers self.cors = cors @@ -455,9 +455,9 @@ class Datasette: if self.sqlite_extensions: conn.enable_load_extension(True) for extension in self.sqlite_extensions: - conn.execute("SELECT load_extension('{}')".format(extension)) + conn.execute(f"SELECT load_extension('{extension}')") if self.config("cache_size_kb"): - 
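            # Note: SQLite interprets a negative cache_size value as a size in
            # kibibytes rather than a number of pages, so "PRAGMA cache_size=-2000"
            # requests a roughly 2MB page cache for this connection.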
conn.execute("PRAGMA cache_size=-{}".format(self.config("cache_size_kb"))) + conn.execute(f"PRAGMA cache_size=-{self.config('cache_size_kb')}") # pylint: disable=no-member pm.hook.prepare_connection(conn=conn, database=database, datasette=self) @@ -860,7 +860,7 @@ class Datasette: if plugin["static_path"]: add_route( asgi_static(plugin["static_path"]), - "/-/static-plugins/{}/(?P.*)$".format(plugin["name"]), + f"/-/static-plugins/{plugin['name']}/(?P.*)$", ) # Support underscores in name in addition to hyphens, see https://github.com/simonw/datasette/issues/611 add_route( @@ -1156,7 +1156,7 @@ class DatasetteRouter: info = {} message = str(exception) traceback.print_exc() - templates = ["{}.html".format(status), "error.html"] + templates = [f"{status}.html", "error.html"] info.update( { "ok": False, @@ -1234,7 +1234,7 @@ def route_pattern_from_filepath(filepath): re_bits = ["/"] for bit in _curly_re.split(filepath): if _curly_re.match(bit): - re_bits.append("(?P<{}>[^/]*)".format(bit[1:-1])) + re_bits.append(f"(?P<{bit[1:-1]}>[^/]*)") else: re_bits.append(re.escape(bit)) return re.compile("^" + "".join(re_bits) + "$") @@ -1253,7 +1253,7 @@ class DatasetteClient: if not isinstance(path, PrefixedUrlString): path = self.ds.urls.path(path) if path.startswith("/"): - path = "http://localhost{}".format(path) + path = f"http://localhost{path}" return path async def get(self, path, **kwargs): diff --git a/datasette/blob_renderer.py b/datasette/blob_renderer.py index 794b153e..217b3638 100644 --- a/datasette/blob_renderer.py +++ b/datasette/blob_renderer.py @@ -9,10 +9,10 @@ _BLOB_HASH = "_blob_hash" async def render_blob(datasette, database, rows, columns, request, table, view_name): if _BLOB_COLUMN not in request.args: - raise BadRequest("?{}= is required".format(_BLOB_COLUMN)) + raise BadRequest(f"?{_BLOB_COLUMN}= is required") blob_column = request.args[_BLOB_COLUMN] if blob_column not in columns: - raise BadRequest("{} is not a valid column".format(blob_column)) + raise BadRequest(f"{blob_column} is not a valid column") # If ?_blob_hash= provided, use that to select the row - otherwise use first row blob_hash = None @@ -42,7 +42,7 @@ async def render_blob(datasette, database, rows, columns, request, table, view_n filename = "-".join(filename_bits) + ".blob" headers = { "X-Content-Type-Options": "nosniff", - "Content-Disposition": 'attachment; filename="{}"'.format(filename), + "Content-Disposition": f'attachment; filename="{filename}"', } return Response( body=value or b"", diff --git a/datasette/cli.py b/datasette/cli.py index 04d2950b..99075078 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -33,12 +33,12 @@ class Config(click.ParamType): def convert(self, config, param, ctx): if ":" not in config: - self.fail('"{}" should be name:value'.format(config), param, ctx) + self.fail(f'"{config}" should be name:value', param, ctx) return name, value = config.split(":", 1) if name not in DEFAULT_CONFIG: self.fail( - "{} is not a valid option (--help-config to see all)".format(name), + f"{name} is not a valid option (--help-config to see all)", param, ctx, ) @@ -49,13 +49,11 @@ class Config(click.ParamType): try: return name, value_as_boolean(value) except ValueAsBooleanError: - self.fail( - '"{}" should be on/off/true/false/1/0'.format(name), param, ctx - ) + self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx) return elif isinstance(default, int): if not value.isdigit(): - self.fail('"{}" should be an integer'.format(name), param, ctx) + self.fail(f'"{name}" should be an 
integer', param, ctx) return return name, int(value) elif isinstance(default, str): @@ -203,7 +201,7 @@ def package( version_note, secret, port, - **extra_metadata + **extra_metadata, ): "Package specified SQLite files into a new datasette Docker container" if not shutil.which("docker"): @@ -389,7 +387,7 @@ def serve( with formatter.section("Config options"): formatter.write_dl( [ - (option.name, "{} (default={})".format(option.help, option.default)) + (option.name, f"{option.help} (default={option.default})") for option in CONFIG_OPTIONS ] ) @@ -470,7 +468,7 @@ def serve( path = asyncio.get_event_loop().run_until_complete( initial_path_for_datasette(ds) ) - url = "http://{}:{}{}".format(host, port, path) + url = f"http://{host}:{port}{path}" webbrowser.open(url) uvicorn.run( ds.app(), host=host, port=port, log_level="info", lifespan="on", workers=1 @@ -491,7 +489,5 @@ async def check_databases(ds): ) except ConnectionProblem as e: raise click.UsageError( - "Connection to {} failed check: {}".format( - database.path, str(e.args[0]) - ) + f"Connection to {database.path} failed check: {str(e.args[0])}" ) diff --git a/datasette/database.py b/datasette/database.py index a9f39253..ea1424a5 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -57,7 +57,7 @@ class Database: if write: qs = "" return sqlite3.connect( - "file:{}{}".format(self.path, qs), uri=True, check_same_thread=False + f"file:{self.path}{qs}", uri=True, check_same_thread=False ) async def execute_write(self, sql, params=None, block=False): @@ -191,7 +191,7 @@ class Database: try: table_count = ( await self.execute( - "select count(*) from [{}]".format(table), + f"select count(*) from [{table}]", custom_time_limit=limit, ) ).rows[0][0] @@ -362,13 +362,13 @@ class Database: if self.is_memory: tags.append("memory") if self.hash: - tags.append("hash={}".format(self.hash)) + tags.append(f"hash={self.hash}") if self.size is not None: - tags.append("size={}".format(self.size)) + tags.append(f"size={self.size}") tags_str = "" if tags: - tags_str = " ({})".format(", ".join(tags)) - return "".format(self.name, tags_str) + tags_str = f" ({', '.join(tags)})" + return f"" class WriteTask: diff --git a/datasette/facets.py b/datasette/facets.py index 1712db9b..a818a9e9 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -86,7 +86,7 @@ class Facet: self.database = database # For foreign key expansion. Can be None for e.g. 
canned SQL queries: self.table = table - self.sql = sql or "select * from [{}]".format(table) + self.sql = sql or f"select * from [{table}]" self.params = params or [] self.metadata = metadata # row_count can be None, in which case we calculate it ourselves: @@ -114,7 +114,7 @@ class Facet: # Detect column names using the "limit 0" trick return ( await self.ds.execute( - self.database, "select * from ({}) limit 0".format(sql), params or [] + self.database, f"select * from ({sql}) limit 0", params or [] ) ).columns @@ -123,7 +123,7 @@ class Facet: self.row_count = ( await self.ds.execute( self.database, - "select count(*) from ({})".format(self.sql), + f"select count(*) from ({self.sql})", self.params, ) ).rows[0][0] @@ -371,14 +371,14 @@ class ArrayFacet(Facet): pairs = self.get_querystring_pairs() for row in facet_rows: value = str(row["value"]) - selected = ("{}__arraycontains".format(column), value) in pairs + selected = (f"{column}__arraycontains", value) in pairs if selected: toggle_path = path_with_removed_args( - self.request, {"{}__arraycontains".format(column): value} + self.request, {f"{column}__arraycontains": value} ) else: toggle_path = path_with_added_args( - self.request, {"{}__arraycontains".format(column): value} + self.request, {f"{column}__arraycontains": value} ) facet_results_values.append( { @@ -482,16 +482,14 @@ class DateFacet(Facet): } facet_rows = facet_rows_results.rows[:facet_size] for row in facet_rows: - selected = str(args.get("{}__date".format(column))) == str( - row["value"] - ) + selected = str(args.get(f"{column}__date")) == str(row["value"]) if selected: toggle_path = path_with_removed_args( - self.request, {"{}__date".format(column): str(row["value"])} + self.request, {f"{column}__date": str(row["value"])} ) else: toggle_path = path_with_added_args( - self.request, {"{}__date".format(column): row["value"]} + self.request, {f"{column}__date": row["value"]} ) facet_results_values.append( { diff --git a/datasette/filters.py b/datasette/filters.py index 4891154a..1524b32a 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -43,7 +43,7 @@ class TemplatedFilter(Filter): kwargs = {"c": column} converted = None else: - kwargs = {"c": column, "p": "p{}".format(param_counter), "t": table} + kwargs = {"c": column, "p": f"p{param_counter}", "t": table} return self.sql_template.format(**kwargs), converted def human_clause(self, column, value): @@ -69,12 +69,12 @@ class InFilter(Filter): def where_clause(self, table, column, value, param_counter): values = self.split_value(value) - params = [":p{}".format(param_counter + i) for i in range(len(values))] - sql = "{} in ({})".format(escape_sqlite(column), ", ".join(params)) + params = [f":p{param_counter + i}" for i in range(len(values))] + sql = f"{escape_sqlite(column)} in ({', '.join(params)})" return sql, values def human_clause(self, column, value): - return "{} in {}".format(column, json.dumps(self.split_value(value))) + return f"{column} in {json.dumps(self.split_value(value))}" class NotInFilter(InFilter): @@ -83,12 +83,12 @@ class NotInFilter(InFilter): def where_clause(self, table, column, value, param_counter): values = self.split_value(value) - params = [":p{}".format(param_counter + i) for i in range(len(values))] - sql = "{} not in ({})".format(escape_sqlite(column), ", ".join(params)) + params = [f":p{param_counter + i}" for i in range(len(values))] + sql = f"{escape_sqlite(column)} not in ({', '.join(params)})" return sql, values def human_clause(self, column, value): - return "{} not in 
{}".format(column, json.dumps(self.split_value(value))) + return f"{column} not in {json.dumps(self.split_value(value))}" class Filters: @@ -221,7 +221,7 @@ class Filters: s = " and ".join(and_bits) if not s: return "" - return "where {}".format(s) + return f"where {s}" def selections(self): "Yields (column, lookup, value) tuples" @@ -265,7 +265,7 @@ class Filters: if not isinstance(param, list): param = [param] for individual_param in param: - param_id = "p{}".format(i) + param_id = f"p{i}" params[param_id] = individual_param i += 1 return sql_bits, params diff --git a/datasette/inspect.py b/datasette/inspect.py index 2324c02c..4d538e5f 100644 --- a/datasette/inspect.py +++ b/datasette/inspect.py @@ -47,7 +47,7 @@ def inspect_tables(conn, database_metadata): try: count = conn.execute( - "select count(*) from {}".format(escape_sqlite(table)) + f"select count(*) from {escape_sqlite(table)}" ).fetchone()[0] except sqlite3.OperationalError: # This can happen when running against a FTS virtual table diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 8f99dc2e..54f55fcb 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -100,9 +100,7 @@ def publish_subcommand(publish): extra_metadata["plugins"] = {} for plugin_name, plugin_setting, setting_value in plugin_secret: environment_variable = ( - "{}_{}".format(plugin_name, plugin_setting) - .upper() - .replace("-", "_") + f"{plugin_name}_{plugin_setting}".upper().replace("-", "_") ) environment_variables[environment_variable] = setting_value extra_metadata["plugins"].setdefault(plugin_name, {})[ @@ -133,8 +131,8 @@ def publish_subcommand(publish): print(open("Dockerfile").read()) print("\n====================\n") - image_id = "gcr.io/{project}/{name}".format(project=project, name=name) - check_call("gcloud builds submit --tag {}".format(image_id), shell=True) + image_id = f"gcr.io/{project}/{name}" + check_call(f"gcloud builds submit --tag {image_id}", shell=True) check_call( "gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}".format( image_id, service, " --memory {}".format(memory) if memory else "" diff --git a/datasette/publish/common.py b/datasette/publish/common.py index 49a4798e..b6570290 100644 --- a/datasette/publish/common.py +++ b/datasette/publish/common.py @@ -85,9 +85,7 @@ def fail_if_publish_binary_not_installed(binary, publish_target, install_link): err=True, ) click.echo( - "Follow the instructions at {install_link}".format( - install_link=install_link - ), + f"Follow the instructions at {install_link}", err=True, ) sys.exit(1) diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index 24305de5..c772b476 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -83,9 +83,7 @@ def publish_subcommand(publish): extra_metadata["plugins"] = {} for plugin_name, plugin_setting, setting_value in plugin_secret: environment_variable = ( - "{}_{}".format(plugin_name, plugin_setting) - .upper() - .replace("-", "_") + f"{plugin_name}_{plugin_setting}".upper().replace("-", "_") ) environment_variables[environment_variable] = setting_value extra_metadata["plugins"].setdefault(plugin_name, {})[ @@ -129,9 +127,7 @@ def publish_subcommand(publish): app_name = json.loads(create_output)["name"] for key, value in environment_variables.items(): - call( - ["heroku", "config:set", "-a", app_name, "{}={}".format(key, value)] - ) + call(["heroku", "config:set", "-a", app_name, f"{key}={value}"]) tar_option = [] if tar: tar_option = 
["--tar", tar] @@ -181,9 +177,7 @@ def temporary_heroku_directory( if branch: install = [ - "https://github.com/simonw/datasette/archive/{branch}.zip".format( - branch=branch - ) + f"https://github.com/simonw/datasette/archive/{branch}.zip" ] + list(install) else: install = ["datasette"] + list(install) @@ -216,7 +210,7 @@ def temporary_heroku_directory( link_or_copy_directory( os.path.join(saved_cwd, path), os.path.join(tmp.name, mount_point) ) - extras.extend(["--static", "{}:{}".format(mount_point, mount_point)]) + extras.extend(["--static", f"{mount_point}:{mount_point}"]) quoted_files = " ".join( ["-i {}".format(shlex.quote(file_name)) for file_name in file_names] diff --git a/datasette/renderer.py b/datasette/renderer.py index bcde8516..d779b44f 100644 --- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -82,7 +82,7 @@ def json_renderer(args, data, view_name): status_code = 400 data = { "ok": False, - "error": "Invalid _shape: {}".format(shape), + "error": f"Invalid _shape: {shape}", "status": 400, "title": None, } @@ -96,7 +96,7 @@ def json_renderer(args, data, view_name): content_type = "application/json; charset=utf-8" headers = {} if next_url: - headers["link"] = '<{}>; rel="next"'.format(next_url) + headers["link"] = f'<{next_url}>; rel="next"' return Response( body, status=status_code, headers=headers, content_type=content_type ) diff --git a/datasette/tracer.py b/datasette/tracer.py index a638b140..8f666767 100644 --- a/datasette/tracer.py +++ b/datasette/tracer.py @@ -28,7 +28,7 @@ def get_task_id(): def trace(type, **kwargs): assert not TRACE_RESERVED_KEYS.intersection( kwargs.keys() - ), ".trace() keyword parameters cannot include {}".format(TRACE_RESERVED_KEYS) + ), f".trace() keyword parameters cannot include {TRACE_RESERVED_KEYS}" task_id = get_task_id() if task_id is None: yield @@ -124,7 +124,7 @@ class AsgiTracer: content_type = "" if "text/html" in content_type and b"" in accumulated_body: extra = json.dumps(trace_info, indent=2) - extra_html = "
<pre>{}</pre>".format(extra).encode("utf8")
+ extra_html = f"<pre>{extra}</pre>
    ".encode("utf8") accumulated_body = accumulated_body.replace(b"", extra_html) elif "json" in content_type and accumulated_body.startswith(b"{"): data = json.loads(accumulated_body.decode("utf8")) diff --git a/datasette/url_builder.py b/datasette/url_builder.py index bcc4f39d..697f60ae 100644 --- a/datasette/url_builder.py +++ b/datasette/url_builder.py @@ -19,10 +19,10 @@ class Urls: return self.path("", format=format) def static(self, path): - return self.path("-/static/{}".format(path)) + return self.path(f"-/static/{path}") def static_plugins(self, plugin, path): - return self.path("-/static-plugins/{}/{}".format(plugin, path)) + return self.path(f"-/static-plugins/{plugin}/{path}") def logout(self): return self.path("-/logout") @@ -30,27 +30,25 @@ class Urls: def database(self, database, format=None): db = self.ds.databases[database] if self.ds.config("hash_urls") and db.hash: - path = self.path( - "{}-{}".format(database, db.hash[:HASH_LENGTH]), format=format - ) + path = self.path(f"{database}-{db.hash[:HASH_LENGTH]}", format=format) else: path = self.path(database, format=format) return path def table(self, database, table, format=None): - path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(table)) + path = f"{self.database(database)}/{urllib.parse.quote_plus(table)}" if format is not None: path = path_with_format(path=path, format=format) return PrefixedUrlString(path) def query(self, database, query, format=None): - path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(query)) + path = f"{self.database(database)}/{urllib.parse.quote_plus(query)}" if format is not None: path = path_with_format(path=path, format=format) return PrefixedUrlString(path) def row(self, database, table, row_path, format=None): - path = "{}/{}".format(self.table(database, table), row_path) + path = f"{self.table(database, table)}/{row_path}" if format is not None: path = path_with_format(path=path, format=format) return PrefixedUrlString(path) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index a7d96401..02b59b2b 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -115,13 +115,10 @@ def compound_keys_after_sql(pks, start_index=0): last = pks_left[-1] rest = pks_left[:-1] and_clauses = [ - "{} = :p{}".format(escape_sqlite(pk), (i + start_index)) - for i, pk in enumerate(rest) + f"{escape_sqlite(pk)} = :p{i + start_index}" for i, pk in enumerate(rest) ] - and_clauses.append( - "{} > :p{}".format(escape_sqlite(last), (len(rest) + start_index)) - ) - or_clauses.append("({})".format(" and ".join(and_clauses))) + and_clauses.append(f"{escape_sqlite(last)} > :p{len(rest) + start_index}") + or_clauses.append(f"({' and '.join(and_clauses)})") pks_left.pop() or_clauses.reverse() return "({})".format("\n or\n".join(or_clauses)) @@ -195,7 +192,7 @@ allowed_pragmas = ( ) disallawed_sql_res = [ ( - re.compile("pragma(?!_({}))".format("|".join(allowed_pragmas))), + re.compile(f"pragma(?!_({'|'.join(allowed_pragmas)}))"), "Statement may not contain PRAGMA", ) ] @@ -215,7 +212,7 @@ def validate_sql_select(sql): def append_querystring(url, querystring): op = "&" if ("?" in url) else "?" 
- return "{}{}{}".format(url, op, querystring) + return f"{url}{op}{querystring}" def path_with_added_args(request, args, path=None): @@ -230,7 +227,7 @@ def path_with_added_args(request, args, path=None): current.extend([(key, value) for key, value in args if value is not None]) query_string = urllib.parse.urlencode(current) if query_string: - query_string = "?{}".format(query_string) + query_string = f"?{query_string}" return path + query_string @@ -259,7 +256,7 @@ def path_with_removed_args(request, args, path=None): current.append((key, value)) query_string = urllib.parse.urlencode(current) if query_string: - query_string = "?{}".format(query_string) + query_string = f"?{query_string}" return path + query_string @@ -275,7 +272,7 @@ def path_with_replaced_args(request, args, path=None): current.extend([p for p in args if p[1] is not None]) query_string = urllib.parse.urlencode(current) if query_string: - query_string = "?{}".format(query_string) + query_string = f"?{query_string}" return path + query_string @@ -285,7 +282,7 @@ _boring_keyword_re = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_]*$") def escape_css_string(s): return _css_re.sub( - lambda m: "\\" + ("{:X}".format(ord(m.group())).zfill(6)), + lambda m: "\\" + (f"{ord(m.group()):X}".zfill(6)), s.replace("\r\n", "\n"), ) @@ -294,7 +291,7 @@ def escape_sqlite(s): if _boring_keyword_re.match(s) and (s.lower() not in reserved_words): return s else: - return "[{}]".format(s) + return f"[{s}]" def make_dockerfile( @@ -319,27 +316,27 @@ def make_dockerfile( cmd.extend(["-i", filename]) cmd.extend(["--cors", "--inspect-file", "inspect-data.json"]) if metadata_file: - cmd.extend(["--metadata", "{}".format(metadata_file)]) + cmd.extend(["--metadata", f"{metadata_file}"]) if template_dir: cmd.extend(["--template-dir", "templates/"]) if plugins_dir: cmd.extend(["--plugins-dir", "plugins/"]) if version_note: - cmd.extend(["--version-note", "{}".format(version_note)]) + cmd.extend(["--version-note", f"{version_note}"]) if static: for mount_point, _ in static: - cmd.extend(["--static", "{}:{}".format(mount_point, mount_point)]) + cmd.extend(["--static", f"{mount_point}:{mount_point}"]) if extra_options: for opt in extra_options.split(): - cmd.append("{}".format(opt)) + cmd.append(f"{opt}") cmd = [shlex.quote(part) for part in cmd] # port attribute is a (fixed) env variable and should not be quoted cmd.extend(["--port", "$PORT"]) cmd = " ".join(cmd) if branch: - install = [ - "https://github.com/simonw/datasette/archive/{}.zip".format(branch) - ] + list(install) + install = [f"https://github.com/simonw/datasette/archive/{branch}.zip"] + list( + install + ) else: install = ["datasette"] + list(install) @@ -449,7 +446,7 @@ def detect_primary_keys(conn, table): " Figure out primary keys for a table. 
" table_info_rows = [ row - for row in conn.execute('PRAGMA table_info("{}")'.format(table)).fetchall() + for row in conn.execute(f'PRAGMA table_info("{table}")').fetchall() if row[-1] ] table_info_rows.sort(key=lambda row: row[-1]) @@ -457,7 +454,7 @@ def detect_primary_keys(conn, table): def get_outbound_foreign_keys(conn, table): - infos = conn.execute("PRAGMA foreign_key_list([{}])".format(table)).fetchall() + infos = conn.execute(f"PRAGMA foreign_key_list([{table}])").fetchall() fks = [] for info in infos: if info is not None: @@ -476,7 +473,7 @@ def get_all_foreign_keys(conn): for table in tables: table_to_foreign_keys[table] = {"incoming": [], "outgoing": []} for table in tables: - infos = conn.execute("PRAGMA foreign_key_list([{}])".format(table)).fetchall() + infos = conn.execute(f"PRAGMA foreign_key_list([{table}])").fetchall() for info in infos: if info is not None: id, seq, table_name, from_, to_, on_update, on_delete, match = info @@ -544,9 +541,7 @@ def table_columns(conn, table): def table_column_details(conn, table): return [ Column(*r) - for r in conn.execute( - "PRAGMA table_info({});".format(escape_sqlite(table)) - ).fetchall() + for r in conn.execute(f"PRAGMA table_info({escape_sqlite(table)});").fetchall() ] @@ -562,9 +557,7 @@ def filters_should_redirect(special_args): if "__" in filter_op: filter_op, filter_value = filter_op.split("__", 1) if filter_column: - redirect_params.append( - ("{}__{}".format(filter_column, filter_op), filter_value) - ) + redirect_params.append((f"{filter_column}__{filter_op}", filter_value)) for key in ("_filter_column", "_filter_op", "_filter_value"): if key in special_args: redirect_params.append((key, None)) @@ -573,17 +566,17 @@ def filters_should_redirect(special_args): for column_key in column_keys: number = column_key.split("_")[-1] column = special_args[column_key] - op = special_args.get("_filter_op_{}".format(number)) or "exact" - value = special_args.get("_filter_value_{}".format(number)) or "" + op = special_args.get(f"_filter_op_{number}") or "exact" + value = special_args.get(f"_filter_value_{number}") or "" if "__" in op: op, value = op.split("__", 1) if column: - redirect_params.append(("{}__{}".format(column, op), value)) + redirect_params.append((f"{column}__{op}", value)) redirect_params.extend( [ - ("_filter_column_{}".format(number), None), - ("_filter_op_{}".format(number), None), - ("_filter_value_{}".format(number), None), + (f"_filter_column_{number}", None), + (f"_filter_op_{number}", None), + (f"_filter_value_{number}", None), ] ) return redirect_params @@ -672,7 +665,7 @@ async def resolve_table_and_format( # Check if table ends with a known format formats = list(allowed_formats) + ["csv", "jsono"] for _format in formats: - if table_and_format.endswith(".{}".format(_format)): + if table_and_format.endswith(f".{_format}"): table = table_and_format[: -(len(_format) + 1)] return table, _format return table_and_format, None @@ -683,20 +676,20 @@ def path_with_format( ): qs = extra_qs or {} path = request.path if request else path - if replace_format and path.endswith(".{}".format(replace_format)): + if replace_format and path.endswith(f".{replace_format}"): path = path[: -(1 + len(replace_format))] if "." 
in path: qs["_format"] = format else: - path = "{}.{}".format(path, format) + path = f"{path}.{format}" if qs: extra = urllib.parse.urlencode(sorted(qs.items())) if request and request.query_string: - path = "{}?{}&{}".format(path, request.query_string, extra) + path = f"{path}?{request.query_string}&{extra}" else: - path = "{}?{}".format(path, extra) + path = f"{path}?{extra}" elif request and request.query_string: - path = "{}?{}".format(path, request.query_string) + path = f"{path}?{request.query_string}" return path @@ -742,9 +735,7 @@ class LimitedWriter: async def write(self, bytes): self.bytes_count += len(bytes) if self.limit_bytes and (self.bytes_count > self.limit_bytes): - raise WriteLimitExceeded( - "CSV contains more than {} bytes".format(self.limit_bytes) - ) + raise WriteLimitExceeded(f"CSV contains more than {self.limit_bytes} bytes") await self.writer.write(bytes) @@ -763,14 +754,14 @@ class StaticMount(click.ParamType): def convert(self, value, param, ctx): if ":" not in value: self.fail( - '"{}" should be of format mountpoint:directory'.format(value), + f'"{value}" should be of format mountpoint:directory', param, ctx, ) path, dirpath = value.split(":", 1) dirpath = os.path.abspath(dirpath) if not os.path.exists(dirpath) or not os.path.isdir(dirpath): - self.fail("%s is not a valid directory path" % value, param, ctx) + self.fail(f"{value} is not a valid directory path", param, ctx) return path, dirpath @@ -781,9 +772,9 @@ def format_bytes(bytes): break current = current / 1024 if unit == "bytes": - return "{} {}".format(int(current), unit) + return f"{int(current)} {unit}" else: - return "{:.1f} {}".format(current, unit) + return f"{current:.1f} {unit}" _escape_fts_re = re.compile(r'\s+|(".*?")') @@ -820,7 +811,7 @@ class MultiParams: self._data = new_data def __repr__(self): - return "".format(self._data) + return f"" def __contains__(self, key): return key in self._data @@ -867,7 +858,7 @@ def check_connection(conn): for table in tables: try: conn.execute( - "PRAGMA table_info({});".format(escape_sqlite(table)), + f"PRAGMA table_info({escape_sqlite(table)});", ) except sqlite3.OperationalError as e: if e.args[0] == "no such module: VirtualSpatialIndex": diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index e4c8ce5c..ce78a597 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -260,7 +260,7 @@ async def asgi_send_file( ): headers = headers or {} if filename: - headers["content-disposition"] = 'attachment; filename="{}"'.format(filename) + headers["content-disposition"] = f'attachment; filename="{filename}"' first = True headers["content-length"] = str((await aiofiles.os.stat(str(filepath))).st_size) async with aiofiles.open(str(filepath), mode="rb") as fp: diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index 8a8810e7..bcbc1c7a 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -32,7 +32,7 @@ class TestResponse: return any( h for h in self.httpx_response.headers.get_list("set-cookie") - if h.startswith('{}="";'.format(cookie)) + if h.startswith(f'{cookie}="";') ) @property @@ -125,9 +125,7 @@ class TestClient: if allow_redirects and response.status in (301, 302): assert ( redirect_count < self.max_redirects - ), "Redirected {} times, max_redirects={}".format( - redirect_count, self.max_redirects - ) + ), f"Redirected {redirect_count} times, max_redirects={self.max_redirects}" location = response.headers["Location"] return await self._request( location, allow_redirects=True, 
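                    # Recurse into the redirect target, incrementing redirect_count
                    # so the max_redirects assertion above can halt redirect loops.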
redirect_count=redirect_count + 1 diff --git a/datasette/views/base.py b/datasette/views/base.py index 430489c1..b3a54bcc 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -125,9 +125,7 @@ class BaseView: **{ "database_color": self.database_color, "select_templates": [ - "{}{}".format( - "*" if template_name == template.name else "", template_name - ) + f"{'*' if template_name == template.name else ''}{template_name}" for template_name in templates ], }, @@ -165,11 +163,11 @@ class DataView(BaseView): def redirect(self, request, path, forward_querystring=True, remove_args=None): if request.query_string and "?" not in path and forward_querystring: - path = "{}?{}".format(path, request.query_string) + path = f"{path}?{request.query_string}" if remove_args: path = path_with_removed_args(request, remove_args, path=path) r = Response.redirect(path) - r.headers["Link"] = "<{}>; rel=preload".format(path) + r.headers["Link"] = f"<{path}>; rel=preload" if self.ds.cors: r.headers["Access-Control-Allow-Origin"] = "*" return r @@ -184,7 +182,7 @@ class DataView(BaseView): # No matching DB found, maybe it's a name-hash? name_bit, hash_bit = db_name.rsplit("-", 1) if name_bit not in self.ds.databases: - raise NotFound("Database not found: {}".format(name)) + raise NotFound(f"Database not found: {name}") else: name = name_bit hash = hash_bit @@ -194,7 +192,7 @@ class DataView(BaseView): try: db = self.ds.databases[name] except KeyError: - raise NotFound("Database not found: {}".format(name)) + raise NotFound(f"Database not found: {name}") # Verify the hash expected = "000" @@ -217,11 +215,11 @@ class DataView(BaseView): ) kwargs["table"] = table if _format: - kwargs["as_format"] = ".{}".format(_format) + kwargs["as_format"] = f".{_format}" elif kwargs.get("table"): kwargs["table"] = urllib.parse.unquote_plus(kwargs["table"]) - should_redirect = self.ds.urls.path("{}-{}".format(name, expected)) + should_redirect = self.ds.urls.path(f"{name}-{expected}") if kwargs.get("table"): should_redirect += "/" + urllib.parse.quote_plus(kwargs["table"]) if kwargs.get("pk_path"): @@ -294,7 +292,7 @@ class DataView(BaseView): for column in data["columns"]: headings.append(column) if column in expanded_columns: - headings.append("{}_label".format(column)) + headings.append(f"{column}_label") async def stream_fn(r): nonlocal data @@ -505,7 +503,7 @@ class DataView(BaseView): elif isinstance(result, Response): r = result else: - assert False, "{} should be dict or Response".format(result) + assert False, f"{result} should be dict or Response" else: extras = {} if callable(extra_template_data): @@ -581,7 +579,7 @@ class DataView(BaseView): if ttl == 0: ttl_header = "no-cache" else: - ttl_header = "max-age={}".format(ttl) + ttl_header = f"max-age={ttl}" response.headers["Cache-Control"] = ttl_header response.headers["Referrer-Policy"] = "no-referrer" if self.ds.cors: diff --git a/datasette/views/database.py b/datasette/views/database.py index 9a7b96fd..d4ed8570 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -140,7 +140,7 @@ class DatabaseView(DataView): and not db.is_mutable and database != ":memory:", }, - ("database-{}.html".format(to_css_class(database)), "database.html"), + (f"database-{to_css_class(database)}.html", "database.html"), ) @@ -233,7 +233,7 @@ class QueryView(DataView): if _size: extra_args["page_size"] = _size - templates = ["query-{}.html".format(to_css_class(database)), "query.html"] + templates = [f"query-{to_css_class(database)}.html", "query.html"] # 
Execute query - as write or as read if write: @@ -324,9 +324,7 @@ class QueryView(DataView): if canned_query: templates.insert( 0, - "query-{}-{}.html".format( - to_css_class(database), to_css_class(canned_query) - ), + f"query-{to_css_class(database)}-{to_css_class(canned_query)}.html", ) allow_execute_sql = await self.ds.permission_allowed( diff --git a/datasette/views/special.py b/datasette/views/special.py index 397dbc8c..9750dd06 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -111,13 +111,13 @@ class AllowDebugView(BaseView): actor = json.loads(actor_input) actor_input = json.dumps(actor, indent=4) except json.decoder.JSONDecodeError as ex: - errors.append("Actor JSON error: {}".format(ex)) + errors.append(f"Actor JSON error: {ex}") allow_input = request.args.get("allow") or '{"id": "*"}' try: allow = json.loads(allow_input) allow_input = json.dumps(allow, indent=4) except json.decoder.JSONDecodeError as ex: - errors.append("Allow JSON error: {}".format(ex)) + errors.append(f"Allow JSON error: {ex}") result = None if not errors: diff --git a/datasette/views/table.py b/datasette/views/table.py index 9ed45df1..09c2d740 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -212,13 +212,11 @@ class RowTableShared(DataView): # representation, which we have to round off to avoid ugliness. In the vast # majority of cases this rounding will be inconsequential. I hope. value = round(value.to_compact(), 6) - display_value = jinja2.Markup( - "{:~P}".format(value).replace(" ", " ") - ) + display_value = jinja2.Markup(f"{value:~P}".replace(" ", " ")) else: display_value = str(value) if truncate_cells and len(display_value) > truncate_cells: - display_value = display_value[:truncate_cells] + u"\u2026" + display_value = display_value[:truncate_cells] + "\u2026" cells.append( { @@ -307,7 +305,7 @@ class TableView(RowTableShared): is_view = bool(await db.get_view_definition(table)) table_exists = bool(await db.table_exists(table)) if not is_view and not table_exists: - raise NotFound("Table not found: {}".format(table)) + raise NotFound(f"Table not found: {table}") await self.check_permissions( request, @@ -330,7 +328,7 @@ class TableView(RowTableShared): use_rowid = not pks and not is_view if use_rowid: - select = "rowid, {}".format(select_columns) + select = f"rowid, {select_columns}" order_by = "rowid" order_by_pks = "rowid" else: @@ -424,7 +422,7 @@ class TableView(RowTableShared): raise DatasetteError( "Invalid _through - could not find corresponding foreign key" ) - param = "p{}".format(len(params)) + param = f"p{len(params)}" where_clauses.append( "{our_pk} in (select {our_column} from {through_table} where {other_column} = :{param})".format( through_table=escape_sqlite(through_table), @@ -436,7 +434,7 @@ class TableView(RowTableShared): ) params[param] = value extra_human_descriptions.append( - '{}.{} = "{}"'.format(through_table, other_column, value) + f'{through_table}.{other_column} = "{value}"' ) # _search support: @@ -462,7 +460,7 @@ class TableView(RowTableShared): else "escape_fts(:search)", ) ) - extra_human_descriptions.append('search matches "{}"'.format(search)) + extra_human_descriptions.append(f'search matches "{search}"') params["search"] = search else: # More complex: search against specific columns @@ -481,11 +479,9 @@ class TableView(RowTableShared): ) ) extra_human_descriptions.append( - 'search column "{}" matches "{}"'.format( - search_col, search_text - ) + f'search column "{search_col}" matches "{search_text}"' ) - 
params["search_{}".format(i)] = search_text + params[f"search_{i}"] = search_text sortable_columns = set() @@ -506,15 +502,15 @@ class TableView(RowTableShared): if sort: if sort not in sortable_columns: - raise DatasetteError("Cannot sort table by {}".format(sort)) + raise DatasetteError(f"Cannot sort table by {sort}") order_by = escape_sqlite(sort) if sort_desc: if sort_desc not in sortable_columns: - raise DatasetteError("Cannot sort table by {}".format(sort_desc)) + raise DatasetteError(f"Cannot sort table by {sort_desc}") - order_by = "{} desc".format(escape_sqlite(sort_desc)) + order_by = f"{escape_sqlite(sort_desc)} desc" from_sql = "from {table_name} {where}".format( table_name=escape_sqlite(table), @@ -525,14 +521,14 @@ class TableView(RowTableShared): # Copy of params so we can mutate them later: from_sql_params = dict(**params) - count_sql = "select count(*) {}".format(from_sql) + count_sql = f"select count(*) {from_sql}" _next = _next or special_args.get("_next") offset = "" if _next: if is_view: # _next is an offset - offset = " offset {}".format(int(_next)) + offset = f" offset {int(_next)}" else: components = urlsafe_components(_next) # If a sort order is applied, the first of these is the sort value @@ -546,8 +542,8 @@ class TableView(RowTableShared): # Figure out the SQL for next-based-on-primary-key first next_by_pk_clauses = [] if use_rowid: - next_by_pk_clauses.append("rowid > :p{}".format(len(params))) - params["p{}".format(len(params))] = components[0] + next_by_pk_clauses.append(f"rowid > :p{len(params)}") + params[f"p{len(params)}"] = components[0] else: # Apply the tie-breaker based on primary keys if len(components) == len(pks): @@ -556,7 +552,7 @@ class TableView(RowTableShared): compound_keys_after_sql(pks, param_len) ) for i, pk_value in enumerate(components): - params["p{}".format(param_len + i)] = pk_value + params[f"p{param_len + i}"] = pk_value # Now add the sort SQL, which may incorporate next_by_pk_clauses if sort or sort_desc: @@ -590,17 +586,17 @@ class TableView(RowTableShared): next_clauses=" and ".join(next_by_pk_clauses), ) ) - params["p{}".format(len(params))] = sort_value - order_by = "{}, {}".format(order_by, order_by_pks) + params[f"p{len(params)}"] = sort_value + order_by = f"{order_by}, {order_by_pks}" else: where_clauses.extend(next_by_pk_clauses) where_clause = "" if where_clauses: - where_clause = "where {} ".format(" and ".join(where_clauses)) + where_clause = f"where {' and '.join(where_clauses)} " if order_by: - order_by = "order by {} ".format(order_by) + order_by = f"order by {order_by} " extra_args = {} # Handle ?_size=500 @@ -617,9 +613,7 @@ class TableView(RowTableShared): raise BadRequest("_size must be a positive integer") if page_size > self.ds.max_returned_rows: - raise BadRequest( - "_size must be <= {}".format(self.ds.max_returned_rows) - ) + raise BadRequest(f"_size must be <= {self.ds.max_returned_rows}") extra_args["page_size"] = page_size else: @@ -631,9 +625,7 @@ class TableView(RowTableShared): where=where_clause, order_by=order_by, ) - sql = "{sql_no_limit} limit {limit}{offset}".format( - sql_no_limit=sql_no_limit.rstrip(), limit=page_size + 1, offset=offset - ) + sql = f"{sql_no_limit.rstrip()} limit {page_size + 1}{offset}" if request.args.get("_timelimit"): extra_args["custom_time_limit"] = int(request.args.get("_timelimit")) @@ -645,7 +637,7 @@ class TableView(RowTableShared): if ( not db.is_mutable and self.ds.inspect_data - and count_sql == "select count(*) from {} ".format(table) + and count_sql == f"select 
count(*) from {table} " ): try: filtered_table_rows_count = self.ds.inspect_data[database]["tables"][ @@ -763,7 +755,7 @@ class TableView(RowTableShared): prefix = "$null" else: prefix = urllib.parse.quote_plus(str(prefix)) - next_value = "{},{}".format(prefix, next_value) + next_value = f"{prefix},{next_value}" added_args = {"_next": next_value} if sort: added_args["_sort"] = sort @@ -879,12 +871,8 @@ class TableView(RowTableShared): "sort_desc": sort_desc, "disable_sort": is_view, "custom_table_templates": [ - "_table-{}-{}.html".format( - to_css_class(database), to_css_class(table) - ), - "_table-table-{}-{}.html".format( - to_css_class(database), to_css_class(table) - ), + f"_table-{to_css_class(database)}-{to_css_class(table)}.html", + f"_table-table-{to_css_class(database)}-{to_css_class(table)}.html", "_table.html", ], "metadata": metadata, @@ -918,7 +906,7 @@ class TableView(RowTableShared): }, extra_template, ( - "table-{}-{}.html".format(to_css_class(database), to_css_class(table)), + f"table-{to_css_class(database)}-{to_css_class(table)}.html", "table.html", ), ) @@ -931,13 +919,11 @@ async def _sql_params_pks(db, table, pk_values): if use_rowid: select = "rowid, *" pks = ["rowid"] - wheres = ['"{}"=:p{}'.format(pk, i) for i, pk in enumerate(pks)] - sql = "select {} from {} where {}".format( - select, escape_sqlite(table), " AND ".join(wheres) - ) + wheres = [f'"{pk}"=:p{i}' for i, pk in enumerate(pks)] + sql = f"select {select} from {escape_sqlite(table)} where {' AND '.join(wheres)}" params = {} for i, pk_value in enumerate(pk_values): - params["p{}".format(i)] = pk_value + params[f"p{i}"] = pk_value return sql, params, pks @@ -960,7 +946,7 @@ class RowView(RowTableShared): columns = [r[0] for r in results.description] rows = list(results.rows) if not rows: - raise NotFound("Record not found: {}".format(pk_values)) + raise NotFound(f"Record not found: {pk_values}") async def template_data(): display_columns, display_rows = await self.display_columns_and_rows( @@ -981,12 +967,8 @@ class RowView(RowTableShared): "display_columns": display_columns, "display_rows": display_rows, "custom_table_templates": [ - "_table-{}-{}.html".format( - to_css_class(database), to_css_class(table) - ), - "_table-row-{}-{}.html".format( - to_css_class(database), to_css_class(table) - ), + f"_table-{to_css_class(database)}-{to_css_class(table)}.html", + f"_table-row-{to_css_class(database)}-{to_css_class(table)}.html", "_table.html", ], "metadata": (self.ds.metadata("databases") or {}) @@ -1014,7 +996,7 @@ class RowView(RowTableShared): data, template_data, ( - "row-{}-{}.html".format(to_css_class(database), to_css_class(table)), + f"row-{to_css_class(database)}-{to_css_class(table)}.html", "row.html", ), ) diff --git a/tests/fixtures.py b/tests/fixtures.py index 183b8ca4..3abca821 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -247,7 +247,7 @@ def generate_compound_rows(num): for a, b, c in itertools.islice( itertools.product(string.ascii_lowercase, repeat=3), num ): - yield a, b, c, "{}-{}-{}".format(a, b, c) + yield a, b, c, f"{a}-{b}-{c}" def generate_sortable_rows(num): @@ -258,7 +258,7 @@ def generate_sortable_rows(num): yield { "pk1": a, "pk2": b, - "content": "{}-{}".format(a, b), + "content": f"{a}-{b}", "sortable": rand.randint(-100, 100), "sortable_with_nulls": rand.choice([None, rand.random(), rand.random()]), "sortable_with_nulls_2": rand.choice([None, rand.random(), rand.random()]), @@ -742,7 +742,7 @@ def cli(db_filename, metadata, plugins_path, recreate): if 
pathlib.Path(db_filename).exists(): if not recreate: raise click.ClickException( - "{} already exists, use --recreate to reset it".format(db_filename) + f"{db_filename} already exists, use --recreate to reset it" ) else: pathlib.Path(db_filename).unlink() @@ -751,10 +751,10 @@ def cli(db_filename, metadata, plugins_path, recreate): for sql, params in TABLE_PARAMETERIZED_SQL: with conn: conn.execute(sql, params) - print("Test tables written to {}".format(db_filename)) + print(f"Test tables written to {db_filename}") if metadata: open(metadata, "w").write(json.dumps(METADATA, indent=4)) - print("- metadata written to {}".format(metadata)) + print(f"- metadata written to {metadata}") if plugins_path: path = pathlib.Path(plugins_path) if not path.exists(): @@ -763,7 +763,7 @@ def cli(db_filename, metadata, plugins_path, recreate): for filepath in test_plugins.glob("*.py"): newpath = path / filepath.name newpath.write_text(filepath.open().read()) - print(" Wrote plugin: {}".format(newpath)) + print(f" Wrote plugin: {newpath}") if __name__ == "__main__": diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 4ac3953b..3f5ec832 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -130,7 +130,7 @@ def extra_template_vars( @hookimpl def prepare_jinja2_environment(env): - env.filters["format_numeric"] = lambda s: "{:,.0f}".format(float(s)) + env.filters["format_numeric"] = lambda s: f"{float(s):,.0f}" @hookimpl @@ -207,7 +207,7 @@ def register_routes(): async def two(request): name = request.url_vars["name"] greeting = request.args.get("greeting") - return Response.text("{} {}".format(greeting, name)) + return Response.text(f"{greeting} {name}") async def three(scope, send): await asgi_send_json( @@ -281,11 +281,7 @@ def startup(datasette): @hookimpl def canned_queries(datasette, database, actor): - return { - "from_hook": "select 1, '{}' as actor_id".format( - actor["id"] if actor else "null" - ) - } + return {"from_hook": f"select 1, '{actor['id'] if actor else 'null'}' as actor_id"} @hookimpl @@ -329,9 +325,9 @@ def table_actions(datasette, database, table, actor): return [ { "href": datasette.urls.instance(), - "label": "Database: {}".format(database), + "label": f"Database: {database}", }, - {"href": datasette.urls.instance(), "label": "Table: {}".format(table)}, + {"href": datasette.urls.instance(), "label": f"Table: {table}"}, ] @@ -341,6 +337,6 @@ def database_actions(datasette, database, actor): return [ { "href": datasette.urls.instance(), - "label": "Database: {}".format(database), + "label": f"Database: {database}", } ] diff --git a/tests/test_api.py b/tests/test_api.py index 93097574..3d48d350 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -918,7 +918,7 @@ def test_paginate_compound_keys_with_extra_filters(app_client): ], ) def test_sortable(app_client, query_string, sort_key, human_description_en): - path = "/fixtures/sortable.json?_shape=objects&{}".format(query_string) + path = f"/fixtures/sortable.json?_shape=objects&{query_string}" fetched = [] page = 0 while path: @@ -969,8 +969,8 @@ def test_sortable_columns_metadata(app_client): assert "Cannot sort table by content" == response.json["error"] # no_primary_key has ALL sort options disabled for column in ("content", "a", "b", "c"): - response = app_client.get("/fixtures/sortable.json?_sort={}".format(column)) - assert "Cannot sort table by {}".format(column) == response.json["error"] + response = app_client.get(f"/fixtures/sortable.json?_sort={column}") + assert f"Cannot sort 
table by {column}" == response.json["error"] @pytest.mark.parametrize( @@ -1877,7 +1877,7 @@ def test_binary_data_in_json(app_client, path, expected_json, expected_text): ], ) def test_paginate_using_link_header(app_client, qs): - path = "/fixtures/compound_three_primary_keys.json{}".format(qs) + path = f"/fixtures/compound_three_primary_keys.json{qs}" num_pages = 0 while path: response = app_client.get(path) diff --git a/tests/test_auth.py b/tests/test_auth.py index 34138aa6..5f3985db 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -7,7 +7,7 @@ import time def test_auth_token(app_client): "The /-/auth-token endpoint sets the correct cookie" assert app_client.ds._root_token is not None - path = "/-/auth-token?token={}".format(app_client.ds._root_token) + path = f"/-/auth-token?token={app_client.ds._root_token}" response = app_client.get( path, allow_redirects=False, diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index 9620c693..65f23cc7 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -82,7 +82,7 @@ def test_insert(canned_write_client): def test_canned_query_form_csrf_hidden_field( canned_write_client, query_name, expect_csrf_hidden_field ): - response = canned_write_client.get("/data/{}".format(query_name)) + response = canned_write_client.get(f"/data/{query_name}") html = response.text fragment = '".format(expected_considered) in response.text - ) + assert f"" in response.text def test_table_html_simple_primary_key(app_client): @@ -607,9 +605,7 @@ def test_table_html_simple_primary_key(app_client): for expected_col, th in zip(("content",), ths[1:]): a = th.find("a") assert expected_col == a.string - assert a["href"].endswith( - "/simple_primary_key?_size=3&_sort={}".format(expected_col) - ) + assert a["href"].endswith(f"/simple_primary_key?_size=3&_sort={expected_col}") assert ["nofollow"] == a["rel"] assert [ [ @@ -730,11 +726,11 @@ def test_table_html_no_primary_key(app_client): '
    '.format( i, i ), - ''.format(i), - ''.format(i), - ''.format(i), - ''.format(i), - ''.format(i), + f'', + f'', + f'', + f'', + f'', ] for i in range(1, 51) ] @@ -782,8 +778,8 @@ def test_table_html_compound_primary_key(app_client): for expected_col, th in zip(("pk1", "pk2", "content"), ths[1:]): a = th.find("a") assert expected_col == a.string - assert th["class"] == ["col-{}".format(expected_col)] - assert a["href"].endswith("/compound_primary_key?_sort={}".format(expected_col)) + assert th["class"] == [f"col-{expected_col}"] + assert a["href"].endswith(f"/compound_primary_key?_sort={expected_col}") expected = [ [ '', @@ -1100,9 +1096,7 @@ def test_404(app_client, path): response = app_client.get(path) assert 404 == response.status assert ( - 'Edit SQL' if expected: assert expected_link in response.text else: @@ -1555,10 +1548,10 @@ def test_navigation_menu_links( for link in should_have_links: assert ( details.find("a", {"href": link}) is not None - ), "{} expected but missing from nav menu".format(link) + ), f"{link} expected but missing from nav menu" if should_not_have_links: for link in should_not_have_links: assert ( details.find("a", {"href": link}) is None - ), "{} found but should not have been in nav menu".format(link) + ), f"{link} found but should not have been in nav menu" diff --git a/tests/test_internals_urls.py b/tests/test_internals_urls.py index a56d735b..89290911 100644 --- a/tests/test_internals_urls.py +++ b/tests/test_internals_urls.py @@ -157,7 +157,7 @@ def test_database_hashed(app_client_with_hash, base_url): ds._config["base_url"] = base_url db_hash = ds.get_database("fixtures").hash assert len(db_hash) == 64 - expected = "{}fixtures-{}".format(base_url, db_hash[:7]) + expected = f"{base_url}fixtures-{db_hash[:7]}" assert ds.urls.database("fixtures") == expected assert ds.urls.table("fixtures", "name") == expected + "/name" assert ds.urls.query("fixtures", "name") == expected + "/name" diff --git a/tests/test_messages.py b/tests/test_messages.py index 830244e1..3af5439a 100644 --- a/tests/test_messages.py +++ b/tests/test_messages.py @@ -11,7 +11,7 @@ import pytest ], ) def test_add_message_sets_cookie(app_client, qs, expected): - response = app_client.get("/fixtures.message?{}".format(qs)) + response = app_client.get(f"/fixtures.message?{qs}") signed = response.cookies["ds_messages"] decoded = app_client.ds.unsign(signed, "messages") assert expected == decoded diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 3819c872..51faeccb 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -34,7 +34,7 @@ def test_plugin_hooks_have_tests(plugin_hook): for test in tests_in_this_module: if plugin_hook in test: ok = True - assert ok, "Plugin hook is missing tests: {}".format(plugin_hook) + assert ok, f"Plugin hook is missing tests: {plugin_hook}" def test_hook_plugins_dir_plugin_prepare_connection(app_client): @@ -398,7 +398,7 @@ def view_names_client(tmp_path_factory): def test_view_names(view_names_client, path, view_name): response = view_names_client.get(path) assert response.status == 200 - assert "view_name:{}".format(view_name) == response.text + assert f"view_name:{view_name}" == response.text def test_hook_register_output_renderer_no_parameters(app_client): @@ -659,7 +659,7 @@ def test_hook_register_routes_csrftoken(restore_working_directory, tmpdir_factor with make_app_client(template_dir=templates) as client: response = client.get("/csrftoken-form/") expected_token = client.ds._last_request.scope["csrftoken"]() - assert "CSRFTOKEN: 
{}".format(expected_token) == response.text + assert f"CSRFTOKEN: {expected_token}" == response.text def test_hook_register_routes_asgi(app_client): @@ -793,14 +793,14 @@ def test_hook_table_actions(app_client, table_or_view): return [] return [{"label": a.text, "href": a["href"]} for a in details.select("a")] - response = app_client.get("/fixtures/{}".format(table_or_view)) + response = app_client.get(f"/fixtures/{table_or_view}") assert get_table_actions_links(response.text) == [] - response_2 = app_client.get("/fixtures/{}?_bot=1".format(table_or_view)) + response_2 = app_client.get(f"/fixtures/{table_or_view}?_bot=1") assert get_table_actions_links(response_2.text) == [ {"label": "From async", "href": "/"}, {"label": "Database: fixtures", "href": "/"}, - {"label": "Table: {}".format(table_or_view), "href": "/"}, + {"label": f"Table: {table_or_view}", "href": "/"}, ] diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index a4eca49f..e629bba0 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -62,7 +62,7 @@ Service name: input-service tag = "gcr.io/myproject/datasette" mock_call.assert_has_calls( [ - mock.call("gcloud builds submit --tag {}".format(tag), shell=True), + mock.call(f"gcloud builds submit --tag {tag}", shell=True), mock.call( "gcloud run deploy --allow-unauthenticated --platform=managed --image {} input-service".format( tag @@ -86,10 +86,10 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which): cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"] ) assert 0 == result.exit_code - tag = "gcr.io/{}/datasette".format(mock_output.return_value) + tag = f"gcr.io/{mock_output.return_value}/datasette" mock_call.assert_has_calls( [ - mock.call("gcloud builds submit --tag {}".format(tag), shell=True), + mock.call(f"gcloud builds submit --tag {tag}", shell=True), mock.call( "gcloud run deploy --allow-unauthenticated --platform=managed --image {} test".format( tag @@ -129,10 +129,10 @@ def test_publish_cloudrun_memory( assert 2 == result.exit_code return assert 0 == result.exit_code - tag = "gcr.io/{}/datasette".format(mock_output.return_value) + tag = f"gcr.io/{mock_output.return_value}/datasette" mock_call.assert_has_calls( [ - mock.call("gcloud builds submit --tag {}".format(tag), shell=True), + mock.call(f"gcloud builds submit --tag {tag}", shell=True), mock.call( "gcloud run deploy --allow-unauthenticated --platform=managed --image {} test --memory {}".format( tag, memory diff --git a/tests/test_utils.py b/tests/test_utils.py index 2d2ff52d..07e6f870 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -457,7 +457,7 @@ def test_check_connection_passes(): def test_call_with_supported_arguments(): def foo(a, b): - return "{}+{}".format(a, b) + return f"{a}+{b}" assert "1+2" == utils.call_with_supported_arguments(foo, a=1, b=2) assert "1+2" == utils.call_with_supported_arguments(foo, a=1, b=2, c=3) diff --git a/update-docs-help.py b/update-docs-help.py index c007e23c..3a192575 100644 --- a/update-docs-help.py +++ b/update-docs-help.py @@ -16,7 +16,7 @@ def update_help_includes(): for name, filename in includes: runner = CliRunner() result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88) - actual = "$ datasette {} --help\n\n{}".format(name, result.output) + actual = f"$ datasette {name} --help\n\n{result.output}" actual = actual.replace("Usage: cli ", "Usage: datasette ") open(docs_path / filename, "w").write(actual) From 4bac9f18f9d04e5ed10f072502bcc508e365438e Mon Sep 17 
00:00:00 2001 From: Simon Willison Date: Sat, 21 Nov 2020 15:33:04 -0800 Subject: [PATCH 0288/1705] Fix off-screen action menu bug, refs #1084 --- datasette/static/app.css | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index 675285c1..b9378a9e 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -361,13 +361,13 @@ details .nav-menu-inner { } /* Table/database actions menu */ -.actions-menu-links { +.page-header { position: relative; } .actions-menu-links .dropdown-menu { position: absolute; - top: 2rem; - right: 0; + top: calc(100% + 10px); + left: -10px; } /* Components ============================================================== */ From 3159263f05ac4baf968929d59384d9223a539071 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 24 Nov 2020 12:01:47 -0800 Subject: [PATCH 0289/1705] New --setting to replace --config, closes #992 --- datasette/cli.py | 56 +++++++++++++++++++++++++++++++++-- docs/datasette-serve-help.txt | 5 ++-- tests/test_cli.py | 36 ++++++++++++++++++++++ 3 files changed, 93 insertions(+), 4 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 99075078..9e696aa8 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -2,6 +2,7 @@ import asyncio import uvicorn import click from click import formatting +from click.types import CompositeParamType from click_default_group import DefaultGroup import json import os @@ -29,6 +30,7 @@ from .version import __version__ class Config(click.ParamType): + # This will be removed in Datasette 1.0 in favour of class Setting name = "config" def convert(self, config, param, ctx): @@ -63,6 +65,39 @@ class Config(click.ParamType): self.fail("Invalid option") +class Setting(CompositeParamType): + name = "setting" + arity = 2 + + def convert(self, config, param, ctx): + name, value = config + if name not in DEFAULT_CONFIG: + self.fail( + f"{name} is not a valid option (--help-config to see all)", + param, + ctx, + ) + return + # Type checking + default = DEFAULT_CONFIG[name] + if isinstance(default, bool): + try: + return name, value_as_boolean(value) + except ValueAsBooleanError: + self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx) + return + elif isinstance(default, int): + if not value.isdigit(): + self.fail(f'"{name}" should be an integer', param, ctx) + return + return name, int(value) + elif isinstance(default, str): + return name, value + else: + # Should never happen: + self.fail("Invalid option") + + @click.group(cls=DefaultGroup, default="serve", default_if_no_args=True) @click.version_option(version=__version__) def cli(): @@ -330,7 +365,14 @@ def uninstall(packages, yes): @click.option( "--config", type=Config(), - help="Set config option using configname:value docs.datasette.io/en/stable/config.html", + help="Deprecated: set config option using configname:value. 
Use --setting instead.", + multiple=True, +) +@click.option( + "--setting", + "settings", + type=Setting(), + help="Setting, see docs.datasette.io/en/stable/config.html", multiple=True, ) @click.option( @@ -372,6 +414,7 @@ def serve( static, memory, config, + settings, secret, root, get, @@ -410,6 +453,15 @@ def serve( if metadata: metadata_data = parse_metadata(metadata.read()) + combined_config = {} + if config: + click.echo( + "--config name:value will be deprecated in Datasette 1.0, use --setting name value instead", + err=True, + ) + combined_config.update(config) + combined_config.update(settings) + kwargs = dict( immutables=immutable, cache_headers=not reload, @@ -420,7 +472,7 @@ def serve( template_dir=template_dir, plugins_dir=plugins_dir, static_mounts=static, - config=dict(config), + config=combined_config, memory=memory, secret=secret, version_note=version_note, diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index 5a63d4c4..bdaf0894 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -25,9 +25,10 @@ Options: --plugins-dir DIRECTORY Path to directory containing custom plugins --static MOUNT:DIRECTORY Serve static files from this directory at /MOUNT/... --memory Make :memory: database available - --config CONFIG Set config option using configname:value - docs.datasette.io/en/stable/config.html + --config CONFIG Deprecated: set config option using configname:value. Use + --setting instead. + --setting SETTING... Setting, see docs.datasette.io/en/stable/config.html --secret TEXT Secret used for signing secure values, such as signed cookies diff --git a/tests/test_cli.py b/tests/test_cli.py index aa39b0ee..99aea053 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -4,6 +4,7 @@ from .fixtures import ( TestClient as _TestClient, EXPECTED_PLUGINS, ) +import asyncio from datasette.plugins import DEFAULT_PLUGINS from datasette.cli import cli, serve from datasette.version import __version__ @@ -17,6 +18,13 @@ import textwrap from unittest import mock +@pytest.fixture +def ensure_eventloop(): + # Workaround for "Event loop is closed" error + if asyncio.get_event_loop().is_closed(): + asyncio.set_event_loop(asyncio.new_event_loop()) + + def test_inspect_cli(app_client): runner = CliRunner() result = runner.invoke(cli, ["inspect", "fixtures.db"]) @@ -115,6 +123,7 @@ def test_metadata_yaml(): static=[], memory=False, config=[], + settings=[], secret=None, root=False, version_note=None, @@ -163,3 +172,30 @@ def test_version(): runner = CliRunner() result = runner.invoke(cli, ["--version"]) assert result.output == f"cli, version {__version__}\n" + + +def test_setting(ensure_eventloop): + runner = CliRunner() + result = runner.invoke( + cli, ["--setting", "default_page_size", "5", "--get", "/-/config.json"] + ) + assert result.exit_code == 0, result.output + assert json.loads(result.output)["default_page_size"] == 5 + + +def test_setting_type_validation(ensure_eventloop): + runner = CliRunner(mix_stderr=False) + result = runner.invoke(cli, ["--setting", "default_page_size", "dog"]) + assert result.exit_code == 2 + assert '"default_page_size" should be an integer' in result.stderr + + +def test_config_deprecated(ensure_eventloop): + # The --config option should show a deprecation message + runner = CliRunner(mix_stderr=False) + result = runner.invoke( + cli, ["--config", "allow_download:off", "--get", "/-/config.json"] + ) + assert result.exit_code == 0 + assert not json.loads(result.output)["allow_download"] + assert "will be 
deprecated in" in result.stderr From 2a3d5b720b96d5ad79ccad655f6575bb71aae302 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 24 Nov 2020 12:19:14 -0800 Subject: [PATCH 0290/1705] Redirect /-/config to /-/settings, closes #1103 --- datasette/app.py | 20 ++++++++++++++++++-- datasette/default_menu_links.py | 4 ++-- docs/introspection.rst | 8 ++++---- tests/test_api.py | 17 +++++++++++++++-- tests/test_cli.py | 4 ++-- tests/test_config_dir.py | 2 +- tests/test_permissions.py | 2 +- 7 files changed, 43 insertions(+), 14 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index b2bdb746..36df6032 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -66,6 +66,7 @@ from .utils.asgi import ( Forbidden, NotFound, Request, + Response, asgi_static, asgi_send, asgi_send_html, @@ -884,8 +885,16 @@ class Datasette: r"/-/plugins(?P(\.json)?)$", ) add_route( - JsonDataView.as_view(self, "config.json", lambda: self._config), - r"/-/config(?P(\.json)?)$", + JsonDataView.as_view(self, "settings.json", lambda: self._config), + r"/-/settings(?P(\.json)?)$", + ) + add_route( + permanent_redirect("/-/settings.json"), + r"/-/config.json", + ) + add_route( + permanent_redirect("/-/settings"), + r"/-/config", ) add_route( JsonDataView.as_view(self, "threads.json", self._threads), @@ -1224,6 +1233,13 @@ def wrap_view(view_fn, datasette): return async_view_fn +def permanent_redirect(path): + return wrap_view( + lambda request, send: Response.redirect(path, status=301), + datasette=None, + ) + + _curly_re = re.compile(r"(\{.*?\})") diff --git a/datasette/default_menu_links.py b/datasette/default_menu_links.py index 0b135410..56f481ef 100644 --- a/datasette/default_menu_links.py +++ b/datasette/default_menu_links.py @@ -22,8 +22,8 @@ def menu_links(datasette, actor): "label": "Metadata", }, { - "href": datasette.urls.path("/-/config"), - "label": "Config", + "href": datasette.urls.path("/-/settings"), + "label": "Settings", }, { "href": datasette.urls.path("/-/permissions"), diff --git a/docs/introspection.rst b/docs/introspection.rst index 698ba95f..a0402b9d 100644 --- a/docs/introspection.rst +++ b/docs/introspection.rst @@ -89,10 +89,10 @@ Add ``?all=1`` to include details of the default plugins baked into Datasette. .. _JsonDataView_config: -/-/config ---------- +/-/settings +----------- -Shows the :ref:`config` options for this instance of Datasette. `Config example `_: +Shows the :ref:`config` options for this instance of Datasette. `Settings example `_: .. code-block:: json @@ -110,7 +110,7 @@ Shows the :ref:`config` options for this instance of Datasette. `Config example /-/databases ------------ -Shows currently attached databases. `Databases example `_: +Shows currently attached databases. `Databases example `_: .. 
code-block:: json diff --git a/tests/test_api.py b/tests/test_api.py index 3d48d350..2bab6c30 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1324,8 +1324,8 @@ def test_versions_json(app_client): assert "compile_options" in response.json["sqlite"] -def test_config_json(app_client): - response = app_client.get("/-/config.json") +def test_settings_json(app_client): + response = app_client.get("/-/settings.json") assert { "default_page_size": 50, "default_facet_size": 30, @@ -1350,6 +1350,19 @@ def test_config_json(app_client): } == response.json +@pytest.mark.parametrize( + "path,expected_redirect", + ( + ("/-/config.json", "/-/settings.json"), + ("/-/config", "/-/settings"), + ), +) +def test_config_redirects_to_settings(app_client, path, expected_redirect): + response = app_client.get(path, allow_redirects=False) + assert response.status == 301 + assert response.headers["Location"] == expected_redirect + + def test_page_size_matching_max_returned_rows( app_client_returned_rows_matches_page_size, ): diff --git a/tests/test_cli.py b/tests/test_cli.py index 99aea053..36b9a092 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -177,7 +177,7 @@ def test_version(): def test_setting(ensure_eventloop): runner = CliRunner() result = runner.invoke( - cli, ["--setting", "default_page_size", "5", "--get", "/-/config.json"] + cli, ["--setting", "default_page_size", "5", "--get", "/-/settings.json"] ) assert result.exit_code == 0, result.output assert json.loads(result.output)["default_page_size"] == 5 @@ -194,7 +194,7 @@ def test_config_deprecated(ensure_eventloop): # The --config option should show a deprecation message runner = CliRunner(mix_stderr=False) result = runner.invoke( - cli, ["--config", "allow_download:off", "--get", "/-/config.json"] + cli, ["--config", "allow_download:off", "--get", "/-/settings.json"] ) assert result.exit_code == 0 assert not json.loads(result.output)["allow_download"] diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index 15c7a5c4..34bd1d7e 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -86,7 +86,7 @@ def test_metadata(config_dir_client): def test_config(config_dir_client): - response = config_dir_client.get("/-/config.json") + response = config_dir_client.get("/-/settings.json") assert 200 == response.status assert 60 == response.json["default_cache_ttl"] diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 60883eef..3b7e1654 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -378,7 +378,7 @@ def view_instance_client(): "/-/metadata", "/-/versions", "/-/plugins", - "/-/config", + "/-/settings", "/-/threads", "/-/databases", "/-/actor", From 33eadb8782d5b3e179df7dfa08f6d376ded2acd3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 24 Nov 2020 12:37:29 -0800 Subject: [PATCH 0291/1705] config.json is now settings.json, closes #1104 --- datasette/app.py | 7 +++++-- datasette/cli.py | 3 +++ datasette/utils/__init__.py | 4 ++++ docs/config.rst | 8 ++++---- tests/test_config_dir.py | 17 ++++++++++++++--- 5 files changed, 30 insertions(+), 9 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 36df6032..0e42b7c6 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -45,6 +45,7 @@ from .database import Database, QueryInterrupted from .utils import ( PrefixedUrlString, + StartupError, async_call_with_supported_arguments, await_me_maybe, call_with_supported_arguments, @@ -265,8 +266,10 @@ class Datasette: if config_dir and (config_dir / 
"static").is_dir() and not static_mounts: static_mounts = [("static", str((config_dir / "static").resolve()))] self.static_mounts = static_mounts or [] - if config_dir and (config_dir / "config.json").exists() and not config: - config = json.load((config_dir / "config.json").open()) + if config_dir and (config_dir / "config.json").exists(): + raise StartupError("config.json should be renamed to settings.json") + if config_dir and (config_dir / "settings.json").exists() and not config: + config = json.load((config_dir / "settings.json").open()) self._config = dict(DEFAULT_CONFIG, **(config or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note diff --git a/datasette/cli.py b/datasette/cli.py index 9e696aa8..95e1418c 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -14,6 +14,7 @@ from runpy import run_module import webbrowser from .app import Datasette, DEFAULT_CONFIG, CONFIG_OPTIONS, pm from .utils import ( + StartupError, check_connection, parse_metadata, ConnectionProblem, @@ -488,6 +489,8 @@ def serve( ds = Datasette(files, **kwargs) except SpatialiteNotFound: raise click.ClickException("Could not find SpatiaLite extension") + except StartupError as e: + raise click.ClickException(e.args[0]) if return_instance: # Private utility mechanism for writing unit tests diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 02b59b2b..d62302e9 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1027,3 +1027,7 @@ class PrefixedUrlString(str): return method.__get__(self) else: return super().__getattribute__(name) + + +class StartupError(Exception): + pass diff --git a/docs/config.rst b/docs/config.rst index 0883e532..27b73d44 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -50,15 +50,15 @@ The files that can be included in this directory are as follows. All are optiona * ``*.db`` - SQLite database files that will be served by Datasette * ``metadata.json`` - :ref:`metadata` for those databases - ``metadata.yaml`` or ``metadata.yml`` can be used as well * ``inspect-data.json`` - the result of running ``datasette inspect`` - any database files listed here will be treated as immutable, so they should not be changed while Datasette is running -* ``config.json`` - settings that would normally be passed using ``--config`` - here they should be stored as a JSON object of key/value pairs +* ``settings.json`` - settings that would normally be passed using ``--setting`` - here they should be stored as a JSON object of key/value pairs * ``templates/`` - a directory containing :ref:`customization_custom_templates` * ``plugins/`` - a directory containing plugins, see :ref:`writing_plugins_one_off` * ``static/`` - a directory containing static files - these will be served from ``/static/filename.txt``, see :ref:`customization_static_files` -Configuration options ---------------------- +Settings +-------- -The followig options can be set using ``--config name:value``, or by storing them in the ``config.json`` file for use with :ref:`config_dir`. +The following options can be set using ``--setting name value``, or by storing them in the ``settings.json`` file for use with :ref:`config_dir`. 
default_page_size ~~~~~~~~~~~~~~~~~ diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index 34bd1d7e..cd158474 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -3,7 +3,9 @@ import pytest import sqlite3 from datasette.app import Datasette +from datasette.cli import cli from .fixtures import TestClient as _TestClient +from click.testing import CliRunner PLUGIN = """ from datasette import hookimpl @@ -15,7 +17,7 @@ def extra_template_vars(): } """ METADATA = {"title": "This is from metadata"} -CONFIG = { +SETTINGS = { "default_cache_ttl": 60, } CSS = """ @@ -44,7 +46,7 @@ def config_dir_client(tmp_path_factory): (static_dir / "hello.css").write_text(CSS, "utf-8") (config_dir / "metadata.json").write_text(json.dumps(METADATA), "utf-8") - (config_dir / "config.json").write_text(json.dumps(CONFIG), "utf-8") + (config_dir / "settings.json").write_text(json.dumps(SETTINGS), "utf-8") for dbname in ("demo.db", "immutable.db"): db = sqlite3.connect(str(config_dir / dbname)) @@ -85,12 +87,21 @@ def test_metadata(config_dir_client): assert METADATA == response.json -def test_config(config_dir_client): +def test_settings(config_dir_client): response = config_dir_client.get("/-/settings.json") assert 200 == response.status assert 60 == response.json["default_cache_ttl"] +def test_error_on_config_json(tmp_path_factory): + config_dir = tmp_path_factory.mktemp("config-dir") + (config_dir / "config.json").write_text(json.dumps(SETTINGS), "utf-8") + runner = CliRunner(mix_stderr=False) + result = runner.invoke(cli, [str(config_dir), "--get", "/-/settings.json"]) + assert result.exit_code == 1 + assert "config.json should be renamed to settings.json" in result.stderr + + def test_plugins(config_dir_client): response = config_dir_client.get("/-/plugins.json") assert 200 == response.status From 5a77f7a6494c74372bedfef6185e1beed7bea5dc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 24 Nov 2020 13:22:33 -0800 Subject: [PATCH 0292/1705] Updated docs renaming config to settings - config.html is now settings.html - ConfigOption in app.py is now Setting - updated documentation unit tests Refs #1106 --- .github/workflows/deploy-latest.yml | 4 +- datasette/app.py | 56 +++++++-------- datasette/cli.py | 12 ++-- docs/changelog.rst | 18 ++--- docs/csv_export.rst | 6 +- docs/deploying.rst | 8 +-- docs/index.rst | 2 +- docs/internals.rst | 6 +- docs/introspection.rst | 2 +- docs/pages.rst | 2 +- docs/performance.rst | 6 +- docs/plugin_hooks.rst | 2 +- docs/publish.rst | 4 +- docs/{config.rst => settings.rst} | 108 ++++++++++++++++------------ docs/spatialite.rst | 2 +- docs/writing_plugins.rst | 2 +- tests/test_docs.py | 10 +-- 17 files changed, 131 insertions(+), 119 deletions(-) rename docs/{config.rst => settings.rst} (80%) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 73b97a19..7a41bda2 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -53,11 +53,11 @@ jobs: --plugins-dir=plugins \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ - --extra-options="--config template_debug:1" \ + --extra-options="--setting template_debug 1" \ --service=datasette-latest # Deploy docs.db to a different service datasette publish cloudrun docs.db \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ - --extra-options="--config template_debug:1" \ + --extra-options="--setting template_debug 1" \ --service=datasette-docs-latest diff --git a/datasette/app.py b/datasette/app.py index 0e42b7c6..3bb6ce79 
100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -82,91 +82,85 @@ app_root = Path(__file__).parent.parent MEMORY = object() -ConfigOption = collections.namedtuple("ConfigOption", ("name", "default", "help")) -CONFIG_OPTIONS = ( - ConfigOption("default_page_size", 100, "Default page size for the table view"), - ConfigOption( +Setting = collections.namedtuple("Setting", ("name", "default", "help")) +SETTINGS = ( + Setting("default_page_size", 100, "Default page size for the table view"), + Setting( "max_returned_rows", 1000, "Maximum rows that can be returned from a table or custom query", ), - ConfigOption( + Setting( "num_sql_threads", 3, "Number of threads in the thread pool for executing SQLite queries", ), - ConfigOption( - "sql_time_limit_ms", 1000, "Time limit for a SQL query in milliseconds" - ), - ConfigOption( + Setting("sql_time_limit_ms", 1000, "Time limit for a SQL query in milliseconds"), + Setting( "default_facet_size", 30, "Number of values to return for requested facets" ), - ConfigOption( - "facet_time_limit_ms", 200, "Time limit for calculating a requested facet" - ), - ConfigOption( + Setting("facet_time_limit_ms", 200, "Time limit for calculating a requested facet"), + Setting( "facet_suggest_time_limit_ms", 50, "Time limit for calculating a suggested facet", ), - ConfigOption( + Setting( "hash_urls", False, "Include DB file contents hash in URLs, for far-future caching", ), - ConfigOption( + Setting( "allow_facet", True, "Allow users to specify columns to facet using ?_facet= parameter", ), - ConfigOption( + Setting( "allow_download", True, "Allow users to download the original SQLite database files", ), - ConfigOption("suggest_facets", True, "Calculate and display suggested facets"), - ConfigOption( + Setting("suggest_facets", True, "Calculate and display suggested facets"), + Setting( "default_cache_ttl", 5, "Default HTTP cache TTL (used in Cache-Control: max-age= header)", ), - ConfigOption( + Setting( "default_cache_ttl_hashed", 365 * 24 * 60 * 60, "Default HTTP cache TTL for hashed URL pages", ), - ConfigOption( - "cache_size_kb", 0, "SQLite cache size in KB (0 == use SQLite default)" - ), - ConfigOption( + Setting("cache_size_kb", 0, "SQLite cache size in KB (0 == use SQLite default)"), + Setting( "allow_csv_stream", True, "Allow .csv?_stream=1 to download all rows (ignoring max_returned_rows)", ), - ConfigOption( + Setting( "max_csv_mb", 100, "Maximum size allowed for CSV export in MB - set 0 to disable this limit", ), - ConfigOption( + Setting( "truncate_cells_html", 2048, "Truncate cells longer than this in HTML table view - set 0 to disable", ), - ConfigOption( + Setting( "force_https_urls", False, "Force URLs in API output to always use https:// protocol", ), - ConfigOption( + Setting( "template_debug", False, "Allow display of template debug information with ?_context=1", ), - ConfigOption("base_url", "/", "Datasette URLs should use this base path"), + Setting("base_url", "/", "Datasette URLs should use this base path"), ) -DEFAULT_CONFIG = {option.name: option.default for option in CONFIG_OPTIONS} +DEFAULT_SETTINGS = {option.name: option.default for option in SETTINGS} async def favicon(request, send): @@ -270,7 +264,7 @@ class Datasette: raise StartupError("config.json should be renamed to settings.json") if config_dir and (config_dir / "settings.json").exists() and not config: config = json.load((config_dir / "settings.json").open()) - self._config = dict(DEFAULT_CONFIG, **(config or {})) + self._config = dict(DEFAULT_SETTINGS, **(config or {})) 
self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note self.executor = futures.ThreadPoolExecutor( @@ -358,7 +352,7 @@ class Datasette: def config_dict(self): # Returns a fully resolved config dictionary, useful for templates - return {option.name: self.config(option.name) for option in CONFIG_OPTIONS} + return {option.name: self.config(option.name) for option in SETTINGS} def metadata(self, key=None, database=None, table=None, fallback=True): """ diff --git a/datasette/cli.py b/datasette/cli.py index 95e1418c..5feab51e 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -12,7 +12,7 @@ from subprocess import call import sys from runpy import run_module import webbrowser -from .app import Datasette, DEFAULT_CONFIG, CONFIG_OPTIONS, pm +from .app import Datasette, DEFAULT_SETTINGS, SETTINGS, pm from .utils import ( StartupError, check_connection, @@ -39,7 +39,7 @@ class Config(click.ParamType): self.fail(f'"{config}" should be name:value', param, ctx) return name, value = config.split(":", 1) - if name not in DEFAULT_CONFIG: + if name not in DEFAULT_SETTINGS: self.fail( f"{name} is not a valid option (--help-config to see all)", param, @@ -47,7 +47,7 @@ class Config(click.ParamType): ) return # Type checking - default = DEFAULT_CONFIG[name] + default = DEFAULT_SETTINGS[name] if isinstance(default, bool): try: return name, value_as_boolean(value) @@ -72,7 +72,7 @@ class Setting(CompositeParamType): def convert(self, config, param, ctx): name, value = config - if name not in DEFAULT_CONFIG: + if name not in DEFAULT_SETTINGS: self.fail( f"{name} is not a valid option (--help-config to see all)", param, @@ -80,7 +80,7 @@ class Setting(CompositeParamType): ) return # Type checking - default = DEFAULT_CONFIG[name] + default = DEFAULT_SETTINGS[name] if isinstance(default, bool): try: return name, value_as_boolean(value) @@ -432,7 +432,7 @@ def serve( formatter.write_dl( [ (option.name, f"{option.help} (default={option.default})") - for option in CONFIG_OPTIONS + for option in SETTINGS ] ) click.echo(formatter.getvalue()) diff --git a/docs/changelog.rst b/docs/changelog.rst index 34bd95d4..2916b373 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -49,7 +49,7 @@ The new :ref:`internals_datasette_urls` family of methods can be used to generat Running Datasette behind a proxy ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The :ref:`config_base_url` configuration option is designed to help run Datasette on a specific path behind a proxy - for example if you want to run an instance of Datasette at ``/my-datasette/`` within your existing site's URL hierarchy, proxied behind nginx or Apache. +The :ref:`setting_base_url` configuration option is designed to help run Datasette on a specific path behind a proxy - for example if you want to run an instance of Datasette at ``/my-datasette/`` within your existing site's URL hierarchy, proxied behind nginx or Apache. Support for this configuration option has been greatly improved (`#1023 `__), and guidelines for using it are now available in a new documentation section on :ref:`deploying_proxy`. (`#1027 `__) @@ -353,9 +353,9 @@ Signed values and secrets Both flash messages and user authentication needed a way to sign values and set signed cookies. Two new methods are now available for plugins to take advantage of this mechanism: :ref:`datasette_sign` and :ref:`datasette_unsign`. 
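For a concrete sense of that mechanism, here is a sketch of the round trip. The in-memory instance is illustrative, and ``"messages"`` is just an example namespace (it is the one the flash-message cookie uses, as seen in ``tests/test_messages.py`` earlier in this series):

.. code-block:: python

    from datasette.app import Datasette

    ds = Datasette([], memory=True)
    # Sign a value under a namespace, then verify the signature and recover it
    signed = ds.sign({"id": "root"}, "messages")
    assert ds.unsign(signed, "messages") == {"id": "root"}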
-Datasette will generate a secret automatically when it starts up, but to avoid resetting the secret (and hence invalidating any cookies) every time the server restarts you should set your own secret. You can pass a secret to Datasette using the new ``--secret`` option or with a ``DATASETTE_SECRET`` environment variable. See :ref:`config_secret` for more details. +Datasette will generate a secret automatically when it starts up, but to avoid resetting the secret (and hence invalidating any cookies) every time the server restarts you should set your own secret. You can pass a secret to Datasette using the new ``--secret`` option or with a ``DATASETTE_SECRET`` environment variable. See :ref:`settings_secret` for more details. -You can also set a secret when you deploy Datasette using ``datasette publish`` or ``datasette package`` - see :ref:`config_publish_secrets`. +You can also set a secret when you deploy Datasette using ``datasette publish`` or ``datasette package`` - see :ref:`settings_publish_secrets`. Plugins can now sign values and verify their signatures using the :ref:`datasette.sign() ` and :ref:`datasette.unsign() ` methods. @@ -450,7 +450,7 @@ A small release which provides improved internal methods for use in plugins, alo You can now create :ref:`custom pages ` within your Datasette instance using a custom template file. For example, adding a template file called ``templates/pages/about.html`` will result in a new page being served at ``/about`` on your instance. See the :ref:`custom pages documentation ` for full details, including how to return custom HTTP headers, redirects and status codes. (`#648 `__) -:ref:`config_dir` (`#731 `__) allows you to define a custom Datasette instance as a directory. So instead of running the following:: +:ref:`settings_dir` (`#731 `__) allows you to define a custom Datasette instance as a directory. So instead of running the following:: $ datasette one.db two.db \ --metadata.json \ @@ -480,7 +480,7 @@ Also in this release: * Datasette :ref:`metadata` can now be provided as a YAML file as an optional alternative to JSON. See :ref:`metadata_yaml`. (`#713 `__) * Removed support for ``datasette publish now``, which used the the now-retired Zeit Now v1 hosting platform. A new plugin, `datasette-publish-now `__, can be installed to publish data to Zeit (`now Vercel `__) Now v2. (`#710 `__) * Fixed a bug where the ``extra_template_vars(request, view_name)`` plugin hook was not receiving the correct ``view_name``. (`#716 `__) -* Variables added to the template context by the ``extra_template_vars()`` plugin hook are now shown in the ``?_context=1`` debugging mode (see :ref:`config_template_debug`). (`#693 `__) +* Variables added to the template context by the ``extra_template_vars()`` plugin hook are now shown in the ``?_context=1`` debugging mode (see :ref:`settings_template_debug`). (`#693 `__) * Fixed a bug where the "templates considered" HTML comment was no longer being displayed. (`#689 `__) * Fixed a ``datasette publish`` bug where ``--plugin-secret`` would over-ride plugin configuration in the provided ``metadata.json`` file. (`#724 `__) * Added a new CSS class for customizing the canned query page. (`#727 `__) @@ -490,7 +490,7 @@ Also in this release: 0.39 (2020-03-24) ----------------- -* New :ref:`config_base_url` configuration setting for serving up the correct links while running Datasette under a different URL prefix. 
(`#394 `__) +* New :ref:`setting_base_url` configuration setting for serving up the correct links while running Datasette under a different URL prefix. (`#394 `__) * New metadata settings ``"sort"`` and ``"sort_desc"`` for setting the default sort order for a table. See :ref:`metadata_default_sort`. (`#702 `__) * Sort direction arrow now displays by default on the primary key. This means you only have to click once (not twice) to sort in reverse order. (`#677 `__) * New ``await Request(scope, receive).post_vars()`` method for accessing POST form variables. (`#700 `__) @@ -565,7 +565,7 @@ Also in this release: * asyncio task information is now included on the ``/-/threads`` debug page * Bumped Uvicorn dependency 0.11 * You can now use ``--port 0`` to listen on an available port -* New :ref:`config_template_debug` setting for debugging templates, e.g. https://latest.datasette.io/fixtures/roadside_attractions?_context=1 (`#654 `__) +* New :ref:`settings_template_debug` setting for debugging templates, e.g. https://latest.datasette.io/fixtures/roadside_attractions?_context=1 (`#654 `__) .. _v0_32: @@ -1000,7 +1000,7 @@ Check out the :ref:`CSV export documentation ` for more details, or try the feature out on https://fivethirtyeight.datasettes.com/fivethirtyeight/bechdel%2Fmovies -If your table has more than :ref:`config_max_returned_rows` (default 1,000) +If your table has more than :ref:`settings_max_returned_rows` (default 1,000) Datasette provides the option to *stream all rows*. This option takes advantage of async Python and Datasette's efficient :ref:`pagination ` to iterate through the entire matching result set and stream it back as a @@ -1020,7 +1020,7 @@ table, using the new ``_labels=on`` querystring option. See New configuration settings ~~~~~~~~~~~~~~~~~~~~~~~~~~ -Datasette's :ref:`config` now also supports boolean settings. A number of new +Datasette's :ref:`settings` now also supports boolean settings. A number of new configuration options have been added: * ``num_sql_threads`` - the number of threads used to execute SQLite queries. Defaults to 3. diff --git a/docs/csv_export.rst b/docs/csv_export.rst index b5cc599a..704cc19d 100644 --- a/docs/csv_export.rst +++ b/docs/csv_export.rst @@ -23,7 +23,7 @@ file, which looks like this and has the following options: the ``city_id`` column is accompanied by a ``city_id_label`` column. * **stream all rows** - by default CSV files only contain the first - :ref:`config_max_returned_rows` records. This option will cause Datasette to + :ref:`settings_max_returned_rows` records. This option will cause Datasette to loop through every matching record and return them as a single CSV file. You can try that out on https://latest.datasette.io/fixtures/facetable?_size=4 @@ -40,9 +40,9 @@ Since databases can get pretty large, by default this option is capped at 100MB if a table returns more than 100MB of data the last line of the CSV will be a truncation error message. -You can increase or remove this limit using the :ref:`config_max_csv_mb` config +You can increase or remove this limit using the :ref:`settings_max_csv_mb` config setting. You can also disable the CSV export feature entirely using -:ref:`config_allow_csv_stream`. +:ref:`settings_allow_csv_stream`. 
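As a usage sketch of those two limits: the ``Datasette`` constructor accepts the same settings dictionary that ``--setting`` builds up, via the ``config=`` keyword shown in the ``serve()`` changes earlier in this series. ``data.db`` is an illustrative filename.

.. code-block:: python

    from datasette.app import Datasette

    ds = Datasette(
        ["data.db"],
        config={
            "max_csv_mb": 10,  # exports larger than this end in a truncation error
            "allow_csv_stream": False,  # remove the "stream all rows" option
        },
    )

The command-line equivalent is ``--setting max_csv_mb 10 --setting allow_csv_stream off``.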
A note on URLs -------------- diff --git a/docs/deploying.rst b/docs/deploying.rst index 3eeaaad8..4ca0e82a 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -58,7 +58,7 @@ Add a random value for the ``DATASETTE_SECRET`` - this will be used to sign Data $ python3 -c 'import secrets; print(secrets.token_hex(32))' -This configuration will run Datasette against all database files contained in the ``/home/ubunt/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`config_dir` for details. +This configuration will run Datasette against all database files contained in the ``/home/ubunt/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`settings_dir` for details. You can start the Datasette process running using the following:: @@ -101,7 +101,7 @@ The ``Procfile`` lets the hosting platform know how to run the command that serv web: datasette . -h 0.0.0.0 -p $PORT --cors -The ``$PORT`` environment variable is provided by the hosting platform. ``--cors`` enables CORS requests from JavaScript running on other websites to your domain - omit this if you don't want to allow CORS. You can add additional Datasette :ref:`config` options here too. +The ``$PORT`` environment variable is provided by the hosting platform. ``--cors`` enables CORS requests from JavaScript running on other websites to your domain - omit this if you don't want to allow CORS. You can add additional Datasette :ref:`settings` options here too. These two files should be enough to deploy Datasette on any host that supports buildpacks. Datasette will serve any SQLite files that are included in the root directory of the application. @@ -118,9 +118,9 @@ Running Datasette behind a proxy You may wish to run Datasette behind an Apache or nginx proxy, using a path within your existing site. -You can use the :ref:`config_base_url` configuration setting to tell Datasette to serve traffic with a specific URL prefix. For example, you could run Datasette like this:: +You can use the :ref:`setting_base_url` configuration setting to tell Datasette to serve traffic with a specific URL prefix. For example, you could run Datasette like this:: - datasette my-database.db --config base_url:/my-datasette/ -p 8009 + datasette my-database.db --setting base_url /my-datasette/ -p 8009 This will run Datasette with the following URLs: diff --git a/docs/index.rst b/docs/index.rst index 6b55da8c..ff8db04b 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -51,7 +51,7 @@ Contents full_text_search spatialite metadata - config + settings introspection custom_templates plugins diff --git a/docs/internals.rst b/docs/internals.rst index 92496490..cec1268f 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -350,7 +350,7 @@ Returns the absolute URL for the given path, including the protocol and host. Fo absolute_url = datasette.absolute_url(request, "/dbname/table.json") # Would return "http://localhost:8001/dbname/table.json" -The current request object is used to determine the hostname and protocol that should be used for the returned URL. The :ref:`config_force_https_urls` configuration setting is taken into account. +The current request object is used to determine the hostname and protocol that should be used for the returned URL. 
The :ref:`settings_force_https_urls` configuration setting is taken into account. .. _internals_datasette_client: @@ -397,7 +397,7 @@ These methods can be used with :ref:`internals_datasette_urls` - for example: ) ).json() -``datasette.client`` methods automatically take the current :ref:`config_base_url` setting into account, whether or not you use the ``datasette.urls`` family of methods to construct the path. +``datasette.client`` methods automatically take the current :ref:`setting_base_url` setting into account, whether or not you use the ``datasette.urls`` family of methods to construct the path. For documentation on available ``**kwargs`` options and the shape of the HTTPX Response object refer to the `HTTPX Async documentation `__. @@ -406,7 +406,7 @@ For documentation on available ``**kwargs`` options and the shape of the HTTPX R datasette.urls -------------- -The ``datasette.urls`` object contains methods for building URLs to pages within Datasette. Plugins should use this to link to pages, since these methods take into account any :ref:`config_base_url` configuration setting that might be in effect. +The ``datasette.urls`` object contains methods for building URLs to pages within Datasette. Plugins should use this to link to pages, since these methods take into account any :ref:`setting_base_url` configuration setting that might be in effect. ``datasette.urls.instance(format=None)`` Returns the URL to the Datasette instance root page. This is usually ``"/"``. diff --git a/docs/introspection.rst b/docs/introspection.rst index a0402b9d..d1a0a854 100644 --- a/docs/introspection.rst +++ b/docs/introspection.rst @@ -92,7 +92,7 @@ Add ``?all=1`` to include details of the default plugins baked into Datasette. /-/settings ----------- -Shows the :ref:`config` options for this instance of Datasette. `Settings example `_: +Shows the :ref:`settings` for this instance of Datasette. `Settings example `_: .. code-block:: json diff --git a/docs/pages.rst b/docs/pages.rst index db970ead..5f77bec7 100644 --- a/docs/pages.rst +++ b/docs/pages.rst @@ -66,7 +66,7 @@ Row Every row in every Datasette table has its own URL. This means individual records can be linked to directly. -Table cells with extremely long text contents are truncated on the table view according to the :ref:`config_truncate_cells_html` setting. If a cell has been truncated the full length version of that cell will be available on the row page. +Table cells with extremely long text contents are truncated on the table view according to the :ref:`settings_truncate_cells_html` setting. If a cell has been truncated the full length version of that cell will be available on the row page. Rows which are the targets of foreign key references from other tables will show a link to a filtered search for all records that reference that row. Here's an example from the Registers of Members Interests database: diff --git a/docs/performance.rst b/docs/performance.rst index d7f852d5..1d24adce 100644 --- a/docs/performance.rst +++ b/docs/performance.rst @@ -56,7 +56,7 @@ Using a caching proxy in this way could enable a Datasette-backed visualization Datasette's integration with HTTP caches can be enabled using a combination of configuration options and querystring arguments. -The :ref:`config_default_cache_ttl` setting sets the default HTTP cache TTL for all Datasette pages. This is 5 seconds unless you change it - you can set it to 0 if you wish to disable HTTP caching entirely. 
+The :ref:`settings_default_cache_ttl` setting sets the default HTTP cache TTL for all Datasette pages. This is 5 seconds unless you change it - you can set it to 0 if you wish to disable HTTP caching entirely. You can also change the cache timeout on a per-request basis using the ``?_ttl=10`` querystring parameter. This can be useful when you are working with the Datasette JSON API - you may decide that a specific query can be cached for a longer time, or maybe you need to set ``?_ttl=0`` for some requests for example if you are running a SQL ``order by random()`` query. @@ -65,9 +65,9 @@ Hashed URL mode When you open a database file in immutable mode using the ``-i`` option, Datasette calculates a SHA-256 hash of the contents of that file on startup. This content hash can then optionally be used to create URLs that are guaranteed to change if the contents of the file changes in the future. This results in URLs that can then be cached indefinitely by both browsers and caching proxies - an enormous potential performance optimization. -You can enable these hashed URLs in two ways: using the :ref:`config_hash_urls` configuration setting (which affects all requests to Datasette) or via the ``?_hash=1`` querystring parameter (which only applies to the current request). +You can enable these hashed URLs in two ways: using the :ref:`settings_hash_urls` configuration setting (which affects all requests to Datasette) or via the ``?_hash=1`` querystring parameter (which only applies to the current request). -With hashed URLs enabled, any request to e.g. ``/mydatabase/mytable`` will 302 redirect to ``mydatabase-455fe3a/mytable``. The URL containing the hash will be served with a very long cache expire header - configured using :ref:`config_default_cache_ttl_hashed` which defaults to 365 days. +With hashed URLs enabled, any request to e.g. ``/mydatabase/mytable`` will 302 redirect to ``mydatabase-455fe3a/mytable``. The URL containing the hash will be served with a very long cache expire header - configured using :ref:`settings_default_cache_ttl_hashed` which defaults to 365 days. Since these responses are cached for a long time, you may wish to build API clients against the non-hashed version of these URLs. These 302 redirects are served extremely quickly, so this should still be a performant way to work against the Datasette API. diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 8407a259..72b09367 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1020,7 +1020,7 @@ This example adds a new menu item but only if the signed in user is ``"root"``: {"href": datasette.urls.path("/-/edit-schema"), "label": "Edit schema"}, ] -Using :ref:`internals_datasette_urls` here ensures that links in the menu will take the :ref:`config_base_url` setting into account. +Using :ref:`internals_datasette_urls` here ensures that links in the menu will take the :ref:`setting_base_url` setting into account. .. _plugin_hook_table_actions: diff --git a/docs/publish.rst b/docs/publish.rst index a905ac92..d5015e21 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -135,7 +135,7 @@ If you have docker installed (e.g. 
using `Docker for Mac 79e1dc9af1c1 @@ -154,7 +154,7 @@ Here's example output for the package command:: Step 6/7 : EXPOSE 8001 ---> Using cache ---> 8e83844b0fed - Step 7/7 : CMD datasette serve parlgov.db --port 8001 --inspect-file inspect-data.json --config sql_time_limit_ms:2500 + Step 7/7 : CMD datasette serve parlgov.db --port 8001 --inspect-file inspect-data.json --setting sql_time_limit_ms 2500 ---> Using cache ---> 1bd380ea8af3 Successfully built 1bd380ea8af3 diff --git a/docs/config.rst b/docs/settings.rst similarity index 80% rename from docs/config.rst rename to docs/settings.rst index 27b73d44..350fd048 100644 --- a/docs/config.rst +++ b/docs/settings.rst @@ -1,20 +1,19 @@ -.. _config: +.. _settings: -Configuration -============= +Settings +======== -Using \-\-config ----------------- +Using \-\-setting +----------------- -Datasette provides a number of configuration options. These can be set using the ``--config name:value`` option to ``datasette serve``. +Datasette supports a number of settings. These can be set using the ``--setting name value`` option to ``datasette serve``. -You can set multiple configuration options at once like this:: +You can set multiple settings at once like this:: datasette mydatabase.db \ - --config default_page_size:50 \ - --config sql_time_limit_ms:3500 \ - --config max_returned_rows:2000 - + --setting default_page_size 50 \ + --setting sql_time_limit_ms 3500 \ + --setting max_returned_rows 2000 .. _config_dir: @@ -60,12 +59,16 @@ Settings The following options can be set using ``--setting name value``, or by storing them in the ``settings.json`` file for use with :ref:`config_dir`. +.. _setting_default_page_size: + default_page_size ~~~~~~~~~~~~~~~~~ -The default number of rows returned by the table page. You can over-ride this on a per-page basis using the ``?_size=80`` querystring parameter, provided you do not specify a value higher than the ``max_returned_rows`` setting. You can set this default using ``--config`` like so:: +The default number of rows returned by the table page. You can over-ride this on a per-page basis using the ``?_size=80`` querystring parameter, provided you do not specify a value higher than the ``max_returned_rows`` setting. You can set this default using ``--setting`` like so:: - datasette mydatabase.db --config default_page_size:50 + datasette mydatabase.db --setting default_page_size 50 + +.. _setting_sql_time_limit_ms: sql_time_limit_ms ~~~~~~~~~~~~~~~~~ @@ -74,7 +77,7 @@ By default, queries have a time limit of one second. If a query takes longer tha If this time limit is too short for you, you can customize it using the ``sql_time_limit_ms`` limit - for example, to increase it to 3.5 seconds:: - datasette mydatabase.db --config sql_time_limit_ms:3500 + datasette mydatabase.db --setting sql_time_limit_ms 3500 You can optionally set a lower time limit for an individual query using the ``?_timelimit=100`` querystring argument:: @@ -82,7 +85,7 @@ You can optionally set a lower time limit for an individual query using the ``?_ This would set the time limit to 100ms for that specific query. This feature is useful if you are working with databases of unknown size and complexity - a query that might make perfect sense for a smaller table could take too long to execute on a table with millions of rows. By setting custom time limits you can execute queries "optimistically" - e.g. give me an exact count of rows matching this query but only if it takes less than 100ms to calculate. -.. _config_max_returned_rows: +.. 
_setting_max_returned_rows: max_returned_rows ~~~~~~~~~~~~~~~~~ @@ -91,7 +94,9 @@ Datasette returns a maximum of 1,000 rows of data at a time. If you execute a qu You can increase or decrease this limit like so:: - datasette mydatabase.db --config max_returned_rows:2000 + datasette mydatabase.db --setting max_returned_rows 2000 + +.. _setting_num_sql_threads: num_sql_threads ~~~~~~~~~~~~~~~ @@ -100,7 +105,9 @@ Maximum number of threads in the thread pool Datasette uses to execute SQLite qu :: - datasette mydatabase.db --config num_sql_threads:10 + datasette mydatabase.db --setting num_sql_threads 10 + +.. _setting_allow_facet: allow_facet ~~~~~~~~~~~ @@ -111,21 +118,27 @@ This is enabled by default. If disabled, facets will still be displayed if they Here's how to disable this feature:: - datasette mydatabase.db --config allow_facet:off + datasette mydatabase.db --setting allow_facet off + +.. _setting_default_facet_size: default_facet_size ~~~~~~~~~~~~~~~~~~ The default number of unique rows returned by :ref:`facets` is 30. You can customize it like this:: - datasette mydatabase.db --config default_facet_size:50 + datasette mydatabase.db --setting default_facet_size 50 + +.. _setting_facet_time_limit_ms: facet_time_limit_ms ~~~~~~~~~~~~~~~~~~~ This is the time limit Datasette allows for calculating a facet, which defaults to 200ms:: - datasette mydatabase.db --config facet_time_limit_ms:1000 + datasette mydatabase.db --setting facet_time_limit_ms 1000 + +.. _setting_facet_suggest_time_limit_ms: facet_suggest_time_limit_ms ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -134,23 +147,27 @@ When Datasette calculates suggested facets it needs to run a SQL query for every You can increase this time limit like so:: - datasette mydatabase.db --config facet_suggest_time_limit_ms:500 + datasette mydatabase.db --setting facet_suggest_time_limit_ms 500 + +.. _setting_suggest_facets: suggest_facets ~~~~~~~~~~~~~~ Should Datasette calculate suggested facets? On by default, turn this off like so:: - datasette mydatabase.db --config suggest_facets:off + datasette mydatabase.db --setting suggest_facets off + +.. _setting_allow_download: allow_download ~~~~~~~~~~~~~~ Should users be able to download the original SQLite database using a link on the database index page? This is turned on by default - to disable database downloads, use the following:: - datasette mydatabase.db --config allow_download:off + datasette mydatabase.db --setting allow_download off -.. _config_default_cache_ttl: +.. _setting_default_cache_ttl: default_cache_ttl ~~~~~~~~~~~~~~~~~ @@ -159,19 +176,20 @@ Default HTTP caching max-age header in seconds, used for ``Cache-Control: max-ag :: - datasette mydatabase.db --config default_cache_ttl:60 + datasette mydatabase.db --setting default_cache_ttl 60 -.. _config_default_cache_ttl_hashed: +.. _setting_default_cache_ttl_hashed: default_cache_ttl_hashed ~~~~~~~~~~~~~~~~~~~~~~~~ -Default HTTP caching max-age for responses served using using the :ref:`hashed-urls mechanism `. Defaults to 365 days (31536000 seconds). +Default HTTP caching max-age for responses served using using the :ref:`hashed-urls mechanism `. Defaults to 365 days (31536000 seconds). :: - datasette mydatabase.db --config default_cache_ttl_hashed:10000 + datasette mydatabase.db --setting default_cache_ttl_hashed 10000 +.. 
_setting_cache_size_kb: cache_size_kb ~~~~~~~~~~~~~ @@ -180,9 +198,9 @@ Sets the amount of memory SQLite uses for its `per-connection cache Date: Tue, 24 Nov 2020 14:06:32 -0800 Subject: [PATCH 0293/1705] Renamed datasette.config() to .setting(), closes #1107 --- datasette/app.py | 32 ++++++++++++------------ datasette/facets.py | 22 ++++++++-------- datasette/url_builder.py | 4 +-- datasette/views/base.py | 10 ++++---- datasette/views/database.py | 4 +-- datasette/views/table.py | 10 ++++---- docs/internals.rst | 16 +++++++++++- tests/test_internals_datasette.py | 12 +++++++++ tests/test_internals_datasette_client.py | 12 ++++----- tests/test_internals_urls.py | 24 +++++++++--------- 10 files changed, 86 insertions(+), 60 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 3bb6ce79..88d5ecc6 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -264,15 +264,15 @@ class Datasette: raise StartupError("config.json should be renamed to settings.json") if config_dir and (config_dir / "settings.json").exists() and not config: config = json.load((config_dir / "settings.json").open()) - self._config = dict(DEFAULT_SETTINGS, **(config or {})) + self._settings = dict(DEFAULT_SETTINGS, **(config or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note self.executor = futures.ThreadPoolExecutor( - max_workers=self.config("num_sql_threads") + max_workers=self.setting("num_sql_threads") ) - self.max_returned_rows = self.config("max_returned_rows") - self.sql_time_limit_ms = self.config("sql_time_limit_ms") - self.page_size = self.config("default_page_size") + self.max_returned_rows = self.setting("max_returned_rows") + self.sql_time_limit_ms = self.setting("sql_time_limit_ms") + self.page_size = self.setting("default_page_size") # Execute plugins in constructor, to ensure they are available # when the rest of `datasette inspect` executes if self.plugins_dir: @@ -347,12 +347,12 @@ class Datasette: def remove_database(self, name): self.databases.pop(name) - def config(self, key): - return self._config.get(key, None) + def setting(self, key): + return self._settings.get(key, None) def config_dict(self): # Returns a fully resolved config dictionary, useful for templates - return {option.name: self.config(option.name) for option in SETTINGS} + return {option.name: self.setting(option.name) for option in SETTINGS} def metadata(self, key=None, database=None, table=None, fallback=True): """ @@ -454,8 +454,8 @@ class Datasette: conn.enable_load_extension(True) for extension in self.sqlite_extensions: conn.execute(f"SELECT load_extension('{extension}')") - if self.config("cache_size_kb"): - conn.execute(f"PRAGMA cache_size=-{self.config('cache_size_kb')}") + if self.setting("cache_size_kb"): + conn.execute(f"PRAGMA cache_size=-{self.setting('cache_size_kb')}") # pylint: disable=no-member pm.hook.prepare_connection(conn=conn, database=database, datasette=self) @@ -567,7 +567,7 @@ class Datasette: def absolute_url(self, request, path): url = urllib.parse.urljoin(request.url, path) - if url.startswith("http://") and self.config("force_https_urls"): + if url.startswith("http://") and self.setting("force_https_urls"): url = "https://" + url[len("http://") :] return url @@ -781,12 +781,12 @@ class Datasette: "extra_js_urls": await self._asset_urls( "extra_js_urls", template, context, request, view_name ), - "base_url": self.config("base_url"), + "base_url": self.setting("base_url"), "csrftoken": request.scope["csrftoken"] if request else 
lambda: "", }, **extra_template_vars, } - if request and request.args.get("_context") and self.config("template_debug"): + if request and request.args.get("_context") and self.setting("template_debug"): return "
    <pre>{}</pre>
    ".format( jinja2.escape(json.dumps(template_context, default=repr, indent=4)) ) @@ -882,7 +882,7 @@ class Datasette: r"/-/plugins(?P(\.json)?)$", ) add_route( - JsonDataView.as_view(self, "settings.json", lambda: self._config), + JsonDataView.as_view(self, "settings.json", lambda: self._settings), r"/-/settings(?P(\.json)?)$", ) add_route( @@ -1001,7 +1001,7 @@ class DatasetteRouter: async def route_path(self, scope, receive, send, path): # Strip off base_url if present before routing - base_url = self.ds.config("base_url") + base_url = self.ds.setting("base_url") if base_url != "/" and path.startswith(base_url): path = "/" + path[len(base_url) :] request = Request(scope, receive) @@ -1016,7 +1016,7 @@ class DatasetteRouter: scope_modifications = {} # Apply force_https_urls, if set if ( - self.ds.config("force_https_urls") + self.ds.setting("force_https_urls") and scope["type"] == "http" and scope.get("scheme") != "https" ): diff --git a/datasette/facets.py b/datasette/facets.py index a818a9e9..8ad5a423 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -136,7 +136,7 @@ class ColumnFacet(Facet): async def suggest(self): row_count = await self.get_row_count() columns = await self.get_columns(self.sql, self.params) - facet_size = self.ds.config("default_facet_size") + facet_size = self.ds.setting("default_facet_size") suggested_facets = [] already_enabled = [c["config"]["simple"] for c in self.get_configs()] for column in columns: @@ -158,7 +158,7 @@ class ColumnFacet(Facet): suggested_facet_sql, self.params, truncate=False, - custom_time_limit=self.ds.config("facet_suggest_time_limit_ms"), + custom_time_limit=self.ds.setting("facet_suggest_time_limit_ms"), ) num_distinct_values = len(distinct_values) if ( @@ -188,7 +188,7 @@ class ColumnFacet(Facet): qs_pairs = self.get_querystring_pairs() - facet_size = self.ds.config("default_facet_size") + facet_size = self.ds.setting("default_facet_size") for source_and_config in self.get_configs(): config = source_and_config["config"] source = source_and_config["source"] @@ -208,7 +208,7 @@ class ColumnFacet(Facet): facet_sql, self.params, truncate=False, - custom_time_limit=self.ds.config("facet_time_limit_ms"), + custom_time_limit=self.ds.setting("facet_time_limit_ms"), ) facet_results_values = [] facet_results[column] = { @@ -290,7 +290,7 @@ class ArrayFacet(Facet): suggested_facet_sql, self.params, truncate=False, - custom_time_limit=self.ds.config("facet_suggest_time_limit_ms"), + custom_time_limit=self.ds.setting("facet_suggest_time_limit_ms"), log_sql_errors=False, ) types = tuple(r[0] for r in results.rows) @@ -305,7 +305,7 @@ class ArrayFacet(Facet): ), self.params, truncate=False, - custom_time_limit=self.ds.config( + custom_time_limit=self.ds.setting( "facet_suggest_time_limit_ms" ), log_sql_errors=False, @@ -335,7 +335,7 @@ class ArrayFacet(Facet): facet_results = {} facets_timed_out = [] - facet_size = self.ds.config("default_facet_size") + facet_size = self.ds.setting("default_facet_size") for source_and_config in self.get_configs(): config = source_and_config["config"] source = source_and_config["source"] @@ -354,7 +354,7 @@ class ArrayFacet(Facet): facet_sql, self.params, truncate=False, - custom_time_limit=self.ds.config("facet_time_limit_ms"), + custom_time_limit=self.ds.setting("facet_time_limit_ms"), ) facet_results_values = [] facet_results[column] = { @@ -421,7 +421,7 @@ class DateFacet(Facet): suggested_facet_sql, self.params, truncate=False, - custom_time_limit=self.ds.config("facet_suggest_time_limit_ms"), + 
custom_time_limit=self.ds.setting("facet_suggest_time_limit_ms"), log_sql_errors=False, ) values = tuple(r[0] for r in results.rows) @@ -446,7 +446,7 @@ class DateFacet(Facet): facet_results = {} facets_timed_out = [] args = dict(self.get_querystring_pairs()) - facet_size = self.ds.config("default_facet_size") + facet_size = self.ds.setting("default_facet_size") for source_and_config in self.get_configs(): config = source_and_config["config"] source = source_and_config["source"] @@ -467,7 +467,7 @@ class DateFacet(Facet): facet_sql, self.params, truncate=False, - custom_time_limit=self.ds.config("facet_time_limit_ms"), + custom_time_limit=self.ds.setting("facet_time_limit_ms"), ) facet_results_values = [] facet_results[column] = { diff --git a/datasette/url_builder.py b/datasette/url_builder.py index 697f60ae..3034b664 100644 --- a/datasette/url_builder.py +++ b/datasette/url_builder.py @@ -10,7 +10,7 @@ class Urls: if not isinstance(path, PrefixedUrlString): if path.startswith("/"): path = path[1:] - path = self.ds.config("base_url") + path + path = self.ds.setting("base_url") + path if format is not None: path = path_with_format(path=path, format=format) return PrefixedUrlString(path) @@ -29,7 +29,7 @@ class Urls: def database(self, database, format=None): db = self.ds.databases[database] - if self.ds.config("hash_urls") and db.hash: + if self.ds.setting("hash_urls") and db.hash: path = self.path(f"{database}-{db.hash[:HASH_LENGTH]}", format=format) else: path = self.path(database, format=format) diff --git a/datasette/views/base.py b/datasette/views/base.py index b3a54bcc..bde8449f 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -230,7 +230,7 @@ class DataView(BaseView): should_redirect += kwargs["as_db"] if ( - (self.ds.config("hash_urls") or "_hash" in request.args) + (self.ds.setting("hash_urls") or "_hash" in request.args) and # Redirect only if database is immutable not self.ds.databases[name].is_mutable @@ -260,7 +260,7 @@ class DataView(BaseView): stream = request.args.get("_stream") if stream: # Some quick sanity checks - if not self.ds.config("allow_csv_stream"): + if not self.ds.setting("allow_csv_stream"): raise BadRequest("CSV streaming is disabled") if request.args.get("_next"): raise BadRequest("_next not allowed for CSV streaming") @@ -296,7 +296,7 @@ class DataView(BaseView): async def stream_fn(r): nonlocal data - writer = csv.writer(LimitedWriter(r, self.ds.config("max_csv_mb"))) + writer = csv.writer(LimitedWriter(r, self.ds.setting("max_csv_mb"))) first = True next = None while first or (next and stream): @@ -566,9 +566,9 @@ class DataView(BaseView): ttl = request.args.get("_ttl", None) if ttl is None or not ttl.isdigit(): if correct_hash_provided: - ttl = self.ds.config("default_cache_ttl_hashed") + ttl = self.ds.setting("default_cache_ttl_hashed") else: - ttl = self.ds.config("default_cache_ttl") + ttl = self.ds.setting("default_cache_ttl") return self.set_response_headers(r, ttl) diff --git a/datasette/views/database.py b/datasette/views/database.py index d4ed8570..17c78150 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -136,7 +136,7 @@ class DatabaseView(DataView): "show_hidden": request.args.get("_show_hidden"), "editable": True, "metadata": metadata, - "allow_download": self.ds.config("allow_download") + "allow_download": self.ds.setting("allow_download") and not db.is_mutable and database != ":memory:", }, @@ -161,7 +161,7 @@ class DatabaseDownload(DataView): db = self.ds.databases[database] if db.is_memory: 
raise DatasetteError("Cannot download :memory: database", status=404) - if not self.ds.config("allow_download") or db.is_mutable: + if not self.ds.setting("allow_download") or db.is_mutable: raise Forbidden("Database download is forbidden") if not db.path: raise DatasetteError("Cannot download database", status=404) diff --git a/datasette/views/table.py b/datasette/views/table.py index 09c2d740..a0de2a8e 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -121,7 +121,7 @@ class RowTableShared(DataView): } cell_rows = [] - base_url = self.ds.config("base_url") + base_url = self.ds.setting("base_url") for row in rows: cells = [] # Unless we are a view, the first column is a link - either to the rowid @@ -654,7 +654,7 @@ class TableView(RowTableShared): pass # facets support - if not self.ds.config("allow_facet") and any( + if not self.ds.setting("allow_facet") and any( arg.startswith("_facet") for arg in request.args ): raise BadRequest("_facet= is not allowed") @@ -772,8 +772,8 @@ class TableView(RowTableShared): suggested_facets = [] if ( - self.ds.config("suggest_facets") - and self.ds.config("allow_facet") + self.ds.setting("suggest_facets") + and self.ds.setting("allow_facet") and not _next ): for facet in facet_instances: @@ -801,7 +801,7 @@ class TableView(RowTableShared): results.description, rows, link_column=not is_view, - truncate_cells=self.ds.config("truncate_cells_html"), + truncate_cells=self.ds.setting("truncate_cells_html"), ) metadata = ( (self.ds.metadata("databases") or {}) diff --git a/docs/internals.rst b/docs/internals.rst index cec1268f..78d4e5d2 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -350,7 +350,21 @@ Returns the absolute URL for the given path, including the protocol and host. Fo absolute_url = datasette.absolute_url(request, "/dbname/table.json") # Would return "http://localhost:8001/dbname/table.json" -The current request object is used to determine the hostname and protocol that should be used for the returned URL. The :ref:`settings_force_https_urls` configuration setting is taken into account. +The current request object is used to determine the hostname and protocol that should be used for the returned URL. The :ref:`setting_force_https_urls` configuration setting is taken into account. + +.setting(key) +------------- + +``key`` - string + The name of the setting, e.g. ``base_url``. + +Returns the configured value for the specified :ref:`setting `. This can be a string, boolean or integer depending on the requested setting. + +For example: + +.. code-block:: python + + downloads_are_allowed = datasette.setting("allow_download") .. _internals_datasette_client: diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index 0be0b932..56bc2fb4 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -33,3 +33,15 @@ def test_sign_unsign(datasette, value, namespace): assert value == datasette.unsign(signed, *extra_args) with pytest.raises(BadSignature): datasette.unsign(signed[:-1] + ("!" if signed[-1] != "!" 
else ":")) + + +@pytest.mark.parametrize( + "setting,expected", + ( + ("base_url", "/"), + ("max_csv_mb", 100), + ("allow_csv_stream", True), + ), +) +def test_datasette_setting(datasette, setting, expected): + assert datasette.setting(setting) == expected diff --git a/tests/test_internals_datasette_client.py b/tests/test_internals_datasette_client.py index 0b1c5f0e..c538bef1 100644 --- a/tests/test_internals_datasette_client.py +++ b/tests/test_internals_datasette_client.py @@ -33,10 +33,10 @@ async def test_client_methods(datasette, method, path, expected_status): @pytest.mark.asyncio @pytest.mark.parametrize("prefix", [None, "/prefix/"]) async def test_client_post(datasette, prefix): - original_base_url = datasette._config["base_url"] + original_base_url = datasette._settings["base_url"] try: if prefix is not None: - datasette._config["base_url"] = prefix + datasette._settings["base_url"] = prefix response = await datasette.client.post( "/-/messages", data={ @@ -48,7 +48,7 @@ async def test_client_post(datasette, prefix): assert response.status_code == 302 assert "ds_messages" in response.cookies finally: - datasette._config["base_url"] = original_base_url + datasette._settings["base_url"] = original_base_url @pytest.mark.asyncio @@ -56,12 +56,12 @@ async def test_client_post(datasette, prefix): "prefix,expected_path", [(None, "/asgi-scope"), ("/prefix/", "/prefix/asgi-scope")] ) async def test_client_path(datasette, prefix, expected_path): - original_base_url = datasette._config["base_url"] + original_base_url = datasette._settings["base_url"] try: if prefix is not None: - datasette._config["base_url"] = prefix + datasette._settings["base_url"] = prefix response = await datasette.client.get("/asgi-scope") path = response.json()["path"] assert path == expected_path finally: - datasette._config["base_url"] = original_base_url + datasette._settings["base_url"] = original_base_url diff --git a/tests/test_internals_urls.py b/tests/test_internals_urls.py index 89290911..fd05c1b6 100644 --- a/tests/test_internals_urls.py +++ b/tests/test_internals_urls.py @@ -20,14 +20,14 @@ def ds(): ], ) def test_path(ds, base_url, path, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual = ds.urls.path(path) assert actual == expected assert isinstance(actual, PrefixedUrlString) def test_path_applied_twice_does_not_double_prefix(ds): - ds._config["base_url"] = "/prefix/" + ds._settings["base_url"] = "/prefix/" path = ds.urls.path("/") assert path == "/prefix/" path = ds.urls.path(path) @@ -42,7 +42,7 @@ def test_path_applied_twice_does_not_double_prefix(ds): ], ) def test_instance(ds, base_url, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual = ds.urls.instance() assert actual == expected assert isinstance(actual, PrefixedUrlString) @@ -56,7 +56,7 @@ def test_instance(ds, base_url, expected): ], ) def test_static(ds, base_url, file, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual = ds.urls.static(file) assert actual == expected assert isinstance(actual, PrefixedUrlString) @@ -80,7 +80,7 @@ def test_static(ds, base_url, file, expected): ], ) def test_static_plugins(ds, base_url, plugin, file, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual = ds.urls.static_plugins(plugin, file) assert actual == expected assert isinstance(actual, PrefixedUrlString) @@ -94,7 +94,7 @@ def test_static_plugins(ds, base_url, plugin, file, expected): ], ) def 
test_logout(ds, base_url, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual = ds.urls.logout() assert actual == expected assert isinstance(actual, PrefixedUrlString) @@ -109,7 +109,7 @@ def test_logout(ds, base_url, expected): ], ) def test_database(ds, base_url, format, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual = ds.urls.database(":memory:", format=format) assert actual == expected assert isinstance(actual, PrefixedUrlString) @@ -125,7 +125,7 @@ def test_database(ds, base_url, format, expected): ], ) def test_table_and_query(ds, base_url, name, format, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual1 = ds.urls.table(":memory:", name, format=format) assert actual1 == expected assert isinstance(actual1, PrefixedUrlString) @@ -143,7 +143,7 @@ def test_table_and_query(ds, base_url, name, format, expected): ], ) def test_row(ds, base_url, format, expected): - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url actual = ds.urls.row(":memory:", "facetable", "1", format=format) assert actual == expected assert isinstance(actual, PrefixedUrlString) @@ -152,9 +152,9 @@ def test_row(ds, base_url, format, expected): @pytest.mark.parametrize("base_url", ["/", "/prefix/"]) def test_database_hashed(app_client_with_hash, base_url): ds = app_client_with_hash.ds - original_base_url = ds._config["base_url"] + original_base_url = ds._settings["base_url"] try: - ds._config["base_url"] = base_url + ds._settings["base_url"] = base_url db_hash = ds.get_database("fixtures").hash assert len(db_hash) == 64 expected = f"{base_url}fixtures-{db_hash[:7]}" @@ -163,4 +163,4 @@ def test_database_hashed(app_client_with_hash, base_url): assert ds.urls.query("fixtures", "name") == expected + "/name" finally: # Reset this since fixture is shared with other tests - ds._config["base_url"] = original_base_url + ds._settings["base_url"] = original_base_url From 37d18a5bce08c9ee53c080f613bae84fc2ccc853 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 24 Nov 2020 19:05:35 -0800 Subject: [PATCH 0294/1705] datasette publish cloudrun --apt-get-install, closes #1110 --- datasette/publish/cloudrun.py | 8 +++ datasette/utils/__init__.py | 27 +++++--- docs/datasette-publish-cloudrun-help.txt | 1 + tests/test_publish_cloudrun.py | 78 +++++++++++++++++++++--- 4 files changed, 97 insertions(+), 17 deletions(-) diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 54f55fcb..54f06da0 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -36,6 +36,12 @@ def publish_subcommand(publish): callback=_validate_memory, help="Memory to allocate in Cloud Run, e.g. 
1Gi", ) + @click.option( + "--apt-get-install", + "apt_get_extras", + multiple=True, + help="Additional packages to apt-get install", + ) def cloudrun( files, metadata, @@ -60,6 +66,7 @@ def publish_subcommand(publish): spatialite, show_files, memory, + apt_get_extras, ): fail_if_publish_binary_not_installed( "gcloud", "Google Cloud", "https://cloud.google.com/sdk/" @@ -122,6 +129,7 @@ def publish_subcommand(publish): secret, extra_metadata, environment_variables, + apt_get_extras=apt_get_extras, ): if show_files: if os.path.exists("metadata.json"): diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index d62302e9..54a5b247 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -47,11 +47,10 @@ reserved_words = set( ).split() ) -SPATIALITE_DOCKERFILE_EXTRAS = r""" +APT_GET_DOCKERFILE_EXTRAS = r""" RUN apt-get update && \ - apt-get install -y python3-dev gcc libsqlite3-mod-spatialite && \ + apt-get install -y {} && \ rm -rf /var/lib/apt/lists/* -ENV SQLITE_EXTENSIONS /usr/lib/x86_64-linux-gnu/mod_spatialite.so """ # Can replace with sqlite-utils when I add that dependency @@ -308,10 +307,12 @@ def make_dockerfile( secret, environment_variables=None, port=8001, + apt_get_extras=None, ): cmd = ["datasette", "serve", "--host", "0.0.0.0"] environment_variables = environment_variables or {} environment_variables["DATASETTE_SECRET"] = secret + apt_get_extras = apt_get_extras or [] for filename in files: cmd.extend(["-i", filename]) cmd.extend(["--cors", "--inspect-file", "inspect-data.json"]) @@ -340,28 +341,38 @@ def make_dockerfile( else: install = ["datasette"] + list(install) + apt_get_extras_ = [] + apt_get_extras_.extend(apt_get_extras) + apt_get_extras = apt_get_extras_ + if spatialite: + apt_get_extras.extend(["python3-dev", "gcc", "libsqlite3-mod-spatialite"]) + environment_variables[ + "SQLITE_EXTENSIONS" + ] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so" return """ FROM python:3.8 COPY . /app WORKDIR /app -{spatialite_extras} +{apt_get_extras} {environment_variables} RUN pip install -U {install_from} RUN datasette inspect {files} --inspect-file inspect-data.json ENV PORT {port} EXPOSE {port} CMD {cmd}""".format( + apt_get_extras=APT_GET_DOCKERFILE_EXTRAS.format(" ".join(apt_get_extras)) + if apt_get_extras + else "", environment_variables="\n".join( [ "ENV {} '{}'".format(key, value) for key, value in environment_variables.items() ] ), - files=" ".join(files), - cmd=cmd, install_from=" ".join(install), - spatialite_extras=SPATIALITE_DOCKERFILE_EXTRAS if spatialite else "", + files=" ".join(files), port=port, + cmd=cmd, ).strip() @@ -382,6 +393,7 @@ def temporary_docker_directory( extra_metadata=None, environment_variables=None, port=8001, + apt_get_extras=None, ): extra_metadata = extra_metadata or {} tmp = tempfile.TemporaryDirectory() @@ -415,6 +427,7 @@ def temporary_docker_directory( secret, environment_variables, port=port, + apt_get_extras=apt_get_extras, ) os.chdir(datasette_dir) if metadata_content: diff --git a/docs/datasette-publish-cloudrun-help.txt b/docs/datasette-publish-cloudrun-help.txt index a625bd10..8cf293d9 100644 --- a/docs/datasette-publish-cloudrun-help.txt +++ b/docs/datasette-publish-cloudrun-help.txt @@ -30,4 +30,5 @@ Options: --spatialite Enable SpatialLite extension --show-files Output the generated Dockerfile and metadata.json --memory TEXT Memory to allocate in Cloud Run, e.g. 1Gi + --apt-get-install TEXT Additional packages to apt-get install --help Show this message and exit. 
diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index e629bba0..7adef39d 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -182,22 +182,26 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which): "x-secret", ], ) + assert result.exit_code == 0 dockerfile = ( result.output.split("==== Dockerfile ====\n")[1] .split("\n====================\n")[0] .strip() ) - expected = """FROM python:3.8 -COPY . /app -WORKDIR /app + expected = textwrap.dedent( + r""" + FROM python:3.8 + COPY . /app + WORKDIR /app -ENV DATASETTE_AUTH_GITHUB_CLIENT_ID 'x-client-id' -ENV DATASETTE_SECRET 'x-secret' -RUN pip install -U datasette -RUN datasette inspect test.db --inspect-file inspect-data.json -ENV PORT 8001 -EXPOSE 8001 -CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data.json --metadata metadata.json --port $PORT""".strip() + ENV DATASETTE_AUTH_GITHUB_CLIENT_ID 'x-client-id' + ENV DATASETTE_SECRET 'x-secret' + RUN pip install -U datasette + RUN datasette inspect test.db --inspect-file inspect-data.json + ENV PORT 8001 + EXPOSE 8001 + CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data.json --metadata metadata.json --port $PORT""" + ).strip() assert expected == dockerfile metadata = ( result.output.split("=== metadata.json ===\n")[1] @@ -213,3 +217,57 @@ CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data } }, } == json.loads(metadata) + + +@mock.patch("shutil.which") +@mock.patch("datasette.publish.cloudrun.check_output") +@mock.patch("datasette.publish.cloudrun.check_call") +def test_publish_cloudrun_apt_get_install(mock_call, mock_output, mock_which): + mock_which.return_value = True + mock_output.return_value = "myproject" + + runner = CliRunner() + with runner.isolated_filesystem(): + open("test.db", "w").write("data") + result = runner.invoke( + cli.cli, + [ + "publish", + "cloudrun", + "test.db", + "--service", + "datasette", + "--show-files", + "--secret", + "x-secret", + "--apt-get-install", + "ripgrep", + "--spatialite", + ], + ) + assert result.exit_code == 0 + dockerfile = ( + result.output.split("==== Dockerfile ====\n")[1] + .split("\n====================\n")[0] + .strip() + ) + expected = textwrap.dedent( + r""" + FROM python:3.8 + COPY . /app + WORKDIR /app + + RUN apt-get update && \ + apt-get install -y ripgrep python3-dev gcc libsqlite3-mod-spatialite && \ + rm -rf /var/lib/apt/lists/* + + ENV DATASETTE_SECRET 'x-secret' + ENV SQLITE_EXTENSIONS '/usr/lib/x86_64-linux-gnu/mod_spatialite.so' + RUN pip install -U datasette + RUN datasette inspect test.db --inspect-file inspect-data.json + ENV PORT 8001 + EXPOSE 8001 + CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data.json --port $PORT + """ + ).strip() + assert expected == dockerfile From bbde835a1fec01458e8d00929e7bab6d6a5ba948 Mon Sep 17 00:00:00 2001 From: Jeff Triplett Date: Sat, 28 Nov 2020 13:53:48 -0600 Subject: [PATCH 0295/1705] Fix --metadata doc usage (#1112) Thanks, @jefftriplett. --- docs/changelog.rst | 4 ++-- docs/settings.rst | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2916b373..20181ca9 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -453,7 +453,7 @@ You can now create :ref:`custom pages ` within your Datasette inst :ref:`settings_dir` (`#731 `__) allows you to define a custom Datasette instance as a directory. 
So instead of running the following:: $ datasette one.db two.db \ - --metadata.json \ + --metadata=metadata.json \ --template-dir=templates/ \ --plugins-dir=plugins \ --static css:css @@ -770,7 +770,7 @@ Small changes 0.28 (2019-05-19) ----------------- -A `salmagundi `__ of new features! +A `salmagundi `__ of new features! .. _v0_28_databases_that_change: diff --git a/docs/settings.rst b/docs/settings.rst index 350fd048..156893e0 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -23,7 +23,7 @@ Configuration directory mode Normally you configure Datasette using command-line options. For a Datasette instance with custom templates, custom plugins, a static directory and several databases this can get quite verbose:: $ datasette one.db two.db \ - --metadata.json \ + --metadata=metadata.json \ --template-dir=templates/ \ --plugins-dir=plugins \ --static css:css From 50cc6af01672526791900df7c8834a62fa094852 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 28 Nov 2020 15:34:56 -0800 Subject: [PATCH 0296/1705] Fixed some broken internal links, refs #1106 --- docs/changelog.rst | 14 +++++++------- docs/csv_export.rst | 6 +++--- docs/deploying.rst | 2 +- docs/pages.rst | 2 +- docs/performance.rst | 6 +++--- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 20181ca9..15992020 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -353,9 +353,9 @@ Signed values and secrets Both flash messages and user authentication needed a way to sign values and set signed cookies. Two new methods are now available for plugins to take advantage of this mechanism: :ref:`datasette_sign` and :ref:`datasette_unsign`. -Datasette will generate a secret automatically when it starts up, but to avoid resetting the secret (and hence invalidating any cookies) every time the server restarts you should set your own secret. You can pass a secret to Datasette using the new ``--secret`` option or with a ``DATASETTE_SECRET`` environment variable. See :ref:`settings_secret` for more details. +Datasette will generate a secret automatically when it starts up, but to avoid resetting the secret (and hence invalidating any cookies) every time the server restarts you should set your own secret. You can pass a secret to Datasette using the new ``--secret`` option or with a ``DATASETTE_SECRET`` environment variable. See :ref:`setting_secret` for more details. -You can also set a secret when you deploy Datasette using ``datasette publish`` or ``datasette package`` - see :ref:`settings_publish_secrets`. +You can also set a secret when you deploy Datasette using ``datasette publish`` or ``datasette package`` - see :ref:`setting_publish_secrets`. Plugins can now sign values and verify their signatures using the :ref:`datasette.sign() ` and :ref:`datasette.unsign() ` methods. @@ -450,7 +450,7 @@ A small release which provides improved internal methods for use in plugins, alo You can now create :ref:`custom pages ` within your Datasette instance using a custom template file. For example, adding a template file called ``templates/pages/about.html`` will result in a new page being served at ``/about`` on your instance. See the :ref:`custom pages documentation ` for full details, including how to return custom HTTP headers, redirects and status codes. (`#648 `__) -:ref:`settings_dir` (`#731 `__) allows you to define a custom Datasette instance as a directory. 
So instead of running the following::

    $ datasette one.db two.db \
       --metadata=metadata.json \
       --template-dir=templates/ \
       --plugins-dir=plugins \
       --static css:css

@@ -480,7 +480,7 @@ Also in this release:

* Datasette :ref:`metadata` can now be provided as a YAML file as an optional alternative to JSON. See :ref:`metadata_yaml`. (`#713 `__)
* Removed support for ``datasette publish now``, which used the now-retired Zeit Now v1 hosting platform. A new plugin, `datasette-publish-now `__, can be installed to publish data to Zeit (`now Vercel `__) Now v2. (`#710 `__)
* Fixed a bug where the ``extra_template_vars(request, view_name)`` plugin hook was not receiving the correct ``view_name``. (`#716 `__)
-* Variables added to the template context by the ``extra_template_vars()`` plugin hook are now shown in the ``?_context=1`` debugging mode (see :ref:`settings_template_debug`). (`#693 `__)
+* Variables added to the template context by the ``extra_template_vars()`` plugin hook are now shown in the ``?_context=1`` debugging mode (see :ref:`setting_template_debug`). (`#693 `__)
* Fixed a bug where the "templates considered" HTML comment was no longer being displayed. (`#689 `__)
* Fixed a ``datasette publish`` bug where ``--plugin-secret`` would over-ride plugin configuration in the provided ``metadata.json`` file. (`#724 `__)
* Added a new CSS class for customizing the canned query page. (`#727 `__)
@@ -565,7 +565,7 @@ Also in this release:

* asyncio task information is now included on the ``/-/threads`` debug page
* Bumped Uvicorn dependency 0.11
* You can now use ``--port 0`` to listen on an available port
-* New :ref:`settings_template_debug` setting for debugging templates, e.g. https://latest.datasette.io/fixtures/roadside_attractions?_context=1 (`#654 `__)
+* New :ref:`setting_template_debug` setting for debugging templates, e.g. https://latest.datasette.io/fixtures/roadside_attractions?_context=1 (`#654 `__)

.. _v0_32:

@@ -941,7 +941,7 @@ A number of small new features:

- ``datasette publish heroku`` now supports ``--extra-options``, fixes `#334 `_
- Custom error message if SpatiaLite is needed for specified database, closes `#331 `_
-- New config option: ``truncate_cells_html`` for :ref:`truncating long cell values ` in HTML view - closes `#330 `_
+- New config option: ``truncate_cells_html`` for :ref:`truncating long cell values ` in HTML view - closes `#330 `_
- Documentation for :ref:`datasette publish and datasette package `, closes `#337 `_
- Fixed compatibility with Python 3.7
- ``datasette publish heroku`` now supports app names via the ``-n`` option, which can also be used to overwrite an existing application [Russ Garrett]
@@ -1000,7 +1000,7 @@ Check out the :ref:`CSV export documentation ` for more details, or try the feature out on https://fivethirtyeight.datasettes.com/fivethirtyeight/bechdel%2Fmovies

-If your table has more than :ref:`settings_max_returned_rows` (default 1,000)
+If your table has more than :ref:`setting_max_returned_rows` (default 1,000)
Datasette provides the option to *stream all rows*. 
This option takes advantage of async Python and Datasette's efficient :ref:`pagination ` to
iterate through the entire matching result set and stream it back as a

diff --git a/docs/csv_export.rst b/docs/csv_export.rst
index 704cc19d..0bda20ef 100644
--- a/docs/csv_export.rst
+++ b/docs/csv_export.rst
@@ -23,7 +23,7 @@ file, which looks like this and has the following options:
   the ``city_id`` column is accompanied by a ``city_id_label`` column.

 * **stream all rows** - by default CSV files only contain the first
-  :ref:`settings_max_returned_rows` records. This option will cause Datasette to
+  :ref:`setting_max_returned_rows` records. This option will cause Datasette to
   loop through every matching record and return them as a single CSV file.

 You can try that out on https://latest.datasette.io/fixtures/facetable?_size=4
@@ -40,9 +40,9 @@ Since databases can get pretty large, by default this option is capped at 100MB
 if a table returns more than 100MB of data the last line of the CSV will be a
 truncation error message.

-You can increase or remove this limit using the :ref:`settings_max_csv_mb` config
+You can increase or remove this limit using the :ref:`setting_max_csv_mb` config
 setting. You can also disable the CSV export feature entirely using
-:ref:`settings_allow_csv_stream`.
+:ref:`setting_allow_csv_stream`.

 A note on URLs
 --------------
diff --git a/docs/deploying.rst b/docs/deploying.rst
index 4ca0e82a..d1abe6a3 100644
--- a/docs/deploying.rst
+++ b/docs/deploying.rst
@@ -58,7 +58,7 @@ Add a random value for the ``DATASETTE_SECRET`` - this will be used to sign Data

     $ python3 -c 'import secrets; print(secrets.token_hex(32))'

-This configuration will run Datasette against all database files contained in the ``/home/ubuntu/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`settings_dir` for details.
+This configuration will run Datasette against all database files contained in the ``/home/ubuntu/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`config_dir` for details.

 You can start the Datasette process running using the following::

diff --git a/docs/pages.rst b/docs/pages.rst
index 5f77bec7..0941c960 100644
--- a/docs/pages.rst
+++ b/docs/pages.rst
@@ -66,7 +66,7 @@ Row

 Every row in every Datasette table has its own URL. This means individual records can be linked to directly.

-Table cells with extremely long text contents are truncated on the table view according to the :ref:`settings_truncate_cells_html` setting. If a cell has been truncated the full length version of that cell will be available on the row page.
+Table cells with extremely long text contents are truncated on the table view according to the :ref:`setting_truncate_cells_html` setting. If a cell has been truncated the full length version of that cell will be available on the row page.

 Rows which are the targets of foreign key references from other tables will show a link to a filtered search for all records that reference that row. 
Here's an example from the Registers of Members Interests database: diff --git a/docs/performance.rst b/docs/performance.rst index 1d24adce..2727416d 100644 --- a/docs/performance.rst +++ b/docs/performance.rst @@ -56,7 +56,7 @@ Using a caching proxy in this way could enable a Datasette-backed visualization Datasette's integration with HTTP caches can be enabled using a combination of configuration options and querystring arguments. -The :ref:`settings_default_cache_ttl` setting sets the default HTTP cache TTL for all Datasette pages. This is 5 seconds unless you change it - you can set it to 0 if you wish to disable HTTP caching entirely. +The :ref:`setting_default_cache_ttl` setting sets the default HTTP cache TTL for all Datasette pages. This is 5 seconds unless you change it - you can set it to 0 if you wish to disable HTTP caching entirely. You can also change the cache timeout on a per-request basis using the ``?_ttl=10`` querystring parameter. This can be useful when you are working with the Datasette JSON API - you may decide that a specific query can be cached for a longer time, or maybe you need to set ``?_ttl=0`` for some requests for example if you are running a SQL ``order by random()`` query. @@ -65,9 +65,9 @@ Hashed URL mode When you open a database file in immutable mode using the ``-i`` option, Datasette calculates a SHA-256 hash of the contents of that file on startup. This content hash can then optionally be used to create URLs that are guaranteed to change if the contents of the file changes in the future. This results in URLs that can then be cached indefinitely by both browsers and caching proxies - an enormous potential performance optimization. -You can enable these hashed URLs in two ways: using the :ref:`settings_hash_urls` configuration setting (which affects all requests to Datasette) or via the ``?_hash=1`` querystring parameter (which only applies to the current request). +You can enable these hashed URLs in two ways: using the :ref:`setting_hash_urls` configuration setting (which affects all requests to Datasette) or via the ``?_hash=1`` querystring parameter (which only applies to the current request). -With hashed URLs enabled, any request to e.g. ``/mydatabase/mytable`` will 302 redirect to ``mydatabase-455fe3a/mytable``. The URL containing the hash will be served with a very long cache expire header - configured using :ref:`settings_default_cache_ttl_hashed` which defaults to 365 days. +With hashed URLs enabled, any request to e.g. ``/mydatabase/mytable`` will 302 redirect to ``mydatabase-455fe3a/mytable``. The URL containing the hash will be served with a very long cache expire header - configured using :ref:`setting_default_cache_ttl_hashed` which defaults to 365 days. Since these responses are cached for a long time, you may wish to build API clients against the non-hashed version of these URLs. These 302 redirects are served extremely quickly, so this should still be a performant way to work against the Datasette API. 
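The rebranding series above also renamed the internal read API from ``datasette.config(key)`` to ``datasette.setting(key)``. A minimal sketch of the new call, assuming a throwaway in-memory instance (note that the constructor keyword is still called ``config=`` at this point in the series):

.. code-block:: python

    from datasette.app import Datasette

    # Overrides passed via config= are merged over DEFAULT_SETTINGS
    ds = Datasette([], memory=True, config={"default_page_size": 50})

    assert ds.setting("default_page_size") == 50
    # Unknown keys fall back to None instead of raising a KeyError
    assert ds.setting("no_such_setting") is None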
From a8e66f9065fb55a3863cc05dfb2ce52f9618cdb7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 28 Nov 2020 15:54:35 -0800 Subject: [PATCH 0297/1705] Release 0.52 Refs #992, #1103, #1104, #1107, #1077, #1110, #1089, #1086, #1088, #1084 --- README.md | 1 + datasette/version.py | 2 +- docs/changelog.rst | 24 ++++++++++++++++++++++++ docs/internals.rst | 2 ++ 4 files changed, 28 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index a10ccfd3..c0019e9b 100644 --- a/README.md +++ b/README.md @@ -25,6 +25,7 @@ Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly new ## News + * 28th November 2020: [Datasette 0.52](https://docs.datasette.io/en/stable/changelog.html#v0-52) - `--config` is now `--setting`, new `database_actions` plugin hook, `datasette publish cloudrun --apt-get-install` option and several bug fixes. * 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. [Annotated release notes](https://simonwillison.net/2020/Nov/1/datasette-0-51/). * 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). * 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). diff --git a/datasette/version.py b/datasette/version.py index 2d949370..3b84c97b 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.51.1" +__version__ = "0.52" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 15992020..49772638 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,30 @@ Changelog ========= +.. _v0_52: + +0.52 (2020-11-28) +----------------- + +This release includes a number of changes relating to an internal rebranding effort: Datasette's **configuration** mechanism (things like ``datasette --config default_page_size:10``) has been renamed to **settings**. + +- New ``--setting default_page_size 10`` option as a replacement for ``--config default_page_size:10`` (note the lack of a colon). The ``--config`` option is deprecated but will continue working until Datasette 1.0. (`#992 `__) +- The ``/-/config`` introspection page is now ``/-/settings``, and the previous page redirects to the new one. (`#1103 `__) +- The ``config.json`` file in :ref:`config_dir` is now called ``settings.json``. (`#1104 `__) +- The undocumented ``datasette.config()`` internal method has been replaced by a documented :ref:`datasette_setting` method. (`#1107 `__) + +Also in this release: + +- New plugin hook: :ref:`plugin_hook_database_actions`, which adds menu items to a new cog menu shown at the top of the database page. (`#1077 `__) +- ``datasette publish cloudrun`` has a new ``--apt-get-install`` option that can be used to install additional Ubuntu packages as part of the deployment. This is useful for deploying the new `datasette-ripgrep plugin `__. 
(`#1110 `__) +- Swept the documentation to remove words that minimize involved difficulty. (`#1089 `__) + +And some bug fixes: + +- Foreign keys linking to rows with blank label columns now display as a hyphen, allowing those links to be clicked. (`#1086 `__) +- Fixed bug where row pages could sometimes 500 if the underlying queries exceeded a time limit. (`#1088 `__) +- Fixed a bug where the table action menu could appear partially obscured by the edge of the page. (`#1084 `__) + .. _v0_51_1: 0.51.1 (2020-10-31) diff --git a/docs/internals.rst b/docs/internals.rst index 78d4e5d2..ff566f69 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -352,6 +352,8 @@ Returns the absolute URL for the given path, including the protocol and host. Fo The current request object is used to determine the hostname and protocol that should be used for the returned URL. The :ref:`setting_force_https_urls` configuration setting is taken into account. +.. _datasette_setting: + .setting(key) ------------- From 12877d7a48e2aa28bb5e780f929a218f7265d849 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 28 Nov 2020 23:44:57 -0800 Subject: [PATCH 0298/1705] Plugin testing docs now recommend datasette.client, closes #1102 --- docs/testing_plugins.rst | 57 +++++++++++++++++++--------------------- 1 file changed, 27 insertions(+), 30 deletions(-) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 76f69a6a..d8ebdc77 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -11,24 +11,24 @@ If you use the template described in :ref:`writing_plugins_cookiecutter` your pl from datasette.app import Datasette import pytest - import httpx + @pytest.mark.asyncio async def test_plugin_is_installed(): - app = Datasette([], memory=True).app() - async with httpx.AsyncClient(app=app) as client: - response = await client.get("http://localhost/-/plugins.json") - assert 200 == response.status_code - installed_plugins = {p["name"] for p in response.json()} - assert "datasette-plugin-template-demo" in installed_plugins + datasette = Datasette([], memory=True) + response = await datasette.client.get("/-/plugins.json") + assert response.status_code == 200 + installed_plugins = {p["name"] for p in response.json()} + assert "datasette-plugin-template-demo" in installed_plugins -This test uses the `HTTPX `__ Python library to run mock HTTP requests through a fresh instance of Datasette. This is the recommended way to write tests against a Datasette instance. -It also uses the `pytest-asyncio `__ package to add support for ``async def`` test functions running under pytest. +This test uses the :ref:`internals_datasette_client` object to exercise a test instance of Datasette. ``datasette.client`` is a wrapper around the `HTTPX `__ Python library which can imitate HTTP requests using ASGI. This is the recommended way to write tests against a Datasette instance. + +This test also uses the `pytest-asyncio `__ package to add support for ``async def`` test functions running under pytest. You can install these packages like so:: - pip install pytest pytest-asyncio httpx + pip install pytest pytest-asyncio If you are building an installable package you can add them as test dependencies to your ``setup.py`` module like this: @@ -38,7 +38,7 @@ If you are building an installable package you can add them as test dependencies name="datasette-my-plugin", # ... 
extras_require={
-        "test": ["pytest", "pytest-asyncio", "httpx"]
+        "test": ["pytest", "pytest-asyncio"]
     },
     tests_require=["datasette-my-plugin[test]"],
 )
@@ -65,12 +65,11 @@ Here's an example that uses the `sqlite-utils library Some dogs" in response.text
+    async def test_example_table_html(datasette):
+        response = await datasette.client.get("/test/dogs")
+        assert ">Some dogs" in response.text

-Here the ``ds()`` function defines the fixture, which is then automatically passed to the two test functions based on pytest automatically matching their ``ds`` function parameters.
+Here the ``datasette()`` function defines the fixture, which is then automatically passed to the two test functions based on pytest automatically matching their ``datasette`` function parameters.

 The ``@pytest.fixture(scope="session")`` line here ensures the fixture is reused for the full ``pytest`` execution session. This means that the temporary database file will be created once and reused for each test.

@@ -119,5 +116,5 @@ If you want to create that test database repeatedly for every individual test fu

 .. code-block:: python

     @pytest.fixture
-    def ds(tmp_path_factory):
-        # ...
+    def datasette(tmp_path_factory):
+        # This fixture will be executed repeatedly for every test

From e800ffcf7cc6a915eb554b369c654f87162575e5 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 29 Nov 2020 09:37:43 -0800
Subject: [PATCH 0299/1705] /usr/local/lib/mod_spatialite.so

Closes #1114
---
 datasette/utils/__init__.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 54a5b247..d326c773 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -57,6 +57,7 @@ RUN apt-get update && \
 SPATIALITE_PATHS = (
     "/usr/lib/x86_64-linux-gnu/mod_spatialite.so",
     "/usr/local/lib/mod_spatialite.dylib",
+    "/usr/local/lib/mod_spatialite.so",
 )
 # Length of hash subset used in hashed URLs:
 HASH_LENGTH = 7

From deb0be4ae56f191f121239b29e83dd53b62d6305 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 29 Nov 2020 11:30:17 -0800
Subject: [PATCH 0300/1705] Fix bug where compound foreign keys produced broken
 links, closes #1098

---
 datasette/utils/__init__.py      | 51 +++++++++++++++++++++-----------
 tests/fixtures.py                |  7 +++--
 tests/test_api.py                | 14 +++++++--
 tests/test_csv.py                |  6 ++--
 tests/test_html.py               |  6 ++++
 tests/test_internals_database.py | 33 ++++++++++++++++++---
 6 files changed, 88 insertions(+), 29 deletions(-)

diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index d326c773..d467383d 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -1,7 +1,7 @@
 import asyncio
 from contextlib import contextmanager
 import click
-from collections import OrderedDict, namedtuple
+from collections import OrderedDict, namedtuple, Counter
 import base64
 import hashlib
 import inspect
@@ -474,9 +474,25 @@ def detect_primary_keys(conn, table):
     if info is not None:
         id, seq, table_name, from_, to_, on_update, on_delete, match = info
         fks.append(
-            {"column": from_, "other_table": table_name, "other_column": to_}
+            {
+                "column": from_,
+                "other_table": table_name,
+                "other_column": to_,
+                "id": id,
+                "seq": seq,
+            }
         )
-    return fks
+    # Filter out compound foreign keys by removing any where "id" is not unique
+    id_counts = Counter(fk["id"] for fk in fks)
+    return [
+        {
+            "column": fk["column"],
+            "other_table": fk["other_table"],
+            "other_column": fk["other_column"],
+        }
+        for fk in fks
+        if id_counts[fk["id"]] == 1
+    ]


 def 
get_all_foreign_keys(conn): @@ -487,20 +503,21 @@ def get_all_foreign_keys(conn): for table in tables: table_to_foreign_keys[table] = {"incoming": [], "outgoing": []} for table in tables: - infos = conn.execute(f"PRAGMA foreign_key_list([{table}])").fetchall() - for info in infos: - if info is not None: - id, seq, table_name, from_, to_, on_update, on_delete, match = info - if table_name not in table_to_foreign_keys: - # Weird edge case where something refers to a table that does - # not actually exist - continue - table_to_foreign_keys[table_name]["incoming"].append( - {"other_table": table, "column": to_, "other_column": from_} - ) - table_to_foreign_keys[table]["outgoing"].append( - {"other_table": table_name, "column": from_, "other_column": to_} - ) + fks = get_outbound_foreign_keys(conn, table) + for fk in fks: + table_name = fk["other_table"] + from_ = fk["column"] + to_ = fk["other_column"] + if table_name not in table_to_foreign_keys: + # Weird edge case where something refers to a table that does + # not actually exist + continue + table_to_foreign_keys[table_name]["incoming"].append( + {"other_table": table, "column": to_, "other_column": from_} + ) + table_to_foreign_keys[table]["outgoing"].append( + {"other_table": table_name, "column": from_, "other_column": to_} + ) return table_to_foreign_keys diff --git a/tests/fixtures.py b/tests/fixtures.py index 3abca821..f95a2d6b 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -388,9 +388,12 @@ CREATE TABLE foreign_key_references ( foreign_key_with_label varchar(30), foreign_key_with_blank_label varchar(30), foreign_key_with_no_label varchar(30), + foreign_key_compound_pk1 varchar(30), + foreign_key_compound_pk2 varchar(30), FOREIGN KEY (foreign_key_with_label) REFERENCES simple_primary_key(id), FOREIGN KEY (foreign_key_with_blank_label) REFERENCES simple_primary_key(id), FOREIGN KEY (foreign_key_with_no_label) REFERENCES primary_key_multiple_columns(id) + FOREIGN KEY (foreign_key_compound_pk1, foreign_key_compound_pk2) REFERENCES compound_primary_key(pk1, pk2) ); CREATE TABLE sortable ( @@ -624,8 +627,8 @@ INSERT INTO simple_primary_key VALUES (4, 'RENDER_CELL_DEMO'); INSERT INTO primary_key_multiple_columns VALUES (1, 'hey', 'world'); INSERT INTO primary_key_multiple_columns_explicit_label VALUES (1, 'hey', 'world2'); -INSERT INTO foreign_key_references VALUES (1, 1, 3, 1); -INSERT INTO foreign_key_references VALUES (2, null, null, null); +INSERT INTO foreign_key_references VALUES (1, 1, 3, 1, 'a', 'b'); +INSERT INTO foreign_key_references VALUES (2, null, null, null, null, null); INSERT INTO complex_foreign_keys VALUES (1, 1, 2, 1); INSERT INTO custom_foreign_key_label VALUES (1, 1); diff --git a/tests/test_api.py b/tests/test_api.py index 2bab6c30..848daf9c 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -237,6 +237,8 @@ def test_database_page(app_client): "foreign_key_with_label", "foreign_key_with_blank_label", "foreign_key_with_no_label", + "foreign_key_compound_pk1", + "foreign_key_compound_pk2", ], "primary_keys": ["pk"], "count": 2, @@ -1637,6 +1639,8 @@ def test_expand_label(app_client): "foreign_key_with_label": {"value": "1", "label": "hello"}, "foreign_key_with_blank_label": "3", "foreign_key_with_no_label": "1", + "foreign_key_compound_pk1": "a", + "foreign_key_compound_pk2": "b", } } @@ -1821,24 +1825,28 @@ def test_common_prefix_database_names(app_client_conflicting_database_names): assert db_name == data["database"] -def test_null_foreign_keys_are_not_expanded(app_client): +def 
test_null_and_compound_foreign_keys_are_not_expanded(app_client): response = app_client.get( "/fixtures/foreign_key_references.json?_shape=array&_labels=on" ) - assert [ + assert response.json == [ { "pk": "1", "foreign_key_with_label": {"value": "1", "label": "hello"}, "foreign_key_with_blank_label": {"value": "3", "label": ""}, "foreign_key_with_no_label": {"value": "1", "label": "1"}, + "foreign_key_compound_pk1": "a", + "foreign_key_compound_pk2": "b", }, { "pk": "2", "foreign_key_with_label": None, "foreign_key_with_blank_label": None, "foreign_key_with_no_label": None, + "foreign_key_compound_pk1": None, + "foreign_key_compound_pk2": None, }, - ] == response.json + ] def test_inspect_file_used_for_count(app_client_immutable_and_inspect_file): diff --git a/tests/test_csv.py b/tests/test_csv.py index 209bce2b..0fd665a9 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -42,9 +42,9 @@ pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,com ) EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV = """ -pk,foreign_key_with_label,foreign_key_with_label_label,foreign_key_with_blank_label,foreign_key_with_blank_label_label,foreign_key_with_no_label,foreign_key_with_no_label_label -1,1,hello,3,,1,1 -2,,,,,, +pk,foreign_key_with_label,foreign_key_with_label_label,foreign_key_with_blank_label,foreign_key_with_blank_label_label,foreign_key_with_no_label,foreign_key_with_no_label_label,foreign_key_compound_pk1,foreign_key_compound_pk2 +1,1,hello,3,,1,1,a,b +2,,,,,,,, """.lstrip().replace( "\n", "\r\n" ) diff --git a/tests/test_html.py b/tests/test_html.py index d53dbabc..ecbf89b4 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -804,12 +804,16 @@ def test_table_html_foreign_key_links(app_client): '
', '', '',
+            '',
+            '',
         ],
         [
             '',
             '',
             '',
             '',
+            '',
+            '',
         ],
     ]

@@ -836,6 +840,8 @@ def test_table_html_disable_foreign_key_links_with_labels(app_client):
             '',
             '',
             '',
+            '',
+            '',
         ]
     ]

diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py
index e5938f3b..7c8f478c 100644
--- a/tests/test_internals_database.py
+++ b/tests/test_internals_database.py
@@ -267,7 +267,7 @@ async def test_get_all_foreign_keys(db):
     all_foreign_keys = await db.get_all_foreign_keys()
-    assert {
+    assert all_foreign_keys["roadside_attraction_characteristics"] == {
         "incoming": [],
         "outgoing": [
             {
@@ -281,8 +281,8 @@
                 "other_column": "pk",
             },
         ],
-    } == all_foreign_keys["roadside_attraction_characteristics"]
-    assert {
+    }
+    assert all_foreign_keys["attraction_characteristic"] == {
         "incoming": [
             {
                 "other_table": "roadside_attraction_characteristics",
@@ -291,7 +291,32 @@
             }
         ],
         "outgoing": [],
-    } == all_foreign_keys["attraction_characteristic"]
+    }
+    assert all_foreign_keys["compound_primary_key"] == {
+        # No incoming because these are compound foreign keys, which we currently ignore
+        "incoming": [],
+        "outgoing": [],
+    }
+    assert all_foreign_keys["foreign_key_references"] == {
+        "incoming": [],
+        "outgoing": [
+            {
+                "other_table": "primary_key_multiple_columns",
+                "column": "foreign_key_with_no_label",
+                "other_column": "id",
+            },
+            {
+                "other_table": "simple_primary_key",
+                "column": "foreign_key_with_blank_label",
+                "other_column": "id",
+            },
+            {
+                "other_table": "simple_primary_key",
+                "column": "foreign_key_with_label",
+                "other_column": "id",
+            },
+        ],
+    }


 @pytest.mark.asyncio

From 242bc89fdf2e775e340d69a4e851b3a9accb31c6 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 29 Nov 2020 11:38:29 -0800
Subject: [PATCH 0301/1705] Release 0.52.1

Refs #1098, #1102, #1114
---
 datasette/version.py | 2 +-
 docs/changelog.rst   | 9 +++++++++
 2 files changed, 10 insertions(+), 1 deletion(-)

diff --git a/datasette/version.py b/datasette/version.py
index 3b84c97b..119295b3 100644
--- a/datasette/version.py
+++ b/datasette/version.py
@@ -1,2 +1,2 @@
-__version__ = "0.52"
+__version__ = "0.52.1"
 __version_info__ = tuple(__version__.split("."))

diff --git a/docs/changelog.rst b/docs/changelog.rst
index 49772638..a77cf5a5 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,15 @@
 Changelog
 =========

+.. _v0_52_1:
+
+0.52.1 (2020-11-29)
+-------------------
+
+- Documentation on :ref:`testing_plugins` now recommends using :ref:`internals_datasette_client`. (`#1102 `__)
+- Fix bug where compound foreign keys produced broken links. (`#1098 `__)
+- ``datasette --load-extension=spatialite`` now also checks for ``/usr/local/lib/mod_spatialite.so``. Thanks, Dan Peterson. (`#1114 `__)
+
 .. 
_v0_52: 0.52 (2020-11-28) From 09033c08bec8555e0e893e077afa10a7a75d7d35 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 29 Nov 2020 12:13:16 -0800 Subject: [PATCH 0302/1705] Suggest --load-extension=spatialite, closes #1115 --- datasette/cli.py | 12 ++++++++++-- tests/test_cli.py | 29 ++++++++++++++++++++++------- 2 files changed, 32 insertions(+), 9 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 5feab51e..e84695e3 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -16,6 +16,7 @@ from .app import Datasette, DEFAULT_SETTINGS, SETTINGS, pm from .utils import ( StartupError, check_connection, + find_spatialite, parse_metadata, ConnectionProblem, SpatialiteConnectionProblem, @@ -537,10 +538,17 @@ async def check_databases(ds): try: await database.execute_fn(check_connection) except SpatialiteConnectionProblem: + suggestion = "" + try: + find_spatialite() + suggestion = "\n\nTry adding the --load-extension=spatialite option." + except SpatialiteNotFound: + pass raise click.UsageError( "It looks like you're trying to load a SpatiaLite" - " database without first loading the SpatiaLite module." - "\n\nRead more: https://docs.datasette.io/en/stable/spatialite.html" + + " database without first loading the SpatiaLite module." + + suggestion + + "\n\nRead more: https://docs.datasette.io/en/stable/spatialite.html" ) except ConnectionProblem as e: raise click.UsageError( diff --git a/tests/test_cli.py b/tests/test_cli.py index 36b9a092..409408ae 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -59,13 +59,28 @@ def test_serve_with_inspect_file_prepopulates_table_counts_cache(): assert {"hithere": 44} == db.cached_table_counts -def test_spatialite_error_if_attempt_to_open_spatialite(): - runner = CliRunner() - result = runner.invoke( - cli, ["serve", str(pathlib.Path(__file__).parent / "spatialite.db")] - ) - assert result.exit_code != 0 - assert "trying to load a SpatiaLite database" in result.output +@pytest.mark.parametrize( + "spatialite_paths,should_suggest_load_extension", + ( + ([], False), + (["/tmp"], True), + ), +) +def test_spatialite_error_if_attempt_to_open_spatialite( + spatialite_paths, should_suggest_load_extension +): + with mock.patch("datasette.utils.SPATIALITE_PATHS", spatialite_paths): + runner = CliRunner() + result = runner.invoke( + cli, ["serve", str(pathlib.Path(__file__).parent / "spatialite.db")] + ) + assert result.exit_code != 0 + assert "It looks like you're trying to load a SpatiaLite" in result.output + suggestion = "--load-extension=spatialite" + if should_suggest_load_extension: + assert suggestion in result.output + else: + assert suggestion not in result.output @mock.patch("datasette.utils.SPATIALITE_PATHS", ["/does/not/exist"]) From 4777362bf2692bc72b221ec47c3e6216151d1b89 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 29 Nov 2020 12:19:24 -0800 Subject: [PATCH 0303/1705] Work around CI bug with ensure_eventloop, refs #1115 --- tests/test_cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_cli.py b/tests/test_cli.py index 409408ae..c52960fb 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -67,7 +67,7 @@ def test_serve_with_inspect_file_prepopulates_table_counts_cache(): ), ) def test_spatialite_error_if_attempt_to_open_spatialite( - spatialite_paths, should_suggest_load_extension + ensure_eventloop, spatialite_paths, should_suggest_load_extension ): with mock.patch("datasette.utils.SPATIALITE_PATHS", spatialite_paths): runner = CliRunner() From 
c745c2715ab5933d7629a76bab4684632383f807 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 29 Nov 2020 12:27:34 -0800 Subject: [PATCH 0304/1705] Moved comment for clarity --- datasette/database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/database.py b/datasette/database.py index ea1424a5..71c45ba0 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -247,12 +247,12 @@ class Database: ) if explicit_label_column: return explicit_label_column - # If a table has two columns, one of which is ID, then label_column is the other one column_names = await self.execute_fn(lambda conn: table_columns(conn, table)) # Is there a name or title column? name_or_title = [c for c in column_names if c in ("name", "title")] if name_or_title: return name_or_title[0] + # If a table has two columns, one of which is ID, then label_column is the other one if ( column_names and len(column_names) == 2 From 37f87b5e52e7f8ddd1c4ffcf368bd7a62a406a6d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 12:01:15 -0800 Subject: [PATCH 0305/1705] Support for generated columns, closes #1116 --- datasette/utils/__init__.py | 12 +++---- tests/test_api.py | 55 ++++++++++++++++++++++++++++++-- tests/test_internals_database.py | 17 ++++++++++ 3 files changed, 76 insertions(+), 8 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index d467383d..28df2ef1 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -64,7 +64,7 @@ HASH_LENGTH = 7 # Can replace this with Column from sqlite_utils when I add that dependency Column = namedtuple( - "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk") + "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk", "hidden") ) @@ -460,11 +460,11 @@ def detect_primary_keys(conn, table): " Figure out primary keys for a table. " table_info_rows = [ row - for row in conn.execute(f'PRAGMA table_info("{table}")').fetchall() - if row[-1] + for row in conn.execute(f'PRAGMA table_xinfo("{table}")').fetchall() + if row["pk"] ] - table_info_rows.sort(key=lambda row: row[-1]) - return [str(r[1]) for r in table_info_rows] + table_info_rows.sort(key=lambda row: row["pk"]) + return [str(r["name"]) for r in table_info_rows] def get_outbound_foreign_keys(conn, table): @@ -572,7 +572,7 @@ def table_columns(conn, table): def table_column_details(conn, table): return [ Column(*r) - for r in conn.execute(f"PRAGMA table_info({escape_sqlite(table)});").fetchall() + for r in conn.execute(f"PRAGMA table_xinfo({escape_sqlite(table)});").fetchall() ] diff --git a/tests/test_api.py b/tests/test_api.py index 848daf9c..ebe50d10 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,5 +1,6 @@ +from datasette.app import Datasette from datasette.plugins import DEFAULT_PLUGINS -from datasette.utils import detect_json1 +from datasette.utils import detect_json1, sqlite3 from datasette.version import __version__ from .fixtures import ( # noqa app_client, @@ -514,7 +515,14 @@ def test_database_page(app_client): }, { "name": "searchable_fts", - "columns": ["text1", "text2", "name with . and spaces"], + "columns": [ + "text1", + "text2", + "name with . 
and spaces", + "searchable_fts", + "docid", + "__langid", + ], "primary_keys": [], "count": 2, "hidden": True, @@ -1913,3 +1921,46 @@ def test_paginate_using_link_header(app_client, qs): else: path = None assert num_pages == 21 + + +@pytest.mark.skipif( + tuple( + map( + int, + sqlite3.connect(":memory:") + .execute("select sqlite_version()") + .fetchone()[0] + .split("."), + ) + ) + < (3, 31, 0), + reason="generated columns were added in SQLite 3.31.0", +) +@pytest.mark.asyncio +async def test_generated_columns_are_visible_in_datasette(tmp_path_factory): + db_directory = tmp_path_factory.mktemp("dbs") + db_path = db_directory / "test.db" + conn = sqlite3.connect(str(db_path)) + conn.executescript( + """ + CREATE TABLE deeds ( + body TEXT, + id INT GENERATED ALWAYS AS (json_extract(body, '$.id')) STORED, + consideration INT GENERATED ALWAYS AS (json_extract(body, '$.consideration')) STORED + ); + INSERT INTO deeds (body) VALUES ('{ + "id": 1, + "consideration": "This is the consideration" + }'); + """ + ) + datasette = Datasette([db_path]) + response = await datasette.client.get("/test/deeds.json?_shape=array") + assert response.json() == [ + { + "rowid": 1, + "body": '{\n "id": 1,\n "consideration": "This is the consideration"\n }', + "id": 1, + "consideration": "This is the consideration", + } + ] diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 7c8f478c..56397dab 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -120,6 +120,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, + hidden=0, ), Column( cid=1, @@ -128,6 +129,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=2, @@ -136,6 +138,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=3, @@ -144,6 +147,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=4, @@ -152,6 +156,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=5, @@ -160,6 +165,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=6, @@ -168,6 +174,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=7, @@ -176,6 +183,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=8, @@ -184,6 +192,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=9, @@ -192,6 +201,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), ], ), @@ -205,6 +215,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, + hidden=0, ), Column( cid=1, @@ -213,6 +224,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=2, + hidden=0, ), Column( cid=2, @@ -221,6 +233,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=3, @@ -229,6 +242,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=4, @@ -237,6 +251,7 @@ async def test_table_columns(db, table, expected): 
notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=5, @@ -245,6 +260,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=6, @@ -253,6 +269,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), ], ), From dea3c508b39528e566d711c38a467b3d372d220b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 12:09:22 -0800 Subject: [PATCH 0306/1705] Revert "Support for generated columns, closes #1116" - it failed CI This reverts commit 37f87b5e52e7f8ddd1c4ffcf368bd7a62a406a6d. --- datasette/utils/__init__.py | 12 +++---- tests/test_api.py | 55 ++------------------------------ tests/test_internals_database.py | 17 ---------- 3 files changed, 8 insertions(+), 76 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 28df2ef1..d467383d 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -64,7 +64,7 @@ HASH_LENGTH = 7 # Can replace this with Column from sqlite_utils when I add that dependency Column = namedtuple( - "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk", "hidden") + "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk") ) @@ -460,11 +460,11 @@ def detect_primary_keys(conn, table): " Figure out primary keys for a table. " table_info_rows = [ row - for row in conn.execute(f'PRAGMA table_xinfo("{table}")').fetchall() - if row["pk"] + for row in conn.execute(f'PRAGMA table_info("{table}")').fetchall() + if row[-1] ] - table_info_rows.sort(key=lambda row: row["pk"]) - return [str(r["name"]) for r in table_info_rows] + table_info_rows.sort(key=lambda row: row[-1]) + return [str(r[1]) for r in table_info_rows] def get_outbound_foreign_keys(conn, table): @@ -572,7 +572,7 @@ def table_columns(conn, table): def table_column_details(conn, table): return [ Column(*r) - for r in conn.execute(f"PRAGMA table_xinfo({escape_sqlite(table)});").fetchall() + for r in conn.execute(f"PRAGMA table_info({escape_sqlite(table)});").fetchall() ] diff --git a/tests/test_api.py b/tests/test_api.py index ebe50d10..848daf9c 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,6 +1,5 @@ -from datasette.app import Datasette from datasette.plugins import DEFAULT_PLUGINS -from datasette.utils import detect_json1, sqlite3 +from datasette.utils import detect_json1 from datasette.version import __version__ from .fixtures import ( # noqa app_client, @@ -515,14 +514,7 @@ def test_database_page(app_client): }, { "name": "searchable_fts", - "columns": [ - "text1", - "text2", - "name with . and spaces", - "searchable_fts", - "docid", - "__langid", - ], + "columns": ["text1", "text2", "name with . 
and spaces"], "primary_keys": [], "count": 2, "hidden": True, @@ -1921,46 +1913,3 @@ def test_paginate_using_link_header(app_client, qs): else: path = None assert num_pages == 21 - - -@pytest.mark.skipif( - tuple( - map( - int, - sqlite3.connect(":memory:") - .execute("select sqlite_version()") - .fetchone()[0] - .split("."), - ) - ) - < (3, 31, 0), - reason="generated columns were added in SQLite 3.31.0", -) -@pytest.mark.asyncio -async def test_generated_columns_are_visible_in_datasette(tmp_path_factory): - db_directory = tmp_path_factory.mktemp("dbs") - db_path = db_directory / "test.db" - conn = sqlite3.connect(str(db_path)) - conn.executescript( - """ - CREATE TABLE deeds ( - body TEXT, - id INT GENERATED ALWAYS AS (json_extract(body, '$.id')) STORED, - consideration INT GENERATED ALWAYS AS (json_extract(body, '$.consideration')) STORED - ); - INSERT INTO deeds (body) VALUES ('{ - "id": 1, - "consideration": "This is the consideration" - }'); - """ - ) - datasette = Datasette([db_path]) - response = await datasette.client.get("/test/deeds.json?_shape=array") - assert response.json() == [ - { - "rowid": 1, - "body": '{\n "id": 1,\n "consideration": "This is the consideration"\n }', - "id": 1, - "consideration": "This is the consideration", - } - ] diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 56397dab..7c8f478c 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -120,7 +120,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, - hidden=0, ), Column( cid=1, @@ -129,7 +128,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=2, @@ -138,7 +136,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=3, @@ -147,7 +144,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=4, @@ -156,7 +152,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=5, @@ -165,7 +160,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=6, @@ -174,7 +168,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=7, @@ -183,7 +176,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=8, @@ -192,7 +184,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=9, @@ -201,7 +192,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), ], ), @@ -215,7 +205,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, - hidden=0, ), Column( cid=1, @@ -224,7 +213,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=2, - hidden=0, ), Column( cid=2, @@ -233,7 +221,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=3, @@ -242,7 +229,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=4, @@ -251,7 +237,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), 
Column( cid=5, @@ -260,7 +245,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=6, @@ -269,7 +253,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), ], ), From 49b6297fb7513291110d86688c688700e6f6d9cc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 13:24:23 -0800 Subject: [PATCH 0307/1705] Typo fix: messagge_is_html, closes #1118 --- datasette/app.py | 2 +- datasette/views/base.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 88d5ecc6..922046d5 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1154,7 +1154,7 @@ class DatasetteRouter: status = exception.status info = exception.error_dict message = exception.message - if exception.messagge_is_html: + if exception.message_is_html: message = Markup(message) title = exception.title else: diff --git a/datasette/views/base.py b/datasette/views/base.py index bde8449f..5ba8fcb1 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -42,13 +42,13 @@ class DatasetteError(Exception): error_dict=None, status=500, template=None, - messagge_is_html=False, + message_is_html=False, ): self.message = message self.title = title self.error_dict = error_dict or {} self.status = status - self.messagge_is_html = messagge_is_html + self.message_is_html = message_is_html class BaseView: @@ -441,7 +441,7 @@ class DataView(BaseView): """, title="SQL Interrupted", status=400, - messagge_is_html=True, + message_is_html=True, ) except (sqlite3.OperationalError, InvalidSql) as e: raise DatasetteError(str(e), title="Invalid SQL", status=400) From 461670a0b87efa953141b449a9a261919864ceb3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 13:29:57 -0800 Subject: [PATCH 0308/1705] Support for generated columns * Support for generated columns, closes #1116 * Show SQLite version in pytest report header * Use table_info() if SQLite < 3.26.0 * Cache sqlite_version() rather than re-calculate every time * Adjust test_database_page for SQLite 3.26.0 or higher --- datasette/utils/__init__.py | 41 +++++++++++++------------ datasette/utils/sqlite.py | 28 ++++++++++++++++++ tests/conftest.py | 11 +++++++ tests/fixtures.py | 2 +- tests/test_api.py | 51 +++++++++++++++++++++++++++++++- tests/test_config_dir.py | 2 +- tests/test_internals_database.py | 20 ++++++++++++- tests/test_plugins.py | 4 +-- tests/test_utils.py | 2 +- 9 files changed, 135 insertions(+), 26 deletions(-) create mode 100644 datasette/utils/sqlite.py diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index d467383d..b951539d 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -19,15 +19,9 @@ import urllib import numbers import yaml from .shutil_backport import copytree +from .sqlite import sqlite3, sqlite_version from ..plugins import pm -try: - import pysqlite3 as sqlite3 -except ImportError: - import sqlite3 - -if hasattr(sqlite3, "enable_callback_tracebacks"): - sqlite3.enable_callback_tracebacks(True) # From https://www.sqlite.org/lang_keywords.html reserved_words = set( @@ -64,7 +58,7 @@ HASH_LENGTH = 7 # Can replace this with Column from sqlite_utils when I add that dependency Column = namedtuple( - "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk") + "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk", "hidden") ) @@ -458,13 +452,10 @@ def temporary_docker_directory( def 
detect_primary_keys(conn, table): " Figure out primary keys for a table. " - table_info_rows = [ - row - for row in conn.execute(f'PRAGMA table_info("{table}")').fetchall() - if row[-1] - ] - table_info_rows.sort(key=lambda row: row[-1]) - return [str(r[1]) for r in table_info_rows] + columns = table_column_details(conn, table) + pks = [column for column in columns if column.is_pk] + pks.sort(key=lambda column: column.is_pk) + return [column.name for column in pks] def get_outbound_foreign_keys(conn, table): @@ -570,10 +561,22 @@ def table_columns(conn, table): def table_column_details(conn, table): - return [ - Column(*r) - for r in conn.execute(f"PRAGMA table_info({escape_sqlite(table)});").fetchall() - ] + if sqlite_version() >= (3, 26, 0): + # table_xinfo was added in 3.26.0 + return [ + Column(*r) + for r in conn.execute( + f"PRAGMA table_xinfo({escape_sqlite(table)});" + ).fetchall() + ] + else: + # Treat hidden as 0 for all columns + return [ + Column(*(list(r) + [0])) + for r in conn.execute( + f"PRAGMA table_info({escape_sqlite(table)});" + ).fetchall() + ] filter_column_re = re.compile(r"^_filter_column_\d+$") diff --git a/datasette/utils/sqlite.py b/datasette/utils/sqlite.py new file mode 100644 index 00000000..9a043ccd --- /dev/null +++ b/datasette/utils/sqlite.py @@ -0,0 +1,28 @@ +try: + import pysqlite3 as sqlite3 +except ImportError: + import sqlite3 + +if hasattr(sqlite3, "enable_callback_tracebacks"): + sqlite3.enable_callback_tracebacks(True) + +_cached_sqlite_version = None + + +def sqlite_version(): + global _cached_sqlite_version + if _cached_sqlite_version is None: + _cached_sqlite_version = _sqlite_version() + return _cached_sqlite_version + + +def _sqlite_version(): + return tuple( + map( + int, + sqlite3.connect(":memory:") + .execute("select sqlite_version()") + .fetchone()[0] + .split("."), + ) + ) diff --git a/tests/conftest.py b/tests/conftest.py index 91b811e2..a963a4fd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,6 +3,11 @@ import pathlib import pytest import re +try: + import pysqlite3 as sqlite3 +except ImportError: + import sqlite3 + UNDOCUMENTED_PERMISSIONS = { "this_is_allowed", "this_is_denied", @@ -12,6 +17,12 @@ UNDOCUMENTED_PERMISSIONS = { } +def pytest_report_header(config): + return "SQLite: {}".format( + sqlite3.connect(":memory:").execute("select sqlite_version()").fetchone()[0] + ) + + def pytest_configure(config): import sys diff --git a/tests/fixtures.py b/tests/fixtures.py index f95a2d6b..b0c98f39 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -1,5 +1,5 @@ from datasette.app import Datasette -from datasette.utils import sqlite3 +from datasette.utils.sqlite import sqlite3 from datasette.utils.testing import TestClient import click import contextlib diff --git a/tests/test_api.py b/tests/test_api.py index 848daf9c..5676622e 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,5 +1,7 @@ +from datasette.app import Datasette from datasette.plugins import DEFAULT_PLUGINS from datasette.utils import detect_json1 +from datasette.utils.sqlite import sqlite3, sqlite_version from datasette.version import __version__ from .fixtures import ( # noqa app_client, @@ -514,7 +516,20 @@ def test_database_page(app_client): }, { "name": "searchable_fts", - "columns": ["text1", "text2", "name with . and spaces"], + "columns": [ + "text1", + "text2", + "name with . 
and spaces", + ] + + ( + [ + "searchable_fts", + "docid", + "__langid", + ] + if sqlite_version() >= (3, 26, 0) + else [] + ), "primary_keys": [], "count": 2, "hidden": True, @@ -1913,3 +1928,37 @@ def test_paginate_using_link_header(app_client, qs): else: path = None assert num_pages == 21 + + +@pytest.mark.skipif( + sqlite_version() < (3, 31, 0), + reason="generated columns were added in SQLite 3.31.0", +) +@pytest.mark.asyncio +async def test_generated_columns_are_visible_in_datasette(tmp_path_factory): + db_directory = tmp_path_factory.mktemp("dbs") + db_path = db_directory / "test.db" + conn = sqlite3.connect(str(db_path)) + conn.executescript( + """ + CREATE TABLE deeds ( + body TEXT, + id INT GENERATED ALWAYS AS (json_extract(body, '$.id')) STORED, + consideration INT GENERATED ALWAYS AS (json_extract(body, '$.consideration')) STORED + ); + INSERT INTO deeds (body) VALUES ('{ + "id": 1, + "consideration": "This is the consideration" + }'); + """ + ) + datasette = Datasette([db_path]) + response = await datasette.client.get("/test/deeds.json?_shape=array") + assert response.json() == [ + { + "rowid": 1, + "body": '{\n "id": 1,\n "consideration": "This is the consideration"\n }', + "id": 1, + "consideration": "This is the consideration", + } + ] diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index cd158474..015c6ace 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -1,9 +1,9 @@ import json import pytest -import sqlite3 from datasette.app import Datasette from datasette.cli import cli +from datasette.utils.sqlite import sqlite3 from .fixtures import TestClient as _TestClient from click.testing import CliRunner diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 7c8f478c..e50cf20e 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -2,7 +2,8 @@ Tests for the datasette.database.Database class """ from datasette.database import Database, Results, MultipleValues -from datasette.utils import sqlite3, Column +from datasette.utils.sqlite import sqlite3 +from datasette.utils import Column from .fixtures import app_client import pytest import time @@ -120,6 +121,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, + hidden=0, ), Column( cid=1, @@ -128,6 +130,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=2, @@ -136,6 +139,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=3, @@ -144,6 +148,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=4, @@ -152,6 +157,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=5, @@ -160,6 +166,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=6, @@ -168,6 +175,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=7, @@ -176,6 +184,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=8, @@ -184,6 +193,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=9, @@ -192,6 +202,7 @@ async def test_table_columns(db, table, expected): notnull=0, 
default_value=None, is_pk=0, + hidden=0, ), ], ), @@ -205,6 +216,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, + hidden=0, ), Column( cid=1, @@ -213,6 +225,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=2, + hidden=0, ), Column( cid=2, @@ -221,6 +234,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=3, @@ -229,6 +243,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=4, @@ -237,6 +252,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=5, @@ -245,6 +261,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=6, @@ -253,6 +270,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), ], ), diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 51faeccb..4554cfd4 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -9,14 +9,14 @@ from .fixtures import ( from datasette.app import Datasette from datasette import cli from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm -from datasette.utils import sqlite3, CustomRow +from datasette.utils.sqlite import sqlite3 +from datasette.utils import CustomRow from jinja2.environment import Template import base64 import json import os import pathlib import re -import sqlite3 import textwrap import pytest import urllib diff --git a/tests/test_utils.py b/tests/test_utils.py index 07e6f870..56306339 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -4,11 +4,11 @@ Tests for various datasette helper functions. 
from datasette.app import Datasette from datasette import utils from datasette.utils.asgi import Request +from datasette.utils.sqlite import sqlite3 import json import os import pathlib import pytest -import sqlite3 import tempfile from unittest.mock import patch From 17cbbb1f7f230b39650afac62dd16476626001b5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 16:28:02 -0800 Subject: [PATCH 0309/1705] generated_columns table in fixtures.py, closes #1119 --- datasette/utils/__init__.py | 4 +- datasette/utils/sqlite.py | 8 ++++ tests/fixtures.py | 19 ++++++++- tests/test_api.py | 51 ++++++++++++----------- tests/test_internals_database.py | 70 +++++++++++++++++--------------- tests/test_plugins.py | 6 +-- 6 files changed, 93 insertions(+), 65 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index b951539d..2576090a 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -19,7 +19,7 @@ import urllib import numbers import yaml from .shutil_backport import copytree -from .sqlite import sqlite3, sqlite_version +from .sqlite import sqlite3, sqlite_version, supports_table_xinfo from ..plugins import pm @@ -561,7 +561,7 @@ def table_columns(conn, table): def table_column_details(conn, table): - if sqlite_version() >= (3, 26, 0): + if supports_table_xinfo(): # table_xinfo was added in 3.26.0 return [ Column(*r) diff --git a/datasette/utils/sqlite.py b/datasette/utils/sqlite.py index 9a043ccd..c8522f35 100644 --- a/datasette/utils/sqlite.py +++ b/datasette/utils/sqlite.py @@ -26,3 +26,11 @@ def _sqlite_version(): .split("."), ) ) + + +def supports_table_xinfo(): + return sqlite_version() >= (3, 26, 0) + + +def supports_generated_columns(): + return sqlite_version() >= (3, 31, 0) diff --git a/tests/fixtures.py b/tests/fixtures.py index b0c98f39..b52a531f 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -1,5 +1,5 @@ from datasette.app import Datasette -from datasette.utils.sqlite import sqlite3 +from datasette.utils.sqlite import sqlite3, sqlite_version, supports_generated_columns from datasette.utils.testing import TestClient import click import contextlib @@ -116,6 +116,8 @@ def make_app_client( immutables = [] conn = sqlite3.connect(filepath) conn.executescript(TABLES) + if supports_generated_columns(): + conn.executescript(GENERATED_COLUMNS_SQL) for sql, params in TABLE_PARAMETERIZED_SQL: with conn: conn.execute(sql, params) @@ -699,6 +701,18 @@ INSERT INTO "searchable_fts" (rowid, text1, text2) SELECT rowid, text1, text2 FROM searchable; """ +GENERATED_COLUMNS_SQL = """ +CREATE TABLE generated_columns ( + body TEXT, + id INT GENERATED ALWAYS AS (json_extract(body, '$.number')) STORED, + consideration INT GENERATED ALWAYS AS (json_extract(body, '$.string')) STORED +); +INSERT INTO generated_columns (body) VALUES ('{ + "number": 1, + "string": "This is a string" +}'); +""" + def assert_permissions_checked(datasette, actions): # actions is a list of "action" or (action, resource) tuples @@ -754,6 +768,9 @@ def cli(db_filename, metadata, plugins_path, recreate): for sql, params in TABLE_PARAMETERIZED_SQL: with conn: conn.execute(sql, params) + if supports_generated_columns(): + with conn: + conn.executescript(GENERATED_COLUMNS_SQL) print(f"Test tables written to {db_filename}") if metadata: open(metadata, "w").write(json.dumps(METADATA, indent=4)) diff --git a/tests/test_api.py b/tests/test_api.py index 5676622e..f82a8fe9 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,7 +1,7 @@ from datasette.app import 
Datasette from datasette.plugins import DEFAULT_PLUGINS from datasette.utils import detect_json1 -from datasette.utils.sqlite import sqlite3, sqlite_version +from datasette.utils.sqlite import sqlite3, sqlite_version, supports_table_xinfo from datasette.version import __version__ from .fixtures import ( # noqa app_client, @@ -19,6 +19,7 @@ from .fixtures import ( # noqa generate_compound_rows, generate_sortable_rows, make_app_client, + supports_generated_columns, EXPECTED_PLUGINS, METADATA, ) @@ -35,7 +36,7 @@ def test_homepage(app_client): assert response.json.keys() == {"fixtures": 0}.keys() d = response.json["fixtures"] assert d["name"] == "fixtures" - assert d["tables_count"] == 24 + assert d["tables_count"] == 25 if supports_generated_columns() else 24 assert len(d["tables_and_views_truncated"]) == 5 assert d["tables_and_views_more"] is True # 4 hidden FTS tables + no_primary_key (hidden in metadata) @@ -268,6 +269,22 @@ def test_database_page(app_client): }, "private": False, }, + ] + ( + [ + { + "columns": ["body", "id", "consideration"], + "count": 1, + "foreign_keys": {"incoming": [], "outgoing": []}, + "fts_table": None, + "hidden": False, + "name": "generated_columns", + "primary_keys": [], + "private": False, + } + ] + if supports_generated_columns() + else [] + ) + [ { "name": "infinity", "columns": ["value"], @@ -527,7 +544,7 @@ def test_database_page(app_client): "docid", "__langid", ] - if sqlite_version() >= (3, 26, 0) + if supports_table_xinfo() else [] ), "primary_keys": [], @@ -1934,31 +1951,13 @@ def test_paginate_using_link_header(app_client, qs): sqlite_version() < (3, 31, 0), reason="generated columns were added in SQLite 3.31.0", ) -@pytest.mark.asyncio -async def test_generated_columns_are_visible_in_datasette(tmp_path_factory): - db_directory = tmp_path_factory.mktemp("dbs") - db_path = db_directory / "test.db" - conn = sqlite3.connect(str(db_path)) - conn.executescript( - """ - CREATE TABLE deeds ( - body TEXT, - id INT GENERATED ALWAYS AS (json_extract(body, '$.id')) STORED, - consideration INT GENERATED ALWAYS AS (json_extract(body, '$.consideration')) STORED - ); - INSERT INTO deeds (body) VALUES ('{ - "id": 1, - "consideration": "This is the consideration" - }'); - """ - ) - datasette = Datasette([db_path]) - response = await datasette.client.get("/test/deeds.json?_shape=array") +async def test_generated_columns_are_visible_in_datasette(app_client): + response = app_client.get("/test/generated_columns.json?_shape=array") assert response.json() == [ { "rowid": 1, - "body": '{\n "id": 1,\n "consideration": "This is the consideration"\n }', - "id": 1, - "consideration": "This is the consideration", + "body": '{\n "number": 1,\n "string": "This is a string"\n }', + "number": 1, + "string": "This is a string", } ] diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index e50cf20e..49b8a1b3 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -2,7 +2,7 @@ Tests for the datasette.database.Database class """ from datasette.database import Database, Results, MultipleValues -from datasette.utils.sqlite import sqlite3 +from datasette.utils.sqlite import sqlite3, supports_generated_columns from datasette.utils import Column from .fixtures import app_client import pytest @@ -340,38 +340,42 @@ async def test_get_all_foreign_keys(db): @pytest.mark.asyncio async def test_table_names(db): table_names = await db.table_names() - assert table_names == [ - "simple_primary_key", - "primary_key_multiple_columns", - 
"primary_key_multiple_columns_explicit_label", - "compound_primary_key", - "compound_three_primary_keys", - "foreign_key_references", - "sortable", - "no_primary_key", - "123_starts_with_digits", - "Table With Space In Name", - "table/with/slashes.csv", - "complex_foreign_keys", - "custom_foreign_key_label", - "units", - "tags", - "searchable", - "searchable_tags", - "searchable_fts", - "searchable_fts_segments", - "searchable_fts_segdir", - "searchable_fts_docsize", - "searchable_fts_stat", - "select", - "infinity", - "facet_cities", - "facetable", - "binary_data", - "roadside_attractions", - "attraction_characteristic", - "roadside_attraction_characteristics", - ] + assert ( + table_names + == [ + "simple_primary_key", + "primary_key_multiple_columns", + "primary_key_multiple_columns_explicit_label", + "compound_primary_key", + "compound_three_primary_keys", + "foreign_key_references", + "sortable", + "no_primary_key", + "123_starts_with_digits", + "Table With Space In Name", + "table/with/slashes.csv", + "complex_foreign_keys", + "custom_foreign_key_label", + "units", + "tags", + "searchable", + "searchable_tags", + "searchable_fts", + "searchable_fts_segments", + "searchable_fts_segdir", + "searchable_fts_docsize", + "searchable_fts_stat", + "select", + "infinity", + "facet_cities", + "facetable", + "binary_data", + "roadside_attractions", + "attraction_characteristic", + "roadside_attraction_characteristics", + ] + + (["generated_columns"] if supports_generated_columns() else []) + ) @pytest.mark.asyncio diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 4554cfd4..dab5ef68 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -413,8 +413,7 @@ def test_hook_register_output_renderer_all_parameters(app_client): # Lots of 'at 0x103a4a690' in here - replace those so we can do # an easy comparison body = at_memory_re.sub(" at 0xXXX", response.text) - assert { - "1+1": 2, + assert json.loads(body) == { "datasette": "", "columns": [ "pk", @@ -451,7 +450,8 @@ def test_hook_register_output_renderer_all_parameters(app_client): "table": "facetable", "request": "", "view_name": "table", - } == json.loads(body) + "1+1": 2, + } # Test that query_name is set correctly query_response = app_client.get("/fixtures/pragma_cache_size.testall") assert "pragma_cache_size" == json.loads(query_response.body)["query_name"] From a970276b9999687b96c5e11ea1c817d814f5d267 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 17:19:09 -0800 Subject: [PATCH 0310/1705] Try pysqlite3 on latest.datasette.io --install=pysqlite3-binary to get a working demo of generated columns, refs #1119 --- .github/workflows/deploy-latest.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 7a41bda2..05f0bad1 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -54,6 +54,7 @@ jobs: --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1" \ + --install=pysqlite3-binary \ --service=datasette-latest # Deploy docs.db to a different service datasette publish cloudrun docs.db \ From 88ac538b41a4753c3de9b509c3a0e13077f66182 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 2 Dec 2020 15:47:37 -0800 Subject: [PATCH 0311/1705] transfer-encoding: chunked for DB downloads, refs #749 This should get >32MB downloads working on Cloud Run. 
--- datasette/views/database.py | 1 + tests/test_html.py | 1 + 2 files changed, 2 insertions(+) diff --git a/datasette/views/database.py b/datasette/views/database.py index 17c78150..f6fd579c 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -169,6 +169,7 @@ class DatabaseDownload(DataView): headers = {} if self.ds.cors: headers["Access-Control-Allow-Origin"] = "*" + headers["Transfer-Encoding"] = "chunked" return AsgiFileDownload( filepath, filename=os.path.basename(filepath), diff --git a/tests/test_html.py b/tests/test_html.py index ecbf89b4..b9d3afcd 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1028,6 +1028,7 @@ def test_database_download_for_immutable(): download_response.headers["content-disposition"] == 'attachment; filename="fixtures.db"' ) + assert download_response.headers["transfer-encoding"] == "chunked" def test_database_download_disallowed_for_mutable(app_client): From daae35be46ec5cb8a207aa20986a4fa62e94777e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Abdussamet=20Ko=C3=A7ak?= Date: Thu, 3 Dec 2020 03:33:36 +0300 Subject: [PATCH 0312/1705] Fix misaligned table actions cog Closes #1121. Thanks, @abdusco --- datasette/static/app.css | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index b9378a9e..9e498ab9 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -163,6 +163,8 @@ h6, } .page-header { + display: flex; + align-items: center; padding-left: 10px; border-left: 10px solid #666; margin-bottom: 0.75rem; @@ -175,11 +177,11 @@ h6, padding-right: 0.2em; } .page-header details { - display: inline; + display: inline-flex; } .page-header details > summary { list-style: none; - display: inline; + display: inline-flex; cursor: pointer; } .page-header details > summary::-webkit-details-marker { From a45a3dff3ea01a2382dcedae5923a7b821a12aec Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 2 Dec 2020 16:44:03 -0800 Subject: [PATCH 0313/1705] Fix for OPTIONS request against /db, closes #1100 --- datasette/utils/testing.py | 23 +++++++++++++++++++++++ datasette/views/base.py | 2 +- tests/test_api.py | 6 ++++++ 3 files changed, 30 insertions(+), 1 deletion(-) diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index bcbc1c7a..57b19ea5 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -99,6 +99,29 @@ class TestClient: content_type=content_type, ) + @async_to_sync + async def request( + self, + path, + allow_redirects=True, + redirect_count=0, + method="GET", + cookies=None, + headers=None, + post_body=None, + content_type=None, + ): + return await self._request( + path, + allow_redirects=allow_redirects, + redirect_count=redirect_count, + method=method, + cookies=cookies, + headers=headers, + post_body=post_body, + content_type=content_type, + ) + async def _request( self, path, diff --git a/datasette/views/base.py b/datasette/views/base.py index 5ba8fcb1..a93a6378 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -155,7 +155,7 @@ class DataView(BaseView): name = "" re_named_parameter = re.compile(":([a-zA-Z0-9_]+)") - def options(self, request, *args, **kwargs): + async def options(self, request, *args, **kwargs): r = Response.text("ok") if self.ds.cors: r.headers["Access-Control-Allow-Origin"] = "*" diff --git a/tests/test_api.py b/tests/test_api.py index f82a8fe9..016894b4 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1961,3 +1961,9 @@ async def 
test_generated_columns_are_visible_in_datasette(app_client): "string": "This is a string", } ] + + +def test_http_options_request(app_client): + response = app_client.request("/fixtures", method="OPTIONS") + assert response.status == 200 + assert response.text == "ok" From 13c960c03b46e35f3432063a19f3f528ca249e23 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 2 Dec 2020 16:49:43 -0800 Subject: [PATCH 0314/1705] Test is no longer order dependent, closes #1123 --- tests/test_plugins.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index dab5ef68..93b444ab 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -797,9 +797,11 @@ def test_hook_table_actions(app_client, table_or_view): assert get_table_actions_links(response.text) == [] response_2 = app_client.get(f"/fixtures/{table_or_view}?_bot=1") - assert get_table_actions_links(response_2.text) == [ - {"label": "From async", "href": "/"}, + assert sorted( + get_table_actions_links(response_2.text), key=lambda l: l["label"] + ) == [ {"label": "Database: fixtures", "href": "/"}, + {"label": "From async", "href": "/"}, {"label": f"Table: {table_or_view}", "href": "/"}, ] From e048791a9a2686f47d81a2c8aa88aa1966d82521 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 2 Dec 2020 16:57:40 -0800 Subject: [PATCH 0315/1705] Release 0.52.2 Refs #1116, #1115, #1100, #749, #1121 --- datasette/version.py | 2 +- docs/changelog.rst | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 119295b3..0353358a 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52.1" +__version__ = "0.52.2" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index a77cf5a5..6fb06beb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,17 @@ Changelog ========= +.. _v0_52_2: + +0.52.2 (2020-12-02) +------------------- + +- Generated columns from SQLite 3.31.0 or higher are now correctly displayed. (`#1116 `__) +- Error message if you attempt to open a SpatiaLite database now suggests using ``--load-extension=spatialite`` if it detects that the extension is available in a common location. (`#1115 `__) +- ``OPTIONS`` requests against the ``/database`` page no longer raise a 500 error. (`#1100 `__) +- Databases larger than 32MB that are published to Cloud Run can now be downloaded. (`#749 `__) +- Fix for misaligned cog icon on table and database pages. Thanks, Abdussamet Koçak. (`#1121 `__) + .. 
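On the #749 item: a chunked response carries no Content-Length header, which is what lets Cloud Run stream the database file instead of buffering it against its 32MB response limit. A quick client-side sanity check, sketched with httpx against a hypothetical deployment URL:

```python
import httpx

# Hypothetical Datasette instance; the .db download should now stream back
# with Transfer-Encoding: chunked rather than a buffered Content-Length.
with httpx.stream("GET", "https://example.com/fixtures.db") as response:
    assert response.headers.get("transfer-encoding") == "chunked"
    with open("fixtures.db", "wb") as fp:
        for chunk in response.iter_bytes():
            fp.write(chunk)
```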
_v0_52_1: 0.52.1 (2020-11-29) From 6b4c55efea3e9d34d92cbe5f0066553ad9b14071 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 10:53:26 -0800 Subject: [PATCH 0316/1705] Fix for Amazon Linux static assets 404ing, refs #1124 --- datasette/utils/asgi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index ce78a597..31b0bdcd 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -294,7 +294,7 @@ def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None): return # Ensure full_path is within root_path to avoid weird "../" tricks try: - full_path.relative_to(root_path) + full_path.relative_to(root_path.resolve()) except ValueError: await asgi_send_html(send, "404", 404) return From 63efcb35ce879fe68ee02411c8dd2fd5f127cc32 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 11:02:53 -0800 Subject: [PATCH 0317/1705] More tweaks to root_path handling, refs #1124 --- datasette/utils/asgi.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 31b0bdcd..3b41c2d7 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -282,10 +282,12 @@ async def asgi_send_file( def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None): + root_path = Path(root_path) + async def inner_static(request, send): path = request.scope["url_route"]["kwargs"]["path"] try: - full_path = (Path(root_path) / path).resolve().absolute() + full_path = (root_path / path).resolve().absolute() except FileNotFoundError: await asgi_send_html(send, "404", 404) return From ca6e8e53dc9b094a5ce169d81a69d872546e595a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 11:05:12 -0800 Subject: [PATCH 0318/1705] More helpful 404 messages, refs #1124 --- datasette/utils/asgi.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 3b41c2d7..363f059f 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -289,7 +289,7 @@ def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None): try: full_path = (root_path / path).resolve().absolute() except FileNotFoundError: - await asgi_send_html(send, "404", 404) + await asgi_send_html(send, "404: Directory not found", 404) return if full_path.is_dir(): await asgi_send_html(send, "403: Directory listing is not allowed", 403) @@ -298,12 +298,12 @@ def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None): try: full_path.relative_to(root_path.resolve()) except ValueError: - await asgi_send_html(send, "404", 404) + await asgi_send_html(send, "404: Path not inside root path", 404) return try: await asgi_send_file(send, full_path, chunk_size=chunk_size) except FileNotFoundError: - await asgi_send_html(send, "404", 404) + await asgi_send_html(send, "404: File not found", 404) return return inner_static From 4cce5516661b24afeddaf35bee84b00fbf5c7f89 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 11:07:05 -0800 Subject: [PATCH 0319/1705] Release 0.52.3 Refs #1124 --- datasette/version.py | 2 +- docs/changelog.rst | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 0353358a..ab02947d 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52.2" +__version__ = "0.52.3" __version_info__ = tuple(__version__.split(".")) 
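The root cause behind the #1124 commits above: `Path.resolve()` follows symlinks, so when the static root is reached through a symlink (evidently the situation on ARM Amazon Linux) the resolved file path no longer starts with the unresolved root, `relative_to()` raises `ValueError`, and the request surfaces as a 404. A self-contained sketch of the failure and the fix, using throwaway temp directories:

```python
import os
import tempfile
from pathlib import Path

# Recreate the layout: a real directory reached via a symlink.
real_dir = Path(tempfile.mkdtemp())
(real_dir / "app.css").write_text("body { margin: 0 }")
root_path = Path(tempfile.mkdtemp()) / "static"
os.symlink(real_dir, root_path)

full_path = (root_path / "app.css").resolve()  # resolves through the symlink

try:
    full_path.relative_to(root_path)  # the pre-0.52.3 containment check
except ValueError:
    print("check fails against the unresolved root -> spurious 404")

print(full_path.relative_to(root_path.resolve()))  # passes once both sides resolve
```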
diff --git a/docs/changelog.rst b/docs/changelog.rst index 6fb06beb..4fa7609c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,13 @@ Changelog ========= +.. _v0_52_3: + +0.52.3 (2020-12-03) +------------------- + +- Fixed bug where static assets would 404 for Datasette installed on ARM Amazon Linux. (`#1124 `__) + .. _v0_52_2: 0.52.2 (2020-12-02) From 00185af74a91646d47aa54f2369c1a19a6f76a27 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 14:08:50 -0800 Subject: [PATCH 0320/1705] Show pysqlite3 version on /-/versions, if installed - #1125 --- datasette/app.py | 14 ++++++++++++-- datasette/utils/sqlite.py | 3 +++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 922046d5..b2f16257 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -9,6 +9,7 @@ import inspect from itsdangerous import BadSignature import json import os +import pkg_resources import re import secrets import sys @@ -57,7 +58,6 @@ from .utils import ( module_from_path, parse_metadata, resolve_env_secrets, - sqlite3, to_css_class, HASH_LENGTH, ) @@ -74,6 +74,10 @@ from .utils.asgi import ( asgi_send_json, asgi_send_redirect, ) +from .utils.sqlite import ( + sqlite3, + using_pysqlite3, +) from .tracer import AsgiTracer from .plugins import pm, DEFAULT_PLUGINS, get_plugins from .version import __version__ @@ -619,7 +623,7 @@ class Datasette: datasette_version = {"version": __version__} if self.version_note: datasette_version["note"] = self.version_note - return { + info = { "python": { "version": ".".join(map(str, sys.version_info[:3])), "full": sys.version, @@ -636,6 +640,12 @@ class Datasette: ], }, } + if using_pysqlite3: + try: + info["pysqlite3"] = pkg_resources.get_distribution("pysqlite3").version + except pkg_resources.DistributionNotFound: + pass + return info def _plugins(self, request=None, all=False): ps = list(get_plugins()) diff --git a/datasette/utils/sqlite.py b/datasette/utils/sqlite.py index c8522f35..342ff3fa 100644 --- a/datasette/utils/sqlite.py +++ b/datasette/utils/sqlite.py @@ -1,5 +1,8 @@ +using_pysqlite3 = False try: import pysqlite3 as sqlite3 + + using_pysqlite3 = True except ImportError: import sqlite3 From e2fea36540e952d8d72c1bd0af7144b85b7a4671 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 19:12:33 -0800 Subject: [PATCH 0321/1705] Switch to google-github-actions/setup-gcloud - refs #1126 --- .github/workflows/deploy-latest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 05f0bad1..2de0a8b6 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -39,7 +39,7 @@ jobs: sphinx-to-sqlite ../docs.db _build cd .. 
- name: Set up Cloud Run - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master + uses: google-github-actions/setup-gcloud@master with: version: '275.0.0' service_account_email: ${{ secrets.GCP_SA_EMAIL }} From 49d8fc056844d5a537d6cfd96dab0dd5686fe718 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 20:07:10 -0800 Subject: [PATCH 0322/1705] Try pysqlite3-binary version as well, refs #1125 --- datasette/app.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index b2f16257..9bc84df0 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -641,10 +641,12 @@ class Datasette: }, } if using_pysqlite3: - try: - info["pysqlite3"] = pkg_resources.get_distribution("pysqlite3").version - except pkg_resources.DistributionNotFound: - pass + for package in ("pysqlite3", "pysqlite3-binary"): + try: + info["pysqlite3"] = pkg_resources.get_distribution(package).version + break + except pkg_resources.DistributionNotFound: + pass return info def _plugins(self, request=None, all=False): From 42efb799ea9b362f0c7598f3ff3c4bf46c18e53f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 4 Dec 2020 21:20:12 -0800 Subject: [PATCH 0323/1705] Fixed invalid test for generated columns, refs #1119 --- tests/test_api.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index 016894b4..4339507c 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1951,14 +1951,14 @@ def test_paginate_using_link_header(app_client, qs): sqlite_version() < (3, 31, 0), reason="generated columns were added in SQLite 3.31.0", ) -async def test_generated_columns_are_visible_in_datasette(app_client): - response = app_client.get("/test/generated_columns.json?_shape=array") - assert response.json() == [ +def test_generated_columns_are_visible_in_datasette(app_client): + response = app_client.get("/fixtures/generated_columns.json?_shape=array") + assert response.json == [ { "rowid": 1, - "body": '{\n "number": 1,\n "string": "This is a string"\n }', - "number": 1, - "string": "This is a string", + "body": '{\n "number": 1,\n "string": "This is a string"\n}', + "id": 1, + "consideration": "This is a string", } ] From eae103a82b92949189cf718794d2ad0424005460 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 4 Dec 2020 21:21:11 -0800 Subject: [PATCH 0324/1705] Write errors to stderr, closes #1131 --- datasette/database.py | 10 ++++++---- datasette/renderer.py | 1 - datasette/views/base.py | 4 +++- tests/test_cli.py | 7 +++++++ 4 files changed, 16 insertions(+), 6 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 71c45ba0..412e0c59 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -1,8 +1,8 @@ import asyncio -import contextlib from pathlib import Path import janus import queue +import sys import threading import uuid @@ -104,7 +104,8 @@ class Database: try: result = task.fn(conn) except Exception as e: - print(e) + sys.stderr.write("{}\n".format(e)) + sys.stderr.flush() result = e task.reply_queue.sync_q.put(result) @@ -156,11 +157,12 @@ class Database: if e.args == ("interrupted",): raise QueryInterrupted(e, sql, params) if log_sql_errors: - print( - "ERROR: conn={}, sql = {}, params = {}: {}".format( + sys.stderr.write( + "ERROR: conn={}, sql = {}, params = {}: {}\n".format( conn, repr(sql), params, e ) ) + sys.stderr.flush() raise if truncate: diff --git a/datasette/renderer.py b/datasette/renderer.py index d779b44f..258199fc 100644 
--- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -20,7 +20,6 @@ def convert_specific_columns_to_json(rows, columns, json_cols): try: value = json.loads(value) except (TypeError, ValueError) as e: - print(e) pass new_row.append(value) new_rows.append(new_row) diff --git a/datasette/views/base.py b/datasette/views/base.py index a93a6378..b8860b74 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -2,6 +2,7 @@ import asyncio import csv import hashlib import re +import sys import time import urllib @@ -362,7 +363,8 @@ class DataView(BaseView): new_row.append(cell) await writer.writerow(new_row) except Exception as e: - print("caught this", e) + sys.stderr.write("Caught this error: {}\n".format(e)) + sys.stderr.flush() await r.write(str(e)) return diff --git a/tests/test_cli.py b/tests/test_cli.py index c52960fb..a0ac7d7a 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -214,3 +214,10 @@ def test_config_deprecated(ensure_eventloop): assert result.exit_code == 0 assert not json.loads(result.output)["allow_download"] assert "will be deprecated in" in result.stderr + + +def test_sql_errors_logged_to_stderr(ensure_eventloop): + runner = CliRunner(mix_stderr=False) + result = runner.invoke(cli, ["--get", "/:memory:.json?sql=select+blah"]) + assert result.exit_code == 1 + assert "sql = 'select blah', params = {}: no such column: blah\n" in result.stderr From 705d1a1555c4791e9be3b884285b047223ab184f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Abdussamet=20Ko=C3=A7ak?= Date: Sat, 5 Dec 2020 22:35:03 +0300 Subject: [PATCH 0325/1705] Fix startup error on windows (#1128) Fixes https://github.com/simonw/datasette/issues/1094 This import isn't used at all, and causes error on startup on Windows. --- datasette/utils/asgi.py | 1 - 1 file changed, 1 deletion(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 363f059f..fc9adcff 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -1,5 +1,4 @@ import json -from os import EX_CANTCREAT from datasette.utils import MultiParams from mimetypes import guess_type from urllib.parse import parse_qs, urlunparse, parse_qsl From 2dc281645a76c550789ede80c1bc6f733fa9a82e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 5 Dec 2020 11:41:40 -0800 Subject: [PATCH 0326/1705] Release 0.52.4 Refs #1125, #1131, #1094 --- datasette/version.py | 2 +- docs/changelog.rst | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index ab02947d..ce06fe1d 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52.3" +__version__ = "0.52.4" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 4fa7609c..a9922ab3 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,15 @@ Changelog ========= +.. _v0_52_4: + +0.52.4 (2020-12-05) +------------------- + +- Show `pysqlite3 `__ version on ``/-/versions``, if installed. (`#1125 `__) +- Errors output by Datasette (e.g. for invalid SQL queries) now go to ``stderr``, not ``stdout``. (`#1131 `__) +- Fix for a startup error on windows caused by unneccessary ``from os import EX_CANTCREAT`` - thanks, Abdussamet Koçak. (`#1094 `__) + .. 
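A note on the #1131 change: with diagnostics on stderr, anything consuming Datasette's stdout (such as shell pipelines around `datasette --get`) no longer gets error text mixed into its data stream. Roughly the pattern the diff settles on (illustrative helper, not Datasette API):

```python
import sys

# Errors go to stderr so stdout stays reserved for real output; the explicit
# flush() makes the message visible even if the process exits right after.
def log_sql_error(exception, sql, params):
    sys.stderr.write(f"ERROR: sql = {sql!r}, params = {params}: {exception}\n")
    sys.stderr.flush()
```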
_v0_52_3: 0.52.3 (2020-12-03) From e5930e6f889617320454ab53ecc1c438377d49e6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 5 Dec 2020 11:42:42 -0800 Subject: [PATCH 0327/1705] Typo fix in release notes --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index a9922ab3..86d844f7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -11,7 +11,7 @@ Changelog - Show `pysqlite3 `__ version on ``/-/versions``, if installed. (`#1125 `__) - Errors output by Datasette (e.g. for invalid SQL queries) now go to ``stderr``, not ``stdout``. (`#1131 `__) -- Fix for a startup error on windows caused by unneccessary ``from os import EX_CANTCREAT`` - thanks, Abdussamet Koçak. (`#1094 `__) +- Fix for a startup error on windows caused by unnecessary ``from os import EX_CANTCREAT`` - thanks, Abdussamet Koçak. (`#1094 `__) .. _v0_52_3: From e3143700a245d87bc532d44867b2e380b4225324 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 7 Dec 2020 11:00:10 -0800 Subject: [PATCH 0328/1705] Custom template for docs, linking to datasette.io --- docs/_templates/layout.html | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 docs/_templates/layout.html diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html new file mode 100644 index 00000000..b7b6f794 --- /dev/null +++ b/docs/_templates/layout.html @@ -0,0 +1,23 @@ +{%- extends "!layout.html" %} + +{% block sidebartitle %} + + + + + +{% if theme_display_version %} + {%- set nav_version = version %} + {% if READTHEDOCS and current_version %} + {%- set nav_version = current_version %} + {% endif %} + {% if nav_version %} +
+      {{ nav_version }}
+    </div>
    + {% endif %} +{% endif %} + +{% include "searchbox.html" %} + +{% endblock %} From 62a6f70c64e4d04c15d9f386dcdf9cd465bbb0f6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 7 Dec 2020 12:10:05 -0800 Subject: [PATCH 0329/1705] Fixed Markdown indentation of news To make it easier to programmatically extract. --- README.md | 76 +++++++++++++++++++++++++++---------------------------- 1 file changed, 38 insertions(+), 38 deletions(-) diff --git a/README.md b/README.md index c0019e9b..89245cf1 100644 --- a/README.md +++ b/README.md @@ -25,53 +25,53 @@ Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly new ## News - * 28th November 2020: [Datasette 0.52](https://docs.datasette.io/en/stable/changelog.html#v0-52) - `--config` is now `--setting`, new `database_actions` plugin hook, `datasette publish cloudrun --apt-get-install` option and several bug fixes. - * 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. [Annotated release notes](https://simonwillison.net/2020/Nov/1/datasette-0-51/). - * 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). - * 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). - * 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. - * 11th August 2020: [Datasette 0.47](https://docs.datasette.io/en/stable/changelog.html#v0-47) - Datasette can now be installed using Homebrew! `brew install simonw/datasette/datasette`. Also new: `datasette install name-of-plugin` and `datasette uninstall name-of-plugin` commands, and `datasette --get '/-/versions.json'` to output the result of Datasette HTTP calls on the command-line. - * 9th August 2020: [Datasette 0.46](https://docs.datasette.io/en/stable/changelog.html#v0-46) - security fix relating to CSRF protection for writable canned queries, a new logo, new debugging tools, improved file downloads and more. - * 6th August 2020: [GraphQL in Datasette with the new datasette-graphql plugin](https://simonwillison.net/2020/Aug/7/datasette-graphql/) - * 24th July 2020: Two new plugins: [datasette-copyable and datasette-insert-api](https://simonwillison.net/2020/Jul/23/datasette-copyable-datasette-insert-api/). `datasette-copyable` adds copy-and-paste export options, and `datasette-insert-api` lets you create tables and insert or update data by POSTing JSON directly to Datasette. 
- * 1st July 2020: [Datasette 0.45](https://docs.datasette.io/en/stable/changelog.html#v0-45) - [Magic parameters for canned queries](https://docs.datasette.io/en/stable/sql_queries.html#canned-queries-magic-parameters), a log out feature, improved plugin documentation and four new plugin hooks. See also [Datasette 0.45: The annotated release notes](https://simonwillison.net/2020/Jul/1/datasette-045/). - * 20th June 2020: [A cookiecutter template for writing Datasette plugins](https://simonwillison.net/2020/Jun/20/cookiecutter-plugins/) - * 11th June 2020: [Datasette 0.44](https://docs.datasette.io/en/stable/changelog.html#v0-44) - [Authentication and permissions](https://docs.datasette.io/en/stable/authentication.html), [writable canned queries](https://docs.datasette.io/en/stable/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more. - * 28th May 2020: [Datasette 0.43](https://docs.datasette.io/en/stable/changelog.html#v0-43) - Redesigned [register_output_renderer](https://docs.datasette.io/en/stable/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. - * 8th May 2020: [Datasette 0.42](https://docs.datasette.io/en/stable/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. - * 6th May 2020: [Datasette 0.41](https://docs.datasette.io/en/stable/changelog.html#v0-41) - New mechanism for [creating custom pages](https://docs.datasette.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://docs.datasette.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements. - * 21st April 2020: [Datasette 0.40](https://docs.datasette.io/en/stable/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. Various bug fixes. - * 24th March 2020: [Datasette 0.39](https://docs.datasette.io/en/stable/changelog.html#v0-39) - New `base_url` configuration option for running Datasette under a different URL prefix, `"sort"` and `"sort_desc"` metadata options for setting a default sort order for a table. - * 8th March 2020: [Datasette 0.38](https://docs.datasette.io/en/stable/changelog.html#v0-38) - New `--memory` option for `datasete publish cloudrun`, [Docker image](https://hub.docker.com/r/datasetteproject/datasette) upgraded to SQLite 3.31.1. - * 25th February 2020: [Datasette 0.37](https://docs.datasette.io/en/stable/changelog.html#v0-37) - new internal APIs enabling plugins to safely write to databases. Read more here: [Datasette Writes](https://simonwillison.net/2020/Feb/26/weeknotes-datasette-writes/). - * 21st February 2020: [Datasette 0.36](https://docs.datasette.io/en/stable/changelog.html#v0-36) - new internals documentation for plugins, `prepare_connection()` now accepts optional `database` and `datasette` arguments. - * 4th February 2020: [Datasette 0.35](https://docs.datasette.io/en/stable/changelog.html#v0-35) - new `.render_template()` method for plugins. - * 29th January 2020: [Datasette 0.34](https://docs.datasette.io/en/stable/changelog.html#v0-34) - improvements to search, `datasette publish cloudrun` and `datasette package`. 
- * 21st January 2020: [Deploying a data API using GitHub Actions and Cloud Run](https://simonwillison.net/2020/Jan/21/github-actions-cloud-run/) - how to use GitHub Actions and Google Cloud Run to automatically scrape data and deploy the result as an API with Datasette. - * 22nd December 2019: [Datasette 0.33](https://docs.datasette.io/en/stable/changelog.html#v0-33) - various small improvements. - * 19th December 2019: [Building tools to bring data-driven reporting to more newsrooms](https://medium.com/jsk-class-of-2020/building-tools-to-bring-data-driven-reporting-to-more-newsrooms-4520a0c9b3f2) - some notes on my JSK fellowship so far. - * 2nd December 2019: [Niche Museums](https://www.niche-museums.com/) is a new site entirely powered by Datasette, using custom templates and plugins. [niche-museums.com, powered by Datasette](https://simonwillison.net/2019/Nov/25/niche-museums/) describes how the site works, and [datasette-atom: Define an Atom feed using a custom SQL query](https://simonwillison.net/2019/Dec/3/datasette-atom/) describes how the new [datasette-atom plugin](https://github.com/simonw/datasette-atom) was used to add an Atom syndication feed to the site. - * 14th November 2019: [Datasette 0.32](https://docs.datasette.io/en/stable/changelog.html#v0-32) now uses asynchronous rendering in Jinja templates, which means template functions can perform asynchronous operations such as executing SQL queries. [datasette-template-sql](https://github.com/simonw/datasette-template-sql) is a new plugin uses this capability to add a new custom `sql(sql_query)` template function. - * 11th November 2019: [Datasette 0.31](https://docs.datasette.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5. - * 18th October 2019: [Datasette 0.30](https://docs.datasette.io/en/stable/changelog.html#v0-30) - * 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail. - * 7th July 2019: [Datasette 0.29](https://docs.datasette.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more... +* 28th November 2020: [Datasette 0.52](https://docs.datasette.io/en/stable/changelog.html#v0-52) - `--config` is now `--setting`, new `database_actions` plugin hook, `datasette publish cloudrun --apt-get-install` option and several bug fixes. +* 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. [Annotated release notes](https://simonwillison.net/2020/Nov/1/datasette-0-51/). +* 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). +* 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). 
+* 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. +* 11th August 2020: [Datasette 0.47](https://docs.datasette.io/en/stable/changelog.html#v0-47) - Datasette can now be installed using Homebrew! `brew install simonw/datasette/datasette`. Also new: `datasette install name-of-plugin` and `datasette uninstall name-of-plugin` commands, and `datasette --get '/-/versions.json'` to output the result of Datasette HTTP calls on the command-line. +* 9th August 2020: [Datasette 0.46](https://docs.datasette.io/en/stable/changelog.html#v0-46) - security fix relating to CSRF protection for writable canned queries, a new logo, new debugging tools, improved file downloads and more. +* 6th August 2020: [GraphQL in Datasette with the new datasette-graphql plugin](https://simonwillison.net/2020/Aug/7/datasette-graphql/) +* 24th July 2020: Two new plugins: [datasette-copyable and datasette-insert-api](https://simonwillison.net/2020/Jul/23/datasette-copyable-datasette-insert-api/). `datasette-copyable` adds copy-and-paste export options, and `datasette-insert-api` lets you create tables and insert or update data by POSTing JSON directly to Datasette. +* 1st July 2020: [Datasette 0.45](https://docs.datasette.io/en/stable/changelog.html#v0-45) - [Magic parameters for canned queries](https://docs.datasette.io/en/stable/sql_queries.html#canned-queries-magic-parameters), a log out feature, improved plugin documentation and four new plugin hooks. See also [Datasette 0.45: The annotated release notes](https://simonwillison.net/2020/Jul/1/datasette-045/). +* 20th June 2020: [A cookiecutter template for writing Datasette plugins](https://simonwillison.net/2020/Jun/20/cookiecutter-plugins/) +* 11th June 2020: [Datasette 0.44](https://docs.datasette.io/en/stable/changelog.html#v0-44) - [Authentication and permissions](https://docs.datasette.io/en/stable/authentication.html), [writable canned queries](https://docs.datasette.io/en/stable/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more. +* 28th May 2020: [Datasette 0.43](https://docs.datasette.io/en/stable/changelog.html#v0-43) - Redesigned [register_output_renderer](https://docs.datasette.io/en/stable/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. +* 8th May 2020: [Datasette 0.42](https://docs.datasette.io/en/stable/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. +* 6th May 2020: [Datasette 0.41](https://docs.datasette.io/en/stable/changelog.html#v0-41) - New mechanism for [creating custom pages](https://docs.datasette.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://docs.datasette.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements. +* 21st April 2020: [Datasette 0.40](https://docs.datasette.io/en/stable/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. Various bug fixes. 
+* 24th March 2020: [Datasette 0.39](https://docs.datasette.io/en/stable/changelog.html#v0-39) - New `base_url` configuration option for running Datasette under a different URL prefix, `"sort"` and `"sort_desc"` metadata options for setting a default sort order for a table. +* 8th March 2020: [Datasette 0.38](https://docs.datasette.io/en/stable/changelog.html#v0-38) - New `--memory` option for `datasete publish cloudrun`, [Docker image](https://hub.docker.com/r/datasetteproject/datasette) upgraded to SQLite 3.31.1. +* 25th February 2020: [Datasette 0.37](https://docs.datasette.io/en/stable/changelog.html#v0-37) - new internal APIs enabling plugins to safely write to databases. Read more here: [Datasette Writes](https://simonwillison.net/2020/Feb/26/weeknotes-datasette-writes/). +* 21st February 2020: [Datasette 0.36](https://docs.datasette.io/en/stable/changelog.html#v0-36) - new internals documentation for plugins, `prepare_connection()` now accepts optional `database` and `datasette` arguments. +* 4th February 2020: [Datasette 0.35](https://docs.datasette.io/en/stable/changelog.html#v0-35) - new `.render_template()` method for plugins. +* 29th January 2020: [Datasette 0.34](https://docs.datasette.io/en/stable/changelog.html#v0-34) - improvements to search, `datasette publish cloudrun` and `datasette package`. +* 21st January 2020: [Deploying a data API using GitHub Actions and Cloud Run](https://simonwillison.net/2020/Jan/21/github-actions-cloud-run/) - how to use GitHub Actions and Google Cloud Run to automatically scrape data and deploy the result as an API with Datasette. +* 22nd December 2019: [Datasette 0.33](https://docs.datasette.io/en/stable/changelog.html#v0-33) - various small improvements. +* 19th December 2019: [Building tools to bring data-driven reporting to more newsrooms](https://medium.com/jsk-class-of-2020/building-tools-to-bring-data-driven-reporting-to-more-newsrooms-4520a0c9b3f2) - some notes on my JSK fellowship so far. +* 2nd December 2019: [Niche Museums](https://www.niche-museums.com/) is a new site entirely powered by Datasette, using custom templates and plugins. [niche-museums.com, powered by Datasette](https://simonwillison.net/2019/Nov/25/niche-museums/) describes how the site works, and [datasette-atom: Define an Atom feed using a custom SQL query](https://simonwillison.net/2019/Dec/3/datasette-atom/) describes how the new [datasette-atom plugin](https://github.com/simonw/datasette-atom) was used to add an Atom syndication feed to the site. +* 14th November 2019: [Datasette 0.32](https://docs.datasette.io/en/stable/changelog.html#v0-32) now uses asynchronous rendering in Jinja templates, which means template functions can perform asynchronous operations such as executing SQL queries. [datasette-template-sql](https://github.com/simonw/datasette-template-sql) is a new plugin uses this capability to add a new custom `sql(sql_query)` template function. +* 11th November 2019: [Datasette 0.31](https://docs.datasette.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5. +* 18th October 2019: [Datasette 0.30](https://docs.datasette.io/en/stable/changelog.html#v0-30) +* 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail. 
+* 7th July 2019: [Datasette 0.29](https://docs.datasette.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more... * [datasette-auth-github](https://github.com/simonw/datasette-auth-github) - a new plugin for Datasette 0.29 that lets you require users to authenticate against GitHub before accessing your Datasette instance. You can whitelist specific users, or you can restrict access to members of specific GitHub organizations or teams. * [datasette-cors](https://github.com/simonw/datasette-cors) - a plugin that lets you configure CORS access from a list of domains (or a set of domain wildcards) so you can make JavaScript calls to a Datasette instance from a specific set of other hosts. - * 23rd June 2019: [Porting Datasette to ASGI, and Turtles all the way down](https://simonwillison.net/2019/Jun/23/datasette-asgi/) - * 21st May 2019: The anonymized raw data from [the Stack Overflow Developer Survey 2019](https://stackoverflow.blog/2019/05/21/public-data-release-of-stack-overflows-2019-developer-survey/) has been [published in partnership with Glitch](https://glitch.com/culture/discover-insights-explore-developer-survey-results-2019/), powered by Datasette. - * 19th May 2019: [Datasette 0.28](https://docs.datasette.io/en/stable/changelog.html#v0-28) - a salmagundi of new features! +* 23rd June 2019: [Porting Datasette to ASGI, and Turtles all the way down](https://simonwillison.net/2019/Jun/23/datasette-asgi/) +* 21st May 2019: The anonymized raw data from [the Stack Overflow Developer Survey 2019](https://stackoverflow.blog/2019/05/21/public-data-release-of-stack-overflows-2019-developer-survey/) has been [published in partnership with Glitch](https://glitch.com/culture/discover-insights-explore-developer-survey-results-2019/), powered by Datasette. +* 19th May 2019: [Datasette 0.28](https://docs.datasette.io/en/stable/changelog.html#v0-28) - a salmagundi of new features! * No longer immutable! Datasette now supports [databases that change](https://docs.datasette.io/en/stable/changelog.html#supporting-databases-that-change). * [Faceting improvements](https://docs.datasette.io/en/stable/changelog.html#faceting-improvements-and-faceting-plugins) including facet-by-JSON-array and the ability to define custom faceting using plugins. * [datasette publish cloudrun](https://docs.datasette.io/en/stable/changelog.html#datasette-publish-cloudrun) lets you publish databases to Google's new Cloud Run hosting service. * New [register_output_renderer](https://docs.datasette.io/en/stable/changelog.html#register-output-renderer-plugins) plugin hook for adding custom output extensions to Datasette in addition to the default `.json` and `.csv`. * Dozens of other smaller features and tweaks - see [the release notes](https://docs.datasette.io/en/stable/changelog.html#v0-28) for full details. * Read more about this release here: [Datasette 0.28—and why master should always be releasable](https://simonwillison.net/2019/May/19/datasette-0-28/) - * 24th February 2019: [ +* 24th February 2019: [ sqlite-utils: a Python library and CLI tool for building SQLite databases](https://simonwillison.net/2019/Feb/25/sqlite-utils/) - a partner tool for easily creating SQLite databases for use with Datasette. - * 31st Janary 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). 
- * 10th January 2019: [Datasette 0.26.1](https://docs.datasette.io/en/stable/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options. - * 2nd January 2019: [Datasette 0.26](https://docs.datasette.io/en/stable/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument. +* 31st Janary 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). +* 10th January 2019: [Datasette 0.26.1](https://docs.datasette.io/en/stable/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options. +* 2nd January 2019: [Datasette 0.26](https://docs.datasette.io/en/stable/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument. * 18th December 2018: [Fast Autocomplete Search for Your Website](https://24ways.org/2018/fast-autocomplete-search-for-your-website/) - a new tutorial on using Datasette to build a JavaScript autocomplete search engine. * 3rd October 2018: [The interesting ideas in Datasette](https://simonwillison.net/2018/Oct/4/datasette-ideas/) - a write-up of some of the less obvious interesting ideas embedded in the Datasette project. * 19th September 2018: [Datasette 0.25](https://docs.datasette.io/en/stable/changelog.html#v0-25) - New plugin hooks, improved database view support and an easier way to use more recent versions of SQLite. From 8ae0f9f7f0d644b0161165a1084f53acd2786f7c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 7 Dec 2020 12:16:13 -0800 Subject: [PATCH 0330/1705] Fixed spelling of Janary --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 89245cf1..7861abbd 100644 --- a/README.md +++ b/README.md @@ -69,7 +69,7 @@ Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly new * Read more about this release here: [Datasette 0.28—and why master should always be releasable](https://simonwillison.net/2019/May/19/datasette-0-28/) * 24th February 2019: [ sqlite-utils: a Python library and CLI tool for building SQLite databases](https://simonwillison.net/2019/Feb/25/sqlite-utils/) - a partner tool for easily creating SQLite databases for use with Datasette. -* 31st Janary 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). +* 31st January 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). * 10th January 2019: [Datasette 0.26.1](https://docs.datasette.io/en/stable/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options. * 2nd January 2019: [Datasette 0.26](https://docs.datasette.io/en/stable/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument. * 18th December 2018: [Fast Autocomplete Search for Your Website](https://24ways.org/2018/fast-autocomplete-search-for-your-website/) - a new tutorial on using Datasette to build a JavaScript autocomplete search engine. 
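The `__arraynotcontains` filter added in the patch below is the inverse of the existing `__arraycontains` filter: both compile to a `json_each()` subquery, so both require a SQLite build with the `json1` extension. A minimal sketch of the generated SQL, run outside Datasette against an illustrative table (the data mirrors the fixtures used in the tests):

```python
import sqlite3

# Sketch of the SQL that ?tags__arraynotcontains=tag3 compiles to,
# per the TemplatedFilter added below in datasette/filters.py.
# Requires json1; the table and rows here are illustrative.
conn = sqlite3.connect(":memory:")
conn.execute("create table facetable (id integer primary key, tags text)")
conn.executemany(
    "insert into facetable (id, tags) values (?, ?)",
    [(1, '["tag1", "tag2"]'), (2, '["tag1", "tag3"]'), (3, "[]")],
)
rows = conn.execute(
    """
    select id, tags from facetable
    where rowid not in (
        select facetable.rowid from facetable, json_each(facetable.tags) j
        where j.value = :p
    )
    """,
    {"p": "tag3"},
).fetchall()
# Rows with an empty tags array also match, since json_each yields
# nothing for them - which is why the test below combines the filter
# with tags__not=[] to exclude empty arrays.
print(rows)  # [(1, '["tag1", "tag2"]'), (3, '[]')]
```
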
From 4c25b035b2370983c8dd5e0c8762e9154e379774 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 7 Dec 2020 14:41:03 -0800 Subject: [PATCH 0331/1705] arraynotcontains filter, closes #1132 --- datasette/filters.py | 11 ++++++++++- docs/json_api.rst | 7 ++++++- tests/test_api.py | 25 +++++++++++++++++++++++-- 3 files changed, 39 insertions(+), 4 deletions(-) diff --git a/datasette/filters.py b/datasette/filters.py index 1524b32a..edf2de99 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -154,7 +154,16 @@ class Filters: where j.value = :{p} )""", '{c} contains "{v}"', - ) + ), + TemplatedFilter( + "arraynotcontains", + "array does not contain", + """rowid not in ( + select {t}.rowid from {t}, json_each({t}.{c}) j + where j.value = :{p} + )""", + '{c} does not contain "{v}"', + ), ] if detect_json1() else [] diff --git a/docs/json_api.rst b/docs/json_api.rst index 8d45ac6f..582a6159 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -267,7 +267,12 @@ You can filter the data returned by the table based on column values using a que Rows where column does not match any of the provided values. The inverse of ``__in=``. Also supports JSON arrays. ``?column__arraycontains=value`` - Works against columns that contain JSON arrays - matches if any of the values in that array match. + Works against columns that contain JSON arrays - matches if any of the values in that array match the provided value. + + This is only available if the ``json1`` SQLite extension is enabled. + +``?column__arraynotcontains=value`` + Works against columns that contain JSON arrays - matches if none of the values in that array match the provided value. This is only available if the ``json1`` SQLite extension is enabled. diff --git a/tests/test_api.py b/tests/test_api.py index 4339507c..a4c30414 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1121,7 +1121,7 @@ def test_table_filter_queries_multiple_of_same_type(app_client): @pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module") def test_table_filter_json_arraycontains(app_client): response = app_client.get("/fixtures/facetable.json?tags__arraycontains=tag1") - assert [ + assert response.json["rows"] == [ [ 1, "2019-01-14 08:00:00", @@ -1146,7 +1146,28 @@ def test_table_filter_json_arraycontains(app_client): "[]", "two", ], - ] == response.json["rows"] + ] + + +@pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module") +def test_table_filter_json_arraynotcontains(app_client): + response = app_client.get( + "/fixtures/facetable.json?tags__arraynotcontains=tag3&tags__not=[]" + ) + assert response.json["rows"] == [ + [ + 1, + "2019-01-14 08:00:00", + 1, + 1, + "CA", + 1, + "Mission", + '["tag1", "tag2"]', + '[{"foo": "bar"}]', + "one", + ] + ] def test_table_filter_extra_where(app_client): From fe86d853089f324f92daa950cc56f4052bf78f98 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Dec 2020 11:45:45 -0800 Subject: [PATCH 0332/1705] datasette serve --create option, closes #1135 --- datasette/cli.py | 21 ++++++++++++++++++++- docs/datasette-serve-help.txt | 1 + tests/test_cli.py | 19 +++++++++++++++++++ 3 files changed, 40 insertions(+), 1 deletion(-) diff --git a/datasette/cli.py b/datasette/cli.py index e84695e3..32408d23 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -27,6 +27,7 @@ from .utils import ( StaticMount, ValueAsBooleanError, ) +from .utils.sqlite import sqlite3 from .utils.testing import TestClient from .version import __version__ @@ -299,7 +300,7 @@ def 
uninstall(packages, yes): @cli.command() -@click.argument("files", type=click.Path(exists=True), nargs=-1) +@click.argument("files", type=click.Path(), nargs=-1) @click.option( "-i", "--immutable", @@ -401,6 +402,11 @@ def uninstall(packages, yes): is_flag=True, help="Open Datasette in your web browser", ) +@click.option( + "--create", + is_flag=True, + help="Create database files if they do not exist", +) def serve( files, immutable, @@ -424,6 +430,7 @@ def serve( help_config, pdb, open_browser, + create, return_instance=False, ): """Serve up specified SQLite database files with a web UI""" @@ -486,6 +493,18 @@ def serve( kwargs["config_dir"] = pathlib.Path(files[0]) files = [] + # Verify list of files, create if needed (and --create) + for file in files: + if not pathlib.Path(file).exists(): + if create: + sqlite3.connect(file).execute("vacuum") + else: + raise click.ClickException( + "Invalid value for '[FILES]...': Path '{}' does not exist.".format( + file + ) + ) + try: ds = Datasette(files, **kwargs) except SpatialiteNotFound: diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index bdaf0894..079ec9f8 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -40,4 +40,5 @@ Options: --help-config Show available config options --pdb Launch debugger on any errors -o, --open Open Datasette in your web browser + --create Create database files if they do not exist --help Show this message and exit. diff --git a/tests/test_cli.py b/tests/test_cli.py index a0ac7d7a..3f6b1840 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -146,6 +146,7 @@ def test_metadata_yaml(): help_config=False, pdb=False, open_browser=False, + create=False, return_instance=True, ) client = _TestClient(ds) @@ -221,3 +222,21 @@ def test_sql_errors_logged_to_stderr(ensure_eventloop): result = runner.invoke(cli, ["--get", "/:memory:.json?sql=select+blah"]) assert result.exit_code == 1 assert "sql = 'select blah', params = {}: no such column: blah\n" in result.stderr + + +def test_serve_create(ensure_eventloop, tmpdir): + runner = CliRunner() + db_path = tmpdir / "does_not_exist_yet.db" + assert not db_path.exists() + result = runner.invoke( + cli, [str(db_path), "--create", "--get", "/-/databases.json"] + ) + assert result.exit_code == 0, result.output + databases = json.loads(result.output) + assert { + "name": "does_not_exist_yet", + "is_mutable": True, + "is_memory": False, + "hash": None, + }.items() <= databases[0].items() + assert db_path.exists() From 6000d1a724d0e28cdb102e7be83eac07a00b41e8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Dec 2020 11:56:44 -0800 Subject: [PATCH 0333/1705] Fix for combining ?_search_x and ?_searchmode=raw, closes #1134 --- datasette/views/table.py | 4 +++- tests/test_api.py | 7 +++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index a0de2a8e..3e9adf88 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -443,7 +443,9 @@ class TableView(RowTableShared): fts_table = fts_table or await db.fts_table(table) fts_pk = special_args.get("_fts_pk", table_metadata.get("fts_pk", "rowid")) search_args = dict( - pair for pair in special_args.items() if pair[0].startswith("_search") + pair + for pair in special_args.items() + if pair[0].startswith("_search") and pair[0] != "_searchmode" ) search = "" search_mode_raw = special_args.get("_searchmode") == "raw" diff --git a/tests/test_api.py b/tests/test_api.py index a4c30414..10755b95 100644 
--- a/tests/test_api.py +++ b/tests/test_api.py @@ -1035,6 +1035,13 @@ def test_sortable_columns_metadata(app_client): [2, "terry dog", "sara weasel", "puma"], ], ), + ( + # _searchmode=raw combined with _search_COLUMN + "/fixtures/searchable.json?_search_text2=te*&_searchmode=raw", + [ + [1, "barry cat", "terry dog", "panther"], + ], + ), ( "/fixtures/searchable.json?_search=weasel", [[2, "terry dog", "sara weasel", "puma"]], From 387b471b88788069191bc845224b7712d92e9c0b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Dec 2020 12:03:44 -0800 Subject: [PATCH 0334/1705] Release 0.52.5 Refs #1134 --- datasette/version.py | 2 +- docs/changelog.rst | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index ce06fe1d..b0a59018 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52.4" +__version__ = "0.52.5" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 86d844f7..c79e7c86 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,13 @@ Changelog ========= +.. _v0_52_5: + +0.52.5 (2020-12-09) +------------------- + +- Fix for error caused by combining the ``_searchmode=raw`` and ``?_search_COLUMN`` parameters. (`#1134 `__) + .. _v0_52_4: 0.52.4 (2020-12-05) From 4c6407cd74070237fdad0dd6df4d016740806fbd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Dec 2020 12:14:33 -0800 Subject: [PATCH 0335/1705] Releasing bug fixes from a branch, closes #1136 --- docs/contributing.rst | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/docs/contributing.rst b/docs/contributing.rst index ca194001..8cd9c210 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -204,6 +204,34 @@ You are welcome to try these out, but please be aware that details may change be Please join `discussions on the issue tracker `__ to share your thoughts and experiences with on alpha and beta features that you try out. +.. _contributing_bug_fix_branch: + +Releasing bug fixes from a branch +--------------------------------- + +If it's necessary to publish a bug fix release without shipping new features that have landed on ``main`` a release branch can be used. + +Create it from the relevant last tagged release like so:: + + git branch 0.52.x 0.52.4 + git checkout 0.52.x + +Next cherry-pick the commits containing the bug fixes:: + + git cherry-pick COMMIT + +Write the release notes in the branch, and update the version number in ``version.py``. Then push the branch:: + + git push -u origin 0.52.x + +Once the tests have completed, publish the release from that branch target using the GitHub `Draft a new release `__ form. + +Finally, cherry-pick the commit with the release notes and version number bump across to ``main``:: + + git checkout main + git cherry-pick COMMIT + git push + .. 
_contributing_upgrading_codemirror: Upgrading CodeMirror From e0b54d09115ded459e09e2e89e0962cfddcb0244 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 15:20:43 -0800 Subject: [PATCH 0336/1705] No longer using Wiki for examples --- README.md | 2 +- docs/index.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 7861abbd..71e488f7 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover [Explore a demo](https://fivethirtyeight.datasettes.com/fivethirtyeight), watch [a video about the project](https://www.youtube.com/watch?v=pTr1uLQTJNE) or try it out by [uploading and publishing your own CSV data](https://simonwillison.net/2019/Apr/23/datasette-glitch/). * Comprehensive documentation: https://docs.datasette.io/ -* Examples: https://github.com/simonw/datasette/wiki/Datasettes +* Examples: https://datasette.io/examples * Live demo of current main: https://latest.datasette.io/ * Support questions, feedback? Join our [GitHub Discussions forum](https://github.com/simonw/datasette/discussions) diff --git a/docs/index.rst b/docs/index.rst index ff8db04b..eafc5bdb 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -25,7 +25,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover `Explore a demo `__, watch `a presentation about the project `__ or :ref:`getting_started_glitch`. -More examples: https://github.com/simonw/datasette/wiki/Datasettes +More examples: https://datasette.io/examples Support questions, feedback? Join our `GitHub Discussions forum `__. From 7ef80d0145dc9a2a16c46823704517d7f35fbe45 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 15:24:16 -0800 Subject: [PATCH 0337/1705] News is now on datasette.io/news Closes #1137, closes #659 --- README.md | 83 ++++++------------------------------------------------- 1 file changed, 8 insertions(+), 75 deletions(-) diff --git a/README.md b/README.md index 71e488f7..16fc8f0e 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover [Explore a demo](https://fivethirtyeight.datasettes.com/fivethirtyeight), watch [a video about the project](https://www.youtube.com/watch?v=pTr1uLQTJNE) or try it out by [uploading and publishing your own CSV data](https://simonwillison.net/2019/Apr/23/datasette-glitch/). +* Latest [Datasette News](https://datasette.io/news) * Comprehensive documentation: https://docs.datasette.io/ * Examples: https://datasette.io/examples * Live demo of current main: https://latest.datasette.io/ @@ -23,83 +24,15 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly newsletter](https://datasette.substack.com/) for tips, tricks and news on what's new in the Datasette ecosystem. -## News - -* 28th November 2020: [Datasette 0.52](https://docs.datasette.io/en/stable/changelog.html#v0-52) - `--config` is now `--setting`, new `database_actions` plugin hook, `datasette publish cloudrun --apt-get-install` option and several bug fixes. -* 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. 
[Annotated release notes](https://simonwillison.net/2020/Nov/1/datasette-0-51/). -* 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). -* 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). -* 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. -* 11th August 2020: [Datasette 0.47](https://docs.datasette.io/en/stable/changelog.html#v0-47) - Datasette can now be installed using Homebrew! `brew install simonw/datasette/datasette`. Also new: `datasette install name-of-plugin` and `datasette uninstall name-of-plugin` commands, and `datasette --get '/-/versions.json'` to output the result of Datasette HTTP calls on the command-line. -* 9th August 2020: [Datasette 0.46](https://docs.datasette.io/en/stable/changelog.html#v0-46) - security fix relating to CSRF protection for writable canned queries, a new logo, new debugging tools, improved file downloads and more. -* 6th August 2020: [GraphQL in Datasette with the new datasette-graphql plugin](https://simonwillison.net/2020/Aug/7/datasette-graphql/) -* 24th July 2020: Two new plugins: [datasette-copyable and datasette-insert-api](https://simonwillison.net/2020/Jul/23/datasette-copyable-datasette-insert-api/). `datasette-copyable` adds copy-and-paste export options, and `datasette-insert-api` lets you create tables and insert or update data by POSTing JSON directly to Datasette. -* 1st July 2020: [Datasette 0.45](https://docs.datasette.io/en/stable/changelog.html#v0-45) - [Magic parameters for canned queries](https://docs.datasette.io/en/stable/sql_queries.html#canned-queries-magic-parameters), a log out feature, improved plugin documentation and four new plugin hooks. See also [Datasette 0.45: The annotated release notes](https://simonwillison.net/2020/Jul/1/datasette-045/). -* 20th June 2020: [A cookiecutter template for writing Datasette plugins](https://simonwillison.net/2020/Jun/20/cookiecutter-plugins/) -* 11th June 2020: [Datasette 0.44](https://docs.datasette.io/en/stable/changelog.html#v0-44) - [Authentication and permissions](https://docs.datasette.io/en/stable/authentication.html), [writable canned queries](https://docs.datasette.io/en/stable/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more. -* 28th May 2020: [Datasette 0.43](https://docs.datasette.io/en/stable/changelog.html#v0-43) - Redesigned [register_output_renderer](https://docs.datasette.io/en/stable/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. -* 8th May 2020: [Datasette 0.42](https://docs.datasette.io/en/stable/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. 
-* 6th May 2020: [Datasette 0.41](https://docs.datasette.io/en/stable/changelog.html#v0-41) - New mechanism for [creating custom pages](https://docs.datasette.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://docs.datasette.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements. -* 21st April 2020: [Datasette 0.40](https://docs.datasette.io/en/stable/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. Various bug fixes. -* 24th March 2020: [Datasette 0.39](https://docs.datasette.io/en/stable/changelog.html#v0-39) - New `base_url` configuration option for running Datasette under a different URL prefix, `"sort"` and `"sort_desc"` metadata options for setting a default sort order for a table. -* 8th March 2020: [Datasette 0.38](https://docs.datasette.io/en/stable/changelog.html#v0-38) - New `--memory` option for `datasete publish cloudrun`, [Docker image](https://hub.docker.com/r/datasetteproject/datasette) upgraded to SQLite 3.31.1. -* 25th February 2020: [Datasette 0.37](https://docs.datasette.io/en/stable/changelog.html#v0-37) - new internal APIs enabling plugins to safely write to databases. Read more here: [Datasette Writes](https://simonwillison.net/2020/Feb/26/weeknotes-datasette-writes/). -* 21st February 2020: [Datasette 0.36](https://docs.datasette.io/en/stable/changelog.html#v0-36) - new internals documentation for plugins, `prepare_connection()` now accepts optional `database` and `datasette` arguments. -* 4th February 2020: [Datasette 0.35](https://docs.datasette.io/en/stable/changelog.html#v0-35) - new `.render_template()` method for plugins. -* 29th January 2020: [Datasette 0.34](https://docs.datasette.io/en/stable/changelog.html#v0-34) - improvements to search, `datasette publish cloudrun` and `datasette package`. -* 21st January 2020: [Deploying a data API using GitHub Actions and Cloud Run](https://simonwillison.net/2020/Jan/21/github-actions-cloud-run/) - how to use GitHub Actions and Google Cloud Run to automatically scrape data and deploy the result as an API with Datasette. -* 22nd December 2019: [Datasette 0.33](https://docs.datasette.io/en/stable/changelog.html#v0-33) - various small improvements. -* 19th December 2019: [Building tools to bring data-driven reporting to more newsrooms](https://medium.com/jsk-class-of-2020/building-tools-to-bring-data-driven-reporting-to-more-newsrooms-4520a0c9b3f2) - some notes on my JSK fellowship so far. -* 2nd December 2019: [Niche Museums](https://www.niche-museums.com/) is a new site entirely powered by Datasette, using custom templates and plugins. [niche-museums.com, powered by Datasette](https://simonwillison.net/2019/Nov/25/niche-museums/) describes how the site works, and [datasette-atom: Define an Atom feed using a custom SQL query](https://simonwillison.net/2019/Dec/3/datasette-atom/) describes how the new [datasette-atom plugin](https://github.com/simonw/datasette-atom) was used to add an Atom syndication feed to the site. -* 14th November 2019: [Datasette 0.32](https://docs.datasette.io/en/stable/changelog.html#v0-32) now uses asynchronous rendering in Jinja templates, which means template functions can perform asynchronous operations such as executing SQL queries. 
[datasette-template-sql](https://github.com/simonw/datasette-template-sql) is a new plugin uses this capability to add a new custom `sql(sql_query)` template function. -* 11th November 2019: [Datasette 0.31](https://docs.datasette.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5. -* 18th October 2019: [Datasette 0.30](https://docs.datasette.io/en/stable/changelog.html#v0-30) -* 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail. -* 7th July 2019: [Datasette 0.29](https://docs.datasette.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more... - * [datasette-auth-github](https://github.com/simonw/datasette-auth-github) - a new plugin for Datasette 0.29 that lets you require users to authenticate against GitHub before accessing your Datasette instance. You can whitelist specific users, or you can restrict access to members of specific GitHub organizations or teams. - * [datasette-cors](https://github.com/simonw/datasette-cors) - a plugin that lets you configure CORS access from a list of domains (or a set of domain wildcards) so you can make JavaScript calls to a Datasette instance from a specific set of other hosts. -* 23rd June 2019: [Porting Datasette to ASGI, and Turtles all the way down](https://simonwillison.net/2019/Jun/23/datasette-asgi/) -* 21st May 2019: The anonymized raw data from [the Stack Overflow Developer Survey 2019](https://stackoverflow.blog/2019/05/21/public-data-release-of-stack-overflows-2019-developer-survey/) has been [published in partnership with Glitch](https://glitch.com/culture/discover-insights-explore-developer-survey-results-2019/), powered by Datasette. -* 19th May 2019: [Datasette 0.28](https://docs.datasette.io/en/stable/changelog.html#v0-28) - a salmagundi of new features! - * No longer immutable! Datasette now supports [databases that change](https://docs.datasette.io/en/stable/changelog.html#supporting-databases-that-change). - * [Faceting improvements](https://docs.datasette.io/en/stable/changelog.html#faceting-improvements-and-faceting-plugins) including facet-by-JSON-array and the ability to define custom faceting using plugins. - * [datasette publish cloudrun](https://docs.datasette.io/en/stable/changelog.html#datasette-publish-cloudrun) lets you publish databases to Google's new Cloud Run hosting service. - * New [register_output_renderer](https://docs.datasette.io/en/stable/changelog.html#register-output-renderer-plugins) plugin hook for adding custom output extensions to Datasette in addition to the default `.json` and `.csv`. - * Dozens of other smaller features and tweaks - see [the release notes](https://docs.datasette.io/en/stable/changelog.html#v0-28) for full details. - * Read more about this release here: [Datasette 0.28—and why master should always be releasable](https://simonwillison.net/2019/May/19/datasette-0-28/) -* 24th February 2019: [ -sqlite-utils: a Python library and CLI tool for building SQLite databases](https://simonwillison.net/2019/Feb/25/sqlite-utils/) - a partner tool for easily creating SQLite databases for use with Datasette. 
-* 31st January 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). -* 10th January 2019: [Datasette 0.26.1](https://docs.datasette.io/en/stable/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options. -* 2nd January 2019: [Datasette 0.26](https://docs.datasette.io/en/stable/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument. -* 18th December 2018: [Fast Autocomplete Search for Your Website](https://24ways.org/2018/fast-autocomplete-search-for-your-website/) - a new tutorial on using Datasette to build a JavaScript autocomplete search engine. -* 3rd October 2018: [The interesting ideas in Datasette](https://simonwillison.net/2018/Oct/4/datasette-ideas/) - a write-up of some of the less obvious interesting ideas embedded in the Datasette project. -* 19th September 2018: [Datasette 0.25](https://docs.datasette.io/en/stable/changelog.html#v0-25) - New plugin hooks, improved database view support and an easier way to use more recent versions of SQLite. -* 23rd July 2018: [Datasette 0.24](https://docs.datasette.io/en/stable/changelog.html#v0-24) - a number of small new features -* 29th June 2018: [datasette-vega](https://github.com/simonw/datasette-vega), a new plugin for visualizing data as bar, line or scatter charts -* 21st June 2018: [Datasette 0.23.1](https://docs.datasette.io/en/stable/changelog.html#v0-23-1) - minor bug fixes -* 18th June 2018: [Datasette 0.23: CSV, SpatiaLite and more](https://docs.datasette.io/en/stable/changelog.html#v0-23) - CSV export, foreign key expansion in JSON and CSV, new config options, improved support for SpatiaLite and a bunch of other improvements -* 23rd May 2018: [Datasette 0.22.1 bugfix](https://github.com/simonw/datasette/releases/tag/0.22.1) plus we now use [versioneer](https://github.com/warner/python-versioneer) -* 20th May 2018: [Datasette 0.22: Datasette Facets](https://simonwillison.net/2018/May/20/datasette-facets) -* 5th May 2018: [Datasette 0.21: New _shape=, new _size=, search within columns](https://github.com/simonw/datasette/releases/tag/0.21) -* 25th April 2018: [Exploring the UK Register of Members Interests with SQL and Datasette](https://simonwillison.net/2018/Apr/25/register-members-interests/) - a tutorial describing how [register-of-members-interests.datasettes.com](https://register-of-members-interests.datasettes.com/) was built ([source code here](https://github.com/simonw/register-of-members-interests)) -* 20th April 2018: [Datasette plugins, and building a clustered map visualization](https://simonwillison.net/2018/Apr/20/datasette-plugins/) - introducing Datasette's new plugin system and [datasette-cluster-map](https://pypi.org/project/datasette-cluster-map/), a plugin for visualizing data on a map -* 20th April 2018: [Datasette 0.20: static assets and templates for plugins](https://github.com/simonw/datasette/releases/tag/0.20) -* 16th April 2018: [Datasette 0.19: plugins preview](https://github.com/simonw/datasette/releases/tag/0.19) -* 14th April 2018: [Datasette 0.18: units](https://github.com/simonw/datasette/releases/tag/0.18) -* 9th April 2018: [Datasette 0.15: sort by column](https://github.com/simonw/datasette/releases/tag/0.15) -* 28th March 2018: [Baltimore Sun Public Salary 
Records](https://simonwillison.net/2018/Mar/28/datasette-in-the-wild/) - a data journalism project from the Baltimore Sun powered by Datasette - source code [is available here](https://github.com/baltimore-sun-data/salaries-datasette) -* 27th March 2018: [Cloud-first: Rapid webapp deployment using containers](https://wwwf.imperial.ac.uk/blog/research-software-engineering/2018/03/27/cloud-first-rapid-webapp-deployment-using-containers/) - a tutorial covering deploying Datasette using Microsoft Azure by the Research Software Engineering team at Imperial College London -* 28th January 2018: [Analyzing my Twitter followers with Datasette](https://simonwillison.net/2018/Jan/28/analyzing-my-twitter-followers/) - a tutorial on using Datasette to analyze follower data pulled from the Twitter API -* 17th January 2018: [Datasette Publish: a web app for publishing CSV files as an online database](https://simonwillison.net/2018/Jan/17/datasette-publish/) -* 12th December 2017: [Building a location to time zone API with SpatiaLite, OpenStreetMap and Datasette](https://simonwillison.net/2017/Dec/12/building-a-location-time-zone-api/) -* 9th December 2017: [Datasette 0.14: customization edition](https://github.com/simonw/datasette/releases/tag/0.14) -* 25th November 2017: [New in Datasette: filters, foreign keys and search](https://simonwillison.net/2017/Nov/25/new-in-datasette/) -* 13th November 2017: [Datasette: instantly create and publish an API for your SQLite databases](https://simonwillison.net/2017/Nov/13/datasette/) - ## Installation - pip3 install datasette +If you are on a Mac, [Homebrew](https://brew.sh/) is the easiest way to install Datasette: + + brew install datasette + +You can also install it using `pip` or `pipx`: + + pip install datasette Datasette requires Python 3.6 or higher. We also have [detailed installation instructions](https://docs.datasette.io/en/stable/installation.html) covering other options such as Docker. From 2c0aca4887ed65167606a5fd084f35d046e2a00a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 15:28:44 -0800 Subject: [PATCH 0338/1705] _header=off option for CSV export, closes #1133 --- datasette/views/base.py | 3 ++- docs/csv_export.rst | 16 ++++++++++++++++ tests/test_csv.py | 8 ++++++++ 3 files changed, 26 insertions(+), 1 deletion(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index b8860b74..76e03206 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -307,7 +307,8 @@ class DataView(BaseView): if not first: data, _, _ = await self.data(request, database, hash, **kwargs) if first: - await writer.writerow(headings) + if request.args.get("_header") != "off": + await writer.writerow(headings) first = False next = data.get("next") for row in data["rows"]: diff --git a/docs/csv_export.rst b/docs/csv_export.rst index 0bda20ef..7f0d8396 100644 --- a/docs/csv_export.rst +++ b/docs/csv_export.rst @@ -28,6 +28,22 @@ file, which looks like this and has the following options: You can try that out on https://latest.datasette.io/fixtures/facetable?_size=4 +.. _csv_export_url_parameters: + +URL parameters +-------------- + +The following options can be used to customize the CSVs returned by Datasette. + +``?_header=off`` + This removes the first row of the CSV file specifying the headings - only the row data will be returned. + +``?_stream=on`` + Stream all matching records, not just the first page of results. See below. 
+ +``?_dl=on`` + Causes Datasette to return a ``content-disposition: attachment; filename="filename.csv"`` header. + Streaming all records --------------------- diff --git a/tests/test_csv.py b/tests/test_csv.py index 0fd665a9..6b17033c 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -64,6 +64,14 @@ def test_table_csv_cors_headers(app_client_with_cors): assert "*" == response.headers["Access-Control-Allow-Origin"] +def test_table_csv_no_header(app_client): + response = app_client.get("/fixtures/simple_primary_key.csv?_header=off") + assert response.status == 200 + assert not response.headers.get("Access-Control-Allow-Origin") + assert "text/plain; charset=utf-8" == response.headers["content-type"] + assert EXPECTED_TABLE_CSV.split("\r\n", 1)[1] == response.text + + def test_table_csv_with_labels(app_client): response = app_client.get("/fixtures/facetable.csv?_labels=1") assert response.status == 200 From 967cc05545480f09d421a7bf8b6dbfc27609a181 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 15:37:08 -0800 Subject: [PATCH 0339/1705] Powered by links to datasette.io, closes #1138 --- datasette/templates/_footer.html | 2 +- datasette/templates/patterns.html | 2 +- setup.py | 2 +- tests/test_html.py | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/datasette/templates/_footer.html b/datasette/templates/_footer.html index f930f445..b1380ae9 100644 --- a/datasette/templates/_footer.html +++ b/datasette/templates/_footer.html @@ -1,4 +1,4 @@ -Powered by Datasette +Powered by Datasette {% if query_ms %}· Query took {{ query_ms|round(3) }}ms{% endif %} {% if metadata %} {% if metadata.license or metadata.license_url %}· Data license: diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html index 4ef2c29f..984c1bf6 100644 --- a/datasette/templates/patterns.html +++ b/datasette/templates/patterns.html @@ -476,7 +476,7 @@

    .ft
-    Powered by <a href="https://github.com/simonw/datasette">Datasette</a>
+    Powered by <a href="https://datasette.io/">Datasette</a>
    Powered by Datasette · Data license: Apache License 2.0 · diff --git a/setup.py b/setup.py index 82696b38..e9eb1597 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,7 @@ setup( long_description_content_type="text/markdown", author="Simon Willison", license="Apache License, Version 2.0", - url="https://github.com/simonw/datasette", + url="https://datasette.io/", project_urls={ "Documentation": "https://docs.datasette.io/en/stable/", "Changelog": "https://docs.datasette.io/en/stable/changelog.html", diff --git a/tests/test_html.py b/tests/test_html.py index b9d3afcd..8b0b1c8d 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1079,7 +1079,7 @@ def assert_footer_links(soup): assert "tests/fixtures.py" == source_link.text.strip() assert "Apache License 2.0" == license_link.text.strip() assert "About Datasette" == about_link.text.strip() - assert "https://github.com/simonw/datasette" == datasette_link["href"] + assert "https://datasette.io/" == datasette_link["href"] assert ( "https://github.com/simonw/datasette/blob/master/tests/fixtures.py" == source_link["href"] @@ -1461,7 +1461,7 @@ def test_base_url_config(app_client_base_url_prefix, path): not href.startswith("#") and href not in { - "https://github.com/simonw/datasette", + "https://datasette.io/", "https://github.com/simonw/datasette/blob/master/LICENSE", "https://github.com/simonw/datasette/blob/master/tests/fixtures.py", "/login-as-root", # Only used for the latest.datasette.io demo From 02bb373194000d2b15f61914e7c5fdb124275bcd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 17:38:16 -0800 Subject: [PATCH 0340/1705] Updated release process --- docs/contributing.rst | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 8cd9c210..24d5c8f0 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -181,17 +181,9 @@ You can generate the list of issue references for a specific release by pasting ), ].sort().join(", "); -For non-bugfix releases you may want to update the news section of ``README.md`` as part of the same commit. +To create the tag for the release, create `a new release `__ on GitHub matching the new version number. You can convert the release notes to Markdown by copying and pasting the rendered HTML into this `Paste to Markdown tool `__. -To tag and push the releaes, run the following:: - - git tag 0.25.2 - git push --tags - -Final steps once the release has deployed to https://pypi.org/project/datasette/ - -* Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases - you can convert the release notes to Markdown by copying and pasting the rendered HTML into this tool: https://euangoddard.github.io/clipboard2markdown/ -* Manually kick off a build of the `stable` branch on Read The Docs: https://readthedocs.org/projects/datasette/builds/ +Finally, post a news item about the release on `datasette.io `__ by editing the `news.yaml `__ file in that site's repository. .. 
_contributing_alpha_beta: From 0c616f732cee79db80cad830917666f41b344262 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 17:44:36 -0800 Subject: [PATCH 0341/1705] Release 0.53 Refs #1132, #1135, #1133, #1138, #1137 --- datasette/version.py | 2 +- docs/changelog.rst | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index b0a59018..a5edecfa 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52.5" +__version__ = "0.53" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index c79e7c86..c570642f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,20 @@ Changelog ========= +.. _v0_53: + +0.53 (2020-12-10) +----------------- + +Datasette has an official project website now, at https://datasette.io/. This release mainly updates the documentation to reflect the new site. + +- New ``?column__arraynotcontains=`` table filter. (`#1132 `__) +- ``datasette serve`` has a new ``--create`` option, which will create blank database files if they do not already exist rather than exiting with an error. (`#1135 `__) +- New ``?_header=off`` option for CSV export which omits the CSV header row, :ref:`documented here `. (`#1133 `__) +- "Powered by Datasette" link in the footer now links to https://datasette.io/. (`#1138 `__) +- Project news no longer lives in the README - it can now be found at https://datasette.io/news. (`#1137 `__) + + .. _v0_52_5: 0.52.5 (2020-12-09) From 6119bd797366a899119f1bba51c1c8cba2efc8fc Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 16 Dec 2020 13:44:39 -0800 Subject: [PATCH 0342/1705] Update pytest requirement from <6.2.0,>=5.2.2 to >=5.2.2,<6.3.0 (#1145) Updates the requirements on [pytest](https://github.com/pytest-dev/pytest) to permit the latest version. 
- [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/5.2.2...6.2.0) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index e9eb1597..be94c1c6 100644 --- a/setup.py +++ b/setup.py @@ -68,7 +68,7 @@ setup( extras_require={ "docs": ["sphinx_rtd_theme", "sphinx-autobuild"], "test": [ - "pytest>=5.2.2,<6.2.0", + "pytest>=5.2.2,<6.3.0", "pytest-asyncio>=0.10,<0.15", "beautifulsoup4>=4.8.1,<4.10.0", "black==20.8b1", From 5e9895c67f08e9f42acedd3d6d29512ac446e15f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 17 Dec 2020 17:01:18 -0800 Subject: [PATCH 0343/1705] Database(memory_name=) for shared in-memory databases, closes #1151 --- datasette/database.py | 24 +++++++++++++++++++-- docs/internals.rst | 37 +++++++++++++++++++++++++++++--- tests/test_internals_database.py | 30 ++++++++++++++++++++++++++ 3 files changed, 86 insertions(+), 5 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 412e0c59..a977b362 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -24,11 +24,18 @@ connections = threading.local() class Database: - def __init__(self, ds, path=None, is_mutable=False, is_memory=False): + def __init__( + self, ds, path=None, is_mutable=False, is_memory=False, memory_name=None + ): self.ds = ds self.path = path self.is_mutable = is_mutable self.is_memory = is_memory + self.memory_name = memory_name + if memory_name is not None: + self.path = memory_name + self.is_memory = True + self.is_mutable = True self.hash = None self.cached_size = None self.cached_table_counts = None @@ -46,6 +53,16 @@ class Database: } def connect(self, write=False): + if self.memory_name: + uri = "file:{}?mode=memory&cache=shared".format(self.memory_name) + conn = sqlite3.connect( + uri, + uri=True, + check_same_thread=False, + ) + if not write: + conn.execute("PRAGMA query_only=1") + return conn if self.is_memory: return sqlite3.connect(":memory:") # mode=ro or immutable=1? @@ -215,7 +232,10 @@ class Database: @property def name(self): if self.is_memory: - return ":memory:" + if self.memory_name: + return ":memory:{}".format(self.memory_name) + else: + return ":memory:" else: return Path(self.path).stem diff --git a/docs/internals.rst b/docs/internals.rst index ff566f69..b68a1d8a 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -270,11 +270,16 @@ The ``db`` parameter should be an instance of the ``datasette.database.Database` This will add a mutable database from the provided file path. -The ``Database()`` constructor takes four arguments: the first is the ``datasette`` instance you are attaching to, the second is a ``path=``, then ``is_mutable`` and ``is_memory`` are both optional arguments. +To create a shared in-memory database named ``statistics``, use the following: -Use ``is_mutable`` if it is possible that updates will be made to that database - otherwise Datasette will open it in immutable mode and any changes could cause undesired behavior. +.. code-block:: python -Use ``is_memory`` if the connection is to an in-memory SQLite database. + from datasette.database import Database + + datasette.add_database("statistics", Database( + datasette, + memory_name="statistics" + )) .. 
_datasette_remove_database: @@ -480,6 +485,32 @@ Database class Instances of the ``Database`` class can be used to execute queries against attached SQLite databases, and to run introspection against their schemas. +.. _database_constructor: + +Database(ds, path=None, is_mutable=False, is_memory=False, memory_name=None) +---------------------------------------------------------------------------- + +The ``Database()`` constructor can be used by plugins, in conjunction with :ref:`datasette_add_database`, to create and register new databases. + +The arguments are as follows: + +``ds`` - :ref:`internals_datasette` (required) + The Datasette instance you are attaching this database to. + +``path`` - string + Path to a SQLite database file on disk. + +``is_mutable`` - boolean + Set this to ``True`` if it is possible that updates will be made to that database - otherwise Datasette will open it in immutable mode and any changes could cause undesired behavior. + +``is_memory`` - boolean + Use this to create non-shared memory connections. + +``memory_name`` - string or ``None`` + Use this to create a named in-memory database. Unlike regular memory databases these can be accessed by multiple threads and will persist an changes made to them for the lifetime of the Datasette server process. + +The first argument is the ``datasette`` instance you are attaching to, the second is a ``path=``, then ``is_mutable`` and ``is_memory`` are both optional arguments. + .. _database_execute: await db.execute(sql, ...) diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 49b8a1b3..dc1af48c 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -464,3 +464,33 @@ def test_mtime_ns_is_none_for_memory(app_client): def test_is_mutable(app_client): assert Database(app_client.ds, is_memory=True, is_mutable=True).is_mutable is True assert Database(app_client.ds, is_memory=True, is_mutable=False).is_mutable is False + + +@pytest.mark.asyncio +async def test_database_memory_name(app_client): + ds = app_client.ds + foo1 = Database(ds, memory_name="foo") + foo2 = Database(ds, memory_name="foo") + bar1 = Database(ds, memory_name="bar") + bar2 = Database(ds, memory_name="bar") + for db in (foo1, foo2, bar1, bar2): + table_names = await db.table_names() + assert table_names == [] + # Now create a table in foo + await foo1.execute_write("create table foo (t text)", block=True) + assert await foo1.table_names() == ["foo"] + assert await foo2.table_names() == ["foo"] + assert await bar1.table_names() == [] + assert await bar2.table_names() == [] + + +@pytest.mark.asyncio +async def test_in_memory_databases_forbid_writes(app_client): + ds = app_client.ds + db = Database(ds, memory_name="test") + with pytest.raises(sqlite3.OperationalError): + await db.execute("create table foo (t text)") + assert await db.table_names() == [] + # Using db.execute_write() should work: + await db.execute_write("create table foo (t text)", block=True) + assert await db.table_names() == ["foo"] From ebc7aa287c99fe6114b79aeab8efb8d4489a6182 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 18 Dec 2020 14:34:05 -0800 Subject: [PATCH 0344/1705] In-memory _schemas database tracking schemas of attached tables, closes #1150 --- datasette/app.py | 39 +++++++- datasette/cli.py | 3 + datasette/default_permissions.py | 2 + datasette/utils/__init__.py | 7 +- datasette/utils/schemas.py | 162 +++++++++++++++++++++++++++++++ datasette/views/base.py | 2 + tests/test_plugins.py | 2 +- 
tests/test_schemas.py | 68 +++++++++++++ 8 files changed, 279 insertions(+), 6 deletions(-) create mode 100644 datasette/utils/schemas.py create mode 100644 tests/test_schemas.py diff --git a/datasette/app.py b/datasette/app.py index 9bc84df0..cc8506e2 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -74,6 +74,7 @@ from .utils.asgi import ( asgi_send_json, asgi_send_redirect, ) +from .utils.schemas import init_schemas, populate_schema_tables from .utils.sqlite import ( sqlite3, using_pysqlite3, @@ -222,6 +223,11 @@ class Datasette: elif memory: self.files = (MEMORY,) + self.files self.databases = collections.OrderedDict() + # memory_name is a random string so that each Datasette instance gets its own + # unique in-memory named database - otherwise unit tests can fail with weird + # errors when different instances accidentally share an in-memory database + self.add_database("_schemas", Database(self, memory_name=secrets.token_hex())) + self._schemas_created = False for file in self.files: path = file is_memory = False @@ -326,6 +332,33 @@ class Datasette: self._root_token = secrets.token_hex(32) self.client = DatasetteClient(self) + async def refresh_schemas(self): + schema_db = self.databases["_schemas"] + if not self._schemas_created: + await init_schemas(schema_db) + self._schemas_created = True + + current_schema_versions = { + row["database_name"]: row["schema_version"] + for row in await schema_db.execute( + "select database_name, schema_version from databases" + ) + } + for database_name, db in self.databases.items(): + schema_version = (await db.execute("PRAGMA schema_version")).first()[0] + # Compare schema versions to see if we should skip it + if schema_version == current_schema_versions.get(database_name): + continue + await schema_db.execute_write( + """ + INSERT OR REPLACE INTO databases (database_name, path, is_memory, schema_version) + VALUES (?, ?, ?, ?) 
+ """, + [database_name, db.path, db.is_memory, schema_version], + block=True, + ) + await populate_schema_tables(schema_db, db) + @property def urls(self): return Urls(self) @@ -342,7 +375,8 @@ class Datasette: def get_database(self, name=None): if name is None: - return next(iter(self.databases.values())) + # Return first no-_schemas database + name = [key for key in self.databases.keys() if key != "_schemas"][0] return self.databases[name] def add_database(self, name, db): @@ -590,7 +624,8 @@ class Datasette: "is_memory": d.is_memory, "hash": d.hash, } - for d in sorted(self.databases.values(), key=lambda d: d.name) + for name, d in sorted(self.databases.items(), key=lambda p: p[1].name) + if name != "_schemas" ] def _versions(self): diff --git a/datasette/cli.py b/datasette/cli.py index 32408d23..50367fb3 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -134,6 +134,9 @@ async def inspect_(files, sqlite_extensions): app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions) data = {} for name, database in app.databases.items(): + if name == "_schemas": + # Don't include the in-memory _schemas database + continue counts = await database.table_counts(limit=3600 * 1000) data[name] = { "hash": database.hash, diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 9f1d9c62..62cab83a 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -13,6 +13,8 @@ def permission_allowed(datasette, actor, action, resource): if allow is not None: return actor_matches_allow(actor, allow) elif action == "view-database": + if resource == "_schemas" and (actor is None or actor.get("id") != "root"): + return False database_allow = datasette.metadata("allow", database=resource) if database_allow is None: return None diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 2576090a..ac1d82f7 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1024,11 +1024,12 @@ def find_spatialite(): async def initial_path_for_datasette(datasette): "Return suggested path for opening this Datasette, based on number of DBs and tables" - if len(datasette.databases) == 1: - db_name = next(iter(datasette.databases.keys())) + databases = dict([p for p in datasette.databases.items() if p[0] != "_schemas"]) + if len(databases) == 1: + db_name = next(iter(databases.keys())) path = datasette.urls.database(db_name) # Does this DB only have one table? 
- db = next(iter(datasette.databases.values())) + db = next(iter(databases.values())) tables = await db.table_names() if len(tables) == 1: path = datasette.urls.table(db_name, tables[0]) diff --git a/datasette/utils/schemas.py b/datasette/utils/schemas.py new file mode 100644 index 00000000..4612e236 --- /dev/null +++ b/datasette/utils/schemas.py @@ -0,0 +1,162 @@ +async def init_schemas(db): + await db.execute_write( + """ + CREATE TABLE databases ( + "database_name" TEXT PRIMARY KEY, + "path" TEXT, + "is_memory" INTEGER, + "schema_version" INTEGER + ) + """, + block=True, + ) + await db.execute_write( + """ + CREATE TABLE tables ( + "database_name" TEXT, + "table_name" TEXT, + "rootpage" INTEGER, + "sql" TEXT, + PRIMARY KEY (database_name, table_name) + ) + """, + block=True, + ) + await db.execute_write( + """ + CREATE TABLE columns ( + "database_name" TEXT, + "table_name" TEXT, + "cid" INTEGER, + "name" TEXT, + "type" TEXT, + "notnull" INTEGER, + "default_value" TEXT, -- renamed from dflt_value + "is_pk" INTEGER, -- renamed from pk + "hidden" INTEGER, + PRIMARY KEY (database_name, table_name, name) + ) + """, + block=True, + ) + await db.execute_write( + """ + CREATE TABLE indexes ( + "database_name" TEXT, + "table_name" TEXT, + "seq" INTEGER, + "name" TEXT, + "unique" INTEGER, + "origin" TEXT, + "partial" INTEGER, + PRIMARY KEY (database_name, table_name, name) + ) + """, + block=True, + ) + await db.execute_write( + """ + CREATE TABLE foreign_keys ( + "database_name" TEXT, + "table_name" TEXT, + "id" INTEGER, + "seq" INTEGER, + "table" TEXT, + "from" TEXT, + "to" TEXT, + "on_update" TEXT, + "on_delete" TEXT, + "match" TEXT + ) + """, + block=True, + ) + + +async def populate_schema_tables(schema_db, db): + database_name = db.name + await schema_db.execute_write( + "delete from tables where database_name = ?", [database_name], block=True + ) + tables = (await db.execute("select * from sqlite_master where type = 'table'")).rows + for table in tables: + table_name = table["name"] + await schema_db.execute_write( + """ + insert into tables (database_name, table_name, rootpage, sql) + values (?, ?, ?, ?) + """, + [database_name, table_name, table["rootpage"], table["sql"]], + block=True, + ) + # And the columns + await schema_db.execute_write( + "delete from columns where database_name = ? and table_name = ?", + [database_name, table_name], + block=True, + ) + columns = await db.table_column_details(table_name) + for column in columns: + params = { + **{"database_name": database_name, "table_name": table_name}, + **column._asdict(), + } + await schema_db.execute_write( + """ + insert into columns ( + database_name, table_name, cid, name, type, "notnull", default_value, is_pk, hidden + ) VALUES ( + :database_name, :table_name, :cid, :name, :type, :notnull, :default_value, :is_pk, :hidden + ) + """, + params, + block=True, + ) + # And the foreign_keys + await schema_db.execute_write( + "delete from foreign_keys where database_name = ? 
and table_name = ?", + [database_name, table_name], + block=True, + ) + foreign_keys = ( + await db.execute(f"PRAGMA foreign_key_list([{table_name}])") + ).rows + for foreign_key in foreign_keys: + params = { + **{"database_name": database_name, "table_name": table_name}, + **dict(foreign_key), + } + await schema_db.execute_write( + """ + insert into foreign_keys ( + database_name, table_name, "id", seq, "table", "from", "to", on_update, on_delete, match + ) VALUES ( + :database_name, :table_name, :id, :seq, :table, :from, :to, :on_update, :on_delete, :match + ) + """, + params, + block=True, + ) + # And the indexes + await schema_db.execute_write( + "delete from indexes where database_name = ? and table_name = ?", + [database_name, table_name], + block=True, + ) + indexes = (await db.execute(f"PRAGMA index_list([{table_name}])")).rows + for index in indexes: + params = { + **{"database_name": database_name, "table_name": table_name}, + **dict(index), + } + await schema_db.execute_write( + """ + insert into indexes ( + database_name, table_name, seq, name, "unique", origin, partial + ) VALUES ( + :database_name, :table_name, :seq, :name, :unique, :origin, :partial + ) + """, + params, + block=True, + ) diff --git a/datasette/views/base.py b/datasette/views/base.py index 76e03206..73bf9459 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -115,6 +115,8 @@ class BaseView: return Response.text("Method not allowed", status=405) async def dispatch_request(self, request, *args, **kwargs): + if self.ds: + await self.ds.refresh_schemas() handler = getattr(self, request.method.lower(), None) return await handler(request, *args, **kwargs) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 93b444ab..61e7d4b5 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -293,7 +293,7 @@ def test_hook_extra_body_script(app_client, path, expected_extra_body_script): def test_hook_asgi_wrapper(app_client): response = app_client.get("/fixtures") - assert "fixtures" == response.headers["x-databases"] + assert "_schemas, fixtures" == response.headers["x-databases"] def test_hook_extra_template_vars(restore_working_directory): diff --git a/tests/test_schemas.py b/tests/test_schemas.py new file mode 100644 index 00000000..87656784 --- /dev/null +++ b/tests/test_schemas.py @@ -0,0 +1,68 @@ +from .fixtures import app_client +import pytest + + +def test_schemas_only_available_to_root(app_client): + cookie = app_client.actor_cookie({"id": "root"}) + assert app_client.get("/_schemas").status == 403 + assert app_client.get("/_schemas", cookies={"ds_actor": cookie}).status == 200 + + +def test_schemas_databases(app_client): + cookie = app_client.actor_cookie({"id": "root"}) + databases = app_client.get( + "/_schemas/databases.json?_shape=array", cookies={"ds_actor": cookie} + ).json + assert len(databases) == 2 + assert databases[0]["database_name"] == "_schemas" + assert databases[1]["database_name"] == "fixtures" + + +def test_schemas_tables(app_client): + cookie = app_client.actor_cookie({"id": "root"}) + tables = app_client.get( + "/_schemas/tables.json?_shape=array", cookies={"ds_actor": cookie} + ).json + assert len(tables) > 5 + table = tables[0] + assert set(table.keys()) == {"rootpage", "table_name", "database_name", "sql"} + + +def test_schemas_indexes(app_client): + cookie = app_client.actor_cookie({"id": "root"}) + indexes = app_client.get( + "/_schemas/indexes.json?_shape=array", cookies={"ds_actor": cookie} + ).json + assert len(indexes) > 5 + index = indexes[0] + 
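+    # (The keys checked below mirror SQLite's "PRAGMA index_list" output,
+    # plus the database_name and table_name columns added by _schemas.)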
assert set(index.keys()) == { + "partial", + "name", + "table_name", + "unique", + "seq", + "database_name", + "origin", + } + + +def test_schemas_foreign_keys(app_client): + cookie = app_client.actor_cookie({"id": "root"}) + foreign_keys = app_client.get( + "/_schemas/foreign_keys.json?_shape=array", cookies={"ds_actor": cookie} + ).json + assert len(foreign_keys) > 5 + foreign_key = foreign_keys[0] + assert set(foreign_key.keys()) == { + "table", + "seq", + "on_update", + "on_delete", + "to", + "rowid", + "id", + "match", + "database_name", + "table_name", + "from", + } From dcdfb2c301341d45b66683e3e3be72f9c7585b2f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 21 Dec 2020 11:48:06 -0800 Subject: [PATCH 0345/1705] Rename _schemas to _internal, closes #1156 --- datasette/app.py | 39 +++++++------------ datasette/cli.py | 4 +- datasette/default_permissions.py | 2 +- datasette/utils/__init__.py | 2 +- .../utils/{schemas.py => internal_db.py} | 20 +++++----- .../{test_schemas.py => test_internal_db.py} | 24 ++++++------ tests/test_plugins.py | 2 +- 7 files changed, 42 insertions(+), 51 deletions(-) rename datasette/utils/{schemas.py => internal_db.py} (91%) rename tests/{test_schemas.py => test_internal_db.py} (63%) diff --git a/datasette/app.py b/datasette/app.py index cc8506e2..f995e79d 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -74,7 +74,7 @@ from .utils.asgi import ( asgi_send_json, asgi_send_redirect, ) -from .utils.schemas import init_schemas, populate_schema_tables +from .utils.internal_db import init_internal_db, populate_schema_tables from .utils.sqlite import ( sqlite3, using_pysqlite3, @@ -85,8 +85,6 @@ from .version import __version__ app_root = Path(__file__).parent.parent -MEMORY = object() - Setting = collections.namedtuple("Setting", ("name", "default", "help")) SETTINGS = ( Setting("default_page_size", 100, "Default page size for the table view"), @@ -218,24 +216,17 @@ class Datasette: ] self.inspect_data = inspect_data self.immutables = set(immutables or []) - if not self.files: - self.files = [MEMORY] - elif memory: - self.files = (MEMORY,) + self.files self.databases = collections.OrderedDict() + if memory or not self.files: + self.add_database(":memory:", Database(self, ":memory:", is_memory=True)) # memory_name is a random string so that each Datasette instance gets its own # unique in-memory named database - otherwise unit tests can fail with weird # errors when different instances accidentally share an in-memory database - self.add_database("_schemas", Database(self, memory_name=secrets.token_hex())) - self._schemas_created = False + self.add_database("_internal", Database(self, memory_name=secrets.token_hex())) + self._interna_db_created = False for file in self.files: path = file - is_memory = False - if file is MEMORY: - path = None - is_memory = True - is_mutable = path not in self.immutables - db = Database(self, path, is_mutable=is_mutable, is_memory=is_memory) + db = Database(self, path, is_mutable=path not in self.immutables) if db.name in self.databases: raise Exception(f"Multiple files with same stem: {db.name}") self.add_database(db.name, db) @@ -333,14 +324,14 @@ class Datasette: self.client = DatasetteClient(self) async def refresh_schemas(self): - schema_db = self.databases["_schemas"] - if not self._schemas_created: - await init_schemas(schema_db) - self._schemas_created = True + internal_db = self.databases["_internal"] + if not self._interna_db_created: + await init_internal_db(internal_db) + self._interna_db_created = True 
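+        # SQLite's "PRAGMA schema_version" counter increments whenever a
+        # database's schema changes, so comparing it against the stored
+        # value lets the loop below skip unchanged databases.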
current_schema_versions = { row["database_name"]: row["schema_version"] - for row in await schema_db.execute( + for row in await internal_db.execute( "select database_name, schema_version from databases" ) } @@ -349,7 +340,7 @@ class Datasette: # Compare schema versions to see if we should skip it if schema_version == current_schema_versions.get(database_name): continue - await schema_db.execute_write( + await internal_db.execute_write( """ INSERT OR REPLACE INTO databases (database_name, path, is_memory, schema_version) VALUES (?, ?, ?, ?) @@ -357,7 +348,7 @@ class Datasette: [database_name, db.path, db.is_memory, schema_version], block=True, ) - await populate_schema_tables(schema_db, db) + await populate_schema_tables(internal_db, db) @property def urls(self): @@ -376,7 +367,7 @@ class Datasette: def get_database(self, name=None): if name is None: # Return first no-_schemas database - name = [key for key in self.databases.keys() if key != "_schemas"][0] + name = [key for key in self.databases.keys() if key != "_internal"][0] return self.databases[name] def add_database(self, name, db): @@ -625,7 +616,7 @@ class Datasette: "hash": d.hash, } for name, d in sorted(self.databases.items(), key=lambda p: p[1].name) - if name != "_schemas" + if name != "_internal" ] def _versions(self): diff --git a/datasette/cli.py b/datasette/cli.py index 50367fb3..c342a35a 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -134,8 +134,8 @@ async def inspect_(files, sqlite_extensions): app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions) data = {} for name, database in app.databases.items(): - if name == "_schemas": - # Don't include the in-memory _schemas database + if name == "_internal": + # Don't include the in-memory _internal database continue counts = await database.table_counts(limit=3600 * 1000) data[name] = { diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 62cab83a..b58d8d1b 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -13,7 +13,7 @@ def permission_allowed(datasette, actor, action, resource): if allow is not None: return actor_matches_allow(actor, allow) elif action == "view-database": - if resource == "_schemas" and (actor is None or actor.get("id") != "root"): + if resource == "_internal" and (actor is None or actor.get("id") != "root"): return False database_allow = datasette.metadata("allow", database=resource) if database_allow is None: diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index ac1d82f7..34ee4630 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1024,7 +1024,7 @@ def find_spatialite(): async def initial_path_for_datasette(datasette): "Return suggested path for opening this Datasette, based on number of DBs and tables" - databases = dict([p for p in datasette.databases.items() if p[0] != "_schemas"]) + databases = dict([p for p in datasette.databases.items() if p[0] != "_internal"]) if len(databases) == 1: db_name = next(iter(databases.keys())) path = datasette.urls.database(db_name) diff --git a/datasette/utils/schemas.py b/datasette/utils/internal_db.py similarity index 91% rename from datasette/utils/schemas.py rename to datasette/utils/internal_db.py index 4612e236..a60fe1fe 100644 --- a/datasette/utils/schemas.py +++ b/datasette/utils/internal_db.py @@ -1,4 +1,4 @@ -async def init_schemas(db): +async def init_internal_db(db): await db.execute_write( """ CREATE TABLE databases ( @@ -73,15 +73,15 @@ async def 
init_schemas(db): ) -async def populate_schema_tables(schema_db, db): +async def populate_schema_tables(internal_db, db): database_name = db.name - await schema_db.execute_write( + await internal_db.execute_write( "delete from tables where database_name = ?", [database_name], block=True ) tables = (await db.execute("select * from sqlite_master where type = 'table'")).rows for table in tables: table_name = table["name"] - await schema_db.execute_write( + await internal_db.execute_write( """ insert into tables (database_name, table_name, rootpage, sql) values (?, ?, ?, ?) @@ -90,7 +90,7 @@ async def populate_schema_tables(schema_db, db): block=True, ) # And the columns - await schema_db.execute_write( + await internal_db.execute_write( "delete from columns where database_name = ? and table_name = ?", [database_name, table_name], block=True, @@ -101,7 +101,7 @@ async def populate_schema_tables(schema_db, db): **{"database_name": database_name, "table_name": table_name}, **column._asdict(), } - await schema_db.execute_write( + await internal_db.execute_write( """ insert into columns ( database_name, table_name, cid, name, type, "notnull", default_value, is_pk, hidden @@ -113,7 +113,7 @@ async def populate_schema_tables(schema_db, db): block=True, ) # And the foreign_keys - await schema_db.execute_write( + await internal_db.execute_write( "delete from foreign_keys where database_name = ? and table_name = ?", [database_name, table_name], block=True, @@ -126,7 +126,7 @@ async def populate_schema_tables(schema_db, db): **{"database_name": database_name, "table_name": table_name}, **dict(foreign_key), } - await schema_db.execute_write( + await internal_db.execute_write( """ insert into foreign_keys ( database_name, table_name, "id", seq, "table", "from", "to", on_update, on_delete, match @@ -138,7 +138,7 @@ async def populate_schema_tables(schema_db, db): block=True, ) # And the indexes - await schema_db.execute_write( + await internal_db.execute_write( "delete from indexes where database_name = ? 
and table_name = ?", [database_name, table_name], block=True, @@ -149,7 +149,7 @@ async def populate_schema_tables(schema_db, db): **{"database_name": database_name, "table_name": table_name}, **dict(index), } - await schema_db.execute_write( + await internal_db.execute_write( """ insert into indexes ( database_name, table_name, seq, name, "unique", origin, partial diff --git a/tests/test_schemas.py b/tests/test_internal_db.py similarity index 63% rename from tests/test_schemas.py rename to tests/test_internal_db.py index 87656784..9349fa3c 100644 --- a/tests/test_schemas.py +++ b/tests/test_internal_db.py @@ -2,36 +2,36 @@ from .fixtures import app_client import pytest -def test_schemas_only_available_to_root(app_client): +def test_internal_only_available_to_root(app_client): cookie = app_client.actor_cookie({"id": "root"}) - assert app_client.get("/_schemas").status == 403 - assert app_client.get("/_schemas", cookies={"ds_actor": cookie}).status == 200 + assert app_client.get("/_internal").status == 403 + assert app_client.get("/_internal", cookies={"ds_actor": cookie}).status == 200 -def test_schemas_databases(app_client): +def test_internal_databases(app_client): cookie = app_client.actor_cookie({"id": "root"}) databases = app_client.get( - "/_schemas/databases.json?_shape=array", cookies={"ds_actor": cookie} + "/_internal/databases.json?_shape=array", cookies={"ds_actor": cookie} ).json assert len(databases) == 2 - assert databases[0]["database_name"] == "_schemas" + assert databases[0]["database_name"] == "_internal" assert databases[1]["database_name"] == "fixtures" -def test_schemas_tables(app_client): +def test_internal_tables(app_client): cookie = app_client.actor_cookie({"id": "root"}) tables = app_client.get( - "/_schemas/tables.json?_shape=array", cookies={"ds_actor": cookie} + "/_internal/tables.json?_shape=array", cookies={"ds_actor": cookie} ).json assert len(tables) > 5 table = tables[0] assert set(table.keys()) == {"rootpage", "table_name", "database_name", "sql"} -def test_schemas_indexes(app_client): +def test_internal_indexes(app_client): cookie = app_client.actor_cookie({"id": "root"}) indexes = app_client.get( - "/_schemas/indexes.json?_shape=array", cookies={"ds_actor": cookie} + "/_internal/indexes.json?_shape=array", cookies={"ds_actor": cookie} ).json assert len(indexes) > 5 index = indexes[0] @@ -46,10 +46,10 @@ def test_schemas_indexes(app_client): } -def test_schemas_foreign_keys(app_client): +def test_internal_foreign_keys(app_client): cookie = app_client.actor_cookie({"id": "root"}) foreign_keys = app_client.get( - "/_schemas/foreign_keys.json?_shape=array", cookies={"ds_actor": cookie} + "/_internal/foreign_keys.json?_shape=array", cookies={"ds_actor": cookie} ).json assert len(foreign_keys) > 5 foreign_key = foreign_keys[0] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 61e7d4b5..8063460b 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -293,7 +293,7 @@ def test_hook_extra_body_script(app_client, path, expected_extra_body_script): def test_hook_asgi_wrapper(app_client): response = app_client.get("/fixtures") - assert "_schemas, fixtures" == response.headers["x-databases"] + assert "_internal, fixtures" == response.headers["x-databases"] def test_hook_extra_template_vars(restore_working_directory): From 810853c5f2fa560c6d303331c037f6443c145930 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 21 Dec 2020 13:49:14 -0800 Subject: [PATCH 0346/1705] Use time.perf_counter() instead of time.time(), closes #1157 --- 
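For context: time.time() follows the wall clock, which can jump when the
system clock is adjusted, while time.perf_counter() is monotonic and
high-resolution, making it the right tool for measuring elapsed durations.
A minimal sketch of the pattern this patch adopts:

    import time

    start = time.perf_counter()
    total = sum(range(10**6))  # stand-in for the work being timed
    elapsed_ms = (time.perf_counter() - start) * 1000
    print(f"took {elapsed_ms:.3f}ms")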
datasette/tracer.py | 8 ++++---- datasette/utils/__init__.py | 4 ++-- datasette/views/base.py | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/datasette/tracer.py b/datasette/tracer.py index 8f666767..772f0405 100644 --- a/datasette/tracer.py +++ b/datasette/tracer.py @@ -37,9 +37,9 @@ def trace(type, **kwargs): if tracer is None: yield return - start = time.time() + start = time.perf_counter() yield - end = time.time() + end = time.perf_counter() trace_info = { "type": type, "start": start, @@ -74,7 +74,7 @@ class AsgiTracer: if b"_trace=1" not in scope.get("query_string", b"").split(b"&"): await self.app(scope, receive, send) return - trace_start = time.time() + trace_start = time.perf_counter() traces = [] accumulated_body = b"" @@ -109,7 +109,7 @@ class AsgiTracer: # We have all the body - modify it and send the result # TODO: What to do about Content-Type or other cases? trace_info = { - "request_duration_ms": 1000 * (time.time() - trace_start), + "request_duration_ms": 1000 * (time.perf_counter() - trace_start), "sum_trace_duration_ms": sum(t["duration_ms"] for t in traces), "num_traces": len(traces), "traces": traces, diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 34ee4630..0d45e11a 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -138,7 +138,7 @@ class CustomJSONEncoder(json.JSONEncoder): @contextmanager def sqlite_timelimit(conn, ms): - deadline = time.time() + (ms / 1000) + deadline = time.perf_counter() + (ms / 1000) # n is the number of SQLite virtual machine instructions that will be # executed between each check. It's hard to know what to pick here. # After some experimentation, I've decided to go with 1000 by default and @@ -148,7 +148,7 @@ def sqlite_timelimit(conn, ms): n = 1 def handler(): - if time.time() >= deadline: + if time.perf_counter() >= deadline: return 1 conn.set_progress_handler(handler, n) diff --git a/datasette/views/base.py b/datasette/views/base.py index 73bf9459..8a64f88e 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -425,7 +425,7 @@ class DataView(BaseView): kwargs["default_labels"] = True extra_template_data = {} - start = time.time() + start = time.perf_counter() status_code = 200 templates = [] try: @@ -457,7 +457,7 @@ class DataView(BaseView): except DatasetteError: raise - end = time.time() + end = time.perf_counter() data["query_ms"] = (end - start) * 1000 for key in ("source", "source_url", "license", "license_url"): value = self.ds.metadata(key) From bc1f1e1ce8562872b7532a167873193e787cef20 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 22 Dec 2020 11:04:29 -0800 Subject: [PATCH 0347/1705] Compound primary key for foreign_keys table in _internal --- datasette/utils/internal_db.py | 3 ++- tests/test_internal_db.py | 1 - 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py index a60fe1fe..959f422e 100644 --- a/datasette/utils/internal_db.py +++ b/datasette/utils/internal_db.py @@ -66,7 +66,8 @@ async def init_internal_db(db): "to" TEXT, "on_update" TEXT, "on_delete" TEXT, - "match" TEXT + "match" TEXT, + PRIMARY KEY (database_name, table_name, id, seq) ) """, block=True, diff --git a/tests/test_internal_db.py b/tests/test_internal_db.py index 9349fa3c..755ddae5 100644 --- a/tests/test_internal_db.py +++ b/tests/test_internal_db.py @@ -59,7 +59,6 @@ def test_internal_foreign_keys(app_client): "on_update", "on_delete", "to", - "rowid", "id", "match", 
"database_name", From 270de6527bc2afb8c5996c400099321c320ded31 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 22 Dec 2020 11:48:54 -0800 Subject: [PATCH 0348/1705] Foreign keys for _internal database Refs #1099 - Datasette now uses compound foreign keys internally, so it would be great to link them correctly. --- datasette/utils/internal_db.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py index 959f422e..5cd32381 100644 --- a/datasette/utils/internal_db.py +++ b/datasette/utils/internal_db.py @@ -17,7 +17,8 @@ async def init_internal_db(db): "table_name" TEXT, "rootpage" INTEGER, "sql" TEXT, - PRIMARY KEY (database_name, table_name) + PRIMARY KEY (database_name, table_name), + FOREIGN KEY (database_name) REFERENCES databases(database_name) ) """, block=True, @@ -34,7 +35,9 @@ async def init_internal_db(db): "default_value" TEXT, -- renamed from dflt_value "is_pk" INTEGER, -- renamed from pk "hidden" INTEGER, - PRIMARY KEY (database_name, table_name, name) + PRIMARY KEY (database_name, table_name, name), + FOREIGN KEY (database_name) REFERENCES databases(database_name), + FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) ) """, block=True, @@ -49,7 +52,9 @@ async def init_internal_db(db): "unique" INTEGER, "origin" TEXT, "partial" INTEGER, - PRIMARY KEY (database_name, table_name, name) + PRIMARY KEY (database_name, table_name, name), + FOREIGN KEY (database_name) REFERENCES databases(database_name), + FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) ) """, block=True, @@ -67,7 +72,9 @@ async def init_internal_db(db): "on_update" TEXT, "on_delete" TEXT, "match" TEXT, - PRIMARY KEY (database_name, table_name, id, seq) + PRIMARY KEY (database_name, table_name, id, seq), + FOREIGN KEY (database_name) REFERENCES databases(database_name), + FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) ) """, block=True, From 8919f99c2f7f245aca7f94bd53d5ac9d04aa42b5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 22 Dec 2020 12:04:18 -0800 Subject: [PATCH 0349/1705] Improved .add_database() method design Closes #1155 - _internal now has a sensible name Closes #509 - Support opening multiple databases with the same stem --- datasette/app.py | 34 +++++++++++++++++--------- datasette/database.py | 42 +++++++++++++++++--------------- docs/internals.rst | 29 ++++++++++++++-------- tests/test_cli.py | 15 ++++++++++++ tests/test_internals_database.py | 12 ++++----- 5 files changed, 86 insertions(+), 46 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f995e79d..ad3ba07e 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -218,18 +218,18 @@ class Datasette: self.immutables = set(immutables or []) self.databases = collections.OrderedDict() if memory or not self.files: - self.add_database(":memory:", Database(self, ":memory:", is_memory=True)) + self.add_database(Database(self, is_memory=True), name=":memory:") # memory_name is a random string so that each Datasette instance gets its own # unique in-memory named database - otherwise unit tests can fail with weird # errors when different instances accidentally share an in-memory database - self.add_database("_internal", Database(self, memory_name=secrets.token_hex())) - self._interna_db_created = False + self.add_database( + Database(self, memory_name=secrets.token_hex()), name="_internal" + ) + self.internal_db_created = 
False for file in self.files: - path = file - db = Database(self, path, is_mutable=path not in self.immutables) - if db.name in self.databases: - raise Exception(f"Multiple files with same stem: {db.name}") - self.add_database(db.name, db) + self.add_database( + Database(self, file, is_mutable=file not in self.immutables) + ) self.cache_headers = cache_headers self.cors = cors metadata_files = [] @@ -325,9 +325,9 @@ class Datasette: async def refresh_schemas(self): internal_db = self.databases["_internal"] - if not self._interna_db_created: + if not self.internal_db_created: await init_internal_db(internal_db) - self._interna_db_created = True + self.internal_db_created = True current_schema_versions = { row["database_name"]: row["schema_version"] @@ -370,8 +370,20 @@ class Datasette: name = [key for key in self.databases.keys() if key != "_internal"][0] return self.databases[name] - def add_database(self, name, db): + def add_database(self, db, name=None): + if name is None: + # Pick a unique name for this database + suggestion = db.suggest_name() + name = suggestion + else: + suggestion = name + i = 2 + while name in self.databases: + name = "{}_{}".format(suggestion, i) + i += 1 + db.name = name self.databases[name] = db + return db def remove_database(self, name): self.databases.pop(name) diff --git a/datasette/database.py b/datasette/database.py index a977b362..cda36e6e 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -27,30 +27,44 @@ class Database: def __init__( self, ds, path=None, is_mutable=False, is_memory=False, memory_name=None ): + self.name = None self.ds = ds self.path = path self.is_mutable = is_mutable self.is_memory = is_memory self.memory_name = memory_name if memory_name is not None: - self.path = memory_name self.is_memory = True self.is_mutable = True self.hash = None self.cached_size = None - self.cached_table_counts = None + self._cached_table_counts = None self._write_thread = None self._write_queue = None if not self.is_mutable and not self.is_memory: p = Path(path) self.hash = inspect_hash(p) self.cached_size = p.stat().st_size - # Maybe use self.ds.inspect_data to populate cached_table_counts - if self.ds.inspect_data and self.ds.inspect_data.get(self.name): - self.cached_table_counts = { - key: value["count"] - for key, value in self.ds.inspect_data[self.name]["tables"].items() - } + + @property + def cached_table_counts(self): + if self._cached_table_counts is not None: + return self._cached_table_counts + # Maybe use self.ds.inspect_data to populate cached_table_counts + if self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self._cached_table_counts = { + key: value["count"] + for key, value in self.ds.inspect_data[self.name]["tables"].items() + } + return self._cached_table_counts + + def suggest_name(self): + if self.path: + return Path(self.path).stem + elif self.memory_name: + return self.memory_name + else: + return "db" def connect(self, write=False): if self.memory_name: @@ -220,7 +234,7 @@ class Database: except (QueryInterrupted, sqlite3.OperationalError, sqlite3.DatabaseError): counts[table] = None if not self.is_mutable: - self.cached_table_counts = counts + self._cached_table_counts = counts return counts @property @@ -229,16 +243,6 @@ class Database: return None return Path(self.path).stat().st_mtime_ns - @property - def name(self): - if self.is_memory: - if self.memory_name: - return ":memory:{}".format(self.memory_name) - else: - return ":memory:" - else: - return Path(self.path).stem - async def 
table_exists(self, table): results = await self.execute( "select 1 from sqlite_master where type='table' and name=?", params=(table,) diff --git a/docs/internals.rst b/docs/internals.rst index b68a1d8a..05cb8bd7 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -245,16 +245,16 @@ Returns the specified database object. Raises a ``KeyError`` if the database doe .. _datasette_add_database: -.add_database(name, db) ------------------------ - -``name`` - string - The unique name to use for this database. Also used in the URL. +.add_database(db, name=None) +---------------------------- ``db`` - datasette.database.Database instance The database to be attached. -The ``datasette.add_database(name, db)`` method lets you add a new database to the current Datasette instance. This database will then be served at URL path that matches the ``name`` parameter, e.g. ``/mynewdb/``. +``name`` - string, optional + The name to be used for this database - this will be used in the URL path, e.g. ``/dbname``. If not specified Datasette will pick one based on the filename or memory name. + +The ``datasette.add_database(db)`` method lets you add a new database to the current Datasette instance. The ``db`` parameter should be an instance of the ``datasette.database.Database`` class. For example: @@ -262,13 +262,13 @@ The ``db`` parameter should be an instance of the ``datasette.database.Database` from datasette.database import Database - datasette.add_database("my-new-database", Database( + datasette.add_database(Database( datasette, path="path/to/my-new-database.db", is_mutable=True )) -This will add a mutable database from the provided file path. +This will add a mutable database and serve it at ``/my-new-database``. To create a shared in-memory database named ``statistics``, use the following: @@ -276,11 +276,20 @@ To create a shared in-memory database named ``statistics``, use the following: from datasette.database import Database - datasette.add_database("statistics", Database( + datasette.add_database(Database( datasette, memory_name="statistics" )) +This database will be served at ``/statistics``. + +``.add_database()`` returns the Database instance, with its name set as the ``database.name`` attribute. Any time you are working with a newly added database you should use the return value of ``.add_database()``, for example: + +.. code-block:: python + + db = datasette.add_database(Database(datasette, memory_name="statistics")) + await db.execute_write("CREATE TABLE foo(id integer primary key)", block=True) + .. _datasette_remove_database: .remove_database(name) @@ -289,7 +298,7 @@ To create a shared in-memory database named ``statistics``, use the following: ``name`` - string The name of the database to be removed. -This removes a database that has been previously added. ``name=`` is the unique name of that database, also used in the URL for it. +This removes a database that has been previously added. ``name=`` is the unique name of that database, used in its URL path. .. 
_datasette_sign: diff --git a/tests/test_cli.py b/tests/test_cli.py index 3f6b1840..ff46d76f 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -8,6 +8,7 @@ import asyncio from datasette.plugins import DEFAULT_PLUGINS from datasette.cli import cli, serve from datasette.version import __version__ +from datasette.utils.sqlite import sqlite3 from click.testing import CliRunner import io import json @@ -240,3 +241,17 @@ def test_serve_create(ensure_eventloop, tmpdir): "hash": None, }.items() <= databases[0].items() assert db_path.exists() + + +def test_serve_duplicate_database_names(ensure_eventloop, tmpdir): + runner = CliRunner() + db_1_path = str(tmpdir / "db.db") + nested = tmpdir / "nested" + nested.mkdir() + db_2_path = str(tmpdir / "nested" / "db.db") + for path in (db_1_path, db_2_path): + sqlite3.connect(path).execute("vacuum") + result = runner.invoke(cli, [db_1_path, db_2_path, "--get", "/-/databases.json"]) + assert result.exit_code == 0, result.output + databases = json.loads(result.output) + assert {db["name"] for db in databases} == {"db", "db_2"} diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index dc1af48c..7eff9f7e 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -439,7 +439,7 @@ async def test_execute_write_fn_connection_exception(tmpdir, app_client): path = str(tmpdir / "immutable.db") sqlite3.connect(path).execute("vacuum") db = Database(app_client.ds, path=path, is_mutable=False) - app_client.ds.add_database("immutable-db", db) + app_client.ds.add_database(db, name="immutable-db") def write_fn(conn): assert False @@ -469,10 +469,10 @@ def test_is_mutable(app_client): @pytest.mark.asyncio async def test_database_memory_name(app_client): ds = app_client.ds - foo1 = Database(ds, memory_name="foo") - foo2 = Database(ds, memory_name="foo") - bar1 = Database(ds, memory_name="bar") - bar2 = Database(ds, memory_name="bar") + foo1 = ds.add_database(Database(ds, memory_name="foo")) + foo2 = ds.add_database(Database(ds, memory_name="foo")) + bar1 = ds.add_database(Database(ds, memory_name="bar")) + bar2 = ds.add_database(Database(ds, memory_name="bar")) for db in (foo1, foo2, bar1, bar2): table_names = await db.table_names() assert table_names == [] @@ -487,7 +487,7 @@ async def test_database_memory_name(app_client): @pytest.mark.asyncio async def test_in_memory_databases_forbid_writes(app_client): ds = app_client.ds - db = Database(ds, memory_name="test") + db = ds.add_database(Database(ds, memory_name="test")) with pytest.raises(sqlite3.OperationalError): await db.execute("create table foo (t text)") assert await db.table_names() == [] From 90eba4c3ca569c57e96bce314e7ac8caf67d884e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 22 Dec 2020 15:55:43 -0800 Subject: [PATCH 0350/1705] Prettier CREATE TABLE SQL for _internal --- datasette/utils/internal_db.py | 109 ++++++++++++++++++--------------- 1 file changed, 61 insertions(+), 48 deletions(-) diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py index 5cd32381..e92625d5 100644 --- a/datasette/utils/internal_db.py +++ b/datasette/utils/internal_db.py @@ -1,82 +1,95 @@ +import textwrap + + async def init_internal_db(db): await db.execute_write( - """ + textwrap.dedent( + """ CREATE TABLE databases ( - "database_name" TEXT PRIMARY KEY, - "path" TEXT, - "is_memory" INTEGER, - "schema_version" INTEGER + database_name TEXT PRIMARY KEY, + path TEXT, + is_memory INTEGER, + schema_version INTEGER ) - """, + """ + ), block=True, ) 
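+    # textwrap.dedent() strips the common leading indentation from each
+    # triple-quoted statement, so the CREATE TABLE text that SQLite stores
+    # in sqlite_master reads cleanly instead of carrying source indentation.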
await db.execute_write( - """ + textwrap.dedent( + """ CREATE TABLE tables ( - "database_name" TEXT, - "table_name" TEXT, - "rootpage" INTEGER, - "sql" TEXT, + database_name TEXT, + table_name TEXT, + rootpage INTEGER, + sql TEXT, PRIMARY KEY (database_name, table_name), FOREIGN KEY (database_name) REFERENCES databases(database_name) ) - """, + """ + ), block=True, ) await db.execute_write( - """ + textwrap.dedent( + """ CREATE TABLE columns ( - "database_name" TEXT, - "table_name" TEXT, - "cid" INTEGER, - "name" TEXT, - "type" TEXT, + database_name TEXT, + table_name TEXT, + cid INTEGER, + name TEXT, + type TEXT, "notnull" INTEGER, - "default_value" TEXT, -- renamed from dflt_value - "is_pk" INTEGER, -- renamed from pk - "hidden" INTEGER, + default_value TEXT, -- renamed from dflt_value + is_pk INTEGER, -- renamed from pk + hidden INTEGER, PRIMARY KEY (database_name, table_name, name), FOREIGN KEY (database_name) REFERENCES databases(database_name), FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) ) - """, + """ + ), block=True, ) await db.execute_write( - """ + textwrap.dedent( + """ CREATE TABLE indexes ( - "database_name" TEXT, - "table_name" TEXT, - "seq" INTEGER, - "name" TEXT, + database_name TEXT, + table_name TEXT, + seq INTEGER, + name TEXT, "unique" INTEGER, - "origin" TEXT, - "partial" INTEGER, + origin TEXT, + partial INTEGER, PRIMARY KEY (database_name, table_name, name), FOREIGN KEY (database_name) REFERENCES databases(database_name), FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) ) - """, + """ + ), block=True, ) await db.execute_write( - """ + textwrap.dedent( + """ CREATE TABLE foreign_keys ( - "database_name" TEXT, - "table_name" TEXT, - "id" INTEGER, - "seq" INTEGER, + database_name TEXT, + table_name TEXT, + id INTEGER, + seq INTEGER, "table" TEXT, "from" TEXT, "to" TEXT, - "on_update" TEXT, - "on_delete" TEXT, - "match" TEXT, + on_update TEXT, + on_delete TEXT, + match TEXT, PRIMARY KEY (database_name, table_name, id, seq), FOREIGN KEY (database_name) REFERENCES databases(database_name), FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name) ) - """, + """ + ), block=True, ) @@ -84,14 +97,14 @@ async def init_internal_db(db): async def populate_schema_tables(internal_db, db): database_name = db.name await internal_db.execute_write( - "delete from tables where database_name = ?", [database_name], block=True + "DELETE FROM tables WHERE database_name = ?", [database_name], block=True ) - tables = (await db.execute("select * from sqlite_master where type = 'table'")).rows + tables = (await db.execute("select * from sqlite_master WHERE type = 'table'")).rows for table in tables: table_name = table["name"] await internal_db.execute_write( """ - insert into tables (database_name, table_name, rootpage, sql) + INSERT INTO tables (database_name, table_name, rootpage, sql) values (?, ?, ?, ?) """, [database_name, table_name, table["rootpage"], table["sql"]], @@ -99,7 +112,7 @@ async def populate_schema_tables(internal_db, db): ) # And the columns await internal_db.execute_write( - "delete from columns where database_name = ? and table_name = ?", + "DELETE FROM columns WHERE database_name = ? 
and table_name = ?", [database_name, table_name], block=True, ) @@ -111,7 +124,7 @@ async def populate_schema_tables(internal_db, db): } await internal_db.execute_write( """ - insert into columns ( + INSERT INTO columns ( database_name, table_name, cid, name, type, "notnull", default_value, is_pk, hidden ) VALUES ( :database_name, :table_name, :cid, :name, :type, :notnull, :default_value, :is_pk, :hidden @@ -122,7 +135,7 @@ async def populate_schema_tables(internal_db, db): ) # And the foreign_keys await internal_db.execute_write( - "delete from foreign_keys where database_name = ? and table_name = ?", + "DELETE FROM foreign_keys WHERE database_name = ? and table_name = ?", [database_name, table_name], block=True, ) @@ -136,7 +149,7 @@ async def populate_schema_tables(internal_db, db): } await internal_db.execute_write( """ - insert into foreign_keys ( + INSERT INTO foreign_keys ( database_name, table_name, "id", seq, "table", "from", "to", on_update, on_delete, match ) VALUES ( :database_name, :table_name, :id, :seq, :table, :from, :to, :on_update, :on_delete, :match @@ -147,7 +160,7 @@ async def populate_schema_tables(internal_db, db): ) # And the indexes await internal_db.execute_write( - "delete from indexes where database_name = ? and table_name = ?", + "DELETE FROM indexes WHERE database_name = ? and table_name = ?", [database_name, table_name], block=True, ) @@ -159,7 +172,7 @@ async def populate_schema_tables(internal_db, db): } await internal_db.execute_write( """ - insert into indexes ( + INSERT INTO indexes ( database_name, table_name, seq, name, "unique", origin, partial ) VALUES ( :database_name, :table_name, :seq, :name, :unique, :origin, :partial From a882d679626438ba0d809944f06f239bcba8ee96 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miroslav=20=C5=A0ediv=C3=BD?= <6774676+eumiro@users.noreply.github.com> Date: Wed, 23 Dec 2020 18:04:32 +0100 Subject: [PATCH 0351/1705] Modernize code to Python 3.6+ (#1158) * Compact dict and set building * Remove redundant parentheses * Simplify chained conditions * Change method name to lowercase * Use triple double quotes for docstrings Thanks, @eumiro! 
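An illustrative before/after for the "compact dict and set building"
point (hypothetical code, not taken from the diff):

    items = [("Content-Type", "text/html"), ("X-Databases", "fixtures")]
    # Before: build a dict from a list of key/value pairs
    headers = dict([(key.lower(), value) for key, value in items])
    # After: a dict comprehension, more compact and idiomatic
    headers = {key.lower(): value for key, value in items}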
--- datasette/app.py | 16 +++++++------- datasette/cli.py | 10 ++++----- datasette/facets.py | 4 +--- datasette/filters.py | 6 +++--- datasette/hookspecs.py | 42 ++++++++++++++++++------------------- datasette/inspect.py | 6 +++--- datasette/renderer.py | 2 +- datasette/utils/__init__.py | 20 +++++++++--------- datasette/utils/asgi.py | 18 +++++++--------- datasette/views/base.py | 6 +++--- datasette/views/table.py | 4 ++-- tests/fixtures.py | 2 +- tests/plugins/my_plugin.py | 2 +- tests/test_api.py | 4 ++-- tests/test_auth.py | 4 ++-- tests/test_cli.py | 2 +- tests/test_docs.py | 6 +++--- tests/test_permissions.py | 2 +- tests/test_plugins.py | 2 +- 19 files changed, 76 insertions(+), 82 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index ad3ba07e..bd62fd3b 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -429,7 +429,7 @@ class Datasette: return m def plugin_config(self, plugin_name, database=None, table=None, fallback=True): - "Return config for plugin, falling back from specified database/table" + """Return config for plugin, falling back from specified database/table""" plugins = self.metadata( "plugins", database=database, table=table, fallback=fallback ) @@ -523,7 +523,7 @@ class Datasette: return [] async def permission_allowed(self, actor, action, resource=None, default=False): - "Check permissions using the permissions_allowed plugin hook" + """Check permissions using the permissions_allowed plugin hook""" result = None for check in pm.hook.permission_allowed( datasette=self, @@ -570,7 +570,7 @@ class Datasette: ) async def expand_foreign_keys(self, database, table, column, values): - "Returns dict mapping (column, value) -> label" + """Returns dict mapping (column, value) -> label""" labeled_fks = {} db = self.databases[database] foreign_keys = await db.foreign_keys_for_table(table) @@ -613,7 +613,7 @@ class Datasette: return url def _register_custom_units(self): - "Register any custom units defined in the metadata.json with Pint" + """Register any custom units defined in the metadata.json with Pint""" for unit in self.metadata("custom_units") or []: ureg.define(unit) @@ -730,7 +730,7 @@ class Datasette: return {"actor": request.actor} def table_metadata(self, database, table): - "Fetch table-specific metadata." + """Fetch table-specific metadata.""" return ( (self.metadata("databases") or {}) .get(database, {}) @@ -739,7 +739,7 @@ class Datasette: ) def _register_renderers(self): - """ Register output renderers which output data in custom formats. 
""" + """Register output renderers which output data in custom formats.""" # Built-in renderers self.renderers["json"] = (json_renderer, lambda: True) @@ -880,7 +880,7 @@ class Datasette: return output def app(self): - "Returns an ASGI app function that serves the whole of Datasette" + """Returns an ASGI app function that serves the whole of Datasette""" routes = [] for routes_to_add in pm.hook.register_routes(): @@ -1287,7 +1287,7 @@ def permanent_redirect(path): ) -_curly_re = re.compile(r"(\{.*?\})") +_curly_re = re.compile(r"({.*?})") def route_pattern_from_filepath(filepath): diff --git a/datasette/cli.py b/datasette/cli.py index c342a35a..2a84bf30 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -152,7 +152,7 @@ async def inspect_(files, sqlite_extensions): @cli.group() def publish(): - "Publish specified SQLite database files to the internet along with a Datasette-powered interface and API" + """Publish specified SQLite database files to the internet along with a Datasette-powered interface and API""" pass @@ -168,7 +168,7 @@ pm.hook.publish_subcommand(publish=publish) help="Path to directory containing custom plugins", ) def plugins(all, plugins_dir): - "List currently available plugins" + """List currently available plugins""" app = Datasette([], plugins_dir=plugins_dir) click.echo(json.dumps(app._plugins(all=all), indent=4)) @@ -244,7 +244,7 @@ def package( port, **extra_metadata, ): - "Package specified SQLite files into a new datasette Docker container" + """Package specified SQLite files into a new datasette Docker container""" if not shutil.which("docker"): click.secho( ' The package command requires "docker" to be installed and configured ', @@ -284,7 +284,7 @@ def package( "-U", "--upgrade", is_flag=True, help="Upgrade packages to latest version" ) def install(packages, upgrade): - "Install Python packages - e.g. Datasette plugins - into the same environment as Datasette" + """Install Python packages - e.g. Datasette plugins - into the same environment as Datasette""" args = ["pip", "install"] if upgrade: args += ["--upgrade"] @@ -297,7 +297,7 @@ def install(packages, upgrade): @click.argument("packages", nargs=-1, required=True) @click.option("-y", "--yes", is_flag=True, help="Don't ask for confirmation") def uninstall(packages, yes): - "Uninstall Python packages (e.g. plugins) from the Datasette environment" + """Uninstall Python packages (e.g. 
plugins) from the Datasette environment""" sys.argv = ["pip", "uninstall"] + list(packages) + (["-y"] if yes else []) run_module("pip", run_name="__main__") diff --git a/datasette/facets.py b/datasette/facets.py index 8ad5a423..207d819d 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -162,10 +162,8 @@ class ColumnFacet(Facet): ) num_distinct_values = len(distinct_values) if ( - num_distinct_values - and num_distinct_values > 1 + 1 < num_distinct_values < row_count and num_distinct_values <= facet_size - and num_distinct_values < row_count # And at least one has n > 1 and any(r["n"] > 1 for r in distinct_values) ): diff --git a/datasette/filters.py b/datasette/filters.py index edf2de99..152a26b4 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -208,7 +208,7 @@ class Filters: self.ureg = ureg def lookups(self): - "Yields (lookup, display, no_argument) pairs" + """Yields (lookup, display, no_argument) pairs""" for filter in self._filters: yield filter.key, filter.display, filter.no_argument @@ -233,7 +233,7 @@ class Filters: return f"where {s}" def selections(self): - "Yields (column, lookup, value) tuples" + """Yields (column, lookup, value) tuples""" for key, value in self.pairs: if "__" in key: column, lookup = key.rsplit("__", 1) @@ -246,7 +246,7 @@ class Filters: return bool(self.pairs) def convert_unit(self, column, value): - "If the user has provided a unit in the query, convert it into the column unit, if present." + """If the user has provided a unit in the query, convert it into the column unit, if present.""" if column not in self.units: return value diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index a305ca6a..13a10680 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -7,108 +7,108 @@ hookimpl = HookimplMarker("datasette") @hookspec def startup(datasette): - "Fires directly after Datasette first starts running" + """Fires directly after Datasette first starts running""" @hookspec def asgi_wrapper(datasette): - "Returns an ASGI middleware callable to wrap our ASGI application with" + """Returns an ASGI middleware callable to wrap our ASGI application with""" @hookspec def prepare_connection(conn, database, datasette): - "Modify SQLite connection in some way e.g. register custom SQL functions" + """Modify SQLite connection in some way e.g. register custom SQL functions""" @hookspec def prepare_jinja2_environment(env): - "Modify Jinja2 template environment e.g. register custom template tags" + """Modify Jinja2 template environment e.g. register custom template tags""" @hookspec def extra_css_urls(template, database, table, columns, view_name, request, datasette): - "Extra CSS URLs added by this plugin" + """Extra CSS URLs added by this plugin""" @hookspec def extra_js_urls(template, database, table, columns, view_name, request, datasette): - "Extra JavaScript URLs added by this plugin" + """Extra JavaScript URLs added by this plugin""" @hookspec def extra_body_script( template, database, table, columns, view_name, request, datasette ): - "Extra JavaScript code to be included in + {% endfor %} {% block extra_head %}{% endblock %} diff --git a/docs/custom_templates.rst b/docs/custom_templates.rst index d37bb729..a7236873 100644 --- a/docs/custom_templates.rst +++ b/docs/custom_templates.rst @@ -5,6 +5,8 @@ Custom pages and templates Datasette provides a number of ways of customizing the way data is displayed. +.. 
_customization_css_and_javascript:
+
 Custom CSS and JavaScript
 -------------------------
 
@@ -25,7 +27,12 @@ Your ``metadata.json`` file can include links that look like this:
     ]
     }
 
-The extra CSS and JavaScript files will be linked in the ``<head>`` of every page.
+The extra CSS and JavaScript files will be linked in the ``<head>`` of every page:
+
+.. code-block:: html
+
+    <link rel="stylesheet" href="https://simonwillison.net/static/css/all.bf8cd891642c.css">
+    <script src="https://code.jquery.com/jquery-3.2.1.slim.min.js"></script>
 
 You can also specify a SRI (subresource integrity hash) for these assets:
 
@@ -46,9 +53,39 @@ You can also specify a SRI (subresource integrity hash) for these assets:
     ]
     }
 
+This will produce:
+
+.. code-block:: html
+
+    <link rel="stylesheet" href="https://simonwillison.net/static/css/all.bf8cd891642c.css"
+        integrity="sha384-9qIZekWUyjCyDIf2YK1FRoKiPJq4PHt6tp/ulnuuyRBvazd0hG7pWbE99zvwSqwX"
+        crossorigin="anonymous">
+    <script src="https://code.jquery.com/jquery-3.2.1.slim.min.js"
+        integrity="sha256-k2WSCIexGzOj3Euiig+TlR8gA0EmPjuc79OEeY5L45g="
+        crossorigin="anonymous"></script>
+
 Modern browsers will only execute the stylesheet or JavaScript if the
 SRI hash matches the content served. You can generate hashes using
 `www.srihash.org `_
 
+Items in ``"extra_js_urls"`` can specify ``"module": true`` if they reference JavaScript that uses `JavaScript modules `__. This configuration:
+
+.. code-block:: json
+
+    {
+        "extra_js_urls": [
+            {
+                "url": "https://example.datasette.io/module.js",
+                "module": true
+            }
+        ]
+    }
+
+Will produce this HTML:
+
+.. code-block:: html
+
+    <script type="module" src="https://example.datasette.io/module.js"></script>
+
 CSS classes on the <body>
 ~~~~~~~~~~~~~~~~~~~~~~~~~
 
diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst
index 72b09367..d465307b 100644
--- a/docs/plugin_hooks.rst
+++ b/docs/plugin_hooks.rst
@@ -182,7 +182,7 @@ This can be a list of URLs:
     @hookimpl
     def extra_css_urls():
         return [
-            'https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css'
+            "https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css"
         ]
 
 Or a list of dictionaries defining both a URL and an
@@ -190,21 +190,17 @@ Or a list of dictionaries defining both a URL and an
 
 .. code-block:: python
 
-    from datasette import hookimpl
-
     @hookimpl
     def extra_css_urls():
         return [{
-            'url': 'https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css',
-            'sri': 'sha384-9gVQ4dYFwwWSjIDZnLEWnxCjeSWFphJiwGPXr1jddIhOegiu1FwO5qRGvFXOdJZ4',
+            "url": "https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css",
+            "sri": "sha384-9gVQ4dYFwwWSjIDZnLEWnxCjeSWFphJiwGPXr1jddIhOegiu1FwO5qRGvFXOdJZ4",
         }]
 
 This function can also return an awaitable function, useful if it needs to run any async code:
 
 .. code-block:: python
 
-    from datasette import hookimpl
-
     @hookimpl
     def extra_css_urls(datasette):
         async def inner():
@@ -233,8 +229,8 @@ return a list of URLs, a list of dictionaries or an awaitable function that retu
     @hookimpl
     def extra_js_urls():
         return [{
-            'url': 'https://code.jquery.com/jquery-3.3.1.slim.min.js',
-            'sri': 'sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo',
+            "url": "https://code.jquery.com/jquery-3.3.1.slim.min.js",
+            "sri": "sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo",
         }]
 
 You can also return URLs to files from your plugin's ``static/`` directory, if
@@ -242,12 +238,21 @@ you have one:
 
 .. code-block:: python
 
-    from datasette import hookimpl
-
     @hookimpl
     def extra_js_urls():
         return [
-            '/-/static-plugins/your-plugin/app.js'
+            "/-/static-plugins/your-plugin/app.js"
+        ]
+
+If your code uses `JavaScript modules `__ you should include the ``"module": True`` key. See :ref:`customization_css_and_javascript` for more details.
+
+.. 
code-block:: python
+
+    @hookimpl
+    def extra_js_urls():
+        return [{
+            "url": "/-/static-plugins/your-plugin/app.js",
+            "module": True
+        }]
 
 Examples: `datasette-cluster-map `_, `datasette-vega `_
 
diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py
index 2e653e2b..1c86b4bc 100644
--- a/tests/plugins/my_plugin.py
+++ b/tests/plugins/my_plugin.py
@@ -61,6 +61,7 @@ def extra_js_urls():
             "sri": "SRIHASH",
         },
         "https://plugin-example.datasette.io/plugin1.js",
+        {"url": "https://plugin-example.datasette.io/plugin.module.js", "module": True},
     ]
 
 
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 212de2b5..648e7abd 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -118,16 +118,19 @@ def test_hook_extra_css_urls(app_client, path, expected_decoded_object):
 def test_hook_extra_js_urls(app_client):
     response = app_client.get("/")
     scripts = Soup(response.body, "html.parser").findAll("script")
-    assert [
-        s
-        for s in scripts
-        if s.attrs
-        == {
+    script_attrs = [s.attrs for s in scripts]
+    for attrs in [
+        {
             "integrity": "SRIHASH",
             "crossorigin": "anonymous",
             "src": "https://plugin-example.datasette.io/jquery.js",
-        }
-    ]
+        },
+        {
+            "src": "https://plugin-example.datasette.io/plugin.module.js",
+            "type": "module",
+        },
+    ]:
+        assert any(s == attrs for s in script_attrs), "Expected: {}".format(attrs)
 
 
 def test_plugins_with_duplicate_js_urls(app_client):

From c38c42948cbfddd587729413fd6082ba352eaece Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Wed, 13 Jan 2021 18:14:33 -0800
Subject: [PATCH 0373/1705] extra_body_script module support, closes #1187

---
 datasette/app.py              |  8 +++++++-
 datasette/templates/base.html |  2 +-
 docs/plugin_hooks.rst         | 25 ++++++++++++++++++++-----
 tests/plugins/my_plugin.py    |  3 ++-
 tests/test_plugins.py         |  2 +-
 5 files changed, 31 insertions(+), 9 deletions(-)

diff --git a/datasette/app.py b/datasette/app.py
index f8549fac..cfce8e0b 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -781,7 +781,13 @@ class Datasette:
             datasette=self,
         ):
             extra_script = await await_me_maybe(extra_script)
-            body_scripts.append(Markup(extra_script))
+            if isinstance(extra_script, dict):
+                script = extra_script["script"]
+                module = bool(extra_script.get("module"))
+            else:
+                script = extra_script
+                module = False
+            body_scripts.append({"script": Markup(script), "module": module})
 
         extra_template_vars = {}
         # pylint: disable=no-member
diff --git a/datasette/templates/base.html b/datasette/templates/base.html
index 3f3d4507..e61edc4f 100644
--- a/datasette/templates/base.html
+++ b/datasette/templates/base.html
@@ -62,7 +62,7 @@
 {% include "_close_open_menus.html" %}
 
 {% for body_script in body_scripts %}
-    <script>{{ body_script }}</script>
+    <script{% if body_script.module %} type="module"{% endif %}>{{ body_script.script }}</script>
 {% endfor %}
 
 {% if select_templates %}{% endif %}
diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst
index d465307b..0206daaa 100644
--- a/docs/plugin_hooks.rst
+++ b/docs/plugin_hooks.rst
@@ -168,7 +168,7 @@ Examples: `datasette-search-all 
 extra_css_urls(template, database, table, columns, view_name, request, datasette)
 ---------------------------------------------------------------------------------
 
-Same arguments as :ref:`extra_template_vars(...) `
+This takes the same arguments as :ref:`extra_template_vars(...) `
 
 Return a list of extra CSS URLs that should be included on the page. These can
 take advantage of the CSS class hooks described in :ref:`customization`.
@@ -217,7 +217,7 @@ Examples: `datasette-cluster-map `
+This takes the same arguments as :ref:`extra_template_vars(...) 
`

This works in the same way as ``extra_css_urls()`` but for JavaScript. You can
return a list of URLs, a list of dictionaries or an awaitable function that returns those things:
@@ -264,15 +264,30 @@ extra_body_script(template, database, table, columns, view_name, request, datase
 
 Extra JavaScript to be added to a ``<script>`` element:
+
+.. code-block:: python
+
+    @hookimpl
+    def extra_body_script():
+        return {
+            "module": True,
+            "script": "console.log('Your JavaScript goes here...')"
+        }
+
+This will add the following to the end of your page:
+
+.. code-block:: html
+
+    <script type="module">console.log('Your JavaScript goes here...')</script>
+
 Example: `datasette-cluster-map `_
 
diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py
index 1c86b4bc..8d192d28 100644
--- a/tests/plugins/my_plugin.py
+++ b/tests/plugins/my_plugin.py
@@ -70,7 +70,7 @@ def extra_body_script(
     template, database, table, view_name, columns, request, datasette
 ):
     async def inner():
-        return "var extra_body_script = {};".format(
+        script = "var extra_body_script = {};".format(
             json.dumps(
                 {
                     "template": template,
@@ -90,6 +90,7 @@ def extra_body_script(
                 }
             )
         )
+        return {"script": script, "module": True}
 
     return inner
 
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 648e7abd..715c7c17 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -288,7 +288,7 @@ def test_plugin_config_file(app_client):
     ],
 )
 def test_hook_extra_body_script(app_client, path, expected_extra_body_script):
-    r = re.compile(r"<script>var extra_body_script = (.*?);</script>")
+    r = re.compile(r'<script type="module">var extra_body_script = (.*?);</script>')
     json_data = r.search(app_client.get(path).text).group(1)
     actual_data = json.loads(json_data)
     assert expected_extra_body_script == actual_data

From 7e3cfd9cf7aeddf153d907bc3ee08ae0cd489370 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Tue, 19 Jan 2021 12:27:45 -0800
Subject: [PATCH 0374/1705] Clarify the name of plugin used in /-/static-plugins/

---
 docs/plugin_hooks.rst | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst
index 0206daaa..23e57278 100644
--- a/docs/plugin_hooks.rst
+++ b/docs/plugin_hooks.rst
@@ -244,6 +244,8 @@ you have one:
             "/-/static-plugins/your-plugin/app.js"
         ]
 
+Note that ``your-plugin`` here should be the hyphenated plugin name - the name that is displayed in the list on the ``/-/plugins`` debug page.
+
 If your code uses `JavaScript modules `__ you should include the ``"module": True`` key. See :ref:`customization_css_and_javascript` for more details.

From 57f4d7b82f9c74298c67c5640207241925b70c02 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Tue, 19 Jan 2021 12:47:30 -0800
Subject: [PATCH 0375/1705] Release 0.54a0

Refs #1091, #1145, #1151, #1156, #1157, #1158, #1166, #1170, #1178, #1182, #1184, #1185, #1186, #1187
---
 datasette/version.py |  2 +-
 docs/changelog.rst   | 11 +++++++++++
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/datasette/version.py b/datasette/version.py
index a5edecfa..b19423a9 100644
--- a/datasette/version.py
+++ b/datasette/version.py
@@ -1,2 +1,2 @@
-__version__ = "0.53"
+__version__ = "0.54a0"
 __version_info__ = tuple(__version__.split("."))
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 40b9c5a3..ac2ac8c9 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,17 @@ Changelog
 =========
 
+.. _v0_54_a0:
+
+0.54a0 (2021-01-19)
+-------------------
+
+**Alpha release**. Release notes in progress.
+
+- Improved support for named in-memory databases. (`#1151 `__)
+- New ``_internal`` in-memory database tracking attached databases, tables and columns. 
(`#1150 `__) +- Support for JavaScript modules. (`#1186 `__, `#1187 `__) + .. _v0_53: 0.53 (2020-12-10) From 5378f023529107ff7edbd6ee4ecab6ac170a83db Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 19 Jan 2021 12:50:12 -0800 Subject: [PATCH 0376/1705] Better tool for extracting issue numbers --- docs/contributing.rst | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 24d5c8f0..3a4b2caa 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -169,17 +169,7 @@ To release a new version, first create a commit that updates the version number Referencing the issues that are part of the release in the commit message ensures the name of the release shows up on those issue pages, e.g. `here `__. -You can generate the list of issue references for a specific release by pasting the following into the browser devtools while looking at the :ref:`changelog` page (replace ``v0-44`` with the most recent version): - -.. code-block:: javascript - - [ - ...new Set( - Array.from( - document.getElementById("v0-44").querySelectorAll("a[href*=issues]") - ).map((a) => "#" + a.href.split("/issues/")[1]) - ), - ].sort().join(", "); +You can generate the list of issue references for a specific release by copying and pasting text from the release notes or GitHub changes-since-last-release view into this `Extract issue numbers from pasted text `__ tool. To create the tag for the release, create `a new release `__ on GitHub matching the new version number. You can convert the release notes to Markdown by copying and pasting the rendered HTML into this `Paste to Markdown tool `__. From 25c2933667680db045851b2cedcf4666d737d352 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 22 Jan 2021 16:46:16 -0800 Subject: [PATCH 0377/1705] publish heroku now uses python-3.8.7 --- datasette/publish/heroku.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index c772b476..c0c70e12 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -173,7 +173,7 @@ def temporary_heroku_directory( if metadata_content: open("metadata.json", "w").write(json.dumps(metadata_content, indent=2)) - open("runtime.txt", "w").write("python-3.8.6") + open("runtime.txt", "w").write("python-3.8.7") if branch: install = [ From f78e956eca1f363e3a3f93c69fd9fc31bed14629 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 24 Jan 2021 12:38:29 -0800 Subject: [PATCH 0378/1705] Plugin testing documentation on using pytest-httpx Closes #1198 --- docs/testing_plugins.rst | 71 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 71 insertions(+) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index bacfd57b..4261f639 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -118,3 +118,74 @@ If you want to create that test database repeatedly for every individual test fu @pytest.fixture def datasette(tmp_path_factory): # This fixture will be executed repeatedly for every test + +.. _testing_plugins_pytest_httpx: + +Testing outbound HTTP calls with pytest-httpx +--------------------------------------------- + +If your plugin makes outbound HTTP calls - for example datasette-auth-github or datasette-import-table - you may need to mock those HTTP requests in your tests. + +The `pytest-httpx `__ package is a useful library for mocking calls. 
It can be tricky to use with Datasette though since it mocks all HTTPX requests, and Datasette's own testing mechanism uses HTTPX internally.
+
+To avoid breaking your tests, you can return ``["localhost"]`` from the ``non_mocked_hosts()`` fixture.
+
+As an example, here's a very simple plugin which fetches a URL over HTTP and returns the resulting content:
+
+.. code-block:: python
+
+    from datasette import hookimpl
+    from datasette.utils.asgi import Response
+    import httpx
+
+
+    @hookimpl
+    def register_routes():
+        return [
+            (r"^/-/fetch-url$", fetch_url),
+        ]
+
+
+    async def fetch_url(datasette, request):
+        if request.method == "GET":
+            return Response.html(
+                """
+            <form action="/-/fetch-url" method="POST">
+                <input type="hidden" name="csrftoken" value="{}">
+                <input name="url"><input type="submit">
+            </form>""".format(
+                    request.scope["csrftoken"]()
+                )
+            )
+        vars = await request.post_vars()
+        url = vars["url"]
+        return Response.text(httpx.get(url).text)
+
+Here's a test for that plugin that mocks the HTTPX outbound request:
+
+.. code-block:: python
+
+    from datasette.app import Datasette
+    import pytest
+
+
+    @pytest.fixture
+    def non_mocked_hosts():
+        # This ensures httpx-mock will not affect Datasette's own
+        # httpx calls made in the tests by datasette.client:
+        return ["localhost"]
+
+
+    async def test_outbound_http_call(httpx_mock):
+        httpx_mock.add_response(
+            url='https://www.example.com/',
+            data='Hello world',
+        )
+        datasette = Datasette([], memory=True)
+        response = await datasette.client.post("/-/fetch-url", data={
+            "url": "https://www.example.com/"
+        })
+        assert response.text == "Hello world"
+
+        outbound_request = httpx_mock.get_request()
+        assert outbound_request.url == "https://www.example.com/"

From b6a7b58fa01af0cd5a5e94bd17d686d283a46819 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Sun, 24 Jan 2021 16:08:29 -0800
Subject: [PATCH 0379/1705] Initial docs for _internal database, closes #1154

---
 docs/internals.rst | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/docs/internals.rst b/docs/internals.rst
index f7b0cc0b..4a2c0a8e 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -747,3 +747,19 @@ If your plugin implements a ``<form method="POST">`` anywhere you will need to i
 .. code-block:: html
 
     <input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
+
+.. _internals_internal:
+
+The _internal database
+======================
+
+.. warning::
+    This API should be considered unstable - the structure of these tables may change prior to the release of Datasette 1.0.
+
+Datasette maintains an in-memory SQLite database with details of the databases, tables and columns for all of the attached databases.
+
+By default all actors are denied access to the ``view-database`` permission for the ``_internal`` database, so the database is not visible to anyone unless they :ref:`sign in as root `.
+
+Plugins can access this database by calling ``db = datasette.get_database("_internal")`` and then executing queries using the :ref:`Database API `.
+
+You can explore an example of this database by `signing in as root `__ to the ``latest.datasette.io`` demo instance and then navigating to `latest.datasette.io/_internal `__.
\ No newline at end of file

From ffff3a4c5398a9f40b61d59736f386444da19289 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Sun, 24 Jan 2021 17:41:46 -0800
Subject: [PATCH 0380/1705] Easier way to run Prettier locally (#1203)

Thanks, Ben Pickles - refs #1167
---
 .github/workflows/prettier.yml | 2 +-
 package.json                   | 4 ++++
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/prettier.yml b/.github/workflows/prettier.yml
index d846cca7..9dfe7ee0 100644
--- a/.github/workflows/prettier.yml
+++ b/.github/workflows/prettier.yml
@@ -19,4 +19,4 @@ jobs:
       run: npm ci
     - name: Run prettier
       run: |-
-        npx --no-install prettier --check 'datasette/static/*[!.min].js'
+        npm run prettier -- --check
diff --git a/package.json b/package.json
index 67452d2f..5c6dfe61 100644
--- a/package.json
+++ b/package.json
@@ -3,5 +3,9 @@
   "private": true,
   "devDependencies": {
     "prettier": "^2.2.1"
+  },
+  "scripts": {
+    "fix": "npm run prettier -- --write",
+    "prettier": "prettier 'datasette/static/*[!.min].js'"
   }
 }

From f3a155531807c586e62b8ff0e97b96a76e949c8d Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Sun, 24 Jan 2021 17:58:15 -0800
Subject: [PATCH 0381/1705] Contributing docs for Black and Prettier, closes #1167

Refs #1203
---
 docs/contributing.rst | 52 +++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 52 insertions(+)

diff --git a/docs/contributing.rst b/docs/contributing.rst
index 3a4b2caa..2cf641fd 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -97,6 +97,58 @@ You can tell Datasette to open an interactive ``pdb`` debugger session if an err
 
     datasette --pdb fixtures.db
 
+.. _contributing_formatting:
+
+Code formatting
+---------------
+
+Datasette uses opinionated code formatters: `Black `__ for Python and `Prettier `__ for JavaScript.
+
+These formatters are enforced by Datasette's continuous integration: if a commit includes Python or JavaScript code that does not match the style enforced by those tools, the tests will fail.
+
+When developing locally, you can verify and correct the formatting of your code using these tools.
+
+.. _contributing_formatting_black:
+
+Running Black
+~~~~~~~~~~~~~
+
+Black will be installed when you run ``pip install -e '.[test]'``. To test that your code complies with Black, run the following in your root ``datasette`` repository checkout::
+
+    $ black . --check
+    All done! ✨ 🍰 ✨
+    95 files would be left unchanged.
+
+If any of your code does not conform to Black you can run this to automatically fix those problems::
+
+    $ black .
+    reformatted ../datasette/setup.py
+    All done! 
✨ 🍰 ✨ + 1 file reformatted, 94 files left unchanged. + +.. _contributing_formatting_prettier: + +Prettier +~~~~~~~~ + +To install Prettier, `install Node.js `__ and then run the following in the root of your ``datasette`` repository checkout:: + + $ npm install + +This will install Prettier in a ``node_modules`` directory. You can then check that your code matches the coding style like so:: + + $ npm run prettier -- --check + > prettier + > prettier 'datasette/static/*[!.min].js' "--check" + + Checking formatting... + [warn] datasette/static/plugins.js + [warn] Code style issues found in the above file(s). Forgot to run Prettier? + +You can fix any problems by running:: + + $ npm run fix + .. _contributing_documentation: Editing and building the documentation From 07e163561592c743e4117f72102fcd350a600909 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 24 Jan 2021 19:10:10 -0800 Subject: [PATCH 0382/1705] All ?_ parameters now copied to hidden form fields, closes #1194 --- datasette/views/table.py | 17 +++++------------ tests/test_html.py | 22 ++++++++++++++++++++++ 2 files changed, 27 insertions(+), 12 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index cc8ef9f1..0a3504b3 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -812,19 +812,12 @@ class TableView(RowTableShared): .get(table, {}) ) self.ds.update_with_inherited_metadata(metadata) + form_hidden_args = [] - # Add currently selected facets - for arg in special_args: - if arg == "_facet" or arg.startswith("_facet_"): - form_hidden_args.extend( - (arg, item) for item in request.args.getlist(arg) - ) - for arg in ("_fts_table", "_fts_pk"): - if arg in special_args: - form_hidden_args.append((arg, special_args[arg])) - if request.args.get("_where"): - for where_text in request.args.getlist("_where"): - form_hidden_args.append(("_where", where_text)) + for key in request.args: + if key.startswith("_"): + for value in request.args.getlist(key): + form_hidden_args.append((key, value)) # if no sort specified AND table has a single primary key, # set sort to that so arrow is displayed diff --git a/tests/test_html.py b/tests/test_html.py index c7dd9d97..08d17ca7 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1250,6 +1250,28 @@ def test_extra_where_clauses(app_client): ] +@pytest.mark.parametrize( + "path,expected_hidden", + [ + ("/fixtures/facetable?_size=10", [("_size", "10")]), + ( + "/fixtures/facetable?_size=10&_ignore=1&_ignore=2", + [ + ("_size", "10"), + ("_ignore", "1"), + ("_ignore", "2"), + ], + ), + ], +) +def test_other_hidden_form_fields(app_client, path, expected_hidden): + response = app_client.get(path) + soup = Soup(response.body, "html.parser") + inputs = soup.find("form").findAll("input") + hiddens = [i for i in inputs if i["type"] == "hidden"] + assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == expected_hidden + + def test_binary_data_display_in_table(app_client): response = app_client.get("/fixtures/binary_data") assert response.status == 200 From a5ede3cdd455e2bb1a1fb2f4e1b5a9855caf5179 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 24 Jan 2021 21:13:05 -0800 Subject: [PATCH 0383/1705] Fixed bug loading database called 'test-database (1).sqlite' Closes #1181. Also now ensures that database URLs have special characters URL-quoted. 
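For reference, the quoting behavior this fix relies on looks like this (a sketch using the standard library, not code from the patch itself):

```python
import urllib.parse

# Database names are URL-quoted when links are generated...
name = "test-database (1)"
quoted = urllib.parse.quote(name)
print(quoted)  # test-database%20%281%29

# ...and unquoted again when an incoming path is resolved:
assert urllib.parse.unquote_plus(quoted) == name

# The in-memory database gets the same treatment:
print(urllib.parse.quote(":memory:"))  # %3Amemory%3A
```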
---
 datasette/url_builder.py     |  6 ++++--
 datasette/views/base.py      |  3 ++-
 docs/changelog.rst           | 10 ++++++----
 tests/test_api.py            | 14 +++++++-------
 tests/test_cli.py            | 23 +++++++++++++++++++++
 tests/test_html.py           |  6 +++---
 tests/test_internals_urls.py | 20 ++++++++++----------
 7 files changed, 55 insertions(+), 27 deletions(-)

diff --git a/datasette/url_builder.py b/datasette/url_builder.py
index 3034b664..2bcda869 100644
--- a/datasette/url_builder.py
+++ b/datasette/url_builder.py
@@ -30,9 +30,11 @@ class Urls:
     def database(self, database, format=None):
         db = self.ds.databases[database]
         if self.ds.setting("hash_urls") and db.hash:
-            path = self.path(f"{database}-{db.hash[:HASH_LENGTH]}", format=format)
+            path = self.path(
+                f"{urllib.parse.quote(database)}-{db.hash[:HASH_LENGTH]}", format=format
+            )
         else:
-            path = self.path(database, format=format)
+            path = self.path(urllib.parse.quote(database), format=format)
         return path
 
     def table(self, database, table, format=None):
diff --git a/datasette/views/base.py b/datasette/views/base.py
index a21b9298..ba0f7d4c 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -181,6 +181,7 @@ class DataView(BaseView):
     async def resolve_db_name(self, request, db_name, **kwargs):
         hash = None
         name = None
+        db_name = urllib.parse.unquote_plus(db_name)
         if db_name not in self.ds.databases and "-" in db_name:
             # No matching DB found, maybe it's a name-hash?
             name_bit, hash_bit = db_name.rsplit("-", 1)
@@ -191,7 +192,7 @@ class DataView(BaseView):
                 hash = hash_bit
         else:
             name = db_name
-        name = urllib.parse.unquote_plus(name)
+
         try:
             db = self.ds.databases[name]
         except KeyError:
diff --git a/docs/changelog.rst b/docs/changelog.rst
index ac2ac8c9..abc2f4f9 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,12 +4,14 @@ Changelog
 =========
 
-.. _v0_54_a0:
+.. _v0_54:
+
+0.54 (2021-01-24)
+-----------------
+
+
 
-0.54a0 (2021-01-19)
--------------------
-**Alpha release**. Release notes in progress.
-
 - Improved support for named in-memory databases. (`#1151 `__)
 - New ``_internal`` in-memory database tracking attached databases, tables and columns. 
(`#1150 `__)

diff --git a/tests/test_api.py b/tests/test_api.py
index 3b4f3437..0d1bddd3 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -609,17 +609,17 @@ def test_no_files_uses_memory_database(app_client_no_files):
     assert response.status == 200
     assert {
         ":memory:": {
+            "name": ":memory:",
             "hash": None,
             "color": "f7935d",
+            "path": "/%3Amemory%3A",
+            "tables_and_views_truncated": [],
+            "tables_and_views_more": False,
+            "tables_count": 0,
+            "table_rows_sum": 0,
+            "show_table_row_counts": False,
             "hidden_table_rows_sum": 0,
             "hidden_tables_count": 0,
-            "name": ":memory:",
-            "show_table_row_counts": False,
-            "path": "/:memory:",
-            "table_rows_sum": 0,
-            "tables_count": 0,
-            "tables_and_views_more": False,
-            "tables_and_views_truncated": [],
             "views_count": 0,
             "private": False,
         }
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 1d806bff..c42c22ea 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -17,6 +17,7 @@ import pytest
 import sys
 import textwrap
 from unittest import mock
+import urllib
 
 
 @pytest.fixture
@@ -255,3 +256,25 @@ def test_serve_duplicate_database_names(ensure_eventloop, tmpdir):
     assert result.exit_code == 0, result.output
     databases = json.loads(result.output)
     assert {db["name"] for db in databases} == {"db", "db_2"}
+
+
+@pytest.mark.parametrize(
+    "filename", ["test-database (1).sqlite", "database (1).sqlite"]
+)
+def test_weird_database_names(ensure_eventloop, tmpdir, filename):
+    # https://github.com/simonw/datasette/issues/1181
+    runner = CliRunner()
+    db_path = str(tmpdir / filename)
+    sqlite3.connect(db_path).execute("vacuum")
+    result1 = runner.invoke(cli, [db_path, "--get", "/"])
+    assert result1.exit_code == 0, result1.output
+    filename_no_stem = filename.rsplit(".", 1)[0]
+    expected_link = '<a href="/{}">{}</a>'.format(
+        urllib.parse.quote(filename_no_stem), filename_no_stem
+    )
+    assert expected_link in result1.output
+    # Now try hitting that database page
+    result2 = runner.invoke(
+        cli, [db_path, "--get", "/{}".format(urllib.parse.quote(filename_no_stem))]
+    )
+    assert result2.exit_code == 0, result2.output
diff --git a/tests/test_html.py b/tests/test_html.py
index 08d17ca7..6c33fba7 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -30,7 +30,7 @@ def test_homepage(app_client_two_attached_databases):
     # Should be two attached databases
     assert [
         {"href": "/fixtures", "text": "fixtures"},
-        {"href": "/extra database", "text": "extra database"},
+        {"href": r"/extra%20database", "text": "extra database"},
     ] == [{"href": a["href"], "text": a.text.strip()} for a in soup.select("h2 a")]
     # The first attached database should show count text and attached tables
     h2 = soup.select("h2")[1]
@@ -44,8 +44,8 @@ def test_homepage(app_client_two_attached_databases):
         {"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a")
     ]
     assert [
-        {"href": "/extra database/searchable", "text": "searchable"},
-        {"href": "/extra database/searchable_view", "text": "searchable_view"},
+        {"href": r"/extra%20database/searchable", "text": "searchable"},
+        {"href": r"/extra%20database/searchable_view", "text": "searchable_view"},
     ] == table_links
diff --git a/tests/test_internals_urls.py b/tests/test_internals_urls.py
index fd05c1b6..e6f405b3 100644
--- a/tests/test_internals_urls.py
+++ b/tests/test_internals_urls.py
@@ -103,9 +103,9 @@ def test_logout(ds, base_url, expected):
 @pytest.mark.parametrize(
     "base_url,format,expected",
     [
-        ("/", None, "/:memory:"),
-        ("/prefix/", None, "/prefix/:memory:"),
-        ("/", "json", "/:memory:.json"),
+        ("/", None, "/%3Amemory%3A"),
+        
("/prefix/", None, "/prefix/%3Amemory%3A"), + ("/", "json", "/%3Amemory%3A.json"), ], ) def test_database(ds, base_url, format, expected): @@ -118,10 +118,10 @@ def test_database(ds, base_url, format, expected): @pytest.mark.parametrize( "base_url,name,format,expected", [ - ("/", "name", None, "/:memory:/name"), - ("/prefix/", "name", None, "/prefix/:memory:/name"), - ("/", "name", "json", "/:memory:/name.json"), - ("/", "name.json", "json", "/:memory:/name.json?_format=json"), + ("/", "name", None, "/%3Amemory%3A/name"), + ("/prefix/", "name", None, "/prefix/%3Amemory%3A/name"), + ("/", "name", "json", "/%3Amemory%3A/name.json"), + ("/", "name.json", "json", "/%3Amemory%3A/name.json?_format=json"), ], ) def test_table_and_query(ds, base_url, name, format, expected): @@ -137,9 +137,9 @@ def test_table_and_query(ds, base_url, name, format, expected): @pytest.mark.parametrize( "base_url,format,expected", [ - ("/", None, "/:memory:/facetable/1"), - ("/prefix/", None, "/prefix/:memory:/facetable/1"), - ("/", "json", "/:memory:/facetable/1.json"), + ("/", None, "/%3Amemory%3A/facetable/1"), + ("/prefix/", None, "/prefix/%3Amemory%3A/facetable/1"), + ("/", "json", "/%3Amemory%3A/facetable/1.json"), ], ) def test_row(ds, base_url, format, expected): From 0b9ac1b2e9c855f1b823a06a898891da87c720ef Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 25 Jan 2021 09:33:29 -0800 Subject: [PATCH 0384/1705] Release 0.54 Refs #509, #1091, #1150, #1151, #1166, #1167, #1178, #1181, #1182, #1184, #1185, #1186, #1187, #1194, #1198 --- datasette/version.py | 2 +- docs/changelog.rst | 54 ++++++++++++++++++++++++++++++++++++++++---- 2 files changed, 50 insertions(+), 6 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index b19423a9..8fb7217d 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.54a0" +__version__ = "0.54" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index abc2f4f9..8fca312d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,16 +6,61 @@ Changelog .. _v0_54: -0.54 (2021-01-24) +0.54 (2021-01-25) ----------------- +The two big new features in this release are the ``_internal`` SQLite in-memory database storing details of all connected databases and tables, and support for JavaScript modules in plugins and additional scripts. +For additional commentary on this release, see `Datasette 0.54, the annotated release notes `__. +The _internal database +~~~~~~~~~~~~~~~~~~~~~~ +As part of ongoing work to help Datasette handle much larger numbers of connected databases and tables (see `Datasette Library `__) Datasette now maintains an in-memory SQLite database with details of all of the attached databases, tables, columns, indexes and foreign keys. (`#1150 `__) + +This will support future improvements such as a searchable, paginated homepage of all available tables. + +You can explore an example of this database by `signing in as root `__ to the ``latest.datasette.io`` demo instance and then navigating to `latest.datasette.io/_internal `__. + +Plugins can use these tables to introspect attached data in an efficient way. Plugin authors should note that this is not yet considered a stable interface, so any plugins that use this may need to make changes prior to Datasette 1.0 if the ``_internal`` table schemas change. 
+ +Named in-memory database support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +As part of the work building the ``_internal`` database, Datasette now supports named in-memory databases that can be shared across multiple connections. This allows plugins to create in-memory databases which will persist data for the lifetime of the Datasette server process. (`#1151 `__) + +The new ``memory_name=`` parameter to the :ref:`internals_database` can be used to create named, shared in-memory databases. + +JavaScript modules +~~~~~~~~~~~~~~~~~~ + +`JavaScript modules `__ were introduced in ECMAScript 2015 and provide native browser support for the ``import`` and ``export`` keywords. + +To use modules, JavaScript needs to be included in `` + diff --git a/datasette/templates/_codemirror_foot.html b/datasette/templates/_codemirror_foot.html index 4019d448..ee09cff1 100644 --- a/datasette/templates/_codemirror_foot.html +++ b/datasette/templates/_codemirror_foot.html @@ -23,6 +23,7 @@ window.onload = () => { editor.setValue(sqlFormatter.format(editor.getValue())); }) } + cmResize(editor, {resizableWidth: false}); } if (sqlFormat && readOnly) { const formatted = sqlFormatter.format(readOnly.innerHTML); From 42caabf7e9e6e4d69ef6dd7de16f2cd96bc79d5b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 22 Feb 2021 09:35:41 -0800 Subject: [PATCH 0403/1705] Fixed typo --- docs/testing_plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst index 8ea5e79b..1291a875 100644 --- a/docs/testing_plugins.rst +++ b/docs/testing_plugins.rst @@ -79,7 +79,7 @@ Using pytest fixtures A common pattern for Datasette plugins is to create a fixture which sets up a temporary test database and wraps it in a Datasette instance. -Here's an example that uses the `sqlite-utils library `__ to populate a temporary test database. It also sets the title of that table using a simulated ``metadata.json`` congiguration: +Here's an example that uses the `sqlite-utils library `__ to populate a temporary test database. It also sets the title of that table using a simulated ``metadata.json`` configuration: .. 
code-block:: python From 726f781c50e88f557437f6490b8479c3d6fabfc2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 22 Feb 2021 16:22:47 -0800 Subject: [PATCH 0404/1705] Fix for arraycontains bug, closes #1239 --- datasette/filters.py | 4 ++-- tests/test_filters.py | 8 ++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/datasette/filters.py b/datasette/filters.py index 152a26b4..2b859d99 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -150,7 +150,7 @@ class Filters: "arraycontains", "array contains", """rowid in ( - select {t}.rowid from {t}, json_each({t}.{c}) j + select {t}.rowid from {t}, json_each([{t}].[{c}]) j where j.value = :{p} )""", '{c} contains "{v}"', @@ -159,7 +159,7 @@ class Filters: "arraynotcontains", "array does not contain", """rowid not in ( - select {t}.rowid from {t}, json_each({t}.{c}) j + select {t}.rowid from {t}, json_each([{t}].[{c}]) j where j.value = :{p} )""", '{c} does not contain "{v}"', diff --git a/tests/test_filters.py b/tests/test_filters.py index 75a779b9..f22b7b5c 100644 --- a/tests/test_filters.py +++ b/tests/test_filters.py @@ -56,6 +56,14 @@ import pytest # Not in, and JSON array not in ((("foo__notin", "1,2,3"),), ["foo not in (:p0, :p1, :p2)"], ["1", "2", "3"]), ((("foo__notin", "[1,2,3]"),), ["foo not in (:p0, :p1, :p2)"], [1, 2, 3]), + # JSON arraycontains + ( + (("Availability+Info__arraycontains", "yes"),), + [ + "rowid in (\n select table.rowid from table, json_each([table].[Availability+Info]) j\n where j.value = :p0\n )" + ], + ["yes"], + ), ], ) def test_build_where(args, expected_where, expected_params): From afed51b1e36cf275c39e71c7cb262d6c5bdbaa31 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 26 Feb 2021 09:27:09 -0800 Subject: [PATCH 0405/1705] Note about where to find plugin examples, closes #1244 --- docs/writing_plugins.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index b43ecb27..6afee1c3 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -5,6 +5,8 @@ Writing plugins You can write one-off plugins that apply to just one Datasette instance, or you can write plugins which can be installed using ``pip`` and can be shipped to the Python Package Index (`PyPI `__) for other people to install. +Want to start by looking at an example? The `Datasette plugins directory `__ lists more than 50 open source plugins with code you can explore. The :ref:`plugin hooks ` page includes links to example plugins for each of the documented hooks. + .. 
_writing_plugins_one_off: Writing one-off plugins From cc6774cbaaba2359e0a92cfcc41ad988680075d6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Feb 2021 14:34:44 -0800 Subject: [PATCH 0406/1705] Upgrade httpx and remove xfail from tests, refs #1005 --- setup.py | 2 +- tests/test_api.py | 2 -- tests/test_html.py | 3 --- 3 files changed, 1 insertion(+), 6 deletions(-) diff --git a/setup.py b/setup.py index 34b6b396..15ee63fe 100644 --- a/setup.py +++ b/setup.py @@ -48,7 +48,7 @@ setup( "click-default-group~=1.2.2", "Jinja2>=2.10.3,<2.12.0", "hupper~=1.9", - "httpx>=0.15", + "httpx>=0.17", "pint~=0.9", "pluggy~=0.13.0", "uvicorn~=0.11", diff --git a/tests/test_api.py b/tests/test_api.py index 0b5401d6..caf23329 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -810,7 +810,6 @@ def test_table_shape_object_compound_primary_key(app_client): assert {"a,b": {"pk1": "a", "pk2": "b", "content": "c"}} == response.json -@pytest.mark.xfail def test_table_with_slashes_in_name(app_client): response = app_client.get( "/fixtures/table%2Fwith%2Fslashes.csv?_shape=objects&_format=json" @@ -1286,7 +1285,6 @@ def test_row_format_in_querystring(app_client): assert [{"id": "1", "content": "hello"}] == response.json["rows"] -@pytest.mark.xfail def test_row_strange_table_name(app_client): response = app_client.get( "/fixtures/table%2Fwith%2Fslashes.csv/3.json?_shape=objects" diff --git a/tests/test_html.py b/tests/test_html.py index e21bd64d..3482ec35 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -158,7 +158,6 @@ def test_row_redirects_with_url_hash(app_client_with_hash): assert response.status == 200 -@pytest.mark.xfail def test_row_strange_table_name_with_url_hash(app_client_with_hash): response = app_client_with_hash.get( "/fixtures/table%2Fwith%2Fslashes.csv/3", allow_redirects=False @@ -552,7 +551,6 @@ def test_facets_persist_through_filter_form(app_client): ] -@pytest.mark.xfail @pytest.mark.parametrize( "path,expected_classes", [ @@ -584,7 +582,6 @@ def test_css_classes_on_body(app_client, path, expected_classes): assert classes == expected_classes -@pytest.mark.xfail @pytest.mark.parametrize( "path,expected_considered", [ From 47eb885cc2c3aafa03645c330c6f597bee9b3b25 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Feb 2021 19:44:04 -0800 Subject: [PATCH 0407/1705] JSON faceting now suggested even if column has blank strings, closes #1246 --- datasette/facets.py | 11 ++++++++--- tests/test_facets.py | 22 ++++++++++++++++++++++ 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index 207d819d..01628760 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -279,6 +279,7 @@ class ArrayFacet(Facet): suggested_facet_sql = """ select distinct json_type({column}) from ({sql}) + where {column} is not null and {column} != '' """.format( column=escape_sqlite(column), sql=self.sql ) @@ -298,9 +299,13 @@ class ArrayFacet(Facet): v[0] for v in await self.ds.execute( self.database, - "select {column} from ({sql}) where {column} is not null and json_array_length({column}) > 0 limit 100".format( - column=escape_sqlite(column), sql=self.sql - ), + ( + "select {column} from ({sql}) " + "where {column} is not null " + "and {column} != '' " + "and json_array_length({column}) > 0 " + "limit 100" + ).format(column=escape_sqlite(column), sql=self.sql), self.params, truncate=False, custom_time_limit=self.ds.setting( diff --git a/tests/test_facets.py b/tests/test_facets.py index 1e19dc3a..31518682 100644 --- 
a/tests/test_facets.py +++ b/tests/test_facets.py @@ -1,3 +1,5 @@ +from datasette.app import Datasette +from datasette.database import Database from datasette.facets import ColumnFacet, ArrayFacet, DateFacet from datasette.utils.asgi import Request from datasette.utils import detect_json1 @@ -325,3 +327,23 @@ async def test_date_facet_results(app_client): "truncated": False, } } == buckets + + +@pytest.mark.asyncio +async def test_json_array_with_blanks_and_nulls(): + ds = Datasette([], memory=True) + db = ds.add_database(Database(ds, memory_name="test_json_array")) + await db.execute_write("create table foo(json_column text)", block=True) + for value in ('["a", "b", "c"]', '["a", "b"]', "", None): + await db.execute_write( + "insert into foo (json_column) values (?)", [value], block=True + ) + response = await ds.client.get("/test_json_array/foo.json") + data = response.json() + assert data["suggested_facets"] == [ + { + "name": "json_column", + "type": "array", + "toggle_url": "http://localhost/test_json_array/foo.json?_facet_array=json_column", + } + ] From 7c87532acc4e9d92caa1c4ee29a3446200928018 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Feb 2021 20:02:18 -0800 Subject: [PATCH 0408/1705] New .add_memory_database() method, closes #1247 --- datasette/app.py | 3 +++ docs/internals.rst | 29 ++++++++++++++++++++--------- tests/test_internals_database.py | 4 ++-- 3 files changed, 25 insertions(+), 11 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index e3272c6e..02d432df 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -390,6 +390,9 @@ class Datasette: self.databases[name] = db return db + def add_memory_database(self, memory_name): + return self.add_database(Database(self, memory_name=memory_name)) + def remove_database(self, name): self.databases.pop(name) diff --git a/docs/internals.rst b/docs/internals.rst index 713f5d7d..e3bb83fd 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -273,7 +273,25 @@ The ``db`` parameter should be an instance of the ``datasette.database.Database` This will add a mutable database and serve it at ``/my-new-database``. -To create a shared in-memory database named ``statistics``, use the following: +``.add_database()`` returns the Database instance, with its name set as the ``database.name`` attribute. Any time you are working with a newly added database you should use the return value of ``.add_database()``, for example: + +.. code-block:: python + + db = datasette.add_database(Database(datasette, memory_name="statistics")) + await db.execute_write("CREATE TABLE foo(id integer primary key)", block=True) + +.. _datasette_add_memory_database: + +.add_memory_database(name) +-------------------------- + +Adds a shared in-memory database with the specified name: + +.. code-block:: python + + datasette.add_memory_database("statistics") + +This is a shortcut for the following: .. code-block:: python @@ -284,14 +302,7 @@ To create a shared in-memory database named ``statistics``, use the following: memory_name="statistics" )) -This database will be served at ``/statistics``. - -``.add_database()`` returns the Database instance, with its name set as the ``database.name`` attribute. Any time you are working with a newly added database you should use the return value of ``.add_database()``, for example: - -.. 
code-block:: python

    db = datasette.add_database(Database(datasette, memory_name="statistics"))
    await db.execute_write("CREATE TABLE foo(id integer primary key)", block=True)

.. _datasette_add_memory_database:

.add_memory_database(name)
--------------------------

Adds a shared in-memory database with the specified name:

.. code-block:: python

    datasette.add_memory_database("statistics")

This is a shortcut for the following:

.. code-block:: python

    from datasette.database import Database

    datasette.add_database(Database(
        datasette,
        memory_name="statistics"
    ))

Using either of these patterns will result in the in-memory database being served at ``/statistics``.

.. _datasette_remove_database:

diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py
index 086f1a48..b60aaa8e 100644
--- a/tests/test_internals_database.py
+++ b/tests/test_internals_database.py
@@ -479,9 +479,9 @@ async def test_attached_databases(app_client_two_attached_databases_crossdb_enab
 async def test_database_memory_name(app_client):
     ds = app_client.ds
     foo1 = ds.add_database(Database(ds, memory_name="foo"))
-    foo2 = ds.add_database(Database(ds, memory_name="foo"))
+    foo2 = ds.add_memory_database("foo")
     bar1 = ds.add_database(Database(ds, memory_name="bar"))
-    bar2 = ds.add_database(Database(ds, memory_name="bar"))
+    bar2 = ds.add_memory_database("bar")
     for db in (foo1, foo2, bar1, bar2):
         table_names = await db.table_names()
         assert table_names == []

From 4f9a2f1f47dcf7e8561d68a8a07f5009a13cfdb3 Mon Sep 17 00:00:00 2001
From: David Boucha 
Date: Wed, 3 Mar 2021 22:46:10 -0700
Subject: [PATCH 0409/1705] Fix small typo (#1243)

Thanks, @UtahDave
---
 docs/deploying.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/deploying.rst b/docs/deploying.rst
index 4e04ea1d..0f892f83 100644
--- a/docs/deploying.rst
+++ b/docs/deploying.rst
@@ -58,7 +58,7 @@ Add a random value for the ``DATASETTE_SECRET`` - this will be used to sign Data
 
     $ python3 -c 'import secrets; print(secrets.token_hex(32))'
 
-This configuration will run Datasette against all database files contained in the ``/home/ubunt/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`config_dir` for details.
+This configuration will run Datasette against all database files contained in the ``/home/ubuntu/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`config_dir` for details.

From d0fd833b8cdd97e1b91d0f97a69b494895d82bee Mon Sep 17 00:00:00 2001
From: Bob Whitelock 
Date: Sun, 7 Mar 2021 07:41:17 +0000
Subject: [PATCH 0410/1705] Add compile option to Dockerfile to fix failing
 test (fixes #696) (#1223)

This test was failing when run inside the Docker container:
`test_searchable[/fixtures/searchable.json?_search=te*+AND+do*&_searchmode=raw-expected_rows3]`,
with this error:

```
    def test_searchable(app_client, path, expected_rows):
        response = app_client.get(path)
>       assert expected_rows == response.json["rows"]
E       AssertionError: assert [[1, 'barry c...sel', 'puma']] == []
E         Left contains 2 more items, first extra item: [1, 'barry cat', 'terry dog', 'panther']
E         Full diff:
E         + []
E         - [[1, 'barry cat', 'terry dog', 'panther'],
E         - [2, 'terry dog', 'sara weasel', 'puma']]
```

The issue was that the version of sqlite3 built inside the Docker container was built with FTS3 and FTS4 enabled, but without the `SQLITE_ENABLE_FTS3_PARENTHESIS` compile option passed, which adds support for using `AND` and `NOT` within `match` expressions (see https://sqlite.org/fts3.html#compiling_and_enabling_fts3_and_fts4 and https://www.sqlite.org/compile.html). 
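To make the failure mode concrete, here is a minimal sketch (not part of this patch) of the behavior the option controls:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE VIRTUAL TABLE searchable USING fts4(text1, text2)")
conn.execute("INSERT INTO searchable VALUES ('barry cat', 'terry dog')")
rows = conn.execute(
    "SELECT * FROM searchable WHERE searchable MATCH 'te* AND do*'"
).fetchall()
# With SQLITE_ENABLE_FTS3_PARENTHESIS compiled in, AND is an operator and
# this returns the row; without it, 'AND' is just another literal search term.
print(rows)
```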
Without this, the `AND` used in the search in this test was being interpreted as a literal string, and so no matches were found. Adding this compile option fixes this.

Thanks, @bobwhitelock
---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index aba701ab..f4b14146 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,7 +7,7 @@ RUN apt update \
 
 RUN wget "https://www.sqlite.org/2020/sqlite-autoconf-3310100.tar.gz" && tar xzf sqlite-autoconf-3310100.tar.gz \
-    && cd sqlite-autoconf-3310100 && ./configure --disable-static --enable-fts5 --enable-json1 CFLAGS="-g -O2 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_JSON1" \
+    && cd sqlite-autoconf-3310100 && ./configure --disable-static --enable-fts5 --enable-json1 CFLAGS="-g -O2 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS -DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_JSON1" \
     && make && make install
 
 RUN wget "http://www.gaia-gis.it/gaia-sins/freexl-sources/freexl-1.0.5.tar.gz" && tar zxf freexl-1.0.5.tar.gz \

From a1bcd2fbe5e47bb431045f65eeceb5eb3a6718d5 Mon Sep 17 00:00:00 2001
From: Jean-Baptiste Pressac 
Date: Wed, 10 Mar 2021 19:26:39 +0100
Subject: [PATCH 0411/1705] Minor typo in IP address (#1256)

127.0.01 replaced by 127.0.0.1
---
 docs/deploying.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/deploying.rst b/docs/deploying.rst
index 0f892f83..48261b59 100644
--- a/docs/deploying.rst
+++ b/docs/deploying.rst
@@ -67,7 +67,7 @@ You can start the Datasette process running using the following::
 
 You can confirm that Datasette is running on port 8000 like so::
 
-    curl 127.0.01:8000/-/versions.json
+    curl 127.0.0.1:8000/-/versions.json
     # Should output JSON showing the installed version
 
 Datasette will not be accessible from outside the server because it is listening on ``127.0.0.1``. You can expose it by instead listening on ``0.0.0.0``, but a better way is to set up a proxy such as ``nginx``.

From 8e18c7943181f228ce5ebcea48deb59ce50bee1f Mon Sep 17 00:00:00 2001
From: Konstantin Baikov <4488943+kbaikov@users.noreply.github.com>
Date: Thu, 11 Mar 2021 17:15:49 +0100
Subject: [PATCH 0412/1705] Use context manager instead of plain open (#1211)

Using open as a context manager closes the files after usage.

When the object is already a pathlib.Path I used the read_text / write_text functions.

In some cases pathlib.Path.open was used as a context manager; it is basically the same as the builtin open.

Thanks, Konstantin Baikov! 
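The idiom in question, roughly (a sketch rather than lines from the diff below):

```python
from pathlib import Path

# Instead of data = open("settings.json").read(), which leaves the file
# handle open until garbage collection, use a context manager:
with open("settings.json") as fp:
    data = fp.read()

# When a pathlib.Path is already available, read_text() opens and
# closes the file in a single call:
config_dir = Path(".")
data = (config_dir / "settings.json").read_text()
```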
--- datasette/app.py | 13 ++++++------- datasette/cli.py | 13 +++++++------ datasette/publish/cloudrun.py | 6 ++++-- datasette/publish/heroku.py | 17 ++++++++++------- datasette/utils/__init__.py | 6 ++++-- setup.py | 3 ++- tests/conftest.py | 6 ++---- tests/fixtures.py | 5 +++-- tests/test_cli.py | 3 ++- tests/test_cli_serve_get.py | 3 ++- tests/test_docs.py | 8 ++++---- tests/test_package.py | 6 ++++-- tests/test_plugins.py | 3 ++- tests/test_publish_cloudrun.py | 32 ++++++++++++++++++++------------ tests/test_publish_heroku.py | 12 ++++++++---- tests/test_utils.py | 18 ++++++++++++------ update-docs-help.py | 2 +- 17 files changed, 93 insertions(+), 63 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 02d432df..f43ec205 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -212,7 +212,7 @@ class Datasette: and (config_dir / "inspect-data.json").exists() and not inspect_data ): - inspect_data = json.load((config_dir / "inspect-data.json").open()) + inspect_data = json.loads((config_dir / "inspect-data.json").read_text()) if immutables is None: immutable_filenames = [i["file"] for i in inspect_data.values()] immutables = [ @@ -269,7 +269,7 @@ class Datasette: if config_dir and (config_dir / "config.json").exists(): raise StartupError("config.json should be renamed to settings.json") if config_dir and (config_dir / "settings.json").exists() and not config: - config = json.load((config_dir / "settings.json").open()) + config = json.loads((config_dir / "settings.json").read_text()) self._settings = dict(DEFAULT_SETTINGS, **(config or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note @@ -450,11 +450,10 @@ class Datasette: def app_css_hash(self): if not hasattr(self, "_app_css_hash"): - self._app_css_hash = hashlib.sha1( - open(os.path.join(str(app_root), "datasette/static/app.css")) - .read() - .encode("utf8") - ).hexdigest()[:6] + with open(os.path.join(str(app_root), "datasette/static/app.css")) as fp: + self._app_css_hash = hashlib.sha1(fp.read().encode("utf8")).hexdigest()[ + :6 + ] return self._app_css_hash async def get_canned_queries(self, database_name, actor): diff --git a/datasette/cli.py b/datasette/cli.py index 96a41740..2fa039a0 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -125,13 +125,13 @@ def cli(): @sqlite_extensions def inspect(files, inspect_file, sqlite_extensions): app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions) - if inspect_file == "-": - out = sys.stdout - else: - out = open(inspect_file, "w") loop = asyncio.get_event_loop() inspect_data = loop.run_until_complete(inspect_(files, sqlite_extensions)) - out.write(json.dumps(inspect_data, indent=2)) + if inspect_file == "-": + sys.stdout.write(json.dumps(inspect_data, indent=2)) + else: + with open(inspect_file, "w") as fp: + fp.write(json.dumps(inspect_data, indent=2)) async def inspect_(files, sqlite_extensions): @@ -475,7 +475,8 @@ def serve( inspect_data = None if inspect_file: - inspect_data = json.load(open(inspect_file)) + with open(inspect_file) as fp: + inspect_data = json.load(fp) metadata_data = None if metadata: diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 7f9e89e2..bad223a1 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -141,9 +141,11 @@ def publish_subcommand(publish): if show_files: if os.path.exists("metadata.json"): print("=== metadata.json ===\n") - print(open("metadata.json").read()) + with open("metadata.json") as fp: 
+ print(fp.read()) print("\n==== Dockerfile ====\n") - print(open("Dockerfile").read()) + with open("Dockerfile") as fp: + print(fp.read()) print("\n====================\n") image_id = f"gcr.io/{project}/{name}" diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index c0c70e12..19fe3fbe 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -171,9 +171,11 @@ def temporary_heroku_directory( os.chdir(tmp.name) if metadata_content: - open("metadata.json", "w").write(json.dumps(metadata_content, indent=2)) + with open("metadata.json", "w") as fp: + fp.write(json.dumps(metadata_content, indent=2)) - open("runtime.txt", "w").write("python-3.8.7") + with open("runtime.txt", "w") as fp: + fp.write("python-3.8.7") if branch: install = [ @@ -182,11 +184,11 @@ def temporary_heroku_directory( else: install = ["datasette"] + list(install) - open("requirements.txt", "w").write("\n".join(install)) + with open("requirements.txt", "w") as fp: + fp.write("\n".join(install)) os.mkdir("bin") - open("bin/post_compile", "w").write( - "datasette inspect --inspect-file inspect-data.json" - ) + with open("bin/post_compile", "w") as fp: + fp.write("datasette inspect --inspect-file inspect-data.json") extras = [] if template_dir: @@ -218,7 +220,8 @@ def temporary_heroku_directory( procfile_cmd = "web: datasette serve --host 0.0.0.0 {quoted_files} --cors --port $PORT --inspect-file inspect-data.json {extras}".format( quoted_files=quoted_files, extras=" ".join(extras) ) - open("Procfile", "w").write(procfile_cmd) + with open("Procfile", "w") as fp: + fp.write(procfile_cmd) for path, filename in zip(file_paths, file_names): link_or_copy(path, os.path.join(tmp.name, filename)) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 47ca0551..1fedb69c 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -428,8 +428,10 @@ def temporary_docker_directory( ) os.chdir(datasette_dir) if metadata_content: - open("metadata.json", "w").write(json.dumps(metadata_content, indent=2)) - open("Dockerfile", "w").write(dockerfile) + with open("metadata.json", "w") as fp: + fp.write(json.dumps(metadata_content, indent=2)) + with open("Dockerfile", "w") as fp: + fp.write(dockerfile) for path, filename in zip(file_paths, file_names): link_or_copy(path, os.path.join(datasette_dir, filename)) if template_dir: diff --git a/setup.py b/setup.py index 15ee63fe..3540e30a 100644 --- a/setup.py +++ b/setup.py @@ -17,7 +17,8 @@ def get_version(): os.path.dirname(os.path.abspath(__file__)), "datasette", "version.py" ) g = {} - exec(open(path).read(), g) + with open(path) as fp: + exec(fp.read(), g) return g["__version__"] diff --git a/tests/conftest.py b/tests/conftest.py index b00ea006..ad3eb9f1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -75,10 +75,8 @@ def check_permission_actions_are_documented(): from datasette.plugins import pm content = ( - (pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst") - .open() - .read() - ) + pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst" + ).read_text() permissions_re = re.compile(r"\.\. 
_permissions_([^\s:]+):") documented_permission_actions = set(permissions_re.findall(content)).union( UNDOCUMENTED_PERMISSIONS diff --git a/tests/fixtures.py b/tests/fixtures.py index 30113ff2..2fd8e9cb 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -789,7 +789,8 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename): conn.executescript(GENERATED_COLUMNS_SQL) print(f"Test tables written to {db_filename}") if metadata: - open(metadata, "w").write(json.dumps(METADATA, indent=4)) + with open(metadata, "w") as fp: + fp.write(json.dumps(METADATA, indent=4)) print(f"- metadata written to {metadata}") if plugins_path: path = pathlib.Path(plugins_path) @@ -798,7 +799,7 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename): test_plugins = pathlib.Path(__file__).parent / "plugins" for filepath in test_plugins.glob("*.py"): newpath = path / filepath.name - newpath.write_text(filepath.open().read()) + newpath.write_text(filepath.read_text()) print(f" Wrote plugin: {newpath}") if extra_db_filename: if pathlib.Path(extra_db_filename).exists(): diff --git a/tests/test_cli.py b/tests/test_cli.py index 8ddd32f6..e094ccb6 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -49,7 +49,8 @@ def test_inspect_cli_writes_to_file(app_client): cli, ["inspect", "fixtures.db", "--inspect-file", "foo.json"] ) assert 0 == result.exit_code, result.output - data = json.load(open("foo.json")) + with open("foo.json") as fp: + data = json.load(fp) assert ["fixtures"] == list(data.keys()) diff --git a/tests/test_cli_serve_get.py b/tests/test_cli_serve_get.py index aaa692e5..90fbfe3b 100644 --- a/tests/test_cli_serve_get.py +++ b/tests/test_cli_serve_get.py @@ -14,7 +14,8 @@ def test_serve_with_get(tmp_path_factory): @hookimpl def startup(datasette): - open("{}", "w").write("hello") + with open("{}", "w") as fp: + fp.write("hello") """.format( str(plugins_dir / "hello.txt") ), diff --git a/tests/test_docs.py b/tests/test_docs.py index 44b0810a..efd267b9 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -19,13 +19,13 @@ def get_headings(content, underline="-"): def get_labels(filename): - content = (docs_path / filename).open().read() + content = (docs_path / filename).read_text() return set(label_re.findall(content)) @pytest.fixture(scope="session") def settings_headings(): - return get_headings((docs_path / "settings.rst").open().read(), "~") + return get_headings((docs_path / "settings.rst").read_text(), "~") @pytest.mark.parametrize("setting", app.SETTINGS) @@ -43,7 +43,7 @@ def test_settings_are_documented(settings_headings, setting): ), ) def test_help_includes(name, filename): - expected = open(str(docs_path / filename)).read() + expected = (docs_path / filename).read_text() runner = CliRunner() result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88) actual = f"$ datasette {name} --help\n\n{result.output}" @@ -55,7 +55,7 @@ def test_help_includes(name, filename): @pytest.fixture(scope="session") def plugin_hooks_content(): - return (docs_path / "plugin_hooks.rst").open().read() + return (docs_path / "plugin_hooks.rst").read_text() @pytest.mark.parametrize( diff --git a/tests/test_package.py b/tests/test_package.py index 3248b3a4..bb939643 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -32,7 +32,8 @@ def test_package(mock_call, mock_which): capture = CaptureDockerfile() mock_call.side_effect = capture with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + 
fp.write("data") result = runner.invoke(cli.cli, ["package", "test.db", "--secret", "sekrit"]) assert 0 == result.exit_code mock_call.assert_has_calls([mock.call(["docker", "build", "."])]) @@ -47,7 +48,8 @@ def test_package_with_port(mock_call, mock_which): mock_call.side_effect = capture runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["package", "test.db", "-p", "8080", "--secret", "sekrit"] ) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 715c7c17..ee6f1efa 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -225,7 +225,8 @@ def test_plugin_config_env_from_list(app_client): def test_plugin_config_file(app_client): - open(TEMP_PLUGIN_SECRET_FILE, "w").write("FROM_FILE") + with open(TEMP_PLUGIN_SECRET_FILE, "w") as fp: + fp.write("FROM_FILE") assert {"foo": "FROM_FILE"} == app_client.ds.plugin_config("file-plugin") # Ensure secrets aren't visible in /-/metadata.json metadata = app_client.get("/-/metadata.json") diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index 2ef90705..7881ebae 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -11,7 +11,8 @@ def test_publish_cloudrun_requires_gcloud(mock_which): mock_which.return_value = False runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"]) assert result.exit_code == 1 assert "Publishing to Google Cloud requires gcloud" in result.output @@ -40,7 +41,8 @@ def test_publish_cloudrun_prompts_for_service( mock_which.return_value = True runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["publish", "cloudrun", "test.db"], input="input-service" ) @@ -81,7 +83,8 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which): mock_which.return_value = True runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"] ) @@ -120,7 +123,8 @@ def test_publish_cloudrun_memory( mock_which.return_value = True runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["publish", "cloudrun", "test.db", "--service", "test", "--memory", memory], @@ -152,17 +156,19 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which): runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") - open("metadata.yml", "w").write( - textwrap.dedent( - """ + with open("test.db", "w") as fp: + fp.write("data") + with open("metadata.yml", "w") as fp: + fp.write( + textwrap.dedent( + """ title: Hello from metadata YAML plugins: datasette-auth-github: foo: bar """ - ).strip() - ) + ).strip() + ) result = runner.invoke( cli.cli, [ @@ -228,7 +234,8 @@ def test_publish_cloudrun_apt_get_install(mock_call, mock_output, mock_which): runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, [ @@ -295,7 +302,8 @@ def 
test_publish_cloudrun_extra_options( runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, [ diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py index c7a38031..c011ab43 100644 --- a/tests/test_publish_heroku.py +++ b/tests/test_publish_heroku.py @@ -8,7 +8,8 @@ def test_publish_heroku_requires_heroku(mock_which): mock_which.return_value = False runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"]) assert result.exit_code == 1 assert "Publishing to Heroku requires heroku" in result.output @@ -22,7 +23,8 @@ def test_publish_heroku_installs_plugin(mock_call, mock_check_output, mock_which mock_check_output.side_effect = lambda s: {"['heroku', 'plugins']": b""}[repr(s)] runner = CliRunner() with runner.isolated_filesystem(): - open("t.db", "w").write("data") + with open("t.db", "w") as fp: + fp.write("data") result = runner.invoke(cli.cli, ["publish", "heroku", "t.db"], input="y\n") assert 0 != result.exit_code mock_check_output.assert_has_calls( @@ -54,7 +56,8 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which): }[repr(s)] runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, ["publish", "heroku", "test.db", "--tar", "gtar"] ) @@ -88,7 +91,8 @@ def test_publish_heroku_plugin_secrets(mock_call, mock_check_output, mock_which) }[repr(s)] runner = CliRunner() with runner.isolated_filesystem(): - open("test.db", "w").write("data") + with open("test.db", "w") as fp: + fp.write("data") result = runner.invoke( cli.cli, [ diff --git a/tests/test_utils.py b/tests/test_utils.py index 56306339..ecef6f7a 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -232,7 +232,8 @@ def test_to_css_class(s, expected): def test_temporary_docker_directory_uses_hard_link(): with tempfile.TemporaryDirectory() as td: os.chdir(td) - open("hello", "w").write("world") + with open("hello", "w") as fp: + fp.write("world") # Default usage of this should use symlink with utils.temporary_docker_directory( files=["hello"], @@ -249,7 +250,8 @@ def test_temporary_docker_directory_uses_hard_link(): secret="secret", ) as temp_docker: hello = os.path.join(temp_docker, "hello") - assert "world" == open(hello).read() + with open(hello) as fp: + assert "world" == fp.read() # It should be a hard link assert 2 == os.stat(hello).st_nlink @@ -260,7 +262,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link): mock_link.side_effect = OSError with tempfile.TemporaryDirectory() as td: os.chdir(td) - open("hello", "w").write("world") + with open("hello", "w") as fp: + fp.write("world") # Default usage of this should use symlink with utils.temporary_docker_directory( files=["hello"], @@ -277,7 +280,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link): secret=None, ) as temp_docker: hello = os.path.join(temp_docker, "hello") - assert "world" == open(hello).read() + with open(hello) as fp: + assert "world" == fp.read() # It should be a copy, not a hard link assert 1 == os.stat(hello).st_nlink @@ -285,7 +289,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link): def test_temporary_docker_directory_quotes_args(): with 
tempfile.TemporaryDirectory() as td:
         os.chdir(td)
-        open("hello", "w").write("world")
+        with open("hello", "w") as fp:
+            fp.write("world")
         with utils.temporary_docker_directory(
             files=["hello"],
             name="t",
@@ -301,7 +306,8 @@ def test_temporary_docker_directory_quotes_args():
             secret="secret",
         ) as temp_docker:
             df = os.path.join(temp_docker, "Dockerfile")
-            df_contents = open(df).read()
+            with open(df) as fp:
+                df_contents = fp.read()
             assert "'$PWD'" in df_contents
             assert "'--$HOME'" in df_contents
             assert "ENV DATASETTE_SECRET 'secret'" in df_contents

diff --git a/update-docs-help.py b/update-docs-help.py
index 3a192575..292d1dcd 100644
--- a/update-docs-help.py
+++ b/update-docs-help.py
@@ -18,7 +18,7 @@ def update_help_includes():
         result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88)
         actual = f"$ datasette {name} --help\n\n{result.output}"
         actual = actual.replace("Usage: cli ", "Usage: datasette ")
-        open(docs_path / filename, "w").write(actual)
+        (docs_path / filename).write_text(actual)


 if __name__ == "__main__":

From c4f1ec7f33fd7d5b93f0f895dafb5351cc3bfc5b Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 20 Mar 2021 14:32:23 -0700
Subject: [PATCH 0413/1705] Documentation for Response.asgi_send(), closes #1266

---
 docs/internals.rst | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/docs/internals.rst b/docs/internals.rst
index e3bb83fd..18032406 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -138,6 +138,28 @@ Each of these responses will use the correct corresponding content-type - ``text

 Each of the helper methods take optional ``status=`` and ``headers=`` arguments, documented above.

+.. _internals_response_asgi_send:
+
+Returning a response with .asgi_send(send)
+------------------------------------------
+
+
+In most cases you will return ``Response`` objects from your own view functions. You can also use a ``Response`` instance to respond at a lower level via ASGI, for example if you are writing code that uses the :ref:`plugin_asgi_wrapper` hook.
+
+Create a ``Response`` object and then use ``await response.asgi_send(send)``, passing the ASGI ``send`` function. For example:
+
+.. code-block:: python
+
+    async def require_authorization(scope, receive, send):
+        response = Response.text(
+            "401 Authorization Required",
+            headers={
+                "www-authenticate": 'Basic realm="Datasette", charset="UTF-8"'
+            },
+            status=401,
+        )
+        await response.asgi_send(send)
+
..
_internals_response_set_cookie: Setting cookies with response.set_cookie() From 6ad544df5e6bd027a8e27317041e6168aee07459 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 23 Mar 2021 09:19:41 -0700 Subject: [PATCH 0414/1705] Fixed master -> main in a bunch of places, mainly docs --- datasette/cli.py | 2 +- datasette/publish/common.py | 2 +- datasette/templates/patterns.html | 16 ++++++++-------- docs/contributing.rst | 2 +- docs/custom_templates.rst | 2 +- docs/datasette-package-help.txt | 2 +- docs/datasette-publish-cloudrun-help.txt | 2 +- docs/datasette-publish-heroku-help.txt | 2 +- docs/plugin_hooks.rst | 4 ++-- docs/publish.rst | 4 ++-- docs/spatialite.rst | 2 +- tests/fixtures.py | 4 ++-- tests/test_html.py | 9 ++++----- 13 files changed, 26 insertions(+), 27 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 2fa039a0..42b5c115 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -191,7 +191,7 @@ def plugins(all, plugins_dir): help="Path to JSON/YAML file containing metadata to publish", ) @click.option("--extra-options", help="Extra options to pass to datasette serve") -@click.option("--branch", help="Install datasette from a GitHub branch e.g. master") +@click.option("--branch", help="Install datasette from a GitHub branch e.g. main") @click.option( "--template-dir", type=click.Path(exists=True, file_okay=False, dir_okay=True), diff --git a/datasette/publish/common.py b/datasette/publish/common.py index b6570290..29665eb3 100644 --- a/datasette/publish/common.py +++ b/datasette/publish/common.py @@ -19,7 +19,7 @@ def add_common_publish_arguments_and_options(subcommand): "--extra-options", help="Extra options to pass to datasette serve" ), click.option( - "--branch", help="Install datasette from a GitHub branch e.g. master" + "--branch", help="Install datasette from a GitHub branch e.g. main" ), click.option( "--template-dir", diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html index 984c1bf6..3f9b5a16 100644 --- a/datasette/templates/patterns.html +++ b/datasette/templates/patterns.html @@ -70,10 +70,10 @@

     Data license:
-        <a href="https://github.com/simonw/datasette/blob/master/LICENSE">Apache License 2.0</a>
+        <a href="https://github.com/simonw/datasette/blob/main/LICENSE">Apache License 2.0</a>
     ·
     Data source:
-        <a href="https://github.com/simonw/datasette/blob/master/tests/fixtures.py">tests/fixtures.py</a>
+        <a href="https://github.com/simonw/datasette/blob/main/tests/fixtures.py">tests/fixtures.py</a>
     ·
     About:
@@ -118,10 +118,10 @@

     Data license:
-        <a href="https://github.com/simonw/datasette/blob/master/LICENSE">Apache License 2.0</a>
+        <a href="https://github.com/simonw/datasette/blob/main/LICENSE">Apache License 2.0</a>
     ·
     Data source:
-        <a href="https://github.com/simonw/datasette/blob/master/tests/fixtures.py">tests/fixtures.py</a>
+        <a href="https://github.com/simonw/datasette/blob/main/tests/fixtures.py">tests/fixtures.py</a>
     ·
     About:
@@ -177,10 +177,10 @@

     Data license:
-        <a href="https://github.com/simonw/datasette/blob/master/LICENSE">Apache License 2.0</a>
+        <a href="https://github.com/simonw/datasette/blob/main/LICENSE">Apache License 2.0</a>
     ·
     Data source:
-        <a href="https://github.com/simonw/datasette/blob/master/tests/fixtures.py">tests/fixtures.py</a>
+        <a href="https://github.com/simonw/datasette/blob/main/tests/fixtures.py">tests/fixtures.py</a>
     ·
     About:
@@ -478,10 +478,10 @@

    Powered by Datasette · Data license: - Apache License 2.0 + Apache License 2.0 · Data source: - + tests/fixtures.py · About: diff --git a/docs/contributing.rst b/docs/contributing.rst index 2cf641fd..7e16280b 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -10,7 +10,7 @@ This document describes how to contribute to Datasette core. You can also contri General guidelines ------------------ -* **master should always be releasable**. Incomplete features should live in branches. This ensures that any small bug fixes can be quickly released. +* **main should always be releasable**. Incomplete features should live in branches. This ensures that any small bug fixes can be quickly released. * **The ideal commit** should bundle together the implementation, unit tests and associated documentation updates. The commit message should link to an associated issue. * **New plugin hooks** should only be shipped if accompanied by a separate release of a non-demo plugin that uses them. diff --git a/docs/custom_templates.rst b/docs/custom_templates.rst index a7236873..efb5b842 100644 --- a/docs/custom_templates.rst +++ b/docs/custom_templates.rst @@ -303,7 +303,7 @@ from the default template. The ``_table.html`` template is included by both the row and the table pages, and a list of rows. The default ``_table.html`` template renders them as an -HTML template and `can be seen here `_. +HTML template and `can be seen here `_. You can provide a custom template that applies to all of your databases and tables, or you can provide custom templates for specific tables using the diff --git a/docs/datasette-package-help.txt b/docs/datasette-package-help.txt index ab5cf8a3..5f5ce070 100644 --- a/docs/datasette-package-help.txt +++ b/docs/datasette-package-help.txt @@ -10,7 +10,7 @@ Options: -m, --metadata FILENAME Path to JSON/YAML file containing metadata to publish --extra-options TEXT Extra options to pass to datasette serve - --branch TEXT Install datasette from a GitHub branch e.g. master + --branch TEXT Install datasette from a GitHub branch e.g. main --template-dir DIRECTORY Path to directory containing custom templates --plugins-dir DIRECTORY Path to directory containing custom plugins --static MOUNT:DIRECTORY Serve static files from this directory at /MOUNT/... diff --git a/docs/datasette-publish-cloudrun-help.txt b/docs/datasette-publish-cloudrun-help.txt index 8cf293d9..c706d921 100644 --- a/docs/datasette-publish-cloudrun-help.txt +++ b/docs/datasette-publish-cloudrun-help.txt @@ -5,7 +5,7 @@ Usage: datasette publish cloudrun [OPTIONS] [FILES]... Options: -m, --metadata FILENAME Path to JSON/YAML file containing metadata to publish --extra-options TEXT Extra options to pass to datasette serve - --branch TEXT Install datasette from a GitHub branch e.g. master + --branch TEXT Install datasette from a GitHub branch e.g. main --template-dir DIRECTORY Path to directory containing custom templates --plugins-dir DIRECTORY Path to directory containing custom plugins --static MOUNT:DIRECTORY Serve static files from this directory at /MOUNT/... diff --git a/docs/datasette-publish-heroku-help.txt b/docs/datasette-publish-heroku-help.txt index 991bd8f4..c4b852de 100644 --- a/docs/datasette-publish-heroku-help.txt +++ b/docs/datasette-publish-heroku-help.txt @@ -5,7 +5,7 @@ Usage: datasette publish heroku [OPTIONS] [FILES]... 
Options: -m, --metadata FILENAME Path to JSON/YAML file containing metadata to publish --extra-options TEXT Extra options to pass to datasette serve - --branch TEXT Install datasette from a GitHub branch e.g. master + --branch TEXT Install datasette from a GitHub branch e.g. main --template-dir DIRECTORY Path to directory containing custom templates --plugins-dir DIRECTORY Path to directory containing custom plugins --static MOUNT:DIRECTORY Serve static files from this directory at /MOUNT/... diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 23e57278..0a176add 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -304,7 +304,7 @@ publish_subcommand(publish) This hook allows you to create new providers for the ``datasette publish`` command. Datasette uses this hook internally to implement the default ``now`` and ``heroku`` subcommands, so you can read -`their source `_ +`their source `_ to see examples of this hook in action. Let's say you want to build a plugin that adds a ``datasette publish my_hosting_provider --api_key=xxx mydatabase.db`` publish command. Your implementation would start like this: @@ -641,7 +641,7 @@ Each Facet subclass implements a new type of facet operation. The class should l return facet_results, facets_timed_out -See `datasette/facets.py `__ for examples of how these classes can work. +See `datasette/facets.py `__ for examples of how these classes can work. The plugin hook can then be used to register the new facet class like this: diff --git a/docs/publish.rst b/docs/publish.rst index d5015e21..780933fc 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -78,7 +78,7 @@ Publishing to Vercel pip install datasette-publish-vercel datasette publish vercel mydatabase.db --project my-database-project -Not every feature is supported: consult the `datasette-publish-vercel README `__ for more details. +Not every feature is supported: consult the `datasette-publish-vercel README `__ for more details. .. _publish_fly: @@ -92,7 +92,7 @@ Publishing to Fly pip install datasette-publish-fly datasette publish fly mydatabase.db -Consult the `datasette-publish-fly README `__ for more details. +Consult the `datasette-publish-fly README `__ for more details. .. _publish_custom_metadata_and_plugins: diff --git a/docs/spatialite.rst b/docs/spatialite.rst index 0871d72d..234d97e5 100644 --- a/docs/spatialite.rst +++ b/docs/spatialite.rst @@ -53,7 +53,7 @@ If you are unsure of the location of the module, try running ``locate mod_spatia Building SpatiaLite from source ------------------------------- -The packaged versions of SpatiaLite usually provide SpatiaLite 4.3.0a. For an example of how to build the most recent unstable version, 4.4.0-RC0 (which includes the powerful `VirtualKNN module `_), take a look at the `Datasette Dockerfile `_. +The packaged versions of SpatiaLite usually provide SpatiaLite 4.3.0a. For an example of how to build the most recent unstable version, 4.4.0-RC0 (which includes the powerful `VirtualKNN module `_), take a look at the `Datasette Dockerfile `_. Spatial indexing latitude/longitude columns =========================================== diff --git a/tests/fixtures.py b/tests/fixtures.py index 2fd8e9cb..0a721d3a 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -283,9 +283,9 @@ METADATA = { "title": "Datasette Fixtures", "description_html": 'An example SQLite database demonstrating Datasette. 
Sign in as root user', "license": "Apache License 2.0", - "license_url": "https://github.com/simonw/datasette/blob/master/LICENSE", + "license_url": "https://github.com/simonw/datasette/blob/main/LICENSE", "source": "tests/fixtures.py", - "source_url": "https://github.com/simonw/datasette/blob/master/tests/fixtures.py", + "source_url": "https://github.com/simonw/datasette/blob/main/tests/fixtures.py", "about": "About Datasette", "about_url": "https://github.com/simonw/datasette", "extra_css_urls": ["/static/extra-css-urls.css"], diff --git a/tests/test_html.py b/tests/test_html.py index 3482ec35..9e86ebc2 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1093,12 +1093,11 @@ def assert_footer_links(soup): assert "About Datasette" == about_link.text.strip() assert "https://datasette.io/" == datasette_link["href"] assert ( - "https://github.com/simonw/datasette/blob/master/tests/fixtures.py" + "https://github.com/simonw/datasette/blob/main/tests/fixtures.py" == source_link["href"] ) assert ( - "https://github.com/simonw/datasette/blob/master/LICENSE" - == license_link["href"] + "https://github.com/simonw/datasette/blob/main/LICENSE" == license_link["href"] ) assert "https://github.com/simonw/datasette" == about_link["href"] @@ -1513,8 +1512,8 @@ def test_base_url_config(app_client_base_url_prefix, path): and href not in { "https://datasette.io/", - "https://github.com/simonw/datasette/blob/master/LICENSE", - "https://github.com/simonw/datasette/blob/master/tests/fixtures.py", + "https://github.com/simonw/datasette/blob/main/LICENSE", + "https://github.com/simonw/datasette/blob/main/tests/fixtures.py", "/login-as-root", # Only used for the latest.datasette.io demo } and not href.startswith("https://plugin-example.datasette.io/") From 5fd02890650db790b2ffdb90eb9f78f8e0639c37 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 26 Mar 2021 21:27:40 -0700 Subject: [PATCH 0415/1705] Build Dockerfile with SpatiaLite 5, refs #1249 --- .dockerignore | 2 ++ .github/workflows/publish.yml | 13 +++++---- Dockerfile | 55 ++++++++++++----------------------- 3 files changed, 29 insertions(+), 41 deletions(-) diff --git a/.dockerignore b/.dockerignore index 490f509e..5078bf47 100644 --- a/.dockerignore +++ b/.dockerignore @@ -9,3 +9,5 @@ build dist scratchpad venv +*.db +*.sqlite diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index c1909bbe..a3b29dd7 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -66,8 +66,11 @@ jobs: DOCKER_USER: ${{ secrets.DOCKER_USER }} DOCKER_PASS: ${{ secrets.DOCKER_PASS }} run: |- - docker login -u $DOCKER_USER -p $DOCKER_PASS - export REPO=datasetteproject/datasette - docker build -f Dockerfile -t $REPO:${GITHUB_REF#refs/tags/} . - docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest - docker push $REPO + sleep 60 # Give PyPI time to make the new release available + docker login -u $DOCKER_USER -p $DOCKER_PASS + export REPO=datasetteproject/datasette + docker build -f Dockerfile \ + -t $REPO:${GITHUB_REF#refs/tags/} \ + --build-arg VERSION=${GITHUB_REF#refs/tags/} . 
+ docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest + docker push $REPO diff --git a/Dockerfile b/Dockerfile index f4b14146..8193700d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,42 +1,25 @@ -FROM python:3.7.10-slim-stretch as build +FROM python:3.9.2-slim-buster as build -# Setup build dependencies -RUN apt update \ -&& apt install -y python3-dev build-essential wget libxml2-dev libproj-dev libgeos-dev libsqlite3-dev zlib1g-dev pkg-config git \ - && apt clean +# Version of Datasette to install, e.g. 0.55 +# docker build . -t datasette --build-arg VERSION=0.55 +ARG VERSION +# software-properties-common provides add-apt-repository +# which we need in order to install a more recent release +# of libsqlite3-mod-spatialite from the sid distribution +RUN apt-get update && \ + apt-get -y --no-install-recommends install software-properties-common && \ + add-apt-repository "deb http://httpredir.debian.org/debian sid main" && \ + apt-get update && \ + apt-get -t sid install -y --no-install-recommends libsqlite3-mod-spatialite && \ + apt-get remove -y software-properties-common && \ + apt clean && \ + rm -rf /var/lib/apt && \ + rm -rf /var/lib/dpkg -RUN wget "https://www.sqlite.org/2020/sqlite-autoconf-3310100.tar.gz" && tar xzf sqlite-autoconf-3310100.tar.gz \ - && cd sqlite-autoconf-3310100 && ./configure --disable-static --enable-fts5 --enable-json1 CFLAGS="-g -O2 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS -DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_JSON1" \ - && make && make install - -RUN wget "http://www.gaia-gis.it/gaia-sins/freexl-sources/freexl-1.0.5.tar.gz" && tar zxf freexl-1.0.5.tar.gz \ - && cd freexl-1.0.5 && ./configure && make && make install - -RUN wget "http://www.gaia-gis.it/gaia-sins/libspatialite-sources/libspatialite-4.4.0-RC0.tar.gz" && tar zxf libspatialite-4.4.0-RC0.tar.gz \ - && cd libspatialite-4.4.0-RC0 && ./configure && make && make install - -RUN wget "http://www.gaia-gis.it/gaia-sins/readosm-sources/readosm-1.1.0.tar.gz" && tar zxf readosm-1.1.0.tar.gz && cd readosm-1.1.0 && ./configure && make && make install - -RUN wget "http://www.gaia-gis.it/gaia-sins/spatialite-tools-sources/spatialite-tools-4.4.0-RC0.tar.gz" && tar zxf spatialite-tools-4.4.0-RC0.tar.gz \ - && cd spatialite-tools-4.4.0-RC0 && ./configure && make && make install - - -# Add local code to the image instead of fetching from pypi. -COPY . 
/datasette - -RUN pip install /datasette - -FROM python:3.7.10-slim-stretch - -# Copy python dependencies and spatialite libraries -COPY --from=build /usr/local/lib/ /usr/local/lib/ -# Copy executables -COPY --from=build /usr/local/bin /usr/local/bin -# Copy spatial extensions -COPY --from=build /usr/lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu - -ENV LD_LIBRARY_PATH=/usr/local/lib +RUN pip install https://github.com/simonw/datasette/archive/refs/tags/${VERSION}.zip && \ + find /usr/local/lib -name '__pycache__' | xargs rm -r && \ + rm -rf /root/.cache/pip EXPOSE 8001 CMD ["datasette"] From 8ebdcc916d556f7fb7fc2bbbb56904a6d8e1936c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 26 Mar 2021 21:33:15 -0700 Subject: [PATCH 0416/1705] Remove obsolete note about building SpatiaLite from source, refs #1249 --- docs/spatialite.rst | 5 ----- 1 file changed, 5 deletions(-) diff --git a/docs/spatialite.rst b/docs/spatialite.rst index 234d97e5..985954de 100644 --- a/docs/spatialite.rst +++ b/docs/spatialite.rst @@ -50,11 +50,6 @@ Depending on your distribution, you should be able to run Datasette something li If you are unsure of the location of the module, try running ``locate mod_spatialite`` and see what comes back. -Building SpatiaLite from source -------------------------------- - -The packaged versions of SpatiaLite usually provide SpatiaLite 4.3.0a. For an example of how to build the most recent unstable version, 4.4.0-RC0 (which includes the powerful `VirtualKNN module `_), take a look at the `Datasette Dockerfile `_. - Spatial indexing latitude/longitude columns =========================================== From 3fcfc8513465339ac5f055296cbb67f5262af02b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 27 Mar 2021 09:16:42 -0700 Subject: [PATCH 0417/1705] Fix links in SpatiaLite tutorial, closes #1278 --- docs/spatialite.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/spatialite.rst b/docs/spatialite.rst index 985954de..d1b300b2 100644 --- a/docs/spatialite.rst +++ b/docs/spatialite.rst @@ -88,7 +88,7 @@ In the above example, the resulting index will be called ``idx_museums_point_geo select * from idx_museums_point_geom limit 10; -Here's a live example: `timezones-api.now.sh/timezones/idx_timezones_Geometry `_ +Here's a live example: `timezones-api.datasette.io/timezones/idx_timezones_Geometry `_ +--------+----------------------+----------------------+---------------------+---------------------+ | pkid | xmin | xmax | ymin | ymax | From 48d5e0e6ac8975cfd869d4e8c69c64ca0c65e29e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Mar 2021 16:44:29 -0700 Subject: [PATCH 0418/1705] Fix for no such table: pragma_database_list, refs #1276 --- datasette/database.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 3579cce9..9f3bbddc 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -247,10 +247,12 @@ class Database: return Path(self.path).stat().st_mtime_ns async def attached_databases(self): - results = await self.execute( - "select seq, name, file from pragma_database_list() where seq > 0" - ) - return [AttachedDatabase(*row) for row in results.rows] + # This used to be: + # select seq, name, file from pragma_database_list() where seq > 0 + # But SQLite prior to 3.16.0 doesn't support pragma functions + results = await self.execute("PRAGMA database_list;") + # {'seq': 0, 'name': 'main', 'file': ''} + return [AttachedDatabase(*row) for row in results.rows 
if row["seq"] > 0] async def table_exists(self, table): results = await self.execute( From c96a3826cf50cb347f6a415b56d8105ba6d8dcb0 Mon Sep 17 00:00:00 2001 From: vincent d warmerdam Date: Mon, 29 Mar 2021 02:11:55 +0200 Subject: [PATCH 0419/1705] Added `--app` to fly install command. (#1279) --- docs/publish.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/publish.rst b/docs/publish.rst index 780933fc..cbd18a00 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -90,7 +90,7 @@ Publishing to Fly :: pip install datasette-publish-fly - datasette publish fly mydatabase.db + datasette publish fly mydatabase.db --app="my-app" Consult the `datasette-publish-fly README `__ for more details. From e72397d65b06b019521b6411243687464ac8d8ca Mon Sep 17 00:00:00 2001 From: Bob Whitelock Date: Mon, 29 Mar 2021 01:14:04 +0100 Subject: [PATCH 0420/1705] Add styling to lists within table cells (fixes #1141) (#1252) This overrides the Datasette reset (see https://github.com/simonw/datasette/blob/d0fd833b8cdd97e1b91d0f97a69b494895d82bee/datasette/static/app.css#L35-L38), to add back the default styling of list items displayed within Datasette table cells. --- datasette/static/app.css | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/datasette/static/app.css b/datasette/static/app.css index 9e498ab9..fad11a3a 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -452,6 +452,10 @@ table a:link { margin-left: -10%; font-size: 0.8em; } +.rows-and-columns td ol,ul { + list-style: initial; + list-style-position: inside; +} a.blob-download { display: inline-block; } From f92d823766872a6fd7e76c5249a6b2de1ab0f447 Mon Sep 17 00:00:00 2001 From: Campbell Allen Date: Mon, 29 Mar 2021 01:17:31 +0100 Subject: [PATCH 0421/1705] ensure immutable databses when starting in configuration directory mode with (#1229) * check if immutables is empty list of None * update docs on how to create the inspect-data.json --- datasette/app.py | 2 +- docs/settings.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f43ec205..6a7a6c6d 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -213,7 +213,7 @@ class Datasette: and not inspect_data ): inspect_data = json.loads((config_dir / "inspect-data.json").read_text()) - if immutables is None: + if not immutables: immutable_filenames = [i["file"] for i in inspect_data.values()] immutables = [ f for f in self.files if Path(f).name in immutable_filenames diff --git a/docs/settings.rst b/docs/settings.rst index f2467aa4..b4c8a50e 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -48,7 +48,7 @@ The files that can be included in this directory are as follows. 
All are optiona * ``*.db`` - SQLite database files that will be served by Datasette * ``metadata.json`` - :ref:`metadata` for those databases - ``metadata.yaml`` or ``metadata.yml`` can be used as well -* ``inspect-data.json`` - the result of running ``datasette inspect`` - any database files listed here will be treated as immutable, so they should not be changed while Datasette is running +* ``inspect-data.json`` - the result of running ``datasette inspect *.db --inspect-file=inspect-data.json`` from the configuration directory - any database files listed here will be treated as immutable, so they should not be changed while Datasette is running * ``settings.json`` - settings that would normally be passed using ``--setting`` - here they should be stored as a JSON object of key/value pairs * ``templates/`` - a directory containing :ref:`customization_custom_templates` * ``plugins/`` - a directory containing plugins, see :ref:`writing_plugins_one_off` From d579fcf4f713f98c7365453ce94f36b91ce98c98 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Mar 2021 17:20:55 -0700 Subject: [PATCH 0422/1705] Applied some fixes suggested by @withshubh in #1260 --- datasette/app.py | 4 ++-- tests/plugins/my_plugin.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 6a7a6c6d..ee816426 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -317,7 +317,7 @@ class Datasette: loader=template_loader, autoescape=True, enable_async=True ) self.jinja_env.filters["escape_css_string"] = escape_css_string - self.jinja_env.filters["quote_plus"] = lambda u: urllib.parse.quote_plus(u) + self.jinja_env.filters["quote_plus"] = urllib.parse.quote_plus self.jinja_env.filters["escape_sqlite"] = escape_sqlite self.jinja_env.filters["to_css_class"] = to_css_class # pylint: disable=no-member @@ -767,7 +767,7 @@ class Datasette: hook_renderers = [] # pylint: disable=no-member for hook in pm.hook.register_output_renderer(datasette=self): - if type(hook) == list: + if type(hook) is list: hook_renderers += hook else: hook_renderers.append(hook) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 8d192d28..26d06091 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -196,7 +196,7 @@ def permission_allowed(actor, action): elif action == "this_is_denied": return False elif action == "view-database-download": - return (actor and actor.get("can_download")) or None + return actor.get("can_download") if actor else None @hookimpl From af5a7f1c09f6a902bb2a25e8edf39c7034d2e5de Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Mar 2021 17:41:12 -0700 Subject: [PATCH 0423/1705] Release 0.56 Refs #1005, #1031, #1141, #1229, #1236, #1239, #1246, #1247, #1252, #1266, #1276, #1278 --- datasette/version.py | 2 +- docs/changelog.rst | 18 ++++++++++++++++++ docs/internals.rst | 1 - 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index 78eaa333..4dcf73b0 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.55" +__version__ = "0.56" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index eda87dbf..756badce 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,24 @@ Changelog ========= +.. _v0_56: + +0.56 (2021-03-28) +----------------- + +Documentation improvements, bug fixes and support for SpatiaLite 5. 
+ +- The SQL editor can now be resized by dragging a handle. (:issue:`1236`) +- Fixed a bug with JSON faceting and the ``__arraycontains`` filter caused by tables with spaces in their names. (:issue:`1239`) +- Upgraded ``httpx`` dependency. (:issue:`1005`) +- JSON faceting is now suggested even if a column contains blank strings. (:issue:`1246`) +- New :ref:`datasette.add_memory_database() ` method. (:issue:`1247`) +- The :ref:`Response.asgi_send() ` method is now documented. (:issue:`1266`) +- The official Datasette Docker image now bundles SpatiaLite version 5. (:issue:`1278`) +- Fixed a ``no such table: pragma_database_list`` bug when running Datasette against SQLite versions prior to SQLite 3.16.0. (:issue:`1276`) +- HTML lists displayed in table cells are now styled correctly. Thanks, Bob Whitelock. (:issue:`1141`, `#1252 `__) +- Configuration directory mode now correctly serves immutable databases that are listed in ``inspect-data.json``. Thanks Campbell Allen and Frankie Robertson. (`#1031 `__, `#1229 `__) + .. _v0_55: 0.55 (2021-02-18) diff --git a/docs/internals.rst b/docs/internals.rst index 18032406..72c86083 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -143,7 +143,6 @@ Each of the helper methods take optional ``status=`` and ``headers=`` arguments, Returning a response with .asgi_send(send) ------------------------------------------ - In most cases you will return ``Response`` objects from your own view functions. You can also use a ``Response`` instance to respond at a lower level via ASGI, for example if you are writing code that uses the :ref:`plugin_asgi_wrapper` hook. Create a ``Response`` object and then use ``await response.asgi_send(send)``, passing the ASGI ``send`` function. For example: From 13fd9bdf01451decd55e1cbbd4017c0e5d0522e7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Mar 2021 18:07:49 -0700 Subject: [PATCH 0424/1705] docker push --all-tags, refs #1281 --- .github/workflows/publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index a3b29dd7..ad1e794d 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -73,4 +73,4 @@ jobs: -t $REPO:${GITHUB_REF#refs/tags/} \ --build-arg VERSION=${GITHUB_REF#refs/tags/} . 
docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest - docker push $REPO + docker push --all-tags $REPO From 849c4f06ea766ccdb664eab4e82b80be574a0f03 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Mar 2021 18:35:56 -0700 Subject: [PATCH 0425/1705] Workflow for manually pushing a Docker tag, refs #1281 --- .github/workflows/push_docker_tag.yml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 .github/workflows/push_docker_tag.yml diff --git a/.github/workflows/push_docker_tag.yml b/.github/workflows/push_docker_tag.yml new file mode 100644 index 00000000..02391972 --- /dev/null +++ b/.github/workflows/push_docker_tag.yml @@ -0,0 +1,26 @@ +name: Push specific Docker tag + +on: + workflow_dispatch: + inputs: + version_tag: + description: Tag to build and push + +jobs: + deploy_docker: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Build and push to Docker Hub + env: + DOCKER_USER: ${{ secrets.DOCKER_USER }} + DOCKER_PASS: ${{ secrets.DOCKER_PASS }} + VERSION_TAG: ${{ github.event.inputs.version_tag }} + run: |- + docker login -u $DOCKER_USER -p $DOCKER_PASS + export REPO=datasetteproject/datasette + docker build -f Dockerfile \ + -t $REPO:${VERSION_TAG} \ + --build-arg VERSION=${VERSION_TAG} . + docker tag $REPO:${VERSION_TAG} + docker push $REPO From 8291065b13bf2a4af27d61a971a9ba96aff59417 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Mar 2021 18:39:02 -0700 Subject: [PATCH 0426/1705] Hopeful fix for Docker tag error, refs #1281 --- .github/workflows/push_docker_tag.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/push_docker_tag.yml b/.github/workflows/push_docker_tag.yml index 02391972..9a3969f0 100644 --- a/.github/workflows/push_docker_tag.yml +++ b/.github/workflows/push_docker_tag.yml @@ -22,5 +22,4 @@ jobs: docker build -f Dockerfile \ -t $REPO:${VERSION_TAG} \ --build-arg VERSION=${VERSION_TAG} . - docker tag $REPO:${VERSION_TAG} - docker push $REPO + docker push $REPO:${VERSION_TAG} From 0486303b60ce2784fd2e2ecdbecf304b7d6e6659 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 28 Mar 2021 18:42:42 -0700 Subject: [PATCH 0427/1705] Explicitly push version tag, refs #1281 --- .github/workflows/publish.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index ad1e794d..90fa4505 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -73,4 +73,5 @@ jobs: -t $REPO:${GITHUB_REF#refs/tags/} \ --build-arg VERSION=${GITHUB_REF#refs/tags/} . 
docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest - docker push --all-tags $REPO + docker push $REPO:${VERSION_TAG} + docker push $REPO:latest From 7b1a9a1999eb9326ce8ec830d75ac200e5279c46 Mon Sep 17 00:00:00 2001 From: Marjorie Roswell Date: Mon, 29 Mar 2021 15:57:34 -0400 Subject: [PATCH 0428/1705] Fix little typo (#1282) --- datasette/static/app.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index fad11a3a..4c41ea98 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -97,7 +97,7 @@ p { margin-bottom: 0.75rem; } .context-text { - /* for accessibility and hiden from sight */ + /* for accessibility and hidden from sight */ text-indent: -999em; display: block; width:0; From 87b583a128986982552421d2510e467e74ac5046 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 2 Apr 2021 13:20:51 -0700 Subject: [PATCH 0429/1705] Clearer help text for --reload Immutable databases are not commonly used, but it's useful to clarify that --reload will pick up on changes to metadata. --- README.md | 2 +- datasette/cli.py | 2 +- docs/datasette-serve-help.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index a4fe36c0..4f3c9a94 100644 --- a/README.md +++ b/README.md @@ -66,7 +66,7 @@ Now visiting http://localhost:8001/History/downloads will show you a web interfa allowed. Use 0.0.0.0 to listen to all IPs and allow access from other machines. -p, --port INTEGER Port for server, defaults to 8001 - --reload Automatically reload if database or code change + --reload Automatically reload if code or metadata change detected - useful for development --cors Enable CORS by serving Access-Control-Allow- Origin: * diff --git a/datasette/cli.py b/datasette/cli.py index 42b5c115..71bbc353 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -336,7 +336,7 @@ def uninstall(packages, yes): @click.option( "--reload", is_flag=True, - help="Automatically reload if database or code change detected - useful for development", + help="Automatically reload if code or metadata change detected - useful for development", ) @click.option( "--cors", is_flag=True, help="Enable CORS by serving Access-Control-Allow-Origin: *" diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index f0dab3ea..8f770afb 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -14,7 +14,7 @@ Options: -p, --port INTEGER RANGE Port for server, defaults to 8001. Use -p 0 to automatically assign an available port. - --reload Automatically reload if database or code change detected - + --reload Automatically reload if code or metadata change detected - useful for development --cors Enable CORS by serving Access-Control-Allow-Origin: * From 59ef4a20cba1533bc347378415f4ffcd025f32c8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 2 Apr 2021 13:27:03 -0700 Subject: [PATCH 0430/1705] =?UTF-8?q?=C2=A9=202017-2021?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index dd0f7c62..89009ea9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -51,7 +51,7 @@ master_doc = "index" # General information about the project. 
project = "Datasette" -copyright = "2017-2020, Simon Willison" +copyright = "2017-2021, Simon Willison" author = "Simon Willison" # Disable -- turning into – From 0a7621f96f8ad14da17e7172e8a7bce24ef78966 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 2 Apr 2021 20:42:28 -0700 Subject: [PATCH 0431/1705] Use pytest-xdist to speed up tests (#1290) * Run tests in CI using pytest-xdist * Documentation for pytest-xdist Closes #1289 --- .github/workflows/test.yml | 3 ++- docs/contributing.rst | 18 ++++++++++++++++++ pytest.ini | 2 ++ setup.py | 1 + tests/test_cli_serve_server.py | 3 +++ 5 files changed, 26 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index a1774213..bcb241d3 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -26,4 +26,5 @@ jobs: pip install -e '.[test]' - name: Run tests run: | - pytest + pytest -n auto -m "not serial" + pytest -m "serial" diff --git a/docs/contributing.rst b/docs/contributing.rst index 7e16280b..c3d0989a 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -43,10 +43,28 @@ The next step is to create a virtual environment for your project and use it to That last line does most of the work: ``pip install -e`` means "install this package in a way that allows me to edit the source code in place". The ``.[test]`` option means "use the setup.py in this directory and install the optional testing dependencies as well". +.. _contributing_running_tests: + +Running the tests +----------------- + Once you have done this, you can run the Datasette unit tests from inside your ``datasette/`` directory using `pytest `__ like so:: pytest +You can run the tests faster using multiple CPU cores with `pytest-xdist `__ like this:: + + pytest -n auto -m "not serial" + +``-n auto`` detects the number of available cores automatically. The ``-m "not serial"`` skips tests that don't work well in a parallel test environment. You can run those tests separately like so:: + + pytest -m "serial" + +.. _contributing_using_fixtures: + +Using fixtures +-------------- + To run Datasette itself, type ``datasette``. You're going to need at least one SQLite database. A quick way to get started is to use the fixtures database that Datasette uses for its own tests. 
diff --git a/pytest.ini b/pytest.ini index aa292efc..d702ce5f 100644 --- a/pytest.ini +++ b/pytest.ini @@ -6,3 +6,5 @@ filterwarnings= ignore:Using or importing the ABCs::bs4.element # Python 3.7 PendingDeprecationWarning: Task.current_task() ignore:.*current_task.*:PendingDeprecationWarning +markers = + serial: tests to avoid using with pytest-xdist diff --git a/setup.py b/setup.py index 3540e30a..c67aa6a3 100644 --- a/setup.py +++ b/setup.py @@ -70,6 +70,7 @@ setup( "docs": ["sphinx_rtd_theme", "sphinx-autobuild"], "test": [ "pytest>=5.2.2,<6.3.0", + "pytest-xdist>=2.2.1,<2.3", "pytest-asyncio>=0.10,<0.15", "beautifulsoup4>=4.8.1,<4.10.0", "black==20.8b1", diff --git a/tests/test_cli_serve_server.py b/tests/test_cli_serve_server.py index 6962d2fd..6f5366d1 100644 --- a/tests/test_cli_serve_server.py +++ b/tests/test_cli_serve_server.py @@ -1,6 +1,8 @@ import httpx +import pytest +@pytest.mark.serial def test_serve_localhost_http(ds_localhost_http_server): response = httpx.get("http://localhost:8041/_memory.json") assert { @@ -10,6 +12,7 @@ def test_serve_localhost_http(ds_localhost_http_server): }.items() <= response.json().items() +@pytest.mark.serial def test_serve_localhost_https(ds_localhost_https_server): _, client_cert = ds_localhost_https_server response = httpx.get("https://localhost:8042/_memory.json", verify=client_cert) From 6ed9238178a56da5fb019f37fb1e1e15886be1d1 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 19 Apr 2021 11:18:17 -0700 Subject: [PATCH 0432/1705] Update pytest-asyncio requirement from <0.15,>=0.10 to >=0.10,<0.16 (#1303) Updates the requirements on [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) to permit the latest version. 
- [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) - [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.10.0...v0.15.0) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index c67aa6a3..03457261 100644 --- a/setup.py +++ b/setup.py @@ -71,7 +71,7 @@ setup( "test": [ "pytest>=5.2.2,<6.3.0", "pytest-xdist>=2.2.1,<2.3", - "pytest-asyncio>=0.10,<0.15", + "pytest-asyncio>=0.10,<0.16", "beautifulsoup4>=4.8.1,<4.10.0", "black==20.8b1", "pytest-timeout>=1.4.2,<1.5", From a4bb2abce0764d49d255e5379f9e9c70981834ca Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 23 Apr 2021 23:07:37 -0700 Subject: [PATCH 0433/1705] Show primary key cells in bold without affecting columns called 'link', closes #1308 --- datasette/static/app.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index 4c41ea98..617bd2b1 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -432,7 +432,7 @@ td { vertical-align: top; white-space: pre-wrap; } -td.col-link { +td.type-pk { font-weight: bold; } td em { From 5e60bad40460f68122006ce704cfc163d6076f34 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Thu, 29 Apr 2021 08:47:21 -0700 Subject: [PATCH 0434/1705] Upgrade to GitHub-native Dependabot (#1314) Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- .github/dependabot.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..b969c4c1 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,13 @@ +version: 2 +updates: +- package-ecosystem: pip + directory: "/" + schedule: + interval: daily + time: "13:00" + open-pull-requests-limit: 10 + ignore: + - dependency-name: black + versions: + - 21.4b0 + - 21.4b1 From 1b697539f5b53cec3fe13c0f4ada13ba655c88c7 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Thu, 29 Apr 2021 08:47:49 -0700 Subject: [PATCH 0435/1705] Bump black from 20.8b1 to 21.4b2 (#1313) Bumps [black](https://github.com/psf/black) from 20.8b1 to 21.4b2. 
- [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/master/CHANGES.md) - [Commits](https://github.com/psf/black/commits) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 03457261..6f3d9a1c 100644 --- a/setup.py +++ b/setup.py @@ -73,7 +73,7 @@ setup( "pytest-xdist>=2.2.1,<2.3", "pytest-asyncio>=0.10,<0.16", "beautifulsoup4>=4.8.1,<4.10.0", - "black==20.8b1", + "black==21.4b2", "pytest-timeout>=1.4.2,<1.5", "trustme>=0.7,<0.8", ], From 9b3b7e280ca718254b4ca15d40864297146a85b3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 May 2021 10:19:40 -0700 Subject: [PATCH 0436/1705] Update jinja2 requirement from <2.12.0,>=2.10.3 to >=2.10.3,<3.1.0 (#1324) Updates the requirements on [jinja2](https://github.com/pallets/jinja) to permit the latest version. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/2.10.3...3.0.0) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 6f3d9a1c..124ce29d 100644 --- a/setup.py +++ b/setup.py @@ -47,7 +47,7 @@ setup( "asgiref>=3.2.10,<3.4.0", "click~=7.1.1", "click-default-group~=1.2.2", - "Jinja2>=2.10.3,<2.12.0", + "Jinja2>=2.10.3,<3.1.0", "hupper~=1.9", "httpx>=0.17", "pint~=0.9", From 459259175eddeed727fd8f08dc19a332779a4f6b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Abdussamet=20Ko=C3=A7ak?= Date: Sun, 23 May 2021 02:53:34 +0300 Subject: [PATCH 0437/1705] Fix small typo (#1335) --- docs/settings.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/settings.rst b/docs/settings.rst index b4c8a50e..af8e4406 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -261,7 +261,7 @@ database file to the URL path for every table and query within that database. When combined with far-future expire headers this ensures that queries can be cached forever, safe in the knowledge that any modifications to the database -itself will result in new, uncachcacheed URL paths. +itself will result in new, uncached URL paths. :: From 593d3e8173b45e20ff3c95afb3df7ceb85bf7fef Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 22 May 2021 16:53:56 -0700 Subject: [PATCH 0438/1705] Update aiofiles requirement from <0.7,>=0.4 to >=0.4,<0.8 (#1330) Updates the requirements on [aiofiles](https://github.com/Tinche/aiofiles) to permit the latest version. 
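One consequence of allowing Jinja2 3.x in the range above: Jinja2 3.0 deprecates its re-exports of ``escape`` and ``Markup`` (3.1 removes them entirely), which is exactly what the "Fix Jinja warnings" commits further down address. The forward-compatible import::

    from markupsafe import Markup, escape  # canonical home for both helpers

    # Deprecated in Jinja2 3.0, removed in 3.1:
    # from jinja2 import Markup, escape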
- [Release notes](https://github.com/Tinche/aiofiles/releases) - [Commits](https://github.com/Tinche/aiofiles/compare/v0.4.0...v0.7.0) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 124ce29d..c98cb012 100644 --- a/setup.py +++ b/setup.py @@ -53,7 +53,7 @@ setup( "pint~=0.9", "pluggy~=0.13.0", "uvicorn~=0.11", - "aiofiles>=0.4,<0.7", + "aiofiles>=0.4,<0.8", "janus>=0.4,<0.7", "asgi-csrf>=0.6", "PyYAML~=5.3", From b64d87204612a84663616e075f542499a5d82a03 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 22 May 2021 16:54:24 -0700 Subject: [PATCH 0439/1705] Update itsdangerous requirement from ~=1.1 to >=1.1,<3.0 (#1325) Updates the requirements on [itsdangerous](https://github.com/pallets/itsdangerous) to permit the latest version. - [Release notes](https://github.com/pallets/itsdangerous/releases) - [Changelog](https://github.com/pallets/itsdangerous/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/itsdangerous/compare/1.1.0...2.0.0) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index c98cb012..6072044f 100644 --- a/setup.py +++ b/setup.py @@ -58,7 +58,7 @@ setup( "asgi-csrf>=0.6", "PyYAML~=5.3", "mergedeep>=1.1.1,<1.4.0", - "itsdangerous~=1.1", + "itsdangerous>=1.1,<3.0", "python-baseconv==1.2.2", ], entry_points=""" From 5c3b3ef97eed55895cf48d4a9ee0635c1c4d03b8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 22 May 2021 16:54:48 -0700 Subject: [PATCH 0440/1705] Update click requirement from ~=7.1.1 to >=7.1.1,<8.1.0 (#1323) Updates the requirements on [click](https://github.com/pallets/click) to permit the latest version. - [Release notes](https://github.com/pallets/click/releases) - [Changelog](https://github.com/pallets/click/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/click/compare/7.1.1...8.0.0) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 6072044f..b9f8dd7b 100644 --- a/setup.py +++ b/setup.py @@ -45,7 +45,7 @@ setup( python_requires=">=3.6", install_requires=[ "asgiref>=3.2.10,<3.4.0", - "click~=7.1.1", + "click>=7.1.1,<8.1.0", "click-default-group~=1.2.2", "Jinja2>=2.10.3,<3.1.0", "hupper~=1.9", From 5e9672c9bb33e41686472db4aa427168f9e67dbe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 22 May 2021 16:55:39 -0700 Subject: [PATCH 0441/1705] Bump black from 21.4b2 to 21.5b1 (#1321) Bumps [black](https://github.com/psf/black) from 21.4b2 to 21.5b1. 
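Allowing Click 8.x here has a visible knock-on effect later in this series: Click 8 renders ``IntRange`` bounds directly in ``--help`` output, which is where suffixes like ``[1<=x<=65535]`` in the regenerated help transcripts below come from. A sketch of the behaviour (hypothetical command; Datasette's real options live in ``datasette/cli.py``)::

    import click

    @click.command()
    @click.option("-p", "--port", type=click.IntRange(0, 65535))
    def serve(port):
        """Toy command to show Click 8's range rendering."""

    # Under Click 8, "serve --help" lists the option as:
    #   -p, --port INTEGER RANGE  [0<=x<=65535]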
- [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/commits) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index b9f8dd7b..60a94a5e 100644 --- a/setup.py +++ b/setup.py @@ -73,7 +73,7 @@ setup( "pytest-xdist>=2.2.1,<2.3", "pytest-asyncio>=0.10,<0.16", "beautifulsoup4>=4.8.1,<4.10.0", - "black==21.4b2", + "black==21.5b1", "pytest-timeout>=1.4.2,<1.5", "trustme>=0.7,<0.8", ], From 9789b94da48183dabf105c6419bdcde2634b36a5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 22 May 2021 17:34:33 -0700 Subject: [PATCH 0442/1705] ?_facet_size=100 parameter, closes #1332 --- datasette/facets.py | 16 +++++++++---- docs/facets.rst | 2 ++ docs/json_api.rst | 6 +++++ docs/plugin_hooks.rst | 1 + tests/test_facets.py | 52 +++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 73 insertions(+), 4 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index 01628760..ff6396d7 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -101,6 +101,14 @@ class Facet: # [('_foo', 'bar'), ('_foo', '2'), ('empty', '')] return urllib.parse.parse_qsl(self.request.query_string, keep_blank_values=True) + def get_facet_size(self): + facet_size = self.ds.setting("default_facet_size") + max_returned_rows = self.ds.setting("max_returned_rows") + custom_facet_size = self.request.args.get("_facet_size") + if custom_facet_size and custom_facet_size.isdigit(): + facet_size = int(custom_facet_size) + return min(facet_size, max_returned_rows) + async def suggest(self): return [] @@ -136,7 +144,7 @@ class ColumnFacet(Facet): async def suggest(self): row_count = await self.get_row_count() columns = await self.get_columns(self.sql, self.params) - facet_size = self.ds.setting("default_facet_size") + facet_size = self.get_facet_size() suggested_facets = [] already_enabled = [c["config"]["simple"] for c in self.get_configs()] for column in columns: @@ -186,7 +194,7 @@ class ColumnFacet(Facet): qs_pairs = self.get_querystring_pairs() - facet_size = self.ds.setting("default_facet_size") + facet_size = self.get_facet_size() for source_and_config in self.get_configs(): config = source_and_config["config"] source = source_and_config["source"] @@ -338,7 +346,7 @@ class ArrayFacet(Facet): facet_results = {} facets_timed_out = [] - facet_size = self.ds.setting("default_facet_size") + facet_size = self.get_facet_size() for source_and_config in self.get_configs(): config = source_and_config["config"] source = source_and_config["source"] @@ -449,7 +457,7 @@ class DateFacet(Facet): facet_results = {} facets_timed_out = [] args = dict(self.get_querystring_pairs()) - facet_size = self.ds.setting("default_facet_size") + facet_size = self.get_facet_size() for source_and_config in self.get_configs(): config = source_and_config["config"] source = source_and_config["source"] diff --git a/docs/facets.rst b/docs/facets.rst index 3f2f6879..5061d11c 100644 --- a/docs/facets.rst +++ b/docs/facets.rst @@ -84,6 +84,8 @@ This works for both the HTML interface and the ``.json`` view. When enabled, fac If Datasette detects that a column is a foreign key, the ``"label"`` property will be automatically derived from the detected label column on the referenced table. 
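In practice the new parameter composes with the JSON API like this (a sketch against a local Datasette serving the fixtures database; the response shape matches the tests further down)::

    import httpx

    # Ask for up to 100 values per facet instead of the default 30.
    # get_facet_size() above clamps anything larger than max_returned_rows.
    response = httpx.get(
        "http://localhost:8001/fixtures/facetable.json",
        params={"_facet": "state", "_facet_size": 100},
    )
    state_facets = response.json()["facet_results"]["state"]["results"]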
+The default number of facet results returned is 30, controlled by the :ref:`setting_default_facet_size` setting. You can increase this on an individual page by adding ``?_facet_size=100`` to the query string, up to a maximum of :ref:`setting_max_returned_rows` (which defaults to 1000). + Facets in metadata.json ----------------------- diff --git a/docs/json_api.rst b/docs/json_api.rst index 0f88cb07..9efacf35 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -371,6 +371,12 @@ Special table arguments Pagination by continuation token - pass the token that was returned in the ``"next"`` property by the previous page. +``?_facet=column`` + Facet by column. Can be applied multiple times, see :ref:`facets`. Only works on the default JSON output, not on any of the custom shapes. + +``?_facet_size=100`` + Increase the number of facet results returned for each facet. + ``?_trace=1`` Turns on tracing for this page: SQL queries executed during the request will be gathered and included in the response, either in a new ``"_traces"`` key diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 0a176add..7a1645ec 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -619,6 +619,7 @@ Each Facet subclass implements a new type of facet operation. The class should l # using self.sql and self.params as the starting point facet_results = {} facets_timed_out = [] + facet_size = self.get_facet_size() # Do some calculations here... for column in columns_selected_for_facet: try: diff --git a/tests/test_facets.py b/tests/test_facets.py index 31518682..a1a14e71 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -347,3 +347,55 @@ async def test_json_array_with_blanks_and_nulls(): "toggle_url": "http://localhost/test_json_array/foo.json?_facet_array=json_column", } ] + + +@pytest.mark.asyncio +async def test_facet_size(): + ds = Datasette([], memory=True, config={"max_returned_rows": 50}) + db = ds.add_database(Database(ds, memory_name="test_facet_size")) + await db.execute_write( + "create table neighbourhoods(city text, neighbourhood text)", block=True + ) + for i in range(1, 51): + for j in range(1, 4): + await db.execute_write( + "insert into neighbourhoods (city, neighbourhood) values (?, ?)", + ["City {}".format(i), "Neighbourhood {}".format(j)], + block=True, + ) + response = await ds.client.get("/test_facet_size/neighbourhoods.json") + data = response.json() + assert data["suggested_facets"] == [ + { + "name": "neighbourhood", + "toggle_url": "http://localhost/test_facet_size/neighbourhoods.json?_facet=neighbourhood", + } + ] + # Bump up _facet_size= to suggest city too + response2 = await ds.client.get( + "/test_facet_size/neighbourhoods.json?_facet_size=50" + ) + data2 = response2.json() + assert sorted(data2["suggested_facets"], key=lambda f: f["name"]) == [ + { + "name": "city", + "toggle_url": "http://localhost/test_facet_size/neighbourhoods.json?_facet_size=50&_facet=city", + }, + { + "name": "neighbourhood", + "toggle_url": "http://localhost/test_facet_size/neighbourhoods.json?_facet_size=50&_facet=neighbourhood", + }, + ] + # Facet by city should return expected number of results + response3 = await ds.client.get( + "/test_facet_size/neighbourhoods.json?_facet_size=50&_facet=city" + ) + data3 = response3.json() + assert len(data3["facet_results"]["city"]["results"]) == 50 + # Reduce max_returned_rows and check that it's respected + ds._settings["max_returned_rows"] = 20 + response4 = await ds.client.get( + 
"/test_facet_size/neighbourhoods.json?_facet_size=50&_facet=city" + ) + data4 = response4.json() + assert len(data4["facet_results"]["city"]["results"]) == 20 From a443dba82f43c22b03402a4f86c85558ccb526b8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 22 May 2021 17:45:54 -0700 Subject: [PATCH 0443/1705] Release 0.57a0 Refs #1281, #1282, #1289, #1290, #1308, #1313, #1314, #1321, #1323, #1325, #1330, #1332, #1335 --- datasette/version.py | 2 +- docs/changelog.rst | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 4dcf73b0..4da56e0a 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.56" +__version__ = "0.57a0" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 756badce..e00791f8 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,17 @@ Changelog ========= +.. _v0_57_a0: + +0.57a0 (2021-05-22) +------------------- + +Mainly dependency bumps, plus a new ``?_facet_size=`` argument. + +- Updated dependencies: pytest-asyncio, Black, jinja2, aiofiles, itsdangerous +- Fixed bug where columns called "Link" were incorrectly displayed in bold. (:issue:`1308`) +- New ``?_facet_size=`` argument for customizing the number of facet results returned on a page. (:issue:`1332`) + .. _v0_56: 0.56 (2021-03-28) From 2bd9d54b2762c991e11950c22c88c0336158d49b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 23 May 2021 18:41:50 -0700 Subject: [PATCH 0444/1705] Fix Jinja warnings, closes #1338, refs #1331 --- datasette/app.py | 5 ++--- datasette/views/database.py | 10 +++++----- datasette/views/table.py | 24 +++++++++++++----------- docs/plugin_hooks.rst | 8 ++++---- tests/plugins/my_plugin_2.py | 8 ++++---- 5 files changed, 28 insertions(+), 27 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index ee816426..e284995a 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -19,9 +19,8 @@ import urllib.parse from concurrent import futures from pathlib import Path -from markupsafe import Markup +from markupsafe import Markup, escape from itsdangerous import URLSafeSerializer -import jinja2 from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader, escape from jinja2.environment import Template from jinja2.exceptions import TemplateNotFound @@ -864,7 +863,7 @@ class Datasette: } if request and request.args.get("_context") and self.setting("template_debug"): return "
    <pre>{}</pre>
    ".format( - jinja2.escape(json.dumps(template_context, default=repr, indent=4)) + escape(json.dumps(template_context, default=repr, indent=4)) ) return await template.render_async(template_context) diff --git a/datasette/views/database.py b/datasette/views/database.py index 0c58a351..96b2ca91 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -1,8 +1,8 @@ import os import hashlib import itertools -import jinja2 import json +from markupsafe import Markup, escape from urllib.parse import parse_qsl, urlencode from datasette.utils import ( @@ -354,11 +354,11 @@ class QueryView(DataView): display_value = plugin_value else: if value in ("", None): - display_value = jinja2.Markup(" ") + display_value = Markup(" ") elif is_url(str(display_value).strip()): - display_value = jinja2.Markup( + display_value = Markup( '{url}'.format( - url=jinja2.escape(value.strip()) + url=escape(value.strip()) ) ) elif isinstance(display_value, bytes): @@ -372,7 +372,7 @@ class QueryView(DataView): ).hexdigest(), }, ) - display_value = jinja2.Markup( + display_value = Markup( '<Binary: {} byte{}>'.format( blob_url, len(display_value), diff --git a/datasette/views/table.py b/datasette/views/table.py index 48792284..8007377a 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -2,7 +2,7 @@ import urllib import itertools import json -import jinja2 +import markupsafe from datasette.plugins import pm from datasette.database import QueryInterrupted @@ -135,12 +135,12 @@ class RowTableShared(DataView): "value_type": "pk", "is_special_link_column": is_special_link_column, "raw": pk_path, - "value": jinja2.Markup( + "value": markupsafe.Markup( '{flat_pks}'.format( base_url=base_url, database=database, table=urllib.parse.quote_plus(table), - flat_pks=str(jinja2.escape(pk_path)), + flat_pks=str(markupsafe.escape(pk_path)), flat_pks_quoted=path_from_row_pks(row, pks, not pks), ) ), @@ -166,7 +166,7 @@ class RowTableShared(DataView): if plugin_display_value is not None: display_value = plugin_display_value elif isinstance(value, bytes): - display_value = jinja2.Markup( + display_value = markupsafe.Markup( '<Binary: {} byte{}>'.format( self.ds.urls.row_blob( database, @@ -187,22 +187,22 @@ class RowTableShared(DataView): link_template = ( LINK_WITH_LABEL if (label != value) else LINK_WITH_VALUE ) - display_value = jinja2.Markup( + display_value = markupsafe.Markup( link_template.format( database=database, base_url=base_url, table=urllib.parse.quote_plus(other_table), link_id=urllib.parse.quote_plus(str(value)), - id=str(jinja2.escape(value)), - label=str(jinja2.escape(label)) or "-", + id=str(markupsafe.escape(value)), + label=str(markupsafe.escape(label)) or "-", ) ) elif value in ("", None): - display_value = jinja2.Markup(" ") + display_value = markupsafe.Markup(" ") elif is_url(str(value).strip()): - display_value = jinja2.Markup( + display_value = markupsafe.Markup( '{url}'.format( - url=jinja2.escape(value.strip()) + url=markupsafe.escape(value.strip()) ) ) elif column in table_metadata.get("units", {}) and value != "": @@ -212,7 +212,9 @@ class RowTableShared(DataView): # representation, which we have to round off to avoid ugliness. In the vast # majority of cases this rounding will be inconsequential. I hope. 
value = round(value.to_compact(), 6) - display_value = jinja2.Markup(f"{value:~P}".replace(" ", " ")) + display_value = markupsafe.Markup( + f"{value:~P}".replace(" ", " ") + ) else: display_value = str(value) if truncate_cells and len(display_value) > truncate_cells: diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 7a1645ec..688eaa61 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -389,7 +389,7 @@ If the value matches that pattern, the plugin returns an HTML link element: .. code-block:: python from datasette import hookimpl - import jinja2 + import markupsafe import json @@ -415,9 +415,9 @@ If the value matches that pattern, the plugin returns an HTML link element: or href.startswith("https://") ): return None - return jinja2.Markup('{label}'.format( - href=jinja2.escape(data["href"]), - label=jinja2.escape(data["label"] or "") or " " + return markupsafe.Markup('{label}'.format( + href=markupsafe.escape(data["href"]), + label=markupsafe.escape(data["label"] or "") or " " )) Examples: `datasette-render-binary `_, `datasette-render-markdown `__, `datasette-json-html `__ diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index 6cd222e6..f3b794cf 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -1,6 +1,6 @@ from datasette import hookimpl from functools import wraps -import jinja2 +import markupsafe import json @@ -38,11 +38,11 @@ def render_cell(value, database): or href.startswith("https://") ): return None - return jinja2.Markup( + return markupsafe.Markup( '{label}'.format( database=database, - href=jinja2.escape(data["href"]), - label=jinja2.escape(data["label"] or "") or " ", + href=markupsafe.escape(data["href"]), + label=markupsafe.escape(data["label"] or "") or " ", ) ) From eae3084b46e2c3931db12cdef79093ad0e644bce Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 24 May 2021 10:52:09 -0700 Subject: [PATCH 0445/1705] Fixed another Jinja warning, refs #1338 --- datasette/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index e284995a..957ced7c 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -21,7 +21,7 @@ from pathlib import Path from markupsafe import Markup, escape from itsdangerous import URLSafeSerializer -from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader, escape +from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader from jinja2.environment import Template from jinja2.exceptions import TemplateNotFound import uvicorn From fc972350a8a0276d87a6a83efbbdfab0edd060d4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 24 May 2021 11:07:03 -0700 Subject: [PATCH 0446/1705] Docker image should now allow apt-get install, closes #1320 --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 8193700d..7c56cf56 100644 --- a/Dockerfile +++ b/Dockerfile @@ -15,7 +15,7 @@ RUN apt-get update && \ apt-get remove -y software-properties-common && \ apt clean && \ rm -rf /var/lib/apt && \ - rm -rf /var/lib/dpkg + rm -rf /var/lib/dpkg/info/* RUN pip install https://github.com/simonw/datasette/archive/refs/tags/${VERSION}.zip && \ find /usr/local/lib -name '__pycache__' | xargs rm -r && \ From 56af118fc158a59a98688f2caa6f01db6b68da83 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 24 May 2021 11:14:45 -0700 Subject: [PATCH 0447/1705] How to apt-get install in Docker container, refs #1320 --- docs/installation.rst | 9 +++++++++ 
1 file changed, 9 insertions(+) diff --git a/docs/installation.rst b/docs/installation.rst index 6ac67f59..381d9a63 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -195,3 +195,12 @@ You can now run the new custom image like so:: You can confirm that the plugins are installed by visiting http://127.0.0.1:8001/-/plugins + +Some plugins such as `datasette-ripgrep `__ may need additional system packages. You can install these by running `apt-get install` inside the container: + + docker run datasette-057a0 bash -c ' + apt-get update && + apt-get install ripgrep && + pip install datasette-ripgrep' + + docker commit $(docker ps -lq) datasette-with-ripgrep From c0a748e5c3f498fa8c139b420d07dd3dea612379 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 24 May 2021 11:15:15 -0700 Subject: [PATCH 0448/1705] Markup fix, refs #1320 --- docs/installation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/installation.rst b/docs/installation.rst index 381d9a63..b6881bc0 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -196,7 +196,7 @@ You can now run the new custom image like so:: You can confirm that the plugins are installed by visiting http://127.0.0.1:8001/-/plugins -Some plugins such as `datasette-ripgrep `__ may need additional system packages. You can install these by running `apt-get install` inside the container: +Some plugins such as `datasette-ripgrep `__ may need additional system packages. You can install these by running `apt-get install` inside the container:: docker run datasette-057a0 bash -c ' apt-get update && From f1c29fd6a184254aa68efadf096bcf21e848f921 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 May 2021 21:17:43 -0700 Subject: [PATCH 0449/1705] ?_col=/?_nocol= to show/hide columns on the table page Closes #615 * Cog icon for hiding columns * Show all columns cog menu item * Do not allow hide column on primary keys * Allow both ?_col= and ?_nocol= * De-duplicate if ?_col= passed multiple times * 400 error if user tries to ?_nocol= a primary key * Documentation for ?_col= and ?_nocol= --- datasette/static/table.js | 45 +++++++++++++++++++++++------ datasette/views/table.py | 47 +++++++++++++++++++++++++++---- docs/json_api.rst | 6 ++++ tests/test_api.py | 59 +++++++++++++++++++++++++++++++++++++++ 4 files changed, 142 insertions(+), 15 deletions(-) diff --git a/datasette/static/table.js b/datasette/static/table.js index b4e1e113..4c24d772 100644 --- a/datasette/static/table.js +++ b/datasette/static/table.js @@ -4,6 +4,8 @@ var DROPDOWN_HTML = ` diff --git a/tests/fixtures.py b/tests/fixtures.py index 0a721d3a..5730c1bf 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -129,16 +129,16 @@ def make_app_client( files.append(extra_filepath) os.chdir(os.path.dirname(filepath)) config = config or {} - config.update( - { - "default_page_size": 50, - "max_returned_rows": max_returned_rows or 100, - "sql_time_limit_ms": sql_time_limit_ms or 200, - # Default is 3 but this results in "too many open files" - # errors when running the full test suite: - "num_sql_threads": 1, - } - ) + for key, value in { + "default_page_size": 50, + "max_returned_rows": max_returned_rows or 100, + "sql_time_limit_ms": sql_time_limit_ms or 200, + # Default is 3 but this results in "too many open files" + # errors when running the full test suite: + "num_sql_threads": 1, + }.items(): + if key not in config: + config[key] = value ds = Datasette( files, immutables=immutables, diff --git a/tests/test_html.py b/tests/test_html.py 
index 9e86ebc2..4f2cc8ad 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1612,3 +1612,65 @@ def test_navigation_menu_links( assert ( details.find("a", {"href": link}) is None ), f"{link} found but should not have been in nav menu" + + +@pytest.mark.parametrize( + "max_returned_rows,path,expected_num_facets,expected_ellipses,expected_ellipses_url", + ( + ( + 5, + # Default should show 2 facets + "/fixtures/facetable?_facet=neighborhood", + 2, + True, + "/fixtures/facetable?_facet=neighborhood&_facet_size=max", + ), + # _facet_size above max_returned_rows should show max_returned_rows (5) + ( + 5, + "/fixtures/facetable?_facet=neighborhood&_facet_size=50", + 5, + True, + "/fixtures/facetable?_facet=neighborhood&_facet_size=max", + ), + # If max_returned_rows is high enough, should return all + ( + 20, + "/fixtures/facetable?_facet=neighborhood&_facet_size=max", + 14, + False, + None, + ), + # If num facets > max_returned_rows, show ... without a link + # _facet_size above max_returned_rows should show max_returned_rows (5) + ( + 5, + "/fixtures/facetable?_facet=neighborhood&_facet_size=max", + 5, + True, + None, + ), + ), +) +def test_facet_more_links( + max_returned_rows, + path, + expected_num_facets, + expected_ellipses, + expected_ellipses_url, +): + with make_app_client( + config={"max_returned_rows": max_returned_rows, "default_facet_size": 2} + ) as client: + response = client.get(path) + soup = Soup(response.body, "html.parser") + lis = soup.select("#facet-neighborhood ul li:not(.facet-truncated)") + facet_truncated = soup.select_one(".facet-truncated") + assert len(lis) == expected_num_facets + if not expected_ellipses: + assert facet_truncated is None + else: + if expected_ellipses_url: + assert facet_truncated.find("a")["href"] == expected_ellipses_url + else: + assert facet_truncated.find("a") is None From 4545120c920165aad9659d27111f63f977b8a399 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 May 2021 09:04:26 -0700 Subject: [PATCH 0452/1705] Test and docs for ?_facet_size=max, refs #1337 --- docs/json_api.rst | 2 +- tests/test_facets.py | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/json_api.rst b/docs/json_api.rst index 787b1203..e48ec514 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -381,7 +381,7 @@ Special table arguments Facet by column. Can be applied multiple times, see :ref:`facets`. Only works on the default JSON output, not on any of the custom shapes. ``?_facet_size=100`` - Increase the number of facet results returned for each facet. + Increase the number of facet results returned for each facet. Use ``?_facet_size=max`` for the maximum available size, determined by :ref:`setting_max_returned_rows`. 
``?_trace=1`` Turns on tracing for this page: SQL queries executed during the request will diff --git a/tests/test_facets.py b/tests/test_facets.py index a1a14e71..18fb8c3b 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -399,3 +399,9 @@ async def test_facet_size(): ) data4 = response4.json() assert len(data4["facet_results"]["city"]["results"]) == 20 + # Test _facet_size=max + response5 = await ds.client.get( + "/test_facet_size/neighbourhoods.json?_facet_size=max&_facet=city" + ) + data5 = response5.json() + assert len(data5["facet_results"]["city"]["results"]) == 20 From 1a8972f9c012cd22b088c6b70661a9c3d3847853 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 May 2021 09:11:03 -0700 Subject: [PATCH 0453/1705] Upgrade Heroku runtime to python-3.8.10 --- datasette/publish/heroku.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index 19fe3fbe..2ebbd4bd 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -175,7 +175,7 @@ def temporary_heroku_directory( fp.write(json.dumps(metadata_content, indent=2)) with open("runtime.txt", "w") as fp: - fp.write("python-3.8.7") + fp.write("python-3.8.10") if branch: install = [ From 89822d10be0da446471986addea91d9766f12efb Mon Sep 17 00:00:00 2001 From: Blair Drummond <10801138+blairdrummond@users.noreply.github.com> Date: Thu, 27 May 2021 12:49:23 -0400 Subject: [PATCH 0454/1705] Docker multi-arch support with Buildx (#1319) Thanks, @blairdrummond --- .github/workflows/push_docker_tag.yml | 34 ++++++++++++++++++++------- 1 file changed, 25 insertions(+), 9 deletions(-) diff --git a/.github/workflows/push_docker_tag.yml b/.github/workflows/push_docker_tag.yml index 9a3969f0..e61150a5 100644 --- a/.github/workflows/push_docker_tag.yml +++ b/.github/workflows/push_docker_tag.yml @@ -11,15 +11,31 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v1 + + - name: Available platforms + run: echo ${{ steps.buildx.outputs.platforms }} + + - name: Login to DockerHub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKER_USER }} + password: ${{ secrets.DOCKER_PASS }} + - name: Build and push to Docker Hub + run: | + docker buildx build \ + --file Dockerfile . \ + --tag $REPO:${VERSION_TAG} \ + --build-arg VERSION=${VERSION_TAG} \ + --platform linux/386,linux/amd64,linux/arm/v6,linux/arm/v7,linux/arm64,linux/ppc64le,linux/s390x \ + --push env: - DOCKER_USER: ${{ secrets.DOCKER_USER }} - DOCKER_PASS: ${{ secrets.DOCKER_PASS }} + REPO: datasetteproject/datasette VERSION_TAG: ${{ github.event.inputs.version_tag }} - run: |- - docker login -u $DOCKER_USER -p $DOCKER_PASS - export REPO=datasetteproject/datasette - docker build -f Dockerfile \ - -t $REPO:${VERSION_TAG} \ - --build-arg VERSION=${VERSION_TAG} . 
- docker push $REPO:${VERSION_TAG} From 7b106e106000713bbee31b34d694b3dadbd4818c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 May 2021 09:54:21 -0700 Subject: [PATCH 0455/1705] Release 0.57a1 Refs #1319, #1320, #1331, #1337, #1338, #1341 --- datasette/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 4da56e0a..cc98e271 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.57a0" +__version__ = "0.57a1" __version_info__ = tuple(__version__.split(".")) From f7d3e76fb3d1fa5aabe339251e4a930610643822 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 30 May 2021 22:31:14 -0400 Subject: [PATCH 0456/1705] Facets now execute ignoring ?_col and ?_nocol, fixes #1345 --- datasette/views/table.py | 30 ++++++++++++++++++++++-------- tests/test_api.py | 15 +++++++++++++++ 2 files changed, 37 insertions(+), 8 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index b54a908a..c5703292 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -358,16 +358,21 @@ class TableView(RowTableShared): ) pks = await db.primary_keys(table) - table_columns = await self.columns_to_select(db, table, request) - select_clause = ", ".join(escape_sqlite(t) for t in table_columns) + table_columns = await db.table_columns(table) + + specified_columns = await self.columns_to_select(db, table, request) + select_specified_columns = ", ".join( + escape_sqlite(t) for t in specified_columns + ) + select_all_columns = ", ".join(escape_sqlite(t) for t in table_columns) use_rowid = not pks and not is_view if use_rowid: - select = f"rowid, {select_clause}" + select_specified_columns = f"rowid, {select_specified_columns}" + select_all_columns = f"rowid, {select_all_columns}" order_by = "rowid" order_by_pks = "rowid" else: - select = select_clause order_by_pks = ", ".join([escape_sqlite(pk) for pk in pks]) order_by = order_by_pks @@ -633,7 +638,7 @@ class TableView(RowTableShared): where_clause = f"where {' and '.join(where_clauses)} " if order_by: - order_by = f"order by {order_by} " + order_by = f"order by {order_by}" extra_args = {} # Handle ?_size=500 @@ -656,13 +661,22 @@ class TableView(RowTableShared): else: page_size = self.ds.page_size - sql_no_limit = "select {select} from {table_name} {where}{order_by}".format( - select=select, + sql_no_limit = ( + "select {select_all_columns} from {table_name} {where}{order_by}".format( + select_all_columns=select_all_columns, + table_name=escape_sqlite(table), + where=where_clause, + order_by=order_by, + ) + ) + sql = "select {select_specified_columns} from {table_name} {where}{order_by} limit {page_size}{offset}".format( + select_specified_columns=select_specified_columns, table_name=escape_sqlite(table), where=where_clause, order_by=order_by, + page_size=page_size + 1, + offset=offset, ) - sql = f"{sql_no_limit.rstrip()} limit {page_size + 1}{offset}" if request.args.get("_timelimit"): extra_args["custom_time_limit"] = int(request.args.get("_timelimit")) diff --git a/tests/test_api.py b/tests/test_api.py index 00de84e6..2c5d7516 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -2041,6 +2041,21 @@ def test_http_options_request(app_client): "/fixtures/facetable.json?_col=state&_col=created&_nocol=created", ["pk", "state"], ), + ( + # Ensure faceting doesn't break, https://github.com/simonw/datasette/issues/1345 + "/fixtures/facetable.json?_nocol=state&_facet=state", + [ + "pk", + "created", + "planet_int", 
+ "on_earth", + "city_id", + "neighborhood", + "tags", + "complex_array", + "distinct_some_null", + ], + ), ( "/fixtures/simple_view.json?_nocol=content", ["upper_content"], From c5ae1197a208e1b034c88882e3ac865813a40980 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 30 May 2021 22:39:14 -0400 Subject: [PATCH 0457/1705] ?_nofacets=1 option, closes #1350 --- datasette/views/table.py | 16 +++++++++------- docs/facets.rst | 2 ++ docs/json_api.rst | 3 +++ tests/test_api.py | 14 ++++++++++++++ 4 files changed, 28 insertions(+), 7 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index c5703292..83c2b922 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -731,13 +731,14 @@ class TableView(RowTableShared): ) ) - for facet in facet_instances: - ( - instance_facet_results, - instance_facets_timed_out, - ) = await facet.facet_results() - facet_results.update(instance_facet_results) - facets_timed_out.extend(instance_facets_timed_out) + if not request.args.get("_nofacets"): + for facet in facet_instances: + ( + instance_facet_results, + instance_facets_timed_out, + ) = await facet.facet_results() + facet_results.update(instance_facet_results) + facets_timed_out.extend(instance_facets_timed_out) # Figure out columns and rows for the query columns = [r[0] for r in results.description] @@ -828,6 +829,7 @@ class TableView(RowTableShared): self.ds.setting("suggest_facets") and self.ds.setting("allow_facet") and not _next + and not request.args.get("_nofacets") ): for facet in facet_instances: suggested_facets.extend(await facet.suggest()) diff --git a/docs/facets.rst b/docs/facets.rst index 5061d11c..7730e4ac 100644 --- a/docs/facets.rst +++ b/docs/facets.rst @@ -86,6 +86,8 @@ If Datasette detects that a column is a foreign key, the ``"label"`` property wi The default number of facet results returned is 30, controlled by the :ref:`setting_default_facet_size` setting. You can increase this on an individual page by adding ``?_facet_size=100`` to the query string, up to a maximum of :ref:`setting_max_returned_rows` (which defaults to 1000). +.. _facets_metadata: + Facets in metadata.json ----------------------- diff --git a/docs/json_api.rst b/docs/json_api.rst index e48ec514..62c208a2 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -383,6 +383,9 @@ Special table arguments ``?_facet_size=100`` Increase the number of facet results returned for each facet. Use ``?_facet_size=max`` for the maximum available size, determined by :ref:`setting_max_returned_rows`. +``?_nofacets=1`` + Disable all facets and facet suggestions for this page, including any defined by :ref:`facets_metadata`. 
+ ``?_trace=1`` Turns on tracing for this page: SQL queries executed during the request will be gathered and included in the response, either in a new ``"_traces"`` key diff --git a/tests/test_api.py b/tests/test_api.py index 2c5d7516..3d6d0330 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1669,6 +1669,20 @@ def test_suggest_facets_off(): assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"] +@pytest.mark.parametrize("nofacets", (True, False)) +def test_nofacets(app_client, nofacets): + path = "/fixtures/facetable.json?_facet=state" + if nofacets: + path += "&_nofacets=1" + response = app_client.get(path) + if nofacets: + assert response.json["suggested_facets"] == [] + assert response.json["facet_results"] == {} + else: + assert response.json["suggested_facets"] != [] + assert response.json["facet_results"] != {} + + def test_expand_labels(app_client): response = app_client.get( "/fixtures/facetable.json?_shape=object&_labels=1&_size=2" From d1d06ace49606da790a765689b4fbffa4c6deecb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Jun 2021 08:49:50 -0700 Subject: [PATCH 0458/1705] ?_trac=1 for CSV, plus ?_nofacets=1 when rendering CSV Closes #1351, closes #1350 --- datasette/utils/__init__.py | 9 +++++++++ datasette/views/base.py | 38 +++++++++++++++++++++++++++++++++---- tests/test_csv.py | 24 ++++++++++++++++++++--- 3 files changed, 64 insertions(+), 7 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 1fedb69c..dd47771f 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -7,6 +7,7 @@ import hashlib import inspect import itertools import json +import markupsafe import mergedeep import os import re @@ -777,6 +778,14 @@ class LimitedWriter: await self.writer.write(bytes) +class EscapeHtmlWriter: + def __init__(self, writer): + self.writer = writer + + async def write(self, content): + await self.writer.write(markupsafe.escape(content)) + + _infinities = {float("inf"), float("-inf")} diff --git a/datasette/views/base.py b/datasette/views/base.py index ba0f7d4c..aefaec6c 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -13,6 +13,7 @@ from datasette.plugins import pm from datasette.database import QueryInterrupted from datasette.utils import ( await_me_maybe, + EscapeHtmlWriter, InvalidSql, LimitedWriter, call_with_supported_arguments, @@ -262,6 +263,16 @@ class DataView(BaseView): async def as_csv(self, request, database, hash, **kwargs): stream = request.args.get("_stream") + # Do not calculate facets: + if not request.args.get("_nofacets"): + if not request.query_string: + new_query_string = "_nofacets=1" + else: + new_query_string = request.query_string + "&_nofacets=1" + new_scope = dict( + request.scope, query_string=new_query_string.encode("latin-1") + ) + request.scope = new_scope if stream: # Some quick sanity checks if not self.ds.setting("allow_csv_stream"): @@ -298,9 +309,27 @@ class DataView(BaseView): if column in expanded_columns: headings.append(f"{column}_label") + content_type = "text/plain; charset=utf-8" + preamble = "" + postamble = "" + + trace = request.args.get("_trace") + if trace: + content_type = "text/html; charset=utf-8" + preamble = ( + "CSV debug" + '" + async def stream_fn(r): - nonlocal data - writer = csv.writer(LimitedWriter(r, self.ds.setting("max_csv_mb"))) + nonlocal data, trace + limited_writer = LimitedWriter(r, self.ds.setting("max_csv_mb")) + if trace: + await limited_writer.write(preamble) + writer = 
csv.writer(EscapeHtmlWriter(limited_writer)) + else: + writer = csv.writer(limited_writer) first = True next = None while first or (next and stream): @@ -371,13 +400,14 @@ class DataView(BaseView): sys.stderr.flush() await r.write(str(e)) return + await limited_writer.write(postamble) - content_type = "text/plain; charset=utf-8" headers = {} if self.ds.cors: headers["Access-Control-Allow-Origin"] = "*" if request.args.get("_dl", None): - content_type = "text/csv; charset=utf-8" + if not trace: + content_type = "text/csv; charset=utf-8" disposition = 'attachment; filename="{}.csv"'.format( kwargs.get("table", database) ) diff --git a/tests/test_csv.py b/tests/test_csv.py index 6b17033c..30afbd9e 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -1,3 +1,4 @@ +from bs4 import BeautifulSoup as Soup from .fixtures import ( # noqa app_client, app_client_csv_max_mb_one, @@ -51,7 +52,7 @@ pk,foreign_key_with_label,foreign_key_with_label_label,foreign_key_with_blank_la def test_table_csv(app_client): - response = app_client.get("/fixtures/simple_primary_key.csv") + response = app_client.get("/fixtures/simple_primary_key.csv?_oh=1") assert response.status == 200 assert not response.headers.get("Access-Control-Allow-Origin") assert "text/plain; charset=utf-8" == response.headers["content-type"] @@ -104,8 +105,8 @@ def test_custom_sql_csv_blob_columns(app_client): assert "text/plain; charset=utf-8" == response.headers["content-type"] assert response.text == ( "rowid,data\r\n" - '1,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_blob_column=data&_blob_hash=f3088978da8f9aea479ffc7f631370b968d2e855eeb172bea7f6c7a04262bb6d"\r\n' - '2,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_blob_column=data&_blob_hash=b835b0483cedb86130b9a2c280880bf5fadc5318ddf8c18d0df5204d40df1724"\r\n' + '1,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_nofacets=1&_blob_column=data&_blob_hash=f3088978da8f9aea479ffc7f631370b968d2e855eeb172bea7f6c7a04262bb6d"\r\n' + '2,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_nofacets=1&_blob_column=data&_blob_hash=b835b0483cedb86130b9a2c280880bf5fadc5318ddf8c18d0df5204d40df1724"\r\n' "3,\r\n" ) @@ -157,3 +158,20 @@ def test_table_csv_stream(app_client): # With _stream=1 should return header + 1001 rows response = app_client.get("/fixtures/compound_three_primary_keys.csv?_stream=1") assert 1002 == len([b for b in response.body.split(b"\r\n") if b]) + + +def test_csv_trace(app_client): + response = app_client.get("/fixtures/simple_primary_key.csv?_trace=1") + assert response.headers["content-type"] == "text/html; charset=utf-8" + soup = Soup(response.text, "html.parser") + assert ( + soup.find("textarea").text + == "id,content\r\n1,hello\r\n2,world\r\n3,\r\n4,RENDER_CELL_DEMO\r\n" + ) + assert "select id, content from simple_primary_key" in soup.find("pre").text + + +def test_table_csv_stream_does_not_calculate_facets(app_client): + response = app_client.get("/fixtures/simple_primary_key.csv?_trace=1") + soup = Soup(response.text, "html.parser") + assert "select content, count(*) as n" not in soup.find("pre").text From 8bde6c54615af529e81de559cbb3bf3ee5fe17cb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Jun 2021 08:55:28 -0700 Subject: [PATCH 0459/1705] Rename ?_nofacets=1 to ?_nofacet=1, refs #1353 --- datasette/views/base.py | 6 +++--- datasette/views/table.py | 4 ++-- docs/json_api.rst | 2 +- tests/test_api.py | 10 +++++----- tests/test_csv.py | 4 ++-- 5 files changed, 
13 insertions(+), 13 deletions(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index aefaec6c..b8c581fc 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -264,11 +264,11 @@ class DataView(BaseView): async def as_csv(self, request, database, hash, **kwargs): stream = request.args.get("_stream") # Do not calculate facets: - if not request.args.get("_nofacets"): + if not request.args.get("_nofacet"): if not request.query_string: - new_query_string = "_nofacets=1" + new_query_string = "_nofacet=1" else: - new_query_string = request.query_string + "&_nofacets=1" + new_query_string = request.query_string + "&_nofacet=1" new_scope = dict( request.scope, query_string=new_query_string.encode("latin-1") ) diff --git a/datasette/views/table.py b/datasette/views/table.py index 83c2b922..7fbf670b 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -731,7 +731,7 @@ class TableView(RowTableShared): ) ) - if not request.args.get("_nofacets"): + if not request.args.get("_nofacet"): for facet in facet_instances: ( instance_facet_results, @@ -829,7 +829,7 @@ class TableView(RowTableShared): self.ds.setting("suggest_facets") and self.ds.setting("allow_facet") and not _next - and not request.args.get("_nofacets") + and not request.args.get("_nofacet") ): for facet in facet_instances: suggested_facets.extend(await facet.suggest()) diff --git a/docs/json_api.rst b/docs/json_api.rst index 62c208a2..f1c347b7 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -383,7 +383,7 @@ Special table arguments ``?_facet_size=100`` Increase the number of facet results returned for each facet. Use ``?_facet_size=max`` for the maximum available size, determined by :ref:`setting_max_returned_rows`. -``?_nofacets=1`` +``?_nofacet=1`` Disable all facets and facet suggestions for this page, including any defined by :ref:`facets_metadata`. 
``?_trace=1`` diff --git a/tests/test_api.py b/tests/test_api.py index 3d6d0330..5e639133 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1669,13 +1669,13 @@ def test_suggest_facets_off(): assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"] -@pytest.mark.parametrize("nofacets", (True, False)) -def test_nofacets(app_client, nofacets): +@pytest.mark.parametrize("nofacet", (True, False)) +def test_nofacet(app_client, nofacet): path = "/fixtures/facetable.json?_facet=state" - if nofacets: - path += "&_nofacets=1" + if nofacet: + path += "&_nofacet=1" response = app_client.get(path) - if nofacets: + if nofacet: assert response.json["suggested_facets"] == [] assert response.json["facet_results"] == {} else: diff --git a/tests/test_csv.py b/tests/test_csv.py index 30afbd9e..40549fd8 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -105,8 +105,8 @@ def test_custom_sql_csv_blob_columns(app_client): assert "text/plain; charset=utf-8" == response.headers["content-type"] assert response.text == ( "rowid,data\r\n" - '1,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_nofacets=1&_blob_column=data&_blob_hash=f3088978da8f9aea479ffc7f631370b968d2e855eeb172bea7f6c7a04262bb6d"\r\n' - '2,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_nofacets=1&_blob_column=data&_blob_hash=b835b0483cedb86130b9a2c280880bf5fadc5318ddf8c18d0df5204d40df1724"\r\n' + '1,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_nofacet=1&_blob_column=data&_blob_hash=f3088978da8f9aea479ffc7f631370b968d2e855eeb172bea7f6c7a04262bb6d"\r\n' + '2,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_nofacet=1&_blob_column=data&_blob_hash=b835b0483cedb86130b9a2c280880bf5fadc5318ddf8c18d0df5204d40df1724"\r\n' "3,\r\n" ) From fd368d3b2c5a5d9c3e10a21638f6ea9a71471b52 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Jun 2021 09:12:32 -0700 Subject: [PATCH 0460/1705] New _nocount=1 option, used to speed up CSVs - closes #1353 --- datasette/views/base.py | 15 +++++++++++---- datasette/views/table.py | 6 +++++- docs/json_api.rst | 3 +++ tests/test_api.py | 9 +++++++++ tests/test_csv.py | 6 ++++++ 5 files changed, 34 insertions(+), 5 deletions(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index b8c581fc..26edfde5 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -263,12 +263,19 @@ class DataView(BaseView): async def as_csv(self, request, database, hash, **kwargs): stream = request.args.get("_stream") - # Do not calculate facets: - if not request.args.get("_nofacet"): + # Do not calculate facets or counts: + extra_parameters = [ + "{}=1".format(key) + for key in ("_nofacet", "_nocount") + if not request.args.get(key) + ] + if extra_parameters: if not request.query_string: - new_query_string = "_nofacet=1" + new_query_string = "&".join(extra_parameters) else: - new_query_string = request.query_string + "&_nofacet=1" + new_query_string = ( + request.query_string + "&" + "&".join(extra_parameters) + ) new_scope = dict( request.scope, query_string=new_query_string.encode("latin-1") ) diff --git a/datasette/views/table.py b/datasette/views/table.py index 7fbf670b..d47865f0 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -697,7 +697,11 @@ class TableView(RowTableShared): except KeyError: pass - if count_sql and filtered_table_rows_count is None: + if ( + count_sql + and filtered_table_rows_count is None + and not request.args.get("_nocount") + ): try: 
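# count(*) across a large filtered table can be slow; ?_nocount=1 makes the
# guard above skip this block entirely and report the count as None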
count_rows = list(await db.execute(count_sql, from_sql_params)) filtered_table_rows_count = count_rows[0][0] diff --git a/docs/json_api.rst b/docs/json_api.rst index f1c347b7..660fbc1c 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -386,6 +386,9 @@ Special table arguments ``?_nofacet=1`` Disable all facets and facet suggestions for this page, including any defined by :ref:`facets_metadata`. +``?_nocount=1`` + Disable the ``select count(*)`` query used on this page - a count of ``None`` will be returned instead. + ``?_trace=1`` Turns on tracing for this page: SQL queries executed during the request will be gathered and included in the response, either in a new ``"_traces"`` key diff --git a/tests/test_api.py b/tests/test_api.py index 5e639133..49b3bbe9 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1683,6 +1683,15 @@ def test_nofacet(app_client, nofacet): assert response.json["facet_results"] != {} +@pytest.mark.parametrize("nocount,expected_count", ((True, None), (False, 15))) +def test_nocount(app_client, nocount, expected_count): + path = "/fixtures/facetable.json" + if nocount: + path += "?_nocount=1" + response = app_client.get(path) + assert response.json["filtered_table_rows_count"] == expected_count + + def test_expand_labels(app_client): response = app_client.get( "/fixtures/facetable.json?_shape=object&_labels=1&_size=2" diff --git a/tests/test_csv.py b/tests/test_csv.py index 40549fd8..02fe5766 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -175,3 +175,9 @@ def test_table_csv_stream_does_not_calculate_facets(app_client): response = app_client.get("/fixtures/simple_primary_key.csv?_trace=1") soup = Soup(response.text, "html.parser") assert "select content, count(*) as n" not in soup.find("pre").text + + +def test_table_csv_stream_does_not_calculate_counts(app_client): + response = app_client.get("/fixtures/simple_primary_key.csv?_trace=1") + soup = Soup(response.text, "html.parser") + assert "select count(*)" not in soup.find("pre").text From ff45ed0ce5e1f151f24f089c6b78ab7f7a5cd0dc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Jun 2021 09:16:58 -0700 Subject: [PATCH 0461/1705] Updated --help output for latest Click, closes #1354 --- docs/datasette-package-help.txt | 4 +--- docs/datasette-publish-cloudrun-help.txt | 2 -- docs/datasette-publish-heroku-help.txt | 3 --- docs/datasette-serve-help.txt | 8 +------- 4 files changed, 2 insertions(+), 15 deletions(-) diff --git a/docs/datasette-package-help.txt b/docs/datasette-package-help.txt index 5f5ce070..7cfac1b1 100644 --- a/docs/datasette-package-help.txt +++ b/docs/datasette-package-help.txt @@ -7,7 +7,6 @@ Usage: datasette package [OPTIONS] FILES... Options: -t, --tag TEXT Name for the resulting Docker container, can optionally use name:tag format - -m, --metadata FILENAME Path to JSON/YAML file containing metadata to publish --extra-options TEXT Extra options to pass to datasette serve --branch TEXT Install datasette from a GitHub branch e.g. 
main @@ -19,8 +18,7 @@ Options: --version-note TEXT Additional note to show on /-/versions --secret TEXT Secret used for signing secure values, such as signed cookies - - -p, --port INTEGER RANGE Port to run the server on, defaults to 8001 + -p, --port INTEGER RANGE Port to run the server on, defaults to 8001 [1<=x<=65535] --title TEXT Title for metadata --license TEXT License label for metadata --license_url TEXT License URL for metadata diff --git a/docs/datasette-publish-cloudrun-help.txt b/docs/datasette-publish-cloudrun-help.txt index c706d921..3d05efb6 100644 --- a/docs/datasette-publish-cloudrun-help.txt +++ b/docs/datasette-publish-cloudrun-help.txt @@ -13,11 +13,9 @@ Options: --plugin-secret ... Secrets to pass to plugins, e.g. --plugin-secret datasette-auth-github client_id xxx - --version-note TEXT Additional note to show on /-/versions --secret TEXT Secret used for signing secure values, such as signed cookies - --title TEXT Title for metadata --license TEXT License label for metadata --license_url TEXT License URL for metadata diff --git a/docs/datasette-publish-heroku-help.txt b/docs/datasette-publish-heroku-help.txt index c4b852de..9d633e95 100644 --- a/docs/datasette-publish-heroku-help.txt +++ b/docs/datasette-publish-heroku-help.txt @@ -13,11 +13,9 @@ Options: --plugin-secret ... Secrets to pass to plugins, e.g. --plugin-secret datasette-auth-github client_id xxx - --version-note TEXT Additional note to show on /-/versions --secret TEXT Secret used for signing secure values, such as signed cookies - --title TEXT Title for metadata --license TEXT License label for metadata --license_url TEXT License URL for metadata @@ -28,5 +26,4 @@ Options: -n, --name TEXT Application name to use when deploying --tar TEXT --tar option to pass to Heroku, e.g. --tar=/usr/local/bin/gtar - --help Show this message and exit. diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index 8f770afb..db51dd80 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -10,13 +10,10 @@ Options: connections from the local machine will be allowed. Use 0.0.0.0 to listen to all IPs and allow access from other machines. - -p, --port INTEGER RANGE Port for server, defaults to 8001. Use -p 0 to automatically - assign an available port. - + assign an available port. [0<=x<=65535] --reload Automatically reload if code or metadata change detected - useful for development - --cors Enable CORS by serving Access-Control-Allow-Origin: * --load-extension TEXT Path to a SQLite extension to load --inspect-file TEXT Path to JSON file created using "datasette inspect" @@ -27,15 +24,12 @@ Options: --memory Make /_memory database available --config CONFIG Deprecated: set config option using configname:value. Use --setting instead. - --setting SETTING... 
Setting, see docs.datasette.io/en/stable/config.html --secret TEXT Secret used for signing secure values, such as signed cookies - --root Output URL that sets a cookie authenticating the root user --get TEXT Run an HTTP GET request against this path, print results and exit - --version-note TEXT Additional note to show on /-/versions --help-config Show available config options --pdb Launch debugger on any errors From a18e8641bc33e51b265855bc6e8a1939597b3a76 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Jun 2021 15:35:33 -0700 Subject: [PATCH 0462/1705] Don't reflect nofacet=1 and nocount=1 in BLOB URLs, refs #1353 --- datasette/views/base.py | 5 ++++- tests/test_csv.py | 4 ++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index 26edfde5..e2583034 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -369,7 +369,7 @@ class DataView(BaseView): ) else: # Otherwise generate URL for this query - cell = self.ds.absolute_url( + url = self.ds.absolute_url( request, path_with_format( request=request, @@ -383,6 +383,9 @@ class DataView(BaseView): replace_format="csv", ), ) + cell = url.replace("&_nocount=1", "").replace( + "&_nofacet=1", "" + ) new_row.append(cell) row = new_row if not expanded_columns: diff --git a/tests/test_csv.py b/tests/test_csv.py index 02fe5766..01f739e2 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -105,8 +105,8 @@ def test_custom_sql_csv_blob_columns(app_client): assert "text/plain; charset=utf-8" == response.headers["content-type"] assert response.text == ( "rowid,data\r\n" - '1,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_nofacet=1&_blob_column=data&_blob_hash=f3088978da8f9aea479ffc7f631370b968d2e855eeb172bea7f6c7a04262bb6d"\r\n' - '2,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_nofacet=1&_blob_column=data&_blob_hash=b835b0483cedb86130b9a2c280880bf5fadc5318ddf8c18d0df5204d40df1724"\r\n' + '1,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_blob_column=data&_blob_hash=f3088978da8f9aea479ffc7f631370b968d2e855eeb172bea7f6c7a04262bb6d"\r\n' + '2,"http://localhost/fixtures.blob?sql=select+rowid,+data+from+binary_data&_blob_column=data&_blob_hash=b835b0483cedb86130b9a2c280880bf5fadc5318ddf8c18d0df5204d40df1724"\r\n' "3,\r\n" ) From 0539bf0816b58c7f0ba769331f1509656bff3619 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Jun 2021 19:53:00 -0700 Subject: [PATCH 0463/1705] Don't execute facets/counts for _shape=array or object, closes #263 --- datasette/views/table.py | 17 ++++++++++------- tests/test_api.py | 5 +++++ 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index d47865f0..b51d5e5e 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -379,6 +379,13 @@ class TableView(RowTableShared): if is_view: order_by = "" + nocount = request.args.get("_nocount") + nofacet = request.args.get("_nofacet") + + if request.args.get("_shape") in ("array", "object"): + nocount = True + nofacet = True + # Ensure we don't drop anything with an empty value e.g. 
?name__exact= args = MultiParams( urllib.parse.parse_qs(request.query_string, keep_blank_values=True) @@ -697,11 +704,7 @@ class TableView(RowTableShared): except KeyError: pass - if ( - count_sql - and filtered_table_rows_count is None - and not request.args.get("_nocount") - ): + if count_sql and filtered_table_rows_count is None and not nocount: try: count_rows = list(await db.execute(count_sql, from_sql_params)) filtered_table_rows_count = count_rows[0][0] @@ -735,7 +738,7 @@ class TableView(RowTableShared): ) ) - if not request.args.get("_nofacet"): + if not nofacet: for facet in facet_instances: ( instance_facet_results, @@ -833,7 +836,7 @@ class TableView(RowTableShared): self.ds.setting("suggest_facets") and self.ds.setting("allow_facet") and not _next - and not request.args.get("_nofacet") + and not nofacet ): for facet in facet_instances: suggested_facets.extend(await facet.suggest()) diff --git a/tests/test_api.py b/tests/test_api.py index 49b3bbe9..078aad35 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1692,6 +1692,11 @@ def test_nocount(app_client, nocount, expected_count): assert response.json["filtered_table_rows_count"] == expected_count +def test_nocount_nofacet_if_shape_is_object(app_client): + response = app_client.get("/fixtures/facetable.json?_trace=1&_shape=object") + assert "count(*)" not in response.text + + def test_expand_labels(app_client): response = app_client.get( "/fixtures/facetable.json?_shape=object&_labels=1&_size=2" From 03b35d70e281ea48bd9b8058738ed87b13cea2de Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Jun 2021 19:56:44 -0700 Subject: [PATCH 0464/1705] Bump black from 21.5b1 to 21.5b2 (#1352) Bumps [black](https://github.com/psf/black) from 21.5b1 to 21.5b2. 
- [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/commits) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 60a94a5e..e66fefc3 100644 --- a/setup.py +++ b/setup.py @@ -73,7 +73,7 @@ setup( "pytest-xdist>=2.2.1,<2.3", "pytest-asyncio>=0.10,<0.16", "beautifulsoup4>=4.8.1,<4.10.0", - "black==21.5b1", + "black==21.5b2", "pytest-timeout>=1.4.2,<1.5", "trustme>=0.7,<0.8", ], From 807de378d08752a0f05bb1b980a0a62620a70520 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Jun 2021 20:03:07 -0700 Subject: [PATCH 0465/1705] /-/databases and homepage maintain connection order, closes #1216 --- datasette/app.py | 2 +- tests/fixtures.py | 3 ++- tests/test_api.py | 2 +- tests/test_html.py | 6 +++--- 4 files changed, 7 insertions(+), 6 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 957ced7c..018a8d5b 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -646,7 +646,7 @@ class Datasette: "is_memory": d.is_memory, "hash": d.hash, } - for name, d in sorted(self.databases.items(), key=lambda p: p[1].name) + for name, d in self.databases.items() if name != "_internal" ] diff --git a/tests/fixtures.py b/tests/fixtures.py index 5730c1bf..2690052a 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -126,7 +126,8 @@ def make_app_client( for extra_filename, extra_sql in extra_databases.items(): extra_filepath = os.path.join(tmpdir, extra_filename) sqlite3.connect(extra_filepath).executescript(extra_sql) - files.append(extra_filepath) + # Insert at start to help test /-/databases ordering: + files.insert(0, extra_filepath) os.chdir(os.path.dirname(filepath)) config = config or {} for key, value in { diff --git a/tests/test_api.py b/tests/test_api.py index 078aad35..3b789bb7 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1918,7 +1918,7 @@ def test_database_with_space_in_name(app_client_two_attached_databases, path): def test_common_prefix_database_names(app_client_conflicting_database_names): # https://github.com/simonw/datasette/issues/597 - assert ["fixtures", "foo", "foo-bar"] == [ + assert ["foo-bar", "foo", "fixtures"] == [ d["name"] for d in app_client_conflicting_database_names.get("/-/databases.json").json ] diff --git a/tests/test_html.py b/tests/test_html.py index 4f2cc8ad..fd60cdc9 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -29,11 +29,11 @@ def test_homepage(app_client_two_attached_databases): ) # Should be two attached databases assert [ - {"href": "/fixtures", "text": "fixtures"}, {"href": r"/extra%20database", "text": "extra database"}, + {"href": "/fixtures", "text": "fixtures"}, ] == [{"href": a["href"], "text": a.text.strip()} for a in soup.select("h2 a")] - # The first attached database should show count text and attached tables - h2 = soup.select("h2")[1] + # Database should show count text and attached tables + h2 = soup.select("h2")[0] assert "extra database" == h2.text.strip() counts_p, links_p = h2.find_all_next("p")[:2] assert ( From 0f1e47287cf2185e140bd87a03c985c2a7afb450 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Jun 2021 20:27:04 -0700 Subject: [PATCH 0466/1705] Fixed bug with detect_fts for table with single quote in name, closes #1257 --- datasette/utils/__init__.py | 2 +- tests/test_utils.py | 16 ++++++++++++++++ 2 files 
changed, 17 insertions(+), 1 deletion(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index dd47771f..73122976 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -547,7 +547,7 @@ def detect_fts_sql(table): ) ) """.format( - table=table + table=table.replace("'", "''") ) diff --git a/tests/test_utils.py b/tests/test_utils.py index ecef6f7a..be3daf2e 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -200,6 +200,22 @@ def test_detect_fts(open_quote, close_quote): assert "Street_Tree_List_fts" == utils.detect_fts(conn, "Street_Tree_List") +@pytest.mark.parametrize("table", ("regular", "has'single quote")) +def test_detect_fts_different_table_names(table): + sql = """ + CREATE TABLE [{table}] ( + "TreeID" INTEGER, + "qSpecies" TEXT + ); + CREATE VIRTUAL TABLE [{table}_fts] USING FTS4 ("qSpecies", content="{table}"); + """.format( + table=table + ) + conn = utils.sqlite3.connect(":memory:") + conn.executescript(sql) + assert "{table}_fts".format(table=table) == utils.detect_fts(conn, table) + + @pytest.mark.parametrize( "url,expected", [ From 9552414e1f968c6fc704031cec349c05e6bc2371 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Jun 2021 23:46:20 -0400 Subject: [PATCH 0467/1705] Re-display user's query with an error message if an error occurs (#1346) * Ignore _shape when returning errors --- datasette/renderer.py | 4 ++++ datasette/templates/query.html | 5 ++++- datasette/views/base.py | 21 +++++++++++++++++---- datasette/views/database.py | 25 ++++++++++++++++++------- tests/test_canned_queries.py | 2 +- 5 files changed, 44 insertions(+), 13 deletions(-) diff --git a/datasette/renderer.py b/datasette/renderer.py index 66ac169b..45089498 100644 --- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -29,6 +29,7 @@ def convert_specific_columns_to_json(rows, columns, json_cols): def json_renderer(args, data, view_name): """Render a response as JSON""" status_code = 200 + # Handle the _json= parameter which may modify data["rows"] json_cols = [] if "_json" in args: @@ -44,6 +45,9 @@ def json_renderer(args, data, view_name): # Deal with the _shape option shape = args.get("_shape", "arrays") + # if there's an error, ignore the shape entirely + if data.get("error"): + shape = "arrays" next_url = data.get("next_url") diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 9b3fff25..633e53b4 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -33,7 +33,10 @@ {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} -

    Custom SQL query{% if display_rows %} returning {% if truncated %}more than {% endif %}{{ "{:,}".format(display_rows|length) }} row{% if display_rows|length == 1 %}{% else %}s{% endif %}{% endif %} {% if hide_sql %}(show){% else %}(hide){% endif %}
+    Custom SQL query{% if display_rows %} returning {% if truncated %}more than {% endif %}{{ "{:,}".format(display_rows|length) }} row{% if display_rows|length == 1 %}{% else %}s{% endif %}{% endif %}{% if not query_error %} {% if hide_sql %}(show){% else %}(hide){% endif %}{% endif %}
+    {% if query_error %}
+        {{ query_error }}
+    {% endif %}
     {% if not hide_sql %}
     {% if editable and allow_execute_sql %}
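The end-to-end behaviour this commit introduces can be sketched against the JSON API. A minimal check, assuming a local Datasette instance serving the fixtures database on port 8001 (the URL is illustrative, not part of this patch); the 400 status and ``error`` key match the changes and tests in this commit:

.. code-block:: python

    import httpx

    # A query against a missing table now returns HTTP 400 with the error
    # message, and any requested _shape is ignored in favour of the default
    # dictionary shape:
    response = httpx.get(
        "http://localhost:8001/fixtures.json",
        params={"sql": "select * from notatable", "_shape": "array"},
    )
    assert response.status_code == 400
    assert response.json()["error"] == "no such table: notatable"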

    diff --git a/datasette/views/base.py b/datasette/views/base.py index e2583034..94f54787 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -294,6 +294,8 @@ class DataView(BaseView): ) if isinstance(response_or_template_contexts, Response): return response_or_template_contexts + elif len(response_or_template_contexts) == 4: + data, _, _, _ = response_or_template_contexts else: data, _, _ = response_or_template_contexts except (sqlite3.OperationalError, InvalidSql) as e: @@ -467,7 +469,7 @@ class DataView(BaseView): extra_template_data = {} start = time.perf_counter() - status_code = 200 + status_code = None templates = [] try: response_or_template_contexts = await self.data( @@ -475,7 +477,14 @@ class DataView(BaseView): ) if isinstance(response_or_template_contexts, Response): return response_or_template_contexts - + # If it has four items, it includes an HTTP status code + if len(response_or_template_contexts) == 4: + ( + data, + extra_template_data, + templates, + status_code, + ) = response_or_template_contexts else: data, extra_template_data, templates = response_or_template_contexts except QueryInterrupted: @@ -542,12 +551,15 @@ class DataView(BaseView): if isinstance(result, dict): r = Response( body=result.get("body"), - status=result.get("status_code", 200), + status=result.get("status_code", status_code or 200), content_type=result.get("content_type", "text/plain"), headers=result.get("headers"), ) elif isinstance(result, Response): r = result + if status_code is not None: + # Over-ride the status code + r.status = status_code else: assert False, f"{result} should be dict or Response" else: @@ -607,7 +619,8 @@ class DataView(BaseView): if "metadata" not in context: context["metadata"] = self.ds.metadata r = await self.render(templates, request=request, context=context) - r.status = status_code + if status_code is not None: + r.status = status_code ttl = request.args.get("_ttl", None) if ttl is None or not ttl.isdigit(): diff --git a/datasette/views/database.py b/datasette/views/database.py index 96b2ca91..58168ed7 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -14,6 +14,7 @@ from datasette.utils import ( path_with_added_args, path_with_format, path_with_removed_args, + sqlite3, InvalidSql, ) from datasette.utils.asgi import AsgiFileDownload, Response, Forbidden @@ -239,6 +240,8 @@ class QueryView(DataView): templates = [f"query-{to_css_class(database)}.html", "query.html"] + query_error = None + # Execute query - as write or as read if write: if request.method == "POST": @@ -320,10 +323,15 @@ class QueryView(DataView): params_for_query = MagicParameters(params, request, self.ds) else: params_for_query = params - results = await self.ds.execute( - database, sql, params_for_query, truncate=True, **extra_args - ) - columns = [r[0] for r in results.description] + try: + results = await self.ds.execute( + database, sql, params_for_query, truncate=True, **extra_args + ) + columns = [r[0] for r in results.description] + except sqlite3.DatabaseError as e: + query_error = e + results = None + columns = [] if canned_query: templates.insert( @@ -337,7 +345,7 @@ class QueryView(DataView): async def extra_template(): display_rows = [] - for row in results.rows: + for row in results.rows if results else []: display_row = [] for column, value in zip(results.columns, row): display_value = value @@ -423,17 +431,20 @@ class QueryView(DataView): return ( { + "ok": not query_error, "database": database, "query_name": canned_query, - "rows": 
results.rows, - "truncated": results.truncated, + "rows": results.rows if results else [], + "truncated": results.truncated if results else False, "columns": columns, "query": {"sql": sql, "params": params}, + "error": str(query_error) if query_error else None, "private": private, "allow_execute_sql": allow_execute_sql, }, extra_template, templates, + 400 if query_error else 200, ) diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index 65f23cc7..4186a97c 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -352,5 +352,5 @@ def test_magic_parameters_cannot_be_used_in_arbitrary_queries(magic_parameters_c response = magic_parameters_client.get( "/data.json?sql=select+:_header_host&_shape=array" ) - assert 500 == response.status + assert 400 == response.status assert "You did not supply a value for binding 1." == response.json["error"] From ea5b2378007ef524f7a17989c8df54a76a001e49 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Jun 2021 20:59:29 -0700 Subject: [PATCH 0468/1705] Show error message on bad query, closes #619 --- datasette/templates/query.html | 4 ++-- tests/test_html.py | 10 ++++++++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 633e53b4..8b6ad138 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -34,8 +34,8 @@

     Custom SQL query{% if display_rows %} returning {% if truncated %}more than {% endif %}{{ "{:,}".format(display_rows|length) }} row{% if display_rows|length == 1 %}{% else %}s{% endif %}{% endif %}{% if not query_error %} {% if hide_sql %}(show){% else %}(hide){% endif %}{% endif %}
-    {% if query_error %}
-        {{ query_error }}
+    {% if error %}
+        {{ error }}
    {% endif %} {% if not hide_sql %} {% if editable and allow_execute_sql %} diff --git a/tests/test_html.py b/tests/test_html.py index fd60cdc9..5fca76c3 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1402,6 +1402,16 @@ def test_zero_results(app_client, path): assert 1 == len(soup.select("p.zero-results")) +def test_query_error(app_client): + response = app_client.get("/fixtures?sql=select+*+from+notatable") + html = response.text + assert '

no such table: notatable
    ' in html + assert ( + '' + in html + ) + + def test_config_template_debug_on(): with make_app_client(config={"template_debug": True}) as client: response = client.get("/fixtures/facetable?_context=1") From f40d1b99d67b0da4f3aff5b3483f4e09db7e8e6b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Jun 2021 21:09:10 -0700 Subject: [PATCH 0469/1705] Don't show '0 results' on error page, refs #619 --- datasette/templates/query.html | 2 +- tests/test_html.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 8b6ad138..b6c74883 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -80,7 +80,7 @@
    {% if td == None %}{{ "&nbsp;"|safe }}{% else %}{{ td }}{% endif %}{{ td }}
     {% else %}
-    {% if not canned_write %}
+    {% if not canned_write and not error %}
         0 results
    {% endif %} {% endif %} diff --git a/tests/test_html.py b/tests/test_html.py index 5fca76c3..90373c28 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1410,6 +1410,7 @@ def test_query_error(app_client): '' in html ) + assert "0 results" not in html def test_config_template_debug_on(): From 0f41db1ba8a8a49a4adc1046a25ccf32790e863f Mon Sep 17 00:00:00 2001 From: Guy Freeman Date: Wed, 2 Jun 2021 07:25:27 +0300 Subject: [PATCH 0470/1705] Avoid error sorting by relationships if related tables are not allowed Refs #1306 --- datasette/views/index.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/datasette/views/index.py b/datasette/views/index.py index b6b8cbe5..8ac117a6 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -78,8 +78,9 @@ class IndexView(BaseView): # We will be sorting by number of relationships, so populate that field all_foreign_keys = await db.get_all_foreign_keys() for table, foreign_keys in all_foreign_keys.items(): - count = len(foreign_keys["incoming"] + foreign_keys["outgoing"]) - tables[table]["num_relationships_for_sorting"] = count + if table in tables.keys(): + count = len(foreign_keys["incoming"] + foreign_keys["outgoing"]) + tables[table]["num_relationships_for_sorting"] = count hidden_tables = [t for t in tables.values() if t["hidden"]] visible_tables = [t for t in tables.values() if not t["hidden"]] From 80d8b0eb415faf5caadd7cc7036407e6ee55bd44 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Jun 2021 21:26:25 -0700 Subject: [PATCH 0471/1705] Test demonstrating fixed #1305, refs #1306 --- tests/test_html.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/tests/test_html.py b/tests/test_html.py index 90373c28..8bc53339 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1685,3 +1685,22 @@ def test_facet_more_links( assert facet_truncated.find("a")["href"] == expected_ellipses_url else: assert facet_truncated.find("a") is None + + +def test_unavailable_table_does_not_break_sort_relationships(): + # https://github.com/simonw/datasette/issues/1305 + with make_app_client( + metadata={ + "databases": { + "fixtures": { + "tables": { + "foreign_key_references": { + "allow": False + } + } + } + } + } + ) as client: + response = client.get("/?_sort=relationships") + assert response.status == 200 From d5d387abfe68ea546c53698ebb2b8eeeb4d32c3f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Jun 2021 21:30:44 -0700 Subject: [PATCH 0472/1705] Applied Black, refs #1305 --- tests/test_html.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/tests/test_html.py b/tests/test_html.py index 8bc53339..31bb6667 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1692,13 +1692,7 @@ def test_unavailable_table_does_not_break_sort_relationships(): with make_app_client( metadata={ "databases": { - "fixtures": { - "tables": { - "foreign_key_references": { - "allow": False - } - } - } + "fixtures": {"tables": {"foreign_key_references": {"allow": False}}} } } ) as client: From f78ebdc04537a6102316d6dbbf6c887565806078 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 2 Jun 2021 10:00:30 -0700 Subject: [PATCH 0473/1705] Better "uploading and publishing your own CSV data" link --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 4f3c9a94..5682f59e 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ Datasette is a tool for exploring and publishing data. 
It helps people take data Datasette is aimed at data journalists, museum curators, archivists, local governments and anyone else who has data that they wish to share with the world. -[Explore a demo](https://global-power-plants.datasettes.com/global-power-plants/global-power-plants), watch [a video about the project](https://simonwillison.net/2021/Feb/7/video/) or try it out by [uploading and publishing your own CSV data](https://simonwillison.net/2019/Apr/23/datasette-glitch/). +[Explore a demo](https://global-power-plants.datasettes.com/global-power-plants/global-power-plants), watch [a video about the project](https://simonwillison.net/2021/Feb/7/video/) or try it out by [uploading and publishing your own CSV data](https://docs.datasette.io/en/stable/getting_started.html#try-datasette-without-installing-anything-using-glitch). * [datasette.io](https://datasette.io/) is the official project website * Latest [Datasette News](https://datasette.io/news) From 6e9b07be92905011211d8df7a872fb7c1f2737b2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 2 Jun 2021 21:45:03 -0700 Subject: [PATCH 0474/1705] More inclusive language --- datasette/cli.py | 2 +- datasette/facets.py | 2 +- datasette/views/base.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 71bbc353..12ee92c3 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -540,7 +540,7 @@ def serve( # Run the "startup" plugin hooks asyncio.get_event_loop().run_until_complete(ds.invoke_startup()) - # Run async sanity checks - but only if we're not under pytest + # Run async soundness checks - but only if we're not under pytest asyncio.get_event_loop().run_until_complete(check_databases(ds)) if get: diff --git a/datasette/facets.py b/datasette/facets.py index 9d95d0f3..250734fd 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -304,7 +304,7 @@ class ArrayFacet(Facet): ) types = tuple(r[0] for r in results.rows) if types in (("array",), ("array", None)): - # Now sanity check that first 100 arrays contain only strings + # Now check that first 100 arrays contain only strings first_100 = [ v[0] for v in await self.ds.execute( diff --git a/datasette/views/base.py b/datasette/views/base.py index 94f54787..1a03b97f 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -281,7 +281,7 @@ class DataView(BaseView): ) request.scope = new_scope if stream: - # Some quick sanity checks + # Some quick soundness checks if not self.ds.setting("allow_csv_stream"): raise BadRequest("CSV streaming is disabled") if request.args.get("_next"): From a63412152518581c6a3d4e142b937e27dabdbfdb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 5 Jun 2021 11:59:54 -0700 Subject: [PATCH 0475/1705] Make custom pages compatible with base_url setting Closes #1238 - base_url no longer causes custom page routing to fail - new route_path key in request.scope storing the path that was used for routing with the base_url prefix stripped - TestClient used by tests now avoids accidentally double processing of the base_url prefix --- datasette/app.py | 17 ++++++++++------- datasette/utils/testing.py | 1 + tests/test_custom_pages.py | 16 +++++++++++++++- tests/test_html.py | 1 + 4 files changed, 27 insertions(+), 8 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 018a8d5b..c0e8ad01 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1089,6 +1089,7 @@ class DatasetteRouter: base_url = self.ds.setting("base_url") if base_url != "/" and path.startswith(base_url): 
path = "/" + path[len(base_url) :] + scope = dict(scope, route_path=path) request = Request(scope, receive) # Populate request_messages if ds_messages cookie is present try: @@ -1143,9 +1144,8 @@ class DatasetteRouter: await asgi_send_redirect(send, path.decode("latin1")) else: # Is there a pages/* template matching this path? - template_path = ( - os.path.join("pages", *request.scope["path"].split("/")) + ".html" - ) + route_path = request.scope.get("route_path", request.scope["path"]) + template_path = os.path.join("pages", *route_path.split("/")) + ".html" try: template = self.ds.jinja_env.select_template([template_path]) except TemplateNotFound: @@ -1153,7 +1153,7 @@ class DatasetteRouter: if template is None: # Try for a pages/blah/{name}.html template match for regex, wildcard_template in self.page_routes: - match = regex.match(request.scope["path"]) + match = regex.match(route_path) if match is not None: context.update(match.groupdict()) template = wildcard_template @@ -1356,8 +1356,8 @@ class DatasetteClient: self.ds = ds self.app = ds.app() - def _fix(self, path): - if not isinstance(path, PrefixedUrlString): + def _fix(self, path, avoid_path_rewrites=False): + if not isinstance(path, PrefixedUrlString) and not avoid_path_rewrites: path = self.ds.urls.path(path) if path.startswith("/"): path = f"http://localhost{path}" @@ -1392,5 +1392,8 @@ class DatasetteClient: return await client.delete(self._fix(path), **kwargs) async def request(self, method, path, **kwargs): + avoid_path_rewrites = kwargs.pop("avoid_path_rewrites", None) async with httpx.AsyncClient(app=self.app) as client: - return await client.request(method, self._fix(path), **kwargs) + return await client.request( + method, self._fix(path, avoid_path_rewrites), **kwargs + ) diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index 57b19ea5..a169a83d 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -140,6 +140,7 @@ class TestClient: method, path, allow_redirects=allow_redirects, + avoid_path_rewrites=True, cookies=cookies, headers=headers, content=post_body, diff --git a/tests/test_custom_pages.py b/tests/test_custom_pages.py index 6a231920..5a71f56d 100644 --- a/tests/test_custom_pages.py +++ b/tests/test_custom_pages.py @@ -2,11 +2,19 @@ import pathlib import pytest from .fixtures import make_app_client +TEST_TEMPLATE_DIRS = str(pathlib.Path(__file__).parent / "test_templates") + @pytest.fixture(scope="session") def custom_pages_client(): + with make_app_client(template_dir=TEST_TEMPLATE_DIRS) as client: + yield client + + +@pytest.fixture(scope="session") +def custom_pages_client_with_base_url(): with make_app_client( - template_dir=str(pathlib.Path(__file__).parent / "test_templates") + template_dir=TEST_TEMPLATE_DIRS, config={"base_url": "/prefix/"} ) as client: yield client @@ -23,6 +31,12 @@ def test_request_is_available(custom_pages_client): assert "path:/request" == response.text +def test_custom_pages_with_base_url(custom_pages_client_with_base_url): + response = custom_pages_client_with_base_url.get("/prefix/request") + assert 200 == response.status + assert "path:/prefix/request" == response.text + + def test_custom_pages_nested(custom_pages_client): response = custom_pages_client.get("/nested/nest") assert 200 == response.status diff --git a/tests/test_html.py b/tests/test_html.py index 31bb6667..f1d4bd70 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1523,6 +1523,7 @@ def test_base_url_config(app_client_base_url_prefix, path): and href not in { 
"https://datasette.io/", + "https://github.com/simonw/datasette", "https://github.com/simonw/datasette/blob/main/LICENSE", "https://github.com/simonw/datasette/blob/main/tests/fixtures.py", "/login-as-root", # Only used for the latest.datasette.io demo From 368aa5f1b16ca35f82d90ff747023b9a2bfa27c1 Mon Sep 17 00:00:00 2001 From: louispotok Date: Sun, 6 Jun 2021 02:48:51 +0700 Subject: [PATCH 0476/1705] Update docs: explain allow_download setting (#1291) * Update docs: explain allow_download setting This fixes one possible source of confusion seen in #502 and clarifies when database downloads will be shown and allowed. --- docs/settings.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/settings.rst b/docs/settings.rst index af8e4406..db17a45e 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -163,7 +163,7 @@ Should Datasette calculate suggested facets? On by default, turn this off like s allow_download ~~~~~~~~~~~~~~ -Should users be able to download the original SQLite database using a link on the database index page? This is turned on by default - to disable database downloads, use the following:: +Should users be able to download the original SQLite database using a link on the database index page? This is turned on by default. However, databases can only be downloaded if they are served in immutable mode and not in-memory. If downloading is unavailable for either of these reasons, the download link is hidden even if ``allow_download`` is on. To disable database downloads, use the following:: datasette mydatabase.db --setting allow_download off From ff29dd55fafd7c3d27bd30f40945847aa4278309 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 5 Jun 2021 13:15:58 -0700 Subject: [PATCH 0477/1705] ?_trace=1 now depends on trace_debug setting, closes #1359 --- .github/workflows/deploy-latest.yml | 2 +- datasette/app.py | 20 +++++++++++++------- docs/json_api.rst | 18 ++++++++++-------- docs/settings.rst | 16 ++++++++++++++++ tests/fixtures.py | 6 ++++++ tests/test_api.py | 20 ++++++++++++++++---- tests/test_csv.py | 13 +++++++------ 7 files changed, 69 insertions(+), 26 deletions(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 43e46fb4..d9f23f7d 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -53,7 +53,7 @@ jobs: --plugins-dir=plugins \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ - --extra-options="--setting template_debug 1 --crossdb" \ + --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \ --install=pysqlite3-binary \ --service=datasette-latest # Deploy docs.db to a different service diff --git a/datasette/app.py b/datasette/app.py index c0e8ad01..d85517e6 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -162,6 +162,11 @@ SETTINGS = ( False, "Allow display of template debug information with ?_context=1", ), + Setting( + "trace_debug", + False, + "Allow display of SQL trace debug information with ?_trace=1", + ), Setting("base_url", "/", "Datasette URLs should use this base path"), ) @@ -1041,14 +1046,15 @@ class Datasette: if not database.is_mutable: await database.table_counts(limit=60 * 60 * 1000) + asgi = asgi_csrf.asgi_csrf( + DatasetteRouter(self, routes), + signing_secret=self._secret, + cookie_name="ds_csrftoken", + ) + if self.setting("trace_debug"): + asgi = AsgiTracer(asgi) asgi = AsgiLifespan( - AsgiTracer( - asgi_csrf.asgi_csrf( - DatasetteRouter(self, routes), - signing_secret=self._secret, - 
cookie_name="ds_csrftoken", - ) - ), + asgi, on_startup=setup_db, ) for wrapper in pm.hook.asgi_wrapper(datasette=self): diff --git a/docs/json_api.rst b/docs/json_api.rst index 660fbc1c..09cac1f9 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -206,6 +206,16 @@ query string arguments: For how many seconds should this response be cached by HTTP proxies? Use ``?_ttl=0`` to disable HTTP caching entirely for this request. +``?_trace=1`` + Turns on tracing for this page: SQL queries executed during the request will + be gathered and included in the response, either in a new ``"_traces"`` key + for JSON responses or at the bottom of the page if the response is in HTML. + + The structure of the data returned here should be considered highly unstable + and very likely to change. + + Only available if the :ref:`setting_trace_debug` setting is enabled. + .. _table_arguments: Table arguments @@ -389,14 +399,6 @@ Special table arguments ``?_nocount=1`` Disable the ``select count(*)`` query used on this page - a count of ``None`` will be returned instead. -``?_trace=1`` - Turns on tracing for this page: SQL queries executed during the request will - be gathered and included in the response, either in a new ``"_traces"`` key - for JSON responses or at the bottom of the page if the response is in HTML. - - The structure of the data returned here should be considered highly unstable - and very likely to change. - .. _expand_foreign_keys: Expanding foreign key references diff --git a/docs/settings.rst b/docs/settings.rst index db17a45e..c246d33a 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -286,6 +286,22 @@ Some examples: * https://latest.datasette.io/fixtures?_context=1 * https://latest.datasette.io/fixtures/roadside_attractions?_context=1 +.. _setting_trace_debug: + +trace_debug +~~~~~~~~~~~ + +This setting enables appending ``?_trace=1`` to any page in order to see the SQL queries and other trace information that was used to generate that page. + +Enable it like this:: + + datasette mydatabase.db --setting trace_debug 1 + +Some examples: + +* https://latest.datasette.io/?_trace=1 +* https://latest.datasette.io/fixtures/roadside_attractions?_trace=1 + .. 
_setting_base_url: base_url diff --git a/tests/fixtures.py b/tests/fixtures.py index 2690052a..cdd2e987 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -214,6 +214,12 @@ def app_client_with_hash(): yield client +@pytest.fixture(scope="session") +def app_client_with_trace(): + with make_app_client(config={"trace_debug": True}, is_immutable=True) as client: + yield client + + @pytest.fixture(scope="session") def app_client_shorter_time_limit(): with make_app_client(20) as client: diff --git a/tests/test_api.py b/tests/test_api.py index 3b789bb7..e5e609d6 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -15,6 +15,7 @@ from .fixtures import ( # noqa app_client_conflicting_database_names, app_client_with_cors, app_client_with_dot, + app_client_with_trace, app_client_immutable_and_inspect_file, generate_compound_rows, generate_sortable_rows, @@ -1422,6 +1423,7 @@ def test_settings_json(app_client): "force_https_urls": False, "hash_urls": False, "template_debug": False, + "trace_debug": False, "base_url": "/", } == response.json @@ -1692,8 +1694,10 @@ def test_nocount(app_client, nocount, expected_count): assert response.json["filtered_table_rows_count"] == expected_count -def test_nocount_nofacet_if_shape_is_object(app_client): - response = app_client.get("/fixtures/facetable.json?_trace=1&_shape=object") +def test_nocount_nofacet_if_shape_is_object(app_client_with_trace): + response = app_client_with_trace.get( + "/fixtures/facetable.json?_trace=1&_shape=object" + ) assert "count(*)" not in response.text @@ -1863,9 +1867,17 @@ def test_custom_query_with_unicode_characters(app_client): assert [{"id": 1, "name": "San Francisco"}] == response.json -def test_trace(app_client): - response = app_client.get("/fixtures/simple_primary_key.json?_trace=1") +@pytest.mark.parametrize("trace_debug", (True, False)) +def test_trace(trace_debug): + with make_app_client(config={"trace_debug": trace_debug}) as client: + response = client.get("/fixtures/simple_primary_key.json?_trace=1") + assert response.status == 200 + data = response.json + if not trace_debug: + assert "_trace" not in data + return + assert "_trace" in data trace_info = data["_trace"] assert isinstance(trace_info["request_duration_ms"], float) diff --git a/tests/test_csv.py b/tests/test_csv.py index 01f739e2..3debf320 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -3,6 +3,7 @@ from .fixtures import ( # noqa app_client, app_client_csv_max_mb_one, app_client_with_cors, + app_client_with_trace, ) EXPECTED_TABLE_CSV = """id,content @@ -160,8 +161,8 @@ def test_table_csv_stream(app_client): assert 1002 == len([b for b in response.body.split(b"\r\n") if b]) -def test_csv_trace(app_client): - response = app_client.get("/fixtures/simple_primary_key.csv?_trace=1") +def test_csv_trace(app_client_with_trace): + response = app_client_with_trace.get("/fixtures/simple_primary_key.csv?_trace=1") assert response.headers["content-type"] == "text/html; charset=utf-8" soup = Soup(response.text, "html.parser") assert ( @@ -171,13 +172,13 @@ def test_csv_trace(app_client): assert "select id, content from simple_primary_key" in soup.find("pre").text -def test_table_csv_stream_does_not_calculate_facets(app_client): - response = app_client.get("/fixtures/simple_primary_key.csv?_trace=1") +def test_table_csv_stream_does_not_calculate_facets(app_client_with_trace): + response = app_client_with_trace.get("/fixtures/simple_primary_key.csv?_trace=1") soup = Soup(response.text, "html.parser") assert "select content, count(*) as n" not in 
soup.find("pre").text -def test_table_csv_stream_does_not_calculate_counts(app_client): - response = app_client.get("/fixtures/simple_primary_key.csv?_trace=1") +def test_table_csv_stream_does_not_calculate_counts(app_client_with_trace): + response = app_client_with_trace.get("/fixtures/simple_primary_key.csv?_trace=1") soup = Soup(response.text, "html.parser") assert "select count(*)" not in soup.find("pre").text From 8f311d6c1d9f73f4ec643009767749c17b5ca5dd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 5 Jun 2021 14:49:16 -0700 Subject: [PATCH 0478/1705] Correctly escape output of ?_trace, refs #1360 --- datasette/tracer.py | 3 ++- tests/test_html.py | 6 ++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datasette/tracer.py b/datasette/tracer.py index 772f0405..62c3c90c 100644 --- a/datasette/tracer.py +++ b/datasette/tracer.py @@ -1,5 +1,6 @@ import asyncio from contextlib import contextmanager +from markupsafe import escape import time import json import traceback @@ -123,7 +124,7 @@ class AsgiTracer: except IndexError: content_type = "" if "text/html" in content_type and b"" in accumulated_body: - extra = json.dumps(trace_info, indent=2) + extra = escape(json.dumps(trace_info, indent=2)) extra_html = f"
<pre>{extra}</pre>
    ".encode("utf8") accumulated_body = accumulated_body.replace(b"", extra_html) elif "json" in content_type and accumulated_body.startswith(b"{"): diff --git a/tests/test_html.py b/tests/test_html.py index f1d4bd70..8714d254 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1699,3 +1699,9 @@ def test_unavailable_table_does_not_break_sort_relationships(): ) as client: response = client.get("/?_sort=relationships") assert response.status == 200 + + +def test_trace_correctly_escaped(app_client): + response = app_client.get("/fixtures?sql=select+'

    Hello'&_trace=1") + assert "select '

    Hello" not in response.text + assert "select '<h1>Hello" in response.text From 58746d3c514004f504223a724e948469a0d4abb3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 5 Jun 2021 15:06:52 -0700 Subject: [PATCH 0479/1705] Release 0.57 Refs #263, #615, #619, #1238, #1257, #1305, #1308, #1320, #1332, #1337, #1349, #1353, #1359, #1360 --- datasette/version.py | 2 +- docs/changelog.rst | 43 ++++++++++++++++++++++++++++++++++++------- 2 files changed, 37 insertions(+), 8 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index cc98e271..93af8b3b 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.57a1" +__version__ = "0.57" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index e00791f8..842ca839 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,16 +4,45 @@ Changelog ========= -.. _v0_57_a0: +.. _v0_57: -0.57a0 (2021-05-22) +0.57 (2021-06-05) +----------------- + +.. warning:: + This release fixes a `reflected cross-site scripting `__ security hole with the ``?_trace=1`` feature. You should upgrade to this version, or to Datasette 0.56.1, as soon as possible. (:issue:`1360`) + +In addition to the security fix, this release includes ``?_col=`` and ``?_nocol=`` options for controlling which columns are displayed for a table, ``?_facet_size=`` for increasing the number of facet results returned, re-display of your SQL query should an error occur and numerous bug fixes. + +New features +~~~~~~~~~~~~ + +- If an error occurs while executing a user-provided SQL query, that query is now re-displayed in an editable form along with the error message. (:issue:`619`) +- New ``?_col=`` and ``?_nocol=`` parameters to show and hide columns in a table, plus an interface for hiding and showing columns in the column cog menu. (:issue:`615`) +- A new ``?_facet_size=`` parameter for customizing the number of facet results returned on a table or view page. (:issue:`1332`) +- ``?_facet_size=max`` sets that to the maximum, which defaults to 1,000 and is controlled by the the :ref:`setting_max_returned_rows` setting. If facet results are truncated the … at the bottom of the facet list now links to this parameter. (:issue:`1337`) +- ``?_nofacet=1`` option to disable all facet calculations on a page, used as a performance optimization for CSV exports and ``?_shape=array/object``. (:issue:`1349`, :issue:`263`) +- ``?_nocount=1`` option to disable full query result counts. (:issue:`1353`) +- ``?_trace=1`` debugging option is now controlled by the new :ref:`setting_trace_debug` setting, which is turned off by default. (:issue:`1359`) + +Bug fixes and other improvements +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- :ref:`custom_pages` now work correctly when combined with the :ref:`setting_base_url` setting. (:issue:`1238`) +- Fixed intermittent error displaying the index page when the user did not have permission to access one of the tables. Thanks, Guy Freeman. (:issue:`1305`) +- Columns with the name "Link" are no longer incorrectly displayed in bold. (:issue:`1308`) +- Fixed error caused by tables with a single quote in their names. (:issue:`1257`) +- Updated dependencies: ``pytest-asyncio``, ``Black``, ``jinja2``, ``aiofiles``, ``click``, and ``itsdangerous``. +- The official Datasette Docker image now supports ``apt-get install``. (:issue:`1320`) +- The Heroku runtime used by ``datasette publish heroku`` is now ``python-3.8.10``. + +.. 
_v0_56_1: + +0.56.1 (2021-06-05) ------------------- -Mainly dependency bumps, plus a new ``?_facet_size=`` argument. - -- Updated dependencies: pytest-asyncio, Black, jinja2, aiofiles, itsdangerous -- Fixed bug where columns called "Link" were incorrectly displayed in bold. (:issue:`1308`) -- New ``?_facet_size=`` argument for customizing the number of facet results returned on a page. (:issue:`1332`) +.. warning:: + This release fixes a `reflected cross-site scripting `__ security hole with the ``?_trace=1`` feature. You should upgrade to this version, or to Datasette 0.57, as soon as possible. (:issue:`1360`) .. _v0_56: From 0dfb9241718139f8ad626d22aac25bcebd3a9c9c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 5 Jun 2021 15:55:07 -0700 Subject: [PATCH 0480/1705] Temporarily reverting buildx support I need to push a container for 0.57 using this action, and I'm not ready to ship other architecture builds until we have tested them in #1344. --- .github/workflows/push_docker_tag.yml | 34 +++++++-------------------- 1 file changed, 9 insertions(+), 25 deletions(-) diff --git a/.github/workflows/push_docker_tag.yml b/.github/workflows/push_docker_tag.yml index e61150a5..9a3969f0 100644 --- a/.github/workflows/push_docker_tag.yml +++ b/.github/workflows/push_docker_tag.yml @@ -11,31 +11,15 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - - name: Set up Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v1 - - - name: Available platforms - run: echo ${{ steps.buildx.outputs.platforms }} - - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKER_USER }} - password: ${{ secrets.DOCKER_PASS }} - - name: Build and push to Docker Hub - run: | - docker buildx build \ - --file Dockerfile . \ - --tag $REPO:${VERSION_TAG} \ - --build-arg VERSION=${VERSION_TAG} \ - --platform linux/386,linux/amd64,linux/arm/v6,linux/arm/v7,linux/arm64,linux/ppc64le,linux/s390x \ - --push env: - REPO: datasetteproject/datasette + DOCKER_USER: ${{ secrets.DOCKER_USER }} + DOCKER_PASS: ${{ secrets.DOCKER_PASS }} VERSION_TAG: ${{ github.event.inputs.version_tag }} + run: |- + docker login -u $DOCKER_USER -p $DOCKER_PASS + export REPO=datasetteproject/datasette + docker build -f Dockerfile \ + -t $REPO:${VERSION_TAG} \ + --build-arg VERSION=${VERSION_TAG} . 
+ docker push $REPO:${VERSION_TAG} From 030deb4b25cda842ff7129ab7c18550c44dd8379 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 5 Jun 2021 16:01:34 -0700 Subject: [PATCH 0481/1705] Try to handle intermittent FileNotFoundError in tests Refs #1361 --- tests/conftest.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index ad3eb9f1..c6a3eee6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -61,13 +61,18 @@ def move_to_front(items, test_name): @pytest.fixture def restore_working_directory(tmpdir, request): - previous_cwd = os.getcwd() + try: + previous_cwd = os.getcwd() + except OSError: + # https://github.com/simonw/datasette/issues/1361 + previous_cwd = None tmpdir.chdir() def return_to_previous(): os.chdir(previous_cwd) - request.addfinalizer(return_to_previous) + if previous_cwd is not None: + request.addfinalizer(return_to_previous) @pytest.fixture(scope="session", autouse=True) From 03ec71193b9545536898a4bc7493274fec48bdd7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 6 Jun 2021 15:07:45 -0700 Subject: [PATCH 0482/1705] Don't truncate list of columns on /db page, closes #1364 --- datasette/templates/database.html | 2 +- tests/test_html.py | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 3fe7c891..2d182d1b 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -71,7 +71,7 @@ {% if show_hidden or not table.hidden %}

     {{ table.name }}{% if table.private %} 🔒{% endif %}{% if table.hidden %} (hidden){% endif %}
-    {% for column in table.columns[:9] %}{{ column }}{% if not loop.last %}, {% endif %}{% endfor %}{% if table.columns|length > 9 %}...{% endif %}
+    {% for column in table.columns %}{{ column }}{% if not loop.last %}, {% endif %}{% endfor %}
     {% if table.count is none %}Many rows{% else %}{{ "{:,}".format(table.count) }} row{% if table.count == 1 %}{% else %}s{% endif %}{% endif %}
    {% endif %} diff --git a/tests/test_html.py b/tests/test_html.py index 8714d254..ccee8b7e 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -101,6 +101,11 @@ def test_database_page_redirects_with_url_hash(app_client_with_hash): def test_database_page(app_client): response = app_client.get("/fixtures") + assert ( + b"

pk, foreign_key_with_label, foreign_key_with_blank_label, " + b"foreign_key_with_no_label, foreign_key_compound_pk1, " + b"foreign_key_compound_pk2
    " + ) in response.body soup = Soup(response.body, "html.parser") queries_ul = soup.find("h2", text="Queries").find_next_sibling("ul") assert queries_ul is not None From f4c5777c7e4ed406313583de09a3bf746552167f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 7 Jun 2021 11:24:14 -0700 Subject: [PATCH 0483/1705] Fix visual glitch in nav menu, closes #1367 --- datasette/static/app.css | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index 7f04a162..ad517c98 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -452,7 +452,8 @@ table a:link { margin-left: -10%; font-size: 0.8em; } -.rows-and-columns td ol,ul { +.rows-and-columns td ol, +.rows-and-columns td ul { list-style: initial; list-style-position: inside; } From a3faf378834cc9793adeb22dee19ef57c417457e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 8 Jun 2021 09:26:45 -0700 Subject: [PATCH 0484/1705] Release 0.57.1 Refs #1364, #1367 --- datasette/version.py | 2 +- docs/changelog.rst | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 93af8b3b..14a7be17 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.57" +__version__ = "0.57.1" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 842ca839..89b8fcf5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,14 @@ Changelog ========= +.. _v0_57_1: + +0.57.1 (2021-06-08) +------------------- + +- Fixed visual display glitch with global navigation menu. (:issue:`1367`) +- No longer truncates the list of table columns displayed on the ``/database`` page. (:issue:`1364`) + .. 
_v0_57: 0.57 (2021-06-05) From d23a2671386187f61872b9f6b58e0f80ac61f8fe Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Jun 2021 21:45:24 -0700 Subject: [PATCH 0485/1705] Make request available to menu plugin hooks, closes #1371 --- datasette/app.py | 4 +++- datasette/hookspecs.py | 6 +++--- datasette/views/database.py | 1 + datasette/views/table.py | 1 + docs/plugin_hooks.rst | 22 +++++++++++++++------- tests/plugins/my_plugin.py | 14 ++++++++++---- tests/plugins/my_plugin_2.py | 7 +++++-- tests/test_plugins.py | 12 ++++++------ 8 files changed, 44 insertions(+), 23 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index d85517e6..fc5b7d9d 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -833,7 +833,9 @@ class Datasette: async def menu_links(): links = [] for hook in pm.hook.menu_links( - datasette=self, actor=request.actor if request else None + datasette=self, + actor=request.actor if request else None, + request=request or None, ): extra_links = await await_me_maybe(hook) if extra_links: diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 13a10680..579787a2 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -100,15 +100,15 @@ def forbidden(datasette, request, message): @hookspec -def menu_links(datasette, actor): +def menu_links(datasette, actor, request): """Links for the navigation menu""" @hookspec -def table_actions(datasette, actor, database, table): +def table_actions(datasette, actor, database, table, request): """Links for the table actions menu""" @hookspec -def database_actions(datasette, actor, database): +def database_actions(datasette, actor, database, request): """Links for the database actions menu""" diff --git a/datasette/views/database.py b/datasette/views/database.py index 58168ed7..53bdceed 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -110,6 +110,7 @@ class DatabaseView(DataView): datasette=self.ds, database=database, actor=request.actor, + request=request, ): extra_links = await await_me_maybe(hook) if extra_links: diff --git a/datasette/views/table.py b/datasette/views/table.py index b51d5e5e..81d4d721 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -894,6 +894,7 @@ class TableView(RowTableShared): table=table, database=database, actor=request.actor, + request=request, ): extra_links = await await_me_maybe(hook) if extra_links: diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 688eaa61..2c31e6f4 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1015,8 +1015,8 @@ The function can alternatively return an awaitable function if it needs to make .. _plugin_hook_menu_links: -menu_links(datasette, actor) ----------------------------- +menu_links(datasette, actor, request) +------------------------------------- ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. @@ -1024,6 +1024,9 @@ menu_links(datasette, actor) ``actor`` - dictionary or None The currently authenticated :ref:`actor `. +``request`` - object or None + The current HTTP :ref:`internals_request`. This can be ``None`` if the request object is not available. + This hook allows additional items to be included in the menu displayed by Datasette's top right menu icon. The hook should return a list of ``{"href": "...", "label": "..."}`` menu items. These will be added to the menu. 
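With the new ``request`` argument the hook can now vary its menu per request. A minimal sketch of that usage; the ``/-/dashboard`` path and ``_debug`` parameter are illustrative assumptions, not part of this patch:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def menu_links(datasette, actor, request):
        # request can be None, so guard before reading query string arguments
        if actor and request is not None and request.args.get("_debug"):
            return [
                {
                    "href": datasette.urls.path("/-/dashboard"),
                    "label": "Debug dashboard",
                }
            ]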
@@ -1045,11 +1048,10 @@ This example adds a new menu item but only if the signed in user is ``"root"``: Using :ref:`internals_datasette_urls` here ensures that links in the menu will take the :ref:`setting_base_url` setting into account. - .. _plugin_hook_table_actions: -table_actions(datasette, actor, database, table) ------------------------------------------------- +table_actions(datasette, actor, database, table, request) +--------------------------------------------------------- ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. @@ -1063,6 +1065,9 @@ table_actions(datasette, actor, database, table) ``table`` - string The name of the table. +``request`` - object + The current HTTP :ref:`internals_request`. This can be ``None`` if the request object is not available. + This hook allows table actions to be displayed in a menu accessed via an action icon at the top of the table page. It should return a list of ``{"href": "...", "label": "..."}`` menu items. It can alternatively return an ``async def`` awaitable function which returns a list of menu items. @@ -1083,8 +1088,8 @@ This example adds a new table action if the signed in user is ``"root"``: .. _plugin_hook_database_actions: -database_actions(datasette, actor, database) --------------------------------------------- +database_actions(datasette, actor, database, request) +----------------------------------------------------- ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. @@ -1095,4 +1100,7 @@ database_actions(datasette, actor, database) ``database`` - string The name of the database. +``request`` - object + The current HTTP :ref:`internals_request`. + This hook is similar to :ref:`plugin_hook_table_actions` but populates an actions menu on the database page. 
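The same pattern applies to table actions. A hedged sketch, modelled on the async test plugin updated later in this commit (the ``_hello`` query string parameter is the one that plugin uses):

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def table_actions(datasette, actor, database, table, request):
        async def inner():
            if actor:
                label = f"Table: {table}"
                if request.args.get("_hello"):
                    label += " - " + request.args["_hello"]
                return [{"href": datasette.urls.instance(), "label": label}]

        return inner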
diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 26d06091..85a7467d 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -316,9 +316,12 @@ def forbidden(datasette, request, message): @hookimpl -def menu_links(datasette, actor): +def menu_links(datasette, actor, request): if actor: - return [{"href": datasette.urls.instance(), "label": "Hello"}] + label = "Hello" + if request.args.get("_hello"): + label += ", " + request.args["_hello"] + return [{"href": datasette.urls.instance(), "label": label}] @hookimpl @@ -334,11 +337,14 @@ def table_actions(datasette, database, table, actor): @hookimpl -def database_actions(datasette, database, actor): +def database_actions(datasette, database, actor, request): if actor: + label = f"Database: {database}" + if request.args.get("_hello"): + label += " - " + request.args["_hello"] return [ { "href": datasette.urls.instance(), - "label": f"Database: {database}", + "label": label, } ] diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index f3b794cf..b70372f3 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -158,9 +158,12 @@ def menu_links(datasette, actor): @hookimpl -def table_actions(datasette, database, table, actor): +def table_actions(datasette, database, table, actor, request): async def inner(): if actor: - return [{"href": datasette.urls.instance(), "label": "From async"}] + label = "From async" + if request.args.get("_hello"): + label += " " + request.args["_hello"] + return [{"href": datasette.urls.instance(), "label": label}] return inner diff --git a/tests/test_plugins.py b/tests/test_plugins.py index ee6f1efa..b3561dd5 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -781,9 +781,9 @@ def test_hook_menu_links(app_client): response = app_client.get("/") assert get_menu_links(response.text) == [] - response_2 = app_client.get("/?_bot=1") + response_2 = app_client.get("/?_bot=1&_hello=BOB") assert get_menu_links(response_2.text) == [ - {"label": "Hello", "href": "/"}, + {"label": "Hello, BOB", "href": "/"}, {"label": "Hello 2", "href": "/"}, ] @@ -800,12 +800,12 @@ def test_hook_table_actions(app_client, table_or_view): response = app_client.get(f"/fixtures/{table_or_view}") assert get_table_actions_links(response.text) == [] - response_2 = app_client.get(f"/fixtures/{table_or_view}?_bot=1") + response_2 = app_client.get(f"/fixtures/{table_or_view}?_bot=1&_hello=BOB") assert sorted( get_table_actions_links(response_2.text), key=lambda l: l["label"] ) == [ {"label": "Database: fixtures", "href": "/"}, - {"label": "From async", "href": "/"}, + {"label": "From async BOB", "href": "/"}, {"label": f"Table: {table_or_view}", "href": "/"}, ] @@ -821,7 +821,7 @@ def test_hook_database_actions(app_client): response = app_client.get("/fixtures") assert get_table_actions_links(response.text) == [] - response_2 = app_client.get("/fixtures?_bot=1") + response_2 = app_client.get("/fixtures?_bot=1&_hello=BOB") assert get_table_actions_links(response_2.text) == [ - {"label": "Database: fixtures", "href": "/"}, + {"label": "Database: fixtures - BOB", "href": "/"}, ] From cd7678fde65319d7b6955ce9f4678ba4b9e64b66 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Jun 2021 21:51:14 -0700 Subject: [PATCH 0486/1705] Release 0.58a0 Refs #1371 --- datasette/version.py | 2 +- docs/changelog.rst | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 14a7be17..a46b4706 100644 --- 
a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.57.1" +__version__ = "0.58a0" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 89b8fcf5..99fc5ea5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,13 @@ Changelog ========= +.. _v0_58a0: + +0.58a0 (2021-06-09) +------------------- + +- The :ref:`menu_links() `, :ref:`table_actions() ` and :ref:`database_actions() ` plugin hooks all gained a new optional ``request`` argument providing access to the current request. (:issue:`1371`) + .. _v0_57_1: 0.57.1 (2021-06-08) From e7975657656ce02717f03703bb8ec17f2fe9b717 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 13 Jun 2021 08:33:22 -0700 Subject: [PATCH 0487/1705] Bump black from 21.5b2 to 21.6b0 (#1374) Bumps [black](https://github.com/psf/black) from 21.5b2 to 21.6b0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/commits) --- updated-dependencies: - dependency-name: black dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index e66fefc3..767148ea 100644 --- a/setup.py +++ b/setup.py @@ -73,7 +73,7 @@ setup( "pytest-xdist>=2.2.1,<2.3", "pytest-asyncio>=0.10,<0.16", "beautifulsoup4>=4.8.1,<4.10.0", - "black==21.5b2", + "black==21.6b0", "pytest-timeout>=1.4.2,<1.5", "trustme>=0.7,<0.8", ], From 83e9c8bc7585dcc62f200e37c2daefcd669ee05e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 13 Jun 2021 08:38:47 -0700 Subject: [PATCH 0488/1705] Update trustme requirement from <0.8,>=0.7 to >=0.7,<0.9 (#1373) Updates the requirements on [trustme](https://github.com/python-trio/trustme) to permit the latest version. - [Release notes](https://github.com/python-trio/trustme/releases) - [Commits](https://github.com/python-trio/trustme/compare/v0.7.0...v0.8.0) --- updated-dependencies: - dependency-name: trustme dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 767148ea..e9d4c8d1 100644 --- a/setup.py +++ b/setup.py @@ -75,7 +75,7 @@ setup( "beautifulsoup4>=4.8.1,<4.10.0", "black==21.6b0", "pytest-timeout>=1.4.2,<1.5", - "trustme>=0.7,<0.8", + "trustme>=0.7,<0.9", ], }, tests_require=["datasette[test]"], From 5335f360f4d57d70cab3694b08f15729c4ca2fe2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 19 Jun 2021 17:17:06 -0700 Subject: [PATCH 0489/1705] Update pytest-xdist requirement from <2.3,>=2.2.1 to >=2.2.1,<2.4 (#1378) Updates the requirements on [pytest-xdist](https://github.com/pytest-dev/pytest-xdist) to permit the latest version. - [Release notes](https://github.com/pytest-dev/pytest-xdist/releases) - [Changelog](https://github.com/pytest-dev/pytest-xdist/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-xdist/compare/v2.2.1...v2.3.0) --- updated-dependencies: - dependency-name: pytest-xdist dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index e9d4c8d1..4f095f29 100644 --- a/setup.py +++ b/setup.py @@ -70,7 +70,7 @@ setup( "docs": ["sphinx_rtd_theme", "sphinx-autobuild"], "test": [ "pytest>=5.2.2,<6.3.0", - "pytest-xdist>=2.2.1,<2.3", + "pytest-xdist>=2.2.1,<2.4", "pytest-asyncio>=0.10,<0.16", "beautifulsoup4>=4.8.1,<4.10.0", "black==21.6b0", From a6c55afe8c82ead8deb32f90c9324022fd422324 Mon Sep 17 00:00:00 2001 From: Chris Amico Date: Mon, 21 Jun 2021 11:57:38 -0400 Subject: [PATCH 0490/1705] Ensure db.path is a string before trying to insert into internal database (#1370) Thanks, @eyeseast --- datasette/app.py | 2 +- tests/test_api.py | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index fc5b7d9d..ce59ef54 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -354,7 +354,7 @@ class Datasette: INSERT OR REPLACE INTO databases (database_name, path, is_memory, schema_version) VALUES (?, ?, ?, ?) """, - [database_name, db.path, db.is_memory, schema_version], + [database_name, str(db.path), db.is_memory, schema_version], block=True, ) await populate_schema_tables(internal_db, db) diff --git a/tests/test_api.py b/tests/test_api.py index e5e609d6..2d891aae 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -25,6 +25,7 @@ from .fixtures import ( # noqa METADATA, ) import json +import pathlib import pytest import sys import urllib @@ -2123,3 +2124,16 @@ def test_col_nocol_errors(app_client, path, expected_error): response = app_client.get(path) assert response.status == 400 assert response.json["error"] == expected_error + + +@pytest.mark.asyncio +async def test_db_path(app_client): + db = app_client.ds.get_database() + path = pathlib.Path(db.path) + + assert path.exists() + + datasette = Datasette([path]) + + # this will break with a path + await datasette.refresh_schemas() From 7bc85b26d6b9c865caf949ff4660d855526c346e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 23 Jun 2021 12:30:03 -0700 Subject: [PATCH 0491/1705] Deploy stable-docs.datasette.io on publish Refs https://github.com/simonw/datasette.io/issues/67 --- .github/workflows/publish.yml | 42 +++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 90fa4505..8e4c2d02 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -29,6 +29,7 @@ jobs: - name: Run tests run: | pytest + deploy: runs-on: ubuntu-latest needs: [test] @@ -55,6 +56,47 @@ jobs: run: | python setup.py sdist bdist_wheel twine upload dist/* + + deploy_static_docs: + runs-on: ubuntu-latest + needs: [deploy] + # if: "!github.event.release.prerelease" + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.9' + - uses: actions/cache@v2 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-publish-pip- + - name: Install dependencies + run: | + python -m pip install -e .[docs] + python -m pip install sphinx-to-sqlite==0.1a1 + - name: Build docs.db + run: |- + cd docs + sphinx-build -b xml . _build + sphinx-to-sqlite ../docs.db _build + cd .. 
+ - name: Set up Cloud Run + uses: google-github-actions/setup-gcloud@master + with: + version: '275.0.0' + service_account_email: ${{ secrets.GCP_SA_EMAIL }} + service_account_key: ${{ secrets.GCP_SA_KEY }} + - name: Deploy stable-docs.datasette.io to Cloud Run + run: |- + gcloud config set run/region us-central1 + gcloud config set project datasette-222320 + datasette publish cloudrun docs.db \ + --service=datasette-docs-stable + deploy_docker: runs-on: ubuntu-latest needs: [deploy] From 403e370e5a3649333812edbbcba8467e6134cc16 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 23 Jun 2021 12:50:19 -0700 Subject: [PATCH 0492/1705] Fixed reference to default publish implementation --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 2c31e6f4..331f8061 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -302,7 +302,7 @@ publish_subcommand(publish) The Click command group for the ``datasette publish`` subcommand This hook allows you to create new providers for the ``datasette publish`` -command. Datasette uses this hook internally to implement the default ``now`` +command. Datasette uses this hook internally to implement the default ``cloudrun`` and ``heroku`` subcommands, so you can read `their source `_ to see examples of this hook in action. From 3a500155663a07720a8a7baa04acda8c4c937692 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 23 Jun 2021 12:51:19 -0700 Subject: [PATCH 0493/1705] datasette-publish-now is now called datasette-publish-vercel --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 331f8061..8b2a691a 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -348,7 +348,7 @@ Let's say you want to build a plugin that adds a ``datasette publish my_hosting_ ): # Your implementation goes here -Examples: `datasette-publish-fly `_, `datasette-publish-now `_ +Examples: `datasette-publish-fly `_, `datasette-publish-vercel `_ .. 
_plugin_hook_render_cell: From 4a3e8561ab109f3f171726bc2a7ebac1f23b72a6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 23 Jun 2021 15:27:30 -0700 Subject: [PATCH 0494/1705] Default 405 for POST, plus tests --- datasette/views/base.py | 3 +++ tests/test_html.py | 7 +++++++ 2 files changed, 10 insertions(+) diff --git a/datasette/views/base.py b/datasette/views/base.py index 1a03b97f..a87a0e77 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -106,6 +106,9 @@ class BaseView: async def options(self, request, *args, **kwargs): return Response.text("Method not allowed", status=405) + async def post(self, request, *args, **kwargs): + return Response.text("Method not allowed", status=405) + async def put(self, request, *args, **kwargs): return Response.text("Method not allowed", status=405) diff --git a/tests/test_html.py b/tests/test_html.py index ccee8b7e..aee6bce1 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -92,6 +92,13 @@ def test_memory_database_page(): assert response.status == 200 +def test_not_allowed_methods(): + with make_app_client(memory=True) as client: + for method in ("post", "put", "patch", "delete"): + response = client.request(path="/_memory", method=method.upper()) + assert response.status == 405 + + def test_database_page_redirects_with_url_hash(app_client_with_hash): response = app_client_with_hash.get("/fixtures", allow_redirects=False) assert response.status == 302 From b1fd24ac9f9035464af0a8ce92391c166a783253 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 23 Jun 2021 15:39:52 -0700 Subject: [PATCH 0495/1705] skip_csrf(datasette, scope) plugin hook, refs #1377 --- datasette/app.py | 3 +++ datasette/hookspecs.py | 5 +++++ docs/internals.rst | 2 ++ docs/plugin_hooks.rst | 25 +++++++++++++++++++++++++ setup.py | 2 +- tests/fixtures.py | 2 ++ tests/plugins/my_plugin.py | 5 +++++ tests/test_plugins.py | 25 +++++++++++++++++++++++++ 8 files changed, 68 insertions(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index ce59ef54..e11c12eb 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1052,6 +1052,9 @@ class Datasette: DatasetteRouter(self, routes), signing_secret=self._secret, cookie_name="ds_csrftoken", + skip_if_scope=lambda scope: any( + pm.hook.skip_csrf(datasette=self, scope=scope) + ), ) if self.setting("trace_debug"): asgi = AsgiTracer(asgi) diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 579787a2..63b06097 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -112,3 +112,8 @@ def table_actions(datasette, actor, database, table, request): @hookspec def database_actions(datasette, actor, database, request): """Links for the database actions menu""" + + +@hookspec +def skip_csrf(datasette, scope): + """Mechanism for skipping CSRF checks for certain requests""" diff --git a/docs/internals.rst b/docs/internals.rst index 72c86083..98df998a 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -778,6 +778,8 @@ If your plugin implements a ``<form method="POST">`` anywhere you will need to i +You can selectively disable CSRF protection using the :ref:`plugin_hook_skip_csrf` hook. + .. _internals_internal: The _internal database diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 8b2a691a..5af601b4 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1104,3 +1104,28 @@ database_actions(datasette, actor, database, request) The current HTTP :ref:`internals_request`.
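For illustration, a minimal sketch of a plugin implementing this hook with the new optional ``request`` argument might look like the following (the label, URL helper and ``_hello`` querystring parameter are illustrative choices, not requirements of the hook):

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def database_actions(datasette, actor, database, request):
        # Only offer the action to signed-in users
        if actor:
            label = f"Inspect {database}"
            # The new request argument makes querystring data available
            if request and request.args.get("_hello"):
                label += " - " + request.args["_hello"]
            return [{"href": datasette.urls.database(database), "label": label}]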
This hook is similar to :ref:`plugin_hook_table_actions` but populates an actions menu on the database page. + +.. _plugin_hook_skip_csrf: + +skip_csrf(datasette, scope) +--------------------------- + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + +``scope`` - dictionary + The `ASGI scope `__ for the incoming HTTP request. + +This hook can be used to skip :ref:`internals_csrf` for a specific incoming request. For example, you might have a custom path at ``/submit-comment`` which is designed to accept comments from anywhere, whether or not the incoming request originated on the site and has an accompanying CSRF token. + +This example will disable CSRF protection for that specific URL path: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def skip_csrf(scope): + return scope["path"] == "/submit-comment" + +If any of the currently active ``skip_csrf()`` plugin hooks return ``True``, CSRF protection will be skipped for the request. diff --git a/setup.py b/setup.py index 4f095f29..8a651d32 100644 --- a/setup.py +++ b/setup.py @@ -55,7 +55,7 @@ setup( "uvicorn~=0.11", "aiofiles>=0.4,<0.8", "janus>=0.4,<0.7", - "asgi-csrf>=0.6", + "asgi-csrf>=0.9", "PyYAML~=5.3", "mergedeep>=1.1.1,<1.4.0", "itsdangerous>=1.1,<3.0", diff --git a/tests/fixtures.py b/tests/fixtures.py index cdd2e987..a79fc246 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -52,6 +52,7 @@ EXPECTED_PLUGINS = [ "register_magic_parameters", "register_routes", "render_cell", + "skip_csrf", "startup", "table_actions", ], @@ -152,6 +153,7 @@ def make_app_client( static_mounts=static_mounts, template_dir=template_dir, crossdb=crossdb, + pdb=True, ) ds.sqlite_functions.append(("sleep", 1, lambda n: time.sleep(float(n)))) yield TestClient(ds) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 85a7467d..0e625623 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -348,3 +348,8 @@ def database_actions(datasette, database, actor, request): "label": label, } ] + + +@hookimpl +def skip_csrf(scope): + return scope["path"] == "/skip-csrf" diff --git a/tests/test_plugins.py b/tests/test_plugins.py index b3561dd5..14273282 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -825,3 +825,28 @@ def test_hook_database_actions(app_client): assert get_table_actions_links(response_2.text) == [ {"label": "Database: fixtures - BOB", "href": "/"}, ] + + +def test_hook_skip_csrf(app_client): + cookie = app_client.actor_cookie({"id": "test"}) + csrf_response = app_client.post( + "/post/", + post_data={"this is": "post data"}, + csrftoken_from=True, + cookies={"ds_actor": cookie}, + ) + assert csrf_response.status == 200 + missing_csrf_response = app_client.post( + "/post/", post_data={"this is": "post data"}, cookies={"ds_actor": cookie} + ) + assert missing_csrf_response.status == 403 + # But "/skip-csrf" should allow + allow_csrf_response = app_client.post( + "/skip-csrf", post_data={"this is": "post data"}, cookies={"ds_actor": cookie} + ) + assert allow_csrf_response.status == 405 # Method not allowed + # /skip-csrf-2 should not + second_missing_csrf_response = app_client.post( + "/skip-csrf-2", post_data={"this is": "post data"}, cookies={"ds_actor": cookie} + ) + assert second_missing_csrf_response.status == 403 From 02b19c7a9afd328f22040ab33b5c1911cd904c7c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 23 Jun 2021 
15:50:48 -0700 Subject: [PATCH 0496/1705] Removed rogue pdb=True, refs #1377 --- tests/fixtures.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index a79fc246..1fb52bf9 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -153,7 +153,6 @@ def make_app_client( static_mounts=static_mounts, template_dir=template_dir, crossdb=crossdb, - pdb=True, ) ds.sqlite_functions.append(("sleep", 1, lambda n: time.sleep(float(n)))) yield TestClient(ds) From ff17970ed4988a80b699d417bbeec07d63400e24 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 24 Jun 2021 09:24:59 -0700 Subject: [PATCH 0497/1705] Release 0.58a1 Refs #1365, #1377 --- datasette/version.py | 2 +- docs/changelog.rst | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index a46b4706..e5a29931 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.58a0" +__version__ = "0.58a1" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 99fc5ea5..bcd8b987 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,15 @@ Changelog ========= +.. _v0_58a1: + +0.58a1 (2021-06-24) +------------------- + +- New plugin hook: :ref:`plugin_hook_skip_csrf`, for opting out of CSRF protection based on the incoming request. (:issue:`1377`) +- ``POST`` requests to endpoints that do not support that HTTP verb now return a 405 error. +- ``db.path`` can now be provided as a ``pathlib.Path`` object, useful when writing unit tests for plugins. Thanks, Chris Amico. (:issue:`1365`) + .. _v0_58a0: 0.58a0 (2021-06-09) From 953a64467d78bca29fe6cc18bdb2baa7848e53ff Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 24 Jun 2021 09:42:02 -0700 Subject: [PATCH 0498/1705] Only publish stable docs on non-preview release Refs https://github.com/simonw/datasette.io/issues/67 --- .github/workflows/publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 8e4c2d02..727f9933 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -60,7 +60,7 @@ jobs: deploy_static_docs: runs-on: ubuntu-latest needs: [deploy] - # if: "!github.event.release.prerelease" + if: "!github.event.release.prerelease" steps: - uses: actions/checkout@v2 - name: Set up Python From baf986c871708c01ca183be760995cf306ba21bf Mon Sep 17 00:00:00 2001 From: Brandon Roberts Date: Sat, 26 Jun 2021 15:24:54 -0700 Subject: [PATCH 0499/1705] New get_metadata() plugin hook for dynamic metadata The following hook is added: get_metadata( datasette=self, key=key, database=database, table=table, fallback=fallback ) This gets called when we're building our metadata for the rest of the system to use. We merge whatever the plugins return with any local metadata (from metadata.yml/yaml/json) allowing for a live-editable dynamic Datasette. As a security precaution, local meta is *not* overwritable by plugin hooks. The workflow for transitioning to live-meta would be to load the plugin with the full metadata.yaml and save. Then remove the parts of the metadata that you want to be able to change from the file. * Avoid race condition: don't mutate databases list This avoids the nasty "RuntimeError: OrderedDict mutated during iteration" error that randomly happens when a plugin adds a new database to Datasette, using `add_database`.
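To make the race concrete, here is a toy reproduction (not code from this patch) of the error quoted above, triggered by mutating an `OrderedDict` while something is iterating over it:

```python
from collections import OrderedDict

databases = OrderedDict(first=1)
for name in databases:
    # simulates add_database() running while another task iterates
    databases["second"] = 2
# RuntimeError: OrderedDict mutated during iteration
```

Copying the dictionary and swapping the attribute instead means existing iterators keep reading the old snapshot.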
This change makes the add and remove database functions more expensive, but it prevents the random explosion race conditions that make for confusing user experience when importing live databases. Thanks, @brandonrobertz --- .gitignore | 1 + datasette/app.py | 47 ++++++++++++++++++++++++++++++++----- datasette/hookspecs.py | 5 ++++ datasette/utils/__init__.py | 1 - docs/plugin_hooks.rst | 35 +++++++++++++++++++++++++++ tests/test_permissions.py | 6 ++--- tests/test_plugins.py | 29 +++++++++++++++++++++++ 7 files changed, 114 insertions(+), 10 deletions(-) diff --git a/.gitignore b/.gitignore index 29ac176f..066009f0 100644 --- a/.gitignore +++ b/.gitignore @@ -117,3 +117,4 @@ ENV/ # macOS files .DS_Store node_modules +.*.swp diff --git a/datasette/app.py b/datasette/app.py index e11c12eb..05ad5a8d 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -251,7 +251,7 @@ class Datasette: if config_dir and metadata_files and not metadata: with metadata_files[0].open() as fp: metadata = parse_metadata(fp.read()) - self._metadata = metadata or {} + self._metadata_local = metadata or {} self.sqlite_functions = [] self.sqlite_extensions = [] for extension in sqlite_extensions or []: @@ -380,6 +380,7 @@ class Datasette: return self.databases[name] def add_database(self, db, name=None): + new_databases = self.databases.copy() if name is None: # Pick a unique name for this database suggestion = db.suggest_name() @@ -391,14 +392,18 @@ class Datasette: name = "{}_{}".format(suggestion, i) i += 1 db.name = name - self.databases[name] = db + new_databases[name] = db + # don't mutate! that causes race conditions with live import + self.databases = new_databases return db def add_memory_database(self, memory_name): return self.add_database(Database(self, memory_name=memory_name)) def remove_database(self, name): - self.databases.pop(name) + new_databases = self.databases.copy() + new_databases.pop(name) + self.databases = new_databases def setting(self, key): return self._settings.get(key, None) @@ -407,6 +412,17 @@ class Datasette: # Returns a fully resolved config dictionary, useful for templates return {option.name: self.setting(option.name) for option in SETTINGS} + def _metadata_recursive_update(self, orig, updated): + if not isinstance(orig, dict) or not isinstance(updated, dict): + return orig + + for key, upd_value in updated.items(): + if isinstance(upd_value, dict) and isinstance(orig.get(key), dict): + orig[key] = self._metadata_recursive_update(orig[key], upd_value) + else: + orig[key] = upd_value + return orig + def metadata(self, key=None, database=None, table=None, fallback=True): """ Looks up metadata, cascading backwards from specified level. @@ -415,7 +431,21 @@ class Datasette: assert not ( database is None and table is not None ), "Cannot call metadata() with table= specified but not database=" - databases = self._metadata.get("databases") or {} + metadata = {} + + for hook_dbs in pm.hook.get_metadata( + datasette=self, key=key, database=database, table=table, fallback=fallback + ): + metadata = self._metadata_recursive_update(metadata, hook_dbs) + + # security precaution!! don't allow anything in the local config + # to be overwritten. 
this is a temporary measure, not sure if this + # is a good idea long term or maybe if it should just be a concern + # of the plugin's implementation + metadata = self._metadata_recursive_update(metadata, self._metadata_local) + + databases = metadata.get("databases") or {} + search_list = [] if database is not None: search_list.append(databases.get(database) or {}) if table is not None: table_metadata = ((databases.get(database) or {}).get("tables") or {}).get( table ) or {} search_list.insert(0, table_metadata) - search_list.append(self._metadata) + + search_list.append(metadata) if not fallback: # No fallback allowed, so just use the first one in the list search_list = search_list[:1] @@ -440,6 +471,10 @@ class Datasette: m.update(item) return m + @property + def _metadata(self): + return self.metadata() + def plugin_config(self, plugin_name, database=None, table=None, fallback=True): """Return config for plugin, falling back from specified database/table""" plugins = self.metadata( diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 63b06097..c40b3148 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -10,6 +10,11 @@ def startup(datasette): """Fires directly after Datasette first starts running""" +@hookspec +def get_metadata(datasette, key, database, table, fallback): + """Get configuration""" + + @hookspec def asgi_wrapper(datasette): """Returns an ASGI middleware callable to wrap our ASGI application with""" diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 73122976..1e193862 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -21,7 +21,6 @@ import numbers import yaml from .shutil_backport import copytree from .sqlite import sqlite3, sqlite_version, supports_table_xinfo -from ..plugins import pm # From https://www.sqlite.org/lang_keywords.html diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 5af601b4..9ec75f34 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1129,3 +1129,38 @@ This example will disable CSRF protection for that specific URL path: return scope["path"] == "/submit-comment" If any of the currently active ``skip_csrf()`` plugin hooks return ``True``, CSRF protection will be skipped for the request. + +get_metadata(datasette, key, database, table, fallback) +------------------------------------------------------- + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + +``actor`` - dictionary or None + The currently authenticated :ref:`actor `. + +``database`` - string or None + The name of the database metadata is being asked for. + +``table`` - string or None + The name of the table. + +``key`` - string or None + The name of the key for which data is being asked for. + +This hook is responsible for returning a dictionary corresponding to Datasette :ref:`metadata`. This function is passed the `database`, `table` and `key` which were passed to the upstream internal request for metadata. Regardless, it is important to return a global metadata object, where `"databases": {}` would be a top-level key. The dictionary returned here will be merged with, and overwritten by, the contents of the physical `metadata.yaml` if one is present. + +.. 
code-block:: python + + @hookimpl + def get_metadata(datasette, key, database, table, fallback): + metadata = { + "title": "This will be the Datasette landing page title!", + "description": get_instance_description(datasette), + "databases": {}, + } + for db_name, db_data_dict in get_my_database_meta(datasette, database, table, key): + metadata["databases"][db_name] = db_data_dict + # whatever we return here will be merged with any other plugins using this hook and + # will be overwritten by a local metadata.yaml if one exists! + return metadata diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 9317c0d9..788523b0 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -440,7 +440,7 @@ def test_permissions_cascade(cascade_app_client, path, permissions, expected_sta """Test that e.g. having view-table but NOT view-database lets you view table page, etc""" allow = {"id": "*"} deny = {} - previous_metadata = cascade_app_client.ds._metadata + previous_metadata = cascade_app_client.ds.metadata() updated_metadata = copy.deepcopy(previous_metadata) actor = {"id": "test"} if "download" in permissions: @@ -457,11 +457,11 @@ def test_permissions_cascade(cascade_app_client, path, permissions, expected_sta updated_metadata["databases"]["fixtures"]["queries"]["magic_parameters"][ "allow" ] = (allow if "query" in permissions else deny) - cascade_app_client.ds._metadata = updated_metadata + cascade_app_client.ds._metadata_local = updated_metadata response = cascade_app_client.get( path, cookies={"ds_actor": cascade_app_client.actor_cookie(actor)}, ) assert expected_status == response.status finally: - cascade_app_client.ds._metadata = previous_metadata + cascade_app_client.ds._metadata_local = previous_metadata diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 14273282..3b9c06b9 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -850,3 +850,32 @@ def test_hook_skip_csrf(app_client): "/skip-csrf-2", post_data={"this is": "post data"}, cookies={"ds_actor": cookie} ) assert second_missing_csrf_response.status == 403 + + +def test_hook_get_metadata(app_client): + app_client.ds._metadata_local = { + "title": "Testing get_metadata hook!", + "databases": { + "from-local": { + "title": "Hello from local metadata" + } + } + } + og_pm_hook_get_metadata = pm.hook.get_metadata + def get_metadata_mock(*args, **kwargs): + return [{ + "databases": { + "from-hook": { + "title": "Hello from the plugin hook" + }, + "from-local": { + "title": "This will be overwritten!" + } + } + }] + pm.hook.get_metadata = get_metadata_mock + meta = app_client.ds.metadata() + assert "Testing get_metadata hook!" 
== meta["title"] + assert "Hello from local metadata" == meta["databases"]["from-local"]["title"] + assert "Hello from the plugin hook" == meta["databases"]["from-hook"]["title"] + pm.hook.get_metadata = og_pm_hook_get_metadata From 05a312caf3debb51aa1069939923a49e21cd2bd1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 26 Jun 2021 15:25:28 -0700 Subject: [PATCH 0500/1705] Applied Black, refs #1368 --- tests/test_plugins.py | 22 +++++++++------------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 3b9c06b9..7a626ce5 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -855,24 +855,20 @@ def test_hook_skip_csrf(app_client): def test_hook_get_metadata(app_client): app_client.ds._metadata_local = { "title": "Testing get_metadata hook!", - "databases": { - "from-local": { - "title": "Hello from local metadata" - } - } + "databases": {"from-local": {"title": "Hello from local metadata"}}, } og_pm_hook_get_metadata = pm.hook.get_metadata + def get_metadata_mock(*args, **kwargs): - return [{ - "databases": { - "from-hook": { - "title": "Hello from the plugin hook" - }, - "from-local": { - "title": "This will be overwritten!" + return [ + { + "databases": { + "from-hook": {"title": "Hello from the plugin hook"}, + "from-local": {"title": "This will be overwritten!"}, } } - }] + ] + pm.hook.get_metadata = get_metadata_mock meta = app_client.ds.metadata() assert "Testing get_metadata hook!" == meta["title"] From 089278b8dbe0cb3d41f27666d97b0096b750fbe2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 26 Jun 2021 15:49:07 -0700 Subject: [PATCH 0501/1705] rST fix, refs #1384 --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 9ec75f34..d3b55747 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1148,7 +1148,7 @@ get_metadata(datasette, key, database, table, fallback) ``key`` - string or None The name of the key for which data is being asked for. -This hook is responsible for returning a dictionary corresponding to Datasette :ref:`metadata`. This function is passed the `database`, `table` and `key` which were passed to the upstream internal request for metadata. Regardless, it is important to return a global metadata object, where `"databases": []` would be a top-level key. The dictionary returned here, will be merged with, and overwritten by, the contents of the physical `metadata.yaml` if one is present. +This hook is responsible for returning a dictionary corresponding to Datasette :ref:`metadata`. This function is passed the ``database``, ``table`` and ``key`` which were passed to the upstream internal request for metadata. Regardless, it is important to return a global metadata object, where ``"databases": []`` would be a top-level key. The dictionary returned here, will be merged with, and overwritten by, the contents of the physical ``metadata.yaml`` if one is present. .. 
code-block:: python From 0d339a4897c808903e34fa6be228cdaaa5a29c55 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 26 Jun 2021 16:04:39 -0700 Subject: [PATCH 0502/1705] Removed text about executing SQL, refs #1384 --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index d3b55747..d71037d9 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1134,7 +1134,7 @@ get_metadata(datasette, key, database, table, fallback) ------------------------------------------------------- ``datasette`` - :ref:`internals_datasette` - You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``. ``actor`` - dictionary or None The currently authenticated :ref:`actor `. From ea627baccf980d7d8ebc9e1ffff1fe34d556e56f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 26 Jun 2021 17:02:42 -0700 Subject: [PATCH 0503/1705] Removed fallback parameter from get_metadata, refs #1384 --- datasette/app.py | 2 +- datasette/hookspecs.py | 4 ++-- docs/plugin_hooks.rst | 6 +++--- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 05ad5a8d..0b909968 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -434,7 +434,7 @@ class Datasette: metadata = {} for hook_dbs in pm.hook.get_metadata( - datasette=self, key=key, database=database, table=table, fallback=fallback + datasette=self, key=key, database=database, table=table ): metadata = self._metadata_recursive_update(metadata, hook_dbs) diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index c40b3148..07b2f5ba 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -11,8 +11,8 @@ def startup(datasette): @hookspec -def get_metadata(datasette, key, database, table, fallback): - """Get configuration""" +def get_metadata(datasette, key, database, table): + """Return metadata to be merged into Datasette's metadata dictionary""" @hookspec diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index d71037d9..b687a6e7 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1130,8 +1130,8 @@ This example will disable CSRF protection for that specific URL path: If any of the currently active ``skip_csrf()`` plugin hooks return ``True``, CSRF protection will be skipped for the request. -get_metadata(datasette, key, database, table, fallback) -------------------------------------------------------- +get_metadata(datasette, key, database, table) +--------------------------------------------- ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``. @@ -1153,7 +1153,7 @@ This hook is responsible for returning a dictionary corresponding to Datasette : .. 
code-block:: python @hookimpl - def get_metadata(datasette, key, database, table, fallback): + def get_metadata(datasette, key, database, table): metadata = { "title": "This will be the Datasette landing page title!", "description": get_instance_description(datasette), From dbc61a1fd343e4660b6220f60c4ce79341245048 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 2 Jul 2021 10:33:03 -0700 Subject: [PATCH 0504/1705] Documented ProxyPreserveHost On for Apache, closes #1387 --- docs/deploying.rst | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/deploying.rst b/docs/deploying.rst index 48261b59..47dff73d 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -161,6 +161,9 @@ For `Apache `__, you can use the ``ProxyPass`` direct LoadModule proxy_module lib/httpd/modules/mod_proxy.so LoadModule proxy_http_module lib/httpd/modules/mod_proxy_http.so -Then add this directive to proxy traffic:: +Then add these directives to proxy traffic:: - ProxyPass /datasette-prefix/ http://127.0.0.1:8009/datasette-prefix/ + ProxyPass /datasette-prefix/ http://127.0.0.1:8009/datasette-prefix/ + ProxyPreserveHost On + +The `ProxyPreserveHost On `__ directive ensures that the original ``Host:`` header from the incoming request is passed through to Datasette. Datasette needs this to correctly assemble links to other pages using the :ref:`datasette_absolute_url` method. From c8feaf0b628ddb1f98b2a4b89691d3d1b939ed8e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 9 Jul 2021 09:32:32 -0700 Subject: [PATCH 0505/1705] systemctl restart datasette.service, closes #1390 --- docs/deploying.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/deploying.rst b/docs/deploying.rst index 47dff73d..44ddd07b 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -65,7 +65,11 @@ You can start the Datasette process running using the following:: sudo systemctl daemon-reload sudo systemctl start datasette.service -You can confirm that Datasette is running on port 8000 like so:: +You may need to restart the Datasette service after making changes to its ``metadata.json`` configuration or the ``datasette.service`` file. 
You can do that using:: + + sudo systemctl restart datasette.service + +Once the service has started you can confirm that Datasette is running on port 8000 like so:: curl 127.0.0.1:8000/-/versions.json # Should output JSON showing the installed version From 83f6799a96f48b5acef4911c0273973f15efdf05 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 10 Jul 2021 11:30:48 -0700 Subject: [PATCH 0506/1705] searchmode: raw table metadata property, closes #1389 --- datasette/views/table.py | 8 +++++++- docs/full_text_search.rst | 29 ++++++++++++++++++---------- tests/test_api.py | 40 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 66 insertions(+), 11 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 81d4d721..1bda7496 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -495,7 +495,13 @@ class TableView(RowTableShared): if pair[0].startswith("_search") and pair[0] != "_searchmode" ) search = "" - search_mode_raw = special_args.get("_searchmode") == "raw" + search_mode_raw = table_metadata.get("searchmode") == "raw" + # Or set it from the querystring + qs_searchmode = special_args.get("_searchmode") + if qs_searchmode == "escaped": + search_mode_raw = False + if qs_searchmode == "raw": + search_mode_raw = True if fts_table and search_args: if "_search" in search_args: # Simple ?_search=xxx diff --git a/docs/full_text_search.rst b/docs/full_text_search.rst index b414ff37..f549296f 100644 --- a/docs/full_text_search.rst +++ b/docs/full_text_search.rst @@ -36,7 +36,11 @@ Advanced SQLite search queries SQLite full-text search includes support for `a variety of advanced queries `__, including ``AND``, ``OR``, ``NOT`` and ``NEAR``. -By default Datasette disables these features to ensure they do not cause any confusion for users who are not aware of them. You can disable this escaping and use the advanced queries by adding ``?_searchmode=raw`` to the table page query string. +By default Datasette disables these features to ensure they do not cause errors or confusion for users who are not aware of them. You can disable this escaping and use the advanced queries by adding ``&_searchmode=raw`` to the table page query string. + +If you want to enable these operators by default for a specific table, you can do so by adding ``"searchmode": "raw"`` to the metadata configuration for that table, see :ref:`full_text_search_table_or_view`. + +If that option has been specified in the table metadata but you want to over-ride it and return to the default behavior you can append ``&_searchmode=escaped`` to the query string. .. _full_text_search_table_or_view: @@ -53,19 +57,24 @@ https://latest.datasette.io/fixtures/searchable_view?_fts_table=searchable_fts&_ The ``fts_table`` metadata property can be used to specify an associated FTS table. If the primary key column in your table which was used to populate the FTS table is something other than ``rowid``, you can specify the column to use with the ``fts_pk`` property. -Here is an example which enables full-text search for a ``display_ads`` view which is defined against the ``ads`` table and hence needs to run FTS against the ``ads_fts`` table, using the ``id`` as the primary key:: +The ``"searchmode": "raw"`` property can be used to default the table to accepting SQLite advanced search operators, as described in :ref:`full_text_search_advanced_queries`. 
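For comparison, in the default escaped mode Datasette neutralizes these operators by quoting each term before the query reaches SQLite; the ``datasette.utils.escape_fts()`` helper performs that quoting. A quick sketch (the exact output shown in the comment is approximate):

.. code-block:: python

    from datasette.utils import escape_fts

    # Operators lose their special meaning once each token is quoted;
    # this prints something like: "te*" "AND" "do*"
    print(escape_fts("te* AND do*"))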
+ +Here is an example which enables full-text search (with SQLite advanced search operators) for a ``display_ads`` view which is defined against the ``ads`` table and hence needs to run FTS against the ``ads_fts`` table, using the ``id`` as the primary key: + +.. code-block:: json + { - "databases": { - "russian-ads": { - "tables": { - "display_ads": { - "fts_table": "ads_fts", - "fts_pk": "id" + "databases": { + "russian-ads": { + "tables": { + "display_ads": { + "fts_table": "ads_fts", + "fts_pk": "id", + "searchmode": "raw" + } + } } - } } - } } .. _full_text_search_custom_sql: diff --git a/tests/test_api.py b/tests/test_api.py index 2d891aae..cb3c255d 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1078,6 +1078,46 @@ def test_searchable(app_client, path, expected_rows): assert expected_rows == response.json["rows"] +_SEARCHMODE_RAW_RESULTS = [ + [1, "barry cat", "terry dog", "panther"], + [2, "terry dog", "sara weasel", "puma"], +] + + +@pytest.mark.parametrize( + "table_metadata,querystring,expected_rows", + [ + ( + {}, + "_search=te*+AND+do*", + [], + ), + ( + {"searchmode": "raw"}, + "_search=te*+AND+do*", + _SEARCHMODE_RAW_RESULTS, + ), + ( + {}, + "_search=te*+AND+do*&_searchmode=raw", + _SEARCHMODE_RAW_RESULTS, + ), + # Can be over-ridden with _searchmode=escaped + ( + {"searchmode": "raw"}, + "_search=te*+AND+do*&_searchmode=escaped", + [], + ), + ], +) +def test_searchmode(table_metadata, querystring, expected_rows): + with make_app_client( + metadata={"databases": {"fixtures": {"tables": {"searchable": table_metadata}}}} + ) as client: + response = client.get("/fixtures/searchable.json?" + querystring) + assert expected_rows == response.json["rows"] + + @pytest.mark.parametrize( "path,expected_rows", [ From d792fc7cf5fde8fa748168e48c3183266a3a419f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 10 Jul 2021 12:03:19 -0700 Subject: [PATCH 0507/1705] Refactored generated_columns test, no longer in fixtures.db - refs #1391 --- tests/fixtures.py | 19 +-------- tests/test_api.py | 52 ++++++++++++------------ tests/test_internals_database.py | 70 +++++++++++++++----------------- 3 files changed, 59 insertions(+), 82 deletions(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index 1fb52bf9..dce94876 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -1,5 +1,5 @@ from datasette.app import Datasette -from datasette.utils.sqlite import sqlite3, sqlite_version, supports_generated_columns +from datasette.utils.sqlite import sqlite3, sqlite_version from datasette.utils.testing import TestClient import click import contextlib @@ -118,8 +118,6 @@ def make_app_client( immutables = [] conn = sqlite3.connect(filepath) conn.executescript(TABLES) - if supports_generated_columns(): - conn.executescript(GENERATED_COLUMNS_SQL) for sql, params in TABLE_PARAMETERIZED_SQL: with conn: conn.execute(sql, params) @@ -720,18 +718,6 @@ INSERT INTO "searchable_fts" (rowid, text1, text2) SELECT rowid, text1, text2 FROM searchable; """ -GENERATED_COLUMNS_SQL = """ -CREATE TABLE generated_columns ( - body TEXT, - id INT GENERATED ALWAYS AS (json_extract(body, '$.number')) STORED, - consideration INT GENERATED ALWAYS AS (json_extract(body, '$.string')) STORED -); -INSERT INTO generated_columns (body) VALUES ('{ - "number": 1, - "string": "This is a string" -}'); -""" def assert_permissions_checked(datasette, actions): # actions is a list of "action" or (action, resource) tuples @@ -792,9 +778,6 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename): for sql,
params in TABLE_PARAMETERIZED_SQL: with conn: conn.execute(sql, params) - if supports_generated_columns(): - with conn: - conn.executescript(GENERATED_COLUMNS_SQL) print(f"Test tables written to {db_filename}") if metadata: with open(metadata, "w") as fp: diff --git a/tests/test_api.py b/tests/test_api.py index cb3c255d..3e8d02c8 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -20,7 +20,6 @@ from .fixtures import ( # noqa generate_compound_rows, generate_sortable_rows, make_app_client, - supports_generated_columns, EXPECTED_PLUGINS, METADATA, ) @@ -38,7 +37,7 @@ def test_homepage(app_client): assert response.json.keys() == {"fixtures": 0}.keys() d = response.json["fixtures"] assert d["name"] == "fixtures" - assert d["tables_count"] == 25 if supports_generated_columns() else 24 + assert d["tables_count"] == 24 assert len(d["tables_and_views_truncated"]) == 5 assert d["tables_and_views_more"] is True # 4 hidden FTS tables + no_primary_key (hidden in metadata) @@ -271,22 +270,7 @@ def test_database_page(app_client): }, "private": False, }, - ] + ( - [ - { - "columns": ["body", "id", "consideration"], - "count": 1, - "foreign_keys": {"incoming": [], "outgoing": []}, - "fts_table": None, - "hidden": False, - "name": "generated_columns", - "primary_keys": [], - "private": False, - } - ] - if supports_generated_columns() - else [] - ) + [ + ] + [ { "name": "infinity", "columns": ["value"], @@ -2074,16 +2058,30 @@ def test_paginate_using_link_header(app_client, qs): sqlite_version() < (3, 31, 0), reason="generated columns were added in SQLite 3.31.0", ) -def test_generated_columns_are_visible_in_datasette(app_client): - response = app_client.get("/fixtures/generated_columns.json?_shape=array") - assert response.json == [ - { - "rowid": 1, - "body": '{\n "number": 1,\n "string": "This is a string"\n}', - "id": 1, - "consideration": "This is a string", +def test_generated_columns_are_visible_in_datasette(): + with make_app_client( + extra_databases={ + "generated.db": """ + CREATE TABLE generated_columns ( + body TEXT, + id INT GENERATED ALWAYS AS (json_extract(body, '$.number')) STORED, + consideration INT GENERATED ALWAYS AS (json_extract(body, '$.string')) STORED + ); + INSERT INTO generated_columns (body) VALUES ('{ + "number": 1, + "string": "This is a string" + }');""" } - ] + ) as client: + response = app_client.get("/generated/generated_columns.json?_shape=array") + assert response.json == [ + { + "rowid": 1, + "body": '{\n "number": 1,\n "string": "This is a string"\n}', + "id": 1, + "consideration": "This is a string", + } + ] def test_http_options_request(app_client): diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index b60aaa8e..ad829751 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -2,7 +2,7 @@ Tests for the datasette.database.Database class """ from datasette.database import Database, Results, MultipleValues -from datasette.utils.sqlite import sqlite3, supports_generated_columns +from datasette.utils.sqlite import sqlite3 from datasette.utils import Column from .fixtures import app_client, app_client_two_attached_databases_crossdb_enabled import pytest @@ -340,42 +340,38 @@ async def test_get_all_foreign_keys(db): @pytest.mark.asyncio async def test_table_names(db): table_names = await db.table_names() - assert ( - table_names - == [ - "simple_primary_key", - "primary_key_multiple_columns", - "primary_key_multiple_columns_explicit_label", - "compound_primary_key", - "compound_three_primary_keys", - 
"foreign_key_references", - "sortable", - "no_primary_key", - "123_starts_with_digits", - "Table With Space In Name", - "table/with/slashes.csv", - "complex_foreign_keys", - "custom_foreign_key_label", - "units", - "tags", - "searchable", - "searchable_tags", - "searchable_fts", - "searchable_fts_segments", - "searchable_fts_segdir", - "searchable_fts_docsize", - "searchable_fts_stat", - "select", - "infinity", - "facet_cities", - "facetable", - "binary_data", - "roadside_attractions", - "attraction_characteristic", - "roadside_attraction_characteristics", - ] - + (["generated_columns"] if supports_generated_columns() else []) - ) + assert table_names == [ + "simple_primary_key", + "primary_key_multiple_columns", + "primary_key_multiple_columns_explicit_label", + "compound_primary_key", + "compound_three_primary_keys", + "foreign_key_references", + "sortable", + "no_primary_key", + "123_starts_with_digits", + "Table With Space In Name", + "table/with/slashes.csv", + "complex_foreign_keys", + "custom_foreign_key_label", + "units", + "tags", + "searchable", + "searchable_tags", + "searchable_fts", + "searchable_fts_segments", + "searchable_fts_segdir", + "searchable_fts_docsize", + "searchable_fts_stat", + "select", + "infinity", + "facet_cities", + "facetable", + "binary_data", + "roadside_attractions", + "attraction_characteristic", + "roadside_attraction_characteristics", + ] @pytest.mark.asyncio From e0064ba7b06973eae70e6222a6208d9fed5bd170 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 10 Jul 2021 12:14:14 -0700 Subject: [PATCH 0508/1705] Fixes for test_generated_columns_are_visible_in_datasette, refs #1391 --- tests/test_api.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index 3e8d02c8..0049d76d 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -2067,17 +2067,16 @@ def test_generated_columns_are_visible_in_datasette(): id INT GENERATED ALWAYS AS (json_extract(body, '$.number')) STORED, consideration INT GENERATED ALWAYS AS (json_extract(body, '$.string')) STORED ); - INSERT INTO generated_columns (body) VALUES ('{ - "number": 1, - "string": "This is a string" - }');""" + INSERT INTO generated_columns (body) VALUES ( + '{"number": 1, "string": "This is a string"}' + );""" } ) as client: - response = app_client.get("/generated/generated_columns.json?_shape=array") + response = client.get("/generated/generated_columns.json?_shape=array") assert response.json == [ { "rowid": 1, - "body": '{\n "number": 1,\n "string": "This is a string"\n}', + "body": '{"number": 1, "string": "This is a string"}', "id": 1, "consideration": "This is a string", } From 180c7a5328457aefdf847ada366e296fef4744f1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 10 Jul 2021 16:37:30 -0700 Subject: [PATCH 0509/1705] --uds option for binding to Unix domain socket, closes #1388 --- datasette/cli.py | 7 +++++++ docs/datasette-serve-help.txt | 1 + docs/deploying.rst | 23 ++++++++++++++++++++++- tests/conftest.py | 20 +++++++++++++++++++- tests/test_cli.py | 1 + tests/test_cli_serve_server.py | 15 +++++++++++++++ 6 files changed, 65 insertions(+), 2 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 12ee92c3..09aebcc8 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -333,6 +333,10 @@ def uninstall(packages, yes): type=click.IntRange(0, 65535), help="Port for server, defaults to 8001. 
Use -p 0 to automatically assign an available port.", ) +@click.option( + "--uds", + help="Bind to a Unix domain socket", +) @click.option( "--reload", is_flag=True, @@ -428,6 +432,7 @@ def serve( immutable, host, port, + uds, reload, cors, sqlite_extensions, @@ -569,6 +574,8 @@ def serve( uvicorn_kwargs = dict( host=host, port=port, log_level="info", lifespan="on", workers=1 ) + if uds: + uvicorn_kwargs["uds"] = uds if ssl_keyfile: uvicorn_kwargs["ssl_keyfile"] = ssl_keyfile if ssl_certfile: diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index db51dd80..ec3f41a0 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -12,6 +12,7 @@ Options: machines. -p, --port INTEGER RANGE Port for server, defaults to 8001. Use -p 0 to automatically assign an available port. [0<=x<=65535] + --uds TEXT Bind to a Unix domain socket --reload Automatically reload if code or metadata change detected - useful for development --cors Enable CORS by serving Access-Control-Allow-Origin: * diff --git a/docs/deploying.rst b/docs/deploying.rst index 44ddd07b..f3680034 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -148,7 +148,6 @@ Here is an example of an `nginx `__ configuration file that http { server { listen 80; - location /my-datasette { proxy_pass http://127.0.0.1:8009/my-datasette; proxy_set_header X-Real-IP $remote_addr; @@ -157,6 +156,28 @@ Here is an example of an `nginx `__ configuration file that } } +You can also use the ``--uds`` option to Datasette to listen on a Unix domain socket instead of a port, configuring the nginx upstream proxy like this:: + + daemon off; + events { + worker_connections 1024; + } + http { + server { + listen 80; + location / { + proxy_pass http://datasette; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + } + } + upstream datasette { + server unix:/tmp/datasette.sock; + } + } + +Then run Datasette with ``datasette --uds /tmp/datasette.sock path/to/database.db``. 
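To confirm the socket is accepting requests you can connect to it from Python; ``httpx`` supports HTTP over a Unix domain socket via a custom transport. A minimal sketch, assuming the socket path passed to ``--uds``:

.. code-block:: python

    import httpx

    # Route HTTP traffic over the Unix domain socket instead of TCP
    transport = httpx.HTTPTransport(uds="/tmp/datasette.sock")
    client = httpx.Client(transport=transport)
    # The hostname here is only used for the Host header;
    # the connection itself goes to the socket
    print(client.get("http://localhost/").status_code)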
+ Apache proxy configuration -------------------------- diff --git a/tests/conftest.py b/tests/conftest.py index c6a3eee6..34a64efc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -131,7 +131,6 @@ def ds_localhost_https_server(tmp_path_factory): for blob in server_cert.cert_chain_pems: blob.write_to_path(path=certfile, append=True) ca.cert_pem.write_to_path(path=client_cert) - ds_proc = subprocess.Popen( [ "datasette", @@ -154,3 +153,22 @@ def ds_localhost_https_server(tmp_path_factory): yield ds_proc, client_cert # Shut it down at the end of the pytest session ds_proc.terminate() + + +@pytest.fixture(scope="session") +def ds_unix_domain_socket_server(tmp_path_factory): + socket_folder = tmp_path_factory.mktemp("uds") + uds = str(socket_folder / "datasette.sock") + ds_proc = subprocess.Popen( + ["datasette", "--memory", "--uds", uds], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + cwd=tempfile.gettempdir(), + ) + # Give the server time to start + time.sleep(1.5) + # Check it started successfully + assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") + yield ds_proc, uds + # Shut it down at the end of the pytest session + ds_proc.terminate() diff --git a/tests/test_cli.py b/tests/test_cli.py index e094ccb6..e31a305e 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -132,6 +132,7 @@ def test_metadata_yaml(): immutable=[], host="127.0.0.1", port=8001, + uds=None, reload=False, cors=False, sqlite_extensions=[], diff --git a/tests/test_cli_serve_server.py b/tests/test_cli_serve_server.py index 6f5366d1..73439125 100644 --- a/tests/test_cli_serve_server.py +++ b/tests/test_cli_serve_server.py @@ -1,5 +1,6 @@ import httpx import pytest +import socket @pytest.mark.serial @@ -21,3 +22,17 @@ def test_serve_localhost_https(ds_localhost_https_server): "path": "/_memory", "tables": [], }.items() <= response.json().items() + + +@pytest.mark.serial +@pytest.mark.skipif(not hasattr(socket, "AF_UNIX"), reason="Requires socket.AF_UNIX support") +def test_serve_unix_domain_socket(ds_unix_domain_socket_server): + _, uds = ds_unix_domain_socket_server + transport = httpx.HTTPTransport(uds=uds) + client = httpx.Client(transport=transport) + response = client.get("http://localhost/_memory.json") + assert { + "database": "_memory", + "path": "/_memory", + "tables": [], + }.items() <= response.json().items() From de2a1063284834ff86cb8d7c693717609d0d647e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 10 Jul 2021 16:46:49 -0700 Subject: [PATCH 0510/1705] Ran Black, refs #1388 --- tests/test_cli_serve_server.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/test_cli_serve_server.py b/tests/test_cli_serve_server.py index 73439125..1c31e2a3 100644 --- a/tests/test_cli_serve_server.py +++ b/tests/test_cli_serve_server.py @@ -25,7 +25,9 @@ def test_serve_localhost_https(ds_localhost_https_server): @pytest.mark.serial -@pytest.mark.skipif(not hasattr(socket, "AF_UNIX"), reason="Requires socket.AF_UNIX support") +@pytest.mark.skipif( + not hasattr(socket, "AF_UNIX"), reason="Requires socket.AF_UNIX support" +) def test_serve_unix_domain_socket(ds_unix_domain_socket_server): _, uds = ds_unix_domain_socket_server transport = httpx.HTTPTransport(uds=uds) From d792fc7cf5fde8fa748168e48c3183266a3a419f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 10 Jul 2021 17:29:42 -0700 Subject: [PATCH 0511/1705] Simplified nginx config examples --- docs/deploying.rst | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git 
a/docs/deploying.rst b/docs/deploying.rst index f3680034..ce4acc9d 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -144,14 +144,12 @@ Here is an example of an `nginx `__ configuration file that events { worker_connections 1024; } - http { server { listen 80; location /my-datasette { - proxy_pass http://127.0.0.1:8009/my-datasette; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_pass http://127.0.0.1:8009/my-datasette; + proxy_set_header Host $host; } } } @@ -166,9 +164,8 @@ You can also use the ``--uds`` option to Datasette to listen on a Unix domain so server { listen 80; location / { - proxy_pass http://datasette; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_pass http://datasette; + proxy_set_header Host $host; } } upstream datasette { From f83c84fd51d144036924ae77d99f12b0a69e7e6f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 10 Jul 2021 18:36:18 -0700 Subject: [PATCH 0512/1705] Update asgiref requirement from <3.4.0,>=3.2.10 to >=3.2.10,<3.5.0 (#1386) Updates the requirements on [asgiref](https://github.com/django/asgiref) to permit the latest version. - [Release notes](https://github.com/django/asgiref/releases) - [Changelog](https://github.com/django/asgiref/blob/main/CHANGELOG.txt) - [Commits](https://github.com/django/asgiref/commits) --- updated-dependencies: - dependency-name: asgiref dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 8a651d32..2541be1f 100644 --- a/setup.py +++ b/setup.py @@ -44,7 +44,7 @@ setup( include_package_data=True, python_requires=">=3.6", install_requires=[ - "asgiref>=3.2.10,<3.4.0", + "asgiref>=3.2.10,<3.5.0", "click>=7.1.1,<8.1.0", "click-default-group~=1.2.2", "Jinja2>=2.10.3,<3.1.0", From 4054e96a3914e821d0880a40a7284aaa9db1eaaa Mon Sep 17 00:00:00 2001 From: Aslak Raanes Date: Tue, 13 Jul 2021 19:42:27 +0200 Subject: [PATCH 0513/1705] Update deploying.rst (#1392) Use same base url for Apache as in the example --- docs/deploying.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/deploying.rst b/docs/deploying.rst index ce4acc9d..3be36df4 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -185,7 +185,7 @@ For `Apache `__, you can use the ``ProxyPass`` direct Then add these directives to proxy traffic:: - ProxyPass /datasette-prefix/ http://127.0.0.1:8009/datasette-prefix/ + ProxyPass /my-datasette/ http://127.0.0.1:8009/my-datasette/ ProxyPreserveHost On The `ProxyPreserveHost On `__ directive ensures that the original ``Host:`` header from the incoming request is passed through to Datasette. Datasette needs this to correctly assemble links to other pages using the :ref:`datasette_absolute_url` method. From d71cac498138ddd86f18607b9043e70286ea884a Mon Sep 17 00:00:00 2001 From: Aslak Raanes Date: Tue, 13 Jul 2021 20:32:49 +0200 Subject: [PATCH 0514/1705] How to configure Unix domain sockets with Apache Example on how to use Unix domain socket option on Apache. Not tested. (Usually I would have used [`ProxyPassReverse`](https://httpd.apache.org/docs/current/mod/mod_proxy.html#proxypassreverse) in combination with `ProxyPass`, i.e. 
```apache ProxyPass /my-datasette/ http://127.0.0.1:8009/my-datasette/ ProxyPassReverse /my-datasette/ http://127.0.0.1:8009/my-datasette/ ``` and ```apache ProxyPass /my-datasette/ unix:/tmp/datasette.sock|http://localhost/my-datasette/ ProxyPassReverse /my-datasette/ unix:/tmp/datasette.sock|http://localhost/my-datasette/ ``` --- docs/deploying.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/deploying.rst b/docs/deploying.rst index 3be36df4..c471fad6 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -188,4 +188,8 @@ Then add these directives to proxy traffic:: ProxyPass /my-datasette/ http://127.0.0.1:8009/my-datasette/ ProxyPreserveHost On +Using ``--uds`` you can use Unix domain sockets similar to the Nginx example: + + ProxyPass /my-datasette/ unix:/tmp/datasette.sock|http://localhost/my-datasette/ + The `ProxyPreserveHost On `__ directive ensures that the original ``Host:`` header from the incoming request is passed through to Datasette. Datasette needs this to correctly assemble links to other pages using the :ref:`datasette_absolute_url` method. From 7f4c854db1ed8c15338e9cf42d2a3f0c92e3b7b2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 13 Jul 2021 11:45:32 -0700 Subject: [PATCH 0515/1705] rST fix --- docs/deploying.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/deploying.rst b/docs/deploying.rst index c471fad6..366c9d61 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -188,7 +188,7 @@ Then add these directives to proxy traffic:: ProxyPass /my-datasette/ http://127.0.0.1:8009/my-datasette/ ProxyPreserveHost On -Using ``--uds`` you can use Unix domain sockets similar to the nginx example: +Using ``--uds`` you can use Unix domain sockets similar to the nginx example:: ProxyPass /my-datasette/ unix:/tmp/datasette.sock|http://localhost/my-datasette/ From 2c4cd7141abb5115eff00ed7aef002af39d51989 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 13 Jul 2021 16:15:48 -0700 Subject: [PATCH 0516/1705] Consistently use /my-datasette in examples --- docs/deploying.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/deploying.rst b/docs/deploying.rst index 366c9d61..c3e3e123 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -163,8 +163,8 @@ You can also use the ``--uds`` option to Datasette to listen on a Unix domain so http { server { listen 80; - location / { - proxy_pass http://datasette; + location /my-datasette { + proxy_pass http://datasette/my-datasette; proxy_set_header Host $host; } } @@ -173,7 +173,7 @@ You can also use the ``--uds`` option to Datasette to listen on a Unix domain so } } -Then run Datasette with ``datasette --uds /tmp/datasette.sock path/to/database.db``. +Then run Datasette with ``datasette --uds /tmp/datasette.sock path/to/database.db --setting base_url /my-datasette/``. 
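With ``base_url`` set like this, plugins and templates should build links through the ``datasette.urls`` family of helpers rather than hard-coding the prefix. A sketch of a plugin doing so (the menu label is illustrative):

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def menu_links(datasette, actor):
        # datasette.urls.instance() resolves to "/my-datasette/" here,
        # so the link keeps working if the prefix changes
        if actor:
            return [{"href": datasette.urls.instance(), "label": "Home"}]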
Apache proxy configuration -------------------------- From ba11ef27edd6981eeb26d7ecf5aa236707f5f8ce Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 13 Jul 2021 22:43:13 -0700 Subject: [PATCH 0517/1705] Clarify when to use systemd restart --- docs/deploying.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/deploying.rst b/docs/deploying.rst index c3e3e123..31d123e9 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -65,7 +65,7 @@ You can start the Datasette process running using the following:: sudo systemctl daemon-reload sudo systemctl start datasette.service -You may need to restart the Datasette service after making changes to its ``metadata.json`` configuration or the ``datasette.service`` file. You can do that using:: +You will need to restart the Datasette service after making changes to its ``metadata.json`` configuration or adding a new database file to that directory. You can do that using:: sudo systemctl restart datasette.service From a6c8e7fa4cffdeff84e9e755dcff4788fd6154b8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 14 Jul 2021 17:05:18 -0700 Subject: [PATCH 0518/1705] Big performance boost for faceting, closes #1394 --- datasette/views/table.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 1bda7496..876a0c81 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -674,12 +674,11 @@ class TableView(RowTableShared): else: page_size = self.ds.page_size - sql_no_limit = ( - "select {select_all_columns} from {table_name} {where}{order_by}".format( + sql_no_order_no_limit = ( + "select {select_all_columns} from {table_name} {where}".format( select_all_columns=select_all_columns, table_name=escape_sqlite(table), where=where_clause, - order_by=order_by, ) ) sql = "select {select_specified_columns} from {table_name} {where}{order_by} limit {page_size}{offset}".format( @@ -736,7 +735,7 @@ class TableView(RowTableShared): self.ds, request, database, - sql=sql_no_limit, + sql=sql_no_order_no_limit, params=params, table=table, metadata=table_metadata, From 7ea678db228504004b8d32f813c838b1dcfd317a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 14 Jul 2021 17:19:31 -0700 Subject: [PATCH 0519/1705] Warn about potential changes to get_metadata hook, refs #1384 --- docs/plugin_hooks.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index b687a6e7..6c2ad1e5 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1150,6 +1150,9 @@ get_metadata(datasette, key, database, table) This hook is responsible for returning a dictionary corresponding to Datasette :ref:`metadata`. This function is passed the ``database``, ``table`` and ``key`` which were passed to the upstream internal request for metadata. Regardless, it is important to return a global metadata object, where ``"databases": []`` would be a top-level key. The dictionary returned here will be merged with, and overwritten by, the contents of the physical ``metadata.yaml`` if one is present. +.. warning:: + The design of this plugin hook does not currently provide a mechanism for interacting with async code, and may change in the future. See `issue 1384 `__. + ..
code-block:: python @hookimpl From e27dd7c12c2a6977560dbc0005e32c55d9d759f4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 14 Jul 2021 17:32:33 -0700 Subject: [PATCH 0520/1705] Release 0.58 Refs #1365, #1371, #1377, #1384, #1387, #1388, #1389, #1394 --- datasette/version.py | 2 +- docs/changelog.rst | 19 +++++++++---------- docs/plugin_hooks.rst | 2 ++ 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index e5a29931..0f94b605 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.58a1" +__version__ = "0.58" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index bcd8b987..201cf4b7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,22 +4,21 @@ Changelog ========= -.. _v0_58a1: +.. _v0_58: -0.58a1 (2021-06-24) -------------------- +0.58 (2021-07-14) +----------------- +- New ``datasette --uds /tmp/datasette.sock`` option for binding Datasette to a Unix domain socket, see :ref:`proxy documentation ` (:issue:`1388`) +- ``"searchmode": "raw"`` table metadata option for defaulting a table to executing SQLite full-text search syntax without first escaping it, see :ref:`full_text_search_advanced_queries`. (:issue:`1389`) +- New plugin hook: :ref:`plugin_hook_get_metadata`, for returning custom metadata for an instance, database or table. Thanks, Brandon Roberts! (:issue:`1384`) - New plugin hook: :ref:`plugin_hook_skip_csrf`, for opting out of CSRF protection based on the incoming request. (:issue:`1377`) +- The :ref:`menu_links() `, :ref:`table_actions() ` and :ref:`database_actions() ` plugin hooks all gained a new optional ``request`` argument providing access to the current request. (:issue:`1371`) +- Major performance improvement for Datasette faceting. (:issue:`1394`) +- Improved documentation for :ref:`deploying_proxy` to recommend using ``ProxyPreserveHost On`` with Apache. (:issue:`1387`) - ``POST`` requests to endpoints that do not support that HTTP verb now return a 405 error. - ``db.path`` can now be provided as a ``pathlib.Path`` object, useful when writing unit tests for plugins. Thanks, Chris Amico. (:issue:`1365`) -.. _v0_58a0: - -0.58a0 (2021-06-09) -------------------- - -- The :ref:`menu_links() `, :ref:`table_actions() ` and :ref:`database_actions() ` plugin hooks all gained a new optional ``request`` argument providing access to the current request. (:issue:`1371`) - .. _v0_57_1: 0.57.1 (2021-06-08) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 6c2ad1e5..63258e2f 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -1130,6 +1130,8 @@ This example will disable CSRF protection for that specific URL path: If any of the currently active ``skip_csrf()`` plugin hooks return ``True``, CSRF protection will be skipped for the request. +..
_plugin_hook_get_metadata: + get_metadata(datasette, key, database, table) --------------------------------------------- From 084cfe1e00e1a4c0515390a513aca286eeea20c2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 14 Jul 2021 18:00:39 -0700 Subject: [PATCH 0521/1705] Removed out-of-date datasette serve help from README --- README.md | 33 --------------------------------- 1 file changed, 33 deletions(-) diff --git a/README.md b/README.md index 5682f59e..55160afe 100644 --- a/README.md +++ b/README.md @@ -53,39 +53,6 @@ Now visiting http://localhost:8001/History/downloads will show you a web interfa ![Downloads table rendered by datasette](https://static.simonwillison.net/static/2017/datasette-downloads.png) -## datasette serve options - - Usage: datasette serve [OPTIONS] [FILES]... - - Serve up specified SQLite database files with a web UI - - Options: - -i, --immutable PATH Database files to open in immutable mode - -h, --host TEXT Host for server. Defaults to 127.0.0.1 which means - only connections from the local machine will be - allowed. Use 0.0.0.0 to listen to all IPs and - allow access from other machines. - -p, --port INTEGER Port for server, defaults to 8001 - --reload Automatically reload if code or metadata change - detected - useful for development - --cors Enable CORS by serving Access-Control-Allow- - Origin: * - --load-extension PATH Path to a SQLite extension to load - --inspect-file TEXT Path to JSON file created using "datasette - inspect" - -m, --metadata FILENAME Path to JSON file containing license/source - metadata - --template-dir DIRECTORY Path to directory containing custom templates - --plugins-dir DIRECTORY Path to directory containing custom plugins - --static STATIC MOUNT mountpoint:path-to-directory for serving static - files - --memory Make /_memory database available - --config CONFIG Set config option using configname:value - docs.datasette.io/en/stable/config.html - --version-note TEXT Additional note to show on /-/versions - --help-config Show available config options - --help Show this message and exit. - ## metadata.json If you want to include licensing and source information in the generated datasette website you can do so using a JSON file that looks something like this: From 721a8d3cd4937f888efd2b52d5a61f0e25b484e1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 14 Jul 2021 18:51:36 -0700 Subject: [PATCH 0522/1705] Hopeful fix for publish problem in #1396 --- .github/workflows/publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 727f9933..54e582f0 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -115,5 +115,5 @@ jobs: -t $REPO:${GITHUB_REF#refs/tags/} \ --build-arg VERSION=${GITHUB_REF#refs/tags/} . 
docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest - docker push $REPO:${VERSION_TAG} + docker push $REPO:${GITHUB_REF#refs/tags/} docker push $REPO:latest From dd5ee8e66882c94343cd3f71920878c6cfd0da41 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 15 Jul 2021 23:26:06 -0700 Subject: [PATCH 0523/1705] Removed some unused imports I found these with: flake8 datasette | grep unus --- datasette/app.py | 1 - datasette/default_magic_parameters.py | 1 - datasette/facets.py | 2 -- datasette/utils/__init__.py | 4 +--- datasette/utils/asgi.py | 2 -- datasette/views/base.py | 1 - datasette/views/index.py | 2 +- setup.py | 2 -- 8 files changed, 2 insertions(+), 13 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 0b909968..5976d8b8 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -58,7 +58,6 @@ from .utils import ( parse_metadata, resolve_env_secrets, to_css_class, - HASH_LENGTH, ) from .utils.asgi import ( AsgiLifespan, diff --git a/datasette/default_magic_parameters.py b/datasette/default_magic_parameters.py index 0f8f397e..19382207 100644 --- a/datasette/default_magic_parameters.py +++ b/datasette/default_magic_parameters.py @@ -1,5 +1,4 @@ from datasette import hookimpl -from datasette.utils import escape_fts import datetime import os import time diff --git a/datasette/facets.py b/datasette/facets.py index 250734fd..f74e2d01 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -1,6 +1,5 @@ import json import urllib -import re from datasette import hookimpl from datasette.database import QueryInterrupted from datasette.utils import ( @@ -8,7 +7,6 @@ from datasette.utils import ( path_with_added_args, path_with_removed_args, detect_json1, - InvalidSql, sqlite3, ) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 1e193862..aec5a55b 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -5,7 +5,6 @@ from collections import OrderedDict, namedtuple, Counter import base64 import hashlib import inspect -import itertools import json import markupsafe import mergedeep @@ -17,10 +16,9 @@ import time import types import shutil import urllib -import numbers import yaml from .shutil_backport import copytree -from .sqlite import sqlite3, sqlite_version, supports_table_xinfo +from .sqlite import sqlite3, supports_table_xinfo # From https://www.sqlite.org/lang_keywords.html diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 63bf4926..5fa03b0a 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -3,9 +3,7 @@ from datasette.utils import MultiParams from mimetypes import guess_type from urllib.parse import parse_qs, urlunparse, parse_qsl from pathlib import Path -from html import escape from http.cookies import SimpleCookie, Morsel -import re import aiofiles import aiofiles.os diff --git a/datasette/views/base.py b/datasette/views/base.py index a87a0e77..cd584899 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -9,7 +9,6 @@ import urllib import pint from datasette import __version__ -from datasette.plugins import pm from datasette.database import QueryInterrupted from datasette.utils import ( await_me_maybe, diff --git a/datasette/views/index.py b/datasette/views/index.py index 8ac117a6..e37643f9 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -2,7 +2,7 @@ import hashlib import json from datasette.utils import check_visibility, CustomJSONEncoder -from datasette.utils.asgi import Response, Forbidden +from datasette.utils.asgi import Response 
from datasette.version import __version__ from .base import BaseView diff --git a/setup.py b/setup.py index 2541be1f..cfc1e484 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,5 @@ -from re import VERBOSE from setuptools import setup, find_packages import os -import sys def get_long_description(): From c00f29affcafce8314366852ba1a0f5a7dd25690 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 16 Jul 2021 12:44:58 -0700 Subject: [PATCH 0524/1705] Fix for race condition in refresh_schemas(), closes #1231 --- datasette/app.py | 7 +++++++ datasette/utils/internal_db.py | 10 +++++----- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 5976d8b8..5f348cb5 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -224,6 +224,7 @@ class Datasette: self.inspect_data = inspect_data self.immutables = set(immutables or []) self.databases = collections.OrderedDict() + self._refresh_schemas_lock = asyncio.Lock() self.crossdb = crossdb if memory or crossdb or not self.files: self.add_database(Database(self, is_memory=True), name="_memory") @@ -332,6 +333,12 @@ class Datasette: self.client = DatasetteClient(self) async def refresh_schemas(self): + if self._refresh_schemas_lock.locked(): + return + async with self._refresh_schemas_lock: + await self._refresh_schemas() + + async def _refresh_schemas(self): internal_db = self.databases["_internal"] if not self.internal_db_created: await init_internal_db(internal_db) diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py index e92625d5..40fe719e 100644 --- a/datasette/utils/internal_db.py +++ b/datasette/utils/internal_db.py @@ -5,7 +5,7 @@ async def init_internal_db(db): await db.execute_write( textwrap.dedent( """ - CREATE TABLE databases ( + CREATE TABLE IF NOT EXISTS databases ( database_name TEXT PRIMARY KEY, path TEXT, is_memory INTEGER, @@ -18,7 +18,7 @@ async def init_internal_db(db): await db.execute_write( textwrap.dedent( """ - CREATE TABLE tables ( + CREATE TABLE IF NOT EXISTS tables ( database_name TEXT, table_name TEXT, rootpage INTEGER, @@ -33,7 +33,7 @@ async def init_internal_db(db): await db.execute_write( textwrap.dedent( """ - CREATE TABLE columns ( + CREATE TABLE IF NOT EXISTS columns ( database_name TEXT, table_name TEXT, cid INTEGER, @@ -54,7 +54,7 @@ async def init_internal_db(db): await db.execute_write( textwrap.dedent( """ - CREATE TABLE indexes ( + CREATE TABLE IF NOT EXISTS indexes ( database_name TEXT, table_name TEXT, seq INTEGER, @@ -73,7 +73,7 @@ async def init_internal_db(db): await db.execute_write( textwrap.dedent( """ - CREATE TABLE foreign_keys ( + CREATE TABLE IF NOT EXISTS foreign_keys ( database_name TEXT, table_name TEXT, id INTEGER, From c73af5dd72305f6a01ea94a2c76d52e5e26de38b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 16 Jul 2021 12:46:13 -0700 Subject: [PATCH 0525/1705] Release 0.58.1 Refs #1231, #1396 --- datasette/version.py | 2 +- docs/changelog.rst | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 0f94b605..1b7b7350 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.58" +__version__ = "0.58.1" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 201cf4b7..6a951935 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,13 @@ Changelog ========= +.. 
_v0_58_1: +0.58.1 (2021-07-16) +------------------- + +- Fix for an intermittent race condition caused by the ``refresh_schemas()`` internal function. (:issue:`1231`) + .. _v0_58: 0.58 (2021-07-14) ----------------- From 6f1731f3055a5119cc393c118937d749405a1617 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 23 Jul 2021 12:38:09 -0700 Subject: [PATCH 0526/1705] Updated cookiecutter installation link --- docs/writing_plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst index 6afee1c3..bd60a4b6 100644 --- a/docs/writing_plugins.rst +++ b/docs/writing_plugins.rst @@ -41,7 +41,7 @@ Plugins that can be installed should be written as Python packages using a ``set The quickest way to start writing an installable plugin is to use the `datasette-plugin `__ cookiecutter template. This creates a new plugin structure for you complete with an example test and GitHub Actions workflows for testing and publishing your plugin. -`Install cookiecutter `__ and then run this command to start building a plugin using the template:: +`Install cookiecutter `__ and then run this command to start building a plugin using the template:: cookiecutter gh:simonw/datasette-plugin From eccfeb0871dd4bc27870faf64f80ac68e5b6bc0d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 26 Jul 2021 16:16:46 -0700 Subject: [PATCH 0527/1705] register_routes() plugin hook datasette argument, closes #1404 --- datasette/app.py | 2 +- datasette/hookspecs.py | 2 +- docs/plugin_hooks.rst | 7 +++++-- tests/fixtures.py | 1 + tests/plugins/my_plugin_2.py | 10 ++++++++++ tests/test_plugins.py | 19 +++++++++++++++++++ 6 files changed, 37 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 5f348cb5..2596ca50 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -960,7 +960,7 @@ class Datasette: """Returns an ASGI app function that serves the whole of Datasette""" routes = [] - for routes_to_add in pm.hook.register_routes(): + for routes_to_add in pm.hook.register_routes(datasette=self): for regex, view_fn in routes_to_add: routes.append((regex, wrap_view(view_fn, self))) diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 07b2f5ba..3ef0d4f5 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -75,7 +75,7 @@ def register_facet_classes(): @hookspec -def register_routes(): +def register_routes(datasette): """Register URL routes: return a list of (regex, view_function) pairs""" diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 63258e2f..4700763c 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -529,8 +529,11 @@ Examples: `datasette-atom `_, `dataset .. _plugin_register_routes: -register_routes() ------------------ +register_routes(datasette) +-------------------------- + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` Register additional view functions to execute for specified URL routes.
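As a sketch of what the new argument enables, here is a plugin that only registers a route when it has been configured - the plugin name ``register-route-demo`` and its ``path`` option mirror the test plugin added in this commit (see the tests diff that follows):

```python
from datasette import hookimpl
from datasette.utils.asgi import Response


@hookimpl
def register_routes(datasette):
    # Read this plugin's configuration from metadata.json / metadata.yml
    config = datasette.plugin_config("register-route-demo")
    if not config:
        # No configuration: register no routes at all
        return
    path = config["path"]
    # Each route is a (regex, view function) pair; this view takes no arguments
    return [(r"/{}/$".format(path), lambda: Response.text(path.upper()))]
```

Because the hook now receives ``datasette``, the set of registered routes can vary per instance based on plugin configuration, which was not possible before this change.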
diff --git a/tests/fixtures.py b/tests/fixtures.py index dce94876..93b7dce2 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -70,6 +70,7 @@ EXPECTED_PLUGINS = [ "extra_template_vars", "menu_links", "permission_allowed", + "register_routes", "render_cell", "startup", "table_actions", diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index b70372f3..f7a3f1c0 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -1,4 +1,5 @@ from datasette import hookimpl +from datasette.utils.asgi import Response from functools import wraps import markupsafe import json @@ -167,3 +168,12 @@ def table_actions(datasette, database, table, actor, request): return [{"href": datasette.urls.instance(), "label": label}] return inner + + +@hookimpl +def register_routes(datasette): + config = datasette.plugin_config("register-route-demo") + if not config: + return + path = config["path"] + return [(r"/{}/$".format(path), lambda: Response.text(path.upper()))] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 7a626ce5..0c01b7ae 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -648,6 +648,25 @@ def test_hook_register_routes(app_client, path, body): assert body == response.text +@pytest.mark.parametrize("configured_path", ("path1", "path2")) +def test_hook_register_routes_with_datasette(configured_path): + with make_app_client( + metadata={ + "plugins": { + "register-route-demo": { + "path": configured_path, + } + } + } + ) as client: + response = client.get(f"/{configured_path}/") + assert response.status == 200 + assert configured_path.upper() == response.text + # Other one should 404 + other_path = [p for p in ("path1", "path2") if configured_path != p][0] + assert client.get(f"/{other_path}/").status == 404 + + def test_hook_register_routes_post(app_client): response = app_client.post("/post/", {"this is": "post data"}, csrftoken_from=True) assert 200 == response.status From 121e10c29c5b412fddf0326939f1fe46c3ad9d4a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Jul 2021 16:30:12 -0700 Subject: [PATCH 0528/1705] Documentation and test for utils.parse_metadata(), closes #1405 --- docs/internals.rst | 18 ++++++++++++++++++ tests/test_utils.py | 16 ++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/docs/internals.rst b/docs/internals.rst index 98df998a..1e41cacd 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -795,3 +795,21 @@ By default all actors are denied access to the ``view-database`` permission for Plugins can access this database by calling ``db = datasette.get_database("_internal")`` and then executing queries using the :ref:`Database API `. You can explore an example of this database by `signing in as root `__ to the ``latest.datasette.io`` demo instance and then navigating to `latest.datasette.io/_internal `__. + +.. _internals_utils: + +The datasette.utils module +========================== + +The ``datasette.utils`` module contains various utility functions used by Datasette. As a general rule you should consider anything in this module to be unstable - functions and classes here could change without warning or be removed entirely between Datasette releases, without being mentioned in the release notes. + +The exception to this rule is anything that is documented here. If you find a need for an undocumented utility function in your own work, consider `opening an issue `__ requesting that the function you are using be upgraded to documented and supported status. + +..
_internals_utils_parse_metadata: + +parse_metadata(content) +----------------------- + +This function accepts a string containing either JSON or YAML, expected to be of the format described in :ref:`metadata`. It returns a nested Python dictionary representing the parsed data from that string. + +If the metadata cannot be parsed as either JSON or YAML the function will raise a ``utils.BadMetadataError`` exception. diff --git a/tests/test_utils.py b/tests/test_utils.py index be3daf2e..97b70ee5 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -610,3 +610,19 @@ async def test_initial_path_for_datasette(tmp_path_factory, dbs, expected_path): ) path = await utils.initial_path_for_datasette(datasette) assert path == expected_path + + +@pytest.mark.parametrize( + "content,expected", + ( + ("title: Hello", {"title": "Hello"}), + ('{"title": "Hello"}', {"title": "Hello"}), + ("{{ this }} is {{ bad }}", None), + ), +) +def test_parse_metadata(content, expected): + if expected is None: + with pytest.raises(utils.BadMetadataError): + utils.parse_metadata(content) + else: + assert utils.parse_metadata(content) == expected From 2b1c535c128984cc0ee2a097ecaa3ab638ae2a5b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Jul 2021 17:44:16 -0700 Subject: [PATCH 0529/1705] pytest.mark.serial for any test using isolated_filesystem(), refs #1406 --- tests/test_package.py | 3 ++- tests/test_publish_cloudrun.py | 7 +++++++ tests/test_publish_heroku.py | 5 +++++ 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/tests/test_package.py b/tests/test_package.py index bb939643..76693d2f 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -2,7 +2,7 @@ from click.testing import CliRunner from datasette import cli from unittest import mock import pathlib -import json +import pytest class CaptureDockerfile: @@ -24,6 +24,7 @@ CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data """.strip() +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.cli.call") def test_package(mock_call, mock_which): diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index 7881ebae..826860d7 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -6,6 +6,7 @@ import pytest import textwrap +@pytest.mark.serial @mock.patch("shutil.which") def test_publish_cloudrun_requires_gcloud(mock_which): mock_which.return_value = False @@ -27,6 +28,7 @@ def test_publish_cloudrun_invalid_database(mock_which): assert "Path 'woop.db' does not exist" in result.output +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @@ -75,6 +77,7 @@ Service name: input-service ) +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @@ -103,6 +106,7 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which): ) +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @@ -147,6 +151,7 @@ def test_publish_cloudrun_memory( ) +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @@ -225,6 +230,7 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which): } == json.loads(metadata) +@pytest.mark.serial 
@mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @@ -280,6 +286,7 @@ def test_publish_cloudrun_apt_get_install(mock_call, mock_output, mock_which): assert expected == dockerfile +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py index c011ab43..acbdafeb 100644 --- a/tests/test_publish_heroku.py +++ b/tests/test_publish_heroku.py @@ -1,8 +1,10 @@ from click.testing import CliRunner from datasette import cli from unittest import mock +import pytest +@pytest.mark.serial @mock.patch("shutil.which") def test_publish_heroku_requires_heroku(mock_which): mock_which.return_value = False @@ -15,6 +17,7 @@ def test_publish_heroku_requires_heroku(mock_which): assert "Publishing to Heroku requires heroku" in result.output +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.heroku.check_output") @mock.patch("datasette.publish.heroku.call") @@ -44,6 +47,7 @@ def test_publish_heroku_invalid_database(mock_which): assert "Path 'woop.db' does not exist" in result.output +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.heroku.check_output") @mock.patch("datasette.publish.heroku.call") @@ -79,6 +83,7 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which): ) +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.heroku.check_output") @mock.patch("datasette.publish.heroku.call") From 74b775e20f870de921ca3c09a75fe69e1c199fc7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Jul 2021 17:50:45 -0700 Subject: [PATCH 0530/1705] Use consistent pattern for test before deploy, refs #1406 --- .github/workflows/deploy-latest.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index d9f23f7d..849adb40 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -29,7 +29,9 @@ jobs: python -m pip install -e .[docs] python -m pip install sphinx-to-sqlite==0.1a1 - name: Run tests - run: pytest + run: | + pytest -n auto -m "not serial" + pytest -m "serial" - name: Build fixtures.db run: python tests/fixtures.py fixtures.db fixtures.json plugins --extra-db-filename extra_database.db - name: Build docs.db From e55cd9dc3f2d920d5cf6d8581ce49937a6ccc44d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Jul 2021 18:16:58 -0700 Subject: [PATCH 0531/1705] Try passing a directory to isolated_filesystem(), refs #1406 --- tests/test_package.py | 10 ++++----- tests/test_publish_cloudrun.py | 39 ++++++++++++++++------------------ tests/test_publish_heroku.py | 25 +++++++++++----------- 3 files changed, 34 insertions(+), 40 deletions(-) diff --git a/tests/test_package.py b/tests/test_package.py index 76693d2f..a72eef94 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -2,7 +2,6 @@ from click.testing import CliRunner from datasette import cli from unittest import mock import pathlib -import pytest class CaptureDockerfile: @@ -24,15 +23,14 @@ CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data """.strip() -@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.cli.call") -def test_package(mock_call, mock_which): +def test_package(mock_call, mock_which, tmp_path_factory): 
mock_which.return_value = True runner = CliRunner() capture = CaptureDockerfile() mock_call.side_effect = capture - with runner.isolated_filesystem(): + with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): with open("test.db", "w") as fp: fp.write("data") result = runner.invoke(cli.cli, ["package", "test.db", "--secret", "sekrit"]) @@ -43,12 +41,12 @@ def test_package(mock_call, mock_which): @mock.patch("shutil.which") @mock.patch("datasette.cli.call") -def test_package_with_port(mock_call, mock_which): +def test_package_with_port(mock_call, mock_which, tmp_path_factory): mock_which.return_value = True capture = CaptureDockerfile() mock_call.side_effect = capture runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): with open("test.db", "w") as fp: fp.write("data") result = runner.invoke( diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index 826860d7..d91b7646 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -6,12 +6,11 @@ import pytest import textwrap -@pytest.mark.serial @mock.patch("shutil.which") -def test_publish_cloudrun_requires_gcloud(mock_which): +def test_publish_cloudrun_requires_gcloud(mock_which, tmp_path_factory): mock_which.return_value = False runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): with open("test.db", "w") as fp: fp.write("data") result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"]) @@ -28,13 +27,12 @@ def test_publish_cloudrun_invalid_database(mock_which): assert "Path 'woop.db' does not exist" in result.output -@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @mock.patch("datasette.publish.cloudrun.get_existing_services") def test_publish_cloudrun_prompts_for_service( - mock_get_existing_services, mock_call, mock_output, mock_which + mock_get_existing_services, mock_call, mock_output, mock_which, tmp_path_factory ): mock_get_existing_services.return_value = [ {"name": "existing", "created": "2019-01-01", "url": "http://www.example.com/"} @@ -42,7 +40,7 @@ def test_publish_cloudrun_prompts_for_service( mock_output.return_value = "myproject" mock_which.return_value = True runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): with open("test.db", "w") as fp: fp.write("data") result = runner.invoke( @@ -77,15 +75,14 @@ Service name: input-service ) -@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") -def test_publish_cloudrun(mock_call, mock_output, mock_which): +def test_publish_cloudrun(mock_call, mock_output, mock_which, tmp_path_factory): mock_output.return_value = "myproject" mock_which.return_value = True runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): with open("test.db", "w") as fp: fp.write("data") result = runner.invoke( @@ -106,7 +103,6 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which): ) -@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @@ -121,12 +117,12 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which): ], ) def 
test_publish_cloudrun_memory( - mock_call, mock_output, mock_which, memory, should_fail + mock_call, mock_output, mock_which, memory, should_fail, tmp_path_factory ): mock_output.return_value = "myproject" mock_which.return_value = True runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): with open("test.db", "w") as fp: fp.write("data") result = runner.invoke( @@ -151,16 +147,17 @@ def test_publish_cloudrun_memory( ) -@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") -def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which): +def test_publish_cloudrun_plugin_secrets( + mock_call, mock_output, mock_which, tmp_path_factory +): mock_which.return_value = True mock_output.return_value = "myproject" runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): with open("test.db", "w") as fp: fp.write("data") with open("metadata.yml", "w") as fp: @@ -230,16 +227,17 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which): } == json.loads(metadata) -@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") -def test_publish_cloudrun_apt_get_install(mock_call, mock_output, mock_which): +def test_publish_cloudrun_apt_get_install( + mock_call, mock_output, mock_which, tmp_path_factory +): mock_which.return_value = True mock_output.return_value = "myproject" runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): with open("test.db", "w") as fp: fp.write("data") result = runner.invoke( @@ -286,7 +284,6 @@ def test_publish_cloudrun_apt_get_install(mock_call, mock_output, mock_which): assert expected == dockerfile -@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @@ -302,13 +299,13 @@ def test_publish_cloudrun_apt_get_install(mock_call, mock_output, mock_which): ], ) def test_publish_cloudrun_extra_options( - mock_call, mock_output, mock_which, extra_options, expected + mock_call, mock_output, mock_which, extra_options, expected, tmp_path_factory ): mock_which.return_value = True mock_output.return_value = "myproject" runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): with open("test.db", "w") as fp: fp.write("data") result = runner.invoke( diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py index acbdafeb..a591bcf8 100644 --- a/tests/test_publish_heroku.py +++ b/tests/test_publish_heroku.py @@ -1,15 +1,13 @@ from click.testing import CliRunner from datasette import cli from unittest import mock -import pytest -@pytest.mark.serial @mock.patch("shutil.which") -def test_publish_heroku_requires_heroku(mock_which): +def test_publish_heroku_requires_heroku(mock_which, tmp_path_factory): mock_which.return_value = False runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): with open("test.db", "w") as fp: fp.write("data") result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"]) @@ -17,15 +15,16 @@ def test_publish_heroku_requires_heroku(mock_which): assert "Publishing to 
Heroku requires heroku" in result.output -@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.heroku.check_output") @mock.patch("datasette.publish.heroku.call") -def test_publish_heroku_installs_plugin(mock_call, mock_check_output, mock_which): +def test_publish_heroku_installs_plugin( + mock_call, mock_check_output, mock_which, tmp_path_factory +): mock_which.return_value = True mock_check_output.side_effect = lambda s: {"['heroku', 'plugins']": b""}[repr(s)] runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): with open("t.db", "w") as fp: fp.write("data") result = runner.invoke(cli.cli, ["publish", "heroku", "t.db"], input="y\n") @@ -47,11 +46,10 @@ def test_publish_heroku_invalid_database(mock_which): assert "Path 'woop.db' does not exist" in result.output -@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.heroku.check_output") @mock.patch("datasette.publish.heroku.call") -def test_publish_heroku(mock_call, mock_check_output, mock_which): +def test_publish_heroku(mock_call, mock_check_output, mock_which, tmp_path_factory): mock_which.return_value = True mock_check_output.side_effect = lambda s: { "['heroku', 'plugins']": b"heroku-builds", @@ -59,7 +57,7 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which): "['heroku', 'apps:create', 'datasette', '--json']": b'{"name": "f"}', }[repr(s)] runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): with open("test.db", "w") as fp: fp.write("data") result = runner.invoke( @@ -83,11 +81,12 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which): ) -@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.heroku.check_output") @mock.patch("datasette.publish.heroku.call") -def test_publish_heroku_plugin_secrets(mock_call, mock_check_output, mock_which): +def test_publish_heroku_plugin_secrets( + mock_call, mock_check_output, mock_which, tmp_path_factory +): mock_which.return_value = True mock_check_output.side_effect = lambda s: { "['heroku', 'plugins']": b"heroku-builds", @@ -95,7 +94,7 @@ def test_publish_heroku_plugin_secrets(mock_call, mock_check_output, mock_which) "['heroku', 'apps:create', 'datasette', '--json']": b'{"name": "f"}', }[repr(s)] runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): with open("test.db", "w") as fp: fp.write("data") result = runner.invoke( From b46856391de5a819a85d1dd970428cbc702be94a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 29 Jul 2021 17:44:16 -0700 Subject: [PATCH 0532/1705] pytest.mark.serial for any test using isolated_filesystem(), refs #1406 --- tests/test_package.py | 2 ++ tests/test_publish_cloudrun.py | 7 +++++++ tests/test_publish_heroku.py | 5 +++++ 3 files changed, 14 insertions(+) diff --git a/tests/test_package.py b/tests/test_package.py index a72eef94..98e701bf 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -2,6 +2,7 @@ from click.testing import CliRunner from datasette import cli from unittest import mock import pathlib +import pytest class CaptureDockerfile: @@ -23,6 +24,7 @@ CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data """.strip() +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.cli.call") def test_package(mock_call, mock_which, tmp_path_factory): diff --git 
a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index d91b7646..ee0c9c95 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -6,6 +6,7 @@ import pytest import textwrap +@pytest.mark.serial @mock.patch("shutil.which") def test_publish_cloudrun_requires_gcloud(mock_which, tmp_path_factory): mock_which.return_value = False @@ -27,6 +28,7 @@ def test_publish_cloudrun_invalid_database(mock_which): assert "Path 'woop.db' does not exist" in result.output +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @@ -75,6 +77,7 @@ Service name: input-service ) +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @@ -103,6 +106,7 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which, tmp_path_factory): ) +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @@ -147,6 +151,7 @@ def test_publish_cloudrun_memory( ) +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @@ -227,6 +232,7 @@ def test_publish_cloudrun_plugin_secrets( } == json.loads(metadata) +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @@ -284,6 +290,7 @@ def test_publish_cloudrun_apt_get_install( assert expected == dockerfile +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py index a591bcf8..1fe02e08 100644 --- a/tests/test_publish_heroku.py +++ b/tests/test_publish_heroku.py @@ -1,8 +1,10 @@ from click.testing import CliRunner from datasette import cli from unittest import mock +import pytest +@pytest.mark.serial @mock.patch("shutil.which") def test_publish_heroku_requires_heroku(mock_which, tmp_path_factory): mock_which.return_value = False @@ -15,6 +17,7 @@ def test_publish_heroku_requires_heroku(mock_which, tmp_path_factory): assert "Publishing to Heroku requires heroku" in result.output +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.heroku.check_output") @mock.patch("datasette.publish.heroku.call") @@ -46,6 +49,7 @@ def test_publish_heroku_invalid_database(mock_which): assert "Path 'woop.db' does not exist" in result.output +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.heroku.check_output") @mock.patch("datasette.publish.heroku.call") @@ -81,6 +85,7 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which, tmp_path_facto ) +@pytest.mark.serial @mock.patch("shutil.which") @mock.patch("datasette.publish.heroku.check_output") @mock.patch("datasette.publish.heroku.call") From 96b1d0b7b42928e657b1aebcc95d55e4685690e0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Jul 2021 11:48:33 -0700 Subject: [PATCH 0533/1705] Attempted fix for too-long UDS bug in #1407 --- tests/conftest.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 34a64efc..215853b3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -157,8 +157,11 @@ def 
ds_localhost_https_server(tmp_path_factory): @pytest.fixture(scope="session") def ds_unix_domain_socket_server(tmp_path_factory): - socket_folder = tmp_path_factory.mktemp("uds") - uds = str(socket_folder / "datasette.sock") + # This used to use tmp_path_factory.mktemp("uds") but that turned out to + # produce paths that were too long to use as UDS on macOS, see + # https://github.com/simonw/datasette/issues/1407 - so I switched to + # using tempfile.gettempdir() + uds = str(pathlib.Path(tempfile.gettempdir()) / "datasette.sock") ds_proc = subprocess.Popen( ["datasette", "--memory", "--uds", uds], stdout=subprocess.PIPE, From ff253f5242e4b0b5d85d29d38b8461feb5ea997a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Jul 2021 11:49:08 -0700 Subject: [PATCH 0534/1705] Replace all uses of runner.isolated_filesystem, refs #1406 --- tests/test_package.py | 27 ++- tests/test_publish_cloudrun.py | 422 ++++++++++++++++----------------- tests/test_publish_heroku.py | 127 +++++----- 3 files changed, 284 insertions(+), 292 deletions(-) diff --git a/tests/test_package.py b/tests/test_package.py index 98e701bf..02ed1775 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -1,6 +1,7 @@ from click.testing import CliRunner from datasette import cli from unittest import mock +import os import pathlib import pytest @@ -32,12 +33,12 @@ def test_package(mock_call, mock_which, tmp_path_factory): runner = CliRunner() capture = CaptureDockerfile() mock_call.side_effect = capture - with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): - with open("test.db", "w") as fp: - fp.write("data") - result = runner.invoke(cli.cli, ["package", "test.db", "--secret", "sekrit"]) - assert 0 == result.exit_code - mock_call.assert_has_calls([mock.call(["docker", "build", "."])]) + os.chdir(tmp_path_factory.mktemp("runner")) + with open("test.db", "w") as fp: + fp.write("data") + result = runner.invoke(cli.cli, ["package", "test.db", "--secret", "sekrit"]) + assert 0 == result.exit_code + mock_call.assert_has_calls([mock.call(["docker", "build", "."])]) assert EXPECTED_DOCKERFILE.format(port=8001) == capture.captured @@ -48,11 +49,11 @@ def test_package_with_port(mock_call, mock_which, tmp_path_factory): capture = CaptureDockerfile() mock_call.side_effect = capture runner = CliRunner() - with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): - with open("test.db", "w") as fp: - fp.write("data") - result = runner.invoke( - cli.cli, ["package", "test.db", "-p", "8080", "--secret", "sekrit"] - ) - assert 0 == result.exit_code + os.chdir(tmp_path_factory.mktemp("runner")) + with open("test.db", "w") as fp: + fp.write("data") + result = runner.invoke( + cli.cli, ["package", "test.db", "-p", "8080", "--secret", "sekrit"] + ) + assert 0 == result.exit_code assert EXPECTED_DOCKERFILE.format(port=8080) == capture.captured diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index ee0c9c95..47f59d72 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -2,6 +2,7 @@ from click.testing import CliRunner from datasette import cli from unittest import mock import json +import os import pytest import textwrap @@ -11,12 +12,12 @@ import textwrap def test_publish_cloudrun_requires_gcloud(mock_which, tmp_path_factory): mock_which.return_value = False runner = CliRunner() - with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): - with open("test.db", "w") as fp: - fp.write("data") - result = runner.invoke(cli.cli, ["publish", 
"cloudrun", "test.db"]) - assert result.exit_code == 1 - assert "Publishing to Google Cloud requires gcloud" in result.output + os.chdir(tmp_path_factory.mktemp("runner")) + with open("test.db", "w") as fp: + fp.write("data") + result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"]) + assert result.exit_code == 1 + assert "Publishing to Google Cloud requires gcloud" in result.output @mock.patch("shutil.which") @@ -42,39 +43,32 @@ def test_publish_cloudrun_prompts_for_service( mock_output.return_value = "myproject" mock_which.return_value = True runner = CliRunner() - with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): - with open("test.db", "w") as fp: - fp.write("data") - result = runner.invoke( - cli.cli, ["publish", "cloudrun", "test.db"], input="input-service" - ) - assert ( - """ -Please provide a service name for this deployment - -Using an existing service name will over-write it - -Your existing services: - - existing - created 2019-01-01 - http://www.example.com/ - -Service name: input-service -""".strip() - == result.output.strip() - ) - assert 0 == result.exit_code - tag = "gcr.io/myproject/datasette" - mock_call.assert_has_calls( - [ - mock.call(f"gcloud builds submit --tag {tag}", shell=True), - mock.call( - "gcloud run deploy --allow-unauthenticated --platform=managed --image {} input-service".format( - tag - ), - shell=True, + os.chdir(tmp_path_factory.mktemp("runner")) + with open("test.db", "w") as fp: + fp.write("data") + result = runner.invoke( + cli.cli, ["publish", "cloudrun", "test.db"], input="input-service" + ) + assert ( + "Please provide a service name for this deployment\n\n" + "Using an existing service name will over-write it\n\n" + "Your existing services:\n\n" + " existing - created 2019-01-01 - http://www.example.com/\n\n" + "Service name: input-service" + ) == result.output.strip() + assert 0 == result.exit_code + tag = "gcr.io/myproject/datasette" + mock_call.assert_has_calls( + [ + mock.call(f"gcloud builds submit --tag {tag}", shell=True), + mock.call( + "gcloud run deploy --allow-unauthenticated --platform=managed --image {} input-service".format( + tag ), - ] - ) + shell=True, + ), + ] + ) @pytest.mark.serial @@ -85,25 +79,25 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which, tmp_path_factory): mock_output.return_value = "myproject" mock_which.return_value = True runner = CliRunner() - with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): - with open("test.db", "w") as fp: - fp.write("data") - result = runner.invoke( - cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"] - ) - assert 0 == result.exit_code - tag = f"gcr.io/{mock_output.return_value}/datasette" - mock_call.assert_has_calls( - [ - mock.call(f"gcloud builds submit --tag {tag}", shell=True), - mock.call( - "gcloud run deploy --allow-unauthenticated --platform=managed --image {} test".format( - tag - ), - shell=True, + os.chdir(tmp_path_factory.mktemp("runner")) + with open("test.db", "w") as fp: + fp.write("data") + result = runner.invoke( + cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"] + ) + assert 0 == result.exit_code + tag = f"gcr.io/{mock_output.return_value}/datasette" + mock_call.assert_has_calls( + [ + mock.call(f"gcloud builds submit --tag {tag}", shell=True), + mock.call( + "gcloud run deploy --allow-unauthenticated --platform=managed --image {} test".format( + tag ), - ] - ) + shell=True, + ), + ] + ) @pytest.mark.serial @@ -126,29 +120,29 @@ def test_publish_cloudrun_memory( 
mock_output.return_value = "myproject" mock_which.return_value = True runner = CliRunner() - with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): - with open("test.db", "w") as fp: - fp.write("data") - result = runner.invoke( - cli.cli, - ["publish", "cloudrun", "test.db", "--service", "test", "--memory", memory], - ) - if should_fail: - assert 2 == result.exit_code - return - assert 0 == result.exit_code - tag = f"gcr.io/{mock_output.return_value}/datasette" - mock_call.assert_has_calls( - [ - mock.call(f"gcloud builds submit --tag {tag}", shell=True), - mock.call( - "gcloud run deploy --allow-unauthenticated --platform=managed --image {} test --memory {}".format( - tag, memory - ), - shell=True, + os.chdir(tmp_path_factory.mktemp("runner")) + with open("test.db", "w") as fp: + fp.write("data") + result = runner.invoke( + cli.cli, + ["publish", "cloudrun", "test.db", "--service", "test", "--memory", memory], + ) + if should_fail: + assert 2 == result.exit_code + return + assert 0 == result.exit_code + tag = f"gcr.io/{mock_output.return_value}/datasette" + mock_call.assert_has_calls( + [ + mock.call(f"gcloud builds submit --tag {tag}", shell=True), + mock.call( + "gcloud run deploy --allow-unauthenticated --platform=managed --image {} test --memory {}".format( + tag, memory ), - ] - ) + shell=True, + ), + ] + ) @pytest.mark.serial @@ -162,74 +156,74 @@ def test_publish_cloudrun_plugin_secrets( mock_output.return_value = "myproject" runner = CliRunner() - with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): - with open("test.db", "w") as fp: - fp.write("data") - with open("metadata.yml", "w") as fp: - fp.write( - textwrap.dedent( - """ - title: Hello from metadata YAML - plugins: - datasette-auth-github: - foo: bar + os.chdir(tmp_path_factory.mktemp("runner")) + with open("test.db", "w") as fp: + fp.write("data") + with open("metadata.yml", "w") as fp: + fp.write( + textwrap.dedent( """ - ).strip() - ) - result = runner.invoke( - cli.cli, - [ - "publish", - "cloudrun", - "test.db", - "--metadata", - "metadata.yml", - "--service", - "datasette", - "--plugin-secret", - "datasette-auth-github", - "client_id", - "x-client-id", - "--show-files", - "--secret", - "x-secret", - ], + title: Hello from metadata YAML + plugins: + datasette-auth-github: + foo: bar + """ + ).strip() ) - assert result.exit_code == 0 - dockerfile = ( - result.output.split("==== Dockerfile ====\n")[1] - .split("\n====================\n")[0] - .strip() - ) - expected = textwrap.dedent( - r""" - FROM python:3.8 - COPY . /app - WORKDIR /app + result = runner.invoke( + cli.cli, + [ + "publish", + "cloudrun", + "test.db", + "--metadata", + "metadata.yml", + "--service", + "datasette", + "--plugin-secret", + "datasette-auth-github", + "client_id", + "x-client-id", + "--show-files", + "--secret", + "x-secret", + ], + ) + assert result.exit_code == 0 + dockerfile = ( + result.output.split("==== Dockerfile ====\n")[1] + .split("\n====================\n")[0] + .strip() + ) + expected = textwrap.dedent( + r""" + FROM python:3.8 + COPY . 
/app + WORKDIR /app - ENV DATASETTE_AUTH_GITHUB_CLIENT_ID 'x-client-id' - ENV DATASETTE_SECRET 'x-secret' - RUN pip install -U datasette - RUN datasette inspect test.db --inspect-file inspect-data.json - ENV PORT 8001 - EXPOSE 8001 - CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data.json --metadata metadata.json --setting force_https_urls on --port $PORT""" - ).strip() - assert expected == dockerfile - metadata = ( - result.output.split("=== metadata.json ===\n")[1] - .split("\n==== Dockerfile ====\n")[0] - .strip() - ) - assert { - "title": "Hello from metadata YAML", - "plugins": { - "datasette-auth-github": { - "foo": "bar", - "client_id": {"$env": "DATASETTE_AUTH_GITHUB_CLIENT_ID"}, - } + ENV DATASETTE_AUTH_GITHUB_CLIENT_ID 'x-client-id' + ENV DATASETTE_SECRET 'x-secret' + RUN pip install -U datasette + RUN datasette inspect test.db --inspect-file inspect-data.json + ENV PORT 8001 + EXPOSE 8001 + CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data.json --metadata metadata.json --setting force_https_urls on --port $PORT""" + ).strip() + assert expected == dockerfile + metadata = ( + result.output.split("=== metadata.json ===\n")[1] + .split("\n==== Dockerfile ====\n")[0] + .strip() + ) + assert { + "title": "Hello from metadata YAML", + "plugins": { + "datasette-auth-github": { + "client_id": {"$env": "DATASETTE_AUTH_GITHUB_CLIENT_ID"}, + "foo": "bar", }, - } == json.loads(metadata) + }, + } == json.loads(metadata) @pytest.mark.serial @@ -243,51 +237,51 @@ def test_publish_cloudrun_apt_get_install( mock_output.return_value = "myproject" runner = CliRunner() - with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): - with open("test.db", "w") as fp: - fp.write("data") - result = runner.invoke( - cli.cli, - [ - "publish", - "cloudrun", - "test.db", - "--service", - "datasette", - "--show-files", - "--secret", - "x-secret", - "--apt-get-install", - "ripgrep", - "--spatialite", - ], - ) - assert result.exit_code == 0 - dockerfile = ( - result.output.split("==== Dockerfile ====\n")[1] - .split("\n====================\n")[0] - .strip() - ) - expected = textwrap.dedent( - r""" - FROM python:3.8 - COPY . /app - WORKDIR /app + os.chdir(tmp_path_factory.mktemp("runner")) + with open("test.db", "w") as fp: + fp.write("data") + result = runner.invoke( + cli.cli, + [ + "publish", + "cloudrun", + "test.db", + "--service", + "datasette", + "--show-files", + "--secret", + "x-secret", + "--apt-get-install", + "ripgrep", + "--spatialite", + ], + ) + assert result.exit_code == 0 + dockerfile = ( + result.output.split("==== Dockerfile ====\n")[1] + .split("\n====================\n")[0] + .strip() + ) + expected = textwrap.dedent( + r""" + FROM python:3.8 + COPY . 
/app + WORKDIR /app - RUN apt-get update && \ - apt-get install -y ripgrep python3-dev gcc libsqlite3-mod-spatialite && \ - rm -rf /var/lib/apt/lists/* + RUN apt-get update && \ + apt-get install -y ripgrep python3-dev gcc libsqlite3-mod-spatialite && \ + rm -rf /var/lib/apt/lists/* - ENV DATASETTE_SECRET 'x-secret' - ENV SQLITE_EXTENSIONS '/usr/lib/x86_64-linux-gnu/mod_spatialite.so' - RUN pip install -U datasette - RUN datasette inspect test.db --inspect-file inspect-data.json - ENV PORT 8001 - EXPOSE 8001 - CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data.json --setting force_https_urls on --port $PORT - """ - ).strip() - assert expected == dockerfile + ENV DATASETTE_SECRET 'x-secret' + ENV SQLITE_EXTENSIONS '/usr/lib/x86_64-linux-gnu/mod_spatialite.so' + RUN pip install -U datasette + RUN datasette inspect test.db --inspect-file inspect-data.json + ENV PORT 8001 + EXPOSE 8001 + CMD datasette serve --host 0.0.0.0 -i test.db --cors --inspect-file inspect-data.json --setting force_https_urls on --port $PORT + """ + ).strip() + assert expected == dockerfile @pytest.mark.serial @@ -312,32 +306,32 @@ def test_publish_cloudrun_extra_options( mock_output.return_value = "myproject" runner = CliRunner() - with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): - with open("test.db", "w") as fp: - fp.write("data") - result = runner.invoke( - cli.cli, - [ - "publish", - "cloudrun", - "test.db", - "--service", - "datasette", - "--show-files", - "--extra-options", - extra_options, - ], - ) - assert result.exit_code == 0 - dockerfile = ( - result.output.split("==== Dockerfile ====\n")[1] - .split("\n====================\n")[0] - .strip() - ) - last_line = dockerfile.split("\n")[-1] - extra_options = ( - last_line.split("--inspect-file inspect-data.json")[1] - .split("--port")[0] - .strip() - ) - assert extra_options == expected + os.chdir(tmp_path_factory.mktemp("runner")) + with open("test.db", "w") as fp: + fp.write("data") + result = runner.invoke( + cli.cli, + [ + "publish", + "cloudrun", + "test.db", + "--service", + "datasette", + "--show-files", + "--extra-options", + extra_options, + ], + ) + assert result.exit_code == 0 + dockerfile = ( + result.output.split("==== Dockerfile ====\n")[1] + .split("\n====================\n")[0] + .strip() + ) + last_line = dockerfile.split("\n")[-1] + extra_options = ( + last_line.split("--inspect-file inspect-data.json")[1] + .split("--port")[0] + .strip() + ) + assert extra_options == expected diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py index 1fe02e08..b5a8af73 100644 --- a/tests/test_publish_heroku.py +++ b/tests/test_publish_heroku.py @@ -1,6 +1,7 @@ from click.testing import CliRunner from datasette import cli from unittest import mock +import os import pytest @@ -9,12 +10,12 @@ import pytest def test_publish_heroku_requires_heroku(mock_which, tmp_path_factory): mock_which.return_value = False runner = CliRunner() - with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): - with open("test.db", "w") as fp: - fp.write("data") - result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"]) - assert result.exit_code == 1 - assert "Publishing to Heroku requires heroku" in result.output + os.chdir(tmp_path_factory.mktemp("runner")) + with open("test.db", "w") as fp: + fp.write("data") + result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"]) + assert result.exit_code == 1 + assert "Publishing to Heroku requires heroku" in result.output @pytest.mark.serial @@ 
-27,11 +28,11 @@ def test_publish_heroku_installs_plugin( mock_which.return_value = True mock_check_output.side_effect = lambda s: {"['heroku', 'plugins']": b""}[repr(s)] runner = CliRunner() - with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): - with open("t.db", "w") as fp: - fp.write("data") - result = runner.invoke(cli.cli, ["publish", "heroku", "t.db"], input="y\n") - assert 0 != result.exit_code + os.chdir(tmp_path_factory.mktemp("runner")) + with open("t.db", "w") as fp: + fp.write("data") + result = runner.invoke(cli.cli, ["publish", "heroku", "t.db"], input="y\n") + assert 0 != result.exit_code mock_check_output.assert_has_calls( [mock.call(["heroku", "plugins"]), mock.call(["heroku", "apps:list", "--json"])] ) @@ -61,28 +62,26 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which, tmp_path_facto "['heroku', 'apps:create', 'datasette', '--json']": b'{"name": "f"}', }[repr(s)] runner = CliRunner() - with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): - with open("test.db", "w") as fp: - fp.write("data") - result = runner.invoke( - cli.cli, ["publish", "heroku", "test.db", "--tar", "gtar"] - ) - assert 0 == result.exit_code, result.output - mock_call.assert_has_calls( - [ - mock.call( - [ - "heroku", - "builds:create", - "-a", - "f", - "--include-vcs-ignore", - "--tar", - "gtar", - ] - ), - ] - ) + os.chdir(tmp_path_factory.mktemp("runner")) + with open("test.db", "w") as fp: + fp.write("data") + result = runner.invoke(cli.cli, ["publish", "heroku", "test.db", "--tar", "gtar"]) + assert 0 == result.exit_code, result.output + mock_call.assert_has_calls( + [ + mock.call( + [ + "heroku", + "builds:create", + "-a", + "f", + "--include-vcs-ignore", + "--tar", + "gtar", + ] + ), + ] + ) @pytest.mark.serial @@ -99,35 +98,33 @@ def test_publish_heroku_plugin_secrets( "['heroku', 'apps:create', 'datasette', '--json']": b'{"name": "f"}', }[repr(s)] runner = CliRunner() - with runner.isolated_filesystem(tmp_path_factory.mktemp("runner")): - with open("test.db", "w") as fp: - fp.write("data") - result = runner.invoke( - cli.cli, - [ - "publish", - "heroku", - "test.db", - "--plugin-secret", - "datasette-auth-github", - "client_id", - "x-client-id", - ], - ) - assert 0 == result.exit_code, result.output - mock_call.assert_has_calls( - [ - mock.call( - [ - "heroku", - "config:set", - "-a", - "f", - "DATASETTE_AUTH_GITHUB_CLIENT_ID=x-client-id", - ] - ), - mock.call( - ["heroku", "builds:create", "-a", "f", "--include-vcs-ignore"] - ), - ] - ) + os.chdir(tmp_path_factory.mktemp("runner")) + with open("test.db", "w") as fp: + fp.write("data") + result = runner.invoke( + cli.cli, + [ + "publish", + "heroku", + "test.db", + "--plugin-secret", + "datasette-auth-github", + "client_id", + "x-client-id", + ], + ) + assert 0 == result.exit_code, result.output + mock_call.assert_has_calls( + [ + mock.call( + [ + "heroku", + "config:set", + "-a", + "f", + "DATASETTE_AUTH_GITHUB_CLIENT_ID=x-client-id", + ] + ), + mock.call(["heroku", "builds:create", "-a", "f", "--include-vcs-ignore"]), + ] + ) From 4adca0d85077fe504e98cd7487343e76ccf25be5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 31 Jul 2021 17:58:11 -0700 Subject: [PATCH 0535/1705] No hidden SQL on canned query pages, closes #1411 --- datasette/templates/query.html | 2 +- tests/test_html.py | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/datasette/templates/query.html b/datasette/templates/query.html index b6c74883..543561d8 100644 --- 
a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -44,7 +44,7 @@
        <p><textarea id="sql-editor" name="sql">{% if query %}{{ query.sql }}{% endif %}</textarea></p>
    {% endif %} {% else %}
-    <input type="hidden" name="sql" value="{{ query.sql }}">
+    {% if not canned_query %}<input type="hidden" name="sql" value="{{ query.sql }}">{% endif %}
    <pre id="sql-query">{{ query.sql }}</pre> {% endif %} {% if named_parameter_values %} diff --git a/tests/test_html.py b/tests/test_html.py index aee6bce1..9f5b99e3 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1238,6 +1238,17 @@ def test_show_hide_sql_query(app_client): ] == [(hidden["name"], hidden["value"]) for hidden in hiddens] +def test_canned_query_with_hide_has_no_hidden_sql(app_client): + # For a canned query the show/hide should NOT have a hidden SQL field + # https://github.com/simonw/datasette/issues/1411 + response = app_client.get("/fixtures/neighborhood_search?_hide_sql=1") + soup = Soup(response.body, "html.parser") + hiddens = soup.find("form").select("input[type=hidden]") + assert [ + ("_hide_sql", "1"), + ] == [(hidden["name"], hidden["value"]) for hidden in hiddens] + + def test_extra_where_clauses(app_client): response = app_client.get( "/fixtures/facetable?_where=neighborhood='Dogpatch'&_where=city_id=1" From a679d0de87031e3de9013fc299ba2cbd75808684 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 3 Aug 2021 09:11:18 -0700 Subject: [PATCH 0536/1705] Fixed spelling of 'receive' in a bunch of places --- docs/internals.rst | 2 +- docs/plugin_hooks.rst | 4 ++-- tests/plugins/my_plugin.py | 4 ++-- tests/plugins/my_plugin_2.py | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/internals.rst b/docs/internals.rst index 1e41cacd..cfc4f6d5 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -149,7 +149,7 @@ Create a ``Response`` object and then use ``await response.asgi_send(send)``, pa .. code-block:: python - async def require_authorization(scope, recieve, send): + async def require_authorization(scope, receive, send): response = Response.text( "401 Authorization Required", headers={ diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 4700763c..269cb1c9 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -678,7 +678,7 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att def asgi_wrapper(datasette): def wrap_with_databases_header(app): @wraps(app) - async def add_x_databases_header(scope, recieve, send): + async def add_x_databases_header(scope, receive, send): async def wrapped_send(event): if event["type"] == "http.response.start": original_headers = event.get("headers") or [] @@ -691,7 +691,7 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att ], } await send(event) - await app(scope, recieve, wrapped_send) + await app(scope, receive, wrapped_send) return add_x_databases_header return wrap_with_databases_header diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 0e625623..59ac8add 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -178,11 +178,11 @@ def actor_from_request(datasette, request): @hookimpl def asgi_wrapper(): def wrap(app): - async def maybe_set_actor_in_scope(scope, recieve, send): + async def maybe_set_actor_in_scope(scope, receive, send): if b"_actor_in_scope" in scope.get("query_string", b""): scope = dict(scope, actor={"id": "from-scope"}) print(scope) - await app(scope, recieve, send) + await app(scope, receive, send) return maybe_set_actor_in_scope diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index f7a3f1c0..ba298fd4 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -77,7 +77,7 @@ def extra_template_vars(template, database, table, view_name, request, datasette def
asgi_wrapper(datasette): def wrap_with_databases_header(app): @wraps(app) - async def add_x_databases_header(scope, recieve, send): + async def add_x_databases_header(scope, receive, send): async def wrapped_send(event): if event["type"] == "http.response.start": original_headers = event.get("headers") or [] @@ -94,7 +94,7 @@ def asgi_wrapper(datasette): } await send(event) - await app(scope, recieve, wrapped_send) + await app(scope, receive, wrapped_send) return add_x_databases_header From 54b6e96ee8aa553b6671e341a1944f93f3fb89c3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 3 Aug 2021 09:12:48 -0700 Subject: [PATCH 0537/1705] Use optional rich dependency to render tracebacks, closes #1416 --- datasette/app.py | 8 ++++++++ datasette/cli.py | 8 ++++++++ setup.py | 1 + 3 files changed, 17 insertions(+) diff --git a/datasette/app.py b/datasette/app.py index 2596ca50..edd5ab87 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -81,6 +81,11 @@ from .tracer import AsgiTracer from .plugins import pm, DEFAULT_PLUGINS, get_plugins from .version import __version__ +try: + import rich +except ImportError: + rich = None + app_root = Path(__file__).parent.parent # https://github.com/simonw/datasette/issues/283#issuecomment-781591015 @@ -1270,6 +1275,9 @@ class DatasetteRouter: pdb.post_mortem(exception.__traceback__) + if rich is not None: + rich.console.Console().print_exception(show_locals=True) + title = None if isinstance(exception, Forbidden): status = 403 diff --git a/datasette/cli.py b/datasette/cli.py index 09aebcc8..e53f3d8e 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -31,6 +31,14 @@ from .utils.sqlite import sqlite3 from .utils.testing import TestClient from .version import __version__ +# Use Rich for tracebacks if it is installed +try: + from rich.traceback import install + + install(show_locals=True) +except ImportError: + pass + class Config(click.ParamType): # This will be removed in Datasette 1.0 in favour of class Setting diff --git a/setup.py b/setup.py index cfc1e484..c69b9b00 100644 --- a/setup.py +++ b/setup.py @@ -75,6 +75,7 @@ setup( "pytest-timeout>=1.4.2,<1.5", "trustme>=0.7,<0.9", ], + "rich": ["rich"], }, tests_require=["datasette[test]"], classifiers=[ From 2208c3c68e552d343e6a2872ff6e559fca9d1b38 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 3 Aug 2021 09:36:38 -0700 Subject: [PATCH 0538/1705] Spelling corrections plus CI job for codespell * Use codespell to check spelling in documentation, refs #1417 * Fixed spelling errors spotted by codespell, closes #1417 * Make codespell a docs dependency See also this TIL: https://til.simonwillison.net/python/codespell --- .github/workflows/spellcheck.yml | 25 +++++++++++++++++++++++++ docs/authentication.rst | 4 ++-- docs/changelog.rst | 8 ++++---- docs/codespell-ignore-words.txt | 1 + docs/deploying.rst | 2 +- docs/internals.rst | 6 +++--- docs/performance.rst | 2 +- docs/plugin_hooks.rst | 2 +- docs/publish.rst | 2 +- docs/settings.rst | 2 +- docs/sql_queries.rst | 2 +- setup.py | 2 +- 12 files changed, 42 insertions(+), 16 deletions(-) create mode 100644 .github/workflows/spellcheck.yml create mode 100644 docs/codespell-ignore-words.txt diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml new file mode 100644 index 00000000..d498e173 --- /dev/null +++ b/.github/workflows/spellcheck.yml @@ -0,0 +1,25 @@ +name: Check spelling in documentation + +on: [push, pull_request] + +jobs: + spellcheck: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: 
Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: 3.9 + - uses: actions/cache@v2 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install dependencies + run: | + pip install -e '.[docs]' + - name: Check spelling + run: codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt diff --git a/docs/authentication.rst b/docs/authentication.rst index 62ed7e8b..0d98cf82 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -60,7 +60,7 @@ The key question the permissions system answers is this: **Actors** are :ref:`described above `. -An **action** is a string describing the action the actor would like to perfom. A full list is :ref:`provided below ` - examples include ``view-table`` and ``execute-sql``. +An **action** is a string describing the action the actor would like to perform. A full list is :ref:`provided below ` - examples include ``view-table`` and ``execute-sql``. A **resource** is the item the actor wishes to interact with - for example a specific database or table. Some actions, such as ``permissions-debug``, are not associated with a particular resource. @@ -73,7 +73,7 @@ Permissions with potentially harmful effects should default to *deny*. Plugin au Defining permissions with "allow" blocks ---------------------------------------- -The standard way to define permissions in Datasette is to use an ``"allow"`` block. This is a JSON document describing which actors are allowed to perfom a permission. +The standard way to define permissions in Datasette is to use an ``"allow"`` block. This is a JSON document describing which actors are allowed to perform a permission. The most basic form of allow block is this (`allow demo `__, `deny demo `__): diff --git a/docs/changelog.rst b/docs/changelog.rst index 6a951935..883cb3eb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -426,7 +426,7 @@ See also `Datasette 0.49: The annotated release notes `__ for conversations about the project that go beyond just bug reports and issues. - Datasette can now be installed on macOS using Homebrew! Run ``brew install simonw/datasette/datasette``. See :ref:`installation_homebrew`. (:issue:`335`) - Two new commands: ``datasette install name-of-plugin`` and ``datasette uninstall name-of-plugin``. These are equivalent to ``pip install`` and ``pip uninstall`` but automatically run in the same virtual environment as Datasette, so users don't have to figure out where that virtual environment is - useful for installations created using Homebrew or ``pipx``. See :ref:`plugins_installing`. (:issue:`925`) -- A new command-line option, ``datasette --get``, accepts a path to a URL within the Datasette instance. It will run that request through Datasette (without starting a web server) and print out the repsonse. See :ref:`getting_started_datasette_get` for an example. (:issue:`926`) +- A new command-line option, ``datasette --get``, accepts a path to a URL within the Datasette instance. It will run that request through Datasette (without starting a web server) and print out the response. See :ref:`getting_started_datasette_get` for an example. (:issue:`926`) .. _v0_46: @@ -500,7 +500,7 @@ New plugin hooks Smaller changes ~~~~~~~~~~~~~~~ -- Cascading view permissons - so if a user has ``view-table`` they can view the table page even if they do not have ``view-database`` or ``view-instance``. 
(:issue:`832`) +- Cascading view permissions - so if a user has ``view-table`` they can view the table page even if they do not have ``view-database`` or ``view-instance``. (:issue:`832`) - CSRF protection no longer applies to ``Authentication: Bearer token`` requests or requests without cookies. (:issue:`835`) - ``datasette.add_message()`` now works inside plugins. (:issue:`864`) - Workaround for "Too many open files" error in test runs. (:issue:`846`) @@ -714,7 +714,7 @@ Also in this release: * Datasette now has a *pattern portfolio* at ``/-/patterns`` - e.g. https://latest.datasette.io/-/patterns. This is a page that shows every Datasette user interface component in one place, to aid core development and people building custom CSS themes. (:issue:`151`) * SQLite `PRAGMA functions `__ such as ``pragma_table_info(tablename)`` are now allowed in Datasette SQL queries. (:issue:`761`) * Datasette pages now consistently return a ``content-type`` of ``text/html; charset=utf-8"``. (:issue:`752`) -* Datasette now handles an ASGI ``raw_path`` value of ``None``, which should allow compatibilty with the `Mangum `__ adapter for running ASGI apps on AWS Lambda. Thanks, Colin Dellow. (`#719 `__) +* Datasette now handles an ASGI ``raw_path`` value of ``None``, which should allow compatibility with the `Mangum `__ adapter for running ASGI apps on AWS Lambda. Thanks, Colin Dellow. (`#719 `__) * Installation documentation now covers how to :ref:`installation_pipx`. (:issue:`756`) * Improved the documentation for :ref:`full_text_search`. (:issue:`748`) @@ -1169,7 +1169,7 @@ Documentation improvements plus a fix for publishing to Zeit Now. New plugin hooks, improved database view support and an easier way to use more recent versions of SQLite. - New ``publish_subcommand`` plugin hook. A plugin can now add additional ``datasette publish`` publishers in addition to the default ``now`` and ``heroku``, both of which have been refactored into default plugins. :ref:`publish_subcommand documentation `. Closes :issue:`349` -- New ``render_cell`` plugin hook. Plugins can now customize how values are displayed in the HTML tables produced by Datasette's browseable interface. `datasette-json-html `__ and `datasette-render-images `__ are two new plugins that use this hook. :ref:`render_cell documentation `. Closes :issue:`352` +- New ``render_cell`` plugin hook. Plugins can now customize how values are displayed in the HTML tables produced by Datasette's browsable interface. `datasette-json-html `__ and `datasette-render-images `__ are two new plugins that use this hook. :ref:`render_cell documentation `. Closes :issue:`352` - New ``extra_body_script`` plugin hook, enabling plugins to provide additional JavaScript that should be added to the page footer. :ref:`extra_body_script documentation `. - ``extra_css_urls`` and ``extra_js_urls`` hooks now take additional optional parameters, allowing them to be more selective about which pages they apply to. :ref:`Documentation `. - You can now use the :ref:`sortable_columns metadata setting ` to explicitly enable sort-by-column in the interface for database views, as well as for specific tables. 
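The spellcheck job added in this commit can also be reproduced locally before pushing. A minimal sketch (this assumes codespell is installed in the active virtual environment; a single file is passed here to avoid relying on shell globbing):

.. code-block:: python

    import subprocess

    # Mirror the workflow's "Check spelling" step against one docs file,
    # using the shared ignore-words list added below.
    subprocess.run(
        [
            "codespell",
            "docs/changelog.rst",
            "--ignore-words",
            "docs/codespell-ignore-words.txt",
        ],
        check=True,
    )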
diff --git a/docs/codespell-ignore-words.txt b/docs/codespell-ignore-words.txt new file mode 100644 index 00000000..a625cde5 --- /dev/null +++ b/docs/codespell-ignore-words.txt @@ -0,0 +1 @@ +AddWordsToIgnoreHere diff --git a/docs/deploying.rst b/docs/deploying.rst index 31d123e9..83d9e4dd 100644 --- a/docs/deploying.rst +++ b/docs/deploying.rst @@ -188,7 +188,7 @@ Then add these directives to proxy traffic:: ProxyPass /my-datasette/ http://127.0.0.1:8009/my-datasette/ ProxyPreserveHost On -Using ``--uds`` you can use Unix domain sockets similiar to the nginx example:: +Using ``--uds`` you can use Unix domain sockets similar to the nginx example:: ProxyPass /my-datasette/ unix:/tmp/datasette.sock|http://localhost/my-datasette/ diff --git a/docs/internals.rst b/docs/internals.rst index cfc4f6d5..058a8969 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -431,13 +431,13 @@ It offers the following methods: ``await datasette.client.get(path, **kwargs)`` - returns HTTPX Response Execute an internal GET request against that path. -``await datasette.client.post(path, **kwargs)`` - returns HTTPX Respons +``await datasette.client.post(path, **kwargs)`` - returns HTTPX Response Execute an internal POST request. Use ``data={"name": "value"}`` to pass form parameters. ``await datasette.client.options(path, **kwargs)`` - returns HTTPX Response Execute an internal OPTIONS request. -``await datasette.client.head(path, **kwargs)`` - returns HTTPX Respons +``await datasette.client.head(path, **kwargs)`` - returns HTTPX Response Execute an internal HEAD request. ``await datasette.client.put(path, **kwargs)`` - returns HTTPX Response @@ -714,7 +714,7 @@ The ``Database`` class also provides properties and methods for introspecting th List of names of tables in the database. ``await db.view_names()`` - list of strings - List of names of views in tha database. + List of names of views in the database. ``await db.table_columns(table)`` - list of strings Names of columns in a specific table. diff --git a/docs/performance.rst b/docs/performance.rst index b9e38e2f..bcf3208e 100644 --- a/docs/performance.rst +++ b/docs/performance.rst @@ -39,7 +39,7 @@ Then later you can start Datasette against the ``counts.json`` file and use it t datasette -i data.db --inspect-file=counts.json -You need to use the ``-i`` immutable mode against the databse file here or the counts from the JSON file will be ignored. +You need to use the ``-i`` immutable mode against the database file here or the counts from the JSON file will be ignored. You will rarely need to use this optimization in every-day use, but several of the ``datasette publish`` commands described in :ref:`publishing` use this optimization for better performance when deploying a database file to a hosting provider. diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 269cb1c9..10ec2cf1 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -664,7 +664,7 @@ Return an `ASGI `__ middleware wrapper function th This is a very powerful hook. You can use it to manipulate the entire Datasette response, or even to configure new URL routes that will be handled by your own custom code. -You can write your ASGI code directly against the low-level specification, or you can use the middleware utilites provided by an ASGI framework such as `Starlette `__. +You can write your ASGI code directly against the low-level specification, or you can use the middleware utilities provided by an ASGI framework such as `Starlette `__. 
This example plugin adds a ``x-databases`` HTTP header listing the currently attached databases: diff --git a/docs/publish.rst b/docs/publish.rst index cbd18a00..f6895f53 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -165,7 +165,7 @@ You can now run the resulting container like so:: This exposes port 8001 inside the container as port 8081 on your host machine, so you can access the application at ``http://localhost:8081/`` -You can customize the port that is exposed by the countainer using the ``--port`` option:: +You can customize the port that is exposed by the container using the ``--port`` option:: datasette package mydatabase.db --port 8080 diff --git a/docs/settings.rst b/docs/settings.rst index c246d33a..7cc4bae0 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -349,7 +349,7 @@ Using secrets with datasette publish The :ref:`cli_publish` and :ref:`cli_package` commands both generate a secret for you automatically when Datasette is deployed. -This means that every time you deploy a new version of a Datasette project, a new secret will be generated. This will cause signed cookies to become inalid on every fresh deploy. +This means that every time you deploy a new version of a Datasette project, a new secret will be generated. This will cause signed cookies to become invalid on every fresh deploy. You can fix this by creating a secret that will be used for multiple deploys and passing it using the ``--secret`` option:: diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index e9077f70..3049593d 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -403,7 +403,7 @@ Datasette can execute joins across multiple databases if it is started with the If it is started in this way, the ``/_memory`` page can be used to execute queries that join across multiple databases. -References to tables in attached databases should be preceeded by the database name and a period. +References to tables in attached databases should be preceded by the database name and a period. 
For example, this query will show a list of tables across both of the above databases: diff --git a/setup.py b/setup.py index c69b9b00..65e99848 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,7 @@ setup( """, setup_requires=["pytest-runner"], extras_require={ - "docs": ["sphinx_rtd_theme", "sphinx-autobuild"], + "docs": ["sphinx_rtd_theme", "sphinx-autobuild", "codespell"], "test": [ "pytest>=5.2.2,<6.3.0", "pytest-xdist>=2.2.1,<2.4", From cd8b7bee8fb5c1cdce7c8dbfeb0166011abc72c6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 3 Aug 2021 10:03:08 -0700 Subject: [PATCH 0539/1705] Run codespell against datasette source code too, refs #1417 --- .github/workflows/spellcheck.yml | 4 +++- datasette/hookspecs.py | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml index d498e173..2e24d3eb 100644 --- a/.github/workflows/spellcheck.yml +++ b/.github/workflows/spellcheck.yml @@ -22,4 +22,6 @@ jobs: run: | pip install -e '.[docs]' - name: Check spelling - run: codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt + run: | + codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt + codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 3ef0d4f5..f31ce538 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -86,12 +86,12 @@ def actor_from_request(datasette, request): @hookspec def permission_allowed(datasette, actor, action, resource): - """Check if actor is allowed to perfom this action - return True, False or None""" + """Check if actor is allowed to perform this action - return True, False or None""" @hookspec def canned_queries(datasette, database, actor): - """Return a dictonary of canned query definitions or an awaitable function that returns them""" + """Return a dictionary of canned query definitions or an awaitable function that returns them""" @hookspec From a1f383035698da8bf188659390af6e53ffeec940 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 3 Aug 2021 22:20:50 -0700 Subject: [PATCH 0540/1705] --cpu option for datasette publish cloudrun, closes #1420 --- datasette/publish/cloudrun.py | 13 +++++- docs/datasette-publish-cloudrun-help.txt | 1 + tests/test_docs.py | 2 +- tests/test_publish_cloudrun.py | 51 ++++++++++++++++-------- 4 files changed, 48 insertions(+), 19 deletions(-) diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index bad223a1..1fabcafd 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -36,6 +36,11 @@ def publish_subcommand(publish): callback=_validate_memory, help="Memory to allocate in Cloud Run, e.g. 
1Gi", ) + @click.option( + "--cpu", + type=click.Choice(["1", "2", "4"]), + help="Number of vCPUs to allocate in Cloud Run", + ) @click.option( "--apt-get-install", "apt_get_extras", @@ -66,6 +71,7 @@ def publish_subcommand(publish): spatialite, show_files, memory, + cpu, apt_get_extras, ): fail_if_publish_binary_not_installed( @@ -151,8 +157,11 @@ def publish_subcommand(publish): image_id = f"gcr.io/{project}/{name}" check_call(f"gcloud builds submit --tag {image_id}", shell=True) check_call( - "gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}".format( - image_id, service, " --memory {}".format(memory) if memory else "" + "gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}{}".format( + image_id, + service, + " --memory {}".format(memory) if memory else "", + " --cpu {}".format(cpu) if cpu else "", ), shell=True, ) diff --git a/docs/datasette-publish-cloudrun-help.txt b/docs/datasette-publish-cloudrun-help.txt index 3d05efb6..34481b40 100644 --- a/docs/datasette-publish-cloudrun-help.txt +++ b/docs/datasette-publish-cloudrun-help.txt @@ -28,5 +28,6 @@ Options: --spatialite Enable SpatialLite extension --show-files Output the generated Dockerfile and metadata.json --memory TEXT Memory to allocate in Cloud Run, e.g. 1Gi + --cpu [1|2|4] Number of vCPUs to allocate in Cloud Run --apt-get-install TEXT Additional packages to apt-get install --help Show this message and exit. diff --git a/tests/test_docs.py b/tests/test_docs.py index efd267b9..d0cb036d 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -50,7 +50,7 @@ def test_help_includes(name, filename): # actual has "Usage: cli package [OPTIONS] FILES" # because it doesn't know that cli will be aliased to datasette expected = expected.replace("Usage: datasette", "Usage: cli") - assert expected == actual + assert expected == actual, "Run python update-docs-help.py to fix this" @pytest.fixture(scope="session") diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index 47f59d72..9c8c38cf 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -105,17 +105,28 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which, tmp_path_factory): @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @pytest.mark.parametrize( - "memory,should_fail", + "memory,cpu,expected_gcloud_args", [ - ["1Gi", False], - ["2G", False], - ["256Mi", False], - ["4", True], - ["GB", True], + ["1Gi", None, "--memory 1Gi"], + ["2G", None, "--memory 2G"], + ["256Mi", None, "--memory 256Mi"], + ["4", None, None], + ["GB", None, None], + [None, 1, "--cpu 1"], + [None, 2, "--cpu 2"], + [None, 3, None], + [None, 4, "--cpu 4"], + ["2G", 4, "--memory 2G --cpu 4"], ], ) -def test_publish_cloudrun_memory( - mock_call, mock_output, mock_which, memory, should_fail, tmp_path_factory +def test_publish_cloudrun_memory_cpu( + mock_call, + mock_output, + mock_which, + memory, + cpu, + expected_gcloud_args, + tmp_path_factory, ): mock_output.return_value = "myproject" mock_which.return_value = True @@ -123,22 +134,30 @@ def test_publish_cloudrun_memory( os.chdir(tmp_path_factory.mktemp("runner")) with open("test.db", "w") as fp: fp.write("data") - result = runner.invoke( - cli.cli, - ["publish", "cloudrun", "test.db", "--service", "test", "--memory", memory], - ) - if should_fail: + args = ["publish", "cloudrun", "test.db", "--service", "test"] + if memory: + args.extend(["--memory", memory]) + if cpu: + 
args.extend(["--cpu", str(cpu)]) + result = runner.invoke(cli.cli, args) + if expected_gcloud_args is None: assert 2 == result.exit_code return assert 0 == result.exit_code tag = f"gcr.io/{mock_output.return_value}/datasette" + expected_call = ( + "gcloud run deploy --allow-unauthenticated --platform=managed" + " --image {} test".format(tag) + ) + if memory: + expected_call += " --memory {}".format(memory) + if cpu: + expected_call += " --cpu {}".format(cpu) mock_call.assert_has_calls( [ mock.call(f"gcloud builds submit --tag {tag}", shell=True), mock.call( - "gcloud run deploy --allow-unauthenticated --platform=managed --image {} test --memory {}".format( - tag, memory - ), + expected_call, shell=True, ), ] From acc22436622ff8476c30acf45ed60f54b4aaa5d9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 5 Aug 2021 08:47:18 -0700 Subject: [PATCH 0541/1705] Quotes around '.[test]' for zsh --- docs/contributing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index c3d0989a..8a638e0b 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -39,7 +39,7 @@ The next step is to create a virtual environment for your project and use it to # Now activate the virtual environment, so pip can install into it source venv/bin/activate # Install Datasette and its testing dependencies - python3 -m pip install -e .[test] + python3 -m pip install -e '.[test]' That last line does most of the work: ``pip install -e`` means "install this package in a way that allows me to edit the source code in place". The ``.[test]`` option means "use the setup.py in this directory and install the optional testing dependencies as well". From b7037f5ecea40dc5343250d08d741504b6dcb28f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 4 Aug 2021 19:58:09 -0700 Subject: [PATCH 0542/1705] Bit of breathing space on https://latest.datasette.io/fixtures/pragma_cache_size --- datasette/static/app.css | 3 +++ 1 file changed, 3 insertions(+) diff --git a/datasette/static/app.css b/datasette/static/app.css index ad517c98..c6be1e97 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -497,6 +497,9 @@ label.sort_by_desc { width: auto; padding-right: 1em; } +pre#sql-query { + margin-bottom: 1em; +} form input[type=text], form input[type=search] { border: 1px solid #ccc; From 66e143c76e90f643dc11b6ced5433130c90a2455 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 6 Aug 2021 22:09:00 -0700 Subject: [PATCH 0543/1705] New hide_sql canned query option, refs #1422 --- datasette/templates/query.html | 14 +++++++--- datasette/views/database.py | 32 +++++++++++++++++++-- docs/changelog.rst | 2 +- docs/sql_queries.rst | 25 +++++++++++++---- tests/fixtures.py | 1 + tests/test_html.py | 51 +++++++++++++++++++++++++++++++++- 6 files changed, 111 insertions(+), 14 deletions(-) diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 543561d8..75f7f1b1 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -33,7 +33,9 @@ {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} -

<h3>Custom SQL query{% if display_rows %} returning {% if truncated %}more than {% endif %}{{ "{:,}".format(display_rows|length) }} row{% if display_rows|length == 1 %}{% else %}s{% endif %}{% endif %}{% if not query_error %} <span class="show-hide-sql">{% if hide_sql %}<a href="{{ path_with_removed_args(request, {'_hide_sql': '1'}) }}">(show)</a>{% else %}<a href="{{ path_with_added_args(request, {'_hide_sql': '1'}) }}">(hide)</a>{% endif %}</span>{% endif %}</h3>
+<h3>Custom SQL query{% if display_rows %} returning {% if truncated %}more than {% endif %}{{ "{:,}".format(display_rows|length) }} row{% if display_rows|length == 1 %}{% else %}s{% endif %}{% endif %}{% if not query_error %}
+    <span class="show-hide-sql"><a href="{{ show_hide_link }}">({{ show_hide_text }})</a></span>
+{% endif %}</h3>
 {% if error %}
     <p class="message-error">{{ error }}</p>
 {% endif %} @@ -44,8 +46,11 @@
         <p><textarea id="sql-editor" name="sql">{% if query %}{{ query.sql }}{% endif %}</textarea></p>
     {% endif %} {% else %}
-    {% if not canned_query %}<input type="hidden" name="sql" value="{{ query.sql }}">{% endif %}
+    {% if not canned_query %}
+        <input type="hidden" name="sql" value="{{ query.sql }}">
+    {% endif %}
     <pre id="sql-query">{{ query.sql }}</pre> {% endif %} {% if named_parameter_values %}
     <h3>Query parameters</h3>
     @@ -54,9 +59,10 @@ {% endfor %} {% endif %}
-    <p><input type="submit" value="Run SQL">
+    <p>{% if not hide_sql %}<input type="submit" value="Run SQL">{% endif %}
     {% if canned_write %}{% endif %}
+    {{ show_hide_hidden }}
     {% if canned_query and edit_sql_url %}<a href="{{ edit_sql_url }}">Edit SQL</a>{% endif %}
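The show/hide links rendered by this template are plain Datasette URLs. As a rough sketch of how the two URL helpers referenced above behave (this assumes Datasette's internal Request.fake() testing utility):

.. code-block:: python

    from datasette.utils import path_with_added_args, path_with_removed_args
    from datasette.utils.asgi import Request

    # SQL currently hidden - the "show" link drops _hide_sql from the URL
    hidden = Request.fake("/fixtures/pragma_cache_size?_hide_sql=1")
    print(path_with_removed_args(hidden, {"_hide_sql"}))
    # /fixtures/pragma_cache_size

    # SQL currently visible - the "hide" link adds _hide_sql=1
    visible = Request.fake("/fixtures/pragma_cache_size")
    print(path_with_added_args(visible, {"_hide_sql": 1}))
    # /fixtures/pragma_cache_size?_hide_sql=1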
diff --git a/datasette/views/database.py b/datasette/views/database.py index 53bdceed..d9fe2b49 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -5,6 +5,8 @@ import json from markupsafe import Markup, escape from urllib.parse import parse_qsl, urlencode +import markupsafe + from datasette.utils import ( await_me_maybe, check_visibility, @@ -415,6 +417,29 @@ class QueryView(DataView): } ) ) + + show_hide_hidden = "" + if metadata.get("hide_sql"): + if bool(params.get("_show_sql")): + show_hide_link = path_with_removed_args(request, {"_show_sql"}) + show_hide_text = "hide" + show_hide_hidden = ( + '<input type="hidden" name="_show_sql" value="1">' + ) + else: + show_hide_link = path_with_added_args(request, {"_show_sql": 1}) + show_hide_text = "show" + else: + if bool(params.get("_hide_sql")): + show_hide_link = path_with_removed_args(request, {"_hide_sql"}) + show_hide_text = "show" + show_hide_hidden = ( + '<input type="hidden" name="_hide_sql" value="1">' + ) + else: + show_hide_link = path_with_added_args(request, {"_hide_sql": 1}) + show_hide_text = "hide" + hide_sql = show_hide_text == "show" return { "display_rows": display_rows, "custom_sql": True, @@ -425,9 +450,10 @@ class QueryView(DataView): "metadata": metadata, "config": self.ds.config_dict(), "request": request, - "path_with_added_args": path_with_added_args, - "path_with_removed_args": path_with_removed_args, - "hide_sql": "_hide_sql" in params, + "show_hide_link": show_hide_link, + "show_hide_text": show_hide_text, + "show_hide_hidden": markupsafe.Markup(show_hide_hidden), + "hide_sql": hide_sql, } return ( diff --git a/docs/changelog.rst b/docs/changelog.rst index 883cb3eb..d0fee19b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -674,7 +674,7 @@ The main focus of this release is a major upgrade to the :ref:`plugin_register_o * Visually distinguish float and integer columns - useful for figuring out why order-by-column might be returning unexpected results. (:issue:`729`) * The :ref:`internals_request`, which is passed to several plugin hooks, is now documented. (:issue:`706`) * New ``metadata.json`` option for setting a custom default page size for specific tables and views, see :ref:`metadata_page_size`. (:issue:`751`) -* Canned queries can now be configured with a default URL fragment hash, useful when working with plugins such as `datasette-vega <https://github.com/simonw/datasette-vega>`__, see :ref:`canned_queries_default_fragment`. (:issue:`706`) +* Canned queries can now be configured with a default URL fragment hash, useful when working with plugins such as `datasette-vega <https://github.com/simonw/datasette-vega>`__, see :ref:`canned_queries_options`. (:issue:`706`) * Fixed a bug in ``datasette publish`` when running on operating systems where the ``/tmp`` directory lives in a different volume, using a backport of the Python 3.8 ``shutil.copytree()`` function. (:issue:`744`) * Every plugin hook is now covered by the unit tests, and a new unit test checks that each plugin hook has at least one corresponding test. (:issue:`771`, :issue:`773`) diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index 3049593d..407e4ba2 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -187,14 +187,28 @@ You can alternatively provide an explicit list of named parameters using the ``" order by neighborhood title: Search neighborhoods -.. _canned_queries_default_fragment: +.. _canned_queries_options: -Setting a default fragment -~~~~~~~~~~~~~~~~~~~~~~~~~~ +Additional canned query options +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Additional options can be specified for canned queries in the YAML or JSON configuration.
+ +hide_sql ++++++++ + +Canned queries default to displaying their SQL query at the top of the page. If the query is extremely long you may want to hide it by default, with a "show" link that can be used to make it visible. + +Add the ``"hide_sql": true`` option to hide the SQL query by default. + +fragment +++++++++ Some plugins, such as `datasette-vega <https://github.com/simonw/datasette-vega>`__, can be configured by including additional data in the fragment hash of the URL - the bit that comes after a ``#`` symbol. -You can set a default fragment hash that will be included in the link to the canned query from the database index page using the ``"fragment"`` key: +You can set a default fragment hash that will be included in the link to the canned query from the database index page using the ``"fragment"`` key. + +This example demonstrates both ``fragment`` and ``hide_sql``: .. code-block:: json @@ -204,7 +218,8 @@ You can set a default fragment hash that will be included in the link to the can "queries": { "neighborhood_search": { "sql": "select neighborhood, facet_cities.name, state\nfrom facetable join facet_cities on facetable.city_id = facet_cities.id\nwhere neighborhood like '%' || :text || '%' order by neighborhood;", - "fragment": "fragment-goes-here" + "fragment": "fragment-goes-here", + "hide_sql": true } } } diff --git a/tests/fixtures.py b/tests/fixtures.py index 93b7dce2..873f9d55 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -360,6 +360,7 @@ METADATA = { "title": "Search neighborhoods", "description_html": "Demonstrating simple like search", "fragment": "fragment-goes-here", + "hide_sql": True, }, }, } diff --git a/tests/test_html.py b/tests/test_html.py index 9f5b99e3..b1b6c1f3 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1241,7 +1241,7 @@ def test_show_hide_sql_query(app_client): def test_canned_query_with_hide_has_no_hidden_sql(app_client): # For a canned query the show/hide should NOT have a hidden SQL field # https://github.com/simonw/datasette/issues/1411 - response = app_client.get("/fixtures/neighborhood_search?_hide_sql=1") + response = app_client.get("/fixtures/pragma_cache_size?_hide_sql=1") soup = Soup(response.body, "html.parser") hiddens = soup.find("form").select("input[type=hidden]") assert [ @@ -1249,6 +1249,55 @@ def test_canned_query_with_hide_has_no_hidden_sql(app_client): ] == [(hidden["name"], hidden["value"]) for hidden in hiddens] +@pytest.mark.parametrize( + "hide_sql,querystring,expected_hidden,expected_show_hide_link,expected_show_hide_text", + ( + (False, "", None, "/_memory/one?_hide_sql=1", "hide"), + (False, "?_hide_sql=1", "_hide_sql", "/_memory/one", "show"), + (True, "", None, "/_memory/one?_show_sql=1", "show"), + (True, "?_show_sql=1", "_show_sql", "/_memory/one", "hide"), + ), +) +def test_canned_query_show_hide_metadata_option( + hide_sql, + querystring, + expected_hidden, + expected_show_hide_link, + expected_show_hide_text, +): + with make_app_client( + metadata={ + "databases": { + "_memory": { + "queries": { + "one": { + "sql": "select 1 + 1", + "hide_sql": hide_sql, + } + } + } + } + }, + memory=True, + ) as client: + expected_show_hide_fragment = '<a href="{}">({})</a>'.format( + expected_show_hide_link, expected_show_hide_text + ) + response = client.get("/_memory/one" + querystring) + html = response.text + show_hide_fragment = html.split('<span class="show-hide-sql">')[1].split( + "</span>" + )[0] + assert show_hide_fragment == expected_show_hide_fragment + if expected_hidden: + assert ( + '<input type="hidden" name="{}" value="1">'.format(expected_hidden) + in html + ) + else: + assert '<input type="hidden" name="_' not in html From: Simon Willison Date: Fri, 6 Aug 2021 22:14:44 -0700
Subject: [PATCH 0544/1705] Fix for rich.console sometimes not being available, refs #1416 --- datasette/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index edd5ab87..f2f75884 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1276,7 +1276,7 @@ class DatasetteRouter: pdb.post_mortem(exception.__traceback__) if rich is not None: - rich.console.Console().print_exception(show_locals=True) + rich.get_console().print_exception(show_locals=True) title = None if isinstance(exception, Forbidden): From 6dd14a1221d0324f9e3d6cfa10d2281d1eba4806 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 6 Aug 2021 22:38:47 -0700 Subject: [PATCH 0545/1705] Improved links to example plugins --- docs/plugin_hooks.rst | 36 ++++++++++++++++++++++-------------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 10ec2cf1..200e0305 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -53,7 +53,7 @@ arguments and can be called like this:: select random_integer(1, 10); -Examples: `datasette-jellyfish `__, `datasette-jq `__, `datasette-haversine `__, `datasette-rure `__ +Examples: `datasette-jellyfish `__, `datasette-jq `__, `datasette-haversine `__, `datasette-rure `__ .. _plugin_hook_prepare_jinja2_environment: @@ -161,7 +161,7 @@ You can then use the new function in a template like so:: SQLite version: {{ sql_first("select sqlite_version()") }} -Examples: `datasette-search-all `_, `datasette-template-sql `_ +Examples: `datasette-search-all `_, `datasette-template-sql `_ .. _plugin_hook_extra_css_urls: @@ -210,7 +210,7 @@ This function can also return an awaitable function, useful if it needs to run a return inner -Examples: `datasette-cluster-map `_, `datasette-vega `_ +Examples: `datasette-cluster-map `_, `datasette-vega `_ .. _plugin_hook_extra_js_urls: @@ -257,7 +257,7 @@ If your code uses `JavaScript modules `_, `datasette-vega `_ +Examples: `datasette-cluster-map `_, `datasette-vega `_ .. _plugin_hook_extra_body_script: @@ -291,7 +291,7 @@ This will add the following to the end of your page: -Example: `datasette-cluster-map `_ +Example: `datasette-cluster-map `_ .. _plugin_hook_publish_subcommand: @@ -348,7 +348,7 @@ Let's say you want to build a plugin that adds a ``datasette publish my_hosting_ ): # Your implementation goes here -Examples: `datasette-publish-fly `_, `datasette-publish-vercel `_ +Examples: `datasette-publish-fly `_, `datasette-publish-vercel `_ .. _plugin_hook_render_cell: @@ -420,7 +420,7 @@ If the value matches that pattern, the plugin returns an HTML link element: label=markupsafe.escape(data["label"] or "") or " " )) -Examples: `datasette-render-binary `_, `datasette-render-markdown `__, `datasette-json-html `__ +Examples: `datasette-render-binary `_, `datasette-render-markdown `__, `datasette-json-html `__ .. _plugin_register_output_renderer: @@ -525,7 +525,7 @@ And here is an example ``can_render`` function which returns ``True`` only if th def can_render_demo(columns): return {"atom_id", "atom_title", "atom_updated"}.issubset(columns) -Examples: `datasette-atom `_, `datasette-ics `_ +Examples: `datasette-atom `_, `datasette-ics `_ .. _plugin_register_routes: @@ -583,7 +583,7 @@ The function can either return a :ref:`internals_response` or it can return noth See :ref:`writing_plugins_designing_urls` for tips on designing the URL routes used by your plugin. 
-Examples: `datasette-auth-github `__, `datasette-psutil `__ +Examples: `datasette-auth-github `__, `datasette-psutil `__ .. _plugin_register_facet_classes: @@ -695,7 +695,7 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att return add_x_databases_header return wrap_with_databases_header -Example: `datasette-cors `_ +Example: `datasette-cors `_ .. _plugin_hook_startup: @@ -743,7 +743,7 @@ Potential use-cases: await ds.invoke_startup() # Rest of test goes here -Examples: `datasette-saved-queries `__, `datasette-init `__ +Examples: `datasette-saved-queries `__, `datasette-init `__ .. _plugin_hook_canned_queries: @@ -812,7 +812,7 @@ The actor parameter can be used to include the currently authenticated actor in } for result in results} return inner -Example: `datasette-saved-queries `__ +Example: `datasette-saved-queries `__ .. _plugin_hook_actor_from_request: @@ -873,7 +873,7 @@ Instead of returning a dictionary, this function can return an awaitable functio return inner -Example: `datasette-auth-tokens `_ +Example: `datasette-auth-tokens `_ .. _plugin_hook_permission_allowed: @@ -932,7 +932,7 @@ Here's an example that allows users to view the ``admin_log`` table only if thei See :ref:`built-in permissions ` for a full list of permissions that are included in Datasette core. -Example: `datasette-permissions-sql `_ +Example: `datasette-permissions-sql `_ .. _plugin_hook_register_magic_parameters: @@ -1051,6 +1051,8 @@ This example adds a new menu item but only if the signed in user is ``"root"``: Using :ref:`internals_datasette_urls` here ensures that links in the menu will take the :ref:`setting_base_url` setting into account. +Examples: `datasette-search-all `_, `datasette-graphql `_ + .. _plugin_hook_table_actions: table_actions(datasette, actor, database, table, request) @@ -1089,6 +1091,8 @@ This example adds a new table action if the signed in user is ``"root"``: "label": "Edit schema for this table", }] +Example: `datasette-graphql `_ + .. _plugin_hook_database_actions: database_actions(datasette, actor, database, request) @@ -1108,6 +1112,8 @@ database_actions(datasette, actor, database, request) This hook is similar to :ref:`plugin_hook_table_actions` but populates an actions menu on the database page. +Example: `datasette-graphql `_ + .. _plugin_hook_skip_csrf: skip_csrf(datasette, scope) @@ -1172,3 +1178,5 @@ This hook is responsible for returning a dictionary corresponding to Datasette : # whatever we return here will be merged with any other plugins using this hook and # will be overwritten by a local metadata.yaml if one exists! return metadata + +Example: `datasette-remote-metadata plugin `__ From 61505dd0c6717cecdb73897e8613de9e9b7b6c42 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 6 Aug 2021 22:40:07 -0700 Subject: [PATCH 0546/1705] Release 0.59a0 Refs #1404, #1405, #1416, #1420, #1422 --- datasette/version.py | 2 +- docs/changelog.rst | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 1b7b7350..05704728 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.58.1" +__version__ = "0.59a0" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index d0fee19b..2cffef0f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,17 @@ Changelog ========= +.. 
_v0_59a0: + +0.59a0 (2021-08-06) +------------------- + +- :ref:`plugin_register_routes` plugin hook now accepts an optional ``datasette`` argument. (:issue:`1404`) +- New ``hide_sql`` canned query option for defaulting to hiding the SQL query used by a canned query, see :ref:`canned_queries_options`. (:issue:`1422`) +- New ``--cpu`` option for :ref:`datasette publish cloudrun <publish_cloud_run>`. (:issue:`1420`) +- If `Rich <https://github.com/willmcgugan/rich>`__ is installed in the same virtual environment as Datasette, it will be used to provide enhanced display of error tracebacks on the console. (:issue:`1416`) +- ``datasette.utils`` :ref:`internals_utils_parse_metadata` function, used by the new `datasette-remote-metadata plugin <https://datasette.io/plugins/datasette-remote-metadata>`__, is now a documented API. (:issue:`1405`) .. _v0_58_1: 0.58.1 (2021-07-16) From de5ce2e56339ad8966f417a4758f7c210c017dec Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 8 Aug 2021 10:37:51 -0700 Subject: [PATCH 0547/1705] datasette-pyinstrument --- docs/plugin_hooks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 200e0305..64c56309 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -695,7 +695,7 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att return add_x_databases_header return wrap_with_databases_header -Example: `datasette-cors <https://github.com/simonw/datasette-cors>`_ +Examples: `datasette-cors <https://github.com/simonw/datasette-cors>`__, `datasette-pyinstrument <https://datasette.io/plugins/datasette-pyinstrument>`__ .. _plugin_hook_startup: From 3bb6409a6cb8eaee32eb572423d9c0485a1dd917 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 8 Aug 2021 16:04:42 -0700 Subject: [PATCH 0548/1705] render_cell() can now return an awaitable, refs #1425 --- datasette/views/database.py | 1 + datasette/views/table.py | 1 + docs/plugin_hooks.rst | 4 +++- tests/fixtures.py | 1 + tests/plugins/my_plugin.py | 38 ++++++++++++++++++++++--------------- tests/test_api.py | 37 +++++++++++++++++++++++++++++++----- tests/test_plugins.py | 5 +++++ 7 files changed, 66 insertions(+), 21 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index d9fe2b49..f835dfac 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -361,6 +361,7 @@ class QueryView(DataView): database=database, datasette=self.ds, ) + plugin_value = await await_me_maybe(plugin_value) if plugin_value is not None: display_value = plugin_value else: diff --git a/datasette/views/table.py b/datasette/views/table.py index 876a0c81..3d25a1a5 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -198,6 +198,7 @@ class RowTableShared(DataView): database=database, datasette=self.ds, ) + plugin_display_value = await await_me_maybe(plugin_display_value) if plugin_display_value is not None: display_value = plugin_display_value elif isinstance(value, bytes): display_value = markupsafe.Markup( diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 64c56309..5cdb1623 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -370,7 +370,7 @@ Lets you customize the display of values within table cells in the HTML table vi The name of the database ``datasette`` - :ref:`internals_datasette` - You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. If your hook returns ``None``, it will be ignored. Use this to indicate that your hook is not able to custom render this particular value.
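A minimal sketch of what this newly documented capability enables - a render_cell hook that returns an awaitable so the displayed value can come from a SQL query (the "shout" column check is hypothetical):

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def render_cell(value, column, table, database, datasette):
        if column != "shout":  # hypothetical column name
            return None

        async def inner():
            db = datasette.get_database(database)
            # Derive the display value using SQLite itself
            return (
                await db.execute("select upper(?)", [str(value)])
            ).single_value()

        return inner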
@@ -378,6 +378,8 @@ If the hook returns a string, that string will be rendered in the table cell. If you want to return HTML markup you can do so by returning a ``jinja2.Markup`` object. +You can also return an awaitable function which returns a value. + Datasette will loop through all available ``render_cell`` hooks and display the value returned by the first one that does not return ``None``. Here is an example of a custom ``render_cell()`` plugin which looks for values that are a JSON string matching the following format:: diff --git a/tests/fixtures.py b/tests/fixtures.py index 873f9d55..880e4347 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -644,6 +644,7 @@ INSERT INTO simple_primary_key VALUES (1, 'hello'); INSERT INTO simple_primary_key VALUES (2, 'world'); INSERT INTO simple_primary_key VALUES (3, ''); INSERT INTO simple_primary_key VALUES (4, 'RENDER_CELL_DEMO'); +INSERT INTO simple_primary_key VALUES (5, 'RENDER_CELL_ASYNC'); INSERT INTO primary_key_multiple_columns VALUES (1, 'hey', 'world'); INSERT INTO primary_key_multiple_columns_explicit_label VALUES (1, 'hey', 'world2'); diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 59ac8add..75c76ea8 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -97,21 +97,29 @@ def extra_body_script( @hookimpl def render_cell(value, column, table, database, datasette): - # Render some debug output in cell with value RENDER_CELL_DEMO - if value != "RENDER_CELL_DEMO": - return None - return json.dumps( - { - "column": column, - "table": table, - "database": database, - "config": datasette.plugin_config( - "name-of-plugin", - database=database, - table=table, - ), - } - ) + async def inner(): + # Render some debug output in cell with value RENDER_CELL_DEMO + if value == "RENDER_CELL_DEMO": + return json.dumps( + { + "column": column, + "table": table, + "database": database, + "config": datasette.plugin_config( + "name-of-plugin", + database=database, + table=table, + ), + } + ) + elif value == "RENDER_CELL_ASYNC": + return ( + await datasette.get_database(database).execute( + "select 'RENDER_CELL_ASYNC_RESULT'" + ) + ).single_value() + + return inner @hookimpl diff --git a/tests/test_api.py b/tests/test_api.py index 0049d76d..83cca521 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -415,7 +415,7 @@ def test_database_page(app_client): "name": "simple_primary_key", "columns": ["id", "content"], "primary_keys": ["id"], - "count": 4, + "count": 5, "hidden": False, "fts_table": None, "foreign_keys": { @@ -652,6 +652,7 @@ def test_custom_sql(app_client): {"content": "world"}, {"content": ""}, {"content": "RENDER_CELL_DEMO"}, + {"content": "RENDER_CELL_ASYNC"}, ] == data["rows"] assert ["content"] == data["columns"] assert "fixtures" == data["database"] @@ -693,6 +694,7 @@ def test_table_json(app_client): {"id": "2", "content": "world"}, {"id": "3", "content": ""}, {"id": "4", "content": "RENDER_CELL_DEMO"}, + {"id": "5", "content": "RENDER_CELL_ASYNC"}, ] @@ -723,6 +725,7 @@ def test_table_shape_arrays(app_client): ["2", "world"], ["3", ""], ["4", "RENDER_CELL_DEMO"], + ["5", "RENDER_CELL_ASYNC"], ] == response.json["rows"] @@ -736,7 +739,13 @@ def test_table_shape_arrayfirst(app_client): } ) ) - assert ["hello", "world", "", "RENDER_CELL_DEMO"] == response.json + assert [ + "hello", + "world", + "", + "RENDER_CELL_DEMO", + "RENDER_CELL_ASYNC", + ] == response.json def test_table_shape_objects(app_client): @@ -746,6 +755,7 @@ def test_table_shape_objects(app_client): {"id": "2", 
"content": "world"}, {"id": "3", "content": ""}, {"id": "4", "content": "RENDER_CELL_DEMO"}, + {"id": "5", "content": "RENDER_CELL_ASYNC"}, ] == response.json["rows"] @@ -756,6 +766,7 @@ def test_table_shape_array(app_client): {"id": "2", "content": "world"}, {"id": "3", "content": ""}, {"id": "4", "content": "RENDER_CELL_DEMO"}, + {"id": "5", "content": "RENDER_CELL_ASYNC"}, ] == response.json @@ -768,6 +779,7 @@ def test_table_shape_array_nl(app_client): {"id": "2", "content": "world"}, {"id": "3", "content": ""}, {"id": "4", "content": "RENDER_CELL_DEMO"}, + {"id": "5", "content": "RENDER_CELL_ASYNC"}, ] == results @@ -788,6 +800,7 @@ def test_table_shape_object(app_client): "2": {"id": "2", "content": "world"}, "3": {"id": "3", "content": ""}, "4": {"id": "4", "content": "RENDER_CELL_DEMO"}, + "5": {"id": "5", "content": "RENDER_CELL_ASYNC"}, } == response.json @@ -1145,12 +1158,21 @@ def test_searchable_invalid_column(app_client): ("/fixtures/simple_primary_key.json?content=hello", [["1", "hello"]]), ( "/fixtures/simple_primary_key.json?content__contains=o", - [["1", "hello"], ["2", "world"], ["4", "RENDER_CELL_DEMO"]], + [ + ["1", "hello"], + ["2", "world"], + ["4", "RENDER_CELL_DEMO"], + ], ), ("/fixtures/simple_primary_key.json?content__exact=", [["3", ""]]), ( "/fixtures/simple_primary_key.json?content__not=world", - [["1", "hello"], ["3", ""], ["4", "RENDER_CELL_DEMO"]], + [ + ["1", "hello"], + ["3", ""], + ["4", "RENDER_CELL_DEMO"], + ["5", "RENDER_CELL_ASYNC"], + ], ), ], ) @@ -1163,7 +1185,11 @@ def test_table_filter_queries_multiple_of_same_type(app_client): response = app_client.get( "/fixtures/simple_primary_key.json?content__not=world&content__not=hello" ) - assert [["3", ""], ["4", "RENDER_CELL_DEMO"]] == response.json["rows"] + assert [ + ["3", ""], + ["4", "RENDER_CELL_DEMO"], + ["5", "RENDER_CELL_ASYNC"], + ] == response.json["rows"] @pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module") @@ -1293,6 +1319,7 @@ def test_view(app_client): {"upper_content": "WORLD", "content": "world"}, {"upper_content": "", "content": ""}, {"upper_content": "RENDER_CELL_DEMO", "content": "RENDER_CELL_DEMO"}, + {"upper_content": "RENDER_CELL_ASYNC", "content": "RENDER_CELL_ASYNC"}, ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 0c01b7ae..9bda7420 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -185,6 +185,11 @@ def test_hook_render_cell_demo(app_client): } == json.loads(td.string) +def test_hook_render_cell_async(app_client): + response = app_client.get("/fixtures?sql=select+'RENDER_CELL_ASYNC'") + assert b"RENDER_CELL_ASYNC_RESULT" in response.body + + def test_plugin_config(app_client): assert {"depth": "table"} == app_client.ds.plugin_config( "name-of-plugin", database="fixtures", table="sortable" From 818b0b76a2d58f7c2d850570efcdc22d345b4059 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 8 Aug 2021 16:07:52 -0700 Subject: [PATCH 0549/1705] Test table render_cell async as well as query results, refs #1425 --- tests/test_plugins.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 9bda7420..ec8ff0c5 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -185,8 +185,11 @@ def test_hook_render_cell_demo(app_client): } == json.loads(td.string) -def test_hook_render_cell_async(app_client): - response = app_client.get("/fixtures?sql=select+'RENDER_CELL_ASYNC'") +@pytest.mark.parametrize( + "path", 
("/fixtures?sql=select+'RENDER_CELL_ASYNC'", "/fixtures/simple_primary_key") +) +def test_hook_render_cell_async(app_client, path): + response = app_client.get(path) assert b"RENDER_CELL_ASYNC_RESULT" in response.body From f3c9edb376a13c09b5ecf97c7390f4e49efaadf2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 8 Aug 2021 16:11:40 -0700 Subject: [PATCH 0550/1705] Fixed some tests I broke in #1425 --- tests/test_csv.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_csv.py b/tests/test_csv.py index 3debf320..5e9406e7 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -11,6 +11,7 @@ EXPECTED_TABLE_CSV = """id,content 2,world 3, 4,RENDER_CELL_DEMO +5,RENDER_CELL_ASYNC """.replace( "\n", "\r\n" ) @@ -167,7 +168,7 @@ def test_csv_trace(app_client_with_trace): soup = Soup(response.text, "html.parser") assert ( soup.find("textarea").text - == "id,content\r\n1,hello\r\n2,world\r\n3,\r\n4,RENDER_CELL_DEMO\r\n" + == "id,content\r\n1,hello\r\n2,world\r\n3,\r\n4,RENDER_CELL_DEMO\r\n5,RENDER_CELL_ASYNC\r\n" ) assert "select id, content from simple_primary_key" in soup.find("pre").text From a390bdf9cef01d8723d025fc3348e81345ff4856 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 8 Aug 2021 17:38:42 -0700 Subject: [PATCH 0551/1705] Stop using firstresult=True on render_cell, refs #1425 See https://github.com/simonw/datasette/issues/1425#issuecomment-894883664 --- datasette/hookspecs.py | 2 +- datasette/views/database.py | 14 +++++++++----- datasette/views/table.py | 12 ++++++++---- 3 files changed, 18 insertions(+), 10 deletions(-) diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index f31ce538..56c79d23 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -59,7 +59,7 @@ def publish_subcommand(publish): """Subcommands for 'datasette publish'""" -@hookspec(firstresult=True) +@hookspec def render_cell(value, column, table, database, datasette): """Customize rendering of HTML table cell values""" diff --git a/datasette/views/database.py b/datasette/views/database.py index f835dfac..29600659 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -354,16 +354,20 @@ class QueryView(DataView): display_value = value # Let the plugins have a go # pylint: disable=no-member - plugin_value = pm.hook.render_cell( + plugin_display_value = None + for candidate in pm.hook.render_cell( value=value, column=column, table=None, database=database, datasette=self.ds, - ) - plugin_value = await await_me_maybe(plugin_value) - if plugin_value is not None: - display_value = plugin_value + ): + candidate = await await_me_maybe(candidate) + if candidate is not None: + plugin_display_value = candidate + break + if plugin_display_value is not None: + display_value = plugin_display_value else: if value in ("", None): display_value = Markup(" ") diff --git a/datasette/views/table.py b/datasette/views/table.py index 3d25a1a5..456d8069 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -191,15 +191,19 @@ class RowTableShared(DataView): # First let the plugins have a go # pylint: disable=no-member - plugin_display_value = pm.hook.render_cell( + plugin_display_value = None + for candidate in pm.hook.render_cell( value=value, column=column, table=table, database=database, datasette=self.ds, - ) - plugin_display_value = await await_me_maybe(plugin_display_value) - if plugin_display_value is not None: + ): + candidate = await await_me_maybe(candidate) + if candidate is not None: + plugin_display_value = candidate + 
break + if plugin_display_value: display_value = plugin_display_value elif isinstance(value, bytes): display_value = markupsafe.Markup( From ad90a72afa21b737b162e2bbdddc301a97d575cd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 8 Aug 2021 18:13:03 -0700 Subject: [PATCH 0552/1705] Release 0.59a1 Refs #1425 --- datasette/version.py | 2 +- docs/changelog.rst | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 05704728..f5fbfb3f 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.59a0" +__version__ = "0.59a1" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2cffef0f..1406a7ca 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,13 @@ Changelog ========= +.. _v0_59a1: + +0.59a1 (2021-08-08) +------------------- + +- The :ref:`render_cell() ` plugin hook can now return an awaitable function. This means the hook can execute SQL queries. (:issue:`1425`) + .. _v0_59a0: 0.59a0 (2021-08-06) From fc4846850fffd54561bc125332dfe97bb41ff42e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 8 Aug 2021 20:21:13 -0700 Subject: [PATCH 0553/1705] New way of deriving named parameters using explain, refs #1421 --- datasette/utils/__init__.py | 12 ++++++++++++ datasette/views/base.py | 1 - datasette/views/database.py | 5 ++++- tests/test_utils.py | 15 +++++++++++++++ 4 files changed, 31 insertions(+), 2 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index aec5a55b..44641a87 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1076,3 +1076,15 @@ class PrefixedUrlString(str): class StartupError(Exception): pass + + +_re_named_parameter = re.compile(":([a-zA-Z0-9_]+)") + +async def derive_named_parameters(db, sql): + explain = 'explain {}'.format(sql.strip().rstrip(";")) + possible_params = _re_named_parameter.findall(sql) + try: + results = await db.execute(explain, {p: None for p in possible_params}) + return [row["p4"].lstrip(":") for row in results if row["opcode"] == "Variable"] + except sqlite3.DatabaseError: + return [] diff --git a/datasette/views/base.py b/datasette/views/base.py index cd584899..1cea1386 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -159,7 +159,6 @@ class BaseView: class DataView(BaseView): name = "" - re_named_parameter = re.compile(":([a-zA-Z0-9_]+)") async def options(self, request, *args, **kwargs): r = Response.text("ok") diff --git a/datasette/views/database.py b/datasette/views/database.py index 29600659..7c36034c 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -10,6 +10,7 @@ import markupsafe from datasette.utils import ( await_me_maybe, check_visibility, + derive_named_parameters, to_css_class, validate_sql_select, is_url, @@ -223,7 +224,9 @@ class QueryView(DataView): await self.check_permission(request, "execute-sql", database) # Extract any :named parameters - named_parameters = named_parameters or self.re_named_parameter.findall(sql) + named_parameters = named_parameters or await derive_named_parameters( + self.ds.get_database(database), sql + ) named_parameter_values = { named_parameter: params.get(named_parameter) or "" for named_parameter in named_parameters diff --git a/tests/test_utils.py b/tests/test_utils.py index 97b70ee5..e04efb4b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -626,3 +626,18 @@ def test_parse_metadata(content, expected): 
utils.parse_metadata(content) else: assert utils.parse_metadata(content) == expected + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sql,expected", ( + ("select 1", []), + ("select 1 + :one", ["one"]), + ("select 1 + :one + :two", ["one", "two"]), + ("select 'bob' || '0:00' || :cat", ["cat"]), + ("select this is invalid", []), +)) +async def test_derive_named_parameters(sql, expected): + ds = Datasette([], memory=True) + db = ds.get_database("_memory") + params = await utils.derive_named_parameters(db, sql) + assert params == expected From b1fed48a95516ae84c0f020582303ab50ab817e2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 8 Aug 2021 20:26:08 -0700 Subject: [PATCH 0554/1705] derive_named_parameters falls back to regex on SQL error, refs #1421 --- datasette/utils/__init__.py | 5 +++-- tests/test_utils.py | 17 ++++++++++------- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 44641a87..70ac8976 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -1080,11 +1080,12 @@ class StartupError(Exception): _re_named_parameter = re.compile(":([a-zA-Z0-9_]+)") + async def derive_named_parameters(db, sql): - explain = 'explain {}'.format(sql.strip().rstrip(";")) + explain = "explain {}".format(sql.strip().rstrip(";")) possible_params = _re_named_parameter.findall(sql) try: results = await db.execute(explain, {p: None for p in possible_params}) return [row["p4"].lstrip(":") for row in results if row["opcode"] == "Variable"] except sqlite3.DatabaseError: - return [] + return possible_params diff --git a/tests/test_utils.py b/tests/test_utils.py index e04efb4b..e1b61072 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -629,13 +629,16 @@ def test_parse_metadata(content, expected): @pytest.mark.asyncio -@pytest.mark.parametrize("sql,expected", ( - ("select 1", []), - ("select 1 + :one", ["one"]), - ("select 1 + :one + :two", ["one", "two"]), - ("select 'bob' || '0:00' || :cat", ["cat"]), - ("select this is invalid", []), -)) +@pytest.mark.parametrize( + "sql,expected", + ( + ("select 1", []), + ("select 1 + :one", ["one"]), + ("select 1 + :one + :two", ["one", "two"]), + ("select 'bob' || '0:00' || :cat", ["cat"]), + ("select this is invalid :one, :two, :three", ["one", "two", "three"]), + ), +) async def test_derive_named_parameters(sql, expected): ds = Datasette([], memory=True) db = ds.get_database("_memory") From e837095ef35ae155b4c78cc9a8b7133a48c94f03 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 12 Aug 2021 16:53:23 -0700 Subject: [PATCH 0555/1705] Column metadata, closes #942 --- datasette/static/app.css | 17 ++++++++++++++++- datasette/static/table.js | 9 +++++++++ datasette/templates/_table.html | 2 +- datasette/templates/table.html | 8 ++++++++ datasette/views/table.py | 2 ++ docs/metadata.rst | 28 ++++++++++++++++++++++++++++ tests/fixtures.py | 6 ++++++ tests/test_html.py | 18 ++++++++++++++++++ 8 files changed, 88 insertions(+), 2 deletions(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index c6be1e97..bf068fdf 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -784,9 +784,14 @@ svg.dropdown-menu-icon { font-size: 0.7em; color: #666; margin: 0; - padding: 0; padding: 4px 8px 4px 8px; } +.dropdown-menu .dropdown-column-description { + margin: 0; + color: #666; + padding: 4px 8px 4px 8px; + max-width: 20em; +} .dropdown-menu li { border-bottom: 1px solid #ccc; } @@ -836,6 +841,16 @@ svg.dropdown-menu-icon { 
background-repeat: no-repeat; } +dl.column-descriptions dt { + font-weight: bold; +} +dl.column-descriptions dd { + padding-left: 1.5em; + white-space: pre-wrap; + line-height: 1.1em; + color: #666; +} + .anim-scale-in { animation-name: scale-in; animation-duration: 0.15s; diff --git a/datasette/static/table.js b/datasette/static/table.js index 991346df..85bf073f 100644 --- a/datasette/static/table.js +++ b/datasette/static/table.js @@ -9,6 +9,7 @@ var DROPDOWN_HTML = ``; var DROPDOWN_ICON_SVG = ` @@ -166,6 +167,14 @@ var DROPDOWN_ICON_SVG = `
     {% for column in display_columns %}
-
+
             {% if not column.sortable %}
                 {{ column.name }}
             {% else %}
diff --git a/datasette/templates/table.html b/datasette/templates/table.html
index 211352b5..466e8a47 100644
--- a/datasette/templates/table.html
+++ b/datasette/templates/table.html
@@ -51,6 +51,14 @@
 {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}
 
+{% if metadata.columns %}
+<dl class="column-descriptions">
+    {% for column_name, column_description in metadata.columns.items() %}
+        <dt>{{ column_name }}</dt>
+        <dd>{{ column_description }}</dd>
+    {% endfor %}
+</dl>
+{% endif %}

    {% if filtered_table_rows_count or filtered_table_rows_count == 0 %}{{ "{:,}".format(filtered_table_rows_count) }} row{% if filtered_table_rows_count == 1 %}{% else %}s{% endif %}{% endif %} {% if human_description_en %}{{ human_description_en }}{% endif %} diff --git a/datasette/views/table.py b/datasette/views/table.py index 456d8069..486a6131 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -125,6 +125,7 @@ class RowTableShared(DataView): """Returns columns, rows for specified table - including fancy foreign key treatment""" db = self.ds.databases[database] table_metadata = self.ds.table_metadata(database, table) + column_descriptions = table_metadata.get("columns") or {} column_details = {col.name: col for col in await db.table_column_details(table)} sortable_columns = await self.sortable_columns_for_table(database, table, True) pks = await db.primary_keys(table) @@ -147,6 +148,7 @@ class RowTableShared(DataView): "is_pk": r[0] in pks_for_display, "type": type_, "notnull": notnull, + "description": column_descriptions.get(r[0]), } ) diff --git a/docs/metadata.rst b/docs/metadata.rst index dad5adca..35b8aede 100644 --- a/docs/metadata.rst +++ b/docs/metadata.rst @@ -78,6 +78,34 @@ The three visible metadata fields you can apply to everything, specific database For each of these you can provide just the ``*_url`` field and Datasette will treat that as the default link label text and display the URL directly on the page. +.. _metadata_column_descriptions: + +Column descriptions +------------------- + +You can include descriptions for your columns by adding a ``"columns": {"name-of-column": "description-of-column"}`` block to your table metadata: + +.. code-block:: json + + { + "databases": { + "database1": { + "tables": { + "example_table": { + "columns": { + "column1": "Description of column 1", + "column2": "Description of column 2" + } + } + } + } + } + } + +These will be displayed at the top of the table page, and will also show in the cog menu for each column. + +You can see an example of how these look at `latest.datasette.io/fixtures/roadside_attractions `__. + Specifying units for a column ----------------------------- diff --git a/tests/fixtures.py b/tests/fixtures.py index 880e4347..4a420e4b 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -336,6 +336,12 @@ METADATA = { "fts_table": "searchable_fts", "fts_pk": "pk", }, + "roadside_attractions": { + "columns": { + "name": "The name of the attraction", + "address": "The street address for the attraction", + } + }, "attraction_characteristic": {"sort_desc": "pk"}, "facet_cities": {"sort": "name"}, "paginated_view": {"size": 25}, diff --git a/tests/test_html.py b/tests/test_html.py index b1b6c1f3..f12f89cd 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1777,3 +1777,21 @@ def test_trace_correctly_escaped(app_client): response = app_client.get("/fixtures?sql=select+'
<h1>Hello'&_trace=1")
+    assert "select '<h1>
    Hello" not in response.text assert "select '<h1>Hello" in response.text + + +def test_column_metadata(app_client): + response = app_client.get("/fixtures/roadside_attractions") + soup = Soup(response.body, "html.parser") + dl = soup.find("dl") + assert [(dt.text, dt.nextSibling.text) for dt in dl.findAll("dt")] == [ + ("name", "The name of the attraction"), + ("address", "The street address for the attraction"), + ] + assert ( + soup.select("th[data-column=name]")[0]["data-column-description"] + == "The name of the attraction" + ) + assert ( + soup.select("th[data-column=address]")[0]["data-column-description"] + == "The street address for the attraction" + ) From 77f46297a88ac7e49dad2139410b01ee56d5f99c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 12 Aug 2021 18:01:57 -0700 Subject: [PATCH 0556/1705] Rename --help-config to --help-settings, closes #1431 --- datasette/cli.py | 12 ++++++------ docs/datasette-serve-help.txt | 2 +- tests/test_cli.py | 10 +++++++++- 3 files changed, 16 insertions(+), 8 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index e53f3d8e..d4e23c70 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -51,7 +51,7 @@ class Config(click.ParamType): name, value = config.split(":", 1) if name not in DEFAULT_SETTINGS: self.fail( - f"{name} is not a valid option (--help-config to see all)", + f"{name} is not a valid option (--help-settings to see all)", param, ctx, ) @@ -84,7 +84,7 @@ class Setting(CompositeParamType): name, value = config if name not in DEFAULT_SETTINGS: self.fail( - f"{name} is not a valid option (--help-config to see all)", + f"{name} is not a valid option (--help-settings to see all)", param, ctx, ) @@ -408,7 +408,7 @@ def uninstall(packages, yes): help="Run an HTTP GET request against this path, print results and exit", ) @click.option("--version-note", help="Additional note to show on /-/versions") -@click.option("--help-config", is_flag=True, help="Show available config options") +@click.option("--help-settings", is_flag=True, help="Show available settings") @click.option("--pdb", is_flag=True, help="Launch debugger on any errors") @click.option( "-o", @@ -456,7 +456,7 @@ def serve( root, get, version_note, - help_config, + help_settings, pdb, open_browser, create, @@ -466,9 +466,9 @@ def serve( return_instance=False, ): """Serve up specified SQLite database files with a web UI""" - if help_config: + if help_settings: formatter = formatting.HelpFormatter() - with formatter.section("Config options"): + with formatter.section("Settings"): formatter.write_dl( [ (option.name, f"{option.help} (default={option.default})") diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index ec3f41a0..2911977a 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -32,7 +32,7 @@ Options: --get TEXT Run an HTTP GET request against this path, print results and exit --version-note TEXT Additional note to show on /-/versions - --help-config Show available config options + --help-settings Show available settings --pdb Launch debugger on any errors -o, --open Open Datasette in your web browser --create Create database files if they do not exist diff --git a/tests/test_cli.py b/tests/test_cli.py index e31a305e..763fe2e7 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -5,6 +5,7 @@ from .fixtures import ( EXPECTED_PLUGINS, ) import asyncio +from datasette.app import SETTINGS from datasette.plugins import DEFAULT_PLUGINS from datasette.cli import cli, serve from datasette.version import 
__version__ @@ -147,7 +148,7 @@ def test_metadata_yaml(): root=False, version_note=None, get=None, - help_config=False, + help_settings=False, pdb=False, crossdb=False, open_browser=False, @@ -291,3 +292,10 @@ def test_weird_database_names(ensure_eventloop, tmpdir, filename): cli, [db_path, "--get", "/{}".format(urllib.parse.quote(filename_no_stem))] ) assert result2.exit_code == 0, result2.output + + +def test_help_settings(): + runner = CliRunner() + result = runner.invoke(cli, ["--help-settings"]) + for setting in SETTINGS: + assert setting.name in result.output From ca4f83dc7b1d573b92a8921fca96d3ed490614c3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 12 Aug 2021 18:10:36 -0700 Subject: [PATCH 0557/1705] Rename config= to settings=, refs #1432 --- datasette/app.py | 8 ++++---- datasette/cli.py | 8 ++++---- datasette/templates/table.html | 2 +- datasette/views/base.py | 2 +- datasette/views/database.py | 2 +- tests/fixtures.py | 20 ++++++++++---------- tests/test_api.py | 8 ++++---- tests/test_custom_pages.py | 2 +- tests/test_facets.py | 2 +- tests/test_html.py | 14 ++++++++------ 10 files changed, 35 insertions(+), 33 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f2f75884..8cbaaf9f 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -200,7 +200,7 @@ class Datasette: plugins_dir=None, static_mounts=None, memory=False, - config=None, + settings=None, secret=None, version_note=None, config_dir=None, @@ -279,7 +279,7 @@ class Datasette: raise StartupError("config.json should be renamed to settings.json") if config_dir and (config_dir / "settings.json").exists() and not config: config = json.loads((config_dir / "settings.json").read_text()) - self._settings = dict(DEFAULT_SETTINGS, **(config or {})) + self._settings = dict(DEFAULT_SETTINGS, **(settings or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note self.executor = futures.ThreadPoolExecutor( @@ -419,8 +419,8 @@ class Datasette: def setting(self, key): return self._settings.get(key, None) - def config_dict(self): - # Returns a fully resolved config dictionary, useful for templates + def settings_dict(self): + # Returns a fully resolved settings dictionary, useful for templates return {option.name: self.setting(option.name) for option in SETTINGS} def _metadata_recursive_update(self, orig, updated): diff --git a/datasette/cli.py b/datasette/cli.py index d4e23c70..ea6da748 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -495,14 +495,14 @@ def serve( if metadata: metadata_data = parse_metadata(metadata.read()) - combined_config = {} + combined_settings = {} if config: click.echo( "--config name:value will be deprecated in Datasette 1.0, use --setting name value instead", err=True, ) - combined_config.update(config) - combined_config.update(settings) + combined_settings.update(config) + combined_settings.update(settings) kwargs = dict( immutables=immutable, @@ -514,7 +514,7 @@ def serve( template_dir=template_dir, plugins_dir=plugins_dir, static_mounts=static, - config=combined_config, + settings=combined_settings, memory=memory, secret=secret, version_note=version_note, diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 466e8a47..a28945ad 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -201,7 +201,7 @@ CSV options: {% if expandable_columns %}{% endif %} - {% if next_url and config.allow_csv_stream %}{% endif %} + {% if next_url and settings.allow_csv_stream %}{% 
endif %} {% for key, value in url_csv_hidden_args %} diff --git a/datasette/views/base.py b/datasette/views/base.py index 1cea1386..3333781c 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -614,7 +614,7 @@ class DataView(BaseView): ] + [("_size", "max")], "datasette_version": __version__, - "config": self.ds.config_dict(), + "settings": self.ds.settings_dict(), }, } if "metadata" not in context: diff --git a/datasette/views/database.py b/datasette/views/database.py index 7c36034c..e3070ce6 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -456,7 +456,7 @@ class QueryView(DataView): "canned_query": canned_query, "edit_sql_url": edit_sql_url, "metadata": metadata, - "config": self.ds.config_dict(), + "settings": self.ds.settings_dict(), "request": request, "show_hide_link": show_hide_link, "show_hide_text": show_hide_text, diff --git a/tests/fixtures.py b/tests/fixtures.py index 4a420e4b..dc22c609 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -99,7 +99,7 @@ def make_app_client( max_returned_rows=None, cors=False, memory=False, - config=None, + settings=None, filename="fixtures.db", is_immutable=False, extra_databases=None, @@ -129,7 +129,7 @@ def make_app_client( # Insert at start to help test /-/databases ordering: files.insert(0, extra_filepath) os.chdir(os.path.dirname(filepath)) - config = config or {} + settings = settings or {} for key, value in { "default_page_size": 50, "max_returned_rows": max_returned_rows or 100, @@ -138,8 +138,8 @@ def make_app_client( # errors when running the full test suite: "num_sql_threads": 1, }.items(): - if key not in config: - config[key] = value + if key not in settings: + settings[key] = value ds = Datasette( files, immutables=immutables, @@ -147,7 +147,7 @@ def make_app_client( cors=cors, metadata=metadata or METADATA, plugins_dir=PLUGINS_DIR, - config=config, + settings=settings, inspect_data=inspect_data, static_mounts=static_mounts, template_dir=template_dir, @@ -171,7 +171,7 @@ def app_client_no_files(): @pytest.fixture(scope="session") def app_client_base_url_prefix(): - with make_app_client(config={"base_url": "/prefix/"}) as client: + with make_app_client(settings={"base_url": "/prefix/"}) as client: yield client @@ -210,13 +210,13 @@ def app_client_two_attached_databases_one_immutable(): @pytest.fixture(scope="session") def app_client_with_hash(): - with make_app_client(config={"hash_urls": True}, is_immutable=True) as client: + with make_app_client(settings={"hash_urls": True}, is_immutable=True) as client: yield client @pytest.fixture(scope="session") def app_client_with_trace(): - with make_app_client(config={"trace_debug": True}, is_immutable=True) as client: + with make_app_client(settings={"trace_debug": True}, is_immutable=True) as client: yield client @@ -234,13 +234,13 @@ def app_client_returned_rows_matches_page_size(): @pytest.fixture(scope="session") def app_client_larger_cache_size(): - with make_app_client(config={"cache_size_kb": 2500}) as client: + with make_app_client(settings={"cache_size_kb": 2500}) as client: yield client @pytest.fixture(scope="session") def app_client_csv_max_mb_one(): - with make_app_client(config={"max_csv_mb": 1}) as client: + with make_app_client(settings={"max_csv_mb": 1}) as client: yield client diff --git a/tests/test_api.py b/tests/test_api.py index 83cca521..1e93c62e 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1711,14 +1711,14 @@ def test_suggested_facets(app_client): def test_allow_facet_off(): - with 
make_app_client(config={"allow_facet": False}) as client: + with make_app_client(settings={"allow_facet": False}) as client: assert 400 == client.get("/fixtures/facetable.json?_facet=planet_int").status # Should not suggest any facets either: assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"] def test_suggest_facets_off(): - with make_app_client(config={"suggest_facets": False}) as client: + with make_app_client(settings={"suggest_facets": False}) as client: # Now suggested_facets should be [] assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"] @@ -1883,7 +1883,7 @@ def test_config_cache_size(app_client_larger_cache_size): def test_config_force_https_urls(): - with make_app_client(config={"force_https_urls": True}) as client: + with make_app_client(settings={"force_https_urls": True}) as client: response = client.get("/fixtures/facetable.json?_size=3&_facet=state") assert response.json["next_url"].startswith("https://") assert response.json["facet_results"]["state"]["results"][0][ @@ -1921,7 +1921,7 @@ def test_custom_query_with_unicode_characters(app_client): @pytest.mark.parametrize("trace_debug", (True, False)) def test_trace(trace_debug): - with make_app_client(config={"trace_debug": trace_debug}) as client: + with make_app_client(settings={"trace_debug": trace_debug}) as client: response = client.get("/fixtures/simple_primary_key.json?_trace=1") assert response.status == 200 diff --git a/tests/test_custom_pages.py b/tests/test_custom_pages.py index 5a71f56d..76c67397 100644 --- a/tests/test_custom_pages.py +++ b/tests/test_custom_pages.py @@ -14,7 +14,7 @@ def custom_pages_client(): @pytest.fixture(scope="session") def custom_pages_client_with_base_url(): with make_app_client( - template_dir=TEST_TEMPLATE_DIRS, config={"base_url": "/prefix/"} + template_dir=TEST_TEMPLATE_DIRS, settings={"base_url": "/prefix/"} ) as client: yield client diff --git a/tests/test_facets.py b/tests/test_facets.py index 18fb8c3b..22927512 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -351,7 +351,7 @@ async def test_json_array_with_blanks_and_nulls(): @pytest.mark.asyncio async def test_facet_size(): - ds = Datasette([], memory=True, config={"max_returned_rows": 50}) + ds = Datasette([], memory=True, settings={"max_returned_rows": 50}) db = ds.add_database(Database(ds, memory_name="test_facet_size")) await db.execute_write( "create table neighbourhoods(city text, neighbourhood text)", block=True diff --git a/tests/test_html.py b/tests/test_html.py index f12f89cd..90fcdae7 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -214,7 +214,7 @@ def test_definition_sql(path, expected_definition_sql, app_client): def test_table_cell_truncation(): - with make_app_client(config={"truncate_cells_html": 5}) as client: + with make_app_client(settings={"truncate_cells_html": 5}) as client: response = client.get("/fixtures/facetable") assert response.status == 200 table = Soup(response.body, "html.parser").find("table") @@ -239,7 +239,7 @@ def test_table_cell_truncation(): def test_row_page_does_not_truncate(): - with make_app_client(config={"truncate_cells_html": 5}) as client: + with make_app_client(settings={"truncate_cells_html": 5}) as client: response = client.get("/fixtures/facetable/1") assert response.status == 200 table = Soup(response.body, "html.parser").find("table") @@ -1072,7 +1072,9 @@ def test_database_download_disallowed_for_memory(): def test_allow_download_off(): - with make_app_client(is_immutable=True, 
config={"allow_download": False}) as client: + with make_app_client( + is_immutable=True, settings={"allow_download": False} + ) as client: response = client.get("/fixtures") soup = Soup(response.body, "html.parser") assert not len(soup.findAll("a", {"href": re.compile(r"\.db$")})) @@ -1486,7 +1488,7 @@ def test_query_error(app_client): def test_config_template_debug_on(): - with make_app_client(config={"template_debug": True}) as client: + with make_app_client(settings={"template_debug": True}) as client: response = client.get("/fixtures/facetable?_context=1") assert response.status == 200 assert response.text.startswith("
    {")
    @@ -1500,7 +1502,7 @@ def test_config_template_debug_off(app_client):
     
     def test_debug_context_includes_extra_template_vars():
         # https://github.com/simonw/datasette/issues/693
    -    with make_app_client(config={"template_debug": True}) as client:
    +    with make_app_client(settings={"template_debug": True}) as client:
             response = client.get("/fixtures/facetable?_context=1")
             # scope_path is added by PLUGIN1
             assert "scope_path" in response.text
    @@ -1744,7 +1746,7 @@ def test_facet_more_links(
         expected_ellipses_url,
     ):
         with make_app_client(
    -        config={"max_returned_rows": max_returned_rows, "default_facet_size": 2}
    +        settings={"max_returned_rows": max_returned_rows, "default_facet_size": 2}
         ) as client:
             response = client.get(path)
             soup = Soup(response.body, "html.parser")
    
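A minimal sketch of the renamed keyword argument introduced by the patch above — the setting name and value mirror the updated tests; the in-memory instance is illustrative:

.. code-block:: python

    from datasette.app import Datasette

    # config= is now settings= on the Datasette constructor
    ds = Datasette([], memory=True, settings={"max_returned_rows": 50})
    assert ds.setting("max_returned_rows") == 50
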
    From bbc4756f9e8180c7a40c57f8a35e39dee7be7807 Mon Sep 17 00:00:00 2001
    From: Simon Willison 
    Date: Thu, 12 Aug 2021 20:54:25 -0700
    Subject: [PATCH 0558/1705] Settings fix, refs #1433
    
    ---
     datasette/app.py | 2 +-
     1 file changed, 1 insertion(+), 1 deletion(-)
    
    diff --git a/datasette/app.py b/datasette/app.py
    index 8cbaaf9f..adc543ef 100644
    --- a/datasette/app.py
    +++ b/datasette/app.py
    @@ -277,7 +277,7 @@ class Datasette:
             self.static_mounts = static_mounts or []
             if config_dir and (config_dir / "config.json").exists():
                 raise StartupError("config.json should be renamed to settings.json")
    -        if config_dir and (config_dir / "settings.json").exists() and not config:
    +        if config_dir and (config_dir / "settings.json").exists() and not settings:
                 config = json.loads((config_dir / "settings.json").read_text())
             self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
             self.renderers = {}  # File extension -> (renderer, can_render) functions
    
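The corrected guard means an explicit ``settings=`` argument takes precedence over a ``settings.json`` file found in a configuration directory. A sketch of that precedence — the directory name and setting value here are illustrative assumptions:

.. code-block:: python

    from pathlib import Path
    from datasette.app import Datasette

    # settings.json is only consulted when no explicit settings=
    # dictionary is passed to the constructor
    ds = Datasette(
        [], config_dir=Path("my-project"), settings={"default_page_size": 20}
    )
    assert ds.setting("default_page_size") == 20
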
    From 2883098770fc66e50183b2b231edbde20848d4d6 Mon Sep 17 00:00:00 2001
    From: Simon Willison 
    Date: Thu, 12 Aug 2021 22:10:07 -0700
    Subject: [PATCH 0559/1705] Fixed config_dir mode, refs #1432
    
    ---
     datasette/app.py | 2 +-
     1 file changed, 1 insertion(+), 1 deletion(-)
    
    diff --git a/datasette/app.py b/datasette/app.py
    index adc543ef..06db740e 100644
    --- a/datasette/app.py
    +++ b/datasette/app.py
    @@ -278,7 +278,7 @@ class Datasette:
             if config_dir and (config_dir / "config.json").exists():
                 raise StartupError("config.json should be renamed to settings.json")
             if config_dir and (config_dir / "settings.json").exists() and not settings:
    -            config = json.loads((config_dir / "settings.json").read_text())
    +            settings = json.loads((config_dir / "settings.json").read_text())
             self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
             self.renderers = {}  # File extension -> (renderer, can_render) functions
             self.version_note = version_note
    
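Together with the previous patch, this makes configuration directory mode load ``settings.json`` again. A minimal sketch of the repaired behaviour, assuming an illustrative temporary directory and setting:

.. code-block:: python

    import json
    import tempfile
    from pathlib import Path

    from datasette.app import Datasette

    with tempfile.TemporaryDirectory() as tmp:
        config_dir = Path(tmp)
        (config_dir / "settings.json").write_text(
            json.dumps({"default_page_size": 10})
        )
        # The loaded JSON is now assigned to settings, not to the
        # stale config variable, so the value reaches the instance
        ds = Datasette([], config_dir=config_dir)
        assert ds.setting("default_page_size") == 10
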
    From adb5b70de5cec3c3dd37184defe606a082c232cf Mon Sep 17 00:00:00 2001
    From: Simon Willison 
    Date: Mon, 16 Aug 2021 11:56:32 -0700
    Subject: [PATCH 0560/1705] Show count of facet values if ?_facet_size=max,
     closes #1423
    
    ---
     datasette/static/app.css       |  5 +++++
     datasette/templates/table.html |  4 +++-
     datasette/views/table.py       |  1 +
     tests/test_html.py             | 22 +++++++++++++++++++++-
     4 files changed, 30 insertions(+), 2 deletions(-)
    
    diff --git a/datasette/static/app.css b/datasette/static/app.css
    index bf068fdf..af3e14d5 100644
    --- a/datasette/static/app.css
    +++ b/datasette/static/app.css
    @@ -633,6 +633,11 @@ form button[type=button] {
         width: 250px;
         margin-right: 15px;
     }
    +.facet-info-total {
    +    font-size: 0.8em;
    +    color: #666;
    +    padding-right: 0.25em;
    +}
     .facet-info li,
     .facet-info ul {
         margin: 0;
    diff --git a/datasette/templates/table.html b/datasette/templates/table.html
    index a28945ad..6ba301b5 100644
    --- a/datasette/templates/table.html
    +++ b/datasette/templates/table.html
    @@ -156,7 +156,9 @@
             {% for facet_info in sorted_facet_results %}
                 

    - {{ facet_info.name }}{% if facet_info.type != "column" %} ({{ facet_info.type }}){% endif %} + {{ facet_info.name }}{% if facet_info.type != "column" %} ({{ facet_info.type }}){% endif %} + {% if show_facet_counts %} {% if facet_info.truncated %}>{% endif %}{{ facet_info.results|length }}{% endif %} + {% if facet_info.hideable %} {% endif %} diff --git a/datasette/views/table.py b/datasette/views/table.py index 486a6131..83f7c7cb 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -928,6 +928,7 @@ class TableView(RowTableShared): key=lambda f: (len(f["results"]), f["name"]), reverse=True, ), + "show_facet_counts": special_args.get("_facet_size") == "max", "extra_wheres_for_ui": extra_wheres_for_ui, "form_hidden_args": form_hidden_args, "is_sortable": any(c["sortable"] for c in display_columns), diff --git a/tests/test_html.py b/tests/test_html.py index 90fcdae7..e73ccd2f 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -479,7 +479,7 @@ def test_facet_display(app_client): for div in divs: actual.append( { - "name": div.find("strong").text, + "name": div.find("strong").text.split()[0], "items": [ { "name": a.text, @@ -1797,3 +1797,23 @@ def test_column_metadata(app_client): soup.select("th[data-column=address]")[0]["data-column-description"] == "The street address for the attraction" ) + + +@pytest.mark.parametrize("use_facet_size_max", (True, False)) +def test_facet_total_shown_if_facet_max_size(use_facet_size_max): + # https://github.com/simonw/datasette/issues/1423 + with make_app_client(settings={"max_returned_rows": 100}) as client: + path = "/fixtures/sortable?_facet=content&_facet=pk1" + if use_facet_size_max: + path += "&_facet_size=max" + response = client.get(path) + assert response.status == 200 + fragments = ( + '>100', + '8', + ) + for fragment in fragments: + if use_facet_size_max: + assert fragment in response.text + else: + assert fragment not in response.text From d84e574e59c51ddcd6cf60a6f9b3d45182daf824 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 19 Aug 2021 14:09:38 -0700 Subject: [PATCH 0561/1705] Ability to deploy demos of branches * Ability to deploy additional branch demos, closes #1442 * Only run tests before deploy on main branch * Documentation for continuous deployment --- .github/workflows/deploy-latest.yml | 8 +++++++- docs/contributing.rst | 11 +++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 849adb40..1a07503a 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -29,6 +29,7 @@ jobs: python -m pip install -e .[docs] python -m pip install sphinx-to-sqlite==0.1a1 - name: Run tests + if: ${{ github.ref == 'refs/heads/main' }} run: | pytest -n auto -m "not serial" pytest -m "serial" @@ -50,6 +51,8 @@ jobs: run: |- gcloud config set run/region us-central1 gcloud config set project datasette-222320 + export SUFFIX="-${GITHUB_REF#refs/heads/}" + export SUFFIX=${SUFFIX#-main} datasette publish cloudrun fixtures.db extra_database.db \ -m fixtures.json \ --plugins-dir=plugins \ @@ -57,7 +60,10 @@ jobs: --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \ --install=pysqlite3-binary \ - --service=datasette-latest + --service "datasette-latest$SUFFIX" + - name: Deploy to docs as well (only for main) + if: ${{ github.ref == 'refs/heads/main' }} + run: |- # Deploy docs.db to a different service datasette publish cloudrun docs.db 
\ --branch=$GITHUB_SHA \ diff --git a/docs/contributing.rst b/docs/contributing.rst index 8a638e0b..07f2a0e4 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -202,6 +202,17 @@ For added productivity, you can use use `sphinx-autobuild `__ is re-deployed automatically to Google Cloud Run for every push to ``main`` that passes the test suite. This is implemented by the GitHub Actions workflow at `.github/workflows/deploy-latest.yml `__. + +Specific branches can also be set to automatically deploy by adding them to the ``on: push: branches`` block at the top of the workflow YAML file. Branches configured in this way will be deployed to a new Cloud Run service whether or not their tests pass. + +The Cloud Run URL for a branch demo can be found in the GitHub Actions logs. + .. _contributing_release: Release process From 4eb3ae40fb223a66ae574fb84fac99e96183b08d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 19 Aug 2021 14:17:44 -0700 Subject: [PATCH 0562/1705] Don't bother building docs if not on main Refs ##1442 --- .github/workflows/deploy-latest.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 1a07503a..1ae96e89 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -36,6 +36,7 @@ jobs: - name: Build fixtures.db run: python tests/fixtures.py fixtures.db fixtures.json plugins --extra-db-filename extra_database.db - name: Build docs.db + if: ${{ github.ref == 'refs/heads/main' }} run: |- cd docs sphinx-build -b xml . _build From 7e15422aacfa9e9735cb9f9beaa32250edbf4905 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 19 Aug 2021 14:23:43 -0700 Subject: [PATCH 0563/1705] Documentation for datasette.databases property, closes #1443 --- docs/internals.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/internals.rst b/docs/internals.rst index 058a8969..d5db7ffa 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -196,6 +196,17 @@ Datasette class This object is an instance of the ``Datasette`` class, passed to many plugin hooks as an argument called ``datasette``. +.. _datasette_databases: + +.databases +---------- + +Property exposing an ordered dictionary of databases currently connected to Datasette. + +The dictionary keys are the name of the database that is used in the URL - e.g. ``/fixtures`` would have a key of ``"fixtures"``. The values are :ref:`internals_database` instances. + +All databases are listed, irrespective of user permissions. This means that the ``_internal`` database will always be listed here. + .. _datasette_plugin_config: .plugin_config(plugin_name, database=None, table=None) From 92a99d969c01633dba14cceebeda65daaedaec17 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 24 Aug 2021 11:13:42 -0700 Subject: [PATCH 0564/1705] Added not-footer wrapper div, refs #1446 --- datasette/templates/base.html | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/datasette/templates/base.html b/datasette/templates/base.html index e61edc4f..c9aa7e31 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -13,6 +13,7 @@ {% block extra_head %}{% endblock %} +
+</div>
    {% block footer %}{% include "_footer.html" %}{% endblock %}
    {% include "_close_open_menus.html" %} From 93c3a7ffbfb3378f743ebce87d033cf1ce7689e0 Mon Sep 17 00:00:00 2001 From: Tim Sherratt Date: Wed, 25 Aug 2021 11:28:58 +1000 Subject: [PATCH 0565/1705] Remove underscore from search mode parameter name (#1447) The text refers to the parameter as `searchmode` but the `metadata.json` example uses `search_mode`. The latter doesn't actually seem to work. --- docs/full_text_search.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/full_text_search.rst b/docs/full_text_search.rst index f549296f..90b2e8c1 100644 --- a/docs/full_text_search.rst +++ b/docs/full_text_search.rst @@ -70,7 +70,7 @@ Here is an example which enables full-text search (with SQLite advanced search o "display_ads": { "fts_table": "ads_fts", "fts_pk": "id", - "search_mode": "raw" + "searchmode": "raw" } } } From 5161422b7fa249c6b7d6dc47ec6f483d3fdbd170 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Aug 2021 18:29:26 -0700 Subject: [PATCH 0566/1705] Update trustme requirement from <0.9,>=0.7 to >=0.7,<0.10 (#1433) Updates the requirements on [trustme](https://github.com/python-trio/trustme) to permit the latest version. - [Release notes](https://github.com/python-trio/trustme/releases) - [Commits](https://github.com/python-trio/trustme/compare/v0.7.0...v0.9.0) --- updated-dependencies: - dependency-name: trustme dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 65e99848..a3866515 100644 --- a/setup.py +++ b/setup.py @@ -73,7 +73,7 @@ setup( "beautifulsoup4>=4.8.1,<4.10.0", "black==21.6b0", "pytest-timeout>=1.4.2,<1.5", - "trustme>=0.7,<0.9", + "trustme>=0.7,<0.10", ], "rich": ["rich"], }, From a1a33bb5822214be1cebd98cd858b2058d91a4aa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Aug 2021 18:29:55 -0700 Subject: [PATCH 0567/1705] Bump black from 21.6b0 to 21.7b0 (#1400) Bumps [black](https://github.com/psf/black) from 21.6b0 to 21.7b0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/commits) --- updated-dependencies: - dependency-name: black dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index a3866515..84f32087 100644 --- a/setup.py +++ b/setup.py @@ -71,7 +71,7 @@ setup( "pytest-xdist>=2.2.1,<2.4", "pytest-asyncio>=0.10,<0.16", "beautifulsoup4>=4.8.1,<4.10.0", - "black==21.6b0", + "black==21.7b0", "pytest-timeout>=1.4.2,<1.5", "trustme>=0.7,<0.10", ], From 3655bb49a464bcc8004e491cc4d4de292f1acd62 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 27 Aug 2021 17:48:54 -0700 Subject: [PATCH 0568/1705] Better default help text, closes #1450 --- datasette/cli.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/datasette/cli.py b/datasette/cli.py index ea6da748..65da5613 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -123,7 +123,11 @@ def sqlite_extensions(fn): @click.version_option(version=__version__) def cli(): """ - Datasette! 
+ Datasette is an open source multi-tool for exploring and publishing data + + \b + About Datasette: https://datasette.io/ + Full documentation: https://docs.datasette.io/ """ From 30c18576d603366dc3bd83ba50de1b7e70844430 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 27 Aug 2021 18:39:42 -0700 Subject: [PATCH 0569/1705] register_commands() plugin hook, closes #1449 --- datasette/cli.py | 3 +++ datasette/hookspecs.py | 5 ++++ docs/plugin_hooks.rst | 45 +++++++++++++++++++++++++++++++++ tests/test_plugins.py | 57 +++++++++++++++++++++++++++++++++++++++++- 4 files changed, 109 insertions(+), 1 deletion(-) diff --git a/datasette/cli.py b/datasette/cli.py index 65da5613..22e2338a 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -595,6 +595,9 @@ def serve( uvicorn.run(ds.app(), **uvicorn_kwargs) +pm.hook.register_commands(cli=cli) + + async def check_databases(ds): # Run check_connection against every connected database # to confirm they are all usable diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 56c79d23..1d4e3b27 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -79,6 +79,11 @@ def register_routes(datasette): """Register URL routes: return a list of (regex, view_function) pairs""" +@hookspec +def register_commands(cli): + """Register additional CLI commands, e.g. 'datasette mycommand ...'""" + + @hookspec def actor_from_request(datasette, request): """Return an actor dictionary based on the incoming request""" diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 5cdb1623..a6fe1071 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -587,6 +587,51 @@ See :ref:`writing_plugins_designing_urls` for tips on designing the URL routes u Examples: `datasette-auth-github `__, `datasette-psutil `__ +.. _plugin_register_commands: + +register_commands(cli) +---------------------- + +``cli`` - the root Datasette `Click command group `__ + Use this to register additional CLI commands + +Register additional CLI commands that can be run using ``datsette yourcommand ...``. This provides a mechanism by which plugins can add new CLI commands to Datasette. + +This example registers a new ``datasette verify file1.db file2.db`` command that checks if the provided file paths are valid SQLite databases: + +.. code-block:: python + + from datasette import hookimpl + import click + import sqlite3 + + @hookimpl + def register_commands(cli): + @cli.command() + @click.argument("files", type=click.Path(exists=True), nargs=-1) + def verify(files): + "Verify that files can be opened by Datasette" + for file in files: + conn = sqlite3.connect(str(file)) + try: + conn.execute("select * from sqlite_master") + except sqlite3.DatabaseError: + raise click.ClickException("Invalid database: {}".format(file)) + +The new command can then be executed like so:: + + datasette verify fixtures.db + +Help text (from the docstring for the function plus any defined Click arguments or options) will become available using:: + + datasette verify --help + +Plugins can register multiple commands by making multiple calls to the ``@cli.command()`` decorator.Consult the `Click documentation `__ for full details on how to build a CLI command, including how to define arguments and options. + +Note that ``register_commands()`` plugins cannot used with the :ref:`--plugins-dir mechanism ` - they need to be installed into the same virtual environment as Datasette using ``pip install``. 
Provided it has a ``setup.py`` file (see :ref:`writing_plugins_packaging`) you can run ``pip install`` directly against the directory in which you are developing your plugin like so:: + + pip install -e path/to/my/datasette-plugin + .. _plugin_register_facet_classes: register_facet_classes() diff --git a/tests/test_plugins.py b/tests/test_plugins.py index ec8ff0c5..a024c39b 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -6,13 +6,15 @@ from .fixtures import ( TEMP_PLUGIN_SECRET_FILE, TestClient as _TestClient, ) # noqa +from click.testing import CliRunner from datasette.app import Datasette -from datasette import cli +from datasette import cli, hookimpl from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm from datasette.utils.sqlite import sqlite3 from datasette.utils import CustomRow from jinja2.environment import Template import base64 +import importlib import json import os import pathlib @@ -902,3 +904,56 @@ def test_hook_get_metadata(app_client): assert "Hello from local metadata" == meta["databases"]["from-local"]["title"] assert "Hello from the plugin hook" == meta["databases"]["from-hook"]["title"] pm.hook.get_metadata = og_pm_hook_get_metadata + + +def _extract_commands(output): + lines = output.split("Commands:\n", 1)[1].split("\n") + return {line.split()[0].replace("*", "") for line in lines if line.strip()} + + +def test_hook_register_commands(): + # Without the plugin should have seven commands + runner = CliRunner() + result = runner.invoke(cli.cli, "--help") + commands = _extract_commands(result.output) + assert commands == { + "serve", + "inspect", + "install", + "package", + "plugins", + "publish", + "uninstall", + } + + # Now install a plugin + class VerifyPlugin: + __name__ = "VerifyPlugin" + + @hookimpl + def register_commands(self, cli): + @cli.command() + def verify(): + pass + + @cli.command() + def unverify(): + pass + + pm.register(VerifyPlugin(), name="verify") + importlib.reload(cli) + result2 = runner.invoke(cli.cli, "--help") + commands2 = _extract_commands(result2.output) + assert commands2 == { + "serve", + "inspect", + "install", + "package", + "plugins", + "publish", + "uninstall", + "verify", + "unverify", + } + pm.unregister(name="verify") + importlib.reload(cli) From d3ea36713194e3d92ed4c066337400146c921d0e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 27 Aug 2021 18:55:54 -0700 Subject: [PATCH 0570/1705] Release 0.59a2 Refs #942, #1421, #1423, #1431, #1443, #1446, #1449 --- datasette/version.py | 2 +- docs/changelog.rst | 13 +++++++++++++ docs/plugin_hooks.rst | 2 +- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index f5fbfb3f..87b18fab 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.59a1" +__version__ = "0.59a2" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 1406a7ca..737a151b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,19 @@ Changelog ========= +.. _v0_59a2: + +0.59a2 (2021-08-27) +------------------- + +- Columns can now have associated metadata descriptions in ``metadata.json``, see :ref:`metadata_column_descriptions`. (:issue:`942`) +- New :ref:`register_commands() ` plugin hook allows plugins to register additional Datasette CLI commands, e.g. ``datasette mycommand file.db``. (:issue:`1449`) +- Adding ``?_facet_size=max`` to a table page now shows the number of unique values in each facet. 
(:issue:`1423`) +- Code that figures out which named parameters a SQL query takes in order to display form fields for them is no longer confused by strings that contain colon characters. (:issue:`1421`) +- Renamed ``--help-config`` option to ``--help-settings``. (:issue:`1431`) +- ``datasette.databases`` property is now a documented API. (:issue:`1443`) +- Datasette base template now wraps everything other than the ``
<footer>`` in a ``<div class="not-footer">

    " in response.text + assert ">Table With Space In Name 🔒

    " in response.text + # Queries + assert ">from_async_hook 🔒" in response.text + assert ">query_two" in response.text + # Views + assert ">paginated_view 🔒" in response.text + assert ">simple_view" in response.text + finally: + cascade_app_client.ds._metadata_local = previous_metadata From 602c0888ce633000cfae42be00de474ef681bda7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 23 Oct 2022 20:07:09 -0700 Subject: [PATCH 0924/1705] Release 0.63a1 Refs #1646, #1819, #1825, #1829, #1831, #1832, #1834, #1844, #1848 --- datasette/version.py | 2 +- docs/changelog.rst | 16 +++++++++++++++- docs/internals.rst | 2 +- docs/performance.rst | 2 ++ 4 files changed, 19 insertions(+), 3 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index e5ad585f..eb36da45 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.63a0" +__version__ = "0.63a1" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index f5cf03e8..dd4c20b7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,20 @@ Changelog ========= +.. _v0_63a1: + +0.63a1 (2022-10-23) +------------------- + +- SQL query is now re-displayed when terminated with a time limit error. (:issue:`1819`) +- New documentation on :ref:`deploying_openrc` - thanks, Adam Simpson. (`#1825 `__) +- The :ref:`inspect data ` mechanism is now used to speed up server startup - thanks, Forest Gregg. (:issue:`1834`) +- In :ref:`config_dir` databases with filenames ending in ``.sqlite`` or ``.sqlite3`` are now automatically added to the Datasette instance. (:issue:`1646`) +- Breadcrumb navigation display now respects the current user's permissions. (:issue:`1831`) +- Screenshots in the documentation are now maintained using `shot-scraper `__, as described in `Automating screenshots for the Datasette documentation using shot-scraper `__. (:issue:`1844`) +- The :ref:`datasette.check_visibility() ` method now accepts an optional ``permissions=`` list, allowing it to take multiple permissions into account at once when deciding if something should be shown as public or private. This has been used to correctly display padlock icons in more places in the Datasette interface. (:issue:`1829`) + + .. _v0_63a0: 0.63a0 (2022-09-26) @@ -91,7 +105,7 @@ Datasette also now requires Python 3.7 or higher. - Python 3.6 is no longer supported. (:issue:`1577`) - Tests now run against Python 3.11-dev. (:issue:`1621`) - New :ref:`datasette.ensure_permissions(actor, permissions) ` internal method for checking multiple permissions at once. (:issue:`1675`) -- New :ref:`datasette.check_visibility(actor, action, resource=None) ` internal method for checking if a user can see a resource that would otherwise be invisible to unauthenticated users. (:issue:`1678`) +- New :ref:`datasette.check_visibility(actor, action, resource=None) ` internal method for checking if a user can see a resource that would otherwise be invisible to unauthenticated users. (:issue:`1678`) - Table and row HTML pages now include a ```` element and return a ``Link: URL; rel="alternate"; type="application/json+datasette"`` HTTP header pointing to the JSON version of those pages. (:issue:`1533`) - ``Access-Control-Expose-Headers: Link`` is now added to the CORS headers, allowing remote JavaScript to access that header. - Canned queries are now shown at the top of the database page, directly below the SQL editor. Previously they were shown at the bottom, below the list of tables. 
(:issue:`1612`) diff --git a/docs/internals.rst b/docs/internals.rst index 92f4efee..c3892a7c 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -364,7 +364,7 @@ This is useful when you need to check multiple permissions at once. For example, ], ) -.. _datasette_check_visibilty: +.. _datasette_check_visibility: await .check_visibility(actor, action=None, resource=None, permissions=None) ---------------------------------------------------------------------------- diff --git a/docs/performance.rst b/docs/performance.rst index 89bbf5ae..4427757c 100644 --- a/docs/performance.rst +++ b/docs/performance.rst @@ -24,6 +24,8 @@ To open a file in immutable mode pass it to the datasette command using the ``-i When you open a file in immutable mode like this Datasette will also calculate and cache the row counts for each table in that database when it first starts up, further improving performance. +.. _performance_inspect: + Using "datasette inspect" ------------------------- From a0dd5fa02fb1e6d5477b962a2062f1a4be3354a5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 23 Oct 2022 20:14:49 -0700 Subject: [PATCH 0925/1705] Fixed typo in release notes --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index dd4c20b7..2255dcce 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -31,7 +31,7 @@ Changelog - ``Database(is_mutable=)`` now defaults to ``True``. (:issue:`1808`) - Non-JavaScript textarea now increases height to fit the SQL query. (:issue:`1786`) - More detailed command descriptions on the :ref:`CLI reference ` page. (:issue:`1787`) -- Datasette no longer enforces upper bounds on its depenedencies. (:issue:`1800`) +- Datasette no longer enforces upper bounds on its dependencies. (:issue:`1800`) - Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. (`#1794 `__) - The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. 
(:issue:`1816`) From 83adf55b2da83fd9a227f7e4c8506d72def72294 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 23 Oct 2022 20:28:15 -0700 Subject: [PATCH 0926/1705] Deploy one-dot-zero branch preview --- .github/workflows/deploy-latest.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 2b94a7f1..43a843ed 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -3,7 +3,8 @@ name: Deploy latest.datasette.io on: push: branches: - - main + - main + - 1.0-dev permissions: contents: read @@ -68,6 +69,8 @@ jobs: gcloud config set project datasette-222320 export SUFFIX="-${GITHUB_REF#refs/heads/}" export SUFFIX=${SUFFIX#-main} + # Replace 1.0 with one-dot-zero in SUFFIX + export SUFFIX=${SUFFIX//1.0/one-dot-zero} datasette publish cloudrun fixtures.db fixtures2.db extra_database.db \ -m fixtures.json \ --plugins-dir=plugins \ From e135da8efe8fccecf9a137a941cc1f1db0db583a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 07:13:43 -0700 Subject: [PATCH 0927/1705] Python 3.11 in CI --- .github/workflows/publish.yml | 16 ++++++++-------- .github/workflows/test.yml | 8 ++++---- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 9ef09d2e..fa608055 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -12,14 +12,14 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip @@ -37,12 +37,12 @@ jobs: runs-on: ubuntu-latest needs: [test] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: '3.10' - - uses: actions/cache@v2 + python-version: '3.11' + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e38d5ee9..886f649a 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -10,14 +10,14 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11-dev"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip From 02ae1a002918eb91f794e912c32742559da34cf5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 11:59:03 -0700 Subject: [PATCH 0928/1705] Upgrade Docker images to Python 3.11, closes #1853 --- Dockerfile | 2 +- datasette/utils/__init__.py | 2 +- demos/apache-proxy/Dockerfile | 2 +- docs/publish.rst | 2 +- tests/test_package.py | 2 +- tests/test_publish_cloudrun.py | 4 ++-- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index 
ee7ed957..9a8f06cf 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.6-slim-bullseye as build +FROM python:3.11.0-slim-bullseye as build # Version of Datasette to install, e.g. 0.55 # docker build . -t datasette --build-arg VERSION=0.55 diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 2bdea673..803ba96d 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -390,7 +390,7 @@ def make_dockerfile( "SQLITE_EXTENSIONS" ] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so" return """ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app {apt_get_extras} diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile index 70b33bec..9a8448da 100644 --- a/demos/apache-proxy/Dockerfile +++ b/demos/apache-proxy/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye RUN apt-get update && \ apt-get install -y apache2 supervisor && \ diff --git a/docs/publish.rst b/docs/publish.rst index d817ed31..4ba94792 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -146,7 +146,7 @@ Here's example output for the package command:: $ datasette package parlgov.db --extra-options="--setting sql_time_limit_ms 2500" Sending build context to Docker daemon 4.459MB - Step 1/7 : FROM python:3.10.6-slim-bullseye + Step 1/7 : FROM python:3.11.0-slim-bullseye ---> 79e1dc9af1c1 Step 2/7 : COPY . /app ---> Using cache diff --git a/tests/test_package.py b/tests/test_package.py index ac15e61e..f05f3ece 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -12,7 +12,7 @@ class CaptureDockerfile: EXPECTED_DOCKERFILE = """ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index e64534d2..158a090e 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -242,7 +242,7 @@ def test_publish_cloudrun_plugin_secrets( ) expected = textwrap.dedent( r""" - FROM python:3.10.6-slim-bullseye + FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app @@ -309,7 +309,7 @@ def test_publish_cloudrun_apt_get_install( ) expected = textwrap.dedent( r""" - FROM python:3.10.6-slim-bullseye + FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app From 9676b2deb07cff20247ba91dad3e84a4ab0b00d1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 11:59:03 -0700 Subject: [PATCH 0929/1705] Upgrade Docker images to Python 3.11, closes #1853 --- Dockerfile | 2 +- datasette/utils/__init__.py | 2 +- demos/apache-proxy/Dockerfile | 2 +- docs/publish.rst | 2 +- tests/test_package.py | 2 +- tests/test_publish_cloudrun.py | 4 ++-- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index ee7ed957..9a8f06cf 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.6-slim-bullseye as build +FROM python:3.11.0-slim-bullseye as build # Version of Datasette to install, e.g. 0.55 # docker build . -t datasette --build-arg VERSION=0.55 diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 2bdea673..803ba96d 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -390,7 +390,7 @@ def make_dockerfile( "SQLITE_EXTENSIONS" ] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so" return """ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye COPY . 
/app WORKDIR /app {apt_get_extras} diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile index 70b33bec..9a8448da 100644 --- a/demos/apache-proxy/Dockerfile +++ b/demos/apache-proxy/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye RUN apt-get update && \ apt-get install -y apache2 supervisor && \ diff --git a/docs/publish.rst b/docs/publish.rst index d817ed31..4ba94792 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -146,7 +146,7 @@ Here's example output for the package command:: $ datasette package parlgov.db --extra-options="--setting sql_time_limit_ms 2500" Sending build context to Docker daemon 4.459MB - Step 1/7 : FROM python:3.10.6-slim-bullseye + Step 1/7 : FROM python:3.11.0-slim-bullseye ---> 79e1dc9af1c1 Step 2/7 : COPY . /app ---> Using cache diff --git a/tests/test_package.py b/tests/test_package.py index ac15e61e..f05f3ece 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -12,7 +12,7 @@ class CaptureDockerfile: EXPECTED_DOCKERFILE = """ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index e64534d2..158a090e 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -242,7 +242,7 @@ def test_publish_cloudrun_plugin_secrets( ) expected = textwrap.dedent( r""" - FROM python:3.10.6-slim-bullseye + FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app @@ -309,7 +309,7 @@ def test_publish_cloudrun_apt_get_install( ) expected = textwrap.dedent( r""" - FROM python:3.10.6-slim-bullseye + FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app From 613ad05c095f92653221db267ef53d54d00cdfbb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 12:16:48 -0700 Subject: [PATCH 0930/1705] Don't need pysqlite3-binary any more, refs #1853 --- .github/workflows/deploy-latest.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 2b94a7f1..e423b8fa 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -13,12 +13,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out datasette - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: "3.10" - - uses: actions/cache@v2 + python-version: "3.11" + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip @@ -74,7 +74,6 @@ jobs: --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \ - --install=pysqlite3-binary \ --service "datasette-latest$SUFFIX" - name: Deploy to docs as well (only for main) if: ${{ github.ref == 'refs/heads/main' }} From c7dd76c26257ded5bcdfd0570e12412531b8b88f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 12:42:21 -0700 Subject: [PATCH 0931/1705] Poll until servers start, refs #1854 --- tests/conftest.py | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 215853b3..f4638a14 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,4 @@ +import httpx import os import pathlib import pytest @@ -110,8 +111,13 @@ def ds_localhost_http_server(): # Avoid FileNotFoundError: [Errno 2] No such file or directory: 
cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + # Loop until port 8041 serves traffic + while True: + try: + httpx.get("http://localhost:8041/") + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc @@ -146,8 +152,12 @@ def ds_localhost_https_server(tmp_path_factory): stderr=subprocess.STDOUT, cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + while True: + try: + httpx.get("https://localhost:8042/", verify=client_cert) + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc, client_cert @@ -168,8 +178,15 @@ def ds_unix_domain_socket_server(tmp_path_factory): stderr=subprocess.STDOUT, cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + # Poll until available + transport = httpx.HTTPTransport(uds=uds) + client = httpx.Client(transport=transport) + while True: + try: + client.get("http://localhost/_memory.json") + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc, uds From 6d085af28c63c28ecda388fc0552c91f756be0c6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 07:13:43 -0700 Subject: [PATCH 0932/1705] Python 3.11 in CI --- .github/workflows/publish.yml | 16 ++++++++-------- .github/workflows/test.yml | 8 ++++---- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 9ef09d2e..fa608055 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -12,14 +12,14 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip @@ -37,12 +37,12 @@ jobs: runs-on: ubuntu-latest needs: [test] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: '3.10' - - uses: actions/cache@v2 + python-version: '3.11' + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e38d5ee9..886f649a 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -10,14 +10,14 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11-dev"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip From 05b479224fa57af3ab2d03769edd5081dad62a19 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 12:16:48 -0700 Subject: [PATCH 0933/1705] 
Don't need pysqlite3-binary any more, refs #1853 --- .github/workflows/deploy-latest.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 43a843ed..5598dc12 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -14,12 +14,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out datasette - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: "3.10" - - uses: actions/cache@v2 + python-version: "3.11" + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip @@ -77,7 +77,6 @@ jobs: --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \ - --install=pysqlite3-binary \ --service "datasette-latest$SUFFIX" - name: Deploy to docs as well (only for main) if: ${{ github.ref == 'refs/heads/main' }} From f9ae92b37796f7f559d57b1ee9718aa4d43547e8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 12:42:21 -0700 Subject: [PATCH 0934/1705] Poll until servers start, refs #1854 --- tests/conftest.py | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 215853b3..f4638a14 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,4 @@ +import httpx import os import pathlib import pytest @@ -110,8 +111,13 @@ def ds_localhost_http_server(): # Avoid FileNotFoundError: [Errno 2] No such file or directory: cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + # Loop until port 8041 serves traffic + while True: + try: + httpx.get("http://localhost:8041/") + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc @@ -146,8 +152,12 @@ def ds_localhost_https_server(tmp_path_factory): stderr=subprocess.STDOUT, cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + while True: + try: + httpx.get("https://localhost:8042/", verify=client_cert) + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc, client_cert @@ -168,8 +178,15 @@ def ds_unix_domain_socket_server(tmp_path_factory): stderr=subprocess.STDOUT, cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + # Poll until available + transport = httpx.HTTPTransport(uds=uds) + client = httpx.Client(transport=transport) + while True: + try: + client.get("http://localhost/_memory.json") + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc, uds From 42f8b402e6aa56af4bbe921e346af8df42acd50f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 17:07:58 -0700 Subject: [PATCH 0935/1705] Initial prototype of create API token page, refs #1852 --- datasette/app.py | 5 ++ datasette/templates/create_token.html | 83 +++++++++++++++++++++++++++ datasette/views/special.py | 54 +++++++++++++++++ 3 files changed, 142 insertions(+) create mode 100644 datasette/templates/create_token.html diff --git a/datasette/app.py b/datasette/app.py index 9df16558..cab9d142 100644 --- a/datasette/app.py +++ 
b/datasette/app.py @@ -33,6 +33,7 @@ from .views.special import ( JsonDataView, PatternPortfolioView, AuthTokenView, + CreateTokenView, LogoutView, AllowDebugView, PermissionsDebugView, @@ -1212,6 +1213,10 @@ class Datasette: AuthTokenView.as_view(self), r"/-/auth-token$", ) + add_route( + CreateTokenView.as_view(self), + r"/-/create-token$", + ) add_route( LogoutView.as_view(self), r"/-/logout$", diff --git a/datasette/templates/create_token.html b/datasette/templates/create_token.html new file mode 100644 index 00000000..a94881ed --- /dev/null +++ b/datasette/templates/create_token.html @@ -0,0 +1,83 @@ +{% extends "base.html" %} + +{% block title %}Create an API token{% endblock %} + +{% block content %} + +
+<h1>Create an API token</h1>
+
+<p>This token will allow API access with the same abilities as your current user.</p>
+
+{% if errors %}
+  {% for error in errors %}
+    <p class="message-error">{{ error }}</p>
+  {% endfor %}
+{% endif %}
+
+<form action="{{ urls.path('-/create-token') }}" method="post">
+  <select name="expire_type">
+    <option value="">Token never expires</option>
+    <option value="minutes">Expires after N minutes</option>
+    <option value="hours">Expires after N hours</option>
+    <option value="days">Expires after N days</option>
+  </select>
+  <input type="text" name="expire_duration">
+  <input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
+  <input type="submit" value="Create token">
+</form>
+
+{% if token %}
+  <h2>Your API token</h2>
+  <input type="text" class="copyable" value="{{ token }}">
+  <details>
+    <summary>Token details</summary>
+    <pre>{{ token_bits|tojson }}</pre>
+  </details>
+{% endif %}
+
+<!-- Markup above reconstructed from the view code and tests; the
+     original attributes and page script were lost in extraction. -->
+
+{% endblock %}

diff --git a/datasette/views/special.py b/datasette/views/special.py
index dd834528..f2e69412 100644
--- a/datasette/views/special.py
+++ b/datasette/views/special.py
@@ -3,6 +3,7 @@ from datasette.utils.asgi import Response, Forbidden
 from datasette.utils import actor_matches_allow, add_cors_headers
 from .base import BaseView
 import secrets
+import time


 class JsonDataView(BaseView):
@@ -163,3 +164,56 @@ class MessagesDebugView(BaseView):
         else:
             datasette.add_message(request, message, getattr(datasette, message_type))
         return Response.redirect(self.ds.urls.instance())
+
+
+class CreateTokenView(BaseView):
+    name = "create_token"
+    has_json_alternate = False
+
+    async def get(self, request):
+        if not request.actor:
+            raise Forbidden("You must be logged in to create a token")
+        return await self.render(
+            ["create_token.html"],
+            request,
+            {"actor": request.actor},
+        )
+
+    async def post(self, request):
+        if not request.actor:
+            raise Forbidden("You must be logged in to create a token")
+        post = await request.post_vars()
+        expires = None
+        errors = []
+        if post.get("expire_type"):
+            duration = post.get("expire_duration")
+            if not duration or not duration.isdigit() or not int(duration) > 0:
+                errors.append("Invalid expire duration")
+            else:
+                unit = post["expire_type"]
+                if unit == "minutes":
+                    expires = int(duration) * 60
+                elif unit == "hours":
+                    expires = int(duration) * 60 * 60
+                elif unit == "days":
+                    expires = int(duration) * 60 * 60 * 24
+                else:
+                    errors.append("Invalid expire duration unit")
+        token_bits = None
+        token = None
+        if not errors:
+            token_bits = {
+                "a": request.actor,
+                "e": (int(time.time()) + expires) if expires else None,
+            }
+            token = self.ds.sign(token_bits, "token")
+        return await self.render(
+            ["create_token.html"],
+            request,
+            {
+                "actor": request.actor,
+                "errors": errors,
+                "token": token,
+                "token_bits": token_bits,
+            },
+        )

From 68ccb7578b5d3bf68b86fb2f5cf8753098dfe075 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 25 Oct 2022 18:40:07 -0700
Subject: [PATCH 0936/1705] dstok_ prefix for tokens

Refs https://github.com/simonw/datasette/issues/1852#issuecomment-1291290451
---
 datasette/views/special.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/datasette/views/special.py b/datasette/views/special.py
index f2e69412..d3f202f4 100644
--- a/datasette/views/special.py
+++ b/datasette/views/special.py
@@ -206,7 +206,7 @@ class CreateTokenView(BaseView):
                 "a": request.actor,
                 "e": (int(time.time()) + expires) if expires else None,
             }
-            token = self.ds.sign(token_bits, "token")
+            token = "dstok_{}".format(self.ds.sign(token_bits, "token"))
         return await self.render(
             ["create_token.html"],
             request,

From 7ab091e8ef8d3af1e23b5a81ffad2bd8c96cc47c Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 25 Oct 2022 19:04:05 -0700
Subject: [PATCH 0937/1705] Tests and docs for /-/create-token, refs #1852

---
 datasette/views/special.py | 14 +++++---
 docs/authentication.rst | 15 +++++++++
 tests/test_auth.py | 68 ++++++++++++++++++++++++++++++++++++++
 3 files changed, 93 insertions(+), 4 deletions(-)

diff --git a/datasette/views/special.py b/datasette/views/special.py
index d3f202f4..7f70eb1f 100644
--- a/datasette/views/special.py
+++ b/datasette/views/special.py
@@ -170,9 +170,16 @@ class CreateTokenView(BaseView):
     name = "create_token"
     has_json_alternate = False

-    async def get(self, request):
+    def check_permission(self, request):
         if not request.actor:
             raise Forbidden("You must be logged in to create a
token") + if not request.actor.get("id"): + raise Forbidden( + "You must be logged in as an actor with an ID to create a token" + ) + + async def get(self, request): + self.check_permission(request) return await self.render( ["create_token.html"], request, @@ -180,8 +187,7 @@ class CreateTokenView(BaseView): ) async def post(self, request): - if not request.actor: - raise Forbidden("You must be logged in to create a token") + self.check_permission(request) post = await request.post_vars() expires = None errors = [] @@ -203,7 +209,7 @@ class CreateTokenView(BaseView): token = None if not errors: token_bits = { - "a": request.actor, + "a": request.actor["id"], "e": (int(time.time()) + expires) if expires else None, } token = "dstok_{}".format(self.ds.sign(token_bits, "token")) diff --git a/docs/authentication.rst b/docs/authentication.rst index 685dab15..fc903fbb 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -333,6 +333,21 @@ To limit this ability for just one specific database, use this: } } +.. _CreateTokenView: + +API Tokens +========== + +Datasette includes a default mechanism for generating API tokens that can be used to authenticate requests. + +Authenticated users can create new API tokens using a form on the ``/-/create-token`` page. + +Created tokens can then be passed in the ``Authorization: Bearer token_here`` header of HTTP requests to Datasette. + +A token created by a user will include that user's ``"id"`` in the token payload, so any permissions granted to that user based on their ID will be made available to the token as well. + +Coming soon: a mechanism for creating tokens that can only perform a subset of the actions available to the user who created them. + .. _permissions_plugins: Checking permissions in plugins diff --git a/tests/test_auth.py b/tests/test_auth.py index 4ef35a76..3aaab50d 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -110,3 +110,71 @@ def test_no_logout_button_in_navigation_if_no_ds_actor_cookie(app_client, path): response = app_client.get(path + "?_bot=1") assert "bot" in response.text assert '
<form action="/-/logout" method="post">' not in response.text
+
+
+@pytest.mark.parametrize(
+    "post_data,errors,expected_duration",
+    (
+        ({"expire_type": ""}, [], None),
+        ({"expire_type": "x"}, ["Invalid expire duration"], None),
+        ({"expire_type": "minutes"}, ["Invalid expire duration"], None),
+        (
+            {"expire_type": "minutes", "expire_duration": "x"},
+            ["Invalid expire duration"],
+            None,
+        ),
+        (
+            {"expire_type": "minutes", "expire_duration": "-1"},
+            ["Invalid expire duration"],
+            None,
+        ),
+        (
+            {"expire_type": "minutes", "expire_duration": "0"},
+            ["Invalid expire duration"],
+            None,
+        ),
+        (
+            {"expire_type": "minutes", "expire_duration": "10"},
+            [],
+            600,
+        ),
+        (
+            {"expire_type": "hours", "expire_duration": "10"},
+            [],
+            10 * 60 * 60,
+        ),
+        (
+            {"expire_type": "days", "expire_duration": "3"},
+            [],
+            60 * 60 * 24 * 3,
+        ),
+    ),
+)
+def test_auth_create_token(app_client, post_data, errors, expected_duration):
+    assert app_client.get("/-/create-token").status == 403
+    ds_actor = app_client.actor_cookie({"id": "test"})
+    response = app_client.get("/-/create-token", cookies={"ds_actor": ds_actor})
+    assert response.status == 200
+    assert ">Create an API token<" in response.text
+    # Now try actually creating one
+    response2 = app_client.post(
+        "/-/create-token",
+        post_data,
+        csrftoken_from=True,
+        cookies={"ds_actor": ds_actor},
+    )
+    assert response2.status == 200
+    if errors:
+        for error in errors:
+            assert '
<p class="message-error">{}</p>
    '.format(error) in response2.text + else: + # Extract token from page + token = response2.text.split('value="dstok_')[1].split('"')[0] + details = app_client.ds.unsign(token, "token") + assert details.keys() == {"a", "e"} + assert details["a"] == "test" + if expected_duration is None: + assert details["e"] is None + else: + about_right = int(time.time()) + expected_duration + assert about_right - 2 < details["e"] < about_right + 2 From b29e487bc3fde6418bf45bda7cfed2e081ff03fb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 19:18:41 -0700 Subject: [PATCH 0938/1705] actor_from_request for dstok_ tokens, refs #1852 --- datasette/default_permissions.py | 25 +++++++++++++++++++++++++ datasette/utils/testing.py | 2 ++ tests/test_auth.py | 32 ++++++++++++++++++++++++++++++++ 3 files changed, 59 insertions(+) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index b58d8d1b..4d836ddc 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -1,5 +1,7 @@ from datasette import hookimpl from datasette.utils import actor_matches_allow +import itsdangerous +import time @hookimpl(tryfirst=True) @@ -45,3 +47,26 @@ def permission_allowed(datasette, actor, action, resource): return actor_matches_allow(actor, database_allow_sql) return inner + + +@hookimpl +def actor_from_request(datasette, request): + prefix = "dstok_" + authorization = request.headers.get("authorization") + if not authorization: + return None + if not authorization.startswith("Bearer "): + return None + token = authorization[len("Bearer ") :] + if not token.startswith(prefix): + return None + token = token[len(prefix) :] + try: + decoded = datasette.unsign(token, namespace="token") + except itsdangerous.BadSignature: + return None + expires_at = decoded.get("e") + if expires_at is not None: + if expires_at < time.time(): + return None + return {"id": decoded["a"], "dstok": True} diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index b28fc575..4f76a799 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -62,6 +62,7 @@ class TestClient: method="GET", cookies=None, if_none_match=None, + headers=None, ): return await self._request( path=path, @@ -70,6 +71,7 @@ class TestClient: method=method, cookies=cookies, if_none_match=if_none_match, + headers=headers, ) @async_to_sync diff --git a/tests/test_auth.py b/tests/test_auth.py index 3aaab50d..be21d6a5 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -178,3 +178,35 @@ def test_auth_create_token(app_client, post_data, errors, expected_duration): else: about_right = int(time.time()) + expected_duration assert about_right - 2 < details["e"] < about_right + 2 + + +@pytest.mark.parametrize( + "scenario,should_work", + ( + ("no_token", False), + ("invalid_token", False), + ("expired_token", False), + ("valid_unlimited_token", True), + ("valid_expiring_token", True), + ), +) +def test_auth_with_dstok_token(app_client, scenario, should_work): + token = None + if scenario == "valid_unlimited_token": + token = app_client.ds.sign({"a": "test"}, "token") + elif scenario == "valid_expiring_token": + token = app_client.ds.sign({"a": "test", "e": int(time.time()) + 1000}, "token") + elif scenario == "expired_token": + token = app_client.ds.sign({"a": "test", "e": int(time.time()) - 1000}, "token") + elif scenario == "invalid_token": + token = "invalid" + if token: + token = "dstok_{}".format(token) + headers = {} + if token: + headers["Authorization"] = "Bearer 
{}".format(token) + response = app_client.get("/-/actor.json", headers=headers) + if should_work: + assert response.json == {"actor": {"id": "test", "dstok": True}} + else: + assert response.json == {"actor": None} From 0f013ff497df62e1dd2075777b9817555646010e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 19:43:55 -0700 Subject: [PATCH 0939/1705] Mechanism to prevent tokens creating tokens, closes #1857 --- datasette/default_permissions.py | 2 +- datasette/views/special.py | 4 ++++ docs/authentication.rst | 2 ++ tests/test_auth.py | 11 ++++++++++- 4 files changed, 17 insertions(+), 2 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 4d836ddc..d908af7a 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -69,4 +69,4 @@ def actor_from_request(datasette, request): if expires_at is not None: if expires_at < time.time(): return None - return {"id": decoded["a"], "dstok": True} + return {"id": decoded["a"], "token": "dstok"} diff --git a/datasette/views/special.py b/datasette/views/special.py index 7f70eb1f..91130353 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -177,6 +177,10 @@ class CreateTokenView(BaseView): raise Forbidden( "You must be logged in as an actor with an ID to create a token" ) + if request.actor.get("token"): + raise Forbidden( + "Token authentication cannot be used to create additional tokens" + ) async def get(self, request): self.check_permission(request) diff --git a/docs/authentication.rst b/docs/authentication.rst index fc903fbb..cbecd296 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -348,6 +348,8 @@ A token created by a user will include that user's ``"id"`` in the token payload Coming soon: a mechanism for creating tokens that can only perform a subset of the actions available to the user who created them. +This page cannot be accessed by actors with a ``"token": "some-value"`` property. This is to prevent API tokens from being used to automatically create more tokens. Datasette plugins that implement their own form of API token authentication should follow this convention. + .. 
_permissions_plugins: Checking permissions in plugins diff --git a/tests/test_auth.py b/tests/test_auth.py index be21d6a5..397d51d7 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -180,6 +180,15 @@ def test_auth_create_token(app_client, post_data, errors, expected_duration): assert about_right - 2 < details["e"] < about_right + 2 +def test_auth_create_token_not_allowed_for_tokens(app_client): + ds_tok = app_client.ds.sign({"a": "test", "token": "dstok"}, "token") + response = app_client.get( + "/-/create-token", + headers={"Authorization": "Bearer dstok_{}".format(ds_tok)}, + ) + assert response.status == 403 + + @pytest.mark.parametrize( "scenario,should_work", ( @@ -207,6 +216,6 @@ def test_auth_with_dstok_token(app_client, scenario, should_work): headers["Authorization"] = "Bearer {}".format(token) response = app_client.get("/-/actor.json", headers=headers) if should_work: - assert response.json == {"actor": {"id": "test", "dstok": True}} + assert response.json == {"actor": {"id": "test", "token": "dstok"}} else: assert response.json == {"actor": None} From c23fa850e7f21977e367e3467656055216978e8a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 19:55:47 -0700 Subject: [PATCH 0940/1705] allow_signed_tokens setting, closes #1856 --- datasette/app.py | 5 +++++ datasette/default_permissions.py | 2 ++ datasette/views/special.py | 2 ++ docs/authentication.rst | 2 ++ docs/cli-reference.rst | 2 ++ docs/plugins.rst | 1 + docs/settings.rst | 13 +++++++++++++ tests/test_auth.py | 26 +++++++++++++++++++++----- 8 files changed, 48 insertions(+), 5 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index cab9d142..c868f8d3 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -124,6 +124,11 @@ SETTINGS = ( True, "Allow users to download the original SQLite database files", ), + Setting( + "allow_signed_tokens", + True, + "Allow users to create and use signed API tokens", + ), Setting("suggest_facets", True, "Calculate and display suggested facets"), Setting( "default_cache_ttl", diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index d908af7a..49ca8851 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -52,6 +52,8 @@ def permission_allowed(datasette, actor, action, resource): @hookimpl def actor_from_request(datasette, request): prefix = "dstok_" + if not datasette.setting("allow_signed_tokens"): + return None authorization = request.headers.get("authorization") if not authorization: return None diff --git a/datasette/views/special.py b/datasette/views/special.py index 91130353..89015958 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -171,6 +171,8 @@ class CreateTokenView(BaseView): has_json_alternate = False def check_permission(self, request): + if not self.ds.setting("allow_signed_tokens"): + raise Forbidden("Signed tokens are not enabled for this Datasette instance") if not request.actor: raise Forbidden("You must be logged in to create a token") if not request.actor.get("id"): diff --git a/docs/authentication.rst b/docs/authentication.rst index cbecd296..50304ec5 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -350,6 +350,8 @@ Coming soon: a mechanism for creating tokens that can only perform a subset of t This page cannot be accessed by actors with a ``"token": "some-value"`` property. This is to prevent API tokens from being used to automatically create more tokens. 
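The verification flow gated by ``allow_signed_tokens`` runs through the diffs above: strip the ``dstok_`` prefix, unsign the payload, then check the optional ``"e"`` expiry. A minimal standalone sketch of that round trip, assuming an itsdangerous-style serializer (Datasette itself wraps this in ``datasette.sign()`` / ``datasette.unsign()`` with a ``"token"`` namespace, so the serializer settings here are illustrative only)::

    import time
    from itsdangerous import URLSafeSerializer, BadSignature

    serializer = URLSafeSerializer("my-secret", salt="token")

    def actor_from_authorization(header):
        # Expect a header of the form "Bearer dstok_<signed payload>"
        if not header or not header.startswith("Bearer dstok_"):
            return None
        try:
            decoded = serializer.loads(header[len("Bearer dstok_"):])
        except BadSignature:
            return None
        expires_at = decoded.get("e")
        if expires_at is not None and expires_at < time.time():
            return None  # token has expired
        return {"id": decoded["a"], "token": "dstok"}

    # Round trip: mint a token that expires in an hour, then verify it
    token = serializer.dumps({"a": "root", "e": int(time.time()) + 3600})
    print(actor_from_authorization("Bearer dstok_" + token))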
Datasette plugins that implement their own form of API token authentication should follow this convention. +You can disable this feature using the :ref:`allow_signed_tokens ` setting. + .. _permissions_plugins: Checking permissions in plugins diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 4a8465cb..fd5e2404 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -226,6 +226,8 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam ?_facet= parameter (default=True) allow_download Allow users to download the original SQLite database files (default=True) + allow_signed_tokens Allow users to create and use signed API tokens + (default=True) suggest_facets Calculate and display suggested facets (default=True) default_cache_ttl Default HTTP cache TTL (used in Cache-Control: diff --git a/docs/plugins.rst b/docs/plugins.rst index 29078054..9efef32f 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -151,6 +151,7 @@ If you run ``datasette plugins --all`` it will include default plugins that ship "templates": false, "version": null, "hooks": [ + "actor_from_request", "permission_allowed" ] }, diff --git a/docs/settings.rst b/docs/settings.rst index a6d50543..be640b21 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -169,6 +169,19 @@ Should users be able to download the original SQLite database using a link on th datasette mydatabase.db --setting allow_download off +.. _setting_allow_signed_tokens: + +allow_signed_tokens +~~~~~~~~~~~~~~~~~~~ + +Should users be able to create signed API tokens to access Datasette? + +This is turned on by default. Use the following to turn it off:: + + datasette mydatabase.db --setting allow_signed_tokens off + +Turning this setting off will disable the ``/-/create-token`` page, :ref:`described here `. It will also cause any incoming ``Authorization: Bearer dstok_...`` API tokens to be ignored. + .. 
_setting_default_cache_ttl: default_cache_ttl diff --git a/tests/test_auth.py b/tests/test_auth.py index 397d51d7..a79dafd8 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -189,9 +189,20 @@ def test_auth_create_token_not_allowed_for_tokens(app_client): assert response.status == 403 +def test_auth_create_token_not_allowed_if_allow_signed_tokens_off(app_client): + app_client.ds._settings["allow_signed_tokens"] = False + try: + ds_actor = app_client.actor_cookie({"id": "test"}) + response = app_client.get("/-/create-token", cookies={"ds_actor": ds_actor}) + assert response.status == 403 + finally: + app_client.ds._settings["allow_signed_tokens"] = True + + @pytest.mark.parametrize( "scenario,should_work", ( + ("allow_signed_tokens_off", False), ("no_token", False), ("invalid_token", False), ("expired_token", False), @@ -201,7 +212,7 @@ def test_auth_create_token_not_allowed_for_tokens(app_client): ) def test_auth_with_dstok_token(app_client, scenario, should_work): token = None - if scenario == "valid_unlimited_token": + if scenario in ("valid_unlimited_token", "allow_signed_tokens_off"): token = app_client.ds.sign({"a": "test"}, "token") elif scenario == "valid_expiring_token": token = app_client.ds.sign({"a": "test", "e": int(time.time()) + 1000}, "token") @@ -211,11 +222,16 @@ def test_auth_with_dstok_token(app_client, scenario, should_work): token = "invalid" if token: token = "dstok_{}".format(token) + if scenario == "allow_signed_tokens_off": + app_client.ds._settings["allow_signed_tokens"] = False headers = {} if token: headers["Authorization"] = "Bearer {}".format(token) response = app_client.get("/-/actor.json", headers=headers) - if should_work: - assert response.json == {"actor": {"id": "test", "token": "dstok"}} - else: - assert response.json == {"actor": None} + try: + if should_work: + assert response.json == {"actor": {"id": "test", "token": "dstok"}} + else: + assert response.json == {"actor": None} + finally: + app_client.ds._settings["allow_signed_tokens"] = True From c36a74ece1e475291af326d493d8db9ff3afdd30 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 21:04:39 -0700 Subject: [PATCH 0941/1705] Try shutting down executor in tests to free up thread local SQLite connections, refs #1843 --- tests/fixtures.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/fixtures.py b/tests/fixtures.py index 13a3dffa..d1afd2f3 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -166,6 +166,7 @@ def make_app_client( # Close the connection to avoid "too many open files" errors conn.close() os.remove(filepath) + ds.executor.shutdown() @pytest.fixture(scope="session") From c556fad65d8a45ce85027678796a12ac9107d9ed Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 21:25:47 -0700 Subject: [PATCH 0942/1705] Try to address too many files error again, refs #1843 --- tests/fixtures.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index d1afd2f3..92a10da6 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -131,10 +131,14 @@ def make_app_client( for sql, params in TABLE_PARAMETERIZED_SQL: with conn: conn.execute(sql, params) + # Close the connection to avoid "too many open files" errors + conn.close() if extra_databases is not None: for extra_filename, extra_sql in extra_databases.items(): extra_filepath = os.path.join(tmpdir, extra_filename) - sqlite3.connect(extra_filepath).executescript(extra_sql) + c2 = sqlite3.connect(extra_filepath) + 
c2.executescript(extra_sql) + c2.close() # Insert at start to help test /-/databases ordering: files.insert(0, extra_filepath) os.chdir(os.path.dirname(filepath)) @@ -163,10 +167,7 @@ def make_app_client( crossdb=crossdb, ) yield TestClient(ds) - # Close the connection to avoid "too many open files" errors - conn.close() os.remove(filepath) - ds.executor.shutdown() @pytest.fixture(scope="session") From c7956eed7777c62653b4d508570c5d77cfead7d9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 21:26:12 -0700 Subject: [PATCH 0943/1705] datasette create-token command, refs #1859 --- datasette/default_permissions.py | 38 ++++++++++++++++++++++++++++ docs/authentication.rst | 23 +++++++++++++++++ docs/cli-reference.rst | 43 ++++++++++++++++++++++++++------ docs/plugins.rst | 3 ++- tests/test_api.py | 1 + tests/test_auth.py | 28 +++++++++++++++++++++ tests/test_plugins.py | 2 ++ 7 files changed, 130 insertions(+), 8 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 49ca8851..12499c16 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -1,6 +1,8 @@ from datasette import hookimpl from datasette.utils import actor_matches_allow +import click import itsdangerous +import json import time @@ -72,3 +74,39 @@ def actor_from_request(datasette, request): if expires_at < time.time(): return None return {"id": decoded["a"], "token": "dstok"} + + +@hookimpl +def register_commands(cli): + from datasette.app import Datasette + + @cli.command() + @click.argument("id") + @click.option( + "--secret", + help="Secret used for signing the API tokens", + envvar="DATASETTE_SECRET", + required=True, + ) + @click.option( + "-e", + "--expires-after", + help="Token should expire after this many seconds", + type=int, + ) + @click.option( + "--debug", + help="Show decoded token", + is_flag=True, + ) + def create_token(id, secret, expires_after, debug): + "Create a signed API token for the specified actor ID" + ds = Datasette(secret=secret) + bits = {"a": id, "token": "dstok"} + if expires_after: + bits["e"] = int(time.time()) + expires_after + token = ds.sign(bits, namespace="token") + click.echo("dstok_{}".format(token)) + if debug: + click.echo("\nDecoded:\n") + click.echo(json.dumps(ds.unsign(token, namespace="token"), indent=2)) diff --git a/docs/authentication.rst b/docs/authentication.rst index 50304ec5..0835e17c 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -352,6 +352,29 @@ This page cannot be accessed by actors with a ``"token": "some-value"`` property You can disable this feature using the :ref:`allow_signed_tokens ` setting. +.. _authentication_cli_create_token: + +datasette create-token +---------------------- + +You can also create tokens on the command line using the ``datasette create-token`` command. + +This command takes one required argument - the ID of the actor to be associated with the created token. + +You can specify an ``--expires-after`` option in seconds. If omitted, the token will never expire. + +The command will sign the token using the ``DATASETTE_SECRET`` environment variable, if available. You can also pass the secret using the ``--secret`` option. + +This means you can run the command locally to create tokens for use with a deployed Datasette instance, provided you know that instance's secret. 
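As a usage sketch of the end-to-end flow this enables: mint a token locally with the instance's secret, then send it in the ``Authorization`` header. The instance URL below is a placeholder, and the printed result assumes the default ``actor_from_request`` hook shown earlier::

    # Hypothetical usage of a token minted with:
    #     datasette create-token root --secret my-secret-goes-here
    import httpx

    token = "dstok_..."  # paste the full create-token output here

    response = httpx.get(
        "https://example-instance.example.com/-/actor.json",  # placeholder URL
        headers={"Authorization": "Bearer {}".format(token)},
    )
    print(response.json())  # expected: {"actor": {"id": "root", "token": "dstok"}}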
+
+To create a token for the ``root`` actor that will expire in one hour::
+
+    datasette create-token root --expires-after 3600
+
+To create a token that never expires using a specific secret::
+
+    datasette create-token root --secret my-secret-goes-here
+
 .. _permissions_plugins:

 Checking permissions in plugins

diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst
index fd5e2404..b40c6b2c 100644
--- a/docs/cli-reference.rst
+++ b/docs/cli-reference.rst
@@ -47,13 +47,14 @@ Running ``datasette --help`` shows a list of all of the available commands.
       --help     Show this message and exit.

     Commands:
-      serve*     Serve up specified SQLite database files with a web UI
-      inspect    Generate JSON summary of provided database files
-      install    Install plugins and packages from PyPI into the same...
-      package    Package SQLite files into a Datasette Docker container
-      plugins    List currently installed plugins
-      publish    Publish specified SQLite database files to the internet along...
-      uninstall  Uninstall plugins and Python packages from the Datasette...
+      serve*        Serve up specified SQLite database files with a web UI
+      create-token  Create a signed API token for the specified actor ID
+      inspect       Generate JSON summary of provided database files
+      install       Install plugins and packages from PyPI into the same...
+      package       Package SQLite files into a Datasette Docker container
+      plugins       List currently installed plugins
+      publish       Publish specified SQLite database files to the internet...
+      uninstall     Uninstall plugins and Python packages from the Datasette...

 .. [[[end]]]
@@ -591,3 +592,31 @@ This performance optimization is used automatically by some of the ``datasette p

 .. [[[end]]]
+
+
+.. _cli_help_create_token___help:
+
+datasette create-token
+======================
+
+Create a signed API token, see :ref:`authentication_cli_create_token`.
+
+.. [[[cog
+    help(["create-token", "--help"])
+.. ]]]
+
+::
+
+    Usage: datasette create-token [OPTIONS] ID
+
+      Create a signed API token for the specified actor ID
+
+    Options:
+      --secret TEXT                Secret used for signing the API tokens
+                                   [required]
+      -e, --expires-after INTEGER  Token should expire after this many seconds
+      --debug                      Show decoded token
+      --help                       Show this message and exit.
+
+..
[[[end]]] diff --git a/docs/plugins.rst b/docs/plugins.rst index 9efef32f..3ae42293 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -152,7 +152,8 @@ If you run ``datasette plugins --all`` it will include default plugins that ship "version": null, "hooks": [ "actor_from_request", - "permission_allowed" + "permission_allowed", + "register_commands" ] }, { diff --git a/tests/test_api.py b/tests/test_api.py index ad74d16e..f7cbe950 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -806,6 +806,7 @@ def test_settings_json(app_client): "max_returned_rows": 100, "sql_time_limit_ms": 200, "allow_download": True, + "allow_signed_tokens": True, "allow_facet": True, "suggest_facets": True, "default_cache_ttl": 5, diff --git a/tests/test_auth.py b/tests/test_auth.py index a79dafd8..f2d82107 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,5 +1,7 @@ from .fixtures import app_client +from click.testing import CliRunner from datasette.utils import baseconv +from datasette.cli import cli import pytest import time @@ -235,3 +237,29 @@ def test_auth_with_dstok_token(app_client, scenario, should_work): assert response.json == {"actor": None} finally: app_client.ds._settings["allow_signed_tokens"] = True + + +@pytest.mark.parametrize("expires", (None, 1000, -1000)) +def test_cli_create_token(app_client, expires): + secret = app_client.ds._secret + runner = CliRunner(mix_stderr=False) + args = ["create-token", "--secret", secret, "test"] + if expires: + args += ["--expires-after", str(expires)] + result = runner.invoke(cli, args) + assert result.exit_code == 0 + token = result.output.strip() + assert token.startswith("dstok_") + details = app_client.ds.unsign(token[len("dstok_") :], "token") + expected_keys = {"a", "token"} + if expires: + expected_keys.add("e") + assert details.keys() == expected_keys + assert details["a"] == "test" + response = app_client.get( + "/-/actor.json", headers={"Authorization": "Bearer {}".format(token)} + ) + if expires is None or expires > 0: + assert response.json == {"actor": {"id": "test", "token": "dstok"}} + else: + assert response.json == {"actor": None} diff --git a/tests/test_plugins.py b/tests/test_plugins.py index e0a7bc76..de3fde8e 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -971,6 +971,7 @@ def test_hook_register_commands(): "plugins", "publish", "uninstall", + "create-token", } # Now install a plugin @@ -1001,6 +1002,7 @@ def test_hook_register_commands(): "uninstall", "verify", "unverify", + "create-token", } pm.unregister(name="verify") importlib.reload(cli) From df7bf0b2fc262f0b025b3cdd283ff8ce60653175 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:13:31 -0700 Subject: [PATCH 0944/1705] Fix bug with breadcrumbs and request=None, closes #1849 --- datasette/app.py | 9 ++++++--- tests/test_internals_datasette.py | 9 +++++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 9df16558..246269f3 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -633,15 +633,18 @@ class Datasette: async def _crumb_items(self, request, table=None, database=None): crumbs = [] + actor = None + if request: + actor = request.actor # Top-level link if await self.permission_allowed( - actor=request.actor, action="view-instance", default=True + actor=actor, action="view-instance", default=True ): crumbs.append({"href": self.urls.instance(), "label": "home"}) # Database link if database: if await self.permission_allowed( - actor=request.actor, + actor=actor, 
action="view-database", resource=database, default=True, @@ -656,7 +659,7 @@ class Datasette: if table: assert database, "table= requires database=" if await self.permission_allowed( - actor=request.actor, + actor=actor, action="view-table", resource=(database, table), default=True, diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index c82cafb3..1b4732af 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -125,3 +125,12 @@ async def test_datasette_ensure_permissions_check_visibility( visible, private = await ds.check_visibility(actor, permissions=permissions) assert visible == should_allow assert private == expected_private + + +@pytest.mark.asyncio +async def test_datasette_render_template_no_request(): + # https://github.com/simonw/datasette/issues/1849 + ds = Datasette([], memory=True) + await ds.invoke_startup() + rendered = await ds.render_template("error.html") + assert "Error " in rendered From 55a709c480a1e7401b4ff6208f37a2cf7c682183 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:34:33 -0700 Subject: [PATCH 0945/1705] Allow leading comments on SQL queries, refs #1860 --- datasette/utils/__init__.py | 27 +++++++++++++++++++++------ tests/test_utils.py | 7 +++++++ 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 803ba96d..977a66d6 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -205,13 +205,28 @@ class InvalidSql(Exception): pass +# Allow SQL to start with a /* */ or -- comment +comment_re = ( + # Start of string, then any amount of whitespace + r"^(\s*" + + + # Comment that starts with -- and ends at a newline + r"(?:\-\-.*?\n\s*)" + + + # Comment that starts with /* and ends with */ + r"|(?:/\*[\s\S]*?\*/)" + + + # Whitespace + r")*\s*" +) + allowed_sql_res = [ - re.compile(r"^select\b"), - re.compile(r"^explain\s+select\b"), - re.compile(r"^explain\s+query\s+plan\s+select\b"), - re.compile(r"^with\b"), - re.compile(r"^explain\s+with\b"), - re.compile(r"^explain\s+query\s+plan\s+with\b"), + re.compile(comment_re + r"select\b"), + re.compile(comment_re + r"explain\s+select\b"), + re.compile(comment_re + r"explain\s+query\s+plan\s+select\b"), + re.compile(comment_re + r"with\b"), + re.compile(comment_re + r"explain\s+with\b"), + re.compile(comment_re + r"explain\s+query\s+plan\s+with\b"), ] allowed_pragmas = ( "database_list", diff --git a/tests/test_utils.py b/tests/test_utils.py index d71a612d..e89f1e6b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -141,6 +141,7 @@ def test_custom_json_encoder(obj, expected): "update blah set some_column='# Hello there\n\n* This is a list\n* of items\n--\n[And a link](https://github.com/simonw/datasette-render-markdown).'\nas demo_markdown", "PRAGMA case_sensitive_like = true", "SELECT * FROM pragma_not_on_allow_list('idx52')", + "/* This comment is not valid. 
select 1", ], ) def test_validate_sql_select_bad(bad_sql): @@ -166,6 +167,12 @@ def test_validate_sql_select_bad(bad_sql): "explain query plan WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;", "SELECT * FROM pragma_index_info('idx52')", "select * from pragma_table_xinfo('table')", + # Various types of comment + "-- comment\nselect 1", + "-- one line\n -- two line\nselect 1", + " /* comment */\nselect 1", + " /* comment */select 1", + "/* comment */\n -- another\n /* one more */ select 1", ], ) def test_validate_sql_select_good(good_sql): From 55f860c304aea813cb7ed740cc5625560a0722a0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:13:31 -0700 Subject: [PATCH 0946/1705] Fix bug with breadcrumbs and request=None, closes #1849 --- datasette/app.py | 9 ++++++--- tests/test_internals_datasette.py | 9 +++++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index c868f8d3..596ff44d 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -639,15 +639,18 @@ class Datasette: async def _crumb_items(self, request, table=None, database=None): crumbs = [] + actor = None + if request: + actor = request.actor # Top-level link if await self.permission_allowed( - actor=request.actor, action="view-instance", default=True + actor=actor, action="view-instance", default=True ): crumbs.append({"href": self.urls.instance(), "label": "home"}) # Database link if database: if await self.permission_allowed( - actor=request.actor, + actor=actor, action="view-database", resource=database, default=True, @@ -662,7 +665,7 @@ class Datasette: if table: assert database, "table= requires database=" if await self.permission_allowed( - actor=request.actor, + actor=actor, action="view-table", resource=(database, table), default=True, diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index c82cafb3..1b4732af 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -125,3 +125,12 @@ async def test_datasette_ensure_permissions_check_visibility( visible, private = await ds.check_visibility(actor, permissions=permissions) assert visible == should_allow assert private == expected_private + + +@pytest.mark.asyncio +async def test_datasette_render_template_no_request(): + # https://github.com/simonw/datasette/issues/1849 + ds = Datasette([], memory=True) + await ds.invoke_startup() + rendered = await ds.render_template("error.html") + assert "Error " in rendered From af5d5d0243631562ad83f2c318bff31a077feb5d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:34:33 -0700 Subject: [PATCH 0947/1705] Allow leading comments on SQL queries, refs #1860 --- datasette/utils/__init__.py | 27 +++++++++++++++++++++------ tests/test_utils.py | 7 +++++++ 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 803ba96d..977a66d6 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -205,13 +205,28 @@ class InvalidSql(Exception): pass +# Allow SQL to start with a /* */ or -- comment +comment_re = ( + # Start of string, then any amount of whitespace + r"^(\s*" + + + # Comment that starts with -- and ends at a newline + r"(?:\-\-.*?\n\s*)" + + + # Comment that starts with /* and ends with */ + r"|(?:/\*[\s\S]*?\*/)" + + + # Whitespace + r")*\s*" +) + allowed_sql_res = [ - re.compile(r"^select\b"), - re.compile(r"^explain\s+select\b"), - 
re.compile(r"^explain\s+query\s+plan\s+select\b"), - re.compile(r"^with\b"), - re.compile(r"^explain\s+with\b"), - re.compile(r"^explain\s+query\s+plan\s+with\b"), + re.compile(comment_re + r"select\b"), + re.compile(comment_re + r"explain\s+select\b"), + re.compile(comment_re + r"explain\s+query\s+plan\s+select\b"), + re.compile(comment_re + r"with\b"), + re.compile(comment_re + r"explain\s+with\b"), + re.compile(comment_re + r"explain\s+query\s+plan\s+with\b"), ] allowed_pragmas = ( "database_list", diff --git a/tests/test_utils.py b/tests/test_utils.py index d71a612d..e89f1e6b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -141,6 +141,7 @@ def test_custom_json_encoder(obj, expected): "update blah set some_column='# Hello there\n\n* This is a list\n* of items\n--\n[And a link](https://github.com/simonw/datasette-render-markdown).'\nas demo_markdown", "PRAGMA case_sensitive_like = true", "SELECT * FROM pragma_not_on_allow_list('idx52')", + "/* This comment is not valid. select 1", ], ) def test_validate_sql_select_bad(bad_sql): @@ -166,6 +167,12 @@ def test_validate_sql_select_bad(bad_sql): "explain query plan WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;", "SELECT * FROM pragma_index_info('idx52')", "select * from pragma_table_xinfo('table')", + # Various types of comment + "-- comment\nselect 1", + "-- one line\n -- two line\nselect 1", + " /* comment */\nselect 1", + " /* comment */select 1", + "/* comment */\n -- another\n /* one more */ select 1", ], ) def test_validate_sql_select_good(good_sql): From 382a87158337540f991c6dc887080f7b37c7c26e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:13:31 -0700 Subject: [PATCH 0948/1705] max_signed_tokens_ttl setting, closes #1858 Also redesigned token format to include creation time and optional duration. 
--- datasette/app.py | 5 ++++ datasette/default_permissions.py | 33 +++++++++++++++++---- datasette/views/special.py | 20 ++++++++----- docs/settings.rst | 15 ++++++++++ tests/test_api.py | 1 + tests/test_auth.py | 50 ++++++++++++++++++++++++-------- 6 files changed, 99 insertions(+), 25 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 596ff44d..894d7f0f 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -129,6 +129,11 @@ SETTINGS = ( True, "Allow users to create and use signed API tokens", ), + Setting( + "max_signed_tokens_ttl", + 0, + "Maximum allowed expiry time for signed API tokens", + ), Setting("suggest_facets", True, "Calculate and display suggested facets"), Setting( "default_cache_ttl", diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 12499c16..c502dd70 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -56,6 +56,7 @@ def actor_from_request(datasette, request): prefix = "dstok_" if not datasette.setting("allow_signed_tokens"): return None + max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl") authorization = request.headers.get("authorization") if not authorization: return None @@ -69,11 +70,31 @@ def actor_from_request(datasette, request): decoded = datasette.unsign(token, namespace="token") except itsdangerous.BadSignature: return None - expires_at = decoded.get("e") - if expires_at is not None: - if expires_at < time.time(): + if "t" not in decoded: + # Missing timestamp + return None + created = decoded["t"] + if not isinstance(created, int): + # Invalid timestamp + return None + duration = decoded.get("d") + if duration is not None and not isinstance(duration, int): + # Invalid duration + return None + if (duration is None and max_signed_tokens_ttl) or ( + duration is not None + and max_signed_tokens_ttl + and duration > max_signed_tokens_ttl + ): + duration = max_signed_tokens_ttl + if duration: + if time.time() - created > duration: + # Expired return None - return {"id": decoded["a"], "token": "dstok"} + actor = {"id": decoded["a"], "token": "dstok"} + if duration: + actor["token_expires"] = created + duration + return actor @hookimpl @@ -102,9 +123,9 @@ def register_commands(cli): def create_token(id, secret, expires_after, debug): "Create a signed API token for the specified actor ID" ds = Datasette(secret=secret) - bits = {"a": id, "token": "dstok"} + bits = {"a": id, "token": "dstok", "t": int(time.time())} if expires_after: - bits["e"] = int(time.time()) + expires_after + bits["d"] = expires_after token = ds.sign(bits, namespace="token") click.echo("dstok_{}".format(token)) if debug: diff --git a/datasette/views/special.py b/datasette/views/special.py index 89015958..b754a2f0 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -195,20 +195,24 @@ class CreateTokenView(BaseView): async def post(self, request): self.check_permission(request) post = await request.post_vars() - expires = None errors = [] + duration = None if post.get("expire_type"): - duration = post.get("expire_duration") - if not duration or not duration.isdigit() or not int(duration) > 0: + duration_string = post.get("expire_duration") + if ( + not duration_string + or not duration_string.isdigit() + or not int(duration_string) > 0 + ): errors.append("Invalid expire duration") else: unit = post["expire_type"] if unit == "minutes": - expires = int(duration) * 60 + duration = int(duration_string) * 60 elif unit == "hours": - expires = int(duration) * 60 * 60 + duration 
= int(duration_string) * 60 * 60 elif unit == "days": - expires = int(duration) * 60 * 60 * 24 + duration = int(duration_string) * 60 * 60 * 24 else: errors.append("Invalid expire duration unit") token_bits = None @@ -216,8 +220,10 @@ class CreateTokenView(BaseView): if not errors: token_bits = { "a": request.actor["id"], - "e": (int(time.time()) + expires) if expires else None, + "t": int(time.time()), } + if duration: + token_bits["d"] = duration token = "dstok_{}".format(self.ds.sign(token_bits, "token")) return await self.render( ["create_token.html"], diff --git a/docs/settings.rst b/docs/settings.rst index be640b21..a990c78c 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -182,6 +182,21 @@ This is turned on by default. Use the following to turn it off:: Turning this setting off will disable the ``/-/create-token`` page, :ref:`described here `. It will also cause any incoming ``Authorization: Bearer dstok_...`` API tokens to be ignored. +.. _setting_max_signed_tokens_ttl: + +max_signed_tokens_ttl +~~~~~~~~~~~~~~~~~~~~~ + +Maximum allowed expiry time for signed API tokens created by users. + +Defaults to ``0`` which means no limit - tokens can be created that will never expire. + +Set this to a value in seconds to limit the maximum expiry time. For example, to set that limit to 24 hours you would use:: + + datasette mydatabase.db --setting max_signed_tokens_ttl 86400 + +This setting is enforced when incoming tokens are processed. + .. _setting_default_cache_ttl: default_cache_ttl diff --git a/tests/test_api.py b/tests/test_api.py index f7cbe950..fc171421 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -807,6 +807,7 @@ def test_settings_json(app_client): "sql_time_limit_ms": 200, "allow_download": True, "allow_signed_tokens": True, + "max_signed_tokens_ttl": 0, "allow_facet": True, "suggest_facets": True, "default_cache_ttl": 5, diff --git a/tests/test_auth.py b/tests/test_auth.py index f2d82107..fa1b2e46 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -173,13 +173,19 @@ def test_auth_create_token(app_client, post_data, errors, expected_duration): # Extract token from page token = response2.text.split('value="dstok_')[1].split('"')[0] details = app_client.ds.unsign(token, "token") - assert details.keys() == {"a", "e"} + assert details.keys() == {"a", "t", "d"} or details.keys() == {"a", "t"} assert details["a"] == "test" if expected_duration is None: - assert details["e"] is None + assert "d" not in details else: - about_right = int(time.time()) + expected_duration - assert about_right - 2 < details["e"] < about_right + 2 + assert details["d"] == expected_duration + # And test that token + response3 = app_client.get( + "/-/actor.json", + headers={"Authorization": "Bearer {}".format("dstok_{}".format(token))}, + ) + assert response3.status == 200 + assert response3.json["actor"]["id"] == "test" def test_auth_create_token_not_allowed_for_tokens(app_client): @@ -206,6 +212,7 @@ def test_auth_create_token_not_allowed_if_allow_signed_tokens_off(app_client): ( ("allow_signed_tokens_off", False), ("no_token", False), + ("no_timestamp", False), ("invalid_token", False), ("expired_token", False), ("valid_unlimited_token", True), @@ -214,12 +221,15 @@ def test_auth_create_token_not_allowed_if_allow_signed_tokens_off(app_client): ) def test_auth_with_dstok_token(app_client, scenario, should_work): token = None + _time = int(time.time()) if scenario in ("valid_unlimited_token", "allow_signed_tokens_off"): - token = app_client.ds.sign({"a": "test"}, "token") + token = 
app_client.ds.sign({"a": "test", "t": _time}, "token") elif scenario == "valid_expiring_token": - token = app_client.ds.sign({"a": "test", "e": int(time.time()) + 1000}, "token") + token = app_client.ds.sign({"a": "test", "t": _time - 50, "d": 1000}, "token") elif scenario == "expired_token": - token = app_client.ds.sign({"a": "test", "e": int(time.time()) - 1000}, "token") + token = app_client.ds.sign({"a": "test", "t": _time - 2000, "d": 1000}, "token") + elif scenario == "no_timestamp": + token = app_client.ds.sign({"a": "test"}, "token") elif scenario == "invalid_token": token = "invalid" if token: @@ -232,7 +242,16 @@ def test_auth_with_dstok_token(app_client, scenario, should_work): response = app_client.get("/-/actor.json", headers=headers) try: if should_work: - assert response.json == {"actor": {"id": "test", "token": "dstok"}} + assert response.json.keys() == {"actor"} + actor = response.json["actor"] + expected_keys = {"id", "token"} + if scenario != "valid_unlimited_token": + expected_keys.add("token_expires") + assert actor.keys() == expected_keys + assert actor["id"] == "test" + assert actor["token"] == "dstok" + if scenario != "valid_unlimited_token": + assert isinstance(actor["token_expires"], int) else: assert response.json == {"actor": None} finally: @@ -251,15 +270,22 @@ def test_cli_create_token(app_client, expires): token = result.output.strip() assert token.startswith("dstok_") details = app_client.ds.unsign(token[len("dstok_") :], "token") - expected_keys = {"a", "token"} + expected_keys = {"a", "token", "t"} if expires: - expected_keys.add("e") + expected_keys.add("d") assert details.keys() == expected_keys assert details["a"] == "test" response = app_client.get( "/-/actor.json", headers={"Authorization": "Bearer {}".format(token)} ) if expires is None or expires > 0: - assert response.json == {"actor": {"id": "test", "token": "dstok"}} + expected_actor = { + "id": "test", + "token": "dstok", + } + if expires and expires > 0: + expected_actor["token_expires"] = details["t"] + expires + assert response.json == {"actor": expected_actor} else: - assert response.json == {"actor": None} + expected_actor = None + assert response.json == {"actor": expected_actor} From 51c436fed29205721dcf17fa31d7e7090d34ebb8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 20:57:02 -0700 Subject: [PATCH 0949/1705] First draft of insert row write API, refs #1851 --- datasette/default_permissions.py | 2 +- datasette/views/table.py | 76 +++++++++++++++++++++++++++----- docs/authentication.rst | 12 +++++ docs/cli-reference.rst | 2 + docs/json_api.rst | 38 ++++++++++++++++ 5 files changed, 119 insertions(+), 11 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index c502dd70..87684e2a 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -9,7 +9,7 @@ import time @hookimpl(tryfirst=True) def permission_allowed(datasette, actor, action, resource): async def inner(): - if action in ("permissions-debug", "debug-menu"): + if action in ("permissions-debug", "debug-menu", "insert-row"): if actor and actor.get("id") == "root": return True elif action == "view-instance": diff --git a/datasette/views/table.py b/datasette/views/table.py index f73b0957..74d1c532 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -28,7 +28,7 @@ from datasette.utils import ( urlsafe_components, value_as_boolean, ) -from datasette.utils.asgi import BadRequest, Forbidden, NotFound +from datasette.utils.asgi 
import BadRequest, Forbidden, NotFound, Response from datasette.filters import Filters from .base import DataView, DatasetteError, ureg from .database import QueryView @@ -103,15 +103,71 @@ class TableView(DataView): canned_query = await self.ds.get_canned_query( database_name, table_name, request.actor ) - assert canned_query, "You may only POST to a canned query" - return await QueryView(self.ds).data( - request, - canned_query["sql"], - metadata=canned_query, - editable=False, - canned_query=table_name, - named_parameters=canned_query.get("params"), - write=bool(canned_query.get("write")), + if canned_query: + return await QueryView(self.ds).data( + request, + canned_query["sql"], + metadata=canned_query, + editable=False, + canned_query=table_name, + named_parameters=canned_query.get("params"), + write=bool(canned_query.get("write")), + ) + else: + # Handle POST to a table + return await self.table_post(request, database_name, table_name) + + async def table_post(self, request, database_name, table_name): + # Table must exist (may handle table creation in the future) + db = self.ds.get_database(database_name) + if not await db.table_exists(table_name): + raise NotFound("Table not found: {}".format(table_name)) + # Must have insert-row permission + if not await self.ds.permission_allowed( + request.actor, "insert-row", resource=(database_name, table_name) + ): + raise Forbidden("Permission denied") + if request.headers.get("content-type") != "application/json": + # TODO: handle form-encoded data + raise BadRequest("Must send JSON data") + data = json.loads(await request.post_body()) + if "row" not in data: + raise BadRequest('Must send "row" data') + row = data["row"] + if not isinstance(row, dict): + raise BadRequest("row must be a dictionary") + # Verify all columns exist + columns = await db.table_columns(table_name) + pks = await db.primary_keys(table_name) + for key in row: + if key not in columns: + raise BadRequest("Column not found: {}".format(key)) + if key in pks: + raise BadRequest( + "Cannot insert into primary key column: {}".format(key) + ) + # Perform the insert + sql = "INSERT INTO [{table}] ({columns}) VALUES ({values})".format( + table=escape_sqlite(table_name), + columns=", ".join(escape_sqlite(c) for c in row), + values=", ".join("?" for c in row), + ) + cursor = await db.execute_write(sql, list(row.values())) + # Return the new row + rowid = cursor.lastrowid + new_row = ( + await db.execute( + "SELECT * FROM [{table}] WHERE rowid = ?".format( + table=escape_sqlite(table_name) + ), + [rowid], + ) + ).first() + return Response.json( + { + "row": dict(new_row), + }, + status=201, ) async def columns_to_select(self, table_columns, pks, request): diff --git a/docs/authentication.rst b/docs/authentication.rst index 0835e17c..233a50d2 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -547,6 +547,18 @@ Actor is allowed to view (and execute) a :ref:`canned query ` pa Default *allow*. +.. _permissions_insert_row: + +insert-row +---------- + +Actor is allowed to insert rows into a table. + +``resource`` - tuple: (string, string) + The name of the database, then the name of the table + +Default *deny*. + .. 
_permissions_execute_sql: execute-sql diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index b40c6b2c..56156568 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -229,6 +229,8 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam database files (default=True) allow_signed_tokens Allow users to create and use signed API tokens (default=True) + max_signed_tokens_ttl Maximum allowed expiry time for signed API tokens + (default=0) suggest_facets Calculate and display suggested facets (default=True) default_cache_ttl Default HTTP cache TTL (used in Cache-Control: diff --git a/docs/json_api.rst b/docs/json_api.rst index d3fdb1e4..b339a738 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -455,3 +455,41 @@ You can find this near the top of the source code of those pages, looking like t The JSON URL is also made available in a ``Link`` HTTP header for the page:: Link: https://latest.datasette.io/fixtures/sortable.json; rel="alternate"; type="application/json+datasette" + +.. _json_api_write: + +The JSON write API +------------------ + +Datasette provides a write API for JSON data. This is a POST-only API that requires an authenticated API token, see :ref:`CreateTokenView`. + +.. _json_api_write_insert_row: + +Inserting a single row +~~~~~~~~~~~~~~~~~~~~~~ + +This requires the :ref:`permissions_insert_row` permission. + +:: + + POST // + Content-Type: application/json + Authorization: Bearer dstok_ + { + "row": { + "column1": "value1", + "column2": "value2" + } + } + +If successful, this will return a ``201`` status code and the newly inserted row, for example: + +.. code-block:: json + + { + "row": { + "id": 1, + "column1": "value1", + "column2": "value2" + } + } From f6ca86987ba9d7d48eccf2cfe0bfc94942003844 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 06:56:11 -0700 Subject: [PATCH 0950/1705] Delete mirror-master-and-main.yml Closes #1865 --- .github/workflows/mirror-master-and-main.yml | 21 -------------------- 1 file changed, 21 deletions(-) delete mode 100644 .github/workflows/mirror-master-and-main.yml diff --git a/.github/workflows/mirror-master-and-main.yml b/.github/workflows/mirror-master-and-main.yml deleted file mode 100644 index 8418df40..00000000 --- a/.github/workflows/mirror-master-and-main.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Mirror "master" and "main" branches -on: - push: - branches: - - master - - main - -jobs: - mirror: - runs-on: ubuntu-latest - steps: - - name: Mirror to "master" - uses: zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 - with: - target-branch: master - force: false - - name: Mirror to "main" - uses: zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 - with: - target-branch: main - force: false From 5f6be3c48b661f74198b8fc85361d3ad6657880e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 11:47:41 -0700 Subject: [PATCH 0951/1705] Better comment handling in SQL regex, refs #1860 --- datasette/utils/__init__.py | 9 +++++---- tests/test_utils.py | 1 + 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 977a66d6..5acfb8b4 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -208,16 +208,16 @@ class InvalidSql(Exception): # Allow SQL to start with a /* */ or -- comment comment_re = ( # Start of string, then any amount of whitespace - r"^(\s*" + r"^\s*(" + # Comment that starts with -- and ends at a newline r"(?:\-\-.*?\n\s*)" + - 
# Comment that starts with /* and ends with */
-    r"|(?:/\*[\s\S]*?\*/)"
+    # Comment that starts with /* and ends with */ - but does not have */ in it
+    r"|(?:\/\*((?!\*\/)[\s\S])*\*\/)"
     +
     # Whitespace
-    r")*\s*"
+    r"\s*)*\s*"
 )

 allowed_sql_res = [
@@ -228,6 +228,7 @@ allowed_sql_res = [
     re.compile(comment_re + r"explain\s+with\b"),
     re.compile(comment_re + r"explain\s+query\s+plan\s+with\b"),
 ]
+
 allowed_pragmas = (
     "database_list",
     "foreign_key_list",
diff --git a/tests/test_utils.py b/tests/test_utils.py
index e89f1e6b..c1589107 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -142,6 +142,7 @@ def test_custom_json_encoder(obj, expected):
         "PRAGMA case_sensitive_like = true",
         "SELECT * FROM pragma_not_on_allow_list('idx52')",
         "/* This comment is not valid. select 1",
+        "/**/\nupdate foo set bar = 1\n/* test */ select 1",
     ],
 )
 def test_validate_sql_select_bad(bad_sql):

From d2ca13b699d441a201c55cb72ff96919d3cd22bf Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 27 Oct 2022 11:50:54 -0700
Subject: [PATCH 0952/1705] Add test for /* multi line */ comment, refs #1860

---
 tests/test_utils.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/test_utils.py b/tests/test_utils.py
index c1589107..8b64f865 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -174,6 +174,7 @@ def test_validate_sql_select_bad(bad_sql):
         " /* comment */\nselect 1",
         " /* comment */select 1",
         "/* comment */\n -- another\n /* one more */ select 1",
+        "/* This comment \n has multiple lines */\nselect 1",
     ],
 )
 def test_validate_sql_select_good(good_sql):

From a51608090b5ee37593078f71d18b33767ef3af79 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 27 Oct 2022 12:06:18 -0700
Subject: [PATCH 0956/1705] Slight tweak to insert row API design, refs #1851

https://github.com/simonw/datasette/issues/1851#issuecomment-1292997608
---
 datasette/views/table.py | 10 +++++-----
 docs/json_api.rst        |  4 ++--
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/datasette/views/table.py b/datasette/views/table.py
index 74d1c532..056b7b04 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -131,11 +131,11 @@ class TableView(DataView):
             # TODO: handle form-encoded data
             raise BadRequest("Must send JSON data")
         data = json.loads(await request.post_body())
-        if "row" not in data:
-            raise BadRequest('Must send "row" data')
-        row = data["row"]
+        if "insert" not in data:
+            raise BadRequest('Must send a "insert" key containing a dictionary')
+        row = data["insert"]
         if not isinstance(row, dict):
-            raise BadRequest("row must be a dictionary")
+            raise BadRequest("insert must be a dictionary")
         # Verify all columns exist
         columns = await db.table_columns(table_name)
         pks = await db.primary_keys(table_name)
@@ -165,7 +165,7 @@ class TableView(DataView):
         ).first()
         return Response.json(
             {
-                "row": dict(new_row),
+                "inserted_row": dict(new_row),
             },
             status=201,
         )
diff --git a/docs/json_api.rst b/docs/json_api.rst
index b339a738..2ed8a354 100644
--- a/docs/json_api.rst
+++ b/docs/json_api.rst
@@ -476,7 +476,7 @@ This requires the :ref:`permissions_insert_row` permission.
     Content-Type: application/json
     Authorization: Bearer dstok_
     {
-        "row": {
+        "insert": {
             "column1": "value1",
             "column2": "value2"
         }
     }
@@ -487,7 +487,7 @@ If successful, this will return a ``201`` status code and the newly inserted row
 ..
code-block:: json { - "row": { + "inserted_row": { "id": 1, "column1": "value1", "column2": "value2" From a2a5dff709c6f1676ac30b5e734c2763002562cf Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 12:08:26 -0700 Subject: [PATCH 0957/1705] Missing tests for insert row API, refs #1851 --- tests/test_api_write.py | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 tests/test_api_write.py diff --git a/tests/test_api_write.py b/tests/test_api_write.py new file mode 100644 index 00000000..86c221d0 --- /dev/null +++ b/tests/test_api_write.py @@ -0,0 +1,38 @@ +from datasette.app import Datasette +from datasette.utils import sqlite3 +import pytest +import time + + +@pytest.fixture +def ds_write(tmp_path_factory): + db_directory = tmp_path_factory.mktemp("dbs") + db_path = str(db_directory / "data.db") + db = sqlite3.connect(str(db_path)) + db.execute("vacuum") + db.execute("create table docs (id integer primary key, title text, score float)") + ds = Datasette([db_path]) + yield ds + db.close() + + +@pytest.mark.asyncio +async def test_write_row(ds_write): + token = "dstok_{}".format( + ds_write.sign( + {"a": "root", "token": "dstok", "t": int(time.time())}, namespace="token" + ) + ) + response = await ds_write.client.post( + "/data/docs", + json={"insert": {"title": "Test", "score": 1.0}}, + headers={ + "Authorization": "Bearer {}".format(token), + "Content-Type": "application/json", + }, + ) + expected_row = {"id": 1, "title": "Test", "score": 1.0} + assert response.status_code == 201 + assert response.json()["inserted_row"] == expected_row + rows = (await ds_write.get_database("data").execute("select * from docs")).rows + assert dict(rows[0]) == expected_row From 6e788b49edf4f842c0817f006eb9d865778eea5e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 13:17:18 -0700 Subject: [PATCH 0958/1705] New URL design /db/table/-/insert, refs #1851 --- datasette/app.py | 6 +++- datasette/views/table.py | 69 +++++++++++++++++++++++++++++++++++++++- docs/json_api.rst | 18 ++++++----- tests/test_api_write.py | 6 ++-- 4 files changed, 86 insertions(+), 13 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 894d7f0f..8bc5fe36 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -39,7 +39,7 @@ from .views.special import ( PermissionsDebugView, MessagesDebugView, ) -from .views.table import TableView +from .views.table import TableView, TableInsertView from .views.row import RowView from .renderer import json_renderer from .url_builder import Urls @@ -1262,6 +1262,10 @@ class Datasette: RowView.as_view(self), r"/(?P[^\/\.]+)/(?P
    [^/]+?)/(?P[^/]+?)(\.(?P\w+))?$", ) + add_route( + TableInsertView.as_view(self), + r"/(?P[^\/\.]+)/(?P
    [^\/\.]+)/-/insert$", + ) return [ # Compile any strings to regular expressions ((re.compile(pattern) if isinstance(pattern, str) else pattern), view) diff --git a/datasette/views/table.py b/datasette/views/table.py index 056b7b04..be3d4f93 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -30,7 +30,7 @@ from datasette.utils import ( ) from datasette.utils.asgi import BadRequest, Forbidden, NotFound, Response from datasette.filters import Filters -from .base import DataView, DatasetteError, ureg +from .base import BaseView, DataView, DatasetteError, ureg from .database import QueryView LINK_WITH_LABEL = ( @@ -1077,3 +1077,70 @@ async def display_columns_and_rows( } columns = [first_column] + columns return columns, cell_rows + + +class TableInsertView(BaseView): + name = "table-insert" + + def __init__(self, datasette): + self.ds = datasette + + async def post(self, request): + database_route = tilde_decode(request.url_vars["database"]) + try: + db = self.ds.get_database(route=database_route) + except KeyError: + raise NotFound("Database not found: {}".format(database_route)) + database_name = db.name + table_name = tilde_decode(request.url_vars["table"]) + # Table must exist (may handle table creation in the future) + db = self.ds.get_database(database_name) + if not await db.table_exists(table_name): + raise NotFound("Table not found: {}".format(table_name)) + # Must have insert-row permission + if not await self.ds.permission_allowed( + request.actor, "insert-row", resource=(database_name, table_name) + ): + raise Forbidden("Permission denied") + if request.headers.get("content-type") != "application/json": + # TODO: handle form-encoded data + raise BadRequest("Must send JSON data") + data = json.loads(await request.post_body()) + if "row" not in data: + raise BadRequest('Must send a "row" key containing a dictionary') + row = data["row"] + if not isinstance(row, dict): + raise BadRequest("row must be a dictionary") + # Verify all columns exist + columns = await db.table_columns(table_name) + pks = await db.primary_keys(table_name) + for key in row: + if key not in columns: + raise BadRequest("Column not found: {}".format(key)) + if key in pks: + raise BadRequest( + "Cannot insert into primary key column: {}".format(key) + ) + # Perform the insert + sql = "INSERT INTO [{table}] ({columns}) VALUES ({values})".format( + table=escape_sqlite(table_name), + columns=", ".join(escape_sqlite(c) for c in row), + values=", ".join("?" for c in row), + ) + cursor = await db.execute_write(sql, list(row.values())) + # Return the new row + rowid = cursor.lastrowid + new_row = ( + await db.execute( + "SELECT * FROM [{table}] WHERE rowid = ?".format( + table=escape_sqlite(table_name) + ), + [rowid], + ) + ).first() + return Response.json( + { + "inserted": [dict(new_row)], + }, + status=201, + ) diff --git a/docs/json_api.rst b/docs/json_api.rst index 2ed8a354..4a7961f2 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -463,7 +463,7 @@ The JSON write API Datasette provides a write API for JSON data. This is a POST-only API that requires an authenticated API token, see :ref:`CreateTokenView`. -.. _json_api_write_insert_row: +.. _TableInsertView: Inserting a single row ~~~~~~~~~~~~~~~~~~~~~~ @@ -472,11 +472,11 @@ This requires the :ref:`permissions_insert_row` permission. :: - POST //
    + POST //
    /-/insert Content-Type: application/json Authorization: Bearer dstok_ { - "insert": { + "row": { "column1": "value1", "column2": "value2" } @@ -487,9 +487,11 @@ If successful, this will return a ``201`` status code and the newly inserted row .. code-block:: json { - "inserted_row": { - "id": 1, - "column1": "value1", - "column2": "value2" - } + "inserted": [ + { + "id": 1, + "column1": "value1", + "column2": "value2" + } + ] } diff --git a/tests/test_api_write.py b/tests/test_api_write.py index 86c221d0..e8222e43 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -24,8 +24,8 @@ async def test_write_row(ds_write): ) ) response = await ds_write.client.post( - "/data/docs", - json={"insert": {"title": "Test", "score": 1.0}}, + "/data/docs/-/insert", + json={"row": {"title": "Test", "score": 1.0}}, headers={ "Authorization": "Bearer {}".format(token), "Content-Type": "application/json", @@ -33,6 +33,6 @@ async def test_write_row(ds_write): ) expected_row = {"id": 1, "title": "Test", "score": 1.0} assert response.status_code == 201 - assert response.json()["inserted_row"] == expected_row + assert response.json()["inserted"] == [expected_row] rows = (await ds_write.get_database("data").execute("select * from docs")).rows assert dict(rows[0]) == expected_row From b912d92b651c4f0b5137da924d135654511f0fe0 Mon Sep 17 00:00:00 2001 From: Forest Gregg Date: Thu, 27 Oct 2022 16:51:20 -0400 Subject: [PATCH 0959/1705] Make hash and size a lazy property (#1837) * use inspect data for hash and file size * make hash and cached_size lazy properties * move hash property near size --- datasette/database.py | 36 ++++++++++++++++++++++++------------ 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index d75bd70c..af1df0a8 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -39,7 +39,7 @@ class Database: self.memory_name = memory_name if memory_name is not None: self.is_memory = True - self.hash = None + self.cached_hash = None self.cached_size = None self._cached_table_counts = None self._write_thread = None @@ -47,14 +47,6 @@ class Database: # These are used when in non-threaded mode: self._read_connection = None self._write_connection = None - if not self.is_mutable and not self.is_memory: - if self.ds.inspect_data and self.ds.inspect_data.get(self.name): - self.hash = self.ds.inspect_data[self.name]["hash"] - self.cached_size = self.ds.inspect_data[self.name]["size"] - else: - p = Path(path) - self.hash = inspect_hash(p) - self.cached_size = p.stat().st_size @property def cached_table_counts(self): @@ -266,14 +258,34 @@ class Database: results = await self.execute_fn(sql_operation_in_thread) return results + @property + def hash(self): + if self.cached_hash is not None: + return self.cached_hash + elif self.is_mutable or self.is_memory: + return None + elif self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self.cached_hash = self.ds.inspect_data[self.name]["hash"] + return self.cached_hash + else: + p = Path(self.path) + self.cached_hash = inspect_hash(p) + return self.cached_hash + @property def size(self): - if self.is_memory: - return 0 if self.cached_size is not None: return self.cached_size - else: + elif self.is_memory: + return 0 + elif self.is_mutable: return Path(self.path).stat().st_size + elif self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self.cached_size = self.ds.inspect_data[self.name]["size"] + return self.cached_size + else: + self.cached_size = 
Path(self.path).stat().st_size + return self.cached_size async def table_counts(self, limit=10): if not self.is_mutable and self.cached_table_counts is not None: From 2c36e45447494cd7505440943367e29ec57c8e72 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 27 Oct 2022 13:51:45 -0700 Subject: [PATCH 0960/1705] Bump black from 22.8.0 to 22.10.0 (#1839) Bumps [black](https://github.com/psf/black) from 22.8.0 to 22.10.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/22.8.0...22.10.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index fe258adb..625557ae 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ setup( "pytest-xdist>=2.2.1", "pytest-asyncio>=0.17", "beautifulsoup4>=4.8.1", - "black==22.8.0", + "black==22.10.0", "blacken-docs==1.12.1", "pytest-timeout>=1.4.2", "trustme>=0.7", From e5e0459a0b60608cb5e9ff83f6b41f59e6cafdfd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 13:58:00 -0700 Subject: [PATCH 0961/1705] Release notes for 0.63, refs #1869 --- docs/changelog.rst | 44 +++++++++++++++++++++++++------------------- 1 file changed, 25 insertions(+), 19 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2255dcce..01957e4f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,36 +4,42 @@ Changelog ========= -.. _v0_63a1: +.. _v0_63: -0.63a1 (2022-10-23) -------------------- +0.63 (2022-10-27) +----------------- +Features +~~~~~~~~ + +- Now tested against Python 3.11. Docker containers used by ``datasette publish`` and ``datasette package`` both now use that version of Python. (:issue:`1853`) +- ``--load-extension`` option now supports entrypoints. Thanks, Alex Garcia. (`#1789 `__) +- Facet size can now be set per-table with the new ``facet_size`` table metadata option. (:issue:`1804`) +- The :ref:`setting_truncate_cells_html` setting now also affects long URLs in columns. (:issue:`1805`) +- The non-JavaScript SQL editor textarea now increases height to fit the SQL query. (:issue:`1786`) +- Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. (`#1794 `__) +- The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. (:issue:`1816`) +- SQL queries can now include leading SQL comments, using ``/* ... */`` or ``-- ...`` syntax. Thanks, Charles Nepote. (:issue:`1860`) - SQL query is now re-displayed when terminated with a time limit error. (:issue:`1819`) -- New documentation on :ref:`deploying_openrc` - thanks, Adam Simpson. (`#1825 `__) - The :ref:`inspect data ` mechanism is now used to speed up server startup - thanks, Forest Gregg. (:issue:`1834`) - In :ref:`config_dir` databases with filenames ending in ``.sqlite`` or ``.sqlite3`` are now automatically added to the Datasette instance. (:issue:`1646`) - Breadcrumb navigation display now respects the current user's permissions. (:issue:`1831`) -- Screenshots in the documentation are now maintained using `shot-scraper `__, as described in `Automating screenshots for the Datasette documentation using shot-scraper `__. 
(:issue:`1844`) -- The :ref:`datasette.check_visibility() ` method now accepts an optional ``permissions=`` list, allowing it to take multiple permissions into account at once when deciding if something should be shown as public or private. This has been used to correctly display padlock icons in more places in the Datasette interface. (:issue:`1829`) - -.. _v0_63a0: - -0.63a0 (2022-09-26) -------------------- +Plugin hooks and internals +~~~~~~~~~~~~~~~~~~~~~~~~~~ - The :ref:`plugin_hook_prepare_jinja2_environment` plugin hook now accepts an optional ``datasette`` argument. Hook implementations can also now return an ``async`` function which will be awaited automatically. (:issue:`1809`) -- ``--load-extension`` option now supports entrypoints. Thanks, Alex Garcia. (`#1789 `__) -- New tutorial: `Cleaning data with sqlite-utils and Datasette `__. -- Facet size can now be set per-table with the new ``facet_size`` table metadata option. (:issue:`1804`) -- ``truncate_cells_html`` setting now also affects long URLs in columns. (:issue:`1805`) - ``Database(is_mutable=)`` now defaults to ``True``. (:issue:`1808`) -- Non-JavaScript textarea now increases height to fit the SQL query. (:issue:`1786`) -- More detailed command descriptions on the :ref:`CLI reference ` page. (:issue:`1787`) +- The :ref:`datasette.check_visibility() ` method now accepts an optional ``permissions=`` list, allowing it to take multiple permissions into account at once when deciding if something should be shown as public or private. This has been used to correctly display padlock icons in more places in the Datasette interface. (:issue:`1829`) - Datasette no longer enforces upper bounds on its dependencies. (:issue:`1800`) -- Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. (`#1794 `__) -- The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. (:issue:`1816`) + +Documentation +~~~~~~~~~~~~~ + +- New tutorial: `Cleaning data with sqlite-utils and Datasette `__. +- Screenshots in the documentation are now maintained using `shot-scraper `__, as described in `Automating screenshots for the Datasette documentation using shot-scraper `__. (:issue:`1844`) +- More detailed command descriptions on the :ref:`CLI reference ` page. (:issue:`1787`) +- New documentation on :ref:`deploying_openrc` - thanks, Adam Simpson. (`#1825 `__) .. _v0_62: From bf00b0b59b6692bdec597ac9db4e0b497c5a47b4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 15:11:26 -0700 Subject: [PATCH 0962/1705] Release 0.63 Refs #1646, #1786, #1787, #1789, #1794, #1800, #1804, #1805, #1808, #1809, #1816, #1819, #1825, #1829, #1831, #1834, #1844, #1853, #1860 Closes #1869 --- datasette/version.py | 2 +- docs/changelog.rst | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index eb36da45..ac012640 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.63a1" +__version__ = "0.63" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 01957e4f..f573afb3 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -9,6 +9,8 @@ Changelog 0.63 (2022-10-27) ----------------- +See `Datasette 0.63: The annotated release notes `__ for more background on the changes in this release. 
+
 Features
 ~~~~~~~~

From c9b5f5d598e7f85cd3e1ce020351a27da334408b Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 27 Oct 2022 17:58:36 -0700
Subject: [PATCH 0967/1705] Depend on sqlite-utils>=3.30

Decided to use the most recent version in case I decide later to use the
flatten() utility function.
Refs #1850 --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 625557ae..99e2a4ad 100644 --- a/setup.py +++ b/setup.py @@ -57,6 +57,7 @@ setup( "PyYAML>=5.3", "mergedeep>=1.1.1", "itsdangerous>=1.1", + "sqlite-utils>=3.30", ], entry_points=""" [console_scripts] From c35859ae3df163406f1a1895ccf9803e933b2d8e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 29 Oct 2022 23:03:45 -0700 Subject: [PATCH 0968/1705] API for bulk inserts, closes #1866 --- datasette/app.py | 5 ++ datasette/views/table.py | 136 +++++++++++++++++++++---------- docs/cli-reference.rst | 2 + docs/json_api.rst | 48 ++++++++++- docs/settings.rst | 11 +++ tests/test_api.py | 1 + tests/test_api_write.py | 168 +++++++++++++++++++++++++++++++++++++-- 7 files changed, 320 insertions(+), 51 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 8bc5fe36..f80d3792 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -99,6 +99,11 @@ SETTINGS = ( 1000, "Maximum rows that can be returned from a table or custom query", ), + Setting( + "max_insert_rows", + 100, + "Maximum rows that can be inserted at a time using the bulk insert API", + ), Setting( "num_sql_threads", 3, diff --git a/datasette/views/table.py b/datasette/views/table.py index be3d4f93..fd203036 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -30,6 +30,7 @@ from datasette.utils import ( ) from datasette.utils.asgi import BadRequest, Forbidden, NotFound, Response from datasette.filters import Filters +import sqlite_utils from .base import BaseView, DataView, DatasetteError, ureg from .database import QueryView @@ -1085,62 +1086,109 @@ class TableInsertView(BaseView): def __init__(self, datasette): self.ds = datasette + async def _validate_data(self, request, db, table_name): + errors = [] + + def _errors(errors): + return None, errors, {} + + if request.headers.get("content-type") != "application/json": + # TODO: handle form-encoded data + return _errors(["Invalid content-type, must be application/json"]) + body = await request.post_body() + try: + data = json.loads(body) + except json.JSONDecodeError as e: + return _errors(["Invalid JSON: {}".format(e)]) + if not isinstance(data, dict): + return _errors(["JSON must be a dictionary"]) + keys = data.keys() + # keys must contain "row" or "rows" + if "row" not in keys and "rows" not in keys: + return _errors(['JSON must have one or other of "row" or "rows"']) + rows = [] + if "row" in keys: + if "rows" in keys: + return _errors(['Cannot use "row" and "rows" at the same time']) + row = data["row"] + if not isinstance(row, dict): + return _errors(['"row" must be a dictionary']) + rows = [row] + data["return_rows"] = True + else: + rows = data["rows"] + if not isinstance(rows, list): + return _errors(['"rows" must be a list']) + for row in rows: + if not isinstance(row, dict): + return _errors(['"rows" must be a list of dictionaries']) + # Does this exceed max_insert_rows? 
+ max_insert_rows = self.ds.setting("max_insert_rows") + if len(rows) > max_insert_rows: + return _errors( + ["Too many rows, maximum allowed is {}".format(max_insert_rows)] + ) + # Validate columns of each row + columns = await db.table_columns(table_name) + # TODO: There are cases where pks are OK, if not using auto-incrementing pk + pks = await db.primary_keys(table_name) + allowed_columns = set(columns) - set(pks) + for i, row in enumerate(rows): + invalid_columns = set(row.keys()) - allowed_columns + if invalid_columns: + errors.append( + "Row {} has invalid columns: {}".format( + i, ", ".join(sorted(invalid_columns)) + ) + ) + if errors: + return _errors(errors) + extra = {key: data[key] for key in data if key not in ("rows", "row")} + return rows, errors, extra + async def post(self, request): + def _error(messages, status=400): + return Response.json({"ok": False, "errors": messages}, status=status) + database_route = tilde_decode(request.url_vars["database"]) try: db = self.ds.get_database(route=database_route) except KeyError: - raise NotFound("Database not found: {}".format(database_route)) + return _error(["Database not found: {}".format(database_route)], 404) database_name = db.name table_name = tilde_decode(request.url_vars["table"]) + # Table must exist (may handle table creation in the future) db = self.ds.get_database(database_name) if not await db.table_exists(table_name): - raise NotFound("Table not found: {}".format(table_name)) + return _error(["Table not found: {}".format(table_name)], 404) # Must have insert-row permission if not await self.ds.permission_allowed( request.actor, "insert-row", resource=(database_name, table_name) ): - raise Forbidden("Permission denied") - if request.headers.get("content-type") != "application/json": - # TODO: handle form-encoded data - raise BadRequest("Must send JSON data") - data = json.loads(await request.post_body()) - if "row" not in data: - raise BadRequest('Must send a "row" key containing a dictionary') - row = data["row"] - if not isinstance(row, dict): - raise BadRequest("row must be a dictionary") - # Verify all columns exist - columns = await db.table_columns(table_name) - pks = await db.primary_keys(table_name) - for key in row: - if key not in columns: - raise BadRequest("Column not found: {}".format(key)) - if key in pks: - raise BadRequest( - "Cannot insert into primary key column: {}".format(key) + return _error(["Permission denied"], 403) + rows, errors, extra = await self._validate_data(request, db, table_name) + if errors: + return _error(errors, 400) + + should_return = bool(extra.get("return_rows", False)) + # Insert rows + def insert_rows(conn): + table = sqlite_utils.Database(conn)[table_name] + if should_return: + rowids = [] + for row in rows: + rowids.append(table.insert(row).last_rowid) + return list( + table.rows_where( + "rowid in ({})".format(",".join("?" for _ in rowids)), rowids + ) ) - # Perform the insert - sql = "INSERT INTO [{table}] ({columns}) VALUES ({values})".format( - table=escape_sqlite(table_name), - columns=", ".join(escape_sqlite(c) for c in row), - values=", ".join("?" 
for c in row),
-        )
-        cursor = await db.execute_write(sql, list(row.values()))
-        # Return the new row
-        rowid = cursor.lastrowid
-        new_row = (
-            await db.execute(
-                "SELECT * FROM [{table}] WHERE rowid = ?".format(
-                    table=escape_sqlite(table_name)
-                ),
-                [rowid],
-            )
-        ).first()
-        return Response.json(
-            {
-                "inserted": [dict(new_row)],
-            },
-            status=201,
-        )
+            else:
+                table.insert_all(rows)
+
+        rows = await db.execute_write_fn(insert_rows)
+        result = {"ok": True}
+        if should_return:
+            result["inserted"] = rows
+        return Response.json(result, status=201)
diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst
index 56156568..649a3dcd 100644
--- a/docs/cli-reference.rst
+++ b/docs/cli-reference.rst
@@ -213,6 +213,8 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam
                                  (default=100)
   max_returned_rows              Maximum rows that can be returned from a table or
                                  custom query (default=1000)
+  max_insert_rows                Maximum rows that can be inserted at a time using
+                                 the bulk insert API (default=100)
   num_sql_threads                Number of threads in the thread pool for
                                  executing SQLite queries (default=3)
   sql_time_limit_ms              Time limit for a SQL query in milliseconds
diff --git a/docs/json_api.rst b/docs/json_api.rst
index 4a7961f2..01558c23 100644
--- a/docs/json_api.rst
+++ b/docs/json_api.rst
@@ -465,11 +465,13 @@ Datasette provides a write API for JSON data. This is a POST-only API that requi
 .. _TableInsertView:

-Inserting a single row
-~~~~~~~~~~~~~~~~~~~~~~
+Inserting rows
+~~~~~~~~~~~~~~

 This requires the :ref:`permissions_insert_row` permission.

+A single row can be inserted using the ``"row"`` key:
+
 ::

     POST /<database>/<table>
    /-/insert @@ -495,3 +497,45 @@ If successful, this will return a ``201`` status code and the newly inserted row } ] } + +To insert multiple rows at a time, use the same API method but send a list of dictionaries as the ``"rows"`` key: + +:: + + POST //
    /-/insert + Content-Type: application/json + Authorization: Bearer dstok_ + { + "rows": [ + { + "column1": "value1", + "column2": "value2" + }, + { + "column1": "value3", + "column2": "value4" + } + ] + } + +If successful, this will return a ``201`` status code and an empty ``{}`` response body. + +To return the newly inserted rows, add the ``"return_rows": true`` key to the request body: + +.. code-block:: json + + { + "rows": [ + { + "column1": "value1", + "column2": "value2" + }, + { + "column1": "value3", + "column2": "value4" + } + ], + "return_rows": true + } + +This will return the same ``"inserted"`` key as the single row example above. There is a small performance penalty for using this option. diff --git a/docs/settings.rst b/docs/settings.rst index a990c78c..b86b18bd 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -96,6 +96,17 @@ You can increase or decrease this limit like so:: datasette mydatabase.db --setting max_returned_rows 2000 +.. _setting_max_insert_rows: + +max_insert_rows +~~~~~~~~~~~~~~~ + +Maximum rows that can be inserted at a time using the bulk insert API, see :ref:`TableInsertView`. Defaults to 100. + +You can increase or decrease this limit like so:: + + datasette mydatabase.db --setting max_insert_rows 1000 + .. _setting_num_sql_threads: num_sql_threads diff --git a/tests/test_api.py b/tests/test_api.py index fc171421..ebd675b9 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -804,6 +804,7 @@ def test_settings_json(app_client): "facet_suggest_time_limit_ms": 50, "facet_time_limit_ms": 200, "max_returned_rows": 100, + "max_insert_rows": 100, "sql_time_limit_ms": 200, "allow_download": True, "allow_signed_tokens": True, diff --git a/tests/test_api_write.py b/tests/test_api_write.py index e8222e43..4a5a58aa 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -18,11 +18,7 @@ def ds_write(tmp_path_factory): @pytest.mark.asyncio async def test_write_row(ds_write): - token = "dstok_{}".format( - ds_write.sign( - {"a": "root", "token": "dstok", "t": int(time.time())}, namespace="token" - ) - ) + token = write_token(ds_write) response = await ds_write.client.post( "/data/docs/-/insert", json={"row": {"title": "Test", "score": 1.0}}, @@ -36,3 +32,165 @@ async def test_write_row(ds_write): assert response.json()["inserted"] == [expected_row] rows = (await ds_write.get_database("data").execute("select * from docs")).rows assert dict(rows[0]) == expected_row + + +@pytest.mark.asyncio +@pytest.mark.parametrize("return_rows", (True, False)) +async def test_write_rows(ds_write, return_rows): + token = write_token(ds_write) + data = {"rows": [{"title": "Test {}".format(i), "score": 1.0} for i in range(20)]} + if return_rows: + data["return_rows"] = True + response = await ds_write.client.post( + "/data/docs/-/insert", + json=data, + headers={ + "Authorization": "Bearer {}".format(token), + "Content-Type": "application/json", + }, + ) + assert response.status_code == 201 + actual_rows = [ + dict(r) + for r in ( + await ds_write.get_database("data").execute("select * from docs") + ).rows + ] + assert len(actual_rows) == 20 + assert actual_rows == [ + {"id": i + 1, "title": "Test {}".format(i), "score": 1.0} for i in range(20) + ] + assert response.json()["ok"] is True + if return_rows: + assert response.json()["inserted"] == actual_rows + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "path,input,special_case,expected_status,expected_errors", + ( + ( + "/data2/docs/-/insert", + {}, + None, + 404, + ["Database not found: data2"], + 
), + ( + "/data/docs2/-/insert", + {}, + None, + 404, + ["Table not found: docs2"], + ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"} for i in range(10)]}, + "bad_token", + 403, + ["Permission denied"], + ), + ( + "/data/docs/-/insert", + {}, + "invalid_json", + 400, + [ + "Invalid JSON: Expecting property name enclosed in double quotes: line 1 column 2 (char 1)" + ], + ), + ( + "/data/docs/-/insert", + {}, + "invalid_content_type", + 400, + ["Invalid content-type, must be application/json"], + ), + ( + "/data/docs/-/insert", + [], + None, + 400, + ["JSON must be a dictionary"], + ), + ( + "/data/docs/-/insert", + {"row": "blah"}, + None, + 400, + ['"row" must be a dictionary'], + ), + ( + "/data/docs/-/insert", + {"blah": "blah"}, + None, + 400, + ['JSON must have one or other of "row" or "rows"'], + ), + ( + "/data/docs/-/insert", + {"rows": "blah"}, + None, + 400, + ['"rows" must be a list'], + ), + ( + "/data/docs/-/insert", + {"rows": ["blah"]}, + None, + 400, + ['"rows" must be a list of dictionaries'], + ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"} for i in range(101)]}, + None, + 400, + ["Too many rows, maximum allowed is 100"], + ), + # Validate columns of each row + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test", "bad": 1, "worse": 2} for i in range(2)]}, + None, + 400, + [ + "Row 0 has invalid columns: bad, worse", + "Row 1 has invalid columns: bad, worse", + ], + ), + ), +) +async def test_write_row_errors( + ds_write, path, input, special_case, expected_status, expected_errors +): + token = write_token(ds_write) + if special_case == "bad_token": + token += "bad" + kwargs = dict( + json=input, + headers={ + "Authorization": "Bearer {}".format(token), + "Content-Type": "text/plain" + if special_case == "invalid_content_type" + else "application/json", + }, + ) + if special_case == "invalid_json": + del kwargs["json"] + kwargs["content"] = "{bad json" + response = await ds_write.client.post( + path, + **kwargs, + ) + assert response.status_code == expected_status + assert response.json()["ok"] is False + assert response.json()["errors"] == expected_errors + + +def write_token(ds): + return "dstok_{}".format( + ds.sign( + {"a": "root", "token": "dstok", "t": int(time.time())}, namespace="token" + ) + ) From f6bf2d8045cc239fe34357342bff1440561c8909 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 29 Oct 2022 23:20:11 -0700 Subject: [PATCH 0969/1705] Initial prototype of API explorer at /-/api, refs #1871 --- datasette/app.py | 5 ++ datasette/templates/api_explorer.html | 73 +++++++++++++++++++++++++++ datasette/views/special.py | 8 +++ tests/test_docs.py | 2 +- 4 files changed, 87 insertions(+), 1 deletion(-) create mode 100644 datasette/templates/api_explorer.html diff --git a/datasette/app.py b/datasette/app.py index f80d3792..c3d802a4 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -33,6 +33,7 @@ from .views.special import ( JsonDataView, PatternPortfolioView, AuthTokenView, + ApiExplorerView, CreateTokenView, LogoutView, AllowDebugView, @@ -1235,6 +1236,10 @@ class Datasette: CreateTokenView.as_view(self), r"/-/create-token$", ) + add_route( + ApiExplorerView.as_view(self), + r"/-/api$", + ) add_route( LogoutView.as_view(self), r"/-/logout$", diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html new file mode 100644 index 00000000..034bee60 --- /dev/null +++ b/datasette/templates/api_explorer.html @@ -0,0 +1,73 @@ +{% extends "base.html" %} + +{% block title %}API Explorer{% endblock %} 
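Taken together, the ``ds_write`` fixture, the ``write_token()`` helper and the tests above trace the complete write-API round trip: mint a signed ``dstok_`` token for the root actor, then POST a ``"row"`` (or ``"rows"``) body to ``/<database>/<table>/-/insert``. A condensed sketch of that flow (assumes pytest-asyncio; the scratch ``data.db``, test name and schema are illustrative, not part of these commits):

.. code-block:: python

    import sqlite3
    import time

    import pytest
    from datasette.app import Datasette


    @pytest.mark.asyncio
    async def test_insert_with_signed_token(tmp_path):
        # Scratch database standing in for the ds_write fixture above
        db_path = str(tmp_path / "data.db")
        conn = sqlite3.connect(db_path)
        conn.execute(
            "create table docs (id integer primary key, title text, score float)"
        )
        conn.commit()
        conn.close()
        ds = Datasette([db_path])
        await ds.invoke_startup()
        # Sign a root-actor write token, exactly as write_token() does above
        token = "dstok_{}".format(
            ds.sign(
                {"a": "root", "token": "dstok", "t": int(time.time())},
                namespace="token",
            )
        )
        response = await ds.client.post(
            "/data/docs/-/insert",
            json={"row": {"title": "Example", "score": 0.5}},
            headers={
                "Authorization": "Bearer {}".format(token),
                "Content-Type": "application/json",
            },
        )
        assert response.status_code == 201
        # At this point in the series the response body uses an "inserted"
        # key; a later commit below renames it to "rows"
        assert response.json()["inserted"] == [
            {"id": 1, "title": "Example", "score": 0.5}
        ]

The token payload packs the actor id (``a``), the token type and an issue timestamp, signed with the instance secret, so the server can verify it without storing anything.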
+ +{% block content %} + +

    API Explorer

    + +

    Use this tool to try out the Datasette write API.

    + +{% if errors %} + {% for error in errors %} +

    {{ error }}

    + {% endfor %} +{% endif %} + + +
    + + +
    +
    + + +
    +
    + +
    +

    + + + + +{% endblock %} diff --git a/datasette/views/special.py b/datasette/views/special.py index b754a2f0..9922a621 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -235,3 +235,11 @@ class CreateTokenView(BaseView): "token_bits": token_bits, }, ) + + +class ApiExplorerView(BaseView): + name = "api_explorer" + has_json_alternate = False + + async def get(self, request): + return await self.render(["api_explorer.html"], request) diff --git a/tests/test_docs.py b/tests/test_docs.py index cd5a6c13..e9b813fe 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -62,7 +62,7 @@ def documented_views(): if first_word.endswith("View"): view_labels.add(first_word) # We deliberately don't document these: - view_labels.update(("PatternPortfolioView", "AuthTokenView")) + view_labels.update(("PatternPortfolioView", "AuthTokenView", "ApiExplorerView")) return view_labels From 9eb9ffae3ddd4e8ff0b713bf6fd6a0afed3368d7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 30 Oct 2022 13:09:55 -0700 Subject: [PATCH 0970/1705] Drop API token requirement from API explorer, refs #1871 --- datasette/default_permissions.py | 9 +++++++++ datasette/templates/api_explorer.html | 13 ++++--------- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 87684e2a..151ba2b5 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -131,3 +131,12 @@ def register_commands(cli): if debug: click.echo("\nDecoded:\n") click.echo(json.dumps(ds.unsign(token, namespace="token"), indent=2)) + + +@hookimpl +def skip_csrf(scope): + # Skip CSRF check for requests with content-type: application/json + if scope["type"] == "http": + headers = scope.get("headers") or {} + if dict(headers).get(b"content-type") == b"application/json": + return True diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index 034bee60..01b182d8 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -15,16 +15,13 @@ {% endif %}
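One detail worth noting in the ``skip_csrf`` hook added here: it receives the raw ASGI scope, in which headers are a list of ``(bytes, bytes)`` tuples, so the content-type probe has to use bytes keys. A minimal, hand-built illustration (the scope below is invented for demonstration, not taken from the test suite):

.. code-block:: python

    # A hand-built ASGI scope shaped like the one skip_csrf receives
    scope = {
        "type": "http",
        "headers": [
            (b"host", b"localhost"),
            (b"content-type", b"application/json"),
        ],
    }

    # The same lookup the hook performs: dict() over the header pairs,
    # indexed with a bytes key
    assert dict(scope["headers"]).get(b"content-type") == b"application/json"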
    -
    - - -
    - +
    -
    - +
    + +

    @@ -46,7 +43,6 @@ form.addEventListener("submit", (ev) => { var formData = new FormData(form); var json = formData.get('json'); var path = formData.get('path'); - var token = formData.get('token'); // Validate JSON try { var data = JSON.parse(json); @@ -60,7 +56,6 @@ form.addEventListener("submit", (ev) => { body: json, headers: { 'Content-Type': 'application/json', - 'Authorization': `Bearer ${token}` } }).then(r => r.json()).then(r => { alert(JSON.stringify(r, null, 2)); From fedbfcc36873366143195d8fe124e1859bf88346 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 30 Oct 2022 14:49:07 -0700 Subject: [PATCH 0971/1705] Neater display of output and errors in API explorer, refs #1871 --- datasette/templates/api_explorer.html | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index 01b182d8..38fdb7bc 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -26,6 +26,12 @@

+
+

From 93a02281dad2f23da84210f6ae9c63777ad8af5e Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 1 Nov 2022 10:22:26 -0700
Subject: [PATCH 0976/1705] Show interrupted query in resizing textarea, closes #1876

---
 datasette/views/base.py | 6 +++++-
 tests/test_api.py | 6 +++++-
 tests/test_html.py | 2 +-
 3 files changed, 11 insertions(+), 3 deletions(-)

diff --git a/datasette/views/base.py b/datasette/views/base.py
index 67aa3a42..6b01fdd2 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -378,7 +378,11 @@ class DataView(BaseView):

    SQL query took too long. The time limit is controlled by the sql_time_limit_ms configuration option.

    -
    {}
    + + """.format( escape(ex.sql) ) diff --git a/tests/test_api.py b/tests/test_api.py index ebd675b9..de0223e2 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -662,7 +662,11 @@ def test_sql_time_limit(app_client_shorter_time_limit): "

    SQL query took too long. The time limit is controlled by the\n" 'sql_time_limit_ms\n' "configuration option.

    \n" - "
    select sleep(0.5)
    " + '\n' + "" ), "status": 400, "title": "SQL Interrupted", diff --git a/tests/test_html.py b/tests/test_html.py index 4b394199..7cfe9d90 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -172,7 +172,7 @@ def test_sql_time_limit(app_client_shorter_time_limit): """ sql_time_limit_ms """.strip(), - "
    select sleep(0.5)
    ", + '', ] for expected_html_fragment in expected_html_fragments: assert expected_html_fragment in response.text From 9bec7c38eb93cde5afb16df9bdd96aea2a5b0459 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Nov 2022 11:07:59 -0700 Subject: [PATCH 0977/1705] ignore and replace options for bulk inserts, refs #1873 Also removed the rule that you cannot include primary keys in the rows you insert. And added validation that catches invalid parameters in the incoming JSON. And renamed "inserted" to "rows" in the returned JSON for return_rows: true --- datasette/views/table.py | 41 ++++++++++++++------ docs/json_api.rst | 4 +- tests/test_api_write.py | 83 ++++++++++++++++++++++++++++++++++++++-- 3 files changed, 111 insertions(+), 17 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 1e3d566e..7692a4e3 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -1107,6 +1107,7 @@ class TableInsertView(BaseView): if not isinstance(data, dict): return _errors(["JSON must be a dictionary"]) keys = data.keys() + # keys must contain "row" or "rows" if "row" not in keys and "rows" not in keys: return _errors(['JSON must have one or other of "row" or "rows"']) @@ -1126,19 +1127,31 @@ class TableInsertView(BaseView): for row in rows: if not isinstance(row, dict): return _errors(['"rows" must be a list of dictionaries']) + # Does this exceed max_insert_rows? max_insert_rows = self.ds.setting("max_insert_rows") if len(rows) > max_insert_rows: return _errors( ["Too many rows, maximum allowed is {}".format(max_insert_rows)] ) + + # Validate other parameters + extras = { + key: value for key, value in data.items() if key not in ("row", "rows") + } + valid_extras = {"return_rows", "ignore", "replace"} + invalid_extras = extras.keys() - valid_extras + if invalid_extras: + return _errors( + ['Invalid parameter: "{}"'.format('", "'.join(sorted(invalid_extras)))] + ) + if extras.get("ignore") and extras.get("replace"): + return _errors(['Cannot use "ignore" and "replace" at the same time']) + # Validate columns of each row - columns = await db.table_columns(table_name) - # TODO: There are cases where pks are OK, if not using auto-incrementing pk - pks = await db.primary_keys(table_name) - allowed_columns = set(columns) - set(pks) + columns = set(await db.table_columns(table_name)) for i, row in enumerate(rows): - invalid_columns = set(row.keys()) - allowed_columns + invalid_columns = set(row.keys()) - columns if invalid_columns: errors.append( "Row {} has invalid columns: {}".format( @@ -1147,8 +1160,7 @@ class TableInsertView(BaseView): ) if errors: return _errors(errors) - extra = {key: data[key] for key in data if key not in ("rows", "row")} - return rows, errors, extra + return rows, errors, extras async def post(self, request): database_route = tilde_decode(request.url_vars["database"]) @@ -1168,18 +1180,23 @@ class TableInsertView(BaseView): request.actor, "insert-row", resource=(database_name, table_name) ): return _error(["Permission denied"], 403) - rows, errors, extra = await self._validate_data(request, db, table_name) + rows, errors, extras = await self._validate_data(request, db, table_name) if errors: return _error(errors, 400) - should_return = bool(extra.get("return_rows", False)) + ignore = extras.get("ignore") + replace = extras.get("replace") + + should_return = bool(extras.get("return_rows", False)) # Insert rows def insert_rows(conn): table = sqlite_utils.Database(conn)[table_name] if should_return: rowids = [] for row in rows: - 
rowids.append(table.insert(row).last_rowid) + rowids.append( + table.insert(row, ignore=ignore, replace=replace).last_rowid + ) return list( table.rows_where( "rowid in ({})".format(",".join("?" for _ in rowids)), @@ -1187,12 +1204,12 @@ class TableInsertView(BaseView): ) ) else: - table.insert_all(rows) + table.insert_all(rows, ignore=ignore, replace=replace) rows = await db.execute_write_fn(insert_rows) result = {"ok": True} if should_return: - result["inserted"] = rows + result["rows"] = rows return Response.json(result, status=201) diff --git a/docs/json_api.rst b/docs/json_api.rst index da4500ab..34c13211 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -489,7 +489,7 @@ If successful, this will return a ``201`` status code and the newly inserted row .. code-block:: json { - "inserted": [ + "rows": [ { "id": 1, "column1": "value1", @@ -538,7 +538,7 @@ To return the newly inserted rows, add the ``"return_rows": true`` key to the re "return_rows": true } -This will return the same ``"inserted"`` key as the single row example above. There is a small performance penalty for using this option. +This will return the same ``"rows"`` key as the single row example above. There is a small performance penalty for using this option. .. _RowDeleteView: diff --git a/tests/test_api_write.py b/tests/test_api_write.py index 1cfba104..d0b0f324 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -37,7 +37,7 @@ async def test_write_row(ds_write): ) expected_row = {"id": 1, "title": "Test", "score": 1.0} assert response.status_code == 201 - assert response.json()["inserted"] == [expected_row] + assert response.json()["rows"] == [expected_row] rows = (await ds_write.get_database("data").execute("select * from docs")).rows assert dict(rows[0]) == expected_row @@ -70,7 +70,7 @@ async def test_write_rows(ds_write, return_rows): ] assert response.json()["ok"] is True if return_rows: - assert response.json()["inserted"] == actual_rows + assert response.json()["rows"] == actual_rows @pytest.mark.asyncio @@ -156,6 +156,27 @@ async def test_write_rows(ds_write, return_rows): 400, ["Too many rows, maximum allowed is 100"], ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"}], "ignore": True, "replace": True}, + None, + 400, + ['Cannot use "ignore" and "replace" at the same time'], + ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"}], "invalid_param": True}, + None, + 400, + ['Invalid parameter: "invalid_param"'], + ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"}], "one": True, "two": True}, + None, + 400, + ['Invalid parameter: "one", "two"'], + ), # Validate columns of each row ( "/data/docs/-/insert", @@ -196,6 +217,62 @@ async def test_write_row_errors( assert response.json()["errors"] == expected_errors +@pytest.mark.asyncio +@pytest.mark.parametrize( + "ignore,replace,expected_rows", + ( + ( + True, + False, + [ + {"id": 1, "title": "Exists", "score": None}, + ], + ), + ( + False, + True, + [ + {"id": 1, "title": "One", "score": None}, + ], + ), + ), +) +@pytest.mark.parametrize("should_return", (True, False)) +async def test_insert_ignore_replace( + ds_write, ignore, replace, expected_rows, should_return +): + await ds_write.get_database("data").execute_write( + "insert into docs (id, title) values (1, 'Exists')" + ) + token = write_token(ds_write) + data = {"rows": [{"id": 1, "title": "One"}]} + if ignore: + data["ignore"] = True + if replace: + data["replace"] = True + if should_return: + data["return_rows"] = True + response = await 
ds_write.client.post( + "/data/docs/-/insert", + json=data, + headers={ + "Authorization": "Bearer {}".format(token), + "Content-Type": "application/json", + }, + ) + assert response.status_code == 201 + actual_rows = [ + dict(r) + for r in ( + await ds_write.get_database("data").execute("select * from docs") + ).rows + ] + assert actual_rows == expected_rows + assert response.json()["ok"] is True + if should_return: + assert response.json()["rows"] == expected_rows + + @pytest.mark.asyncio @pytest.mark.parametrize("scenario", ("no_token", "no_perm", "bad_table", "has_perm")) async def test_delete_row(ds_write, scenario): @@ -217,7 +294,7 @@ async def test_delete_row(ds_write, scenario): }, ) assert insert_response.status_code == 201 - pk = insert_response.json()["inserted"][0]["id"] + pk = insert_response.json()["rows"][0]["id"] path = "/data/{}/{}/-/delete".format( "docs" if scenario != "bad_table" else "bad_table", pk From 497290beaf32e6b779f9683ef15f1c5bc142a41a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Nov 2022 12:59:17 -0700 Subject: [PATCH 0978/1705] Handle database errors in /-/insert, refs #1866, #1873 Also improved API explorer to show HTTP status of response, refs #1871 --- datasette/templates/api_explorer.html | 14 +++++++++----- datasette/views/table.py | 5 ++++- tests/test_api_write.py | 11 +++++++++++ 3 files changed, 24 insertions(+), 6 deletions(-) diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index 38fdb7bc..93bacde3 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -27,7 +27,8 @@ @@ -64,12 +65,15 @@ form.addEventListener("submit", (ev) => { headers: { 'Content-Type': 'application/json', } - }).then(r => r.json()).then(r => { + }).then(r => { + document.getElementById('response-status').textContent = r.status; + return r.json(); + }).then(data => { var errorList = output.querySelector('.errors'); - if (r.errors) { + if (data.errors) { errorList.style.display = 'block'; errorList.innerHTML = ''; - r.errors.forEach(error => { + data.errors.forEach(error => { var li = document.createElement('li'); li.textContent = error; errorList.appendChild(li); @@ -77,7 +81,7 @@ form.addEventListener("submit", (ev) => { } else { errorList.style.display = 'none'; } - output.querySelector('pre').innerText = JSON.stringify(r, null, 2); + output.querySelector('pre').innerText = JSON.stringify(data, null, 2); output.style.display = 'block'; }).catch(err => { alert("Error: " + err); diff --git a/datasette/views/table.py b/datasette/views/table.py index 7692a4e3..61227206 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -1206,7 +1206,10 @@ class TableInsertView(BaseView): else: table.insert_all(rows, ignore=ignore, replace=replace) - rows = await db.execute_write_fn(insert_rows) + try: + rows = await db.execute_write_fn(insert_rows) + except Exception as e: + return _error([str(e)]) result = {"ok": True} if should_return: result["rows"] = rows diff --git a/tests/test_api_write.py b/tests/test_api_write.py index d0b0f324..0b567f48 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -156,6 +156,13 @@ async def test_write_rows(ds_write, return_rows): 400, ["Too many rows, maximum allowed is 100"], ), + ( + "/data/docs/-/insert", + {"rows": [{"id": 1, "title": "Test"}]}, + "duplicate_id", + 400, + ["UNIQUE constraint failed: docs.id"], + ), ( "/data/docs/-/insert", {"rows": [{"title": "Test"}], "ignore": True, "replace": True}, @@ -194,6 +201,10 @@ 
async def test_write_row_errors( ds_write, path, input, special_case, expected_status, expected_errors ): token = write_token(ds_write) + if special_case == "duplicate_id": + await ds_write.get_database("data").execute_write( + "insert into docs (id) values (1)" + ) if special_case == "bad_token": token += "bad" kwargs = dict( From 0b166befc0096fca30d71e19608a928d59c331a4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Nov 2022 17:31:22 -0700 Subject: [PATCH 0979/1705] API explorer can now do GET, has JSON syntax highlighting Refs #1871 --- .../static/json-format-highlight-1.0.1.js | 43 +++++++++++ datasette/templates/api_explorer.html | 77 +++++++++++++++---- 2 files changed, 103 insertions(+), 17 deletions(-) create mode 100644 datasette/static/json-format-highlight-1.0.1.js diff --git a/datasette/static/json-format-highlight-1.0.1.js b/datasette/static/json-format-highlight-1.0.1.js new file mode 100644 index 00000000..e87c76e1 --- /dev/null +++ b/datasette/static/json-format-highlight-1.0.1.js @@ -0,0 +1,43 @@ +/* +https://github.com/luyilin/json-format-highlight +From https://unpkg.com/json-format-highlight@1.0.1/dist/json-format-highlight.js +MIT Licensed +*/ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : + typeof define === 'function' && define.amd ? define(factory) : + (global.jsonFormatHighlight = factory()); +}(this, (function () { 'use strict'; + +var defaultColors = { + keyColor: 'dimgray', + numberColor: 'lightskyblue', + stringColor: 'lightcoral', + trueColor: 'lightseagreen', + falseColor: '#f66578', + nullColor: 'cornflowerblue' +}; + +function index (json, colorOptions) { + if ( colorOptions === void 0 ) colorOptions = {}; + + if (!json) { return; } + if (typeof json !== 'string') { + json = JSON.stringify(json, null, 2); + } + var colors = Object.assign({}, defaultColors, colorOptions); + json = json.replace(/&/g, '&').replace(//g, '>'); + return json.replace(/("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+]?\d+)?)/g, function (match) { + var color = colors.numberColor; + if (/^"/.test(match)) { + color = /:$/.test(match) ? colors.keyColor : colors.stringColor; + } else { + color = /true/.test(match) ? colors.trueColor : /false/.test(match) ? colors.falseColor : /null/.test(match) ? colors.nullColor : color; + } + return ("" + match + ""); + }); +} + +return index; + +}))); diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index 93bacde3..de5337e3 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -2,6 +2,10 @@ {% block title %}API Explorer{% endblock %} +{% block extra_head %} + +{% endblock %} + {% block content %}

    API Explorer

    @@ -14,17 +18,30 @@ {% endfor %} {% endif %} -
    -
    - - -
    -
    - - -
    -

    - +
    + GET +
    +
    + + + +
    + +
    +
    + POST +
    +
    + + +
    +
    + + +
    +

    + +
 {% else %}
-    {% if not canned_write and not error %}
+    {% if not canned_query_write and not error %}

    0 results

    {% endif %} {% endif %} diff --git a/datasette/views/database.py b/datasette/views/database.py index 0770a380..658c35e6 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -1,4 +1,3 @@ -from asyncinject import Registry from dataclasses import dataclass, field from typing import Callable from urllib.parse import parse_qsl, urlencode @@ -33,7 +32,7 @@ from datasette.utils import ( from datasette.utils.asgi import AsgiFileDownload, NotFound, Response, Forbidden from datasette.plugins import pm -from .base import BaseView, DatasetteError, DataView, View, _error, stream_csv +from .base import BaseView, DatasetteError, View, _error, stream_csv class DatabaseView(View): @@ -57,7 +56,7 @@ class DatabaseView(View): sql = (request.args.get("sql") or "").strip() if sql: - return await query_view(request, datasette) + return await QueryView()(request, datasette) if format_ not in ("html", "json"): raise NotFound("Invalid format: {}".format(format_)) @@ -65,10 +64,6 @@ class DatabaseView(View): metadata = (datasette.metadata("databases") or {}).get(database, {}) datasette.update_with_inherited_metadata(metadata) - table_counts = await db.table_counts(5) - hidden_table_names = set(await db.hidden_table_names()) - all_foreign_keys = await db.get_all_foreign_keys() - sql_views = [] for view_name in await db.view_names(): view_visible, view_private = await datasette.check_visibility( @@ -196,8 +191,13 @@ class QueryContext: # urls: dict = field( # metadata={"help": "Object containing URL helpers like `database()`"} # ) - canned_write: bool = field( - metadata={"help": "Boolean indicating if this canned query allows writes"} + canned_query_write: bool = field( + metadata={ + "help": "Boolean indicating if this is a canned query that allows writes" + } + ) + metadata: dict = field( + metadata={"help": "Metadata about the database or the canned query"} ) db_is_immutable: bool = field( metadata={"help": "Boolean indicating if this database is immutable"} @@ -232,7 +232,6 @@ class QueryContext: show_hide_hidden: str = field( metadata={"help": "Hidden input field for the _show_sql parameter"} ) - metadata: dict = field(metadata={"help": "Metadata about the query/database"}) database_color: Callable = field( metadata={"help": "Function that returns a color for a given database name"} ) @@ -242,6 +241,12 @@ class QueryContext: alternate_url_json: str = field( metadata={"help": "URL for alternate JSON version of this page"} ) + # TODO: refactor this to somewhere else, probably ds.render_template() + select_templates: list = field( + metadata={ + "help": "List of templates that were considered for rendering this page" + } + ) async def get_tables(datasette, request, db): @@ -320,287 +325,105 @@ async def database_download(request, datasette): ) -async def query_view( - request, - datasette, - # canned_query=None, - # _size=None, - # named_parameters=None, - # write=False, -): - db = await datasette.resolve_database(request) - database = db.name - # Flattened because of ?sql=&name1=value1&name2=value2 feature - params = {key: request.args.get(key) for key in request.args} - sql = None - if "sql" in params: - sql = params.pop("sql") - if "_shape" in params: - params.pop("_shape") +class QueryView(View): + async def post(self, request, datasette): + from datasette.app import TableNotFound - # extras come from original request.args to avoid being flattened - extras = request.args.getlist("_extra") + db = await datasette.resolve_database(request) - # TODO: Behave differently for canned query 
here: - await datasette.ensure_permissions(request.actor, [("execute-sql", database)]) - - _, private = await datasette.check_visibility( - request.actor, - permissions=[ - ("view-database", database), - "view-instance", - ], - ) - - extra_args = {} - if params.get("_timelimit"): - extra_args["custom_time_limit"] = int(params["_timelimit"]) - - format_ = request.url_vars.get("format") or "html" - query_error = None - try: - validate_sql_select(sql) - results = await datasette.execute( - database, sql, params, truncate=True, **extra_args - ) - columns = results.columns - rows = results.rows - except QueryInterrupted as ex: - raise DatasetteError( - textwrap.dedent( - """ -

    SQL query took too long. The time limit is controlled by the - sql_time_limit_ms - configuration option.

    - - - """.format( - markupsafe.escape(ex.sql) - ) - ).strip(), - title="SQL Interrupted", - status=400, - message_is_html=True, - ) - except sqlite3.DatabaseError as ex: - query_error = str(ex) - results = None - rows = [] - columns = [] - except (sqlite3.OperationalError, InvalidSql) as ex: - raise DatasetteError(str(ex), title="Invalid SQL", status=400) - except sqlite3.OperationalError as ex: - raise DatasetteError(str(ex)) - except DatasetteError: - raise - - # Handle formats from plugins - if format_ == "csv": - - async def fetch_data_for_csv(request, _next=None): - results = await db.execute(sql, params, truncate=True) - data = {"rows": results.rows, "columns": results.columns} - return data, None, None - - return await stream_csv(datasette, fetch_data_for_csv, request, db.name) - elif format_ in datasette.renderers.keys(): - # Dispatch request to the correct output format renderer - # (CSV is not handled here due to streaming) - result = call_with_supported_arguments( - datasette.renderers[format_][0], - datasette=datasette, - columns=columns, - rows=rows, - sql=sql, - query_name=None, - database=database, - table=None, - request=request, - view_name="table", - truncated=results.truncated if results else False, - error=query_error, - # These will be deprecated in Datasette 1.0: - args=request.args, - data={"rows": rows, "columns": columns}, - ) - if asyncio.iscoroutine(result): - result = await result - if result is None: - raise NotFound("No data") - if isinstance(result, dict): - r = Response( - body=result.get("body"), - status=result.get("status_code") or 200, - content_type=result.get("content_type", "text/plain"), - headers=result.get("headers"), + # We must be a canned query + table_found = False + try: + await datasette.resolve_table(request) + table_found = True + except TableNotFound as table_not_found: + canned_query = await datasette.get_canned_query( + table_not_found.database_name, table_not_found.table, request.actor ) - elif isinstance(result, Response): - r = result - # if status_code is not None: - # # Over-ride the status code - # r.status = status_code - else: - assert False, f"{result} should be dict or Response" - elif format_ == "html": - headers = {} - templates = [f"query-{to_css_class(database)}.html", "query.html"] - template = datasette.jinja_env.select_template(templates) - alternate_url_json = datasette.absolute_url( - request, - datasette.urls.path(path_with_format(request=request, format="json")), - ) - data = {} - headers.update( - { - "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( - alternate_url_json - ) - } - ) - metadata = (datasette.metadata("databases") or {}).get(database, {}) - datasette.update_with_inherited_metadata(metadata) + if canned_query is None: + raise + if table_found: + # That should not have happened + raise DatasetteError("Unexpected table found on POST", status=404) - renderers = {} - for key, (_, can_render) in datasette.renderers.items(): - it_can_render = call_with_supported_arguments( - can_render, - datasette=datasette, - columns=data.get("columns") or [], - rows=data.get("rows") or [], - sql=data.get("query", {}).get("sql", None), - query_name=data.get("query_name"), - database=database, - table=data.get("table"), - request=request, - view_name="database", + # If database is immutable, return an error + if not db.is_mutable: + raise Forbidden("Database is immutable") + + # Process the POST + body = await request.post_body() + body = body.decode("utf-8").strip() + if body.startswith("{") and 
body.endswith("}"): + params = json.loads(body) + # But we want key=value strings + for key, value in params.items(): + params[key] = str(value) + else: + params = dict(parse_qsl(body, keep_blank_values=True)) + # Should we return JSON? + should_return_json = ( + request.headers.get("accept") == "application/json" + or request.args.get("_json") + or params.get("_json") + ) + params_for_query = MagicParameters(params, request, datasette) + ok = None + redirect_url = None + try: + cursor = await db.execute_write(canned_query["sql"], params_for_query) + message = canned_query.get( + "on_success_message" + ) or "Query executed, {} row{} affected".format( + cursor.rowcount, "" if cursor.rowcount == 1 else "s" + ) + message_type = datasette.INFO + redirect_url = canned_query.get("on_success_redirect") + ok = True + except Exception as ex: + message = canned_query.get("on_error_message") or str(ex) + message_type = datasette.ERROR + redirect_url = canned_query.get("on_error_redirect") + ok = False + if should_return_json: + return Response.json( + { + "ok": ok, + "message": message, + "redirect": redirect_url, + } ) - it_can_render = await await_me_maybe(it_can_render) - if it_can_render: - renderers[key] = datasette.urls.path( - path_with_format(request=request, format=key) - ) - - allow_execute_sql = await datasette.permission_allowed( - request.actor, "execute-sql", database - ) - - show_hide_hidden = "" - if metadata.get("hide_sql"): - if bool(params.get("_show_sql")): - show_hide_link = path_with_removed_args(request, {"_show_sql"}) - show_hide_text = "hide" - show_hide_hidden = '' - else: - show_hide_link = path_with_added_args(request, {"_show_sql": 1}) - show_hide_text = "show" else: - if bool(params.get("_hide_sql")): - show_hide_link = path_with_removed_args(request, {"_hide_sql"}) - show_hide_text = "show" - show_hide_hidden = '' - else: - show_hide_link = path_with_added_args(request, {"_hide_sql": 1}) - show_hide_text = "hide" - hide_sql = show_hide_text == "show" + datasette.add_message(request, message, message_type) + return Response.redirect(redirect_url or request.path) - # Extract any :named parameters - named_parameters = await derive_named_parameters( - datasette.get_database(database), sql - ) - named_parameter_values = { - named_parameter: params.get(named_parameter) or "" - for named_parameter in named_parameters - if not named_parameter.startswith("_") - } + async def get(self, request, datasette): + from datasette.app import TableNotFound - # Set to blank string if missing from params - for named_parameter in named_parameters: - if named_parameter not in params and not named_parameter.startswith("_"): - params[named_parameter] = "" - - r = Response.html( - await datasette.render_template( - template, - QueryContext( - database=database, - query={ - "sql": sql, - "params": params, - }, - canned_query=None, - private=private, - canned_write=False, - db_is_immutable=not db.is_mutable, - error=query_error, - hide_sql=hide_sql, - show_hide_link=datasette.urls.path(show_hide_link), - show_hide_text=show_hide_text, - editable=True, # TODO - allow_execute_sql=allow_execute_sql, - tables=await get_tables(datasette, request, db), - named_parameter_values=named_parameter_values, - edit_sql_url="todo", - display_rows=await display_rows( - datasette, database, request, rows, columns - ), - table_columns=await _table_columns(datasette, database) - if allow_execute_sql - else {}, - columns=columns, - renderers=renderers, - url_csv=datasette.urls.path( - path_with_format( - 
request=request, format="csv", extra_qs={"_size": "max"} - ) - ), - show_hide_hidden=markupsafe.Markup(show_hide_hidden), - metadata=metadata, - database_color=lambda _: "#ff0000", - alternate_url_json=alternate_url_json, - ), - request=request, - view_name="database", - ), - headers=headers, - ) - else: - assert False, "Invalid format: {}".format(format_) - if datasette.cors: - add_cors_headers(r.headers) - return r - - -class QueryView(DataView): - async def data( - self, - request, - sql, - editable=True, - canned_query=None, - metadata=None, - _size=None, - named_parameters=None, - write=False, - default_labels=None, - ): - db = await self.ds.resolve_database(request) + db = await datasette.resolve_database(request) database = db.name - params = {key: request.args.get(key) for key in request.args} - if "sql" in params: - params.pop("sql") - if "_shape" in params: - params.pop("_shape") + + # Are we a canned query? + canned_query = None + canned_query_write = False + if "table" in request.url_vars: + try: + await datasette.resolve_table(request) + except TableNotFound as table_not_found: + # Was this actually a canned query? + canned_query = await datasette.get_canned_query( + table_not_found.database_name, table_not_found.table, request.actor + ) + if canned_query is None: + raise + canned_query_write = bool(canned_query.get("write")) private = False if canned_query: # Respect canned query permissions - visible, private = await self.ds.check_visibility( + visible, private = await datasette.check_visibility( request.actor, permissions=[ - ("view-query", (database, canned_query)), + ("view-query", (database, canned_query["name"])), ("view-database", database), "view-instance", ], @@ -609,18 +432,32 @@ class QueryView(DataView): raise Forbidden("You do not have permission to view this query") else: - await self.ds.ensure_permissions(request.actor, [("execute-sql", database)]) + await datasette.ensure_permissions( + request.actor, [("execute-sql", database)] + ) + + # Flattened because of ?sql=&name1=value1&name2=value2 feature + params = {key: request.args.get(key) for key in request.args} + sql = None + + if canned_query: + sql = canned_query["sql"] + elif "sql" in params: + sql = params.pop("sql") # Extract any :named parameters - named_parameters = named_parameters or await derive_named_parameters( - self.ds.get_database(database), sql - ) + named_parameters = [] + if canned_query and canned_query.get("params"): + named_parameters = canned_query["params"] + if not named_parameters: + named_parameters = await derive_named_parameters( + datasette.get_database(database), sql + ) named_parameter_values = { named_parameter: params.get(named_parameter) or "" for named_parameter in named_parameters if not named_parameter.startswith("_") } - # Set to blank string if missing from params for named_parameter in named_parameters: if named_parameter not in params and not named_parameter.startswith("_"): @@ -629,212 +466,159 @@ class QueryView(DataView): extra_args = {} if params.get("_timelimit"): extra_args["custom_time_limit"] = int(params["_timelimit"]) - if _size: - extra_args["page_size"] = _size - templates = [f"query-{to_css_class(database)}.html", "query.html"] - if canned_query: - templates.insert( - 0, - f"query-{to_css_class(database)}-{to_css_class(canned_query)}.html", - ) + format_ = request.url_vars.get("format") or "html" query_error = None + results = None + rows = [] + columns = [] - # Execute query - as write or as read - if write: - if request.method == "POST": - # If database 
is immutable, return an error - if not db.is_mutable: - raise Forbidden("Database is immutable") - body = await request.post_body() - body = body.decode("utf-8").strip() - if body.startswith("{") and body.endswith("}"): - params = json.loads(body) - # But we want key=value strings - for key, value in params.items(): - params[key] = str(value) - else: - params = dict(parse_qsl(body, keep_blank_values=True)) - # Should we return JSON? - should_return_json = ( - request.headers.get("accept") == "application/json" - or request.args.get("_json") - or params.get("_json") - ) - if canned_query: - params_for_query = MagicParameters(params, request, self.ds) - else: - params_for_query = params - ok = None - try: - cursor = await self.ds.databases[database].execute_write( - sql, params_for_query - ) - message = metadata.get( - "on_success_message" - ) or "Query executed, {} row{} affected".format( - cursor.rowcount, "" if cursor.rowcount == 1 else "s" - ) - message_type = self.ds.INFO - redirect_url = metadata.get("on_success_redirect") - ok = True - except Exception as e: - message = metadata.get("on_error_message") or str(e) - message_type = self.ds.ERROR - redirect_url = metadata.get("on_error_redirect") - ok = False - if should_return_json: - return Response.json( - { - "ok": ok, - "message": message, - "redirect": redirect_url, - } - ) - else: - self.ds.add_message(request, message, message_type) - return self.redirect(request, redirect_url or request.path) - else: + params_for_query = params - async def extra_template(): - return { - "request": request, - "db_is_immutable": not db.is_mutable, - "path_with_added_args": path_with_added_args, - "path_with_removed_args": path_with_removed_args, - "named_parameter_values": named_parameter_values, - "canned_query": canned_query, - "success_message": request.args.get("_success") or "", - "canned_write": True, - } - - return ( - { - "database": database, - "rows": [], - "truncated": False, - "columns": [], - "query": {"sql": sql, "params": params}, - "private": private, - }, - extra_template, - templates, - ) - else: # Not a write - if canned_query: - params_for_query = MagicParameters(params, request, self.ds) - else: - params_for_query = params + if not canned_query_write: try: - results = await self.ds.execute( + if not canned_query: + # For regular queries we only allow SELECT, plus other rules + validate_sql_select(sql) + else: + # Canned queries can run magic parameters + params_for_query = MagicParameters(params, request, datasette) + results = await datasette.execute( database, sql, params_for_query, truncate=True, **extra_args ) - columns = [r[0] for r in results.description] - except sqlite3.DatabaseError as e: - query_error = e + columns = results.columns + rows = results.rows + except QueryInterrupted as ex: + raise DatasetteError( + textwrap.dedent( + """ +

    SQL query took too long. The time limit is controlled by the + sql_time_limit_ms + configuration option.

    + + + """.format( + markupsafe.escape(ex.sql) + ) + ).strip(), + title="SQL Interrupted", + status=400, + message_is_html=True, + ) + except sqlite3.DatabaseError as ex: + query_error = str(ex) results = None + rows = [] columns = [] + except (sqlite3.OperationalError, InvalidSql) as ex: + raise DatasetteError(str(ex), title="Invalid SQL", status=400) + except sqlite3.OperationalError as ex: + raise DatasetteError(str(ex)) + except DatasetteError: + raise - allow_execute_sql = await self.ds.permission_allowed( - request.actor, "execute-sql", database - ) + # Handle formats from plugins + if format_ == "csv": - async def extra_template(): - display_rows = [] - truncate_cells = self.ds.setting("truncate_cells_html") - for row in results.rows if results else []: - display_row = [] - for column, value in zip(results.columns, row): - display_value = value - # Let the plugins have a go - # pylint: disable=no-member - plugin_display_value = None - for candidate in pm.hook.render_cell( - row=row, - value=value, - column=column, - table=None, - database=database, - datasette=self.ds, - request=request, - ): - candidate = await await_me_maybe(candidate) - if candidate is not None: - plugin_display_value = candidate - break - if plugin_display_value is not None: - display_value = plugin_display_value - else: - if value in ("", None): - display_value = markupsafe.Markup(" ") - elif is_url(str(display_value).strip()): - display_value = markupsafe.Markup( - '{truncated_url}'.format( - url=markupsafe.escape(value.strip()), - truncated_url=markupsafe.escape( - truncate_url(value.strip(), truncate_cells) - ), - ) - ) - elif isinstance(display_value, bytes): - blob_url = path_with_format( - request=request, - format="blob", - extra_qs={ - "_blob_column": column, - "_blob_hash": hashlib.sha256( - display_value - ).hexdigest(), - }, - ) - formatted = format_bytes(len(value)) - display_value = markupsafe.Markup( - '<Binary: {:,} byte{}>'.format( - blob_url, - ' title="{}"'.format(formatted) - if "bytes" not in formatted - else "", - len(value), - "" if len(value) == 1 else "s", - ) - ) - else: - display_value = str(value) - if truncate_cells and len(display_value) > truncate_cells: - display_value = ( - display_value[:truncate_cells] + "\u2026" - ) - display_row.append(display_value) - display_rows.append(display_row) + async def fetch_data_for_csv(request, _next=None): + results = await db.execute(sql, params, truncate=True) + data = {"rows": results.rows, "columns": results.columns} + return data, None, None - # Show 'Edit SQL' button only if: - # - User is allowed to execute SQL - # - SQL is an approved SELECT statement - # - No magic parameters, so no :_ in the SQL string - edit_sql_url = None - is_validated_sql = False - try: - validate_sql_select(sql) - is_validated_sql = True - except InvalidSql: - pass - if allow_execute_sql and is_validated_sql and ":_" not in sql: - edit_sql_url = ( - self.ds.urls.database(database) - + "?" 
- + urlencode( - { - **{ - "sql": sql, - }, - **named_parameter_values, - } - ) + return await stream_csv(datasette, fetch_data_for_csv, request, db.name) + elif format_ in datasette.renderers.keys(): + # Dispatch request to the correct output format renderer + # (CSV is not handled here due to streaming) + result = call_with_supported_arguments( + datasette.renderers[format_][0], + datasette=datasette, + columns=columns, + rows=rows, + sql=sql, + query_name=canned_query["name"] if canned_query else None, + database=database, + table=None, + request=request, + view_name="table", + truncated=results.truncated if results else False, + error=query_error, + # These will be deprecated in Datasette 1.0: + args=request.args, + data={"rows": rows, "columns": columns}, + ) + if asyncio.iscoroutine(result): + result = await result + if result is None: + raise NotFound("No data") + if isinstance(result, dict): + r = Response( + body=result.get("body"), + status=result.get("status_code") or 200, + content_type=result.get("content_type", "text/plain"), + headers=result.get("headers"), + ) + elif isinstance(result, Response): + r = result + # if status_code is not None: + # # Over-ride the status code + # r.status = status_code + else: + assert False, f"{result} should be dict or Response" + elif format_ == "html": + headers = {} + templates = [f"query-{to_css_class(database)}.html", "query.html"] + if canned_query: + templates.insert( + 0, + f"query-{to_css_class(database)}-{to_css_class(canned_query['name'])}.html", ) + template = datasette.jinja_env.select_template(templates) + alternate_url_json = datasette.absolute_url( + request, + datasette.urls.path(path_with_format(request=request, format="json")), + ) + data = {} + headers.update( + { + "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( + alternate_url_json + ) + } + ) + metadata = (datasette.metadata("databases") or {}).get(database, {}) + datasette.update_with_inherited_metadata(metadata) + + renderers = {} + for key, (_, can_render) in datasette.renderers.items(): + it_can_render = call_with_supported_arguments( + can_render, + datasette=datasette, + columns=data.get("columns") or [], + rows=data.get("rows") or [], + sql=data.get("query", {}).get("sql", None), + query_name=data.get("query_name"), + database=database, + table=data.get("table"), + request=request, + view_name="database", + ) + it_can_render = await await_me_maybe(it_can_render) + if it_can_render: + renderers[key] = datasette.urls.path( + path_with_format(request=request, format=key) + ) + + allow_execute_sql = await datasette.permission_allowed( + request.actor, "execute-sql", database + ) + show_hide_hidden = "" - if metadata.get("hide_sql"): + if canned_query and canned_query.get("hide_sql"): if bool(params.get("_show_sql")): show_hide_link = path_with_removed_args(request, {"_show_sql"}) show_hide_text = "hide" @@ -855,42 +639,86 @@ class QueryView(DataView): show_hide_link = path_with_added_args(request, {"_hide_sql": 1}) show_hide_text = "hide" hide_sql = show_hide_text == "show" - return { - "display_rows": display_rows, - "custom_sql": True, - "named_parameter_values": named_parameter_values, - "editable": editable, - "canned_query": canned_query, - "edit_sql_url": edit_sql_url, - "metadata": metadata, - "settings": self.ds.settings_dict(), - "request": request, - "show_hide_link": self.ds.urls.path(show_hide_link), - "show_hide_text": show_hide_text, - "show_hide_hidden": markupsafe.Markup(show_hide_hidden), - "hide_sql": hide_sql, - 
"table_columns": await _table_columns(self.ds, database) - if allow_execute_sql - else {}, - } - return ( - { - "ok": not query_error, - "database": database, - "query_name": canned_query, - "rows": results.rows if results else [], - "truncated": results.truncated if results else False, - "columns": columns, - "query": {"sql": sql, "params": params}, - "error": str(query_error) if query_error else None, - "private": private, - "allow_execute_sql": allow_execute_sql, - }, - extra_template, - templates, - 400 if query_error else 200, - ) + # Show 'Edit SQL' button only if: + # - User is allowed to execute SQL + # - SQL is an approved SELECT statement + # - No magic parameters, so no :_ in the SQL string + edit_sql_url = None + is_validated_sql = False + try: + validate_sql_select(sql) + is_validated_sql = True + except InvalidSql: + pass + if allow_execute_sql and is_validated_sql and ":_" not in sql: + edit_sql_url = ( + datasette.urls.database(database) + + "?" + + urlencode( + { + **{ + "sql": sql, + }, + **named_parameter_values, + } + ) + ) + + r = Response.html( + await datasette.render_template( + template, + QueryContext( + database=database, + query={ + "sql": sql, + "params": params, + }, + canned_query=canned_query["name"] if canned_query else None, + private=private, + canned_query_write=canned_query_write, + db_is_immutable=not db.is_mutable, + error=query_error, + hide_sql=hide_sql, + show_hide_link=datasette.urls.path(show_hide_link), + show_hide_text=show_hide_text, + editable=not canned_query, + allow_execute_sql=allow_execute_sql, + tables=await get_tables(datasette, request, db), + named_parameter_values=named_parameter_values, + edit_sql_url=edit_sql_url, + display_rows=await display_rows( + datasette, database, request, rows, columns + ), + table_columns=await _table_columns(datasette, database) + if allow_execute_sql + else {}, + columns=columns, + renderers=renderers, + url_csv=datasette.urls.path( + path_with_format( + request=request, format="csv", extra_qs={"_size": "max"} + ) + ), + show_hide_hidden=markupsafe.Markup(show_hide_hidden), + metadata=canned_query or metadata, + database_color=lambda _: "#ff0000", + alternate_url_json=alternate_url_json, + select_templates=[ + f"{'*' if template_name == template.name else ''}{template_name}" + for template_name in templates + ], + ), + request=request, + view_name="database", + ), + headers=headers, + ) + else: + assert False, "Invalid format: {}".format(format_) + if datasette.cors: + add_cors_headers(r.headers) + return r class MagicParameters(dict): diff --git a/datasette/views/table.py b/datasette/views/table.py index 77acfd95..28264e92 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -9,7 +9,6 @@ import markupsafe from datasette.plugins import pm from datasette.database import QueryInterrupted from datasette import tracer -from datasette.renderer import json_renderer from datasette.utils import ( add_cors_headers, await_me_maybe, @@ -21,7 +20,6 @@ from datasette.utils import ( tilde_encode, escape_sqlite, filters_should_redirect, - format_bytes, is_url, path_from_row_pks, path_with_added_args, @@ -38,7 +36,7 @@ from datasette.utils import ( from datasette.utils.asgi import BadRequest, Forbidden, NotFound, Response from datasette.filters import Filters import sqlite_utils -from .base import BaseView, DataView, DatasetteError, ureg, _error, stream_csv +from .base import BaseView, DatasetteError, ureg, _error, stream_csv from .database import QueryView LINK_WITH_LABEL = ( @@ -698,57 +696,6 @@ 
async def table_view(datasette, request): return response -class CannedQueryView(DataView): - def __init__(self, datasette): - self.ds = datasette - - async def post(self, request): - from datasette.app import TableNotFound - - try: - await self.ds.resolve_table(request) - except TableNotFound as e: - # Was this actually a canned query? - canned_query = await self.ds.get_canned_query( - e.database_name, e.table, request.actor - ) - if canned_query: - # Handle POST to a canned query - return await QueryView(self.ds).data( - request, - canned_query["sql"], - metadata=canned_query, - editable=False, - canned_query=e.table, - named_parameters=canned_query.get("params"), - write=bool(canned_query.get("write")), - ) - - return Response.text("Method not allowed", status=405) - - async def data(self, request, **kwargs): - from datasette.app import TableNotFound - - try: - await self.ds.resolve_table(request) - except TableNotFound as not_found: - canned_query = await self.ds.get_canned_query( - not_found.database_name, not_found.table, request.actor - ) - if canned_query: - return await QueryView(self.ds).data( - request, - canned_query["sql"], - metadata=canned_query, - editable=False, - canned_query=not_found.table, - named_parameters=canned_query.get("params"), - write=bool(canned_query.get("write")), - ) - else: - raise - - async def table_view_traced(datasette, request): from datasette.app import TableNotFound @@ -761,10 +708,7 @@ async def table_view_traced(datasette, request): ) # If this is a canned query, not a table, then dispatch to QueryView instead if canned_query: - if request.method == "POST": - return await CannedQueryView(datasette).post(request) - else: - return await CannedQueryView(datasette).get(request) + return await QueryView()(request, datasette) else: raise diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index d6a88733..e9ad3239 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -95,12 +95,12 @@ def test_insert(canned_write_client): csrftoken_from=True, cookies={"foo": "bar"}, ) - assert 302 == response.status - assert "/data/add_name?success" == response.headers["Location"] messages = canned_write_client.ds.unsign( response.cookies["ds_messages"], "messages" ) - assert [["Query executed, 1 row affected", 1]] == messages + assert messages == [["Query executed, 1 row affected", 1]] + assert response.status == 302 + assert response.headers["Location"] == "/data/add_name?success" @pytest.mark.parametrize( @@ -382,11 +382,11 @@ def test_magic_parameters_cannot_be_used_in_arbitrary_queries(magic_parameters_c def test_canned_write_custom_template(canned_write_client): response = canned_write_client.get("/data/update_name") assert response.status == 200 + assert "!!!CUSTOM_UPDATE_NAME_TEMPLATE!!!" in response.text assert ( "" in response.text ) - assert "!!!CUSTOM_UPDATE_NAME_TEMPLATE!!!" in response.text # And test for link rel=alternate while we're here: assert ( '' From 8920d425f4d417cfd998b61016c5ff3530cd34e1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 10:20:58 -0700 Subject: [PATCH 1221/1705] 1.0a3 release notes, smaller changes section - refs #2135 --- docs/changelog.rst | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index ee48d075..b4416f94 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,25 @@ Changelog ========= +.. 
_v1_0_a3: + +1.0a3 (2023-08-09) +------------------ + +This alpha release previews the updated design for Datasette's default JSON API. + +Smaller changes +~~~~~~~~~~~~~~~ + +- Datasette documentation now shows YAML examples for :ref:`metadata` by default, with a tab interface for switching to JSON. (:issue:`1153`) +- :ref:`plugin_register_output_renderer` plugins now have access to ``error`` and ``truncated`` arguments, allowing them to display error messages and take into account truncated results. (:issue:`2130`) +- ``render_cell()`` plugin hook now also supports an optional ``request`` argument. (:issue:`2007`) +- New ``Justfile`` to support development workflows for Datasette using `Just `__. +- ``datasette.render_template()`` can now accepts a ``datasette.views.Context`` subclass as an alternative to a dictionary. (:issue:`2127`) +- ``datasette install -e path`` option for editable installations, useful while developing plugins. (:issue:`2106`) +- When started with the ``--cors`` option Datasette now serves an ``Access-Control-Max-Age: 3600`` header, ensuring CORS OPTIONS requests are repeated no more than once an hour. (:issue:`2079`) +- Fixed a bug where the ``_internal`` database could display ``None`` instead of ``null`` for in-memory databases. (:issue:`1970`) + .. _v0_64_2: 0.64.2 (2023-03-08) From e34d09c6ec16ff5e7717e112afdad67f7c05a62a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 12:01:59 -0700 Subject: [PATCH 1222/1705] Don't include columns in query JSON, refs #2136 --- datasette/renderer.py | 8 +++++++- datasette/views/database.py | 2 +- tests/test_api.py | 1 - tests/test_cli_serve_get.py | 11 ++++++----- 4 files changed, 14 insertions(+), 8 deletions(-) diff --git a/datasette/renderer.py b/datasette/renderer.py index 0bd74e81..224031a7 100644 --- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -27,7 +27,7 @@ def convert_specific_columns_to_json(rows, columns, json_cols): return new_rows -def json_renderer(args, data, error, truncated=None): +def json_renderer(request, args, data, error, truncated=None): """Render a response as JSON""" status_code = 200 @@ -106,6 +106,12 @@ def json_renderer(args, data, error, truncated=None): "status": 400, "title": None, } + + # Don't include "columns" in output + # https://github.com/simonw/datasette/issues/2136 + if isinstance(data, dict) and "columns" not in request.args.getlist("_extra"): + data.pop("columns", None) + # Handle _nl option for _shape=array nl = args.get("_nl", "") if nl and shape == "array": diff --git a/datasette/views/database.py b/datasette/views/database.py index 658c35e6..cf76f3c2 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -548,7 +548,7 @@ class QueryView(View): error=query_error, # These will be deprecated in Datasette 1.0: args=request.args, - data={"rows": rows, "columns": columns}, + data={"ok": True, "rows": rows, "columns": columns}, ) if asyncio.iscoroutine(result): result = await result diff --git a/tests/test_api.py b/tests/test_api.py index 28415a0b..f96f571e 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -649,7 +649,6 @@ async def test_custom_sql(ds_client): {"content": "RENDER_CELL_DEMO"}, {"content": "RENDER_CELL_ASYNC"}, ], - "columns": ["content"], "ok": True, "truncated": False, } diff --git a/tests/test_cli_serve_get.py b/tests/test_cli_serve_get.py index 2e0390bb..dc7fc1e2 100644 --- a/tests/test_cli_serve_get.py +++ b/tests/test_cli_serve_get.py @@ -34,11 +34,12 @@ def test_serve_with_get(tmp_path_factory): 
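
A sketch of what the renderer change above means in practice: ``"columns"`` should now only be present in query JSON when explicitly requested with ``?_extra=columns``. This is illustrative test code, not part of the patch:

.. code-block:: python

    import pytest
    from datasette.app import Datasette


    @pytest.mark.asyncio
    async def test_columns_are_opt_in():
        ds = Datasette(memory=True)
        await ds.invoke_startup()
        without = await ds.client.get("/_memory.json?sql=select+1+as+n")
        assert "columns" not in without.json()
        with_extra = await ds.client.get(
            "/_memory.json?sql=select+1+as+n&_extra=columns"
        )
        assert with_extra.json()["columns"] == ["n"]
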
"/_memory.json?sql=select+sqlite_version()", ], ) - assert 0 == result.exit_code, result.output - assert { - "truncated": False, - "columns": ["sqlite_version()"], - }.items() <= json.loads(result.output).items() + assert result.exit_code == 0, result.output + data = json.loads(result.output) + # Should have a single row with a single column + assert len(data["rows"]) == 1 + assert list(data["rows"][0].keys()) == ["sqlite_version()"] + assert set(data.keys()) == {"rows", "ok", "truncated"} # The plugin should have created hello.txt assert (plugins_dir / "hello.txt").read_text() == "hello" From 856ca68d94708c6e94673cb6bc28bf3e3ca17845 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 12:04:40 -0700 Subject: [PATCH 1223/1705] Update default JSON representation docs, refs #2135 --- docs/json_api.rst | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/docs/json_api.rst b/docs/json_api.rst index c273c2a8..16b997eb 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -9,10 +9,10 @@ through the Datasette user interface can also be accessed as JSON via the API. To access the API for a page, either click on the ``.json`` link on that page or edit the URL and add a ``.json`` extension to it. -.. _json_api_shapes: +.. _json_api_default: -Different shapes ----------------- +Default representation +---------------------- The default JSON representation of data from a SQLite table or custom query looks like this: @@ -21,7 +21,6 @@ looks like this: { "ok": true, - "next": null, "rows": [ { "id": 3, @@ -39,13 +38,22 @@ looks like this: "id": 1, "name": "San Francisco" } - ] + ], + "truncated": false } -The ``rows`` key is a list of objects, each one representing a row. ``next`` indicates if -there is another page, and ``ok`` is always ``true`` if an error did not occur. +``"ok"`` is always ``true`` if an error did not occur. -If ``next`` is present then the next page in the pagination set can be retrieved using ``?_next=VALUE``. +The ``"rows"`` key is a list of objects, each one representing a row. + +The ``"truncated"`` key lets you know if the query was truncated. This can happen if a SQL query returns more than 1,000 results (or the :ref:`setting_max_returned_rows` setting). + +For table pages, an additional key ``"next"`` may be present. This indicates that the next page in the pagination set can be retrieved using ``?_next=VALUE``. + +.. _json_api_shapes: + +Different shapes +---------------- The ``_shape`` parameter can be used to access alternative formats for the ``rows`` key which may be more convenient for your application. There are three From 90cb9ca58d910f49e8f117bbdd94df6f0855cf99 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 12:11:16 -0700 Subject: [PATCH 1224/1705] JSON changes in release notes, refs #2135 --- docs/changelog.rst | 35 ++++++++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b4416f94..4c70855b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -9,7 +9,40 @@ Changelog 1.0a3 (2023-08-09) ------------------ -This alpha release previews the updated design for Datasette's default JSON API. +This alpha release previews the updated design for Datasette's default JSON API. (:issue:`782`) + +The new :ref:`default JSON representation ` for both table pages (``/dbname/table.json``) and arbitrary SQL queries (``/dbname.json?sql=...``) is now shaped like this: + +.. 
code-block:: json + + { + "ok": true, + "rows": [ + { + "id": 3, + "name": "Detroit" + }, + { + "id": 2, + "name": "Los Angeles" + }, + { + "id": 4, + "name": "Memnonia" + }, + { + "id": 1, + "name": "San Francisco" + } + ], + "truncated": false + } + +Tables will include an additional ``"next"`` key for pagination, which can be passed to ``?_next=`` to fetch the next page of results. + +The various ``?_shape=`` options continue to work as before - see :ref:`json_api_shapes` for details. + +A new ``?_extra=`` mechanism is available for tables, but has not yet been stabilized or documented. Details on that are available in :issue:`262`. Smaller changes ~~~~~~~~~~~~~~~ From 19ab4552e212c9845a59461cc73e82d5ae8c278a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 12:13:11 -0700 Subject: [PATCH 1225/1705] Release 1.0a3 Closes #2135 Refs #262, #782, #1153, #1970, #2007, #2079, #2106, #2127, #2130 --- datasette/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 3b81ab21..61dee464 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "1.0a2" +__version__ = "1.0a3" __version_info__ = tuple(__version__.split(".")) From 4a42476bb7ce4c5ed941f944115dedd9bce34656 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 15:04:16 -0700 Subject: [PATCH 1226/1705] datasette plugins --requirements, closes #2133 --- datasette/cli.py | 12 ++++++++++-- docs/cli-reference.rst | 1 + docs/plugins.rst | 32 ++++++++++++++++++++++++++++---- tests/test_cli.py | 3 +++ 4 files changed, 42 insertions(+), 6 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 32266888..21fd25d6 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -223,15 +223,23 @@ pm.hook.publish_subcommand(publish=publish) @cli.command() @click.option("--all", help="Include built-in default plugins", is_flag=True) +@click.option( + "--requirements", help="Output requirements.txt of installed plugins", is_flag=True +) @click.option( "--plugins-dir", type=click.Path(exists=True, file_okay=False, dir_okay=True), help="Path to directory containing custom plugins", ) -def plugins(all, plugins_dir): +def plugins(all, requirements, plugins_dir): """List currently installed plugins""" app = Datasette([], plugins_dir=plugins_dir) - click.echo(json.dumps(app._plugins(all=all), indent=4)) + if requirements: + for plugin in app._plugins(): + if plugin["version"]: + click.echo("{}=={}".format(plugin["name"], plugin["version"])) + else: + click.echo(json.dumps(app._plugins(all=all), indent=4)) @cli.command() diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 2177fc9e..7a96d311 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -282,6 +282,7 @@ Output JSON showing all currently installed plugins, their versions, whether the Options: --all Include built-in default plugins + --requirements Output requirements.txt of installed plugins --plugins-dir DIRECTORY Path to directory containing custom plugins --help Show this message and exit. diff --git a/docs/plugins.rst b/docs/plugins.rst index 979f94dd..19bfdd0c 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -90,7 +90,12 @@ You can see a list of installed plugins by navigating to the ``/-/plugins`` page You can also use the ``datasette plugins`` command:: - $ datasette plugins + datasette plugins + +Which outputs: + +.. 
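
A sketch of driving the new ``--requirements`` flag from Python using Click's test runner, in the same style as this repository's CLI tests; the assertions are illustrative:

.. code-block:: python

    from click.testing import CliRunner

    from datasette.cli import cli

    result = CliRunner().invoke(cli, ["plugins", "--requirements"])
    assert result.exit_code == 0
    # Each line is a pin suitable for: datasette install -r requirements.txt
    for line in result.output.splitlines():
        name, version = line.split("==")
        print(f"{name} is pinned to {version}")
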
code-block:: json + [ { "name": "datasette_json_html", @@ -107,7 +112,8 @@ You can also use the ``datasette plugins`` command:: cog.out("\n") result = CliRunner().invoke(cli.cli, ["plugins", "--all"]) # cog.out() with text containing newlines was unindenting for some reason - cog.outl("If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette::\n") + cog.outl("If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette:\n") + cog.outl(".. code-block:: json\n") plugins = [p for p in json.loads(result.output) if p["name"].startswith("datasette.")] indented = textwrap.indent(json.dumps(plugins, indent=4), " ") for line in indented.split("\n"): @@ -115,7 +121,9 @@ You can also use the ``datasette plugins`` command:: cog.out("\n\n") .. ]]] -If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette:: +If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette: + +.. code-block:: json [ { @@ -236,6 +244,22 @@ If you run ``datasette plugins --all`` it will include default plugins that ship You can add the ``--plugins-dir=`` option to include any plugins found in that directory. +Add ``--requirements`` to output a list of installed plugins that can then be installed in another Datasette instance using ``datasette install -r requirements.txt``:: + + datasette plugins --requirements + +The output will look something like this:: + + datasette-codespaces==0.1.1 + datasette-graphql==2.2 + datasette-json-html==1.0.1 + datasette-pretty-json==0.2.2 + datasette-x-forwarded-host==0.1 + +To write that to a ``requirements.txt`` file, run this:: + + datasette plugins --requirements > requirements.txt + .. _plugins_configuration: Plugin configuration @@ -390,7 +414,7 @@ Any values embedded in ``metadata.yaml`` will be visible to anyone who views the If you are publishing your data using the :ref:`datasette publish ` family of commands, you can use the ``--plugin-secret`` option to set these secrets at publish time. 
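
One way to avoid embedding a secret in the file at all is Datasette's ``$env`` pattern for plugin configuration, shown here in its Python dictionary form; the plugin name and environment variable are placeholders:

.. code-block:: python

    # The secret is read from an environment variable at runtime rather
    # than being stored in metadata itself
    metadata = {
        "plugins": {
            "datasette-auth-github": {
                "client_secret": {"$env": "GITHUB_CLIENT_SECRET"}
            }
        }
    }
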
For example, using Heroku you might run the following command:: - $ datasette publish heroku my_database.db \ + datasette publish heroku my_database.db \ --name my-heroku-app-demo \ --install=datasette-auth-github \ --plugin-secret datasette-auth-github client_id your_client_id \ diff --git a/tests/test_cli.py b/tests/test_cli.py index 75724f61..056e2821 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -108,6 +108,9 @@ def test_plugins_cli(app_client): assert set(names).issuperset({p["name"] for p in EXPECTED_PLUGINS}) # And the following too: assert set(names).issuperset(DEFAULT_PLUGINS) + # --requirements should be empty because there are no installed non-plugins-dir plugins + result3 = runner.invoke(cli, ["plugins", "--requirements"]) + assert result3.output == "" def test_metadata_yaml(): From a3593c901580ea50854c3e0774b0ba0126e8a76f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 17:32:07 -0700 Subject: [PATCH 1227/1705] on_success_message_sql, closes #2138 --- datasette/views/database.py | 29 ++++++++++++++++---- docs/sql_queries.rst | 21 ++++++++++---- tests/test_canned_queries.py | 53 +++++++++++++++++++++++++++++++----- 3 files changed, 85 insertions(+), 18 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index cf76f3c2..79b3f88d 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -360,6 +360,10 @@ class QueryView(View): params[key] = str(value) else: params = dict(parse_qsl(body, keep_blank_values=True)) + + # Don't ever send csrftoken as a SQL parameter + params.pop("csrftoken", None) + # Should we return JSON? should_return_json = ( request.headers.get("accept") == "application/json" @@ -371,12 +375,27 @@ class QueryView(View): redirect_url = None try: cursor = await db.execute_write(canned_query["sql"], params_for_query) - message = canned_query.get( - "on_success_message" - ) or "Query executed, {} row{} affected".format( - cursor.rowcount, "" if cursor.rowcount == 1 else "s" - ) + # success message can come from on_success_message or on_success_message_sql + message = None message_type = datasette.INFO + on_success_message_sql = canned_query.get("on_success_message_sql") + if on_success_message_sql: + try: + message_result = ( + await db.execute(on_success_message_sql, params_for_query) + ).first() + if message_result: + message = message_result[0] + except Exception as ex: + message = "Error running on_success_message_sql: {}".format(ex) + message_type = datasette.ERROR + if not message: + message = canned_query.get( + "on_success_message" + ) or "Query executed, {} row{} affected".format( + cursor.rowcount, "" if cursor.rowcount == 1 else "s" + ) + redirect_url = canned_query.get("on_success_redirect") ok = True except Exception as ex: diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index 3c2cb228..1ae07e1f 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -392,6 +392,7 @@ This configuration will create a page at ``/mydatabase/add_name`` displaying a f You can customize how Datasette represents success and errors using the following optional properties: - ``on_success_message`` - the message shown when a query is successful +- ``on_success_message_sql`` - alternative to ``on_success_message``: a SQL query that should be executed to generate the message - ``on_success_redirect`` - the path or URL the user is redirected to on success - ``on_error_message`` - the message shown when a query throws an error - ``on_error_redirect`` - the path or URL the user is 
redirected to on error
@@ -405,11 +406,12 @@ For example:
             "queries": {
                 "add_name": {
                     "sql": "INSERT INTO names (name) VALUES (:name)",
+                    "params": ["name"],
                     "write": True,
-                    "on_success_message": "Name inserted",
+                    "on_success_message_sql": "select 'Name inserted: ' || :name",
                     "on_success_redirect": "/mydatabase/names",
                     "on_error_message": "Name insert failed",
-                    "on_error_redirect": "/mydatabase"
+                    "on_error_redirect": "/mydatabase",
                 }
             }
         }
@@ -426,8 +428,10 @@ For example:
       queries:
         add_name:
           sql: INSERT INTO names (name) VALUES (:name)
+          params:
+          - name
           write: true
-          on_success_message: Name inserted
+          on_success_message_sql: 'select ''Name inserted: '' || :name'
           on_success_redirect: /mydatabase/names
           on_error_message: Name insert failed
           on_error_redirect: /mydatabase
@@ -443,8 +447,11 @@ For example:
           "queries": {
             "add_name": {
               "sql": "INSERT INTO names (name) VALUES (:name)",
+              "params": [
+                "name"
+              ],
               "write": true,
-              "on_success_message": "Name inserted",
+              "on_success_message_sql": "select 'Name inserted: ' || :name",
               "on_success_redirect": "/mydatabase/names",
               "on_error_message": "Name insert failed",
               "on_error_redirect": "/mydatabase"
             }
           }
         }
       }
     }
 .. [[[end]]]
 
-You can use ``"params"`` to explicitly list the named parameters that should be displayed as form fields - otherwise they will be automatically detected.
+You can use ``"params"`` to explicitly list the named parameters that should be displayed as form fields - otherwise they will be automatically detected. ``"params"`` is not necessary in the above example, since without it ``"name"`` would be automatically detected from the query.
 
 You can pre-populate form fields when the page first loads using a query string, e.g. ``/mydatabase/add_name?name=Prepopulated``. The user will have to submit the form to execute the query.
 
+If you specify a query in ``"on_success_message_sql"``, that query will be executed after the main query. The first column of the first row returned by that query will be displayed as a success message. Named parameters from the main query will be made available to the success message query as well.
+
 .. _canned_queries_magic_parameters:
 
 Magic parameters
@@ -589,7 +598,7 @@ The JSON response will look like this:
         "redirect": "/data/add_name"
     }
 
-The ``"message"`` and ``"redirect"`` values here will take into account ``on_success_message``, ``on_success_redirect``, ``on_error_message`` and ``on_error_redirect``, if they have been set.
+The ``"message"`` and ``"redirect"`` values here will take into account ``on_success_message``, ``on_success_message_sql``, ``on_success_redirect``, ``on_error_message`` and ``on_error_redirect``, if they have been set.
 
 .. 
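
A sketch of exercising that JSON response in a test; ``ds`` is assumed to be a fixture configured with the ``add_name`` query shown above, and the cookieless POST sidesteps CSRF handling:

.. code-block:: python

    import pytest


    @pytest.mark.asyncio
    async def test_json_post_returns_message(ds):
        response = await ds.client.post(
            "/mydatabase/add_name",
            data={"name": "Alice"},
            headers={"Accept": "application/json"},
        )
        data = response.json()
        assert data["ok"] is True
        assert data["message"] == "Name inserted: Alice"
        assert data["redirect"] == "/mydatabase/names"
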
_pagination: diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index e9ad3239..5256c24c 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -31,9 +31,15 @@ def canned_write_client(tmpdir): }, "add_name_specify_id": { "sql": "insert into names (rowid, name) values (:rowid, :name)", + "on_success_message_sql": "select 'Name added: ' || :name || ' with rowid ' || :rowid", "write": True, "on_error_redirect": "/data/add_name_specify_id?error", }, + "add_name_specify_id_with_error_in_on_success_message_sql": { + "sql": "insert into names (rowid, name) values (:rowid, :name)", + "on_success_message_sql": "select this is bad SQL", + "write": True, + }, "delete_name": { "sql": "delete from names where rowid = :rowid", "write": True, @@ -179,6 +185,34 @@ def test_insert_error(canned_write_client): ) +def test_on_success_message_sql(canned_write_client): + response = canned_write_client.post( + "/data/add_name_specify_id", + {"rowid": 5, "name": "Should be OK"}, + csrftoken_from=True, + ) + assert response.status == 302 + assert response.headers["Location"] == "/data/add_name_specify_id" + messages = canned_write_client.ds.unsign( + response.cookies["ds_messages"], "messages" + ) + assert messages == [["Name added: Should be OK with rowid 5", 1]] + + +def test_error_in_on_success_message_sql(canned_write_client): + response = canned_write_client.post( + "/data/add_name_specify_id_with_error_in_on_success_message_sql", + {"rowid": 1, "name": "Should fail"}, + csrftoken_from=True, + ) + messages = canned_write_client.ds.unsign( + response.cookies["ds_messages"], "messages" + ) + assert messages == [ + ["Error running on_success_message_sql: no such column: bad", 3] + ] + + def test_custom_params(canned_write_client): response = canned_write_client.get("/data/update_name?extra=foo") assert '' in response.text @@ -232,21 +266,22 @@ def test_canned_query_permissions_on_database_page(canned_write_client): query_names = { q["name"] for q in canned_write_client.get("/data.json").json["queries"] } - assert { + assert query_names == { + "add_name_specify_id_with_error_in_on_success_message_sql", + "from_hook", + "update_name", + "add_name_specify_id", + "from_async_hook", "canned_read", "add_name", - "add_name_specify_id", - "update_name", - "from_async_hook", - "from_hook", - } == query_names + } # With auth shows four response = canned_write_client.get( "/data.json", cookies={"ds_actor": canned_write_client.actor_cookie({"id": "root"})}, ) - assert 200 == response.status + assert response.status == 200 query_names_and_private = sorted( [ {"name": q["name"], "private": q["private"]} @@ -257,6 +292,10 @@ def test_canned_query_permissions_on_database_page(canned_write_client): assert query_names_and_private == [ {"name": "add_name", "private": False}, {"name": "add_name_specify_id", "private": False}, + { + "name": "add_name_specify_id_with_error_in_on_success_message_sql", + "private": False, + }, {"name": "canned_read", "private": False}, {"name": "delete_name", "private": True}, {"name": "from_async_hook", "private": False}, From 33251d04e78d575cca62bb59069bb43a7d924746 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 17:56:27 -0700 Subject: [PATCH 1228/1705] Canned query write counters demo, refs #2134 --- .github/workflows/deploy-latest.yml | 30 +++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index ed60376c..4746aa07 100644 --- 
a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -57,6 +57,36 @@ jobs: db.route = "alternative-route" ' > plugins/alternative_route.py cp fixtures.db fixtures2.db + - name: And the counters writable canned query demo + run: | + cat > plugins/counters.py < Date: Thu, 10 Aug 2023 22:16:19 -0700 Subject: [PATCH 1229/1705] Fixed display of database color Closes #2139, closes #2119 --- datasette/database.py | 7 +++++++ datasette/templates/database.html | 2 +- datasette/templates/query.html | 2 +- datasette/templates/row.html | 2 +- datasette/templates/table.html | 2 +- datasette/views/base.py | 4 ---- datasette/views/database.py | 8 +++----- datasette/views/index.py | 4 +--- datasette/views/row.py | 4 +++- datasette/views/table.py | 2 +- tests/test_html.py | 20 ++++++++++++++++++++ 11 files changed, 39 insertions(+), 18 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index d8043c24..af39ac9e 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -1,6 +1,7 @@ import asyncio from collections import namedtuple from pathlib import Path +import hashlib import janus import queue import sys @@ -62,6 +63,12 @@ class Database: } return self._cached_table_counts + @property + def color(self): + if self.hash: + return self.hash[:6] + return hashlib.md5(self.name.encode("utf8")).hexdigest()[:6] + def suggest_name(self): if self.path: return Path(self.path).stem diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 7acf0369..3d4dae07 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -10,7 +10,7 @@ {% block body_class %}db db-{{ database|to_css_class }}{% endblock %} {% block content %} -
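
A standalone sketch of the color derivation introduced above: a stable six-character hex string, taken from the content hash when one exists and falling back to an MD5 of the database name otherwise:

.. code-block:: python

    import hashlib


    def database_color(name, db_hash=None):
        if db_hash:
            return db_hash[:6]
        return hashlib.md5(name.encode("utf8")).hexdigest()[:6]


    # Same name always yields the same color
    assert database_color("fixtures") == database_color("fixtures")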