From 93bfa26bfd25a3cc911d637596e364d3474325bd Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 2 Jul 2019 20:57:28 -0700
Subject: [PATCH 0001/2113] Added asgi_wrapper plugin hook, closes #520

---
 datasette/app.py       |  5 ++++-
 datasette/hookspecs.py |  5 +++++
 docs/plugins.rst       | 41 +++++++++++++++++++++++++++++++++++++++++
 tests/fixtures.py      | 23 +++++++++++++++++++++++
 tests/test_plugins.py  |  5 +++++
 5 files changed, 78 insertions(+), 1 deletion(-)

diff --git a/datasette/app.py b/datasette/app.py
index 4a8ead1d..16a29e20 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -651,9 +651,12 @@ class Datasette:
             if not database.is_mutable:
                 await database.table_counts(limit=60 * 60 * 1000)
 
-        return AsgiLifespan(
+        asgi = AsgiLifespan(
             AsgiTracer(DatasetteRouter(self, routes)), on_startup=setup_db
         )
+        for wrapper in pm.hook.asgi_wrapper(datasette=self):
+            asgi = wrapper(asgi)
+        return asgi
 
 
 class DatasetteRouter(AsgiRouter):

diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py
index 61523a31..42adaae8 100644
--- a/datasette/hookspecs.py
+++ b/datasette/hookspecs.py
@@ -5,6 +5,11 @@ hookspec = HookspecMarker("datasette")
 hookimpl = HookimplMarker("datasette")
 
 
+@hookspec
+def asgi_wrapper(datasette):
+    "Returns an ASGI middleware callable to wrap our ASGI application with"
+
+
 @hookspec
 def prepare_connection(conn):
     "Modify SQLite connection in some way e.g. register custom SQL functions"

diff --git a/docs/plugins.rst b/docs/plugins.rst
index bd32b3a6..be335546 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -666,3 +666,44 @@ The plugin hook can then be used to register the new facet class like this:
     @hookimpl
     def register_facet_classes():
         return [SpecialFacet]
+
+
+.. _plugin_asgi_wrapper:
+
+asgi_wrapper(datasette)
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Return an `ASGI <https://asgi.readthedocs.io/>`__ middleware wrapper function that will be applied to the Datasette ASGI application.
+
+This is a very powerful hook. You can use it to manipulate the entire Datasette response, or even to configure new URL routes that will be handled by your own custom code.
+
+You can write your ASGI code directly against the low-level specification, or you can use the middleware utilities provided by an ASGI framework such as `Starlette <https://www.starlette.io/>`__.
+
+This example plugin adds an ``x-databases`` HTTP header listing the currently attached databases:
+
+.. code-block:: python
+
+    from datasette import hookimpl
+    from functools import wraps
+
+
+    @hookimpl
+    def asgi_wrapper(datasette):
+        def wrap_with_databases_header(app):
+            @wraps(app)
+            async def add_x_databases_header(scope, receive, send):
+                async def wrapped_send(event):
+                    if event["type"] == "http.response.start":
+                        original_headers = event.get("headers") or []
+                        event = {
+                            "type": event["type"],
+                            "status": event["status"],
+                            "headers": original_headers + [
+                                [b"x-databases",
+                                 ", ".join(datasette.databases.keys()).encode("utf-8")]
+                            ],
+                        }
+                    await send(event)
+                await app(scope, receive, wrapped_send)
+            return add_x_databases_header
+        return wrap_with_databases_header

diff --git a/tests/fixtures.py b/tests/fixtures.py
index 0330c8ed..fab6509e 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -372,6 +372,7 @@ def render_cell(value, column, table, database, datasette):
 
 PLUGIN2 = """
 from datasette import hookimpl
+from functools import wraps
 import jinja2
 import json
 
@@ -413,6 +414,28 @@ def render_cell(value, database):
             label=jinja2.escape(data["label"] or "") or " "
         )
     )
+
+
+@hookimpl
+def asgi_wrapper(datasette):
+    def wrap_with_databases_header(app):
+        @wraps(app)
+        async def add_x_databases_header(scope, receive, send):
+            async def wrapped_send(event):
+                if event["type"] == "http.response.start":
+                    original_headers = event.get("headers") or []
+                    event = {
+                        "type": event["type"],
+                        "status": event["status"],
+                        "headers": original_headers + [
+                            [b"x-databases",
+                             ", ".join(datasette.databases.keys()).encode("utf-8")]
+                        ],
+                    }
+                await send(event)
+            await app(scope, receive, wrapped_send)
+        return add_x_databases_header
+    return wrap_with_databases_header
 """
 
 TABLES = (

diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 56033bdd..9bdd491a 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -162,3 +162,8 @@ def test_plugins_extra_body_script(app_client, path, expected_extra_body_script)
     json_data = r.search(app_client.get(path).body.decode("utf8")).group(1)
     actual_data = json.loads(json_data)
     assert expected_extra_body_script == actual_data
+
+
+def test_plugins_asgi_wrapper(app_client):
+    response = app_client.get("/fixtures")
+    assert "fixtures" == response.headers["x-databases"]
From f0d32da0a9af87bcb15e34e35424f0c0053be83a Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 2 Jul 2019 21:32:55 -0700
Subject: [PATCH 0003/2113] Switch to ~= dependencies, closes #532 (#536)

* Switch to ~= dependencies, closes #532
* Bump click and click-default-group
* imp. is deprecated, use types.ModuleType instead - thanks https://stackoverflow.com/a/32175781
* Upgrade to pytest 5
---
 datasette/utils/__init__.py |  4 ++--
 setup.py                    | 26 +++++++++++++-------------
 2 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 94ccc23e..17a4d595 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -3,7 +3,6 @@ from collections import OrderedDict
 import base64
 import click
 import hashlib
-import imp
 import json
 import os
 import pkg_resources
@@ -11,6 +10,7 @@ import re
 import shlex
 import tempfile
 import time
+import types
 import shutil
 import urllib
 import numbers
@@ -588,7 +588,7 @@ def link_or_copy_directory(src, dst):
 
 def module_from_path(path, name):
     # Adapted from http://sayspy.blogspot.com/2011/07/how-to-import-module-from-just-file.html
-    mod = imp.new_module(name)
+    mod = types.ModuleType(name)
     mod.__file__ = path
     with open(path, "r") as file:
         code = compile(file.read(), path, "exec", dont_inherit=True)

diff --git a/setup.py b/setup.py
index fdbb948e..254859b0 100644
--- a/setup.py
+++ b/setup.py
@@ -41,14 +41,14 @@ setup(
     package_data={"datasette": ["templates/*.html"]},
     include_package_data=True,
     install_requires=[
-        "click>=6.7",
-        "click-default-group==1.2",
-        "Jinja2==2.10.1",
-        "hupper==1.0",
-        "pint==0.8.1",
-        "pluggy>=0.12.0",
-        "uvicorn>=0.8.1",
-        "aiofiles==0.4.0",
+        "click~=7.0",
+        "click-default-group~=1.2.1",
+        "Jinja2~=2.10.1",
+        "hupper~=1.0",
+        "pint~=0.8.1",
+        "pluggy~=0.12.0",
+        "uvicorn~=0.8.1",
+        "aiofiles~=0.4.0",
     ],
     entry_points="""
         [console_scripts]
@@ -58,11 +58,11 @@ setup(
     extras_require={
         "docs": ["sphinx_rtd_theme", "sphinx-autobuild"],
         "test": [
-            "pytest==4.6.1",
-            "pytest-asyncio==0.10.0",
-            "aiohttp==3.5.3",
-            "beautifulsoup4==4.6.1",
-            "asgiref==3.1.2",
+            "pytest~=5.0.0",
+            "pytest-asyncio~=0.10.0",
+            "aiohttp~=3.5.3",
+            "beautifulsoup4~=4.6.1",
+            "asgiref~=3.1.2",
         ]
         + maybe_black,
     },
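A note on the new pins: ``~=`` is PEP 440's "compatible release" operator, so these ranges accept bug-fix and minor updates while excluding the next breaking version. An illustrative expansion of two of the pins above (the comments are mine, not part of the patch):

.. code-block:: python

    install_requires = [
        "click~=7.0",  # equivalent to: click >=7.0, ==7.*  (anything below 8.0)
        "click-default-group~=1.2.1",  # equivalent to: >=1.2.1, ==1.2.*  (below 1.3.0)
    ]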
From a2d45931935f6bb73605a94afedf9e78308c95d6 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 3 Jul 2019 22:36:44 -0700
Subject: [PATCH 0004/2113] Secret plugin configuration options (#539)

Closes #538
---
 datasette/app.py      | 11 ++++++++++-
 docs/plugins.rst      | 33 +++++++++++++++++++++++++++++++++
 tests/fixtures.py     | 10 +++++++++-
 tests/test_plugins.py | 15 ++++++++++++++-
 4 files changed, 66 insertions(+), 3 deletions(-)

diff --git a/datasette/app.py b/datasette/app.py
index 16a29e20..70bd3c12 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -268,7 +268,16 @@ class Datasette:
         )
         if plugins is None:
             return None
-        return plugins.get(plugin_name)
+        plugin_config = plugins.get(plugin_name)
+        # Resolve any $file and $env keys
+        if isinstance(plugin_config, dict):
+            for key, value in plugin_config.items():
+                if isinstance(value, dict):
+                    if list(value.keys()) == ["$env"]:
+                        plugin_config[key] = os.environ.get(list(value.values())[0])
+                    elif list(value.keys()) == ["$file"]:
+                        plugin_config[key] = open(list(value.values())[0]).read()
+        return plugin_config
 
     def app_css_hash(self):
         if not hasattr(self, "_app_css_hash"):

diff --git a/docs/plugins.rst b/docs/plugins.rst
index be335546..609fa844 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -219,6 +219,39 @@ Here is an example of some plugin configuration for a specific table::
 
 This tells the ``datasette-cluster-map`` plugin which latitude and longitude columns should be used for a table called ``Street_Tree_List`` inside a database file called ``sf-trees.db``.
 
+Secret configuration values
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Any values embedded in ``metadata.json`` will be visible to anyone who views the ``/-/metadata`` page of your Datasette instance. Some plugins may need configuration that should stay secret - API keys for example. There are two ways in which you can store secret configuration values.
+
+**As environment variables**. If your secret lives in an environment variable that is available to the Datasette process, you can indicate that the configuration value should be read from that environment variable like so::
+
+    {
+        "plugins": {
+            "datasette-auth-github": {
+                "client_secret": {
+                    "$env": "GITHUB_CLIENT_SECRET"
+                }
+            }
+        }
+    }
+
+**As values in separate files**. Your secrets can also live in files on disk. To specify that a secret should be read from a file, provide the full file path like this::
+
+    {
+        "plugins": {
+            "datasette-auth-github": {
+                "client_secret": {
+                    "$file": "/secrets/client-secret"
+                }
+            }
+        }
+    }
+
+Writing plugins that accept configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
 When you are writing plugins, you can access plugin configuration using the ``datasette.plugin_config()`` method. If you know you need plugin configuration for a specific table, you can access it like this::
 
     plugin_config = datasette.plugin_config(

diff --git a/tests/fixtures.py b/tests/fixtures.py
index fab6509e..db5f06e2 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -15,6 +15,10 @@ import time
 from urllib.parse import unquote
 
 
+# This temp file is used by one of the plugin config tests
+TEMP_PLUGIN_SECRET_FILE = os.path.join(tempfile.gettempdir(), "plugin-secret")
+
+
 class TestResponse:
     def __init__(self, status, headers, body):
         self.status = status
@@ -246,7 +250,11 @@ METADATA = {
     "source_url": "https://github.com/simonw/datasette/blob/master/tests/fixtures.py",
     "about": "About Datasette",
     "about_url": "https://github.com/simonw/datasette",
-    "plugins": {"name-of-plugin": {"depth": "root"}},
+    "plugins": {
+        "name-of-plugin": {"depth": "root"},
+        "env-plugin": {"foo": {"$env": "FOO_ENV"}},
+        "file-plugin": {"foo": {"$file": TEMP_PLUGIN_SECRET_FILE}},
+    },
     "databases": {
         "fixtures": {
             "description": "Test tables description",

diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 9bdd491a..f42eebd7 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -1,7 +1,8 @@
 from bs4 import BeautifulSoup as Soup
-from .fixtures import app_client  # noqa
+from .fixtures import app_client, make_app_client, TEMP_PLUGIN_SECRET_FILE  # noqa
 import base64
 import json
+import os
 import re
 import pytest
 import urllib
@@ -125,6 +126,18 @@ def test_plugin_config(app_client):
     assert None is app_client.ds.plugin_config("unknown-plugin")
 
 
+def test_plugin_config_env(app_client):
+    os.environ["FOO_ENV"] = "FROM_ENVIRONMENT"
+    assert {"foo": "FROM_ENVIRONMENT"} == app_client.ds.plugin_config("env-plugin")
+    del os.environ["FOO_ENV"]
+
+
+def test_plugin_config_file(app_client):
+    open(TEMP_PLUGIN_SECRET_FILE, "w").write("FROM_FILE")
+    assert {"foo": "FROM_FILE"} == app_client.ds.plugin_config("file-plugin")
+    os.remove(TEMP_PLUGIN_SECRET_FILE)
+
+
 @pytest.mark.parametrize(
     "path,expected_extra_body_script",
     [
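From a plugin's perspective the ``$env``/``$file`` indirection is transparent: ``datasette.plugin_config()`` returns the already-resolved secret. A minimal sketch - the plugin name, key, and choice of hook here are hypothetical, chosen only for illustration:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def asgi_wrapper(datasette):
        # "my-plugin" is a hypothetical name; with metadata.json containing
        #   {"plugins": {"my-plugin": {"api_key": {"$env": "MY_API_KEY"}}}}
        # this call returns {"api_key": "<contents of $MY_API_KEY>"}
        config = datasette.plugin_config("my-plugin") or {}
        api_key = config.get("api_key")

        def wrapper(app):
            # ... use api_key when wrapping the application
            return app

        return wrapper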
From 25ff0a8ba6b2e3247a66048ad173ba5ed8a38b80 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 3 Jul 2019 22:47:45 -0700
Subject: [PATCH 0005/2113] Fix for accidentally leaking secrets in /-/metadata, closes #538

---
 datasette/app.py      | 9 ++++++---
 tests/test_plugins.py | 8 ++++++++
 2 files changed, 14 insertions(+), 3 deletions(-)

diff --git a/datasette/app.py b/datasette/app.py
index 70bd3c12..56b60533 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -271,12 +271,15 @@ class Datasette:
         plugin_config = plugins.get(plugin_name)
         # Resolve any $file and $env keys
         if isinstance(plugin_config, dict):
-            for key, value in plugin_config.items():
+            # Create a copy so we don't mutate the version visible at /-/metadata.json
+            plugin_config_copy = dict(plugin_config)
+            for key, value in plugin_config_copy.items():
                 if isinstance(value, dict):
                     if list(value.keys()) == ["$env"]:
-                        plugin_config[key] = os.environ.get(list(value.values())[0])
+                        plugin_config_copy[key] = os.environ.get(list(value.values())[0])
                     elif list(value.keys()) == ["$file"]:
-                        plugin_config[key] = open(list(value.values())[0]).read()
+                        plugin_config_copy[key] = open(list(value.values())[0]).read()
+            return plugin_config_copy
         return plugin_config
 
     def app_css_hash(self):

diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index f42eebd7..9af2a430 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -129,12 +129,20 @@ def test_plugin_config(app_client):
 def test_plugin_config_env(app_client):
     os.environ["FOO_ENV"] = "FROM_ENVIRONMENT"
     assert {"foo": "FROM_ENVIRONMENT"} == app_client.ds.plugin_config("env-plugin")
+    # Ensure secrets aren't visible in /-/metadata.json
+    metadata = app_client.get("/-/metadata.json")
+    assert {"foo": {"$env": "FOO_ENV"}} == metadata.json["plugins"]["env-plugin"]
     del os.environ["FOO_ENV"]
 
 
 def test_plugin_config_file(app_client):
     open(TEMP_PLUGIN_SECRET_FILE, "w").write("FROM_FILE")
     assert {"foo": "FROM_FILE"} == app_client.ds.plugin_config("file-plugin")
+    # Ensure secrets aren't visible in /-/metadata.json
+    metadata = app_client.get("/-/metadata.json")
+    assert {"foo": {"$file": TEMP_PLUGIN_SECRET_FILE}} == metadata.json["plugins"][
+        "file-plugin"
+    ]
     os.remove(TEMP_PLUGIN_SECRET_FILE)

From 107d47567dedd472eebec7f35bc34f5b58285ba8 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 3 Jul 2019 22:56:13 -0700
Subject: [PATCH 0006/2113] Black

---
 datasette/app.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/datasette/app.py b/datasette/app.py
index 56b60533..1a41c1c6 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -276,7 +276,9 @@ class Datasette:
             for key, value in plugin_config_copy.items():
                 if isinstance(value, dict):
                     if list(value.keys()) == ["$env"]:
-                        plugin_config_copy[key] = os.environ.get(list(value.values())[0])
+                        plugin_config_copy[key] = os.environ.get(
+                            list(value.values())[0]
+                        )
                     elif list(value.keys()) == ["$file"]:
                         plugin_config_copy[key] = open(list(value.values())[0]).read()
             return plugin_config_copy

From 16fdabda978fa659bed0e8670a385dab3c2cd197 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 4 Jul 2019 07:03:02 -0700
Subject: [PATCH 0007/2113] Better robustness in face of missing raw_path

---
 datasette/utils/asgi.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py
index fdf330ae..38ffc072 100644
--- a/datasette/utils/asgi.py
+++ b/datasette/utils/asgi.py
@@ -88,7 +88,10 @@ class AsgiRouter:
     async def __call__(self, scope, receive, send):
         # Because we care about "foo/bar" v.s. 
"foo%2Fbar" we decode raw_path ourselves - path = scope["raw_path"].decode("ascii") + path = scope["path"] + raw_path = scope.get("raw_path") + if raw_path: + path = raw_path.decode("ascii") for regex, view in self.routes: match = regex.match(path) if match is not None: From a18e0964ecd04593f227616538a80dee08768057 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jul 2019 13:34:41 -0700 Subject: [PATCH 0008/2113] Refactor templates for better top nav customization, refs #540 --- datasette/static/app.css | 18 +++++++++++++++++- datasette/templates/_footer.html | 21 +++++++++++++++++++++ datasette/templates/base.html | 28 +++++----------------------- datasette/templates/database.html | 8 +++++++- datasette/templates/index.html | 3 ++- datasette/templates/row.html | 11 +++++++++-- datasette/templates/table.html | 9 ++++++++- 7 files changed, 69 insertions(+), 29 deletions(-) create mode 100644 datasette/templates/_footer.html diff --git a/datasette/static/app.css b/datasette/static/app.css index 468c15f6..76ecdd8d 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -1,5 +1,6 @@ body { - margin: 0 1em; + margin: 0; + padding: 0; font-family: "Helvetica Neue", sans-serif; font-size: 1rem; font-weight: 400; @@ -8,6 +9,9 @@ body { text-align: left; background-color: #fff; } +.bd { + margin: 0 1em; +} table { border-collapse: collapse; } @@ -82,9 +86,21 @@ table a:visited { .hd { border-bottom: 2px solid #ccc; + padding: 0.2em 1em; + background-color: #eee; + overflow: hidden; + box-sizing: border-box; +} +.hd p { + margin: 0; + padding: 0; +} +.hd .crumbs { + float: left; } .ft { margin: 1em 0; + padding: 0.5em 1em 0 1em; border-top: 1px solid #ccc; font-size: 0.8em; } diff --git a/datasette/templates/_footer.html b/datasette/templates/_footer.html new file mode 100644 index 00000000..f930f445 --- /dev/null +++ b/datasette/templates/_footer.html @@ -0,0 +1,21 @@ +Powered by Datasette +{% if query_ms %}· Query took {{ query_ms|round(3) }}ms{% endif %} +{% if metadata %} + {% if metadata.license or metadata.license_url %}· Data license: + {% if metadata.license_url %} + {{ metadata.license or metadata.license_url }} + {% else %} + {{ metadata.license }} + {% endif %} + {% endif %} + {% if metadata.source or metadata.source_url %}· + Data source: {% if metadata.source_url %} + + {% endif %}{{ metadata.source or metadata.source_url }}{% if metadata.source_url %}{% endif %} + {% endif %} + {% if metadata.about or metadata.about_url %}· + About: {% if metadata.about_url %} + + {% endif %}{{ metadata.about or metadata.about_url }}{% if metadata.about_url %}{% endif %} + {% endif %} +{% endif %} diff --git a/datasette/templates/base.html b/datasette/templates/base.html index 0ea41d7e..d26043f8 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -14,33 +14,15 @@ + + +
{% block content %} {% endblock %} - -
- Powered by Datasette - {% if query_ms %}· Query took {{ query_ms|round(3) }}ms{% endif %} - {% if metadata %} - {% if metadata.license or metadata.license_url %}· Data license: - {% if metadata.license_url %} - {{ metadata.license or metadata.license_url }} - {% else %} - {{ metadata.license }} - {% endif %} - {% endif %} - {% if metadata.source or metadata.source_url %}· - Data source: {% if metadata.source_url %} - - {% endif %}{{ metadata.source or metadata.source_url }}{% if metadata.source_url %}{% endif %} - {% endif %} - {% if metadata.about or metadata.about_url %}· - About: {% if metadata.about_url %} - - {% endif %}{{ metadata.about or metadata.about_url }}{% if metadata.about_url %}{% endif %} - {% endif %} - {% endif %}
+
{% block footer %}{% include "_footer.html" %}{% endblock %}
+ {% for body_script in body_scripts %} {% endfor %} diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 9fb4d6eb..f168db97 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -9,8 +9,14 @@ {% block body_class %}db db-{{ database|to_css_class }}{% endblock %} +{% block nav %} +
+    <p class="crumbs">
+        <a href="/">home</a>
+    </p>
+ {{ super() }} +{% endblock %} + {% block content %} -

{{ metadata.title or database }}

diff --git a/datasette/templates/index.html b/datasette/templates/index.html index c8ad4148..b394564a 100644 --- a/datasette/templates/index.html +++ b/datasette/templates/index.html @@ -21,7 +21,8 @@ {{ "{:,}".format(database.views_count) }} view{% if database.views_count != 1 %}s{% endif %} {% endif %}

-

{% for table in database.tables_and_views_truncated %}{{ table.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}

+

{% for table in database.tables_and_views_truncated %}{{ table.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}

{% endfor %} {% endblock %} diff --git a/datasette/templates/row.html b/datasette/templates/row.html index bda1e4e2..5703900d 100644 --- a/datasette/templates/row.html +++ b/datasette/templates/row.html @@ -15,9 +15,16 @@ {% block body_class %}row db-{{ database|to_css_class }} table-{{ table|to_css_class }}{% endblock %} -{% block content %} - +{% block nav %} +

+ home / + {{ database }} / + {{ table }} +

+ {{ super() }} +{% endblock %} +{% block content %}

{{ table }}: {{ ', '.join(primary_key_values) }}

{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 2287e901..c7913f60 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -16,8 +16,15 @@ {% block body_class %}table db-{{ database|to_css_class }} table-{{ table|to_css_class }}{% endblock %} +{% block nav %} +

+ home / + {{ database }} +

+ {{ super() }} +{% endblock %} + {% block content %} -

{{ metadata.title or table }}{% if is_view %} (view){% endif %}

From fcfcae21e67cc15090942b1d2a47b5f016279337 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jul 2019 17:05:56 -0700 Subject: [PATCH 0009/2113] extra_template_vars plugin hook (#542) * extra_template_vars plugin hook Closes #541 * Workaround for cwd bug Based on https://github.com/pytest-dev/pytest/issues/1235#issuecomment-175295691 --- datasette/hookspecs.py | 5 ++ datasette/views/base.py | 25 ++++++++- datasette/views/index.py | 11 ++-- datasette/views/special.py | 6 +- docs/plugins.rst | 86 +++++++++++++++++++++++++---- tests/conftest.py | 15 +++++ tests/fixtures.py | 23 ++++++++ tests/test_plugins.py | 26 +++++++++ tests/test_templates/show_json.html | 8 +++ 9 files changed, 186 insertions(+), 19 deletions(-) create mode 100644 tests/test_templates/show_json.html diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 42adaae8..3c6726b7 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -35,6 +35,11 @@ def extra_body_script(template, database, table, view_name, datasette): "Extra JavaScript code to be included in diff --git a/datasette/templates/_codemirror_foot.html b/datasette/templates/_codemirror_foot.html index 4b55bf8d..9bc6d97f 100644 --- a/datasette/templates/_codemirror_foot.html +++ b/datasette/templates/_codemirror_foot.html @@ -1,5 +1,18 @@ diff --git a/datasette/templates/database.html b/datasette/templates/database.html index a934f336..a0d0fcf6 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -26,7 +26,10 @@

Custom SQL query

-

+

+ + +

{% endif %} diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 7c6c59f3..34fa78a5 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -37,7 +37,7 @@ {% if editable and config.allow_sql %}

{% else %} -
{% if query %}{{ query.sql }}{% endif %}
+
{% if query %}{{ query.sql }}{% endif %}
{% endif %} {% else %} @@ -49,7 +49,10 @@

{% endfor %} {% endif %} -

+

+ + +

{% if display_rows %} From 908fc3999e06f3ccd3bb8ad0539490bbc7809748 Mon Sep 17 00:00:00 2001 From: Tobias Kunze Date: Mon, 14 Oct 2019 05:52:33 +0200 Subject: [PATCH 0039/2113] Sort databases on homepage by argument order - #591 Closes #585 - thanks, @rixx! --- datasette/app.py | 2 +- datasette/views/index.py | 2 -- tests/test_html.py | 4 ++-- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 41a4eb37..935b1730 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -159,7 +159,7 @@ class Datasette: self.files = [MEMORY] elif memory: self.files = (MEMORY,) + self.files - self.databases = {} + self.databases = collections.OrderedDict() self.inspect_data = inspect_data for file in self.files: path = file diff --git a/datasette/views/index.py b/datasette/views/index.py index fddb04d9..f2e5f774 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -97,8 +97,6 @@ class IndexView(BaseView): } ) - databases.sort(key=lambda database: database["name"]) - if as_format: headers = {} if self.ds.cors: diff --git a/tests/test_html.py b/tests/test_html.py index 0a6df984..ec7765f6 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -26,11 +26,11 @@ def test_homepage(app_client_two_attached_databases): ) # Should be two attached databases assert [ - {"href": "/extra_database", "text": "extra_database"}, {"href": "/fixtures", "text": "fixtures"}, + {"href": "/extra_database", "text": "extra_database"}, ] == [{"href": a["href"], "text": a.text.strip()} for a in soup.select("h2 a")] # The first attached database should show count text and attached tables - h2 = soup.select("h2")[0] + h2 = soup.select("h2")[1] assert "extra_database" == h2.text.strip() counts_p, links_p = h2.find_all_next("p")[:2] assert ( From 12cec411cae73ba7211429da12cd32c551fe17b1 Mon Sep 17 00:00:00 2001 From: Tobias Kunze Date: Mon, 14 Oct 2019 05:53:21 +0200 Subject: [PATCH 0040/2113] Display metadata footer on custom SQL queries (#589) Closes #408 - thanks, @rixx! 
--- datasette/views/database.py | 10 ++++++---- tests/test_html.py | 12 ++++++++++++ 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index 78af19c5..31d6af59 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -10,12 +10,17 @@ class DatabaseView(DataView): name = "database" async def data(self, request, database, hash, default_labels=False, _size=None): + metadata = (self.ds.metadata("databases") or {}).get(database, {}) + self.ds.update_with_inherited_metadata(metadata) + if request.args.get("sql"): if not self.ds.config("allow_sql"): raise DatasetteError("sql= is not allowed", status=400) sql = request.raw_args.pop("sql") validate_sql_select(sql) - return await self.custom_sql(request, database, hash, sql, _size=_size) + return await self.custom_sql( + request, database, hash, sql, _size=_size, metadata=metadata + ) db = self.ds.databases[database] @@ -24,9 +29,6 @@ class DatabaseView(DataView): hidden_table_names = set(await db.hidden_table_names()) all_foreign_keys = await db.get_all_foreign_keys() - metadata = (self.ds.metadata("databases") or {}).get(database, {}) - self.ds.update_with_inherited_metadata(metadata) - tables = [] for table in table_counts: table_columns = await db.table_columns(table) diff --git a/tests/test_html.py b/tests/test_html.py index ec7765f6..0bb1c163 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -737,6 +737,18 @@ def test_database_metadata(app_client): assert_footer_links(soup) +def test_database_metadata_with_custom_sql(app_client): + response = app_client.get("/fixtures?sql=select+*+from+simple_primary_key") + assert response.status == 200 + soup = Soup(response.body, "html.parser") + # Page title should be the default + assert "fixtures" == soup.find("h1").text + # Description should be custom + assert "Custom SQL query returning" in soup.find("h3").text + # The source/license should be inherited + assert_footer_links(soup) + + def test_table_metadata(app_client): response = app_client.get("/fixtures/simple_primary_key") assert response.status == 200 From 9366d0bf191daccee6093c54ed51a2855d129cd8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 14 Oct 2019 15:29:16 -0700 Subject: [PATCH 0041/2113] Add Python versions badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 59a6649e..a4db6611 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ # Datasette [![PyPI](https://img.shields.io/pypi/v/datasette.svg)](https://pypi.org/project/datasette/) +[![Python 3.x](https://img.shields.io/pypi/pyversions/datasette.svg?logo=python&logoColor=white)](https://pypi.org/project/datasette/) [![Travis CI](https://travis-ci.org/simonw/datasette.svg?branch=master)](https://travis-ci.org/simonw/datasette) [![Documentation Status](https://readthedocs.org/projects/datasette/badge/?version=latest)](http://datasette.readthedocs.io/en/latest/?badge=latest) [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/simonw/datasette/blob/master/LICENSE) From 3e864b1625f3142e6ff084f9b41247f2f9f60f80 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 17 Oct 2019 14:51:45 -0700 Subject: [PATCH 0042/2113] Use --platform=managed for publish cloudrun, closes #587 --- datasette/publish/cloudrun.py | 2 +- tests/test_publish_cloudrun.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 
32c9cd2a..c2d77746 100644
--- a/datasette/publish/cloudrun.py
+++ b/datasette/publish/cloudrun.py
@@ -110,7 +110,7 @@ def publish_subcommand(publish):
     image_id = "gcr.io/{project}/{name}".format(project=project, name=name)
     check_call("gcloud builds submit --tag {}".format(image_id), shell=True)
     check_call(
-        "gcloud beta run deploy --allow-unauthenticated --image {}{}".format(
+        "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {}{}".format(
             image_id, " {}".format(service) if service else ""
         ),
         shell=True,

diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py
index 1e9bb830..481ac04d 100644
--- a/tests/test_publish_cloudrun.py
+++ b/tests/test_publish_cloudrun.py
@@ -40,7 +40,7 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which):
         [
             mock.call("gcloud builds submit --tag {}".format(tag), shell=True),
             mock.call(
-                "gcloud beta run deploy --allow-unauthenticated --image {}".format(
+                "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {}".format(
                     tag
                 ),
                 shell=True,

From b6ad1fdc7068cb8248787843e7438d1f19fa2e3a Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 17 Oct 2019 22:23:01 -0700
Subject: [PATCH 0043/2113] Fixed bug returning non-ascii characters in CSV, closes #584

---
 datasette/utils/asgi.py | 2 +-
 tests/test_csv.py       | 9 +++++++++
 2 files changed, 10 insertions(+), 1 deletion(-)

diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py
index eaf3428d..bafcfb4a 100644
--- a/datasette/utils/asgi.py
+++ b/datasette/utils/asgi.py
@@ -217,7 +217,7 @@ class AsgiWriter:
         await self.send(
             {
                 "type": "http.response.body",
-                "body": chunk.encode("latin-1"),
+                "body": chunk.encode("utf-8"),
                 "more_body": True,
             }
         )

diff --git a/tests/test_csv.py b/tests/test_csv.py
index c3cdc241..1d5d2df2 100644
--- a/tests/test_csv.py
+++ b/tests/test_csv.py
@@ -80,6 +80,15 @@ def test_table_csv_download(app_client):
     assert expected_disposition == response.headers["Content-Disposition"]
 
 
+def test_csv_with_non_ascii_characters(app_client):
+    response = app_client.get(
+        "/fixtures.csv?sql=select%0D%0A++%27%F0%9D%90%9C%F0%9D%90%A2%F0%9D%90%AD%F0%9D%90%A2%F0%9D%90%9E%F0%9D%90%AC%27+as+text%2C%0D%0A++1+as+number%0D%0Aunion%0D%0Aselect%0D%0A++%27bob%27+as+text%2C%0D%0A++2+as+number%0D%0Aorder+by%0D%0A++number"
+    )
+    assert response.status == 200
+    assert "text/plain; charset=utf-8" == response.headers["content-type"]
+    assert "text,number\r\n𝐜𝐢𝐭𝐢𝐞𝐬,1\r\nbob,2\r\n" == response.body.decode("utf8")
+
+
 def test_max_csv_mb(app_client_csv_max_mb_one):
     response = app_client_csv_max_mb_one.get(
         "/fixtures.csv?sql=select+randomblob(10000)+"
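The one-line encoding fix above is easy to motivate: latin-1 can only represent code points up to U+00FF, so any CSV chunk containing characters outside that range raised an exception before it ever reached the client. A quick sketch of the failure mode (illustrative, not taken from the patch):

.. code-block:: python

    chunk = "text,number\r\n𝐜𝐢𝐭𝐢𝐞𝐬,1\r\n"
    chunk.encode("utf-8")    # fine - what the fix does
    chunk.encode("latin-1")  # raises UnicodeEncodeError - the old behaviour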
From b647b5efc29300f715ba656e41b0591f342938e1 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 18 Oct 2019 15:51:07 -0700
Subject: [PATCH 0044/2113] Fix for /foo v.s. /foo-bar issue, closes #597

Pull request #599
---
 datasette/views/base.py | 16 ++++++++--------
 tests/fixtures.py       |  7 +++++++
 tests/test_api.py       | 18 ++++++++++++++++++
 3 files changed, 33 insertions(+), 8 deletions(-)

diff --git a/datasette/views/base.py b/datasette/views/base.py
index db1d69d9..219630af 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -193,14 +193,14 @@ class DataView(BaseView):
     async def resolve_db_name(self, request, db_name, **kwargs):
         hash = None
         name = None
-        if "-" in db_name:
-            # Might be name-and-hash, or might just be
-            # a name with a hyphen in it
-            name, hash = db_name.rsplit("-", 1)
-            if name not in self.ds.databases:
-                # Try the whole name
-                name = db_name
-                hash = None
+        if db_name not in self.ds.databases and "-" in db_name:
+            # No matching DB found, maybe it's a name-hash?
+            name_bit, hash_bit = db_name.rsplit("-", 1)
+            if name_bit not in self.ds.databases:
+                raise NotFound("Database not found: {}".format(name))
+            else:
+                name = name_bit
+                hash = hash_bit
         else:
             name = db_name
         # Verify the hash

diff --git a/tests/fixtures.py b/tests/fixtures.py
index dac28dc0..a4c32f36 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -178,6 +178,13 @@ def app_client_two_attached_databases():
     )
 
 
+@pytest.fixture(scope="session")
+def app_client_conflicting_database_names():
+    yield from make_app_client(
+        extra_databases={"foo.db": EXTRA_DATABASE_SQL, "foo-bar.db": EXTRA_DATABASE_SQL}
+    )
+
+
 @pytest.fixture(scope="session")
 def app_client_two_attached_databases_one_immutable():
     yield from make_app_client(

diff --git a/tests/test_api.py b/tests/test_api.py
index cc00b780..826c00f3 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -7,6 +7,7 @@ from .fixtures import (  # noqa
     app_client_larger_cache_size,
     app_client_returned_rows_matches_page_size,
     app_client_two_attached_databases_one_immutable,
+    app_client_conflicting_database_names,
     app_client_with_cors,
     app_client_with_dot,
     generate_compound_rows,
@@ -1652,3 +1653,20 @@ def test_cors(app_client_with_cors, path, status_code):
     response = app_client_with_cors.get(path)
     assert response.status == status_code
     assert "*" == response.headers["Access-Control-Allow-Origin"]
+
+
+def test_common_prefix_database_names(app_client_conflicting_database_names):
+    # https://github.com/simonw/datasette/issues/597
+    assert ["fixtures", "foo", "foo-bar"] == [
+        d["name"]
+        for d in json.loads(
+            app_client_conflicting_database_names.get("/-/databases.json").body.decode(
+                "utf8"
+            )
+        )
+    ]
+    for db_name, path in (("foo", "/foo.json"), ("foo-bar", "/foo-bar.json")):
+        data = json.loads(
+            app_client_conflicting_database_names.get(path).body.decode("utf8")
+        )
+        assert db_name == data["database"]

From e877b1cb12076946fdbec7ca2fbfbfc75c1c2a28 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 18 Oct 2019 16:56:44 -0700
Subject: [PATCH 0045/2113] Don't auto-format SQL on page load (#601)

Closes #600
---
 datasette/templates/_codemirror_foot.html | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/datasette/templates/_codemirror_foot.html b/datasette/templates/_codemirror_foot.html
index 9bc6d97f..9aba61ab 100644
--- a/datasette/templates/_codemirror_foot.html
+++ b/datasette/templates/_codemirror_foot.html
@@ -6,12 +6,6 @@ window.onload = () => {
     if (sqlFormat && !readOnly) {
         sqlFormat.hidden = false;
     }
-    if (readOnly) {
-        readOnly.innerHTML = sqlFormatter.format(readOnly.innerHTML);
-    }
-    if (sqlInput) {
-        sqlInput.value = sqlFormatter.format(sqlInput.value);
-    }
     var editor = 
CodeMirror.fromTextArea(sqlInput, { lineNumbers: true, mode: "text/x-sql", From debea4f971c180af64e16b83be98d830e9dee54f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 18 Oct 2019 18:05:47 -0700 Subject: [PATCH 0046/2113] Release 0.30 --- docs/changelog.rst | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 26d0f75c..e8dafa35 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,20 @@ Changelog ========= +.. _v0_30: + +0.30 (2019-10-18) +----------------- + +- Added ``/-/threads`` debugging page +- Allow ``EXPLAIN WITH...`` (`#583 `__) +- Button to format SQL - thanks, Tobias Kunze (`#136 `__) +- Sort databases on homepage by argument order - thanks, Tobias Kunze (`#585 `__) +- Display metadata footer on custom SQL queries - thanks, Tobias Kunze (`#589 `__) +- Use ``--platform=managed`` for ``publish cloudrun`` (`#587 `__) +- Fixed bug returning non-ASCII characters in CSV (`#584 `__) +- Fix for ``/foo`` v.s. ``/foo-bar`` bug (`#601 `__) + .. _v0_29_3: 0.29.3 (2019-09-02) From 8050f9e1ece9afd0236ad38c6458c12a4ad917e6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 18 Oct 2019 18:08:04 -0700 Subject: [PATCH 0047/2113] Update news in README --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index a4db6611..5894017e 100644 --- a/README.md +++ b/README.md @@ -21,6 +21,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover ## News + * 18th October 2019: [Datasette 0.30](https://datasette.readthedocs.io/en/stable/changelog.html#v0-30) * 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail. * 7th July 2019: [Datasette 0.29](https://datasette.readthedocs.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more... * [datasette-auth-github](https://github.com/simonw/datasette-auth-github) - a new plugin for Datasette 0.29 that lets you require users to authenticate against GitHub before accessing your Datasette instance. You can whitelist specific users, or you can restrict access to members of specific GitHub organizations or teams. From f4c0830529a9513a83437a9e1550bbe27ebc5c64 Mon Sep 17 00:00:00 2001 From: chris48s Date: Mon, 21 Oct 2019 03:03:08 +0100 Subject: [PATCH 0048/2113] Always pop as_format off args dict (#603) Closes #563. 
Thanks, @chris48s --- datasette/views/base.py | 2 ++ tests/test_api.py | 9 +++++++++ 2 files changed, 11 insertions(+) diff --git a/datasette/views/base.py b/datasette/views/base.py index 219630af..348f0c03 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -362,6 +362,8 @@ class DataView(BaseView): _format = request.args.get("_format", None) if not _format: _format = (args.pop("as_format", None) or "").lstrip(".") + else: + args.pop("as_format", None) if "table_and_format" in args: db = self.ds.databases[database] diff --git a/tests/test_api.py b/tests/test_api.py index 826c00f3..a734b8de 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1107,6 +1107,15 @@ def test_row(app_client): assert [{"id": "1", "content": "hello"}] == response.json["rows"] +def test_row_format_in_querystring(app_client): + # regression test for https://github.com/simonw/datasette/issues/563 + response = app_client.get( + "/fixtures/simple_primary_key/1?_format=json&_shape=objects" + ) + assert response.status == 200 + assert [{"id": "1", "content": "hello"}] == response.json["rows"] + + def test_row_strange_table_name(app_client): response = app_client.get( "/fixtures/table%2Fwith%2Fslashes.csv/3.json?_shape=objects" From 5dd4d2b2d3abcfd507a6df47e7c2fbad3c552fd8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 30 Oct 2019 11:49:01 -0700 Subject: [PATCH 0049/2113] Update to latest black (#609) --- datasette/views/base.py | 9 ++++++--- datasette/views/table.py | 7 ++++--- setup.py | 2 +- tests/test_api.py | 2 +- 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index 348f0c03..1568b084 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -257,9 +257,12 @@ class DataView(BaseView): assert NotImplemented async def get(self, request, db_name, **kwargs): - database, hash, correct_hash_provided, should_redirect = await self.resolve_db_name( - request, db_name, **kwargs - ) + ( + database, + hash, + correct_hash_provided, + should_redirect, + ) = await self.resolve_db_name(request, db_name, **kwargs) if should_redirect: return self.redirect(request, should_redirect, remove_args={"_hash"}) diff --git a/datasette/views/table.py b/datasette/views/table.py index 8ba3abe4..e0362e53 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -586,9 +586,10 @@ class TableView(RowTableShared): ) for facet in facet_instances: - instance_facet_results, instance_facets_timed_out = ( - await facet.facet_results() - ) + ( + instance_facet_results, + instance_facets_timed_out, + ) = await facet.facet_results() facet_results.update(instance_facet_results) facets_timed_out.extend(instance_facets_timed_out) diff --git a/setup.py b/setup.py index cbe545a1..9ae56306 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,7 @@ def get_version(): # Only install black on Python 3.6 or higher maybe_black = [] if sys.version_info > (3, 6): - maybe_black = ["black"] + maybe_black = ["black~=19.10b0"] setup( name="datasette", diff --git a/tests/test_api.py b/tests/test_api.py index a734b8de..4ea95e84 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1245,7 +1245,7 @@ def test_config_json(app_client): def test_page_size_matching_max_returned_rows( - app_client_returned_rows_matches_page_size + app_client_returned_rows_matches_page_size, ): fetched = [] path = "/fixtures/no_primary_key.json" From e2c390500e6782aa476a7edc05c46cf907875a6e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 30 Oct 2019 11:49:26 -0700 
Subject: [PATCH 0050/2113] Persist _where= in hidden fields, closes #604 --- datasette/views/table.py | 3 +++ tests/test_html.py | 6 ++++++ 2 files changed, 9 insertions(+) diff --git a/datasette/views/table.py b/datasette/views/table.py index e0362e53..652ce994 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -721,6 +721,9 @@ class TableView(RowTableShared): for arg in ("_fts_table", "_fts_pk"): if arg in special_args: form_hidden_args.append((arg, special_args[arg])) + if request.args["_where"]: + for where_text in request.args["_where"]: + form_hidden_args.append(("_where", where_text)) return { "supports_search": bool(fts_table), "search": search or "", diff --git a/tests/test_html.py b/tests/test_html.py index 0bb1c163..aa628dec 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -955,6 +955,12 @@ def test_extra_where_clauses(app_client): "/fixtures/facetable?_where=city_id%3D1", "/fixtures/facetable?_where=neighborhood%3D%27Dogpatch%27", ] == hrefs + # These should also be persisted as hidden fields + inputs = soup.find("form").findAll("input") + hiddens = [i for i in inputs if i["type"] == "hidden"] + assert [("_where", "neighborhood='Dogpatch'"), ("_where", "city_id=1")] == [ + (hidden["name"], hidden["value"]) for hidden in hiddens + ] def test_binary_data_display(app_client): From f5f6cbe03cbf05737d848f44779372b5daa79a25 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 30 Oct 2019 11:56:04 -0700 Subject: [PATCH 0051/2113] Release 0.30.1 --- docs/changelog.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index e8dafa35..8ac32c45 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,14 @@ Changelog ========= +.. _v0_30_1: + +0.30.1 (2019-10-30) +------------------- + +- Fixed bug where ``?_where=`` parameter was not persisted in hidden form fields (`#604 `__) +- Fixed bug with .JSON representation of row pages - thanks, Chris Shaw (`#603 `__) + .. 
_v0_30: 0.30 (2019-10-18) From 3ca290e0db03bb4747e24203c445873f74512107 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 30 Oct 2019 12:00:21 -0700 Subject: [PATCH 0052/2113] Fixed dumb error --- datasette/views/table.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 652ce994..44b186cf 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -721,7 +721,7 @@ class TableView(RowTableShared): for arg in ("_fts_table", "_fts_pk"): if arg in special_args: form_hidden_args.append((arg, special_args[arg])) - if request.args["_where"]: + if request.args.get("_where"): for where_text in request.args["_where"]: form_hidden_args.append(("_where", where_text)) return { From 937828f946238c28e77ba50e0b2e649c874560f7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 31 Oct 2019 22:39:59 -0700 Subject: [PATCH 0053/2113] Use distinfo.project_name for plugin name if available, closes #606 --- datasette/utils/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 449217b5..3d28a36b 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -633,6 +633,7 @@ def get_plugins(pm): distinfo = plugin_to_distinfo.get(plugin) if distinfo: plugin_info["version"] = distinfo.version + plugin_info["name"] = distinfo.project_name plugins.append(plugin_info) return plugins From 50287e7c6bb0987536e5515f05945721c4515e9a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 1 Nov 2019 12:37:46 -0700 Subject: [PATCH 0054/2113] Only suggest array facet for arrays of strings - closes #562 --- datasette/facets.py | 44 +++++++++++++++++++++++++++++++++----------- tests/fixtures.py | 33 +++++++++++++++++---------------- tests/test_api.py | 20 +++++++++++++++++--- tests/test_csv.py | 32 ++++++++++++++++---------------- tests/test_facets.py | 9 +++++++++ 5 files changed, 92 insertions(+), 46 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index 365d9c65..9b5baaa2 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -257,6 +257,16 @@ class ColumnFacet(Facet): class ArrayFacet(Facet): type = "array" + def _is_json_array_of_strings(self, json_string): + try: + array = json.loads(json_string) + except ValueError: + return False + for item in array: + if not isinstance(item, str): + return False + return True + async def suggest(self): columns = await self.get_columns(self.sql, self.params) suggested_facets = [] @@ -282,18 +292,30 @@ class ArrayFacet(Facet): ) types = tuple(r[0] for r in results.rows) if types in (("array",), ("array", None)): - suggested_facets.append( - { - "name": column, - "type": "array", - "toggle_url": self.ds.absolute_url( - self.request, - path_with_added_args( - self.request, {"_facet_array": column} - ), - ), - } + # Now sanity check that first 100 arrays contain only strings + first_100 = await self.ds.execute( + self.database, + "select {column} from ({sql}) where {column} is not null".format( + column=escape_sqlite(column), sql=self.sql + ), + self.params, + truncate=False, + custom_time_limit=self.ds.config("facet_suggest_time_limit_ms"), + log_sql_errors=False, ) + if all(self._is_json_array_of_strings(r[0]) for r in first_100): + suggested_facets.append( + { + "name": column, + "type": "array", + "toggle_url": self.ds.absolute_url( + self.request, + path_with_added_args( + self.request, {"_facet_array": column} + ), + ), + } + ) except (QueryInterrupted, 
sqlite3.OperationalError): continue return suggested_facets diff --git a/tests/fixtures.py b/tests/fixtures.py index a4c32f36..93c3da9f 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -661,26 +661,27 @@ CREATE TABLE facetable ( city_id integer, neighborhood text, tags text, + complex_array text, FOREIGN KEY ("city_id") REFERENCES [facet_cities](id) ); INSERT INTO facetable - (created, planet_int, on_earth, state, city_id, neighborhood, tags) + (created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array) VALUES - ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]'), - ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]'), - ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]'), - ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]'), - ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]'), - ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]'), - ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Hollywood', '[]'), - ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]'), - ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]'), - ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]'), - ("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]'), - ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]'), - ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]'), - ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]'), - ("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]') + ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]'), + ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]'), + ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]', '[]'), + ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]'), + ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]'), + ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]'), + ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Hollywood', '[]', '[]'), + ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]', '[]'), + ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]'), + ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]', '[]'), + ("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]', '[]'), + ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]', '[]'), + ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]', '[]'), + ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]'), + ("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]') ; CREATE TABLE binary_data ( diff --git a/tests/test_api.py b/tests/test_api.py index 4ea95e84..41557bcf 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -195,6 +195,7 @@ def test_database_page(app_client): "city_id", "neighborhood", "tags", + "complex_array", ], "primary_keys": ["pk"], "count": 15, @@ -1029,15 +1030,25 @@ def test_table_filter_queries_multiple_of_same_type(app_client): def test_table_filter_json_arraycontains(app_client): response = app_client.get("/fixtures/facetable.json?tags__arraycontains=tag1") assert [ - [1, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Mission", '["tag1", "tag2"]'], - [2, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Dogpatch", '["tag1", "tag3"]'], + [ + 1, + "2019-01-14 08:00:00", + 1, + 1, + "CA", + 1, + "Mission", + '["tag1", "tag2"]', + '[{"foo": "bar"}]', + ], + [2, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Dogpatch", '["tag1", "tag3"]', "[]"], ] == response.json["rows"] def test_table_filter_extra_where(app_client): response = 
app_client.get("/fixtures/facetable.json?_where=neighborhood='Dogpatch'") assert [ - [2, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Dogpatch", '["tag1", "tag3"]'] + [2, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Dogpatch", '["tag1", "tag3"]', "[]"] ] == response.json["rows"] @@ -1453,6 +1464,7 @@ def test_suggested_facets(app_client): {"name": "city_id", "querystring": "_facet=city_id"}, {"name": "neighborhood", "querystring": "_facet=neighborhood"}, {"name": "tags", "querystring": "_facet=tags"}, + {"name": "complex_array", "querystring": "_facet=complex_array"}, {"name": "created", "querystring": "_facet_date=created"}, ] if detect_json1(): @@ -1488,6 +1500,7 @@ def test_expand_labels(app_client): "city_id": {"value": 1, "label": "San Francisco"}, "neighborhood": "Dogpatch", "tags": '["tag1", "tag3"]', + "complex_array": "[]", }, "13": { "pk": 13, @@ -1498,6 +1511,7 @@ def test_expand_labels(app_client): "city_id": {"value": 3, "label": "Detroit"}, "neighborhood": "Corktown", "tags": "[]", + "complex_array": "[]", }, } == response.json diff --git a/tests/test_csv.py b/tests/test_csv.py index 1d5d2df2..b148b6db 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -21,22 +21,22 @@ world ) EXPECTED_TABLE_WITH_LABELS_CSV = """ -pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags -1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]" -2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]" -3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[] -4,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Tenderloin,[] -5,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Bernal Heights,[] -6,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Hayes Valley,[] -7,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Hollywood,[] -8,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Downtown,[] -9,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Los Feliz,[] -10,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Koreatown,[] -11,2019-01-16 08:00:00,1,1,MI,3,Detroit,Downtown,[] -12,2019-01-17 08:00:00,1,1,MI,3,Detroit,Greektown,[] -13,2019-01-17 08:00:00,1,1,MI,3,Detroit,Corktown,[] -14,2019-01-17 08:00:00,1,1,MI,3,Detroit,Mexicantown,[] -15,2019-01-17 08:00:00,2,0,MC,4,Memnonia,Arcadia Planitia,[] +pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,complex_array +1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]" +2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]",[] +3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[] +4,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Tenderloin,[],[] +5,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Bernal Heights,[],[] +6,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Hayes Valley,[],[] +7,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Hollywood,[],[] +8,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Downtown,[],[] +9,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Los Feliz,[],[] +10,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Koreatown,[],[] +11,2019-01-16 08:00:00,1,1,MI,3,Detroit,Downtown,[],[] +12,2019-01-17 08:00:00,1,1,MI,3,Detroit,Greektown,[],[] +13,2019-01-17 08:00:00,1,1,MI,3,Detroit,Corktown,[],[] +14,2019-01-17 08:00:00,1,1,MI,3,Detroit,Mexicantown,[],[] +15,2019-01-17 08:00:00,2,0,MC,4,Memnonia,Arcadia Planitia,[],[] """.lstrip().replace( "\n", "\r\n" ) diff --git a/tests/test_facets.py b/tests/test_facets.py index 9169f666..402c155b 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -23,6 +23,10 @@ async def test_column_facet_suggest(app_client): {"name": "city_id", 
"toggle_url": "http://localhost/?_facet=city_id"}, {"name": "neighborhood", "toggle_url": "http://localhost/?_facet=neighborhood"}, {"name": "tags", "toggle_url": "http://localhost/?_facet=tags"}, + { + "name": "complex_array", + "toggle_url": "http://localhost/?_facet=complex_array", + }, ] == suggestions @@ -57,6 +61,10 @@ async def test_column_facet_suggest_skip_if_already_selected(app_client): "name": "tags", "toggle_url": "http://localhost/?_facet=planet_int&_facet=on_earth&_facet=tags", }, + { + "name": "complex_array", + "toggle_url": "http://localhost/?_facet=planet_int&_facet=on_earth&_facet=complex_array", + }, ] == suggestions @@ -78,6 +86,7 @@ async def test_column_facet_suggest_skip_if_enabled_by_metadata(app_client): "state", "neighborhood", "tags", + "complex_array", ] == suggestions From ba5414f16b49781261d0f41a16f2210d5fa3976f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 1 Nov 2019 12:38:15 -0700 Subject: [PATCH 0055/2113] Only inspect first 100 records for #562 --- datasette/facets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/facets.py b/datasette/facets.py index 9b5baaa2..7f350dfe 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -295,7 +295,7 @@ class ArrayFacet(Facet): # Now sanity check that first 100 arrays contain only strings first_100 = await self.ds.execute( self.database, - "select {column} from ({sql}) where {column} is not null".format( + "select {column} from ({sql}) where {column} is not null limit 100".format( column=escape_sqlite(column), sql=self.sql ), self.params, From 7152e76eda9049574643261e7a471958cc16d0b9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 1 Nov 2019 14:45:59 -0700 Subject: [PATCH 0056/2113] Don't suggest array facet if column is only [], closes #610 --- datasette/facets.py | 29 ++++++++++++++++++----------- tests/test_facets.py | 14 ++++++++++++++ 2 files changed, 32 insertions(+), 11 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index 7f350dfe..0c6459d6 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -293,17 +293,24 @@ class ArrayFacet(Facet): types = tuple(r[0] for r in results.rows) if types in (("array",), ("array", None)): # Now sanity check that first 100 arrays contain only strings - first_100 = await self.ds.execute( - self.database, - "select {column} from ({sql}) where {column} is not null limit 100".format( - column=escape_sqlite(column), sql=self.sql - ), - self.params, - truncate=False, - custom_time_limit=self.ds.config("facet_suggest_time_limit_ms"), - log_sql_errors=False, - ) - if all(self._is_json_array_of_strings(r[0]) for r in first_100): + first_100 = [ + v[0] + for v in await self.ds.execute( + self.database, + "select {column} from ({sql}) where {column} is not null and json_array_length({column}) > 0 limit 100".format( + column=escape_sqlite(column), sql=self.sql + ), + self.params, + truncate=False, + custom_time_limit=self.ds.config( + "facet_suggest_time_limit_ms" + ), + log_sql_errors=False, + ) + ] + if first_100 and all( + self._is_json_array_of_strings(r) for r in first_100 + ): suggested_facets.append( { "name": column, diff --git a/tests/test_facets.py b/tests/test_facets.py index 402c155b..e3dc3df3 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -215,6 +215,20 @@ async def test_array_facet_suggest(app_client): ] == suggestions +@pytest.mark.asyncio +@pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module") +async def 
test_array_facet_suggest_not_if_all_empty_arrays(app_client): + facet = ArrayFacet( + app_client.ds, + MockRequest("http://localhost/"), + database="fixtures", + sql="select * from facetable where tags = '[]'", + table="facetable", + ) + suggestions = await facet.suggest() + assert [] == suggestions + + @pytest.mark.asyncio @pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module") async def test_array_facet_results(app_client): From ffae2f0ecde1ca92e78d097665df820d3b7861e6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 1 Nov 2019 14:57:49 -0700 Subject: [PATCH 0057/2113] Better documentation of --host, closes #574 --- README.md | 25 +++++++++++++++---------- datasette/cli.py | 11 +++++++++-- docs/datasette-serve-help.txt | 7 +++++-- 3 files changed, 29 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 5894017e..9f85f1ba 100644 --- a/README.md +++ b/README.md @@ -89,26 +89,31 @@ Now visiting http://localhost:8001/History/downloads will show you a web interfa ## datasette serve options - $ datasette serve --help - Usage: datasette serve [OPTIONS] [FILES]... Serve up specified SQLite database files with a web UI Options: -i, --immutable PATH Database files to open in immutable mode - -h, --host TEXT host for server, defaults to 127.0.0.1 - -p, --port INTEGER port for server, defaults to 8001 + -h, --host TEXT Host for server. Defaults to 127.0.0.1 which means + only connections from the local machine will be + allowed. Use 0.0.0.0 to listen to all IPs and + allow access from other machines. + -p, --port INTEGER Port for server, defaults to 8001 --debug Enable debug mode - useful for development - --reload Automatically reload if database or code change detected - - useful for development - --cors Enable CORS by serving Access-Control-Allow-Origin: * + --reload Automatically reload if database or code change + detected - useful for development + --cors Enable CORS by serving Access-Control-Allow- + Origin: * --load-extension PATH Path to a SQLite extension to load - --inspect-file TEXT Path to JSON file created using "datasette inspect" - -m, --metadata FILENAME Path to JSON file containing license/source metadata + --inspect-file TEXT Path to JSON file created using "datasette + inspect" + -m, --metadata FILENAME Path to JSON file containing license/source + metadata --template-dir DIRECTORY Path to directory containing custom templates --plugins-dir DIRECTORY Path to directory containing custom plugins - --static STATIC MOUNT mountpoint:path-to-directory for serving static files + --static STATIC MOUNT mountpoint:path-to-directory for serving static + files --memory Make :memory: database available --config CONFIG Set config option using configname:value datasette.readthedocs.io/en/latest/config.html diff --git a/datasette/cli.py b/datasette/cli.py index 181b281c..67c2fe71 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -230,9 +230,16 @@ def package( multiple=True, ) @click.option( - "-h", "--host", default="127.0.0.1", help="host for server, defaults to 127.0.0.1" + "-h", + "--host", + default="127.0.0.1", + help=( + "Host for server. Defaults to 127.0.0.1 which means only connections " + "from the local machine will be allowed. Use 0.0.0.0 to listen to " + "all IPs and allow access from other machines." 
+ ), ) -@click.option("-p", "--port", default=8001, help="port for server, defaults to 8001") +@click.option("-p", "--port", default=8001, help="Port for server, defaults to 8001") @click.option( "--debug", is_flag=True, help="Enable debug mode - useful for development" ) diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index 7b7c3b09..1447e84d 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -6,8 +6,11 @@ Usage: datasette serve [OPTIONS] [FILES]... Options: -i, --immutable PATH Database files to open in immutable mode - -h, --host TEXT host for server, defaults to 127.0.0.1 - -p, --port INTEGER port for server, defaults to 8001 + -h, --host TEXT Host for server. Defaults to 127.0.0.1 which means only + connections from the local machine will be allowed. Use + 0.0.0.0 to listen to all IPs and allow access from other + machines. + -p, --port INTEGER Port for server, defaults to 8001 --debug Enable debug mode - useful for development --reload Automatically reload if database or code change detected - useful for development From ed57e4f99018c1d520858f55f6eee4eb1cc2af3d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 1 Nov 2019 15:15:10 -0700 Subject: [PATCH 0058/2113] Plugin static assets support both hyphens and underscores in names Closes #611 --- datasette/app.py | 13 +++++++++++-- docs/plugins.rst | 2 +- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 935b1730..203e0991 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -612,8 +612,17 @@ class Datasette: # Mount any plugin static/ directories for plugin in get_plugins(pm): if plugin["static_path"]: - modpath = "/-/static-plugins/{}/(?P.*)$".format(plugin["name"]) - add_route(asgi_static(plugin["static_path"]), modpath) + add_route( + asgi_static(plugin["static_path"]), + "/-/static-plugins/{}/(?P.*)$".format(plugin["name"]), + ) + # Support underscores in name in addition to hyphens, see https://github.com/simonw/datasette/issues/611 + add_route( + asgi_static(plugin["static_path"]), + "/-/static-plugins/{}/(?P.*)$".format( + plugin["name"].replace("-", "_") + ), + ) add_route( JsonDataView.as_asgi(self, "metadata.json", lambda: self._metadata), r"/-/metadata(?P(\.json)?)$", diff --git a/docs/plugins.rst b/docs/plugins.rst index 1d4f1e1a..6df7ff6a 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -442,7 +442,7 @@ you have one: @hookimpl def extra_js_urls(): return [ - '/-/static-plugins/your_plugin/app.js' + '/-/static-plugins/your-plugin/app.js' ] .. 
_plugin_hook_publish_subcommand: From 14da70525b35e1a44cd45c19101385467057f041 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 2 Nov 2019 15:29:40 -0700 Subject: [PATCH 0059/2113] Don't show 'None' as label for nullable foreign key, closes #406 --- datasette/views/table.py | 2 +- tests/fixtures.py | 1 + tests/test_api.py | 18 ++++++++++++++++-- tests/test_html.py | 9 +++++++-- 4 files changed, 25 insertions(+), 5 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 44b186cf..326c11ae 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -637,7 +637,7 @@ class TableView(RowTableShared): new_row = CustomRow(columns) for column in row.keys(): value = row[column] - if (column, value) in expanded_labels: + if (column, value) in expanded_labels and value is not None: new_row[column] = { "value": value, "label": expanded_labels[(column, value)], diff --git a/tests/fixtures.py b/tests/fixtures.py index 93c3da9f..8aa44687 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -754,6 +754,7 @@ INSERT INTO primary_key_multiple_columns VALUES (1, 'hey', 'world'); INSERT INTO primary_key_multiple_columns_explicit_label VALUES (1, 'hey', 'world2'); INSERT INTO foreign_key_references VALUES (1, 1, 1); +INSERT INTO foreign_key_references VALUES (2, null, null); INSERT INTO complex_foreign_keys VALUES (1, 1, 2, 1); INSERT INTO custom_foreign_key_label VALUES (1, 1); diff --git a/tests/test_api.py b/tests/test_api.py index 41557bcf..c6acbab1 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -216,7 +216,7 @@ def test_database_page(app_client): "name": "foreign_key_references", "columns": ["pk", "foreign_key_with_label", "foreign_key_with_no_label"], "primary_keys": ["pk"], - "count": 1, + "count": 2, "hidden": False, "fts_table": None, "foreign_keys": { @@ -1519,7 +1519,7 @@ def test_expand_labels(app_client): def test_expand_label(app_client): response = app_client.get( "/fixtures/foreign_key_references.json?_shape=object" - "&_label=foreign_key_with_label" + "&_label=foreign_key_with_label&_size=1" ) assert { "1": { @@ -1693,3 +1693,17 @@ def test_common_prefix_database_names(app_client_conflicting_database_names): app_client_conflicting_database_names.get(path).body.decode("utf8") ) assert db_name == data["database"] + + +def test_null_foreign_keys_are_not_expanded(app_client): + response = app_client.get( + "/fixtures/foreign_key_references.json?_shape=array&_labels=on" + ) + assert [ + { + "pk": "1", + "foreign_key_with_label": {"value": "1", "label": "hello"}, + "foreign_key_with_no_label": {"value": "1", "label": "1"}, + }, + {"pk": "2", "foreign_key_with_label": None, "foreign_key_with_no_label": None,}, + ] == response.json diff --git a/tests/test_html.py b/tests/test_html.py index aa628dec..f63e595b 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -603,7 +603,12 @@ def test_table_html_foreign_key_links(app_client): '1', 'hello\xa01', '1', - ] + ], + [ + '2', + '\xa0', + '\xa0', + ], ] assert expected == [ [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") @@ -611,7 +616,7 @@ def test_table_html_foreign_key_links(app_client): def test_table_html_disable_foreign_key_links_with_labels(app_client): - response = app_client.get("/fixtures/foreign_key_references?_labels=off") + response = app_client.get("/fixtures/foreign_key_references?_labels=off&_size=1") assert response.status == 200 table = Soup(response.body, "html.parser").find("table") expected = [ From c3181d9a840dff7be8c990b21f5749db393a4ea0 Mon 
Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 2 Nov 2019 15:47:20 -0700 Subject: [PATCH 0060/2113] Release notes for 0.30.2 --- docs/changelog.rst | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 8ac32c45..f4761efe 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,16 @@ Changelog ========= +.. _v0_30_2: + +0.30.2 (2019-11-02) +------------------- + +- ``/-/plugins`` page now uses distribution name e.g. ``datasette-cluster-map`` instead of the name of the underlying Python package (``datasette_cluster_map``) (`#606 `__) +- Array faceting is now only suggested for columns that contain arrays of strings (`#562 `__) +- Better documentation for the ``--host`` argument (`#574 `__) +- Don't show ``None`` with a broken link for the label on a nullable foreign key (`#406 `__) + .. _v0_30_1: 0.30.1 (2019-10-30) @@ -14,6 +24,7 @@ Changelog .. _v0_30: + 0.30 (2019-10-18) ----------------- @@ -82,7 +93,7 @@ Two new plugins take advantage of this hook: New plugin hook: extra_template_vars ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The :ref:`plugin_extra_template_vars` plugin hook allows plugins to inject their own additional variables into the Datasette template context. This can be used in conjunction with custom templates to customize the Datasette interface. `datasette-auth-github `__ uses this hook to add custom HTML to the new top navigation bar (which is designed to be modified by plugins, see `#540 `__). +The :ref:`plugin_hook_extra_template_vars` plugin hook allows plugins to inject their own additional variables into the Datasette template context. This can be used in conjunction with custom templates to customize the Datasette interface. `datasette-auth-github `__ uses this hook to add custom HTML to the new top navigation bar (which is designed to be modified by plugins, see `#540 `__). 
Secret plugin configuration options ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 2bf7ce5f517d772a16d7855a35a8a75d4456aad7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 2 Nov 2019 16:12:46 -0700 Subject: [PATCH 0061/2113] Fix CSV export for nullable foreign keys, closes #612 --- datasette/views/base.py | 12 ++++++++---- tests/test_csv.py | 15 +++++++++++++++ 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index 1568b084..94945304 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -330,10 +330,14 @@ class DataView(BaseView): else: # Look for {"value": "label": } dicts and expand new_row = [] - for cell in row: - if isinstance(cell, dict): - new_row.append(cell["value"]) - new_row.append(cell["label"]) + for heading, cell in zip(data["columns"], row): + if heading in expanded_columns: + if cell is None: + new_row.extend(("", "")) + else: + assert isinstance(cell, dict) + new_row.append(cell["value"]) + new_row.append(cell["label"]) else: new_row.append(cell) await writer.writerow(new_row) diff --git a/tests/test_csv.py b/tests/test_csv.py index b148b6db..13aca489 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -41,6 +41,14 @@ pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,com "\n", "\r\n" ) +EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV = """ +pk,foreign_key_with_label,foreign_key_with_label_label,foreign_key_with_no_label,foreign_key_with_no_label_label +1,1,hello,1,1 +2,,,, +""".lstrip().replace( + "\n", "\r\n" +) + def test_table_csv(app_client): response = app_client.get("/fixtures/simple_primary_key.csv") @@ -63,6 +71,13 @@ def test_table_csv_with_labels(app_client): assert EXPECTED_TABLE_WITH_LABELS_CSV == response.text +def test_table_csv_with_nullable_labels(app_client): + response = app_client.get("/fixtures/foreign_key_references.csv?_labels=1") + assert response.status == 200 + assert "text/plain; charset=utf-8" == response.headers["content-type"] + assert EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV == response.text + + def test_custom_sql_csv(app_client): response = app_client.get( "/fixtures.csv?sql=select+content+from+simple_primary_key+limit+2" From ee330222f4c3ee66c2fe41ebc76fed56b9cb9a00 Mon Sep 17 00:00:00 2001 From: Tobias Kunze Date: Mon, 4 Nov 2019 03:39:55 +0100 Subject: [PATCH 0062/2113] Offer to format readonly SQL (#602) Following discussion in #601, this PR adds a "Format SQL" button to read-only SQL (if the SQL actually differs from the formatting result). It also removes a console error on readonly SQL queries. Thanks, @rixx! 
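The condition described above ("if the SQL actually differs from the formatting result") reduces to a single comparison. A minimal Python sketch of the check the template's JavaScript performs below; the function name is illustrative and not part of the patch:

    def should_show_format_button(sql, format_sql):
        # Only reveal the "Format SQL" button when formatting would
        # actually change the displayed query text.
        return format_sql(sql) != sql
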
--- datasette/templates/_codemirror_foot.html | 41 ++++++++++++++--------- 1 file changed, 26 insertions(+), 15 deletions(-) diff --git a/datasette/templates/_codemirror_foot.html b/datasette/templates/_codemirror_foot.html index 9aba61ab..4019d448 100644 --- a/datasette/templates/_codemirror_foot.html +++ b/datasette/templates/_codemirror_foot.html @@ -6,21 +6,32 @@ window.onload = () => { if (sqlFormat && !readOnly) { sqlFormat.hidden = false; } - var editor = CodeMirror.fromTextArea(sqlInput, { - lineNumbers: true, - mode: "text/x-sql", - lineWrapping: true, - }); - editor.setOption("extraKeys", { - "Shift-Enter": function() { - document.getElementsByClassName("sql")[0].submit(); - }, - Tab: false - }); - if (sqlInput && sqlFormat) { - sqlFormat.addEventListener("click", ev => { - editor.setValue(sqlFormatter.format(editor.getValue())); - }) + if (sqlInput) { + var editor = CodeMirror.fromTextArea(sqlInput, { + lineNumbers: true, + mode: "text/x-sql", + lineWrapping: true, + }); + editor.setOption("extraKeys", { + "Shift-Enter": function() { + document.getElementsByClassName("sql")[0].submit(); + }, + Tab: false + }); + if (sqlFormat) { + sqlFormat.addEventListener("click", ev => { + editor.setValue(sqlFormatter.format(editor.getValue())); + }) + } + } + if (sqlFormat && readOnly) { + const formatted = sqlFormatter.format(readOnly.innerHTML); + if (formatted != readOnly.innerHTML) { + sqlFormat.hidden = false; + sqlFormat.addEventListener("click", ev => { + readOnly.innerHTML = formatted; + }) + } } } From 9db22cdf1809fb78a7b183cd2f617cd5e26efc68 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 3 Nov 2019 20:11:55 -0800 Subject: [PATCH 0063/2113] pk__notin= filter, closes #614 --- datasette/filters.py | 15 +++++++++++++++ docs/json_api.rst | 3 +++ tests/test_filters.py | 3 +++ 3 files changed, 21 insertions(+) diff --git a/datasette/filters.py b/datasette/filters.py index efe014ae..5897a3ed 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -77,6 +77,20 @@ class InFilter(Filter): return "{} in {}".format(column, json.dumps(self.split_value(value))) +class NotInFilter(InFilter): + key = "notin" + display = "not in" + + def where_clause(self, table, column, value, param_counter): + values = self.split_value(value) + params = [":p{}".format(param_counter + i) for i in range(len(values))] + sql = "{} not in ({})".format(escape_sqlite(column), ", ".join(params)) + return sql, values + + def human_clause(self, column, value): + return "{} not in {}".format(column, json.dumps(self.split_value(value))) + + class Filters: _filters = ( [ @@ -125,6 +139,7 @@ class Filters: TemplatedFilter("like", "like", '"{c}" like :{p}', '{c} like "{v}"'), TemplatedFilter("glob", "glob", '"{c}" glob :{p}', '{c} glob "{v}"'), InFilter(), + NotInFilter(), ] + ( [ diff --git a/docs/json_api.rst b/docs/json_api.rst index 4b365e14..de70362c 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -228,6 +228,9 @@ You can filter the data returned by the table based on column values using a que ``?column__in=["value","value,with,commas"]`` +``?column__notin=value1,value2,value3`` + Rows where column does not match any of the provided values. The inverse of ``__in=``. Also supports JSON arrays. + ``?column__arraycontains=value`` Works against columns that contain JSON arrays - matches if any of the values in that array match. 
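To make the new filter concrete, a short sketch that mirrors the test expectations below; the table and column names are illustrative picks from the fixtures database:

    from datasette.filters import NotInFilter

    # Produces the same clause asserted in tests/test_filters.py below.
    sql, params = NotInFilter().where_clause("facetable", "pk", "1,2,3", 0)
    assert sql == "pk not in (:p0, :p1, :p2)"
    assert params == ["1", "2", "3"]

The equivalent query string usage is /fixtures/facetable.json?pk__notin=1,2,3
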
diff --git a/tests/test_filters.py b/tests/test_filters.py index fd682cd9..8598087f 100644 --- a/tests/test_filters.py +++ b/tests/test_filters.py @@ -47,6 +47,9 @@ import pytest ["foo in (:p0, :p1)"], ["dog,cat", "cat[dog]"], ), + # Not in, and JSON array not in + ((("foo__notin", "1,2,3"),), ["foo not in (:p0, :p1, :p2)"], ["1", "2", "3"]), + ((("foo__notin", "[1,2,3]"),), ["foo not in (:p0, :p1, :p2)"], [1, 2, 3]), ], ) def test_build_where(args, expected_where, expected_params): From 52fa79c6075f0830ff635b81d957c64d877a05aa Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 4 Nov 2019 15:03:48 -0800 Subject: [PATCH 0064/2113] Use select colnames, not select * for table view - refs #615 --- datasette/views/table.py | 8 ++++++-- tests/test_api.py | 3 ++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 326c11ae..139ff80b 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -235,13 +235,17 @@ class TableView(RowTableShared): raise NotFound("Table not found: {}".format(table)) pks = await db.primary_keys(table) + table_columns = await db.table_columns(table) + + select_columns = ", ".join(escape_sqlite(t) for t in table_columns) + use_rowid = not pks and not is_view if use_rowid: - select = "rowid, *" + select = "rowid, {}".format(select_columns) order_by = "rowid" order_by_pks = "rowid" else: - select = "*" + select = select_columns order_by_pks = ", ".join([escape_sqlite(pk) for pk in pks]) order_by = order_by_pks diff --git a/tests/test_api.py b/tests/test_api.py index c6acbab1..4a09b238 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -610,7 +610,8 @@ def test_table_json(app_client): assert response.status == 200 data = response.json assert ( - data["query"]["sql"] == "select * from simple_primary_key order by id limit 51" + data["query"]["sql"] + == "select id, content from simple_primary_key order by id limit 51" ) assert data["query"]["params"] == {} assert data["rows"] == [ From 931bfc66613aa3e22f8314df5c0d0758baf31f38 Mon Sep 17 00:00:00 2001 From: Tobias Kunze Date: Tue, 5 Nov 2019 00:16:30 +0100 Subject: [PATCH 0065/2113] Handle spaces in DB names (#590) Closes #503 - thanks, @rixx --- datasette/views/base.py | 3 ++- tests/fixtures.py | 4 ++-- tests/test_api.py | 19 ++++++++++++++++++- tests/test_html.py | 8 ++++---- 4 files changed, 26 insertions(+), 8 deletions(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index 94945304..062c6956 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -203,12 +203,13 @@ class DataView(BaseView): hash = hash_bit else: name = db_name - # Verify the hash + name = urllib.parse.unquote_plus(name) try: db = self.ds.databases[name] except KeyError: raise NotFound("Database not found: {}".format(name)) + # Verify the hash expected = "000" if db.hash is not None: expected = db.hash[:HASH_LENGTH] diff --git a/tests/fixtures.py b/tests/fixtures.py index 8aa44687..dcc414bf 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -174,7 +174,7 @@ def app_client_no_files(): @pytest.fixture(scope="session") def app_client_two_attached_databases(): yield from make_app_client( - extra_databases={"extra_database.db": EXTRA_DATABASE_SQL} + extra_databases={"extra database.db": EXTRA_DATABASE_SQL} ) @@ -188,7 +188,7 @@ def app_client_conflicting_database_names(): @pytest.fixture(scope="session") def app_client_two_attached_databases_one_immutable(): yield from make_app_client( - is_immutable=True, 
extra_databases={"extra_database.db": EXTRA_DATABASE_SQL} + is_immutable=True, extra_databases={"extra database.db": EXTRA_DATABASE_SQL} ) diff --git a/tests/test_api.py b/tests/test_api.py index 4a09b238..1fa8642f 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -6,6 +6,7 @@ from .fixtures import ( # noqa app_client_shorter_time_limit, app_client_larger_cache_size, app_client_returned_rows_matches_page_size, + app_client_two_attached_databases, app_client_two_attached_databases_one_immutable, app_client_conflicting_database_names, app_client_with_cors, @@ -1188,7 +1189,7 @@ def test_databases_json(app_client_two_attached_databases_one_immutable): databases = response.json assert 2 == len(databases) extra_database, fixtures_database = databases - assert "extra_database" == extra_database["name"] + assert "extra database" == extra_database["name"] assert None == extra_database["hash"] assert True == extra_database["is_mutable"] assert False == extra_database["is_memory"] @@ -1679,6 +1680,22 @@ def test_cors(app_client_with_cors, path, status_code): assert "*" == response.headers["Access-Control-Allow-Origin"] +@pytest.mark.parametrize( + "path", + ( + "/", + ".json", + "/searchable", + "/searchable.json", + "/searchable_view", + "/searchable_view.json", + ), +) +def test_database_with_space_in_name(app_client_two_attached_databases, path): + response = app_client_two_attached_databases.get("/extra database" + path) + assert response.status == 200 + + def test_common_prefix_database_names(app_client_conflicting_database_names): # https://github.com/simonw/datasette/issues/597 assert ["fixtures", "foo", "foo-bar"] == [ diff --git a/tests/test_html.py b/tests/test_html.py index f63e595b..7f1af86e 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -27,11 +27,11 @@ def test_homepage(app_client_two_attached_databases): # Should be two attached databases assert [ {"href": "/fixtures", "text": "fixtures"}, - {"href": "/extra_database", "text": "extra_database"}, + {"href": "/extra database", "text": "extra database"}, ] == [{"href": a["href"], "text": a.text.strip()} for a in soup.select("h2 a")] # The first attached database should show count text and attached tables h2 = soup.select("h2")[1] - assert "extra_database" == h2.text.strip() + assert "extra database" == h2.text.strip() counts_p, links_p = h2.find_all_next("p")[:2] assert ( "2 rows in 1 table, 5 rows in 4 hidden tables, 1 view" == counts_p.text.strip() @@ -41,8 +41,8 @@ def test_homepage(app_client_two_attached_databases): {"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a") ] assert [ - {"href": "/extra_database/searchable", "text": "searchable"}, - {"href": "/extra_database/searchable_view", "text": "searchable_view"}, + {"href": "/extra database/searchable", "text": "searchable"}, + {"href": "/extra database/searchable_view", "text": "searchable_view"}, ] == table_links From c30f07c58e410ee296b28aeabe4dc461dd40b435 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 5 Nov 2019 21:12:55 -0800 Subject: [PATCH 0066/2113] Removed _group_count=col feature, closes #504 --- datasette/views/table.py | 12 ------------ docs/json_api.rst | 9 --------- 2 files changed, 21 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 139ff80b..920693d7 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -499,18 +499,6 @@ class TableView(RowTableShared): if order_by: order_by = "order by {} ".format(order_by) - # _group_count=col1&_group_count=col2 - group_count 
= special_args_lists.get("_group_count") or [] - if group_count: - sql = 'select {group_cols}, count(*) as "count" from {table_name} {where} group by {group_cols} order by "count" desc limit 100'.format( - group_cols=", ".join( - '"{}"'.format(group_count_col) for group_count_col in group_count - ), - table_name=escape_sqlite(table), - where=where_clause, - ) - return await self.custom_sql(request, database, hash, sql, editable=True) - extra_args = {} # Handle ?_size=500 page_size = _size or request.raw_args.get("_size") diff --git a/docs/json_api.rst b/docs/json_api.rst index de70362c..e369bee7 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -321,15 +321,6 @@ Special table arguments Here's `an example `__. - -``?_group_count=COLUMN`` - Executes a SQL query that returns a count of the number of rows matching - each unique value in that column, with the most common ordered first. - -``?_group_count=COLUMN1&_group_count=column2`` - You can pass multiple ``_group_count`` columns to return counts against - unique combinations of those columns. - ``?_next=TOKEN`` Pagination by continuation token - pass the token that was returned in the ``"next"`` property by the previous page. From f9c146b893856a48afa810ebcce1714f30d0d3a2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 6 Nov 2019 16:55:44 -0800 Subject: [PATCH 0067/2113] Removed unused special_args_lists variable --- datasette/views/table.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 920693d7..a60a3941 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -261,12 +261,10 @@ class TableView(RowTableShared): # That's so if there is a column that starts with _ # it can still be queried using ?_col__exact=blah special_args = {} - special_args_lists = {} other_args = [] for key, value in args.items(): if key.startswith("_") and "__" not in key: special_args[key] = value[0] - special_args_lists[key] = value else: for v in value: other_args.append((key, v)) From 83fc5165ac724f69cd57d8f15cd3038e7b30f878 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 7 Nov 2019 18:48:39 -0800 Subject: [PATCH 0068/2113] Improved UI for publish cloudrun, closes #608 --- datasette/publish/cloudrun.py | 39 ++++++++++++++++++++++-- tests/test_publish_cloudrun.py | 55 ++++++++++++++++++++++++++++++++-- 2 files changed, 90 insertions(+), 4 deletions(-) diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index c2d77746..a833a32b 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -60,6 +60,23 @@ def publish_subcommand(publish): "gcloud config get-value project", shell=True, universal_newlines=True ).strip() + if not service: + # Show the user their current services, then prompt for one + click.echo("Please provide a service name for this deployment\n") + click.echo("Using an existing service name will over-write it") + click.echo("") + existing_services = get_existing_services() + if existing_services: + click.echo("Your existing services:\n") + for existing_service in existing_services: + click.echo( + " {name} - created {created} - {url}".format( + **existing_service + ) + ) + click.echo("") + service = click.prompt("Service name", type=str) + extra_metadata = { "title": title, "license": license, @@ -110,8 +127,26 @@ def publish_subcommand(publish): image_id = "gcr.io/{project}/{name}".format(project=project, name=name) check_call("gcloud builds submit --tag {}".format(image_id), shell=True) check_call( - 
"gcloud beta run deploy --allow-unauthenticated --platform=managed --image {}{}".format( - image_id, " {}".format(service) if service else "" + "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} {}".format( + image_id, service, ), shell=True, ) + + +def get_existing_services(): + services = json.loads( + check_output( + "gcloud beta run services list --platform=managed --format json", + shell=True, + universal_newlines=True, + ) + ) + return [ + { + "name": service["metadata"]["name"], + "created": service["metadata"]["creationTimestamp"], + "url": service["status"]["address"]["url"], + } + for service in services + ] diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index 481ac04d..a038b60e 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -24,6 +24,53 @@ def test_publish_cloudrun_invalid_database(mock_which): assert 'Path "woop.db" does not exist' in result.output +@mock.patch("shutil.which") +@mock.patch("datasette.publish.cloudrun.check_output") +@mock.patch("datasette.publish.cloudrun.check_call") +@mock.patch("datasette.publish.cloudrun.get_existing_services") +def test_publish_cloudrun_prompts_for_service( + mock_get_existing_services, mock_call, mock_output, mock_which +): + mock_get_existing_services.return_value = [ + {"name": "existing", "created": "2019-01-01", "url": "http://www.example.com/"} + ] + mock_output.return_value = "myproject" + mock_which.return_value = True + runner = CliRunner() + with runner.isolated_filesystem(): + open("test.db", "w").write("data") + result = runner.invoke( + cli.cli, ["publish", "cloudrun", "test.db"], input="input-service" + ) + assert ( + """ +Please provide a service name for this deployment + +Using an existing service name will over-write it + +Your existing services: + + existing - created 2019-01-01 - http://www.example.com/ + +Service name: input-service +""".strip() + == result.output.strip() + ) + assert 0 == result.exit_code + tag = "gcr.io/myproject/datasette" + mock_call.assert_has_calls( + [ + mock.call("gcloud builds submit --tag {}".format(tag), shell=True), + mock.call( + "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} input-service".format( + tag + ), + shell=True, + ), + ] + ) + + @mock.patch("shutil.which") @mock.patch("datasette.publish.cloudrun.check_output") @mock.patch("datasette.publish.cloudrun.check_call") @@ -33,14 +80,16 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which): runner = CliRunner() with runner.isolated_filesystem(): open("test.db", "w").write("data") - result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"]) + result = runner.invoke( + cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"] + ) assert 0 == result.exit_code tag = "gcr.io/{}/datasette".format(mock_output.return_value) mock_call.assert_has_calls( [ mock.call("gcloud builds submit --tag {}".format(tag), shell=True), mock.call( - "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {}".format( + "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} test".format( tag ), shell=True, @@ -65,6 +114,8 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which): "publish", "cloudrun", "test.db", + "--service", + "datasette", "--plugin-secret", "datasette-auth-github", "client_id", From 9f5d19c254d1bfbd99f576dff47a6e32e01c76ed Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 8 Nov 2019 18:12:20 -0800 Subject: [PATCH 0069/2113] 
Improved documentation for "publish cloudrun" --- docs/publish.rst | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/docs/publish.rst b/docs/publish.rst index 304be8ef..89d33085 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -43,14 +43,16 @@ You will first need to install and configure the Google Cloud CLI tools by follo You can then publish a database to Google Cloud Run using the following command:: - datasette publish cloudrun mydatabase.db + datasette publish cloudrun mydatabase.db --service=my-database + +A Cloud Run **service** is a single hosted application. The service name you specify will be used as part of the Cloud Run URL. If you deploy to a service name that you have used in the past your new deployment will replace the previous one. + +If you omit the ``--service`` option you will be asked to pick a service name interactively during the deploy. You may need to interact with prompts from the tool. Once it has finished it will output a URL like this one:: - Service [datasette] revision [datasette-00001] has been deployed - and is serving traffic at https://datasette-j7hipcg4aq-uc.a.run.app - -During the deployment the tool will prompt you for the name of your service. You can reuse an existing name to replace your previous deployment with your new version, or pick a new name to deploy to a new URL. + Service [my-service] revision [my-service-00001] has been deployed + and is serving traffic at https://my-service-j7hipcg4aq-uc.a.run.app .. literalinclude:: datasette-publish-cloudrun-help.txt @@ -90,18 +92,18 @@ Custom metadata and plugins You can define your own :ref:`metadata` and deploy that with your instance like so:: - datasette publish nowv1 mydatabase.db -m metadata.json + datasette publish cloudrun --service=my-service mydatabase.db -m metadata.json If you just want to set the title, license or source information you can do that directly using extra options to ``datasette publish``:: - datasette publish nowv1 mydatabase.db \ + datasette publish cloudrun mydatabase.db --service=my-service \ --title="Title of my database" \ --source="Where the data originated" \ --source_url="http://www.example.com/" You can also specify plugins you would like to install. For example, if you want to include the `datasette-vega `_ visualization plugin you can use the following:: - datasette publish nowv1 mydatabase.db --install=datasette-vega + datasette publish cloudrun mydatabase.db --service=my-service --install=datasette-vega If a plugin has any :ref:`plugins_configuration_secret` you can use the ``--plugin-secret`` option to set those secrets at publish time. For example, using Heroku with `datasette-auth-github `__ you might run the following command:: From 10b9d85edaaf198879344aa1c498000cfb27dff8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 8 Nov 2019 18:15:13 -0800 Subject: [PATCH 0070/2113] datasette-csvs on Glitch now uses sqlite-utils It previously used csvs-to-sqlite but that had heavy dependencies. See https://support.glitch.com/t/can-you-upgrade-python-to-latest-version/7980/33 --- docs/getting_started.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/getting_started.rst b/docs/getting_started.rst index d0c22583..fdf7d23c 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -25,7 +25,7 @@ Glitch allows you to "remix" any project to create your own copy and start editi .. 
image:: https://cdn.glitch.com/2703baf2-b643-4da7-ab91-7ee2a2d00b5b%2Fremix-button.svg :target: https://glitch.com/edit/#!/remix/datasette-csvs -Find a CSV file and drag it onto the Glitch file explorer panel - ``datasette-csvs`` will automatically convert it to a SQLite database (using `csvs-to-sqlite `__) and allow you to start exploring it using Datasette. +Find a CSV file and drag it onto the Glitch file explorer panel - ``datasette-csvs`` will automatically convert it to a SQLite database (using `sqlite-utils `__) and allow you to start exploring it using Datasette. If your CSV file has a ``latitude`` and ``longitude`` column you can visualize it on a map by uncommenting the ``datasette-cluster-map`` line in the ``requirements.txt`` file using the Glitch file editor. From 28c4a6db5b5e512db630d7ba6127196185de67c7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 9 Nov 2019 17:29:36 -0800 Subject: [PATCH 0071/2113] CREATE INDEX statements on table page, closes #618 --- datasette/database.py | 13 ++++++++++++- tests/fixtures.py | 1 + tests/test_html.py | 33 +++++++++++++++++++++++++++++++++ 3 files changed, 46 insertions(+), 1 deletion(-) diff --git a/datasette/database.py b/datasette/database.py index 7e6f7245..3a1cea94 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -232,7 +232,18 @@ class Database: ) if not table_definition_rows: return None - return table_definition_rows[0][0] + bits = [table_definition_rows[0][0] + ";"] + # Add on any indexes + index_rows = list( + await self.ds.execute( + self.name, + "select sql from sqlite_master where tbl_name = :n and type='index' and sql is not null", + {"n": table}, + ) + ) + for index_row in index_rows: + bits.append(index_row[0] + ";") + return "\n".join(bits) async def get_view_definition(self, view): return await self.get_table_definition(view, "view") diff --git a/tests/fixtures.py b/tests/fixtures.py index dcc414bf..87e66f99 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -514,6 +514,7 @@ CREATE TABLE compound_three_primary_keys ( content text, PRIMARY KEY (pk1, pk2, pk3) ); +CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_keys(content); CREATE TABLE foreign_key_references ( pk varchar(30) primary key, diff --git a/tests/test_html.py b/tests/test_html.py index 7f1af86e..44627cdc 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -119,6 +119,39 @@ def test_row_strange_table_name_with_url_hash(app_client_with_hash): assert response.status == 200 +@pytest.mark.parametrize( + "path,expected_definition_sql", + [ + ( + "/fixtures/facet_cities", + """ +CREATE TABLE facet_cities ( + id integer primary key, + name text +); + """.strip(), + ), + ( + "/fixtures/compound_three_primary_keys", + """ +CREATE TABLE compound_three_primary_keys ( + pk1 varchar(30), + pk2 varchar(30), + pk3 varchar(30), + content text, + PRIMARY KEY (pk1, pk2, pk3) +); +CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_keys(content); + """.strip(), + ), + ], +) +def test_definition_sql(path, expected_definition_sql, app_client): + response = app_client.get(path) + pre = Soup(response.body, "html.parser").select_one("pre.wrapped-sql") + assert expected_definition_sql == pre.string + + def test_table_cell_truncation(): for client in make_app_client(config={"truncate_cells_html": 5}): response = client.get("/fixtures/facetable") From 1c063fae9dba70f70244db010d55a18846640f07 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 10 Nov 2019 19:45:34 -0800 Subject: [PATCH 
0072/2113] Test against Python 3.8 in Travis (#623) * Test against Python 3.8 in Travis * Avoid current_task warnings in Python 3.8 --- .travis.yml | 1 + datasette/tracer.py | 9 ++++++++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 29388bc1..a6b15b7e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,6 +5,7 @@ dist: xenial python: - "3.6" - "3.7" + - "3.8" - "3.5" # Executed for 3.5 AND 3.5 as the first "test" stage: diff --git a/datasette/tracer.py b/datasette/tracer.py index e46a6fda..a638b140 100644 --- a/datasette/tracer.py +++ b/datasette/tracer.py @@ -9,12 +9,19 @@ tracers = {} TRACE_RESERVED_KEYS = {"type", "start", "end", "duration_ms", "traceback"} +# asyncio.current_task was introduced in Python 3.7: +for obj in (asyncio, asyncio.Task): + current_task = getattr(obj, "current_task", None) + if current_task is not None: + break + + def get_task_id(): try: loop = asyncio.get_event_loop() except RuntimeError: return None - return id(asyncio.Task.current_task(loop=loop)) + return id(current_task(loop=loop)) @contextmanager From 42ee3e16a9ba7cc513b8da944cc1609a5407cf42 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 10 Nov 2019 20:19:01 -0800 Subject: [PATCH 0073/2113] Bump pint to 0.9 (#624) This fixes 2 deprecation warnings in Python 3.8 - refs #623 #622 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 9ae56306..e8229de1 100644 --- a/setup.py +++ b/setup.py @@ -45,7 +45,7 @@ setup( "click-default-group~=1.2.1", "Jinja2~=2.10.1", "hupper~=1.0", - "pint~=0.8.1", + "pint~=0.9", "pluggy~=0.12.0", "uvicorn~=0.8.4", "aiofiles~=0.4.0", From 5bc2570121aea8141ff88790e214765472882b08 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 11 Nov 2019 20:45:12 -0800 Subject: [PATCH 0074/2113] Include uvicorn version in /-/versions, refs #622 --- datasette/app.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/datasette/app.py b/datasette/app.py index 203e0991..4ba4adfb 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -12,6 +12,7 @@ from pathlib import Path import click from markupsafe import Markup from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader +import uvicorn from .views.base import DatasetteError, ureg, AsgiRouter from .views.database import DatabaseDownload, DatabaseView @@ -433,6 +434,7 @@ class Datasette: }, "datasette": datasette_version, "asgi": "3.0", + "uvicorn": uvicorn.__version__, "sqlite": { "version": sqlite_version, "fts_versions": fts_versions, From cf7776d36fbacefa874cbd6e5fcdc9fff7661203 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 11 Nov 2019 21:09:11 -0800 Subject: [PATCH 0075/2113] Support Python 3.8, stop supporting Python 3.5 (#627) * Upgrade to uvicorn 0.10.4 * Drop support for Python 3.5 * Bump all dependencies to latest releases * Update docs to reflect we no longer support 3.5 * Removed code that skipped black unit test on 3.5 Closes #622 --- .travis.yml | 1 - README.md | 2 +- docs/contributing.rst | 2 +- docs/installation.rst | 7 +++++-- setup.py | 20 ++++++++++---------- tests/test_black.py | 7 +------ 6 files changed, 18 insertions(+), 21 deletions(-) diff --git a/.travis.yml b/.travis.yml index a6b15b7e..0fc87d93 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,7 +6,6 @@ python: - "3.6" - "3.7" - "3.8" - - "3.5" # Executed for 3.5 AND 3.5 as the first "test" stage: script: diff --git a/README.md b/README.md index 9f85f1ba..14c9cfd6 100644 --- a/README.md +++ b/README.md @@ -69,7 +69,7 @@ sqlite-utils: 
a Python library and CLI tool for building SQLite databases](https pip3 install datasette -Datasette requires Python 3.5 or higher. We also have [detailed installation instructions](https://datasette.readthedocs.io/en/stable/installation.html) covering other options such as Docker. +Datasette requires Python 3.6 or higher. We also have [detailed installation instructions](https://datasette.readthedocs.io/en/stable/installation.html) covering other options such as Docker. ## Basic usage diff --git a/docs/contributing.rst b/docs/contributing.rst index 43834edc..078fd841 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -18,7 +18,7 @@ General guidelines Setting up a development environment ------------------------------------ -If you have Python 3.5 or higher installed on your computer (on OS X the easiest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps. +If you have Python 3.6 or higher installed on your computer (on OS X the easiest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps. If you want to use GitHub to publish your changes, first `create a fork of datasette `__ under your own GitHub account. diff --git a/docs/installation.rst b/docs/installation.rst index e65d8ee3..9ee7eb4e 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -69,16 +69,19 @@ You can now run the new custom image like so:: You can confirm that the plugins are installed by visiting http://127.0.0.1:8001/-/plugins - Install using pip ----------------- -To run Datasette without Docker you will need Python 3.5 or higher. +To run Datasette without Docker you will need Python 3.6 or higher. You can install Datasette and its dependencies using ``pip``:: pip install datasette +The last version to support Python 3.5 was 0.30.2 - you can install that version like so:: + + pip install datasette==0.30.2 + If you want to install Datasette in its own virtual environment, use this:: python -mvenv datasette-venv diff --git a/setup.py b/setup.py index e8229de1..7a4cdcb3 100644 --- a/setup.py +++ b/setup.py @@ -42,12 +42,12 @@ setup( include_package_data=True, install_requires=[ "click~=7.0", - "click-default-group~=1.2.1", - "Jinja2~=2.10.1", - "hupper~=1.0", + "click-default-group~=1.2.2", + "Jinja2~=2.10.3", + "hupper~=1.9", "pint~=0.9", - "pluggy~=0.12.0", - "uvicorn~=0.8.4", + "pluggy~=0.13.0", + "uvicorn~=0.10.4", "aiofiles~=0.4.0", ], entry_points=""" @@ -58,11 +58,11 @@ setup( extras_require={ "docs": ["sphinx_rtd_theme", "sphinx-autobuild"], "test": [ - "pytest~=5.0.0", + "pytest~=5.2.2", "pytest-asyncio~=0.10.0", - "aiohttp~=3.5.3", - "beautifulsoup4~=4.6.1", - "asgiref~=3.1.2", + "aiohttp~=3.6.2", + "beautifulsoup4~=4.8.1", + "asgiref~=3.2.3", ] + maybe_black, }, @@ -74,8 +74,8 @@ setup( "Intended Audience :: End Users/Desktop", "Topic :: Database", "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.5", ], ) diff --git a/tests/test_black.py b/tests/test_black.py index 68e2dcc0..b5bfcfd0 100644 --- a/tests/test_black.py +++ b/tests/test_black.py @@ -1,3 +1,4 @@ +import black from click.testing import CliRunner from pathlib import Path import pytest @@ -6,13 +7,7 @@ import sys code_root = Path(__file__).parent.parent -@pytest.mark.skipif( - sys.version_info[:2] < (3, 6), reason="Black requires Python 3.6 or 
later" -) def test_black(): - # Do not import at top of module because Python 3.5 will not have it installed - import black - runner = CliRunner() result = runner.invoke( black.main, [str(code_root / "tests"), str(code_root / "datasette"), "--check"] From 76fc6a9c7317ce4fbf3cc3d327c849f7274d960a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 11 Nov 2019 21:17:59 -0800 Subject: [PATCH 0076/2113] Release notes for 0.31 --- docs/changelog.rst | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index f4761efe..6e260be9 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,28 @@ Changelog ========= +.. _v0_31: + +0.31 (2019-11-11) +----------------- + +This version adds compatibility with Python 3.8 and breaks compatibility with Python 3.5. + +If you are still running Python 3.5 you should stick with ``0.30.2``, which you can install like this:: + + pip install datasette==0.30.2 + +- Format SQL button now works with read-only SQL queries - thanks, Tobias Kunze (`#602 `__) +- New ``?column__notin=x,y,z`` filter for table views (`#614 `__) +- Table view now uses ``select col1, col2, col3`` instead of ``select *`` +- Database filenames can now contain spaces - thanks, Tobias Kunze (`#590 `__) +- Removed obsolete ``?_group_count=col`` feature (`#504 `__) +- Improved user interface and documentation for ``datasette publish cloudrun`` (`#608 `__) +- Tables with indexes now show the `` CREATE INDEX`` statements on the table page (`#618 `__) +- Current version of `uvicorn `__ is now shown on ``/-/versions`` +- Python 3.8 is now supported! (`#622 `__) +- Python 3.5 is no longer supported. + .. _v0_30_2: 0.30.2 (2019-11-02) From c633c035dc8d4c60f1d13cb074918406bbdb3734 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 11 Nov 2019 21:26:56 -0800 Subject: [PATCH 0077/2113] Datasette 0.31 in news section --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 14c9cfd6..05995a74 100644 --- a/README.md +++ b/README.md @@ -21,6 +21,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover ## News + * 11th November 2019: [Datasette 0.31](https://datasette.readthedocs.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5. * 18th October 2019: [Datasette 0.30](https://datasette.readthedocs.io/en/stable/changelog.html#v0-30) * 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail. * 7th July 2019: [Datasette 0.29](https://datasette.readthedocs.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more... From 7f89928062b1a1fdb2625a946f7cd5161e597401 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 11 Nov 2019 21:33:51 -0800 Subject: [PATCH 0078/2113] Removed code that conditionally installs black Since we no longer support Python 3.5 we don't need this any more. 
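Sketch of the net effect for contributors (the install command reflects the usual development workflow and is illustrative here, not taken from this patch):

    # Before (removed below), the test extra was assembled conditionally:
    #   maybe_black = ["black~=19.10b0"] if sys.version_info > (3, 6) else []
    # black now sits directly in extras_require["test"], so
    #   pip install -e ".[test]"
    # always installs it.
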
--- setup.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/setup.py b/setup.py index 7a4cdcb3..15284779 100644 --- a/setup.py +++ b/setup.py @@ -22,11 +22,6 @@ def get_version(): return g["__version__"] -# Only install black on Python 3.6 or higher -maybe_black = [] -if sys.version_info > (3, 6): - maybe_black = ["black~=19.10b0"] - setup( name="datasette", version=versioneer.get_version(), @@ -63,8 +58,8 @@ setup( "aiohttp~=3.6.2", "beautifulsoup4~=4.8.1", "asgiref~=3.2.3", - ] - + maybe_black, + "black~=19.10b0", + ], }, tests_require=["datasette[test]"], classifiers=[ From 1c518680e9692a9a77022af54f3de3e77fb1aaf4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 11 Nov 2019 21:57:48 -0800 Subject: [PATCH 0079/2113] Final steps: build stable branch of Read The Docs --- docs/contributing.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 078fd841..48930332 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -150,4 +150,7 @@ Wait long enough for Travis to build and deploy the demo version of that commit git tag 0.25.2 git push --tags -Once the release is out, you can manually update https://github.com/simonw/datasette/releases +Final steps once the release has deployed to https://pypi.org/project/datasette/ + +* Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases +* Manually kick off a build of the `stable` branch on Read The Docs: https://readthedocs.org/projects/datasette/builds/ From f554be39fc14ddc18921ca29d3920d55aad03d46 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 11 Nov 2019 22:00:13 -0800 Subject: [PATCH 0080/2113] ReST fix --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 6e260be9..763b178e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -21,7 +21,7 @@ If you are still running Python 3.5 you should stick with ``0.30.2``, which you - Database filenames can now contain spaces - thanks, Tobias Kunze (`#590 `__) - Removed obsolete ``?_group_count=col`` feature (`#504 `__) - Improved user interface and documentation for ``datasette publish cloudrun`` (`#608 `__) -- Tables with indexes now show the `` CREATE INDEX`` statements on the table page (`#618 `__) +- Tables with indexes now show the ``CREATE INDEX`` statements on the table page (`#618 `__) - Current version of `uvicorn `__ is now shown on ``/-/versions`` - Python 3.8 is now supported! (`#622 `__) - Python 3.5 is no longer supported. From d977fbadf70a96bf2eea1407d01f99d98e092dec Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 11 Nov 2019 22:03:09 -0800 Subject: [PATCH 0081/2113] datasette publish uses python:3.8 base Docker image, closes #629 --- datasette/utils/__init__.py | 2 +- tests/test_publish_cloudrun.py | 2 +- tests/test_publish_now.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 3d28a36b..b8df48cf 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -306,7 +306,7 @@ def make_dockerfile( install = ["datasette"] + list(install) return """ -FROM python:3.6 +FROM python:3.8 COPY . 
/app WORKDIR /app {spatialite_extras} diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index a038b60e..c5b18cdf 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -128,7 +128,7 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which): .split("\n====================\n")[0] .strip() ) - expected = """FROM python:3.6 + expected = """FROM python:3.8 COPY . /app WORKDIR /app diff --git a/tests/test_publish_now.py b/tests/test_publish_now.py index 72aa71db..27fd1245 100644 --- a/tests/test_publish_now.py +++ b/tests/test_publish_now.py @@ -138,7 +138,7 @@ def test_publish_now_plugin_secrets(mock_run, mock_which): .split("\n====================\n")[0] .strip() ) - expected = """FROM python:3.6 + expected = """FROM python:3.8 COPY . /app WORKDIR /app From 16265f6a1a7c547e3925e0fc2d6b88754afb0435 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 12 Nov 2019 18:18:04 -0800 Subject: [PATCH 0082/2113] Release notes for 0.31.1 --- docs/changelog.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 763b178e..746f5b42 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,13 @@ Changelog ========= +.. _v0_31_1: + +0.31.1 (2019-11-12) +------------------- + +- Deploymens created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 `__) + .. _v0_31: 0.31 (2019-11-11) From a22c7761b61baa61b8e3da7d30887468d61d6b83 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 12 Nov 2019 18:18:39 -0800 Subject: [PATCH 0083/2113] Fixed typo in release notes --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 746f5b42..e527518e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -9,7 +9,7 @@ Changelog 0.31.1 (2019-11-12) ------------------- -- Deploymens created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 `__) +- Deployments created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 `__) .. 
_v0_31: From bbd00e903cdd49067ecdbdb60a4d225833a44b05 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 12 Nov 2019 18:38:13 -0800 Subject: [PATCH 0084/2113] Badge linking to datasette on hub.docker.com --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 05995a74..9a22c2b2 100644 --- a/README.md +++ b/README.md @@ -6,6 +6,7 @@ [![Documentation Status](https://readthedocs.org/projects/datasette/badge/?version=latest)](http://datasette.readthedocs.io/en/latest/?badge=latest) [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/simonw/datasette/blob/master/LICENSE) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://black.readthedocs.io/en/stable/) +[![docker: datasette](https://img.shields.io/badge/docker-datasette-blue)](https://hub.docker.com/r/datasetteproject/datasette) *A tool for exploring and publishing data* From 848dec4deb0d3c140a4e0394cac45fbb2593349b Mon Sep 17 00:00:00 2001 From: Stanley Zheng Date: Tue, 12 Nov 2019 23:28:42 -0500 Subject: [PATCH 0085/2113] Fix for datasette publish with just --source_url (#631) Closes #572 --- datasette/templates/_description_source_license.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/templates/_description_source_license.html b/datasette/templates/_description_source_license.html index 3327706e..a2bc18f2 100644 --- a/datasette/templates/_description_source_license.html +++ b/datasette/templates/_description_source_license.html @@ -21,7 +21,7 @@ {% endif %}{{ metadata.source or metadata.source_url }}{% if metadata.source_url %}{% endif %} {% endif %} - {% if metadata.about or metadata.about_url %}{% if metadata.license or metadata.license_url or metadata.source or metadat.source_url %}·{% endif %} + {% if metadata.about or metadata.about_url %}{% if metadata.license or metadata.license_url or metadata.source or metadata.source_url %}·{% endif %} About: {% if metadata.about_url %} {% endif %}{{ metadata.about or metadata.about_url }}{% if metadata.about_url %}{% endif %} From f52451023025579ae9a13de4a7f00d69200184cd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 13 Nov 2019 08:42:47 -0800 Subject: [PATCH 0086/2113] Fix "publish heroku" + upgrade to use Python 3.8.0 Closes #633. Closes #632. 
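The Heroku fix works by setting ``WEB_CONCURRENCY=1`` on the application before the build is triggered, avoiding the uvicorn startup error tracked in #633. A sketch of the equivalent CLI calls the publish command now makes, expressed with ``subprocess`` (the app name "f" is illustrative, taken from the test expectations below):

    from subprocess import call

    app_name = "f"  # illustrative app name, as used in the updated test
    call(["heroku", "config:set", "-a", app_name, "WEB_CONCURRENCY=1"])
    call(["heroku", "builds:create", "-a", app_name, "--include-vcs-ignore"])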
--- datasette/publish/heroku.py | 7 +++++-- tests/test_publish_heroku.py | 9 +++++++-- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index 34d1f773..e75f76df 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -72,7 +72,10 @@ def publish_subcommand(publish): "about_url": about_url, } - environment_variables = {} + environment_variables = { + # Avoid uvicorn error: https://github.com/simonw/datasette/issues/633 + "WEB_CONCURRENCY": "1" + } if plugin_secret: extra_metadata["plugins"] = {} for plugin_name, plugin_setting, setting_value in plugin_secret: @@ -164,7 +167,7 @@ def temporary_heroku_directory( if metadata_content: open("metadata.json", "w").write(json.dumps(metadata_content, indent=2)) - open("runtime.txt", "w").write("python-3.6.8") + open("runtime.txt", "w").write("python-3.8.0") if branch: install = [ diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py index 4cd66219..87386e93 100644 --- a/tests/test_publish_heroku.py +++ b/tests/test_publish_heroku.py @@ -57,8 +57,13 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which): open("test.db", "w").write("data") result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"]) assert 0 == result.exit_code, result.output - mock_call.assert_called_once_with( - ["heroku", "builds:create", "-a", "f", "--include-vcs-ignore"] + mock_call.assert_has_calls( + [ + mock.call(["heroku", "config:set", "-a", "f", "WEB_CONCURRENCY=1",]), + mock.call( + ["heroku", "builds:create", "-a", "f", "--include-vcs-ignore"] + ), + ] ) From b51f258d00bb3c3b401f15d46a1fbd50394dbe1c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 13 Nov 2019 08:48:36 -0800 Subject: [PATCH 0087/2113] Release notes for 0.31.2 --- docs/changelog.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index e527518e..f4958399 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,15 @@ Changelog ========= +.. _v0_31_2: + +0.31.2 (2019-11-13) +------------------- + +- Fixed a bug where ``datasette publish heroku`` applications failed to start (`#633 `__) +- Fix for ``datasette publish`` with just ``--source_url`` - thanks, Stanley Zheng (`#572 `__) +- Deployments to Heroku now use Python 3.8.0 (`#632 `__) + .. 
_v0_31_1: 0.31.1 (2019-11-12) From 8c642f04e0608bf537fdd1f76d64c2367fb04d57 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 14 Nov 2019 15:14:22 -0800 Subject: [PATCH 0088/2113] Render templates using Jinja async mode Closes #628 --- datasette/app.py | 6 ++++-- datasette/views/base.py | 2 +- docs/plugins.rst | 23 ++++++++++++----------- tests/fixtures.py | 8 +++++++- tests/test_plugins.py | 18 ++++++++++++++++++ tests/test_templates/show_json.html | 1 + 6 files changed, 43 insertions(+), 15 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 4ba4adfb..02fcf303 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -583,7 +583,9 @@ class Datasette: ), ] ) - self.jinja_env = Environment(loader=template_loader, autoescape=True) + self.jinja_env = Environment( + loader=template_loader, autoescape=True, enable_async=True + ) self.jinja_env.filters["escape_css_string"] = escape_css_string self.jinja_env.filters["quote_plus"] = lambda u: urllib.parse.quote_plus(u) self.jinja_env.filters["escape_sqlite"] = escape_sqlite @@ -730,5 +732,5 @@ class DatasetteRouter(AsgiRouter): else: template = self.ds.jinja_env.select_template(templates) await asgi_send_html( - send, template.render(info), status=status, headers=headers + send, await template.render_async(info), status=status, headers=headers ) diff --git a/datasette/views/base.py b/datasette/views/base.py index 062c6956..5182479c 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -139,7 +139,7 @@ class BaseView(AsgiView): extra_template_vars.update(extra_vars) return Response.html( - template.render( + await template.render_async( { **context, **{ diff --git a/docs/plugins.rst b/docs/plugins.rst index 6df7ff6a..e5a3d7dd 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -629,7 +629,9 @@ Function that returns a dictionary If you return a function it will be executed. If it returns a dictionary those values will will be merged into the template context. Function that returns an awaitable function that returns a dictionary - You can also return a function which returns an awaitable function which returns a dictionary. This means you can execute additional SQL queries using ``datasette.execute()``. + You can also return a function which returns an awaitable function which returns a dictionary. + +Datasette runs Jinja2 in `async mode `__, which means you can add awaitable functions to the template scope and they will be automatically awaited when they are rendered by the template. Here's an example plugin that returns an authentication object from the ASGI scope: @@ -641,20 +643,19 @@ Here's an example plugin that returns an authentication object from the ASGI sco "auth": request.scope.get("auth") } -And here's an example which returns the current version of SQLite: +And here's an example which adds a ``sql_first(sql_query)`` function which executes a SQL statement and returns the first column of the first row of results: .. 
code-block:: python @hookimpl - def extra_template_vars(datasette): - async def inner(): - first_db = list(datasette.databases.keys())[0] - return { - "sqlite_version": ( - await datasette.execute(first_db, "select sqlite_version()") - ).rows[0][0] - } - return inner + def extra_template_vars(datasette, database): + async def sql_first(sql, dbname=None): + dbname = dbname or database or next(iter(datasette.databases.keys())) + return (await datasette.execute(dbname, sql)).rows[0][0] + +You can then use the new function in a template like so:: + + SQLite version: {{ sql_first("select sqlite_version()") }} .. _plugin_register_output_renderer: diff --git a/tests/fixtures.py b/tests/fixtures.py index 87e66f99..3e4203f7 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -446,13 +446,19 @@ def render_cell(value, database): @hookimpl def extra_template_vars(template, database, table, view_name, request, datasette): + async def query_database(sql): + first_db = list(datasette.databases.keys())[0] + return ( + await datasette.execute(first_db, sql) + ).rows[0][0] async def inner(): return { "extra_template_vars_from_awaitable": json.dumps({ "template": template, "scope_path": request.scope["path"], "awaitable": True, - }, default=lambda b: b.decode("utf8")) + }, default=lambda b: b.decode("utf8")), + "query_database": query_database, } return inner diff --git a/tests/test_plugins.py b/tests/test_plugins.py index b1c7fd9a..42d063f4 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -1,5 +1,6 @@ from bs4 import BeautifulSoup as Soup from .fixtures import app_client, make_app_client, TEMP_PLUGIN_SECRET_FILE # noqa +from datasette.utils import sqlite3 import base64 import json import os @@ -214,3 +215,20 @@ def test_plugins_extra_template_vars(restore_working_directory): "awaitable": True, "scope_path": "/-/metadata", } == extra_template_vars_from_awaitable + + +def test_plugins_async_template_function(restore_working_directory): + for client in make_app_client( + template_dir=str(pathlib.Path(__file__).parent / "test_templates") + ): + response = client.get("/-/metadata") + assert response.status == 200 + extra_from_awaitable_function = ( + Soup(response.body, "html.parser") + .select("pre.extra_from_awaitable_function")[0] + .text + ) + expected = ( + sqlite3.connect(":memory:").execute("select sqlite_version()").fetchone()[0] + ) + assert expected == extra_from_awaitable_function diff --git a/tests/test_templates/show_json.html b/tests/test_templates/show_json.html index bbf1bc06..cff04fb4 100644 --- a/tests/test_templates/show_json.html +++ b/tests/test_templates/show_json.html @@ -5,4 +5,5 @@ Test data for extra_template_vars:
 <pre class="extra_template_vars">{{ extra_template_vars|safe }}</pre>
 <pre class="extra_template_vars_from_awaitable">{{ extra_template_vars_from_awaitable|safe }}</pre>
+<pre class="extra_from_awaitable_function">{{ query_database("select sqlite_version();") }}</pre>
{% endblock %} From a95bedb9c423fa6d772c93ef47bc40f13a5bea50 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 14 Nov 2019 15:18:53 -0800 Subject: [PATCH 0089/2113] Release notes for 0.32 --- docs/changelog.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index f4958399..2f909364 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,13 @@ Changelog ========= +.. _v0_32: + +0.32 (2019-11-14) +----------------- + +Datasette now renders templates using `Jinja async mode `__. This makes it easy for plugins to provide custom template functions that perform asynchronous actions, for example the new `datasette-template-sql `__ plugin which allows custom templates to directly execute SQL queries and render their results. (`#628 `__) + .. _v0_31_2: 0.31.2 (2019-11-13) From 8fc9a5d877d26dbf2654e125f407ddd2fd767335 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 14 Nov 2019 15:46:37 -0800 Subject: [PATCH 0090/2113] Datasette 0.32 and datasette-template-sql in news --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 9a22c2b2..030c507f 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover ## News + * 14th November 2019: [Datasette 0.32](https://datasette.readthedocs.io/en/stable/changelog.html#v0-32) now uses asynchronous rendering in Jinja templates, which means template functions can perform asynchronous operations such as executing SQL queries. [datasette-template-sql](https://github.com/simonw/datasette-template-sql) is a new plugin uses this capability to add a new custom `sql(sql_query)` template function. * 11th November 2019: [Datasette 0.31](https://datasette.readthedocs.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5. * 18th October 2019: [Datasette 0.30](https://datasette.readthedocs.io/en/stable/changelog.html#v0-30) * 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail. 
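A worked version of the async-template capability added above: the ``sql_first`` example in the ``docs/plugins.rst`` hunk defines the awaitable function, but the printed hunk ends before the hook returns anything, so the final ``return`` statement in this sketch is an assumption about how the implementation is meant to be completed (the hook must hand a dictionary of template variables back to Datasette):

    from datasette import hookimpl


    @hookimpl
    def extra_template_vars(datasette, database):
        async def sql_first(sql, dbname=None):
            # Default to the current database, falling back to the first attached one
            dbname = dbname or database or next(iter(datasette.databases.keys()))
            return (await datasette.execute(dbname, sql)).rows[0][0]

        # Assumed completion: expose the awaitable to templates by name
        return {"sql_first": sql_first}

With Jinja running in async mode, ``{{ sql_first("select sqlite_version()") }}`` in a template awaits the coroutine automatically during rendering.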
From a9909c29ccac771c23c2ef22b89d10697b5256b9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 15 Nov 2019 14:49:45 -0800 Subject: [PATCH 0091/2113] Move .execute() from Datasette to Database Refs #569 - I split this change out from #579 --- datasette/app.py | 90 ++++++--------------------- datasette/database.py | 137 +++++++++++++++++++++++++++++++----------- 2 files changed, 121 insertions(+), 106 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 02fcf303..119d0e19 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -24,13 +24,11 @@ from .database import Database from .utils import ( QueryInterrupted, - Results, escape_css_string, escape_sqlite, get_plugins, module_from_path, sqlite3, - sqlite_timelimit, to_css_class, ) from .utils.asgi import ( @@ -42,13 +40,12 @@ from .utils.asgi import ( asgi_send_json, asgi_send_redirect, ) -from .tracer import trace, AsgiTracer +from .tracer import AsgiTracer from .plugins import pm, DEFAULT_PLUGINS from .version import __version__ app_root = Path(__file__).parent.parent -connections = threading.local() MEMORY = object() ConfigOption = collections.namedtuple("ConfigOption", ("name", "default", "help")) @@ -336,6 +333,25 @@ class Datasette: # pylint: disable=no-member pm.hook.prepare_connection(conn=conn) + async def execute( + self, + db_name, + sql, + params=None, + truncate=False, + custom_time_limit=None, + page_size=None, + log_sql_errors=True, + ): + return await self.databases[db_name].execute( + sql, + params=params, + truncate=truncate, + custom_time_limit=custom_time_limit, + page_size=page_size, + log_sql_errors=log_sql_errors, + ) + async def expand_foreign_keys(self, database, table, column, values): "Returns dict mapping (column, value) -> label" labeled_fks = {} @@ -477,72 +493,6 @@ class Datasette: .get(table, {}) ) - async def execute_against_connection_in_thread(self, db_name, fn): - def in_thread(): - conn = getattr(connections, db_name, None) - if not conn: - conn = self.databases[db_name].connect() - self.prepare_connection(conn) - setattr(connections, db_name, conn) - return fn(conn) - - return await asyncio.get_event_loop().run_in_executor(self.executor, in_thread) - - async def execute( - self, - db_name, - sql, - params=None, - truncate=False, - custom_time_limit=None, - page_size=None, - log_sql_errors=True, - ): - """Executes sql against db_name in a thread""" - page_size = page_size or self.page_size - - def sql_operation_in_thread(conn): - time_limit_ms = self.sql_time_limit_ms - if custom_time_limit and custom_time_limit < time_limit_ms: - time_limit_ms = custom_time_limit - - with sqlite_timelimit(conn, time_limit_ms): - try: - cursor = conn.cursor() - cursor.execute(sql, params or {}) - max_returned_rows = self.max_returned_rows - if max_returned_rows == page_size: - max_returned_rows += 1 - if max_returned_rows and truncate: - rows = cursor.fetchmany(max_returned_rows + 1) - truncated = len(rows) > max_returned_rows - rows = rows[:max_returned_rows] - else: - rows = cursor.fetchall() - truncated = False - except sqlite3.OperationalError as e: - if e.args == ("interrupted",): - raise QueryInterrupted(e, sql, params) - if log_sql_errors: - print( - "ERROR: conn={}, sql = {}, params = {}: {}".format( - conn, repr(sql), params, e - ) - ) - raise - - if truncate: - return Results(rows, truncated, cursor.description) - - else: - return Results(rows, False, cursor.description) - - with trace("sql", database=db_name, sql=sql.strip(), params=params): - results = await 
self.execute_against_connection_in_thread( - db_name, sql_operation_in_thread - ) - return results - def register_renderers(self): """ Register output renderers which output data in custom formats. """ # Built-in renderers diff --git a/datasette/database.py b/datasette/database.py index 3a1cea94..9a8ae4d4 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -1,17 +1,25 @@ +import asyncio +import contextlib from pathlib import Path +import threading +from .tracer import trace from .utils import ( QueryInterrupted, + Results, detect_fts, detect_primary_keys, detect_spatialite, get_all_foreign_keys, get_outbound_foreign_keys, + sqlite_timelimit, sqlite3, table_columns, ) from .inspect import inspect_hash +connections = threading.local() + class Database: def __init__(self, ds, path=None, is_mutable=False, is_memory=False): @@ -45,6 +53,73 @@ class Database: "file:{}?{}".format(self.path, qs), uri=True, check_same_thread=False ) + async def execute_against_connection_in_thread(self, fn): + def in_thread(): + conn = getattr(connections, self.name, None) + if not conn: + conn = self.connect() + self.ds.prepare_connection(conn) + setattr(connections, self.name, conn) + return fn(conn) + + return await asyncio.get_event_loop().run_in_executor( + self.ds.executor, in_thread + ) + + async def execute( + self, + sql, + params=None, + truncate=False, + custom_time_limit=None, + page_size=None, + log_sql_errors=True, + ): + """Executes sql against db_name in a thread""" + page_size = page_size or self.ds.page_size + + def sql_operation_in_thread(conn): + time_limit_ms = self.ds.sql_time_limit_ms + if custom_time_limit and custom_time_limit < time_limit_ms: + time_limit_ms = custom_time_limit + + with sqlite_timelimit(conn, time_limit_ms): + try: + cursor = conn.cursor() + cursor.execute(sql, params or {}) + max_returned_rows = self.ds.max_returned_rows + if max_returned_rows == page_size: + max_returned_rows += 1 + if max_returned_rows and truncate: + rows = cursor.fetchmany(max_returned_rows + 1) + truncated = len(rows) > max_returned_rows + rows = rows[:max_returned_rows] + else: + rows = cursor.fetchall() + truncated = False + except sqlite3.OperationalError as e: + if e.args == ("interrupted",): + raise QueryInterrupted(e, sql, params) + if log_sql_errors: + print( + "ERROR: conn={}, sql = {}, params = {}: {}".format( + conn, repr(sql), params, e + ) + ) + raise + + if truncate: + return Results(rows, truncated, cursor.description) + + else: + return Results(rows, False, cursor.description) + + with trace("sql", database=self.name, sql=sql.strip(), params=params): + results = await self.execute_against_connection_in_thread( + sql_operation_in_thread + ) + return results + @property def size(self): if self.is_memory: @@ -62,8 +137,7 @@ class Database: for table in await self.table_names(): try: table_count = ( - await self.ds.execute( - self.name, + await self.execute( "select count(*) from [{}]".format(table), custom_time_limit=limit, ) @@ -89,32 +163,30 @@ class Database: return Path(self.path).stem async def table_exists(self, table): - results = await self.ds.execute( - self.name, - "select 1 from sqlite_master where type='table' and name=?", - params=(table,), + results = await self.execute( + "select 1 from sqlite_master where type='table' and name=?", params=(table,) ) return bool(results.rows) async def table_names(self): - results = await self.ds.execute( - self.name, "select name from sqlite_master where type='table'" + results = await self.execute( + "select name from 
sqlite_master where type='table'" ) return [r[0] for r in results.rows] async def table_columns(self, table): - return await self.ds.execute_against_connection_in_thread( - self.name, lambda conn: table_columns(conn, table) + return await self.execute_against_connection_in_thread( + lambda conn: table_columns(conn, table) ) async def primary_keys(self, table): - return await self.ds.execute_against_connection_in_thread( - self.name, lambda conn: detect_primary_keys(conn, table) + return await self.execute_against_connection_in_thread( + lambda conn: detect_primary_keys(conn, table) ) async def fts_table(self, table): - return await self.ds.execute_against_connection_in_thread( - self.name, lambda conn: detect_fts(conn, table) + return await self.execute_against_connection_in_thread( + lambda conn: detect_fts(conn, table) ) async def label_column_for_table(self, table): @@ -124,8 +196,8 @@ class Database: if explicit_label_column: return explicit_label_column # If a table has two columns, one of which is ID, then label_column is the other one - column_names = await self.ds.execute_against_connection_in_thread( - self.name, lambda conn: table_columns(conn, table) + column_names = await self.execute_against_connection_in_thread( + lambda conn: table_columns(conn, table) ) # Is there a name or title column? name_or_title = [c for c in column_names if c in ("name", "title")] @@ -141,8 +213,8 @@ class Database: return None async def foreign_keys_for_table(self, table): - return await self.ds.execute_against_connection_in_thread( - self.name, lambda conn: get_outbound_foreign_keys(conn, table) + return await self.execute_against_connection_in_thread( + lambda conn: get_outbound_foreign_keys(conn, table) ) async def hidden_table_names(self): @@ -150,18 +222,17 @@ class Database: hidden_tables = [ r[0] for r in ( - await self.ds.execute( - self.name, + await self.execute( """ select name from sqlite_master where rootpage = 0 and sql like '%VIRTUAL TABLE%USING FTS%' - """, + """ ) ).rows ] - has_spatialite = await self.ds.execute_against_connection_in_thread( - self.name, detect_spatialite + has_spatialite = await self.execute_against_connection_in_thread( + detect_spatialite ) if has_spatialite: # Also hide Spatialite internal tables @@ -178,13 +249,12 @@ class Database: ] + [ r[0] for r in ( - await self.ds.execute( - self.name, + await self.execute( """ select name from sqlite_master where name like "idx_%" and type = "table" - """, + """ ) ).rows ] @@ -207,25 +277,20 @@ class Database: return hidden_tables async def view_names(self): - results = await self.ds.execute( - self.name, "select name from sqlite_master where type='view'" - ) + results = await self.execute("select name from sqlite_master where type='view'") return [r[0] for r in results.rows] async def get_all_foreign_keys(self): - return await self.ds.execute_against_connection_in_thread( - self.name, get_all_foreign_keys - ) + return await self.execute_against_connection_in_thread(get_all_foreign_keys) async def get_outbound_foreign_keys(self, table): - return await self.ds.execute_against_connection_in_thread( - self.name, lambda conn: get_outbound_foreign_keys(conn, table) + return await self.execute_against_connection_in_thread( + lambda conn: get_outbound_foreign_keys(conn, table) ) async def get_table_definition(self, table, type_="table"): table_definition_rows = list( - await self.ds.execute( - self.name, + await self.execute( "select sql from sqlite_master where name = :n and type=:t", {"n": table, "t": type_}, ) From 
440a70428c624f6e27b630026acdba2032acc9a7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 19 Nov 2019 15:01:10 -0800 Subject: [PATCH 0092/2113] Include rowid in filter select, closes #636 --- datasette/views/table.py | 6 +----- tests/test_html.py | 24 ++++++++++++++++++++++++ 2 files changed, 25 insertions(+), 5 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index a60a3941..516b474d 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -587,10 +587,6 @@ class TableView(RowTableShared): columns = [r[0] for r in results.description] rows = list(results.rows) - filter_columns = columns[:] - if use_rowid and filter_columns[0] == "rowid": - filter_columns = filter_columns[1:] - # Expand labeled columns if requested expanded_columns = [] expandable_columns = await self.expandable_columns(database, table) @@ -720,7 +716,7 @@ class TableView(RowTableShared): "use_rowid": use_rowid, "filters": filters, "display_columns": display_columns, - "filter_columns": filter_columns, + "filter_columns": columns, "display_rows": display_rows, "facets_timed_out": facets_timed_out, "sorted_facet_results": sorted( diff --git a/tests/test_html.py b/tests/test_html.py index 44627cdc..3b331f38 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -679,6 +679,30 @@ def test_table_html_foreign_key_custom_label_column(app_client): ] +@pytest.mark.parametrize( + "path,expected_column_options", + [ + ("/fixtures/infinity", ["- column -", "rowid", "value"]), + ( + "/fixtures/primary_key_multiple_columns", + ["- column -", "id", "content", "content2"], + ), + ("/fixtures/compound_primary_key", ["- column -", "pk1", "pk2", "content"]), + ], +) +def test_table_html_filter_form_column_options( + path, expected_column_options, app_client +): + response = app_client.get(path) + assert response.status == 200 + form = Soup(response.body, "html.parser").find("form") + column_options = [ + o.attrs.get("value") or o.string + for o in form.select("select[name=_filter_column] option") + ] + assert expected_column_options == column_options + + def test_row_html_compound_primary_key(app_client): response = app_client.get("/fixtures/compound_primary_key/a,b") assert response.status == 200 From c16be14517414a94e1fdbd888e8a3ad0669e3bca Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 20 Nov 2019 10:02:07 -0800 Subject: [PATCH 0093/2113] How to upgrade using Docker --- docs/installation.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/installation.rst b/docs/installation.rst index 9ee7eb4e..c547f9e4 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -33,6 +33,10 @@ Now visit http://127.0.0.1:8001/ to access Datasette. 
(You can download a copy of ``fixtures.db`` from https://latest.datasette.io/fixtures.db ) +To upgrade to the most recent release of Datasette, run the following:: + + docker pull datasetteproject/datasette + Loading Spatialite ~~~~~~~~~~~~~~~~~~ From fd137da7f83c117b18e189707a1039e319dd5c91 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 21 Nov 2019 16:56:55 -0800 Subject: [PATCH 0094/2113] Suggest column facet only if at least one count > 1 Fixes #638 --- datasette/facets.py | 5 ++++- tests/fixtures.py | 33 +++++++++++++++++---------------- tests/test_api.py | 30 ++++++++++++++++++++++++++++-- tests/test_csv.py | 32 ++++++++++++++++---------------- 4 files changed, 65 insertions(+), 35 deletions(-) diff --git a/datasette/facets.py b/datasette/facets.py index 0c6459d6..a314faaf 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -143,9 +143,10 @@ class ColumnFacet(Facet): if column in already_enabled: continue suggested_facet_sql = """ - select distinct {column} from ( + select {column}, count(*) as n from ( {sql} ) where {column} is not null + group by {column} limit {limit} """.format( column=escape_sqlite(column), sql=self.sql, limit=facet_size + 1 @@ -165,6 +166,8 @@ class ColumnFacet(Facet): and num_distinct_values > 1 and num_distinct_values <= facet_size and num_distinct_values < row_count + # And at least one has n > 1 + and any(r["n"] > 1 for r in distinct_values) ): suggested_facets.append( { diff --git a/tests/fixtures.py b/tests/fixtures.py index 3e4203f7..bb01d171 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -669,26 +669,27 @@ CREATE TABLE facetable ( neighborhood text, tags text, complex_array text, + distinct_some_null, FOREIGN KEY ("city_id") REFERENCES [facet_cities](id) ); INSERT INTO facetable - (created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array) + (created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array, distinct_some_null) VALUES - ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]'), - ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]'), - ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]', '[]'), - ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]'), - ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]'), - ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]'), - ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Hollywood', '[]', '[]'), - ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]', '[]'), - ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]'), - ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]', '[]'), - ("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]', '[]'), - ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]', '[]'), - ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]', '[]'), - ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]'), - ("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]') + ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]', 'one'), + ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]', 'two'), + ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]', '[]', null), + ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]', null), + ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]', null), + ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]', null), + ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 
'Hollywood', '[]', '[]', null), + ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]', '[]', null), + ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]', null), + ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]', '[]', null), + ("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]', '[]', null), + ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]', '[]', null), + ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]', '[]', null), + ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]', null), + ("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]', null) ; CREATE TABLE binary_data ( diff --git a/tests/test_api.py b/tests/test_api.py index 1fa8642f..34eef4ce 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -197,6 +197,7 @@ def test_database_page(app_client): "neighborhood", "tags", "complex_array", + "distinct_some_null", ], "primary_keys": ["pk"], "count": 15, @@ -1042,15 +1043,38 @@ def test_table_filter_json_arraycontains(app_client): "Mission", '["tag1", "tag2"]', '[{"foo": "bar"}]', + "one", + ], + [ + 2, + "2019-01-14 08:00:00", + 1, + 1, + "CA", + 1, + "Dogpatch", + '["tag1", "tag3"]', + "[]", + "two", ], - [2, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Dogpatch", '["tag1", "tag3"]', "[]"], ] == response.json["rows"] def test_table_filter_extra_where(app_client): response = app_client.get("/fixtures/facetable.json?_where=neighborhood='Dogpatch'") assert [ - [2, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Dogpatch", '["tag1", "tag3"]', "[]"] + [ + 2, + "2019-01-14 08:00:00", + 1, + 1, + "CA", + 1, + "Dogpatch", + '["tag1", "tag3"]', + "[]", + "two", + ] ] == response.json["rows"] @@ -1503,6 +1527,7 @@ def test_expand_labels(app_client): "neighborhood": "Dogpatch", "tags": '["tag1", "tag3"]', "complex_array": "[]", + "distinct_some_null": "two", }, "13": { "pk": 13, @@ -1514,6 +1539,7 @@ def test_expand_labels(app_client): "neighborhood": "Corktown", "tags": "[]", "complex_array": "[]", + "distinct_some_null": None, }, } == response.json diff --git a/tests/test_csv.py b/tests/test_csv.py index 13aca489..1030c2bb 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -21,22 +21,22 @@ world ) EXPECTED_TABLE_WITH_LABELS_CSV = """ -pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,complex_array -1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]" -2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]",[] -3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[] -4,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Tenderloin,[],[] -5,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Bernal Heights,[],[] -6,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Hayes Valley,[],[] -7,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Hollywood,[],[] -8,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Downtown,[],[] -9,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Los Feliz,[],[] -10,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Koreatown,[],[] -11,2019-01-16 08:00:00,1,1,MI,3,Detroit,Downtown,[],[] -12,2019-01-17 08:00:00,1,1,MI,3,Detroit,Greektown,[],[] -13,2019-01-17 08:00:00,1,1,MI,3,Detroit,Corktown,[],[] -14,2019-01-17 08:00:00,1,1,MI,3,Detroit,Mexicantown,[],[] -15,2019-01-17 08:00:00,2,0,MC,4,Memnonia,Arcadia Planitia,[],[] +pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,complex_array,distinct_some_null +1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]",one +2,2019-01-14 08:00:00,1,1,CA,1,San 
Francisco,Dogpatch,"[""tag1"", ""tag3""]",[],two +3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[], +4,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Tenderloin,[],[], +5,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Bernal Heights,[],[], +6,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Hayes Valley,[],[], +7,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Hollywood,[],[], +8,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Downtown,[],[], +9,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Los Feliz,[],[], +10,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Koreatown,[],[], +11,2019-01-16 08:00:00,1,1,MI,3,Detroit,Downtown,[],[], +12,2019-01-17 08:00:00,1,1,MI,3,Detroit,Greektown,[],[], +13,2019-01-17 08:00:00,1,1,MI,3,Detroit,Corktown,[],[], +14,2019-01-17 08:00:00,1,1,MI,3,Detroit,Mexicantown,[],[], +15,2019-01-17 08:00:00,2,0,MC,4,Memnonia,Arcadia Planitia,[],[], """.lstrip().replace( "\n", "\r\n" ) From d3e1c3017ee2f606a731208d59fe48805cdc3259 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 22 Nov 2019 22:07:01 -0800 Subject: [PATCH 0095/2113] Display 0 results, closes #637 --- datasette/static/app.css | 7 +++++ datasette/templates/_table.html | 56 ++++++++++++++++++--------------- datasette/templates/query.html | 2 ++ tests/test_html.py | 14 +++++++++ 4 files changed, 53 insertions(+), 26 deletions(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index 34eb122c..d7cf6334 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -327,3 +327,10 @@ a.not-underlined { pre.wrapped-sql { white-space: pre-wrap; } + +p.zero-results { + border: 2px solid #ccc; + background-color: #eee; + padding: 0.5em; + font-style: italic; +} diff --git a/datasette/templates/_table.html b/datasette/templates/_table.html index c7a72253..42c37c55 100644 --- a/datasette/templates/_table.html +++ b/datasette/templates/_table.html @@ -1,28 +1,32 @@ - - - - {% for column in display_columns %} - + {% endfor %} + + + + {% for row in display_rows %} + + {% for cell in row %} + + {% endfor %} + + {% endfor %} + +
- {% if not column.sortable %} - {{ column.name }} - {% else %} - {% if column.name == sort %} - {{ column.name }} ▼ +{% if display_rows %} + + + + {% for column in display_columns %} + - {% endfor %} - - - - {% for row in display_rows %} - - {% for cell in row %} - - {% endfor %} - - {% endfor %} - -
+ {% if not column.sortable %} + {{ column.name }} {% else %} - {{ column.name }}{% if column.name == sort_desc %} ▲{% endif %} + {% if column.name == sort %} + {{ column.name }} ▼ + {% else %} + {{ column.name }}{% if column.name == sort_desc %} ▲{% endif %} + {% endif %} {% endif %} - {% endif %} -
{{ cell.value }}
+
{{ cell.value }}
+{% else %} +

<p class="zero-results">0 records</p>

+{% endif %} diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 34fa78a5..f10ff000 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -73,6 +73,8 @@ {% endfor %} +{% else %} +

<p class="zero-results">0 results</p>

{% endif %} {% include "_codemirror_foot.html" %} diff --git a/tests/test_html.py b/tests/test_html.py index 3b331f38..db73da18 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1059,3 +1059,17 @@ def test_custom_table_include(): '1 - 2 - hello 1' "
" ) == str(Soup(response.text, "html.parser").select_one("div.custom-table-row")) + + +@pytest.mark.parametrize( + "path", + [ + "/fixtures?sql=select+*+from+[123_starts_with_digits]", + "/fixtures/123_starts_with_digits", + ], +) +def test_zero_results(app_client, path): + response = app_client.get(path) + soup = Soup(response.text, "html.parser") + assert 0 == len(soup.select("table")) + assert 1 == len(soup.select("p.zero-results")) From aca41618f8761f99c47c8ae8e81b07a6d4af4d7a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 25 Nov 2019 09:04:39 -0800 Subject: [PATCH 0096/2113] index view is also important for plugin hooks --- docs/plugins.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index e5a3d7dd..9bceb961 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -583,7 +583,7 @@ Extra JavaScript to be added to a `` - - - + + + + + + + +
+[patterns template residue: the rest of this patch adds a full pattern-portfolio page template, but its HTML markup was lost in extraction, leaving only text fragments. The recoverable structure, in order: a "Pattern Portfolio" heading; a ".hd" header example for /database/table/row; ".bd" body examples for the index page ("Datasette Fixtures", with row counts for the fixtures and data databases), for a database page (fixtures, a "Custom SQL query" form, and table cards for 123_starts_with_digits, "Table With Space In Name" and attraction_characteristic), for a table page (roadside_attraction_characteristics filtered to "3 rows where characteristic_id = 2", with filter form, "View and edit SQL" link, results table, an "Advanced export" block with JSON shape and CSV options, and the CREATE TABLE roadside_attraction_characteristics statement), and for a row page ("roadside_attractions: 2", Winchester Mystery House, with "Links from other tables"); plus a ".ft" footer ("Powered by Datasette" with license, source and about links).]
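The supporting code changes follow. The ``base.py`` hunk below makes the ``context`` argument to ``render()`` optional, which is what allows the new ``PatternPortfolioView`` to render a template with only the default template context. A hypothetical view built on the same pattern (the class and template names here are invented for illustration):

    from datasette.views.base import BaseView


    class ExampleStaticPageView(BaseView):
        # Hypothetical: renders a template using only the default context
        name = "example"

        def __init__(self, datasette):
            self.ds = datasette

        async def get(self, request):
            return await self.render(["example.html"], request=request)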
+ + diff --git a/datasette/views/base.py b/datasette/views/base.py index 2478bd84..e2bce2f9 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -72,7 +72,8 @@ class BaseView(AsgiView): def database_color(self, database): return "ff0000" - async def render(self, templates, request, context): + async def render(self, templates, request, context=None): + context = context or {} template = self.ds.jinja_env.select_template(templates) template_context = { **context, diff --git a/datasette/views/special.py b/datasette/views/special.py index 2b31028d..dfe5ea8c 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -32,3 +32,13 @@ class JsonDataView(BaseView): "data_json": json.dumps(data, indent=4), }, ) + + +class PatternPortfolioView(BaseView): + name = "patterns" + + def __init__(self, datasette): + self.ds = datasette + + async def get(self, request): + return await self.render(["patterns.html"], request=request,) diff --git a/tests/test_docs.py b/tests/test_docs.py index d7c5a534..77c2a611 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -65,6 +65,8 @@ def documented_views(): first_word = label.split("_")[0] if first_word.endswith("View"): view_labels.add(first_word) + # We deliberately don't document this one: + view_labels.add("PatternPortfolioView") return view_labels From d996d4122b522eeec3c610f6b2561aa96652ecd2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 3 May 2020 08:46:49 -0700 Subject: [PATCH 0195/2113] Add badges to documentation index --- docs/index.rst | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/index.rst b/docs/index.rst index 070b6f64..2390e263 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,6 +1,22 @@ Datasette ========= +|PyPI| |Changelog| |Python 3.x| |Travis CI| |License| |docker: +datasette| + +.. |PyPI| image:: https://img.shields.io/pypi/v/datasette.svg + :target: https://pypi.org/project/datasette/ +.. |Changelog| image:: https://img.shields.io/github/v/release/simonw/datasette?include_prereleases&label=changelog + :target: https://datasette.readthedocs.io/en/stable/changelog.html +.. |Python 3.x| image:: https://img.shields.io/pypi/pyversions/datasette.svg?logo=python&logoColor=white + :target: https://pypi.org/project/datasette/ +.. |Travis CI| image:: https://travis-ci.org/simonw/datasette.svg?branch=master + :target: https://travis-ci.org/simonw/datasette +.. |License| image:: https://img.shields.io/badge/license-Apache%202.0-blue.svg + :target: https://github.com/simonw/datasette/blob/master/LICENSE +.. |docker: datasette| image:: https://img.shields.io/badge/docker-datasette-blue + :target: https://hub.docker.com/r/datasetteproject/datasette + *A tool for exploring and publishing data* Datasette is a tool for exploring and publishing data. It helps people take data of any shape or size and publish that as an interactive, explorable website and accompanying API. From 985e59493e44d6fcebf7a30f693f4edecee3e90d Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 4 May 2020 09:17:48 -0700 Subject: [PATCH 0196/2113] Update aiofiles requirement from ~=0.4.0 to >=0.4,<0.6 (#725) Refs #754 Updates the requirements on [aiofiles](https://github.com/Tinche/aiofiles) to permit the latest version. 
- [Release notes](https://github.com/Tinche/aiofiles/releases) - [Commits](https://github.com/Tinche/aiofiles/compare/v0.4.0...v0.5.0) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 0de00dc5..0519fa8a 100644 --- a/setup.py +++ b/setup.py @@ -43,7 +43,7 @@ setup( "pint~=0.9", "pluggy~=0.13.0", "uvicorn~=0.11", - "aiofiles~=0.4.0", + "aiofiles>=0.4,<0.6", "janus~=0.4.0", "PyYAML~=5.3", "mergedeep~=1.1.1", From e232f77055880b38cc0b738607cd50cde9188eaf Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 4 May 2020 09:45:49 -0700 Subject: [PATCH 0197/2113] Update mergedeep requirement from ~=1.1.1 to >=1.1.1,<1.4.0 (#728) Updates the requirements on [mergedeep](https://github.com/clarketm/mergedeep) to permit the latest version. - [Release notes](https://github.com/clarketm/mergedeep/releases) - [Commits](https://github.com/clarketm/mergedeep/compare/v1.1.1...v1.3.0) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 0519fa8a..8b54a97b 100644 --- a/setup.py +++ b/setup.py @@ -46,7 +46,7 @@ setup( "aiofiles>=0.4,<0.6", "janus~=0.4.0", "PyYAML~=5.3", - "mergedeep~=1.1.1", + "mergedeep>=1.1.1,<1.4.0", ], entry_points=""" [console_scripts] From 109c5a430d53fe38b1300e0daa20f5cef047a08e Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 4 May 2020 09:48:03 -0700 Subject: [PATCH 0198/2113] Update janus requirement from ~=0.4.0 to >=0.4,<0.6 (#734) Updates the requirements on [janus](https://github.com/aio-libs/janus) to permit the latest version. - [Release notes](https://github.com/aio-libs/janus/releases) - [Changelog](https://github.com/aio-libs/janus/blob/master/CHANGES.rst) - [Commits](https://github.com/aio-libs/janus/compare/v0.4.0...v0.5.0) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 8b54a97b..69ac0939 100644 --- a/setup.py +++ b/setup.py @@ -44,7 +44,7 @@ setup( "pluggy~=0.13.0", "uvicorn~=0.11", "aiofiles>=0.4,<0.6", - "janus~=0.4.0", + "janus>=0.4,<0.6", "PyYAML~=5.3", "mergedeep>=1.1.1,<1.4.0", ], From aa064de3f400899dbf61f2d33a035fba4017596c Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 4 May 2020 10:13:15 -0700 Subject: [PATCH 0199/2113] Update jinja2 requirement from ~=2.10.3 to >=2.10.3,<2.12.0 (#722) Updates the requirements on [jinja2](https://github.com/pallets/jinja) to permit the latest version. 
- [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/master/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/2.10.3...2.11.1) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 69ac0939..0762ace6 100644 --- a/setup.py +++ b/setup.py @@ -38,7 +38,7 @@ setup( install_requires=[ "click~=7.1.1", "click-default-group~=1.2.2", - "Jinja2~=2.10.3", + "Jinja2>=2.10.3,<2.12.0", "hupper~=1.9", "pint~=0.9", "pluggy~=0.13.0", From c91fb9e3d4f0632d4ef25a21165739ab88a9d491 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 4 May 2020 10:13:41 -0700 Subject: [PATCH 0200/2113] Update pytest requirement from ~=5.2.2 to >=5.2.2,<5.5.0 (#721) Updates the requirements on [pytest](https://github.com/pytest-dev/pytest) to permit the latest version. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/5.2.2...5.4.1) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 0762ace6..6e4d423a 100644 --- a/setup.py +++ b/setup.py @@ -56,7 +56,7 @@ setup( extras_require={ "docs": ["sphinx_rtd_theme", "sphinx-autobuild"], "test": [ - "pytest~=5.2.2", + "pytest>=5.2.2,<5.5.0", "pytest-asyncio~=0.10.0", "aiohttp~=3.6.2", "beautifulsoup4~=4.8.1", From dbd2d70b3819a7041bb36a527033d77c85683c05 Mon Sep 17 00:00:00 2001 From: Colin Dellow Date: Mon, 4 May 2020 13:14:25 -0400 Subject: [PATCH 0201/2113] asgi: check raw_path is not None (#719) The ASGI spec (https://asgi.readthedocs.io/en/latest/specs/www.html#http) seems to imply that `None` is a valid value, so we need to check the value itself, not just whether the key is present. In particular, the [mangum](https://github.com/erm/mangum) adapter passes `None` for this key. --- datasette/utils/asgi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index df358240..73ae562b 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -46,7 +46,7 @@ class Request: @property def path(self): - if "raw_path" in self.scope: + if self.scope.get("raw_path") is not None: return self.scope["raw_path"].decode("latin-1") else: path = self.scope["path"] From 707fe039947b3e48f2b6dcfe8e577d76b617f2a5 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 4 May 2020 10:14:46 -0700 Subject: [PATCH 0202/2113] Update beautifulsoup4 requirement from ~=4.8.1 to >=4.8.1,<4.10.0 (#720) Updates the requirements on [beautifulsoup4](http://www.crummy.com/software/BeautifulSoup/bs4/) to permit the latest version. 
Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 6e4d423a..19f5b4e3 100644 --- a/setup.py +++ b/setup.py @@ -59,7 +59,7 @@ setup( "pytest>=5.2.2,<5.5.0", "pytest-asyncio~=0.10.0", "aiohttp~=3.6.2", - "beautifulsoup4~=4.8.1", + "beautifulsoup4>=4.8.1,<4.10.0", "asgiref~=3.2.3", "black~=19.10b0", ], From b314e088c59425122fb2b2abde8741010d9d274a Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 4 May 2020 10:40:48 -0700 Subject: [PATCH 0203/2113] Update pytest-asyncio requirement from ~=0.10.0 to >=0.10,<0.13 (#753) Updates the requirements on [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) to permit the latest version. - [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) - [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.10.0...v0.12.0) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 19f5b4e3..c4886a11 100644 --- a/setup.py +++ b/setup.py @@ -57,7 +57,7 @@ setup( "docs": ["sphinx_rtd_theme", "sphinx-autobuild"], "test": [ "pytest>=5.2.2,<5.5.0", - "pytest-asyncio~=0.10.0", + "pytest-asyncio>=0.10,<0.13", "aiohttp~=3.6.2", "beautifulsoup4>=4.8.1,<4.10.0", "asgiref~=3.2.3", From 450d2e2896e07a8ce27f1cf143febe280c97301b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 4 May 2020 10:40:01 -0700 Subject: [PATCH 0204/2113] Fixed pytest warning about TestClient class --- tests/test_config_dir.py | 6 +++--- tests/test_plugins.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index f262cc59..50e67f80 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -3,7 +3,7 @@ import pytest import sqlite3 from datasette.app import Datasette -from .fixtures import TestClient +from .fixtures import TestClient as _TestClient PLUGIN = """ from datasette import hookimpl @@ -76,7 +76,7 @@ def config_dir_client(tmp_path_factory): ) ds = Datasette([], config_dir=config_dir) - client = TestClient(ds.app()) + client = _TestClient(ds.app()) client.ds = ds yield client @@ -137,7 +137,7 @@ def test_metadata_yaml(tmp_path_factory, filename): config_dir = tmp_path_factory.mktemp("yaml-config-dir") (config_dir / filename).write_text("title: Title from metadata", "utf-8") ds = Datasette([], config_dir=config_dir) - client = TestClient(ds.app()) + client = _TestClient(ds.app()) client.ds = ds response = client.get("/-/metadata.json") assert 200 == response.status diff --git a/tests/test_plugins.py b/tests/test_plugins.py index fce6fd77..8b6a6b41 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -4,7 +4,7 @@ from .fixtures import ( make_app_client, TABLES, TEMP_PLUGIN_SECRET_FILE, - TestClient, + TestClient as _TestClient, ) # noqa from datasette.app import Datasette from datasette.plugins import get_plugins, DEFAULT_PLUGINS @@ -293,7 +293,7 @@ def view_names_client(tmp_path_factory): db_path = str(tmpdir / "fixtures.db") conn = sqlite3.connect(db_path) conn.executescript(TABLES) - return TestClient( + return _TestClient( Datasette( [db_path], template_dir=str(templates), plugins_dir=str(plugins) 
).app() From 9424687e9e94401438896116898a071702b09d40 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 4 May 2020 10:41:58 -0700 Subject: [PATCH 0205/2113] Consistently return charset utf-8, closes #752 --- datasette/app.py | 2 +- datasette/utils/asgi.py | 16 ++++++++++------ docs/custom_templates.rst | 4 ++-- tests/test_html.py | 6 ++++++ 4 files changed, 19 insertions(+), 9 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index b541a9a4..8a4b6011 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -828,7 +828,7 @@ class DatasetteRouter(AsgiRouter): view_name="page", ) # Pull content-type out into separate parameter - content_type = "text/html" + content_type = "text/html; charset=utf-8" matches = [k for k in headers if k.lower() == "content-type"] if matches: content_type = headers[matches[0]] diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 73ae562b..20047bb5 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -130,7 +130,7 @@ class AsgiRouter: { "type": "http.response.start", "status": 404, - "headers": [[b"content-type", b"text/html"]], + "headers": [[b"content-type", b"text/html; charset=utf-8"]], } ) await send({"type": "http.response.body", "body": b"

<h1>404</h1>

"}) @@ -140,11 +140,11 @@ class AsgiRouter: { "type": "http.response.start", "status": 404, - "headers": [[b"content-type", b"text/html"]], + "headers": [[b"content-type", b"text/html; charset=utf-8"]], } ) html = "

<h1>500</h1><pre>{}</pre>

".format(escape(repr(exception))) - await send({"type": "http.response.body", "body": html.encode("latin-1")}) + await send({"type": "http.response.body", "body": html.encode("utf-8")}) class AsgiLifespan: @@ -259,7 +259,11 @@ async def asgi_send_json(send, info, status=200, headers=None): async def asgi_send_html(send, html, status=200, headers=None): headers = headers or {} await asgi_send( - send, html, status=status, headers=headers, content_type="text/html" + send, + html, + status=status, + headers=headers, + content_type="text/html; charset=utf-8", ) @@ -269,13 +273,13 @@ async def asgi_send_redirect(send, location, status=302): "", status=status, headers={"Location": location}, - content_type="text/html", + content_type="text/html; charset=utf-8", ) async def asgi_send(send, content, status, headers=None, content_type="text/plain"): await asgi_start(send, status, headers, content_type) - await send({"type": "http.response.body", "body": content.encode("latin-1")}) + await send({"type": "http.response.body", "body": content.encode("utf-8")}) async def asgi_start(send, status, headers=None, content_type="text/plain"): diff --git a/docs/custom_templates.rst b/docs/custom_templates.rst index 142ecc97..adbfbc25 100644 --- a/docs/custom_templates.rst +++ b/docs/custom_templates.rst @@ -284,7 +284,7 @@ You can nest directories within pages to create a nested structure. To create a Custom headers and status codes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Custom pages default to being served with a content-type of ``text/html`` and a ``200`` status code. You can change these by calling a custom function from within your template. +Custom pages default to being served with a content-type of ``text/html; charset=utf-8`` and a ``200`` status code. You can change these by calling a custom function from within your template. 
For example, to serve a custom page with a ``418 I'm a teapot`` HTTP status code, create a file in ``pages/teapot.html`` containing the following:: @@ -314,7 +314,7 @@ You can verify this is working using ``curl`` like this:: date: Sun, 26 Apr 2020 18:38:30 GMT server: uvicorn x-teapot: I am - content-type: text/html + content-type: text/html; charset=utf-8 Custom redirects ~~~~~~~~~~~~~~~~ diff --git a/tests/test_html.py b/tests/test_html.py index b8dc543c..564365ce 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -963,6 +963,12 @@ def test_404_trailing_slash_redirect(app_client, path, expected_redirect): assert expected_redirect == response.headers["Location"] +def test_404_content_type(app_client): + response = app_client.get("/404") + assert 404 == response.status + assert "text/html; charset=utf-8" == response.headers["content-type"] + + def test_canned_query_with_custom_metadata(app_client): response = app_client.get("/fixtures/neighborhood_search?text=town") assert response.status == 200 From cc872b1f50f1d2c0bc2d930c86a6644f154459dc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 4 May 2020 11:42:01 -0700 Subject: [PATCH 0206/2113] Fixed rogue output in tests, closes #755 --- tests/test_database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_database.py b/tests/test_database.py index 35923c0b..a9728019 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -116,7 +116,7 @@ async def test_execute_write_fn_block_false(app_client): def write_fn(conn): with conn: - conn.execute("delete from roadside_attractions where id = 1;") + conn.execute("delete from roadside_attractions where pk = 1;") row = conn.execute("select count(*) from roadside_attractions").fetchone() print("row = ", row) return row[0] From 7e2bb314649baa9e782ad22ff452d90d46aa840b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 4 May 2020 12:10:31 -0700 Subject: [PATCH 0207/2113] Documented installation using pipx, closes #756 --- docs/installation.rst | 87 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 87 insertions(+) diff --git a/docs/installation.rst b/docs/installation.rst index c547f9e4..990d867b 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -97,3 +97,90 @@ You can now run Datasette like so:: datasette fixtures.db If you want to start making contributions to the Datasette project by installing a copy that lets you directly modify the code, take a look at our guide to :ref:`devenvironment`. + +Install using pipx +------------------ + +`pipx `__ is a tool for installing Python software with all of its dependencies in an isolated environment, to ensure that they will not conflict with any other installed Python software. + +If you use `Homebrew `__ on macOS you can install pipx like this:: + + brew install pipx + pipx ensurepath + +Without Homebrew you can install it like so:: + + python3 -m pip install --user pipx + python3 -m pipx ensurepath + +The ``pipx ensurepath`` command configures your shell to ensure it can find commands that have been installed by pipx - generally by making sure ``~/.local/bin`` has been added to your ``PATH``. + +Once pipx is installed you can use it to install Datasette like this:: + + pipx install datasette + +Then run ``datasette --version`` to confirm that it has been successfully installed. + +Installing plugins using pipx +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Datasette plugins need to be installed into the same environment as Datasette itself. 
You can do this using ``pipx inject datasette name-of-plugin`` - and then confirm that the plugin has been installed using the ``datasette plugins`` command:: + + $ datasette plugins + [] + + $ pipx inject datasette datasette-json-html + injected package datasette-json-html into venv datasette + done! ✨ 🌟 ✨ + + $ datasette plugins + [ + { + "name": "datasette-json-html", + "static": false, + "templates": false, + "version": "0.6" + } + ] + +Upgrading packages using pipx +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can upgrade your pipx installation to the latest release of Datasette using ``pipx upgrade datasette``:: + + $ pipx upgrade datasette + upgraded package datasette from 0.39 to 0.40 (location: /Users/simon/.local/pipx/venvs/datasette) + +To upgrade a plugin within the pipx environment use ``pipx runpip datasette install -U name-of-plugin`` - like this:: + + % datasette plugins + [ + { + "name": "datasette-vega", + "static": true, + "templates": false, + "version": "0.6" + } + ] + + $ pipx runpip datasette install -U datasette-vega + Collecting datasette-vega + Downloading datasette_vega-0.6.2-py3-none-any.whl (1.8 MB) + |████████████████████████████████| 1.8 MB 2.0 MB/s + ... + Installing collected packages: datasette-vega + Attempting uninstall: datasette-vega + Found existing installation: datasette-vega 0.6 + Uninstalling datasette-vega-0.6: + Successfully uninstalled datasette-vega-0.6 + Successfully installed datasette-vega-0.6.2 + + $ datasette plugins + [ + { + "name": "datasette-vega", + "static": true, + "templates": false, + "version": "0.6.2" + } + ] From 0cdf111ae68d46eb2eb51d85e20e1447a42cbdcc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 4 May 2020 12:31:13 -0700 Subject: [PATCH 0208/2113] Move pip/pipx to top of installation instructions Less intimidating than Docker, hopefully. --- docs/installation.rst | 128 ++++++++++++++++++++++-------------------- 1 file changed, 67 insertions(+), 61 deletions(-) diff --git a/docs/installation.rst b/docs/installation.rst index 990d867b..c88950c2 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -11,67 +11,7 @@ on to your machine, or you can install it using Docker. .. contents:: -Using Docker ------------- - -A Docker image containing the latest release of Datasette is published to Docker -Hub here: https://hub.docker.com/r/datasetteproject/datasette/ - -If you have Docker installed (for example with `Docker for Mac -`_ on OS X) you can download and run this -image like so:: - - docker run -p 8001:8001 -v `pwd`:/mnt \ - datasetteproject/datasette \ - datasette -p 8001 -h 0.0.0.0 /mnt/fixtures.db - -This will start an instance of Datasette running on your machine's port 8001, -serving the ``fixtures.db`` file in your current directory. - -Now visit http://127.0.0.1:8001/ to access Datasette. - -(You can download a copy of ``fixtures.db`` from -https://latest.datasette.io/fixtures.db ) - -To upgrade to the most recent release of Datasette, run the following:: - - docker pull datasetteproject/datasette - -Loading Spatialite -~~~~~~~~~~~~~~~~~~ - -The ``datasetteproject/datasette`` image includes a recent version of the -:ref:`SpatiaLite extension ` for SQLite. 
To load and enable that -module, use the following command:: - - docker run -p 8001:8001 -v `pwd`:/mnt \ - datasetteproject/datasette \ - datasette -p 8001 -h 0.0.0.0 /mnt/fixtures.db \ - --load-extension=/usr/local/lib/mod_spatialite.so - -You can confirm that SpatiaLite is successfully loaded by visiting -http://127.0.0.1:8001/-/versions - -Installing plugins -~~~~~~~~~~~~~~~~~~ - -If you want to install plugins into your local Datasette Docker image you can do -so using the following recipe. This will install the plugins and then save a -brand new local image called ``datasette-with-plugins``:: - - docker run datasetteproject/datasette \ - pip install datasette-vega - - docker commit $(docker ps -lq) datasette-with-plugins - -You can now run the new custom image like so:: - - docker run -p 8001:8001 -v `pwd`:/mnt \ - datasette-with-plugins \ - datasette -p 8001 -h 0.0.0.0 /mnt/fixtures.db - -You can confirm that the plugins are installed by visiting -http://127.0.0.1:8001/-/plugins +.. _installation_pip: Install using pip ----------------- @@ -98,6 +38,8 @@ You can now run Datasette like so:: If you want to start making contributions to the Datasette project by installing a copy that lets you directly modify the code, take a look at our guide to :ref:`devenvironment`. +.. _installation_pipx: + Install using pipx ------------------ @@ -184,3 +126,67 @@ To upgrade a plugin within the pipx environment use ``pipx runpip datasette inst "version": "0.6.2" } ] + +.. _installation_docker: + +Using Docker +------------ + +A Docker image containing the latest release of Datasette is published to Docker +Hub here: https://hub.docker.com/r/datasetteproject/datasette/ + +If you have Docker installed (for example with `Docker for Mac +`_ on OS X) you can download and run this +image like so:: + + docker run -p 8001:8001 -v `pwd`:/mnt \ + datasetteproject/datasette \ + datasette -p 8001 -h 0.0.0.0 /mnt/fixtures.db + +This will start an instance of Datasette running on your machine's port 8001, +serving the ``fixtures.db`` file in your current directory. + +Now visit http://127.0.0.1:8001/ to access Datasette. + +(You can download a copy of ``fixtures.db`` from +https://latest.datasette.io/fixtures.db ) + +To upgrade to the most recent release of Datasette, run the following:: + + docker pull datasetteproject/datasette + +Loading Spatialite +~~~~~~~~~~~~~~~~~~ + +The ``datasetteproject/datasette`` image includes a recent version of the +:ref:`SpatiaLite extension ` for SQLite. To load and enable that +module, use the following command:: + + docker run -p 8001:8001 -v `pwd`:/mnt \ + datasetteproject/datasette \ + datasette -p 8001 -h 0.0.0.0 /mnt/fixtures.db \ + --load-extension=/usr/local/lib/mod_spatialite.so + +You can confirm that SpatiaLite is successfully loaded by visiting +http://127.0.0.1:8001/-/versions + +Installing plugins +~~~~~~~~~~~~~~~~~~ + +If you want to install plugins into your local Datasette Docker image you can do +so using the following recipe. 
This will install the plugins and then save a +brand new local image called ``datasette-with-plugins``:: + + docker run datasetteproject/datasette \ + pip install datasette-vega + + docker commit $(docker ps -lq) datasette-with-plugins + +You can now run the new custom image like so:: + + docker run -p 8001:8001 -v `pwd`:/mnt \ + datasette-with-plugins \ + datasette -p 8001 -h 0.0.0.0 /mnt/fixtures.db + +You can confirm that the plugins are installed by visiting +http://127.0.0.1:8001/-/plugins From 9212f0c9c3138f005ea8d57acacb8a2a80b252a6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 4 May 2020 12:35:28 -0700 Subject: [PATCH 0209/2113] Removed note about virtual environments Simplifies things now that we also talk about pipx. --- docs/installation.rst | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/docs/installation.rst b/docs/installation.rst index c88950c2..cdf1467a 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -22,19 +22,13 @@ You can install Datasette and its dependencies using ``pip``:: pip install datasette -The last version to support Python 3.5 was 0.30.2 - you can install that version like so:: +The last version to support Python 3.5 was 0.30.2. If you are running Python 3.5 (check using ``python3 --version``) you can install that version of Datasette like so:: pip install datasette==0.30.2 -If you want to install Datasette in its own virtual environment, use this:: - - python -mvenv datasette-venv - source datasette-venv/bin/activate - pip install datasette - You can now run Datasette like so:: - datasette fixtures.db + datasette If you want to start making contributions to the Datasette project by installing a copy that lets you directly modify the code, take a look at our guide to :ref:`devenvironment`. 
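As a quick sanity check after following the pip instructions above, a session might look like this (a sketch - the exact version reported will depend on what you installed)::

    $ python3 --version
    Python 3.7.7
    $ datasette --version
    datasette, version 0.41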
From 0784f2ef9d3ff6dd9df05f54cb51de29a6d11764 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 6 May 2020 10:18:31 -0700 Subject: [PATCH 0210/2113] Allow specific pragma functions, closes #761 --- datasette/utils/__init__.py | 21 ++++++++++++++++++++- tests/test_utils.py | 5 ++++- 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 490b71c8..f1c24041 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -171,7 +171,26 @@ allowed_sql_res = [ re.compile(r"^explain with\b"), re.compile(r"^explain query plan with\b"), ] -disallawed_sql_res = [(re.compile("pragma"), "Statement may not contain PRAGMA")] +allowed_pragmas = ( + "database_list", + "foreign_key_list", + "function_list", + "index_info", + "index_list", + "index_xinfo", + "page_count", + "max_page_count", + "page_size", + "schema_version", + "table_info", + "table_xinfo", +) +disallawed_sql_res = [ + ( + re.compile("pragma(?!_({}))".format("|".join(allowed_pragmas))), + "Statement may not contain PRAGMA", + ) +] def validate_sql_select(sql): diff --git a/tests/test_utils.py b/tests/test_utils.py index fe5d9a26..7e4f1a8e 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -140,7 +140,8 @@ def test_custom_json_encoder(obj, expected): "update blah;", "-- sql comment to skip\nupdate blah;", "update blah set some_column='# Hello there\n\n* This is a list\n* of items\n--\n[And a link](https://github.com/simonw/datasette-render-markdown).'\nas demo_markdown", - "PRAGMA case_sensitive_like = true" "SELECT * FROM pragma_index_info('idx52')", + "PRAGMA case_sensitive_like = true", + "SELECT * FROM pragma_not_on_allow_list('idx52')", ], ) def test_validate_sql_select_bad(bad_sql): @@ -162,6 +163,8 @@ def test_validate_sql_select_bad(bad_sql): "WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;", "explain WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;", "explain query plan WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;", + "SELECT * FROM pragma_index_info('idx52')", + "select * from pragma_table_xinfo('table')", ], ) def test_validate_sql_select_good(good_sql): From 182e5c8745c94576718315f7596ccc81e5e2417b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 6 May 2020 11:20:58 -0700 Subject: [PATCH 0211/2113] Release Datasette 0.41 Refs #648 #731 #750 #151 #761 #752 #719 #756 #748 --- README.md | 1 + docs/changelog.rst | 29 +++++++++++++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/README.md b/README.md index 12a1ec39..f2a3d81d 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover ## News + * 6th May 2020: [Datasette 0.41](http://datasette.readthedocs.io/en/latest/changelog.html#v0-41) - New mechanism for [creating custom pages](https://datasette.readthedocs.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://datasette.readthedocs.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements. * 21st April 2020: [Datasette 0.40](http://datasette.readthedocs.io/en/latest/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. 
Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. Various bug fixes. * 24th March 2020: [Datasette 0.39](http://datasette.readthedocs.io/en/latest/changelog.html#v0-39) - New `base_url` configuration option for running Datasette under a different URL prefix, `"sort"` and `"sort_desc"` metadata options for setting a default sort order for a table. * 8th March 2020: [Datasette 0.38](http://datasette.readthedocs.io/en/latest/changelog.html#v0-38) - New `--memory` option for `datasette publish cloudrun`, [Docker image](https://hub.docker.com/r/datasetteproject/datasette) upgraded to SQLite 3.31.1. diff --git a/docs/changelog.rst b/docs/changelog.rst index 7a58f58b..dc06e4ef 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,35 @@ Changelog ========= +.. _v0_41: + +0.41 (2020-05-06) +----------------- + +You can now create :ref:`custom pages ` within your Datasette instance using a custom template file. For example, adding a template file called ``templates/pages/about.html`` will result in a new page being served at ``/about`` on your instance. See the :ref:`custom pages documentation ` for full details, including how to return custom HTTP headers, redirects and status codes. (`#648 `__) + +:ref:`config_dir` (`#731 `__) allows you to define a custom Datasette instance as a directory. So instead of running the following:: + + $ datasette one.db two.db \ + --metadata metadata.json \ + --template-dir=templates/ \ + --plugins-dir=plugins \ + --static css:css + +You can instead arrange your files in a single directory called ``my-project`` and run this:: + + $ datasette my-project/ + +Also in this release: + +* New ``NOT LIKE`` table filter: ``?colname__notlike=expression``. (`#750 `__) +* Datasette now has a *pattern portfolio* at ``/-/patterns`` - e.g. https://latest.datasette.io/-/patterns. This is a page that shows every Datasette user interface component in one place, to aid core development and people building custom CSS themes. (`#151 `__) +* SQLite `PRAGMA functions `__ such as ``pragma_table_info(tablename)`` are now allowed in Datasette SQL queries. (`#761 `__) +* Datasette pages now consistently return a ``content-type`` of ``text/html; charset=utf-8``. (`#752 `__) +* Datasette now handles an ASGI ``raw_path`` value of ``None``, which should allow compatibility with the `Mangum `__ adapter for running ASGI apps on AWS Lambda. Thanks, Colin Dellow. (`#719 `__) +* Installation documentation now covers how to :ref:`installation_pipx`. (`#756 `__) +* Improved the documentation for :ref:`full_text_search`. (`#748 `__) + ..
_v0_40: 0.40 (2020-04-21) From 69e3a855dd7e5a77409d70b18c45ae3c1a145a75 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 8 May 2020 07:16:39 -0700 Subject: [PATCH 0212/2113] Rename execute_against_connection_in_thread() to execute_fn(), refs #685 --- datasette/cli.py | 2 +- datasette/database.py | 32 ++++++++++---------------------- 2 files changed, 11 insertions(+), 23 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 919be065..c59fb6e0 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -388,7 +388,7 @@ async def check_databases(ds): # to confirm they are all usable for database in list(ds.databases.values()): try: - await database.execute_against_connection_in_thread(check_connection) + await database.execute_fn(check_connection) except SpatialiteConnectionProblem: raise click.UsageError( "It looks like you're trying to load a SpatiaLite" diff --git a/datasette/database.py b/datasette/database.py index 48c367ef..0f540e01 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -101,7 +101,7 @@ class Database: result = e task.reply_queue.sync_q.put(result) - async def execute_against_connection_in_thread(self, fn): + async def execute_fn(self, fn): def in_thread(): conn = getattr(connections, self.name, None) if not conn: @@ -163,9 +163,7 @@ class Database: return Results(rows, False, cursor.description) with trace("sql", database=self.name, sql=sql.strip(), params=params): - results = await self.execute_against_connection_in_thread( - sql_operation_in_thread - ) + results = await self.execute_fn(sql_operation_in_thread) return results @property @@ -223,19 +221,13 @@ class Database: return [r[0] for r in results.rows] async def table_columns(self, table): - return await self.execute_against_connection_in_thread( - lambda conn: table_columns(conn, table) - ) + return await self.execute_fn(lambda conn: table_columns(conn, table)) async def primary_keys(self, table): - return await self.execute_against_connection_in_thread( - lambda conn: detect_primary_keys(conn, table) - ) + return await self.execute_fn(lambda conn: detect_primary_keys(conn, table)) async def fts_table(self, table): - return await self.execute_against_connection_in_thread( - lambda conn: detect_fts(conn, table) - ) + return await self.execute_fn(lambda conn: detect_fts(conn, table)) async def label_column_for_table(self, table): explicit_label_column = self.ds.table_metadata(self.name, table).get( @@ -244,9 +236,7 @@ class Database: if explicit_label_column: return explicit_label_column # If a table has two columns, one of which is ID, then label_column is the other one - column_names = await self.execute_against_connection_in_thread( - lambda conn: table_columns(conn, table) - ) + column_names = await self.execute_fn(lambda conn: table_columns(conn, table)) # Is there a name or title column? 
name_or_title = [c for c in column_names if c in ("name", "title")] if name_or_title: @@ -261,7 +251,7 @@ class Database: return None async def foreign_keys_for_table(self, table): - return await self.execute_against_connection_in_thread( + return await self.execute_fn( lambda conn: get_outbound_foreign_keys(conn, table) ) @@ -279,9 +269,7 @@ class Database: ) ).rows ] - has_spatialite = await self.execute_against_connection_in_thread( - detect_spatialite - ) + has_spatialite = await self.execute_fn(detect_spatialite) if has_spatialite: # Also hide Spatialite internal tables hidden_tables += [ @@ -329,10 +317,10 @@ class Database: return [r[0] for r in results.rows] async def get_all_foreign_keys(self): - return await self.execute_against_connection_in_thread(get_all_foreign_keys) + return await self.execute_fn(get_all_foreign_keys) async def get_outbound_foreign_keys(self, table): - return await self.execute_against_connection_in_thread( + return await self.execute_fn( lambda conn: get_outbound_foreign_keys(conn, table) ) From 4433306c1855ad69840cc76cbd41086137572be2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 8 May 2020 09:05:46 -0700 Subject: [PATCH 0213/2113] Improvements + docs for db.execute() and Results class * Including new results.first() and results.single_value() methods. Closes #685 --- datasette/app.py | 3 +- datasette/database.py | 39 +++++++++++++++++++++-- datasette/facets.py | 2 +- datasette/utils/__init__.py | 21 ------------- datasette/views/base.py | 2 +- datasette/views/table.py | 2 +- docs/internals.rst | 63 ++++++++++++++++++++++++++++++++++++- tests/test_database.py | 38 ++++++++++++++++++++++ 8 files changed, 141 insertions(+), 29 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 8a4b6011..f1fcc5eb 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -26,10 +26,9 @@ from .views.index import IndexView from .views.special import JsonDataView, PatternPortfolioView from .views.table import RowView, TableView from .renderer import json_renderer -from .database import Database +from .database import Database, QueryInterrupted from .utils import ( - QueryInterrupted, escape_css_string, escape_sqlite, format_bytes, diff --git a/datasette/database.py b/datasette/database.py index 0f540e01..e6154caa 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -8,8 +8,6 @@ import uuid from .tracer import trace from .utils import ( - QueryInterrupted, - Results, detect_fts, detect_primary_keys, detect_spatialite, @@ -371,3 +369,40 @@ class WriteTask: self.fn = fn self.task_id = task_id self.reply_queue = reply_queue + + +class QueryInterrupted(Exception): + pass + + +class MultipleValues(Exception): + pass + + +class Results: + def __init__(self, rows, truncated, description): + self.rows = rows + self.truncated = truncated + self.description = description + + @property + def columns(self): + return [d[0] for d in self.description] + + def first(self): + if self.rows: + return self.rows[0] + else: + return None + + def single_value(self): + if self.rows and 1 == len(self.rows) and 1 == len(self.rows[0]): + return self.rows[0][0] + else: + raise MultipleValues + + def __iter__(self): + return iter(self.rows) + + def __len__(self): + return len(self.rows) diff --git a/datasette/facets.py b/datasette/facets.py index 18558754..1712db9b 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -2,12 +2,12 @@ import json import urllib import re from datasette import hookimpl +from datasette.database import QueryInterrupted from 
datasette.utils import ( escape_sqlite, path_with_added_args, path_with_removed_args, detect_json1, - QueryInterrupted, InvalidSql, sqlite3, ) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index f1c24041..26a778d3 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -47,27 +47,6 @@ ENV SQLITE_EXTENSIONS /usr/lib/x86_64-linux-gnu/mod_spatialite.so """ -class QueryInterrupted(Exception): - pass - - -class Results: - def __init__(self, rows, truncated, description): - self.rows = rows - self.truncated = truncated - self.description = description - - @property - def columns(self): - return [d[0] for d in self.description] - - def __iter__(self): - return iter(self.rows) - - def __len__(self): - return len(self.rows) - - def urlsafe_components(token): "Splits token on commas and URL decodes each component" return [urllib.parse.unquote_plus(b) for b in token.split(",")] diff --git a/datasette/views/base.py b/datasette/views/base.py index e2bce2f9..f5eafe63 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -10,8 +10,8 @@ import pint from datasette import __version__ from datasette.plugins import pm +from datasette.database import QueryInterrupted from datasette.utils import ( - QueryInterrupted, InvalidSql, LimitedWriter, is_url, diff --git a/datasette/views/table.py b/datasette/views/table.py index 10e86eeb..c07447d3 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -5,9 +5,9 @@ import json import jinja2 from datasette.plugins import pm +from datasette.database import QueryInterrupted from datasette.utils import ( CustomRow, - QueryInterrupted, RequestParameters, append_querystring, compound_keys_after_sql, diff --git a/docs/internals.rst b/docs/internals.rst index d7b6e7cb..0020f96d 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -94,13 +94,74 @@ Database class Instances of the ``Database`` class can be used to execute queries against attached SQLite databases, and to run introspection against their schemas. -SQLite only allows one database connection to write at a time. Datasette handles this for you by maintaining a queue of writes to be executed against a given database. Plugins can submit write operations to this queue and they will be executed in the order in which they are received. +.. _database_execute: + +await db.execute(sql, ...) + -------------------------- + +Executes a SQL query against the database and returns the resulting rows (see :ref:`database_results`). + +``sql`` - string (required) + The SQL query to execute. This can include ``?`` or ``:named`` parameters. + +``params`` - list or dict + A list or dictionary of values to use for the parameters. List for ``?``, dictionary for ``:named``. + +``truncate`` - boolean + Should the rows returned by the query be truncated at the maximum page size? Defaults to ``True``, set this to ``False`` to disable truncation. + +``custom_time_limit`` - integer ms + A custom time limit for this query. This can be set to a lower value than the Datasette configured default. If a query takes longer than this it will be terminated early and raise a ``datasette.database.QueryInterrupted`` exception. + +``page_size`` - integer + Set a custom page size for truncation, over-riding the configured Datasette default. + +``log_sql_errors`` - boolean + Should any SQL errors be logged to the console in addition to being raised as an error? Defaults to ``True``. + +..
_database_results: + +Results +------- + +The ``db.execute()`` method returns a single ``Results`` object. This can be used to access the rows returned by the query. + +Iterating over a ``Results`` object will yield SQLite `Row objects `__. Each of these can be treated as a tuple or can be accessed using ``row["column"]`` syntax: + +.. code-block:: python + + info = [] + results = await db.execute("select name from sqlite_master") + for row in results: + info.append(row["name"]) + +The ``Results`` object also has the following properties and methods: + +``.truncated`` - boolean + Indicates if this query was truncated - if it returned more results than the specified ``page_size``. If this is true then the results object will only provide access to the first ``page_size`` rows in the query result. You can disable truncation by passing ``truncate=False`` to the ``db.execute()`` method. + +``.columns`` - list of strings + A list of column names returned by the query. + +``.rows`` - list of sqlite3.Row + This property provides direct access to the list of rows returned by the database. You can access specific rows by index using ``results.rows[0]``. + +``.first()`` - row or None + Returns the first row in the results, or ``None`` if no rows were returned. + +``.single_value()`` + Returns the value of the first column of the first row of results - but only if the query returned a single row with a single column. Raises a ``datasette.database.MultipleValues`` exception otherwise. + +``.__len__()`` + Calling ``len(results)`` returns the (truncated) number of returned results. .. _database_execute_write: await db.execute_write(sql, params=None, block=False) ----------------------------------------------------- +SQLite only allows one database connection to write at a time. Datasette handles this for you by maintaining a queue of writes to be executed against a given database. Plugins can submit write operations to this queue and they will be executed in the order in which they are received. + This method can be used to queue up a non-SELECT SQL query to be executed against a single write connection to the database. You can pass additional SQL parameters as a tuple or dictionary.
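To make the queued-write API described above concrete, a call from plugin code might look like this (a sketch - the ``documents`` table and values are illustrative):

.. code-block:: python

    # Queue an INSERT against the database's single write connection.
    # block=True waits until the write thread has executed it before returning.
    await db.execute_write(
        "insert into documents (id, title) values (?, ?)",
        (1, "Hello"),
        block=True,
    )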
diff --git a/tests/test_database.py b/tests/test_database.py index a9728019..d4055776 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -1,9 +1,47 @@ +from datasette.database import Results, MultipleValues +from datasette.utils import sqlite3 from .fixtures import app_client import pytest import time import uuid +@pytest.mark.asyncio +async def test_execute1(app_client): + db = app_client.ds.databases["fixtures"] + results = await db.execute("select * from facetable") + assert isinstance(results, Results) + assert 15 == len(results) + + +@pytest.mark.asyncio +async def test_results_first(app_client): + db = app_client.ds.databases["fixtures"] + assert None is (await db.execute("select * from facetable where pk > 100")).first() + results = await db.execute("select * from facetable") + row = results.first() + assert isinstance(row, sqlite3.Row) + + +@pytest.mark.parametrize( + "query,expected", + [ + ("select 1", 1), + ("select 1, 2", None), + ("select 1 as num union select 2 as num", None), + ], +) +@pytest.mark.asyncio +async def test_results_single_value(app_client, query, expected): + db = app_client.ds.databases["fixtures"] + results = await db.execute(query) + if expected: + assert expected == results.single_value() + else: + with pytest.raises(MultipleValues): + results.single_value() + + @pytest.mark.parametrize( "tables,exists", ( From ec9cdc3ffa7d9a9a214f71fa7864f0cbdf6ccb23 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 8 May 2020 09:52:53 -0700 Subject: [PATCH 0214/2113] Documentation for .execute_fn(), refs #685 --- docs/internals.rst | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/docs/internals.rst b/docs/internals.rst index 0020f96d..526c531c 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -155,6 +155,26 @@ The ``Results`` object also has the following properties and methods: ``.__len__()`` Calling ``len(results)`` returns the (truncated) number of returned results. +.. _database_execute_fn: + +await db.execute_fn(fn) +----------------------- + +Executes a given callback function against a read-only database connection running in a thread. The function will be passed a SQLite connection, and the return value from the function will be returned by the ``await``. + +Example usage: + +syntax: + +.. code-block:: python + + def get_version(conn); + return conn.execute( + "select sqlite_version()" + ).fetchall()[0][0] + + version = await db.execute_fn(get_version) + .. _database_execute_write: await db.execute_write(sql, params=None, block=False) From 545c71b6044bbf30caef04976cbd73d519d278a5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 8 May 2020 09:57:01 -0700 Subject: [PATCH 0215/2113] Small cleanup --- docs/internals.rst | 3 --- 1 file changed, 3 deletions(-) diff --git a/docs/internals.rst b/docs/internals.rst index 526c531c..aa1ff7e7 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -163,9 +163,6 @@ await db.execute_fn(fn) Executes a given callback function against a read-only database connection running in a thread. The function will be passed a SQLite connection, and the return value from the function will be returned by the ``await``. Example usage: - -syntax: - .. 
code-block:: python def get_version(conn); From 5ab848f0b87ad2030088a7259fc1802316b90200 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 8 May 2020 10:04:47 -0700 Subject: [PATCH 0216/2113] RST fix --- docs/internals.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/internals.rst b/docs/internals.rst index aa1ff7e7..43944de9 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -163,6 +163,7 @@ await db.execute_fn(fn) Executes a given callback function against a read-only database connection running in a thread. The function will be passed a SQLite connection, and the return value from the function will be returned by the ``await``. Example usage: + .. code-block:: python def get_version(conn); From 2694ddcf14b88955e93a6cfb6c725500bb93e219 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 8 May 2020 10:29:17 -0700 Subject: [PATCH 0217/2113] Test for .execute_fn(), refs #685 --- docs/internals.rst | 2 +- tests/test_database.py | 12 +++++++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/docs/internals.rst b/docs/internals.rst index 43944de9..7b4c1755 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -166,7 +166,7 @@ Example usage: .. code-block:: python - def get_version(conn); + def get_version(conn): return conn.execute( "select sqlite_version()" ).fetchall()[0][0] diff --git a/tests/test_database.py b/tests/test_database.py index d4055776..1f1a3a7e 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -7,7 +7,7 @@ import uuid @pytest.mark.asyncio -async def test_execute1(app_client): +async def test_execute(app_client): db = app_client.ds.databases["fixtures"] results = await db.execute("select * from facetable") assert isinstance(results, Results) @@ -42,6 +42,16 @@ async def test_results_single_value(app_client, query, expected): results.single_value() +@pytest.mark.asyncio +async def test_execute_fn(app_client): + db = app_client.ds.databases["fixtures"] + + def get_1_plus_1(conn): + return conn.execute("select 1 + 1").fetchall()[0][0] + + assert 2 == await db.execute_fn(get_1_plus_1) + + @pytest.mark.parametrize( "tables,exists", ( From af6c6c5d6f929f951c0e63bfd1c82e37a071b50f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 8 May 2020 10:38:27 -0700 Subject: [PATCH 0218/2113] Release 0.42, refs #685 --- README.md | 1 + docs/changelog.rst | 11 +++++++++++ 2 files changed, 12 insertions(+) diff --git a/README.md b/README.md index f2a3d81d..7351c5c0 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover ## News + * 8th May 2020: [Datasette 0.42](http://datasette.readthedocs.io/en/latest/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. * 6th May 2020: [Datasette 0.41](http://datasette.readthedocs.io/en/latest/changelog.html#v0-41) - New mechanism for [creating custom pages](https://datasette.readthedocs.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://datasette.readthedocs.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements. * 21st April 2020: [Datasette 0.40](http://datasette.readthedocs.io/en/latest/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. 
Various bug fixes. * 24th March 2020: [Datasette 0.39](http://datasette.readthedocs.io/en/latest/changelog.html#v0-39) - New `base_url` configuration option for running Datasette under a different URL prefix, `"sort"` and `"sort_desc"` metadata options for setting a default sort order for a table. diff --git a/docs/changelog.rst b/docs/changelog.rst index dc06e4ef..48d3128b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,17 @@ Changelog ========= +.. _v0_42: + +0.42 (2020-05-08) +----------------- + +A small release which provides improved internal methods for use in plugins, along with documentation. See `#685 `__. + +* Added documentation for ``db.execute()``, see :ref:`database_execute`. +* Renamed ``db.execute_against_connection_in_thread()`` to ``db.execute_fn()`` and made it a documented method, see :ref:`database_execute_fn`. +* New ``results.first()`` and ``results.single_value()`` methods, plus documentation for the ``Results`` class - see :ref:`database_results`. + .. _v0_41: 0.41 (2020-05-06) From fc24edc153d76bcec917bb23d532981d9862e696 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 11 May 2020 11:28:53 -0700 Subject: [PATCH 0219/2113] Added project_urls, closes #764 --- setup.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/setup.py b/setup.py index c4886a11..d9c70de5 100644 --- a/setup.py +++ b/setup.py @@ -32,6 +32,14 @@ setup( author="Simon Willison", license="Apache License, Version 2.0", url="https://github.com/simonw/datasette", + project_urls={ + "Documentation": "https://datasette.readthedocs.io/en/stable/", + "Changelog": "https://datasette.readthedocs.io/en/stable/changelog.html", + "Live demo": "https://latest.datasette.io/", + "Source code": "https://github.com/simonw/datasette", + "Issues": "https://github.com/simonw/datasette/issues", + "CI": "https://travis-ci.org/simonw/datasette", + }, packages=find_packages(exclude="tests"), package_data={"datasette": ["templates/*.html"]}, include_package_data=True, From 504196341c49840270bd75ea1a1871ef386ba7ea Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 14 May 2020 22:51:39 -0700 Subject: [PATCH 0220/2113] Visually distinguish float/int columns, closes #729 --- datasette/static/app.css | 5 ++ datasette/templates/_table.html | 2 +- datasette/views/table.py | 12 ++- tests/test_html.py | 133 ++++++++++++++++---------------- 4 files changed, 85 insertions(+), 67 deletions(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index bae091b8..cc33277a 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -345,3 +345,8 @@ p.zero-results { padding: 0.5em; font-style: italic; } + +/* Value types */ +.type-float, .type-int { + color: #666; +} \ No newline at end of file diff --git a/datasette/templates/_table.html b/datasette/templates/_table.html index 42c37c55..8fee77b2 100644 --- a/datasette/templates/_table.html +++ b/datasette/templates/_table.html @@ -21,7 +21,7 @@ {% for row in display_rows %} {% for cell in row %} - {{ cell.value }} + {{ cell.value }} {% endfor %} {% endfor %} diff --git a/datasette/views/table.py b/datasette/views/table.py index c07447d3..51b7aa2f 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -112,6 +112,7 @@ class RowTableShared(DataView): cells.append( { "column": pks[0] if len(pks) == 1 else "Link", + "value_type": "link", "is_special_link_column": is_special_link_column, "raw": pk_path, "value": jinja2.Markup( @@ -192,7 +193,16 @@ class RowTableShared(DataView): if truncate_cells and 
len(display_value) > truncate_cells: display_value = display_value[:truncate_cells] + u"\u2026" - cells.append({"column": column, "value": display_value, "raw": value}) + cells.append( + { + "column": column, + "value": display_value, + "raw": value, + "value_type": "none" + if value is None + else str(type(value).__name__), + } + ) cell_rows.append(Row(cells)) if link_column: diff --git a/tests/test_html.py b/tests/test_html.py index 564365ce..a3388c2d 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -505,16 +505,16 @@ def test_table_html_simple_primary_key(app_client): assert ["nofollow"] == a["rel"] assert [ [ - '1', - 'hello', + '1', + 'hello', ], [ - '2', - 'world', + '2', + 'world', ], [ - '3', - '\xa0', + '3', + '\xa0', ], ] == [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] @@ -578,9 +578,12 @@ def test_row_html_simple_primary_key(app_client): assert response.status == 200 table = Soup(response.body, "html.parser").find("table") assert ["id", "content"] == [th.string.strip() for th in table.select("thead th")] - assert [['1', 'hello']] == [ - [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") - ] + assert [ + [ + '1', + 'hello', + ] + ] == [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] def test_table_not_exists(app_client): @@ -599,14 +602,14 @@ def test_table_html_no_primary_key(app_client): ] expected = [ [ - '{}'.format( + '{}'.format( i, i ), - '{}'.format(i), - '{}'.format(i), - 'a{}'.format(i), - 'b{}'.format(i), - 'c{}'.format(i), + '{}'.format(i), + '{}'.format(i), + 'a{}'.format(i), + 'b{}'.format(i), + 'c{}'.format(i), ] for i in range(1, 51) ] @@ -633,11 +636,11 @@ def test_row_html_no_primary_key(app_client): ] expected = [ [ - '1', - '1', - 'a1', - 'b1', - 'c1', + '1', + '1', + 'a1', + 'b1', + 'c1', ] ] assert expected == [ @@ -658,10 +661,10 @@ def test_table_html_compound_primary_key(app_client): assert a["href"].endswith("/compound_primary_key?_sort={}".format(expected_col)) expected = [ [ - 'a,b', - 'a', - 'b', - 'c', + 'a,b', + 'a', + 'b', + 'c', ] ] assert expected == [ @@ -675,14 +678,14 @@ def test_table_html_foreign_key_links(app_client): table = Soup(response.body, "html.parser").find("table") expected = [ [ - '1', - 'hello\xa01', - '1', + '1', + 'hello\xa01', + '1', ], [ - '2', - '\xa0', - '\xa0', + '2', + '\xa0', + '\xa0', ], ] assert expected == [ @@ -696,9 +699,9 @@ def test_table_html_disable_foreign_key_links_with_labels(app_client): table = Soup(response.body, "html.parser").find("table") expected = [ [ - '1', - '1', - '1', + '1', + '1', + '1', ] ] assert expected == [ @@ -712,8 +715,8 @@ def test_table_html_foreign_key_custom_label_column(app_client): table = Soup(response.body, "html.parser").find("table") expected = [ [ - '1', - 'world2\xa01', + '1', + 'world2\xa01', ] ] assert expected == [ @@ -754,9 +757,9 @@ def test_row_html_compound_primary_key(app_client): ] expected = [ [ - 'a', - 'b', - 'c', + 'a', + 'b', + 'c', ] ] assert expected == [ @@ -771,14 +774,14 @@ def test_compound_primary_key_with_foreign_key_references(app_client): table = Soup(response.body, "html.parser").find("table") expected = [ [ - '1,feline', - '1\xa01', - 'feline', + '1,feline', + '1\xa01', + 'feline', ], [ - '2,canine', - '2\xa02', - 'canine', + '2,canine', + '2\xa02', + 'canine', ], ] assert expected == [ @@ -799,16 +802,16 @@ def test_view_html(app_client): assert ths[1].string.strip() == "upper_content" expected = [ [ - 'hello', - 'HELLO', + 'hello', + 'HELLO', ], [ - 'world', - 
'WORLD', + 'world', + 'WORLD', ], [ - '\xa0', - '\xa0', + '\xa0', + '\xa0', ], ] assert expected == [ @@ -1079,9 +1082,9 @@ def test_binary_data_display(app_client): table = Soup(response.body, "html.parser").find("table") expected_tds = [ [ - '1', - '1', - '<Binary\xa0data:\xa019\xa0bytes>', + '1', + '1', + '<Binary\xa0data:\xa019\xa0bytes>', ] ] assert expected_tds == [ @@ -1154,20 +1157,20 @@ def test_metadata_sort(app_client): rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] expected = [ [ - '3', - 'Detroit', + '3', + 'Detroit', ], [ - '2', - 'Los Angeles', + '2', + 'Los Angeles', ], [ - '4', - 'Memnonia', + '4', + 'Memnonia', ], [ - '1', - 'San Francisco', + '1', + 'San Francisco', ], ] assert expected == rows @@ -1189,12 +1192,12 @@ def test_metadata_sort_desc(app_client): rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] expected = [ [ - '2', - 'Paranormal', + '2', + 'Paranormal', ], [ - '1', - 'Museum', + '1', + 'Museum', ], ] assert expected == rows From 5ea8c6d1cd8ded832718fb1a652b5880c4bf5ebb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 14 May 2020 22:55:20 -0700 Subject: [PATCH 0221/2113] type-pk instead of type-link CSS class, closes #729 --- datasette/views/table.py | 2 +- tests/test_html.py | 36 ++++++++++++++++++------------------ 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 51b7aa2f..3289e58b 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -112,7 +112,7 @@ class RowTableShared(DataView): cells.append( { "column": pks[0] if len(pks) == 1 else "Link", - "value_type": "link", + "value_type": "pk", "is_special_link_column": is_special_link_column, "raw": pk_path, "value": jinja2.Markup( diff --git a/tests/test_html.py b/tests/test_html.py index a3388c2d..445f7b4c 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -505,15 +505,15 @@ def test_table_html_simple_primary_key(app_client): assert ["nofollow"] == a["rel"] assert [ [ - '1', + '1', 'hello', ], [ - '2', + '2', 'world', ], [ - '3', + '3', '\xa0', ], ] == [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] @@ -602,7 +602,7 @@ def test_table_html_no_primary_key(app_client): ] expected = [ [ - '{}'.format( + '{}'.format( i, i ), '{}'.format(i), @@ -661,7 +661,7 @@ def test_table_html_compound_primary_key(app_client): assert a["href"].endswith("/compound_primary_key?_sort={}".format(expected_col)) expected = [ [ - 'a,b', + 'a,b', 'a', 'b', 'c', @@ -678,12 +678,12 @@ def test_table_html_foreign_key_links(app_client): table = Soup(response.body, "html.parser").find("table") expected = [ [ - '1', + '1', 'hello\xa01', '1', ], [ - '2', + '2', '\xa0', '\xa0', ], @@ -699,7 +699,7 @@ def test_table_html_disable_foreign_key_links_with_labels(app_client): table = Soup(response.body, "html.parser").find("table") expected = [ [ - '1', + '1', '1', '1', ] @@ -715,7 +715,7 @@ def test_table_html_foreign_key_custom_label_column(app_client): table = Soup(response.body, "html.parser").find("table") expected = [ [ - '1', + '1', 'world2\xa01', ] ] @@ -774,12 +774,12 @@ def test_compound_primary_key_with_foreign_key_references(app_client): table = Soup(response.body, "html.parser").find("table") expected = [ [ - '1,feline', + '1,feline', '1\xa01', 'feline', ], [ - '2,canine', + '2,canine', '2\xa02', 'canine', ], @@ -1082,7 +1082,7 @@ def test_binary_data_display(app_client): table = Soup(response.body, "html.parser").find("table") expected_tds = 
[ [ - '1', + '1', '1', '<Binary\xa0data:\xa019\xa0bytes>', ] @@ -1157,19 +1157,19 @@ def test_metadata_sort(app_client): rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] expected = [ [ - '3', + '3', 'Detroit', ], [ - '2', + '2', 'Los Angeles', ], [ - '4', + '4', 'Memnonia', ], [ - '1', + '1', 'San Francisco', ], ] @@ -1192,11 +1192,11 @@ def test_metadata_sort_desc(app_client): rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")] expected = [ [ - '2', + '2', 'Paranormal', ], [ - '1', + '1', 'Museum', ], ] From faea5093b865031f650da7da6539430f732f511a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 15 May 2020 11:16:47 -0700 Subject: [PATCH 0222/2113] Column headings now black in mobile view, closes #729 --- datasette/static/app.css | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index cc33277a..92f268ae 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -79,6 +79,7 @@ table a:visited { .rows-and-columns td:before { display: block; + color: black; margin-left: -10%; font-size: 0.8em; } @@ -349,4 +350,4 @@ p.zero-results { /* Value types */ .type-float, .type-int { color: #666; -} \ No newline at end of file +} From cee671a58f417f827d1735b1abaa40716534ea67 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 21 May 2020 10:53:51 -0700 Subject: [PATCH 0223/2113] Use dirs_exist_ok=True, refs #744 (#768) --- datasette/utils/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 26a778d3..04bf41af 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -602,9 +602,9 @@ def link_or_copy(src, dst): def link_or_copy_directory(src, dst): try: - shutil.copytree(src, dst, copy_function=os.link) + shutil.copytree(src, dst, copy_function=os.link, dirs_exist_ok=True) except OSError: - shutil.copytree(src, dst) + shutil.copytree(src, dst, dirs_exist_ok=True) def module_from_path(path, name): From 2d099ad9c657d2cab59de91cdb8bfed2da236ef6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 11:17:43 -0700 Subject: [PATCH 0224/2113] Backport of Python 3.8 shutil.copytree, refs #744 (#769) --- datasette/utils/__init__.py | 5 +- datasette/utils/shutil_backport.py | 101 +++++++++++++++++++++++++++++ 2 files changed, 104 insertions(+), 2 deletions(-) create mode 100644 datasette/utils/shutil_backport.py diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 04bf41af..cdb1bbc9 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -15,6 +15,7 @@ import shutil import urllib import numbers import yaml +from .shutil_backport import copytree try: import pysqlite3 as sqlite3 @@ -602,9 +603,9 @@ def link_or_copy(src, dst): def link_or_copy_directory(src, dst): try: - shutil.copytree(src, dst, copy_function=os.link, dirs_exist_ok=True) + copytree(src, dst, copy_function=os.link, dirs_exist_ok=True) except OSError: - shutil.copytree(src, dst, dirs_exist_ok=True) + copytree(src, dst, dirs_exist_ok=True) def module_from_path(path, name): diff --git a/datasette/utils/shutil_backport.py b/datasette/utils/shutil_backport.py new file mode 100644 index 00000000..dbe22404 --- /dev/null +++ b/datasette/utils/shutil_backport.py @@ -0,0 +1,101 @@ +""" +Backported from Python 3.8. 
+ +This code is licensed under the Python License: +https://github.com/python/cpython/blob/v3.8.3/LICENSE +""" +import os +from shutil import copy, copy2, copystat, Error + + +def _copytree( + entries, + src, + dst, + symlinks, + ignore, + copy_function, + ignore_dangling_symlinks, + dirs_exist_ok=False, +): + if ignore is not None: + ignored_names = ignore(src, set(os.listdir(src))) + else: + ignored_names = set() + + os.makedirs(dst, exist_ok=dirs_exist_ok) + errors = [] + use_srcentry = copy_function is copy2 or copy_function is copy + + for srcentry in entries: + if srcentry.name in ignored_names: + continue + srcname = os.path.join(src, srcentry.name) + dstname = os.path.join(dst, srcentry.name) + srcobj = srcentry if use_srcentry else srcname + try: + if srcentry.is_symlink(): + linkto = os.readlink(srcname) + if symlinks: + os.symlink(linkto, dstname) + copystat(srcobj, dstname, follow_symlinks=not symlinks) + else: + if not os.path.exists(linkto) and ignore_dangling_symlinks: + continue + if srcentry.is_dir(): + copytree( + srcobj, + dstname, + symlinks, + ignore, + copy_function, + dirs_exist_ok=dirs_exist_ok, + ) + else: + copy_function(srcobj, dstname) + elif srcentry.is_dir(): + copytree( + srcobj, + dstname, + symlinks, + ignore, + copy_function, + dirs_exist_ok=dirs_exist_ok, + ) + else: + copy_function(srcentry, dstname) + except Error as err: + errors.extend(err.args[0]) + except OSError as why: + errors.append((srcname, dstname, str(why))) + try: + copystat(src, dst) + except OSError as why: + # Copying file access times may fail on Windows + if getattr(why, "winerror", None) is None: + errors.append((src, dst, str(why))) + if errors: + raise Error(errors) + return dst + + +def copytree( + src, + dst, + symlinks=False, + ignore=None, + copy_function=copy2, + ignore_dangling_symlinks=False, + dirs_exist_ok=False, +): + with os.scandir(src) as entries: + return _copytree( + entries=entries, + src=src, + dst=dst, + symlinks=symlinks, + ignore=ignore, + copy_function=copy_function, + ignore_dangling_symlinks=ignore_dangling_symlinks, + dirs_exist_ok=dirs_exist_ok, + ) From 9e6075d21facbfef565ffcdf160a558744fc1c2d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 11:35:31 -0700 Subject: [PATCH 0225/2113] rST fixes for register_output_renderer docs --- docs/plugins.rst | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index 59d39a62..50271e0a 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -743,11 +743,11 @@ Allows the plugin to register a new output renderer, to output data in a custom @hookimpl def register_output_renderer(datasette): return { - 'extension': 'test', - 'callback': render_test + "extension": "test", + "callback": render_test } -This will register `render_test` to be called when paths with the extension `.test` (for example `/database.test`, `/database/table.test`, or `/database/table/row.test`) are requested. When a request is received, the callback function is called with three positional arguments: +This will register ``render_test`` to be called when paths with the extension ``.test`` (for example ``/database.test``, ``/database/table.test``, or ``/database/table/row.test``) are requested. 
When a request is received, the callback function is called with three positional arguments: ``args`` - dictionary The GET parameters of the request @@ -756,15 +756,15 @@ This will register `render_test` to be called when paths with the extension `.te The data to be rendered ``view_name`` - string - The name of the view where the renderer is being called. (`index`, `database`, `table`, and `row` are the most important ones.) + The name of the view where the renderer is being called. (``index``, ``database``, ``table``, and ``row`` are the most important ones.) -The callback function can return `None`, if it is unable to render the data, or a dictionary with the following keys: +The callback function can return ``None``, if it is unable to render the data, or a dictionary with the following keys: ``body`` - string or bytes, optional The response body, default empty ``content_type`` - string, optional - The Content-Type header, default `text/plain` + The Content-Type header, default ``text/plain`` ``status_code`` - integer, optional The HTTP status code, default 200 @@ -775,7 +775,7 @@ A simple example of an output renderer callback function: def render_test(args, data, view_name): return { - 'body': 'Hello World' + "body": "Hello World" } Examples: `datasette-atom `_, `datasette-ics `_ From 41a0cd7b6afe0397efbbf27ad822679fc574811a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 12:25:52 -0700 Subject: [PATCH 0226/2113] call_with_supported_arguments() util, refs #581 --- datasette/utils/__init__.py | 11 +++++++++++ tests/test_utils.py | 11 +++++++++++ 2 files changed, 22 insertions(+) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index cdb1bbc9..03157072 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -3,6 +3,7 @@ from collections import OrderedDict import base64 import click import hashlib +import inspect import json import mergedeep import os @@ -803,3 +804,13 @@ def parse_metadata(content): return yaml.safe_load(content) except yaml.YAMLError: raise BadMetadataError("Metadata is not valid JSON or YAML") + + +def call_with_supported_arguments(fn, **kwargs): + parameters = inspect.signature(fn).parameters.keys() + call_with = [] + for parameter in parameters: + if parameter not in kwargs: + raise TypeError("{} requires parameters {}".format(fn, tuple(parameters))) + call_with.append(kwargs[parameter]) + return fn(*call_with) diff --git a/tests/test_utils.py b/tests/test_utils.py index 7e4f1a8e..59b80a67 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -446,3 +446,14 @@ async def test_request_post_vars(): request = Request(scope, receive) assert {"foo": "bar", "baz": "1"} == await request.post_vars() + + +def test_call_with_supported_arguments(): + def foo(a, b): + return "{}+{}".format(a, b) + + assert "1+2" == utils.call_with_supported_arguments(foo, a=1, b=2) + assert "1+2" == utils.call_with_supported_arguments(foo, a=1, b=2, c=3) + + with pytest.raises(TypeError): + utils.call_with_supported_arguments(foo, a=1) From da87e963bff24e47878a5bc2025c8bfc63d4bc93 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 13:16:02 -0700 Subject: [PATCH 0227/2113] Test that plugin hooks are unit tested (xfail) This currently fails using xfail. Closes 771. 
--- tests/test_plugins.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 8b6a6b41..1546de92 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -7,7 +7,7 @@ from .fixtures import ( TestClient as _TestClient, ) # noqa from datasette.app import Datasette -from datasette.plugins import get_plugins, DEFAULT_PLUGINS +from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm from datasette.utils import sqlite3 import base64 import json @@ -20,6 +20,20 @@ import pytest import urllib +@pytest.mark.xfail +@pytest.mark.parametrize( + "plugin_hook", [name for name in dir(pm.hook) if not name.startswith("_")] +) +def test_plugin_hooks_have_tests(plugin_hook): + "Every plugin hook should be referenced in this test module" + tests_in_this_module = [t for t in globals().keys() if t.startswith("test_")] + ok = False + for test in tests_in_this_module: + if plugin_hook in test: + ok = True + assert ok, "Plugin hook is missing tests: {}".format(plugin_hook) + + def test_plugins_dir_plugin_prepare_connection(app_client): response = app_client.get( "/fixtures.json?sql=select+convert_units(100%2C+'m'%2C+'ft')" From af5702220c06a913746c9657bd33c2108d80c33f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 13:34:12 -0700 Subject: [PATCH 0228/2113] Added datasette-media plugin to the docs --- docs/ecosystem.rst | 5 +++++ docs/plugins.rst | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst index 8c8785cd..4777cc16 100644 --- a/docs/ecosystem.rst +++ b/docs/ecosystem.rst @@ -97,6 +97,11 @@ datasette-json-html `datasette-json-html `__ renders HTML in Datasette's table view driven by JSON returned from your SQL queries. This provides a way to embed images, links and lists of links directly in Datasette's main interface, defined using custom SQL statements. +datasette-media +--------------- + +`datasette-media `__ adds the ability to serve media files such as images directly, configured through a SQL query that maps a URL parameter to a path to a file on disk. It can also serve resized image thumbnails. + datasette-jellyfish ------------------- diff --git a/docs/plugins.rst b/docs/plugins.rst index 50271e0a..78ad0309 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -887,4 +887,4 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att return add_x_databases_header return wrap_with_databases_header -Examples: `datasette-auth-github `_, `datasette-search-all `_ +Examples: `datasette-auth-github `_, `datasette-search-all `_, `datasette-media `_ From ad88c9b3f3d7886612dae6afed65d43940632b06 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 14:52:03 -0700 Subject: [PATCH 0229/2113] Mechanism for adding a default URL fragment to a canned query Closes #767 --- datasette/templates/database.html | 6 ++--- docs/sql_queries.rst | 40 ++++++++++++++++++++++++++++--- tests/fixtures.py | 1 + tests/test_html.py | 15 ++++++++++++ 4 files changed, 56 insertions(+), 6 deletions(-) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 7d98f0e5..e47b2418 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -48,7 +48,7 @@ {% endif %} {% if views %} -

Views

+

Views

    {% for view in views %}
  • {{ view }}
  • @@ -57,10 +57,10 @@ {% endif %} {% if queries %} -

    Queries

    +

    Queries

    {% endif %} diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index da10191e..c3efd930 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -72,7 +72,9 @@ Canned queries -------------- As an alternative to adding views to your database, you can define canned -queries inside your ``metadata.json`` file. Here's an example:: +queries inside your ``metadata.json`` file. Here's an example: + +.. code-block:: json { "databases": { @@ -86,7 +88,7 @@ queries inside your ``metadata.json`` file. Here's an example:: } } -Then run datasette like this:: +Then run Datasette like this:: datasette sf-trees.db -m metadata.json @@ -104,6 +106,11 @@ title and description on the canned query page. As with regular table metadata you can alternatively specify ``"description_html"`` to have your description rendered as HTML (rather than having HTML special characters escaped). +.. _canned_queries_named_parameters: + +Named parameters +~~~~~~~~~~~~~~~~ + Canned queries support named parameters, so if you include those in the SQL you will then be able to enter them using the form fields on the canned query page or by adding them to the URL. This means canned queries can be used to create +Here's an example of a canned query with a named parameter: select neighborhood, facet_cities.name, state from facetable join facet_cities on facetable.city_id = facet_cities.id where neighborhood like '%' || :text || '%' order by neighborhood; -In the canned query JSON it looks like this:: +In the canned query JSON it looks like this: + +.. code-block:: json { "databases": { @@ -139,6 +148,31 @@ https://latest.datasette.io/fixtures/neighborhood_search?text=town Note that we are using SQLite string concatenation here - the ``||`` operator - to add wildcard ``%`` characters to the string provided by the user. +.. _canned_queries_default_fragment: + +Setting a default fragment +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Some plugins, such as `datasette-vega `__, can be configured by including additional data in the fragment hash of the URL - the bit that comes after a ``#`` symbol. + +You can set a default fragment hash that will be included in the link to the canned query from the database index page using the ``"fragment"`` key: + +.. code-block:: json + + { + "databases": { + "fixtures": { + "queries": { + "neighborhood_search": { + "sql": "select neighborhood, facet_cities.name, state\nfrom facetable join facet_cities on facetable.city_id = facet_cities.id\nwhere neighborhood like '%' || :text || '%' order by neighborhood;", + "fragment": "fragment-goes-here" + } + } + } + } + } + +`See here `__ for a demo of this in action. .. 
_pagination: Pagination diff --git a/tests/fixtures.py b/tests/fixtures.py index 0284ff9c..a3b75f9f 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -327,6 +327,7 @@ METADATA = { ), "title": "Search neighborhoods", "description_html": "Demonstrating simple like search", + "fragment": "fragment-goes-here", }, }, } diff --git a/tests/test_html.py b/tests/test_html.py index 445f7b4c..5a07953e 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -85,6 +85,21 @@ def test_database_page_redirects_with_url_hash(app_client_with_hash): assert "fixtures" in response.text +def test_database_page(app_client): + response = app_client.get("/fixtures") + soup = Soup(response.body, "html.parser") + queries_ul = soup.find("h2", text="Queries").find_next_sibling("ul") + assert queries_ul is not None + assert [ + ( + "/fixtures/%F0%9D%90%9C%F0%9D%90%A2%F0%9D%90%AD%F0%9D%90%A2%F0%9D%90%9E%F0%9D%90%AC", + "𝐜𝐢𝐭𝐢𝐞𝐬", + ), + ("/fixtures/pragma_cache_size", "pragma_cache_size"), + ("/fixtures/neighborhood_search#fragment-goes-here", "Search neighborhoods"), + ] == [(a["href"], a.text) for a in queries_ul.find_all("a")] + + def test_invalid_custom_sql(app_client): response = app_client.get("/fixtures?sql=.schema") assert response.status == 400 From 6d7cb02f00010d3cb4b4bac0460d41277652b80e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 15:17:53 -0700 Subject: [PATCH 0230/2113] Documentation for request object, refs #706 --- docs/internals.rst | 55 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/docs/internals.rst b/docs/internals.rst index 7b4c1755..5535ceb1 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -225,3 +225,58 @@ Here's an example of ``block=True`` in action: num_rows_left = await database.execute_write_fn(my_action, block=True) except Exception as e: print("An error occurred:", e) + +.. _internals_request: + +Request object +~~~~~~~~~~~~~~ + +The request object is passed to various plugin hooks. It represents an incoming HTTP request. It has the following properties: + +``.scope`` - dictionary + The ASGI scope that was used to construct this request, described in the `ASGI HTTP connection scope `__ specification. + +``.method`` - string + The HTTP method for this request, usually ``GET`` or ``POST``. + +``.url`` - string + The full URL for this request, e.g. ``https://latest.datasette.io/fixtures``. + +``.scheme`` - string + The request scheme - usually ``https`` or ``http``. + +``.headers`` - dictionary (str -> str) + A dictionary of incoming HTTP request headers. + +``.host`` - string + The host header from the incoming request, e.g. ``latest.datasette.io`` or ``localhost``. + +``.path`` - string + The path of the request, e.g. ``/fixtures``. + +``.query_string`` - string + The querystring component of the request, without the ``?`` - e.g. ``name__contains=sam&age__gt=10``. + +``.args`` - RequestParameters + An object representing the parsed querystring parameters, see below. + +``.raw_args`` - dictionary + A dictionary mapping querystring keys to values. If multiple keys of the same kind are provided, e.g. ``?foo=1&foo=2``, only the first value will be present in this dictionary. + +The object also has one awaitable method: + +``await request.post_vars()`` - dictionary + Returns a dictionary of form variables that were submitted in the request body via ``POST``. 
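As an illustrative aside - not part of this patch - a plugin receiving this request object might read those properties like so. The ``extra_template_vars`` hook is real, but the body is just a sketch:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def extra_template_vars(request):
        # Sketch only: surface a few request properties to templates
        return {
            "request_method": request.method,  # e.g. "GET"
            "request_path": request.path,  # e.g. "/fixtures"
            "request_host": request.host,  # e.g. "latest.datasette.io"
            "q": request.args.get("q", ""),  # first ?q= value, default ""
        }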
+ +The RequestParameters class +--------------------------- + +This class, returned by ``request.args``, is a subclass of a Python dictionary that provides methods for working with keys that map to lists of values. + +Consider the querystring ``?foo=1&foo=2``. This will produce a ``request.args`` that looks like this:: + + RequestParameters({"foo": ["1", "2"]}) + +Calling ``request.args.get("foo")`` will return the first value, ``"1"``. If that key is not present it will return ``None`` - or the second argument if you passed one, which will be used as the default. + +Calling ``request.args.getlist("foo")`` will return the full list, ``["1", "2"]``. \ No newline at end of file From 50652f474b94e83d49ee15f219820cdbfc450f11 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 15:29:42 -0700 Subject: [PATCH 0231/2113] Stop using .raw_args, deprecate and undocument it - refs #706 --- datasette/utils/asgi.py | 1 + datasette/views/database.py | 4 ++-- datasette/views/table.py | 8 ++++---- docs/internals.rst | 3 --- 4 files changed, 7 insertions(+), 9 deletions(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 20047bb5..62a2a0c8 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -65,6 +65,7 @@ class Request: @property def raw_args(self): + # Deprecated, undocumented - may be removed in Datasette 1.0 return {key: value[0] for key, value in self.args.items()} async def post_vars(self): diff --git a/datasette/views/database.py b/datasette/views/database.py index 92e24f84..cd27dd5f 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -24,7 +24,7 @@ class DatabaseView(DataView): if request.args.get("sql"): if not self.ds.config("allow_sql"): raise DatasetteError("sql= is not allowed", status=400) - sql = request.raw_args.pop("sql") + sql = request.args.get("sql") validate_sql_select(sql) return await QueryView(self.ds).data( request, database, hash, sql, _size=_size, metadata=metadata ) @@ -107,7 +107,7 @@ class QueryView(DataView): metadata=None, _size=None, ): - params = request.raw_args + params = {key: request.args.get(key) for key in request.args} if "sql" in params: params.pop("sql") if "_shape" in params: diff --git a/datasette/views/table.py b/datasette/views/table.py index 3289e58b..aab4bbe3 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -527,7 +527,7 @@ class TableView(RowTableShared): extra_args = {} # Handle ?_size=500 - page_size = _size or request.raw_args.get("_size") + page_size = _size or request.args.get("_size") if page_size: if page_size == "max": page_size = self.ds.max_returned_rows @@ -558,8 +558,8 @@ class TableView(RowTableShared): sql_no_limit=sql_no_limit.rstrip(), limit=page_size + 1, offset=offset ) - if request.raw_args.get("_timelimit"): - extra_args["custom_time_limit"] = int(request.raw_args["_timelimit"]) + if request.args.get("_timelimit"): + extra_args["custom_time_limit"] = int(request.args["_timelimit"]) results = await db.execute(sql, params, truncate=True, **extra_args) @@ -890,7 +890,7 @@ class RowView(RowTableShared): "units": self.ds.table_metadata(database, table).get("units", {}), } - if "foreign_key_tables" in (request.raw_args.get("_extras") or "").split(","): + if "foreign_key_tables" in (request.args.get("_extras") or "").split(","): data["foreign_key_tables"] = await self.foreign_key_tables( database, table, pk_values ) diff --git a/docs/internals.rst b/docs/internals.rst index 5535ceb1..5bcb9da9 100644 --- a/docs/internals.rst +++ 
b/docs/internals.rst @@ -260,9 +260,6 @@ The request object is passed to various plugin hooks. It represents an incoming ``.args`` - RequestParameters An object representing the parsed querystring parameters, see below. -``.raw_args`` - dictionary - A dictionary mapping querystring keys to values. If multiple keys of the same kind are provided, e.g. ``?foo=1&foo=2``, only the first value will be present in this dictionary. - The object also has one awaitable method: ``await request.post_vars()`` - dictionary From 4b96857f170e329a73186e703cc0d9ca4e8719cc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 15:35:25 -0700 Subject: [PATCH 0232/2113] Link to request object documentation, refs #706 --- docs/plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index 78ad0309..feb14593 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -669,7 +669,7 @@ Extra template variables that should be made available in the rendered template The name of the view being displayed. (`index`, `database`, `table`, and `row` are the most important ones.) ``request`` - object - The current HTTP request object. ``request.scope`` provides access to the ASGI scope. + The current HTTP :ref:`internals_request`. ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` From 446e5de65d1b9c6c877e38b0ef13bc9285c465a1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 17:57:25 -0700 Subject: [PATCH 0233/2113] Refactored test plugins into tests/plugins, closes #775 --- tests/fixtures.py | 182 +---------------------------------- tests/plugins/my_plugin.py | 89 +++++++++++++++++ tests/plugins/my_plugin_2.py | 94 ++++++++++++++++++ tests/plugins/view_name.py | 9 ++ tests/test_api.py | 1 + tests/test_custom_pages.py | 16 +-- 6 files changed, 197 insertions(+), 194 deletions(-) create mode 100644 tests/plugins/my_plugin.py create mode 100644 tests/plugins/my_plugin_2.py create mode 100644 tests/plugins/view_name.py diff --git a/tests/fixtures.py b/tests/fixtures.py index a3b75f9f..1eaa1dfe 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -19,6 +19,8 @@ from urllib.parse import unquote, quote # This temp file is used by one of the plugin config tests TEMP_PLUGIN_SECRET_FILE = os.path.join(tempfile.gettempdir(), "plugin-secret") +PLUGINS_DIR = str(pathlib.Path(__file__).parent / "plugins") + class TestResponse: def __init__(self, status, headers, body): @@ -109,7 +111,6 @@ def make_app_client( inspect_data=None, static_mounts=None, template_dir=None, - extra_plugins=None, ): with tempfile.TemporaryDirectory() as tmpdir: filepath = os.path.join(tmpdir, filename) @@ -130,12 +131,6 @@ def make_app_client( sqlite3.connect(extra_filepath).executescript(extra_sql) files.append(extra_filepath) os.chdir(os.path.dirname(filepath)) - plugins_dir = os.path.join(tmpdir, "plugins") - os.mkdir(plugins_dir) - open(os.path.join(plugins_dir, "my_plugin.py"), "w").write(PLUGIN1) - open(os.path.join(plugins_dir, "my_plugin_2.py"), "w").write(PLUGIN2) - for filename, content in (extra_plugins or {}).items(): - open(os.path.join(plugins_dir, filename), "w").write(content) config = config or {} config.update( { @@ -150,7 +145,7 @@ def make_app_client( memory=memory, cors=cors, metadata=METADATA, - plugins_dir=plugins_dir, + plugins_dir=PLUGINS_DIR, config=config, inspect_data=inspect_data, static_mounts=static_mounts, @@ -334,177 +329,6 @@ METADATA = { }, } 
-PLUGIN1 = """ -from datasette import hookimpl -import base64 -import pint -import json - -ureg = pint.UnitRegistry() - - -@hookimpl -def prepare_connection(conn, database, datasette): - def convert_units(amount, from_, to_): - "select convert_units(100, 'm', 'ft');" - return (amount * ureg(from_)).to(to_).to_tuple()[0] - conn.create_function('convert_units', 3, convert_units) - def prepare_connection_args(): - return 'database={}, datasette.plugin_config("name-of-plugin")={}'.format( - database, datasette.plugin_config("name-of-plugin") - ) - conn.create_function('prepare_connection_args', 0, prepare_connection_args) - - -@hookimpl -def extra_css_urls(template, database, table, datasette): - return ['https://plugin-example.com/{}/extra-css-urls-demo.css'.format( - base64.b64encode(json.dumps({ - "template": template, - "database": database, - "table": table, - }).encode("utf8")).decode("utf8") - )] - - -@hookimpl -def extra_js_urls(): - return [{ - 'url': 'https://plugin-example.com/jquery.js', - 'sri': 'SRIHASH', - }, 'https://plugin-example.com/plugin1.js'] - - -@hookimpl -def extra_body_script(template, database, table, datasette): - return 'var extra_body_script = {};'.format( - json.dumps({ - "template": template, - "database": database, - "table": table, - "config": datasette.plugin_config( - "name-of-plugin", - database=database, - table=table, - ) - }) - ) - - -@hookimpl -def render_cell(value, column, table, database, datasette): - # Render some debug output in cell with value RENDER_CELL_DEMO - if value != "RENDER_CELL_DEMO": - return None - return json.dumps({ - "column": column, - "table": table, - "database": database, - "config": datasette.plugin_config( - "name-of-plugin", - database=database, - table=table, - ) - }) - - -@hookimpl -def extra_template_vars(template, database, table, view_name, request, datasette): - return { - "extra_template_vars": json.dumps({ - "template": template, - "scope_path": request.scope["path"] if request else None - }, default=lambda b: b.decode("utf8")) - } -""" - -PLUGIN2 = """ -from datasette import hookimpl -from functools import wraps -import jinja2 -import json - - -@hookimpl -def extra_js_urls(): - return [{ - 'url': 'https://plugin-example.com/jquery.js', - 'sri': 'SRIHASH', - }, 'https://plugin-example.com/plugin2.js'] - - -@hookimpl -def render_cell(value, database): - # Render {"href": "...", "label": "..."} as link - if not isinstance(value, str): - return None - stripped = value.strip() - if not stripped.startswith("{") and stripped.endswith("}"): - return None - try: - data = json.loads(value) - except ValueError: - return None - if not isinstance(data, dict): - return None - if set(data.keys()) != {"href", "label"}: - return None - href = data["href"] - if not ( - href.startswith("/") or href.startswith("http://") - or href.startswith("https://") - ): - return None - return jinja2.Markup( - '{label}'.format( - database=database, - href=jinja2.escape(data["href"]), - label=jinja2.escape(data["label"] or "") or " " - ) - ) - - -@hookimpl -def extra_template_vars(template, database, table, view_name, request, datasette): - async def query_database(sql): - first_db = list(datasette.databases.keys())[0] - return ( - await datasette.execute(first_db, sql) - ).rows[0][0] - async def inner(): - return { - "extra_template_vars_from_awaitable": json.dumps({ - "template": template, - "scope_path": request.scope["path"] if request else None, - "awaitable": True, - }, default=lambda b: b.decode("utf8")), - "query_database": query_database, - 
} - return inner - - -@hookimpl -def asgi_wrapper(datasette): - def wrap_with_databases_header(app): - @wraps(app) - async def add_x_databases_header(scope, recieve, send): - async def wrapped_send(event): - if event["type"] == "http.response.start": - original_headers = event.get("headers") or [] - event = { - "type": event["type"], - "status": event["status"], - "headers": original_headers + [ - [b"x-databases", - ", ".join(datasette.databases.keys()).encode("utf-8")] - ], - } - await send(event) - await app(scope, recieve, wrapped_send) - return add_x_databases_header - return wrap_with_databases_header -""" - TABLES = ( """ CREATE TABLE simple_primary_key ( diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py new file mode 100644 index 00000000..e55a0a32 --- /dev/null +++ b/tests/plugins/my_plugin.py @@ -0,0 +1,89 @@ +from datasette import hookimpl +import base64 +import pint +import json + +ureg = pint.UnitRegistry() + + +@hookimpl +def prepare_connection(conn, database, datasette): + def convert_units(amount, from_, to_): + "select convert_units(100, 'm', 'ft');" + return (amount * ureg(from_)).to(to_).to_tuple()[0] + + conn.create_function("convert_units", 3, convert_units) + + def prepare_connection_args(): + return 'database={}, datasette.plugin_config("name-of-plugin")={}'.format( + database, datasette.plugin_config("name-of-plugin") + ) + + conn.create_function("prepare_connection_args", 0, prepare_connection_args) + + +@hookimpl +def extra_css_urls(template, database, table, datasette): + return [ + "https://plugin-example.com/{}/extra-css-urls-demo.css".format( + base64.b64encode( + json.dumps( + {"template": template, "database": database, "table": table,} + ).encode("utf8") + ).decode("utf8") + ) + ] + + +@hookimpl +def extra_js_urls(): + return [ + {"url": "https://plugin-example.com/jquery.js", "sri": "SRIHASH",}, + "https://plugin-example.com/plugin1.js", + ] + + +@hookimpl +def extra_body_script(template, database, table, datasette): + return "var extra_body_script = {};".format( + json.dumps( + { + "template": template, + "database": database, + "table": table, + "config": datasette.plugin_config( + "name-of-plugin", database=database, table=table, + ), + } + ) + ) + + +@hookimpl +def render_cell(value, column, table, database, datasette): + # Render some debug output in cell with value RENDER_CELL_DEMO + if value != "RENDER_CELL_DEMO": + return None + return json.dumps( + { + "column": column, + "table": table, + "database": database, + "config": datasette.plugin_config( + "name-of-plugin", database=database, table=table, + ), + } + ) + + +@hookimpl +def extra_template_vars(template, database, table, view_name, request, datasette): + return { + "extra_template_vars": json.dumps( + { + "template": template, + "scope_path": request.scope["path"] if request else None, + }, + default=lambda b: b.decode("utf8"), + ) + } diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py new file mode 100644 index 00000000..fdc6956d --- /dev/null +++ b/tests/plugins/my_plugin_2.py @@ -0,0 +1,94 @@ +from datasette import hookimpl +from functools import wraps +import jinja2 +import json + + +@hookimpl +def extra_js_urls(): + return [ + {"url": "https://plugin-example.com/jquery.js", "sri": "SRIHASH",}, + "https://plugin-example.com/plugin2.js", + ] + + +@hookimpl +def render_cell(value, database): + # Render {"href": "...", "label": "..."} as link + if not isinstance(value, str): + return None + stripped = value.strip() + if not stripped.startswith("{") 
and stripped.endswith("}"): + return None + try: + data = json.loads(value) + except ValueError: + return None + if not isinstance(data, dict): + return None + if set(data.keys()) != {"href", "label"}: + return None + href = data["href"] + if not ( + href.startswith("/") + or href.startswith("http://") + or href.startswith("https://") + ): + return None + return jinja2.Markup( + '{label}'.format( + database=database, + href=jinja2.escape(data["href"]), + label=jinja2.escape(data["label"] or "") or " ", + ) + ) + + +@hookimpl +def extra_template_vars(template, database, table, view_name, request, datasette): + async def query_database(sql): + first_db = list(datasette.databases.keys())[0] + return (await datasette.execute(first_db, sql)).rows[0][0] + + async def inner(): + return { + "extra_template_vars_from_awaitable": json.dumps( + { + "template": template, + "scope_path": request.scope["path"] if request else None, + "awaitable": True, + }, + default=lambda b: b.decode("utf8"), + ), + "query_database": query_database, + } + + return inner + + +@hookimpl +def asgi_wrapper(datasette): + def wrap_with_databases_header(app): + @wraps(app) + async def add_x_databases_header(scope, recieve, send): + async def wrapped_send(event): + if event["type"] == "http.response.start": + original_headers = event.get("headers") or [] + event = { + "type": event["type"], + "status": event["status"], + "headers": original_headers + + [ + [ + b"x-databases", + ", ".join(datasette.databases.keys()).encode("utf-8"), + ] + ], + } + await send(event) + + await app(scope, recieve, wrapped_send) + + return add_x_databases_header + + return wrap_with_databases_header diff --git a/tests/plugins/view_name.py b/tests/plugins/view_name.py new file mode 100644 index 00000000..4d29ab67 --- /dev/null +++ b/tests/plugins/view_name.py @@ -0,0 +1,9 @@ +from datasette import hookimpl + + +@hookimpl +def extra_template_vars(view_name, request): + return { + "view_name": view_name, + "request": request, + } diff --git a/tests/test_api.py b/tests/test_api.py index 7edd7ee6..260d399b 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1267,6 +1267,7 @@ def test_plugins_json(app_client): "templates": False, "version": None, }, + {"name": "view_name.py", "static": False, "templates": False, "version": None}, ] == sorted(response.json, key=lambda p: p["name"]) diff --git a/tests/test_custom_pages.py b/tests/test_custom_pages.py index 8ac75ec8..c69facb5 100644 --- a/tests/test_custom_pages.py +++ b/tests/test_custom_pages.py @@ -1,22 +1,10 @@ import pytest from .fixtures import make_app_client -VIEW_NAME_PLUGIN = """ -from datasette import hookimpl - -@hookimpl -def extra_template_vars(view_name, request): - return { - "view_name": view_name, - "request": request, - } -""" - @pytest.fixture(scope="session") def custom_pages_client(tmp_path_factory): template_dir = tmp_path_factory.mktemp("page-templates") - extra_plugins = {"view_name.py": VIEW_NAME_PLUGIN} pages_dir = template_dir / "pages" pages_dir.mkdir() (pages_dir / "about.html").write_text("ABOUT! 
view_name:{{ view_name }}", "utf-8") @@ -39,9 +27,7 @@ def custom_pages_client(tmp_path_factory): nested_dir = pages_dir / "nested" nested_dir.mkdir() (nested_dir / "nest.html").write_text("Nest!", "utf-8") - for client in make_app_client( - template_dir=str(template_dir), extra_plugins=extra_plugins - ): + for client in make_app_client(template_dir=str(template_dir)): yield client From 52c4387c7d37c867104e3728cc1f4c4d1e100642 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 19:21:41 -0700 Subject: [PATCH 0234/2113] Redesigned register_output_renderer plugin hook, closes #581 --- datasette/app.py | 6 +- datasette/views/base.py | 18 +++++- datasette/views/database.py | 1 + docs/plugins.rst | 53 ++++++++++++--- tests/plugins/register_output_renderer.py | 42 ++++++++++++ tests/test_api.py | 20 +++--- tests/test_html.py | 4 ++ tests/test_plugins.py | 78 +++++++++++++++++++++++ 8 files changed, 202 insertions(+), 20 deletions(-) create mode 100644 tests/plugins/register_output_renderer.py diff --git a/datasette/app.py b/datasette/app.py index f1fcc5eb..941b2895 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -586,7 +586,11 @@ class Datasette: hook_renderers.append(hook) for renderer in hook_renderers: - self.renderers[renderer["extension"]] = renderer["callback"] + self.renderers[renderer["extension"]] = ( + # It used to be called "callback" - remove this in Datasette 1.0 + renderer.get("render") + or renderer["callback"] + ) async def render_template( self, templates, context=None, request=None, view_name=None diff --git a/datasette/views/base.py b/datasette/views/base.py index f5eafe63..5a5fe056 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -14,6 +14,7 @@ from datasette.database import QueryInterrupted from datasette.utils import ( InvalidSql, LimitedWriter, + call_with_supported_arguments, is_url, path_with_added_args, path_with_removed_args, @@ -387,7 +388,21 @@ class DataView(BaseView): if _format in self.ds.renderers.keys(): # Dispatch request to the correct output format renderer # (CSV is not handled here due to streaming) - result = self.ds.renderers[_format](request.args, data, self.name) + result = call_with_supported_arguments( + self.ds.renderers[_format], + datasette=self.ds, + columns=data.get("columns") or [], + rows=data.get("rows") or [], + sql=data.get("query", {}).get("sql", None), + query_name=data.get("query_name"), + database=database, + table=data.get("table"), + request=request, + view_name=self.name, + # These will be deprecated in Datasette 1.0: + args=request.args, + data=data, + ) if result is None: raise NotFound("No data") @@ -395,6 +410,7 @@ class DataView(BaseView): body=result.get("body"), status=result.get("status_code", 200), content_type=result.get("content_type", "text/plain"), + headers=result.get("headers"), ) else: extras = {} diff --git a/datasette/views/database.py b/datasette/views/database.py index cd27dd5f..15545fb8 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -188,6 +188,7 @@ class QueryView(DataView): return ( { "database": database, + "query_name": canned_query, "rows": results.rows, "truncated": results.truncated, "columns": columns, diff --git a/docs/plugins.rst b/docs/plugins.rst index feb14593..27f00476 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -744,19 +744,37 @@ Allows the plugin to register a new output renderer, to output data in a custom def register_output_renderer(datasette): return { "extension": "test", - "callback": render_test + 
"render": render_test } -This will register ``render_test`` to be called when paths with the extension ``.test`` (for example ``/database.test``, ``/database/table.test``, or ``/database/table/row.test``) are requested. When a request is received, the callback function is called with three positional arguments: +This will register ``render_test`` to be called when paths with the extension ``.test`` (for example ``/database.test``, ``/database/table.test``, or ``/database/table/row.test``) are requested. When a request is received, the callback function is called with zero or more of the following arguments. Datasette will inspect your callback function and pass arguments that match its function signature. -``args`` - dictionary - The GET parameters of the request +``datasette`` - :ref:`internals_datasette` + For accessing plugin configuration and executing queries. -``data`` - dictionary - The data to be rendered +``columns`` - list of strings + The names of the columns returned by this query. + +``rows`` - list of ``sqlite3.Row`` objects + The rows returned by the query. + +``sql`` - string + The SQL query that was executed. + +``query_name`` - string or None + If this was the execution of a :ref:`canned query `, the name of that query. + +``database`` - string + The name of the database. + +``table`` - string or None + The table or view, if one is being rendered. + +``request`` - :ref:`internals_request` + The incoming HTTP request. ``view_name`` - string - The name of the view where the renderer is being called. (``index``, ``database``, ``table``, and ``row`` are the most important ones.) + The name of the current view being called. ``index``, ``database``, ``table``, and ``row`` are the most important ones. The callback function can return ``None``, if it is unable to render the data, or a dictionary with the following keys: @@ -769,15 +787,34 @@ The callback function can return ``None``, if it is unable to render the data, o ``status_code`` - integer, optional The HTTP status code, default 200 +``headers`` - dictionary, optional + Extra HTTP headers to be returned in the response. + A simple example of an output renderer callback function: .. code-block:: python - def render_test(args, data, view_name): + def render_test(): return { "body": "Hello World" } +Here is a more complex example: + +.. code-block:: python + + def render_test(columns, rows): + first_row = " | ".join(columns) + lines = [first_row] + lines.append("=" * len(first_row)) + for row in rows: + lines.append(" | ".join(row)) + return { + "body": "Hello World", + "content_type": "text/plain; charset=utf-8", + "headers": {"x-pipes": "yay-pipes"} + } + Examples: `datasette-atom `_, `datasette-ics `_ .. 
_plugin_register_facet_classes: diff --git a/tests/plugins/register_output_renderer.py b/tests/plugins/register_output_renderer.py new file mode 100644 index 00000000..2ea5660e --- /dev/null +++ b/tests/plugins/register_output_renderer.py @@ -0,0 +1,42 @@ +from datasette import hookimpl +import json + + +def render_test_all_parameters( + datasette, columns, rows, sql, query_name, database, table, request, view_name, data +): + headers = {} + for custom_header in request.args.getlist("header") or []: + key, value = custom_header.split(":") + headers[key] = value + return { + "body": json.dumps( + { + "datasette": datasette, + "columns": columns, + "rows": rows, + "sql": sql, + "query_name": query_name, + "database": database, + "table": table, + "request": request, + "view_name": view_name, + }, + default=repr, + ), + "content_type": request.args.get("content_type", "text/plain"), + "status_code": int(request.args.get("status_code", 200)), + "headers": headers, + } + + +def render_test_no_parameters(): + return {"body": "Hello"} + + +@hookimpl +def register_output_renderer(datasette): + return [ + {"extension": "testall", "render": render_test_all_parameters}, + {"extension": "testnone", "callback": render_test_no_parameters}, + ] diff --git a/tests/test_api.py b/tests/test_api.py index 260d399b..f92da45e 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1259,16 +1259,16 @@ def test_threads_json(app_client): def test_plugins_json(app_client): response = app_client.get("/-/plugins.json") - assert [ - {"name": "my_plugin.py", "static": False, "templates": False, "version": None}, - { - "name": "my_plugin_2.py", - "static": False, - "templates": False, - "version": None, - }, - {"name": "view_name.py", "static": False, "templates": False, "version": None}, - ] == sorted(response.json, key=lambda p: p["name"]) + expected = [ + {"name": name, "static": False, "templates": False, "version": None} + for name in ( + "my_plugin.py", + "my_plugin_2.py", + "register_output_renderer.py", + "view_name.py", + ) + ] + assert expected == sorted(response.json, key=lambda p: p["name"]) def test_versions_json(app_client): diff --git a/tests/test_html.py b/tests/test_html.py index 5a07953e..e602bf0e 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -546,6 +546,8 @@ def test_table_csv_json_export_interface(app_client): actual = [l["href"].split("/")[-1] for l in links] expected = [ "simple_primary_key.json?id__gt=2", + "simple_primary_key.testall?id__gt=2", + "simple_primary_key.testnone?id__gt=2", "simple_primary_key.csv?id__gt=2&_size=max", "#export", ] @@ -582,6 +584,8 @@ def test_csv_json_export_links_include_labels_if_foreign_keys(app_client): actual = [l["href"].split("/")[-1] for l in links] expected = [ "facetable.json?_labels=on", + "facetable.testall?_labels=on", + "facetable.testnone?_labels=on", "facetable.csv?_labels=on&_size=max", "#export", ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 1546de92..0e4186d5 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -19,6 +19,8 @@ import textwrap import pytest import urllib +at_memory_re = re.compile(r" at 0x\w+") + @pytest.mark.xfail @pytest.mark.parametrize( @@ -329,3 +331,79 @@ def test_view_names(view_names_client, path, view_name): response = view_names_client.get(path) assert response.status == 200 assert "view_name:{}".format(view_name) == response.body.decode("utf8") + + +def test_register_output_renderer_no_parameters(app_client): + response = app_client.get("/fixtures/facetable.testnone") + 
assert 200 == response.status + assert b"Hello" == response.body + + +def test_register_output_renderer_all_parameters(app_client): + response = app_client.get("/fixtures/facetable.testall") + assert 200 == response.status + # Lots of 'at 0x103a4a690' in here - replace those so we can do + # an easy comparison + body = response.body.decode("utf-8") + body = at_memory_re.sub(" at 0xXXX", body) + assert { + "datasette": "", + "columns": [ + "pk", + "created", + "planet_int", + "on_earth", + "state", + "city_id", + "neighborhood", + "tags", + "complex_array", + "distinct_some_null", + ], + "rows": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + ], + "sql": "select pk, created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array, distinct_some_null from facetable order by pk limit 51", + "query_name": None, + "database": "fixtures", + "table": "facetable", + "request": "", + "view_name": "table", + } == json.loads(body) + # Test that query_name is set correctly + query_response = app_client.get("/fixtures/pragma_cache_size.testall") + assert "pragma_cache_size" == json.loads(query_response.body)["query_name"] + + +def test_register_output_renderer_custom_status_code(app_client): + response = app_client.get("/fixtures/pragma_cache_size.testall?status_code=202") + assert 202 == response.status + + +def test_register_output_renderer_custom_content_type(app_client): + response = app_client.get( + "/fixtures/pragma_cache_size.testall?content_type=text/blah" + ) + assert "text/blah" == response.headers["content-type"] + + +def test_register_output_renderer_custom_headers(app_client): + response = app_client.get( + "/fixtures/pragma_cache_size.testall?header=x-wow:1&header=x-gosh:2" + ) + assert "1" == response.headers["x-wow"] + assert "2" == response.headers["x-gosh"] From 57f48b8416f5e13df138d63db5bfffd0bb99a9b4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 19:43:30 -0700 Subject: [PATCH 0235/2113] Made register_output_renderer callback optionally awaitable, closes #776 --- datasette/views/base.py | 2 ++ docs/plugins.rst | 16 +++++++++++----- tests/plugins/register_output_renderer.py | 4 +++- tests/test_plugins.py | 1 + 4 files changed, 17 insertions(+), 6 deletions(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index 5a5fe056..d56fd2f6 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -403,6 +403,8 @@ class DataView(BaseView): args=request.args, data=data, ) + if asyncio.iscoroutine(result): + result = await result if result is None: raise NotFound("No data") diff --git a/docs/plugins.rst b/docs/plugins.rst index 27f00476..ebf6adf6 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -736,7 +736,7 @@ register_output_renderer(datasette) ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` -Allows the plugin to register a new output renderer, to output data in a custom format. The hook function should return a dictionary, or a list of dictionaries, which contain the file extension you want to handle and a callback function: +Registers a new output renderer, to output data in a custom format. The hook function should return a dictionary, or a list of dictionaries, of the following shape: .. 
code-block:: python @@ -747,7 +747,11 @@ Allows the plugin to register a new output renderer, to output data in a custom "render": render_test } -This will register ``render_test`` to be called when paths with the extension ``.test`` (for example ``/database.test``, ``/database/table.test``, or ``/database/table/row.test``) are requested. When a request is received, the callback function is called with zero or more of the following arguments. Datasette will inspect your callback function and pass arguments that match its function signature. +This will register ``render_test`` to be called when paths with the extension ``.test`` (for example ``/database.test``, ``/database/table.test``, or ``/database/table/row.test``) are requested. + +``render_test`` is a Python function. It can be a regular function or an ``async def render_test()`` awaitable function, depending on if it needs to make any asynchronous calls. + +When a request is received, the callback function is called with zero or more of the following arguments. Datasette will inspect your callback function and pass arguments that match its function signature. ``datasette`` - :ref:`internals_datasette` For accessing plugin configuration and executing queries. @@ -803,16 +807,18 @@ Here is a more complex example: .. code-block:: python - def render_test(columns, rows): + async def render_test(datasette, columns, rows): + db = next(iter(datasette.databases.values())) + result = await db.execute("select sqlite_version()") first_row = " | ".join(columns) lines = [first_row] lines.append("=" * len(first_row)) for row in rows: lines.append(" | ".join(row)) return { - "body": "Hello World", + "body": "\n".join(lines), "content_type": "text/plain; charset=utf-8", - "headers": {"x-pipes": "yay-pipes"} + "headers": {"x-sqlite-version": result.first()[0]}, } Examples: `datasette-atom `_, `datasette-ics `_ diff --git a/tests/plugins/register_output_renderer.py b/tests/plugins/register_output_renderer.py index 2ea5660e..d4c1228d 100644 --- a/tests/plugins/register_output_renderer.py +++ b/tests/plugins/register_output_renderer.py @@ -2,13 +2,14 @@ from datasette import hookimpl import json -def render_test_all_parameters( +async def render_test_all_parameters( datasette, columns, rows, sql, query_name, database, table, request, view_name, data ): headers = {} for custom_header in request.args.getlist("header") or []: key, value = custom_header.split(":") headers[key] = value + result = await datasette.databases["fixtures"].execute("select 1 + 1") return { "body": json.dumps( { @@ -21,6 +22,7 @@ def render_test_all_parameters( "table": table, "request": request, "view_name": view_name, + "1+1": result.first()[0], }, default=repr, ), diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 0e4186d5..94b69c1f 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -347,6 +347,7 @@ def test_register_output_renderer_all_parameters(app_client): body = response.body.decode("utf-8") body = at_memory_re.sub(" at 0xXXX", body) assert { + "1+1": 2, "datasette": "", "columns": [ "pk", From cbeea23d00b36f72386e68b67d76fdb8a151a486 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 20:13:32 -0700 Subject: [PATCH 0236/2113] Test for prepare_jinja2_environment, refs #773 --- tests/plugins/my_plugin.py | 5 +++++ tests/test_plugins.py | 10 ++++++++++ 2 files changed, 15 insertions(+) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index e55a0a32..434a1977 100644 --- a/tests/plugins/my_plugin.py +++ 
b/tests/plugins/my_plugin.py @@ -87,3 +87,8 @@ def extra_template_vars(template, database, table, view_name, request, datasette default=lambda b: b.decode("utf8"), ) } + + +@hookimpl +def prepare_jinja2_environment(env): + env.filters["format_numeric"] = lambda s: "{:,.0f}".format(float(s)) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 94b69c1f..1bfd9d3f 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -9,6 +9,7 @@ from .fixtures import ( from datasette.app import Datasette from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm from datasette.utils import sqlite3 +from jinja2.environment import Template import base64 import json import os @@ -408,3 +409,12 @@ def test_register_output_renderer_custom_headers(app_client): ) assert "1" == response.headers["x-wow"] assert "2" == response.headers["x-gosh"] + + +@pytest.mark.asyncio +async def test_prepare_jinja2_environment(app_client): + template = app_client.ds.jinja_env.from_string( + "Hello there, {{ a|format_numeric }}", {"a": 3412341} + ) + rendered = await app_client.ds.render_template(template) + assert "Hello there, 3,412,341" == rendered From defead17a4c9d68670ba2d9aeec9c2a70b5b059e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 20:30:32 -0700 Subject: [PATCH 0237/2113] Test for publish_subcommand hook, refs #773 --- tests/test_plugins.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 1bfd9d3f..9ebf455a 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -7,6 +7,7 @@ from .fixtures import ( TestClient as _TestClient, ) # noqa from datasette.app import Datasette +from datasette import cli from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm from datasette.utils import sqlite3 from jinja2.environment import Template @@ -418,3 +419,12 @@ async def test_prepare_jinja2_environment(app_client): ) rendered = await app_client.ds.render_template(template) assert "Hello there, 3,412,341" == rendered + + +def test_publish_subcommand(): + # This is hard to test properly, because publish subcommand plugins + # cannot be loaded using the --plugins-dir mechanism - they need + # to be installed using "pip install". So I'm cheating and taking + # advantage of the fact that cloudrun/heroku use the plugin hook + # to register themselves as default plugins. + assert ["cloudrun", "heroku"] == cli.publish.list_commands({}) From 6d95cb4f9146a5c4584a147bdf243c778a0f23f5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 21:09:16 -0700 Subject: [PATCH 0238/2113] Unit test for register_facet_classes plugin, closes #773 I was a bit lazy with this one. I didn't hook up a test for the facet_results mechanism. The custom facet hook isn't a great design so I will probably rethink it at some point in the future anyway. 
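For orientation, the interface exercised by the DummyFacet in the diff below, stripped to a skeleton - the ``type`` attribute plus the async ``suggest()`` and ``facet_results()`` methods are the actual contract, while ``SkeletonFacet`` itself is a placeholder that suggests nothing and returns no results:

.. code-block:: python

    from datasette import hookimpl
    from datasette.facets import Facet


    class SkeletonFacet(Facet):
        type = "skeleton"

        async def suggest(self):
            # Would return a list of {"name", "toggle_url", "type"} dicts
            return []

        async def facet_results(self):
            # Returns (facet_results, facets_timed_out)
            return {}, []


    @hookimpl
    def register_facet_classes():
        return [SkeletonFacet]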
--- tests/plugins/my_plugin.py | 34 +++++++++++++++++++++++++++++++ tests/test_plugins.py | 41 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 75 insertions(+) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 434a1977..10d7e7e6 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -1,4 +1,6 @@ from datasette import hookimpl +from datasette.facets import Facet +from datasette.utils import path_with_added_args import base64 import pint import json @@ -92,3 +94,35 @@ def extra_template_vars(template, database, table, view_name, request, datasette @hookimpl def prepare_jinja2_environment(env): env.filters["format_numeric"] = lambda s: "{:,.0f}".format(float(s)) + + +@hookimpl +def register_facet_classes(): + return [DummyFacet] + + +class DummyFacet(Facet): + type = "dummy" + + async def suggest(self): + columns = await self.get_columns(self.sql, self.params) + return ( + [ + { + "name": column, + "toggle_url": self.ds.absolute_url( + self.request, + path_with_added_args(self.request, {"_facet_dummy": column}), + ), + "type": "dummy", + } + for column in columns + ] + if self.request.args.get("_dummy_facet") + else [] + ) + + async def facet_results(self): + facet_results = {} + facets_timed_out = [] + return facet_results, facets_timed_out diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 9ebf455a..2aadb252 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -428,3 +428,44 @@ def test_publish_subcommand(): # advantage of the fact that cloudrun/heroku use the plugin hook # to register themselves as default plugins. assert ["cloudrun", "heroku"] == cli.publish.list_commands({}) + + +def test_register_facet_classes(app_client): + response = app_client.get( + "/fixtures/compound_three_primary_keys.json?_dummy_facet=1" + ) + data = json.loads(response.body) + assert [ + { + "name": "pk1", + "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet_dummy=pk1", + "type": "dummy", + }, + { + "name": "pk2", + "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet_dummy=pk2", + "type": "dummy", + }, + { + "name": "pk3", + "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet_dummy=pk3", + "type": "dummy", + }, + { + "name": "content", + "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet_dummy=content", + "type": "dummy", + }, + { + "name": "pk1", + "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet=pk1", + }, + { + "name": "pk2", + "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet=pk2", + }, + { + "name": "pk3", + "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet=pk3", + }, + ] == data["suggested_facets"] From 510c1989d43cd9b7c9f116ad161b7380220ac5d5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 21:11:53 -0700 Subject: [PATCH 0239/2113] Removed xfail, refs #773 --- tests/test_plugins.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 2aadb252..e9556b31 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -24,7 +24,6 @@ import urllib at_memory_re = re.compile(r" at 0x\w+") -@pytest.mark.xfail @pytest.mark.parametrize( "plugin_hook", [name for name in dir(pm.hook) if not name.startswith("_")] ) From 
75cd432e5a96c5fe2577f839c3a059fd6bf41124 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 22:00:04 -0700 Subject: [PATCH 0240/2113] Ability to set custom table/view page size in metadata, closes #751 --- datasette/views/table.py | 2 +- docs/metadata.rst | 23 +++++++++++++++++++++ tests/fixtures.py | 1 + tests/test_api.py | 4 ++-- 4 files changed, 27 insertions(+), 3 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index aab4bbe3..d014db71 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -527,7 +527,7 @@ class TableView(RowTableShared): extra_args = {} # Handle ?_size=500 - page_size = _size or request.args.get("_size") + page_size = _size or request.args.get("_size") or table_metadata.get("size") if page_size: if page_size == "max": page_size = self.ds.max_returned_rows diff --git a/docs/metadata.rst b/docs/metadata.rst index 3cb1f739..88ad5854 100644 --- a/docs/metadata.rst +++ b/docs/metadata.rst @@ -156,6 +156,29 @@ Or use ``"sort_desc"`` to sort in descending order: } } +.. _metadata_page_size: + +Setting a custom page size +-------------------------- + +Datasette defaults to displaying 100 rows per page, for both tables and views. You can change this default page size on a per-table or per-view basis using the ``"size"`` key in ``metadata.json``: + +.. code-block:: json + + { + "databases": { + "mydatabase": { + "tables": { + "example_table": { + "size": 10 + } + } + } + } + } + +This size can still be overridden by passing e.g. ``?_size=50`` in the querystring. .. _metadata_sortable_columns: Setting which columns can be used for sorting diff --git a/tests/fixtures.py b/tests/fixtures.py index 1eaa1dfe..9479abf6 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -305,6 +305,7 @@ METADATA = { }, "attraction_characteristic": {"sort_desc": "pk"}, "facet_cities": {"sort": "name"}, + "paginated_view": {"size": 25}, }, "queries": { "𝐜𝐢𝐭𝐢𝐞𝐬": "select id, name from facet_cities order by id limit 1;", diff --git a/tests/test_api.py b/tests/test_api.py index f92da45e..eb80f8e7 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -754,9 +754,9 @@ def test_table_with_reserved_word_name(app_client): "path,expected_rows,expected_pages", [ ("/fixtures/no_primary_key.json", 201, 5), - ("/fixtures/paginated_view.json", 201, 5), + ("/fixtures/paginated_view.json", 201, 9), ("/fixtures/no_primary_key.json?_size=25", 201, 9), - ("/fixtures/paginated_view.json?_size=25", 201, 9), + ("/fixtures/paginated_view.json?_size=50", 201, 5), ("/fixtures/paginated_view.json?_size=max", 201, 3), ("/fixtures/123_starts_with_digits.json", 0, 1), # Ensure faceting doesn't break pagination: From 5ab411c733233435d613d04c610a5a41fd0b7735 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 27 May 2020 22:57:05 -0700 Subject: [PATCH 0241/2113] can_render mechanism for register_output_renderer, closes #770 --- datasette/app.py | 8 ++--- datasette/utils/__init__.py | 6 +++- datasette/views/base.py | 27 ++++++++++++++--- docs/plugins.rst | 22 ++++++++++---- tests/plugins/register_output_renderer.py | 26 +++++++++++++++- tests/test_plugins.py | 37 ++++++++++++++++++++++- 6 files changed, 108 insertions(+), 18 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 941b2895..40d39ac9 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -228,7 +228,7 @@ class Datasette: if config_dir and (config_dir / "config.json").exists() and not config: config = json.load((config_dir / "config.json").open()) self._config =
dict(DEFAULT_CONFIG, **(config or {})) - self.renderers = {} # File extension -> renderer function + self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note self.executor = futures.ThreadPoolExecutor( max_workers=self.config("num_sql_threads") @@ -574,7 +574,7 @@ class Datasette: def register_renderers(self): """ Register output renderers which output data in custom formats. """ # Built-in renderers - self.renderers["json"] = json_renderer + self.renderers["json"] = (json_renderer, lambda: True) # Hooks hook_renderers = [] @@ -588,8 +588,8 @@ class Datasette: for renderer in hook_renderers: self.renderers[renderer["extension"]] = ( # It used to be called "callback" - remove this in Datasette 1.0 - renderer.get("render") - or renderer["callback"] + renderer.get("render") or renderer["callback"], + renderer.get("can_render") or (lambda: True), ) async def render_template( diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 03157072..2dab8e14 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -811,6 +811,10 @@ def call_with_supported_arguments(fn, **kwargs): call_with = [] for parameter in parameters: if parameter not in kwargs: - raise TypeError("{} requires parameters {}".format(fn, tuple(parameters))) + raise TypeError( + "{} requires parameters {}, missing: {}".format( + fn, tuple(parameters), set(parameters) - set(kwargs.keys()) + ) + ) call_with.append(kwargs[parameter]) return fn(*call_with) diff --git a/datasette/views/base.py b/datasette/views/base.py index d56fd2f6..06b78d5f 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -389,7 +389,7 @@ class DataView(BaseView): # Dispatch request to the correct output format renderer # (CSV is not handled here due to streaming) result = call_with_supported_arguments( - self.ds.renderers[_format], + self.ds.renderers[_format][0], datasette=self.ds, columns=data.get("columns") or [], rows=data.get("rows") or [], @@ -426,10 +426,27 @@ class DataView(BaseView): if data.get("expandable_columns"): url_labels_extra = {"_labels": "on"} - renderers = { - key: path_with_format(request, key, {**url_labels_extra}) - for key in self.ds.renderers.keys() - } + renderers = {} + for key, (_, can_render) in self.ds.renderers.items(): + it_can_render = call_with_supported_arguments( + can_render, + datasette=self.ds, + columns=data.get("columns") or [], + rows=data.get("rows") or [], + sql=data.get("query", {}).get("sql", None), + query_name=data.get("query_name"), + database=database, + table=data.get("table"), + request=request, + view_name=self.name, + ) + if asyncio.iscoroutine(it_can_render): + it_can_render = await it_can_render + if it_can_render: + renderers[key] = path_with_format( + request, key, {**url_labels_extra} + ) + url_csv_args = {"_size": "max", **url_labels_extra} url_csv = path_with_format(request, "csv", url_csv_args) url_csv_path = url_csv.split("?")[0] diff --git a/docs/plugins.rst b/docs/plugins.rst index ebf6adf6..b27daf3f 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -744,14 +744,17 @@ Registers a new output renderer, to output data in a custom format. 
The hook fun def register_output_renderer(datasette): return { "extension": "test", - "render": render_test + "render": render_demo, + "can_render": can_render_demo, # Optional } -This will register ``render_test`` to be called when paths with the extension ``.test`` (for example ``/database.test``, ``/database/table.test``, or ``/database/table/row.test``) are requested. +This will register ``render_demo`` to be called when paths with the extension ``.test`` (for example ``/database.test``, ``/database/table.test``, or ``/database/table/row.test``) are requested. -``render_test`` is a Python function. It can be a regular function or an ``async def render_test()`` awaitable function, depending on if it needs to make any asynchronous calls. +``render_demo`` is a Python function. It can be a regular function or an ``async def render_demo()`` awaitable function, depending on if it needs to make any asynchronous calls. -When a request is received, the callback function is called with zero or more of the following arguments. Datasette will inspect your callback function and pass arguments that match its function signature. +``can_render_demo`` is a Python function (or ``async def`` function) which accepts the same arguments as ``render_demo`` but just returns ``True`` or ``False``. It lets Datasette know if the current SQL query can be represented by the plugin - and hence influence if a link to this output format is displayed in the user interface. If you omit the ``"can_render"`` key from the dictionary every query will be treated as being supported by the plugin. + +When a request is received, the ``"render"`` callback function is called with zero or more of the following arguments. Datasette will inspect your callback function and pass arguments that match its function signature. ``datasette`` - :ref:`internals_datasette` For accessing plugin configuration and executing queries. @@ -798,7 +801,7 @@ A simple example of an output renderer callback function: .. code-block:: python - def render_test(): + def render_demo(): return { "body": "Hello World" } @@ -807,7 +810,7 @@ Here is a more complex example: .. code-block:: python - async def render_test(datasette, columns, rows): + async def render_demo(datasette, columns, rows): db = next(iter(datasette.databases.values())) result = await db.execute("select sqlite_version()") first_row = " | ".join(columns) @@ -821,6 +824,13 @@ Here is a more complex example: "headers": {"x-sqlite-version": result.first()[0]}, } +And here is an example ``can_render`` function which returns ``True`` only if the query results contain the columns ``atom_id``, ``atom_title`` and ``atom_updated``: + +.. code-block:: python + + def can_render_demo(columns): + return {"atom_id", "atom_title", "atom_updated"}.issubset(columns) + Examples: `datasette-atom `_, `datasette-ics `_ .. 
_plugin_register_facet_classes: diff --git a/tests/plugins/register_output_renderer.py b/tests/plugins/register_output_renderer.py index d4c1228d..a9f0f157 100644 --- a/tests/plugins/register_output_renderer.py +++ b/tests/plugins/register_output_renderer.py @@ -2,6 +2,26 @@ from datasette import hookimpl import json +async def can_render( + datasette, columns, rows, sql, query_name, database, table, request, view_name +): + # We stash this on datasette so the calling unit test can see it + datasette._can_render_saw = { + "datasette": datasette, + "columns": columns, + "rows": rows, + "sql": sql, + "query_name": query_name, + "database": database, + "table": table, + "request": request, + "view_name": view_name, + } + if request.args.get("_no_can_render"): + return False + return True + + async def render_test_all_parameters( datasette, columns, rows, sql, query_name, database, table, request, view_name, data ): @@ -39,6 +59,10 @@ def render_test_no_parameters(): @hookimpl def register_output_renderer(datasette): return [ - {"extension": "testall", "render": render_test_all_parameters}, + { + "extension": "testall", + "render": render_test_all_parameters, + "can_render": can_render, + }, {"extension": "testnone", "callback": render_test_no_parameters}, ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index e9556b31..a34328a9 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -9,7 +9,7 @@ from .fixtures import ( from datasette.app import Datasette from datasette import cli from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm -from datasette.utils import sqlite3 +from datasette.utils import sqlite3, CustomRow from jinja2.environment import Template import base64 import json @@ -411,6 +411,41 @@ def test_register_output_renderer_custom_headers(app_client): assert "2" == response.headers["x-gosh"] +def test_register_output_renderer_can_render(app_client): + response = app_client.get("/fixtures/facetable?_no_can_render=1") + assert response.status == 200 + links = ( + Soup(response.body, "html.parser") + .find("p", {"class": "export-links"}) + .findAll("a") + ) + actual = [l["href"].split("/")[-1] for l in links] + # Should not be present because we sent ?_no_can_render=1 + assert "facetable.testall?_labels=on" not in actual + # Check that it was passed the values we expected + assert hasattr(app_client.ds, "_can_render_saw") + assert { + "datasette": app_client.ds, + "columns": [ + "pk", + "created", + "planet_int", + "on_earth", + "state", + "city_id", + "neighborhood", + "tags", + "complex_array", + "distinct_some_null", + ], + "sql": "select pk, created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array, distinct_some_null from facetable order by pk limit 51", + "query_name": None, + "database": "fixtures", + "table": "facetable", + "view_name": "table", + }.items() <= app_client.ds._can_render_saw.items() + + @pytest.mark.asyncio async def test_prepare_jinja2_environment(app_client): template = app_client.ds.jinja_env.from_string( From d56f402822df102f9cf1a9a056449d01a15e3aae Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 28 May 2020 07:10:21 -0700 Subject: [PATCH 0242/2113] Release notes for 0.43 Refs #581, #770, #729, #706, #751, #706, #744, #771, #773 --- README.md | 1 + docs/changelog.rst | 15 +++++++++++++++ 2 files changed, 16 insertions(+) diff --git a/README.md b/README.md index 7351c5c0..90df75de 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, 
archivists, local gover ## News + * 28th May 2020: [Datasette 0.43](http://datasette.readthedocs.io/en/latest/changelog.html#v0-43) - Redesigned [register_output_renderer](https://datasette.readthedocs.io/en/latest/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. * 8th May 2020: [Datasette 0.42](http://datasette.readthedocs.io/en/latest/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. * 6th May 2020: [Datasette 0.41](http://datasette.readthedocs.io/en/latest/changelog.html#v0-41) - New mechanism for [creating custom pages](https://datasette.readthedocs.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://datasette.readthedocs.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements. * 21st April 2020: [Datasette 0.40](http://datasette.readthedocs.io/en/latest/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. Various bug fixes. diff --git a/docs/changelog.rst b/docs/changelog.rst index 48d3128b..8f375dd1 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,21 @@ Changelog ========= +.. _v0_43: + +0.43 (2020-05-28) +----------------- + +The main focus of this release is a major upgrade to the :ref:`plugin_register_output_renderer` plugin hook, which allows plugins to provide new output formats for Datasette such as `datasette-atom `__ and `datasette-ics `__. + +* Redesign of :ref:`plugin_register_output_renderer` to provide more context to the render callback and support an optional ``"can_render"`` callback that controls if a suggested link to the output format is provided. (`#581 `__, `#770 `__) +* Visually distinguish float and integer columns - useful for figuring out why order-by-column might be returning unexpected results. (`#729 `__) +* The :ref:`internals_request`, which is passed to several plugin hooks, is now documented. (`#706 `__) +* New ``metadata.json`` option for setting a custom default page size for specific tables and views, see :ref:`metadata_page_size`. (`#751 `__) +* Canned queries can now be configured with a default URL fragment hash, useful when working with plugins such as `datasette-vega `__, see :ref:`canned_queries_default_fragment`. (`#706 `__) +* Fixed a bug in ``datasette publish`` when running on operating systems where the ``/tmp`` directory lives in a different volume, using a backport of the Python 3.8 ``shutil.copytree()`` function. (`#744 `__) +* Every plugin hook is now covered by the unit tests, and a new unit test checks that each plugin hook has at least one corresponding test. (`#771 `__, `#773 `__) + .. 
_v0_42:

0.42 (2020-05-08)

From 40885ef24e32d91502b6b8bbad1c7376f50f2830 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 28 May 2020 07:41:22 -0700
Subject: [PATCH 0243/2113] Noted tool for converting release notes to Markdown

---
 docs/contributing.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/contributing.rst b/docs/contributing.rst
index 48930332..567c4f47 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -145,12 +145,12 @@ To release a new version, first create a commit that updates :ref:`the changelog
 
 For non-bugfix releases you may want to update the news section of ``README.md`` as part of the same commit.
 
-Wait long enough for Travis to build and deploy the demo version of that commit (otherwise the tag deployment may fail to alias to it properly). Then run the following::
+To tag and push the release, run the following::
 
     git tag 0.25.2
     git push --tags
 
 Final steps once the release has deployed to https://pypi.org/project/datasette/
 
-* Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases
+* Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases - you can convert the release notes to Markdown by copying and pasting the rendered HTML into this tool: https://euangoddard.github.io/clipboard2markdown/
 * Manually kick off a build of the `stable` branch on Read The Docs: https://readthedocs.org/projects/datasette/builds/

From 7bb30c1f11f7246baf7bb6a229f6b93572c4cbe3 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 28 May 2020 10:09:32 -0700
Subject: [PATCH 0244/2113] request.url now respects force_https_urls, closes #781

---
 datasette/app.py             | 7 +++++++
 tests/plugins/my_plugin_2.py | 3 +++
 tests/test_api.py            | 4 ++++
 3 files changed, 14 insertions(+)

diff --git a/datasette/app.py b/datasette/app.py
index 40d39ac9..07190c16 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -786,6 +786,13 @@ class DatasetteRouter(AsgiRouter):
             base_url = self.ds.config("base_url")
             if base_url != "/" and path.startswith(base_url):
                 path = "/" + path[len(base_url) :]
+        # Apply force_https_urls, if set
+        if (
+            self.ds.config("force_https_urls")
+            and scope["type"] == "http"
+            and scope.get("scheme") != "https"
+        ):
+            scope = dict(scope, scheme="https")
         return await super().route_path(scope, receive, send, path)
 
     async def handle_404(self, scope, receive, send, exception=None):

diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py
index fdc6956d..c9e7c78f 100644
--- a/tests/plugins/my_plugin_2.py
+++ b/tests/plugins/my_plugin_2.py
@@ -46,6 +46,9 @@ def render_cell(value, database):
 
 @hookimpl
 def extra_template_vars(template, database, table, view_name, request, datasette):
+    # This helps unit tests that want to run assertions against the request object:
+    datasette._last_request = request
+
     async def query_database(sql):
         first_db = list(datasette.databases.keys())[0]
         return (await datasette.execute(first_db, sql)).rows[0][0]

diff --git a/tests/test_api.py b/tests/test_api.py
index eb80f8e7..d7e7c03f 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1676,6 +1676,10 @@ def test_config_force_https_urls():
         "toggle_url"
     ].startswith("https://")
     assert response.json["suggested_facets"][0]["toggle_url"].startswith("https://")
+    # Also confirm that request.url and request.scheme are set correctly
+    response = client.get("/")
+    assert client.ds._last_request.url.startswith("https://")
+    assert client.ds._last_request.scheme == "https"
 
 
 def
test_infinity_returned_as_null(app_client):

From 21a8ffc82dcf5e8e5f484ce39ee9713f959e0ad5 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 28 May 2020 10:49:58 -0700
Subject: [PATCH 0245/2113] Tip about referencing issues in release notes commit

---
 docs/contributing.rst | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/docs/contributing.rst b/docs/contributing.rst
index 567c4f47..da4dc35a 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -137,12 +137,16 @@ We increment ``minor`` for new features.
 
 We increment ``patch`` for bugfix releases.
 
-To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__::
+To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__::
 
     # Update changelog
-    git commit -m "Release 0.25.2" -a
+    git commit -m "Release notes for 0.43
+
+    Refs #581, #770, #729, #706, #751, #706, #744, #771, #773" -a
     git push
 
+Referencing the issues that are part of the release in the commit message ensures the name of the release shows up on those issue pages, e.g. `here `__.
+
 For non-bugfix releases you may want to update the news section of ``README.md`` as part of the same commit.
 
 To tag and push the release, run the following::

From 3c1a60589e14849344acd8aa6da0a60b40fbfc60 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 28 May 2020 11:27:24 -0700
Subject: [PATCH 0246/2113] Consistent capitalization of SpatiaLite in the docs

---
 docs/changelog.rst    | 2 +-
 docs/installation.rst | 2 +-
 docs/metadata.rst     | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/changelog.rst b/docs/changelog.rst
index 8f375dd1..8b6272cb 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -947,7 +947,7 @@ request all rows where that column is less than 50 meters or more than 20 feet f
 404s for missing tables/databases closes `#184 `_
 - long_description in markdown for the new PyPI
-- Hide Spatialite system tables. [Russ Garrett]
+- Hide SpatiaLite system tables. [Russ Garrett]
 - Allow ``explain select`` / ``explain query plan select`` `#201 `_
 - Datasette inspect now finds primary_keys `#195 `_
 - Ability to sort using form fields (for mobile portrait mode) `#199 `_

diff --git a/docs/installation.rst b/docs/installation.rst
index cdf1467a..aacfed1d 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -149,7 +149,7 @@ To upgrade to the most recent release of Datasette, run the following::
 
     docker pull datasetteproject/datasette
 
-Loading Spatialite
+Loading SpatiaLite
 ~~~~~~~~~~~~~~~~~~
 
 The ``datasetteproject/datasette`` image includes a recent version of the

diff --git a/docs/metadata.rst b/docs/metadata.rst
index 88ad5854..18766bac 100644
--- a/docs/metadata.rst
+++ b/docs/metadata.rst
@@ -260,7 +260,7 @@ Hiding tables
 -------------
 
 You can hide tables from the database listing view (in the same way that FTS and
-Spatialite tables are automatically hidden) using ``"hidden": true``:
+SpatiaLite tables are automatically hidden) using ``"hidden": true``:
 
 ..
code-block:: json From 3e8932bf6443bd5168f22d559597aed619205995 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 29 May 2020 15:12:10 -0700 Subject: [PATCH 0247/2113] Upgrade to actions/cache@v2 --- .github/workflows/deploy-latest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 33490972..fd53f754 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -15,7 +15,7 @@ jobs: uses: actions/setup-python@v1 with: python-version: 3.8 - - uses: actions/cache@v1 + - uses: actions/cache@v2 name: Configure pip caching with: path: ~/.cache/pip From 7ccd55a1638d7d2762f2789f192e5bb81fb0d0c7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 28 May 2020 11:54:57 -0700 Subject: [PATCH 0248/2113] Views do support sorting now, refs #508 --- docs/metadata.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/metadata.rst b/docs/metadata.rst index 18766bac..024af01e 100644 --- a/docs/metadata.rst +++ b/docs/metadata.rst @@ -210,7 +210,7 @@ This will restrict sorting of ``example_table`` to just the ``height`` and You can also disable sorting entirely by setting ``"sortable_columns": []`` -By default, database views in Datasette do not support sorting. You can use ``sortable_columns`` to enable specific sort orders for a view called ``name_of_view`` in the database ``my_database`` like so: +You can use ``sortable_columns`` to enable specific sort orders for a view called ``name_of_view`` in the database ``my_database`` like so: .. code-block:: json From 84616a2364df56f966f579eecc0716b9877f0d70 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 29 May 2020 15:51:30 -0700 Subject: [PATCH 0249/2113] request.args.getlist() returns [] if missing, refs #774 Also added some unit tests for request.args --- datasette/utils/__init__.py | 4 ++-- docs/internals.rst | 2 +- tests/plugins/register_output_renderer.py | 2 +- tests/test_utils.py | 10 ++++++++++ 4 files changed, 14 insertions(+), 4 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 2dab8e14..9b4f21ba 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -761,9 +761,9 @@ class RequestParameters(dict): except (KeyError, TypeError): return default - def getlist(self, name, default=None): + def getlist(self, name): "Return full list" - return super().get(name, default) + return super().get(name) or [] class ConnectionProblem(Exception): diff --git a/docs/internals.rst b/docs/internals.rst index 5bcb9da9..bbf10cae 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -276,4 +276,4 @@ Conider the querystring ``?foo=1&foo=2``. This will produce a ``request.args`` t Calling ``request.args.get("foo")`` will return the first value, ``"1"``. If that key is not present it will return ``None`` - or the second argument if you passed one, which will be used as the default. -Calling ``request.args.getlist("foo")`` will return the full list, ``["1", "2"]``. \ No newline at end of file +Calling ``request.args.getlist("foo")`` will return the full list, ``["1", "2"]``. If you call it on a missing key it will return ``[]``. 
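The new behaviour is easy to check in isolation - a short sketch using the ``Request.fake()`` helper that Datasette's own tests rely on (an internal test utility, shown here purely for illustration):

.. code-block:: python

    from datasette.utils.asgi import Request

    request = Request.fake("/foo?multi=1&multi=2&single=3")

    assert request.args.get("multi") == "1"  # .get() returns the first value
    assert request.args.get("single") == "3"
    assert request.args.getlist("multi") == ["1", "2"]  # full list of values
    assert request.args.getlist("missing") == []  # missing key -> [], not None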
diff --git a/tests/plugins/register_output_renderer.py b/tests/plugins/register_output_renderer.py index a9f0f157..82b60d01 100644 --- a/tests/plugins/register_output_renderer.py +++ b/tests/plugins/register_output_renderer.py @@ -26,7 +26,7 @@ async def render_test_all_parameters( datasette, columns, rows, sql, query_name, database, table, request, view_name, data ): headers = {} - for custom_header in request.args.getlist("header") or []: + for custom_header in request.args.getlist("header"): key, value = custom_header.split(":") headers[key] = value result = await datasette.databases["fixtures"].execute("select 1 + 1") diff --git a/tests/test_utils.py b/tests/test_utils.py index 59b80a67..ffb66ca5 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -448,6 +448,16 @@ async def test_request_post_vars(): assert {"foo": "bar", "baz": "1"} == await request.post_vars() +def test_request_args(): + request = Request.fake("/foo?multi=1&multi=2&single=3") + assert "1" == request.args.get("multi") + assert "3" == request.args.get("single") + assert ["1", "2"] == request.args.getlist("multi") + assert [] == request.args.getlist("missing") + with pytest.raises(KeyError): + request.args["missing"] + + def test_call_with_supported_arguments(): def foo(a, b): return "{}+{}".format(a, b) From f272cbc65fbf56368413320e21c87dc842e0a083 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 29 May 2020 15:57:46 -0700 Subject: [PATCH 0250/2113] Use request.args.getlist instead of request.args[...], refs #774 --- datasette/views/table.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index d014db71..d1d92bb1 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -319,19 +319,19 @@ class TableView(RowTableShared): if not self.ds.config("allow_sql"): raise DatasetteError("_where= is not allowed", status=400) else: - where_clauses.extend(request.args["_where"]) + where_clauses.extend(request.args.getlist("_where")) extra_wheres_for_ui = [ { "text": text, "remove_url": path_with_removed_args(request, {"_where": text}), } - for text in request.args["_where"] + for text in request.args.getlist("_where") ] # Support for ?_through={table, column, value} extra_human_descriptions = [] if "_through" in request.args: - for through in request.args["_through"]: + for through in request.args.getlist("_through"): through_data = json.loads(through) through_table = through_data["table"] other_column = through_data["column"] @@ -559,7 +559,7 @@ class TableView(RowTableShared): ) if request.args.get("_timelimit"): - extra_args["custom_time_limit"] = int(request.args["_timelimit"]) + extra_args["custom_time_limit"] = int(request.args.get("_timelimit")) results = await db.execute(sql, params, truncate=True, **extra_args) @@ -633,7 +633,7 @@ class TableView(RowTableShared): all_labels = default_labels # Check for explicit _label= if "_label" in request.args: - columns_to_expand = request.args["_label"] + columns_to_expand = request.args.getlist("_label") if columns_to_expand is None and all_labels: # expand all columns with foreign keys columns_to_expand = [fk["column"] for fk, _ in expandable_columns] @@ -746,7 +746,7 @@ class TableView(RowTableShared): if arg in special_args: form_hidden_args.append((arg, special_args[arg])) if request.args.get("_where"): - for where_text in request.args["_where"]: + for where_text in request.args.getlist("_where"): form_hidden_args.append(("_where", where_text)) # if no sort specified 
AND table has a single primary key, From 81be31322a968d23cf57cee62b58df55433385e3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 29 May 2020 16:18:01 -0700 Subject: [PATCH 0251/2113] New implementation for RequestParams - no longer subclasses dict - request.args[key] now returns first item, not all items - removed request.raw_args entirely Closes #774 --- datasette/renderer.py | 2 +- datasette/utils/__init__.py | 30 +++++++++++++++++++++++++++--- datasette/utils/asgi.py | 5 ----- datasette/views/table.py | 6 +++--- docs/internals.rst | 12 ++++++++---- tests/test_utils.py | 10 ++++++++++ 6 files changed, 49 insertions(+), 16 deletions(-) diff --git a/datasette/renderer.py b/datasette/renderer.py index 349c2922..3f921fe7 100644 --- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -32,7 +32,7 @@ def json_renderer(args, data, view_name): # Handle the _json= parameter which may modify data["rows"] json_cols = [] if "_json" in args: - json_cols = args["_json"] + json_cols = args.getlist("_json") if json_cols and "rows" in data and "columns" in data: data["rows"] = convert_specific_columns_to_json( data["rows"], data["columns"], json_cols diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 9b4f21ba..bf965413 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -753,17 +753,41 @@ def escape_fts(query): ) -class RequestParameters(dict): +class RequestParameters: + def __init__(self, data): + # data is a dictionary of key => [list, of, values] + assert isinstance(data, dict), "data should be a dictionary of key => [list]" + for key in data: + assert isinstance( + data[key], list + ), "data should be a dictionary of key => [list]" + self._data = data + + def __contains__(self, key): + return key in self._data + + def __getitem__(self, key): + return self._data[key][0] + + def keys(self): + return self._data.keys() + + def __iter__(self): + yield from self._data.keys() + + def __len__(self): + return len(self._data) + def get(self, name, default=None): "Return first value in the list, if available" try: - return super().get(name)[0] + return self._data.get(name)[0] except (KeyError, TypeError): return default def getlist(self, name): "Return full list" - return super().get(name) or [] + return self._data.get(name) or [] class ConnectionProblem(Exception): diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 62a2a0c8..24398b77 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -63,11 +63,6 @@ class Request: def args(self): return RequestParameters(parse_qs(qs=self.query_string)) - @property - def raw_args(self): - # Deprecated, undocumented - may be removed in Datasette 1.0 - return {key: value[0] for key, value in self.args.items()} - async def post_vars(self): body = [] body = b"" diff --git a/datasette/views/table.py b/datasette/views/table.py index d1d92bb1..a629346f 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -277,11 +277,11 @@ class TableView(RowTableShared): # it can still be queried using ?_col__exact=blah special_args = {} other_args = [] - for key, value in args.items(): + for key in args: if key.startswith("_") and "__" not in key: - special_args[key] = value[0] + special_args[key] = args[key] else: - for v in value: + for v in args.getlist(key): other_args.append((key, v)) # Handle ?_filter_column and redirect, if present diff --git a/docs/internals.rst b/docs/internals.rst index bbf10cae..ea015dbc 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ 
-268,12 +268,16 @@ The object also has one awaitable method: The RequestParameters class --------------------------- -This class, returned by ``request.args``, is a subclass of a Python dictionary that provides methods for working with keys that map to lists of values. +This class, returned by ``request.args``, is a dictionary-like object. -Conider the querystring ``?foo=1&foo=2``. This will produce a ``request.args`` that looks like this:: +Consider the querystring ``?foo=1&foo=2``. This will produce a ``request.args`` that looks like this:: RequestParameters({"foo": ["1", "2"]}) -Calling ``request.args.get("foo")`` will return the first value, ``"1"``. If that key is not present it will return ``None`` - or the second argument if you passed one, which will be used as the default. +``request.args["foo"]`` returns the first value, ``"1"`` - or raises ``KeyError`` if that key is missing. -Calling ``request.args.getlist("foo")`` will return the full list, ``["1", "2"]``. If you call it on a missing key it will return ``[]``. +``request.args.get("foo")`` returns ``"1"`` - or ``None`` if the key is missing. A second argument can be used to specify a different default value. + +``request.args.getlist("foo")`` returns the full list, ``["1", "2"]``. If you call it on a missing key it will return ``[]``. + +You can use ``if key in request.args`` to check if a key is present. ``for key in request.args`` will iterate through the keys, or you can use ``request.args.keys()`` to get all of the keys. diff --git a/tests/test_utils.py b/tests/test_utils.py index ffb66ca5..9d6f45b0 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -452,8 +452,18 @@ def test_request_args(): request = Request.fake("/foo?multi=1&multi=2&single=3") assert "1" == request.args.get("multi") assert "3" == request.args.get("single") + assert "1" == request.args["multi"] + assert "3" == request.args["single"] assert ["1", "2"] == request.args.getlist("multi") assert [] == request.args.getlist("missing") + assert "multi" in request.args + assert "single" in request.args + assert "missing" not in request.args + expected = ["multi", "single"] + assert expected == list(request.args.keys()) + for i, key in enumerate(request.args): + assert expected[i] == key + assert 2 == len(request.args) with pytest.raises(KeyError): request.args["missing"] From 31fb006a9b05067a8eb2f774ad3a3b15b4565924 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 07:28:29 -0700 Subject: [PATCH 0252/2113] Added datasette.get_database() method Refs #576 --- datasette/app.py | 5 +++++ docs/internals.rst | 10 ++++++++++ docs/plugins.rst | 2 +- tests/test_database.py | 3 +++ tests/test_internals_datasette.py | 23 +++++++++++++++++++++++ 5 files changed, 42 insertions(+), 1 deletion(-) create mode 100644 tests/test_internals_datasette.py diff --git a/datasette/app.py b/datasette/app.py index 07190c16..30eb3dba 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -281,6 +281,11 @@ class Datasette: self.register_renderers() + def get_database(self, name=None): + if name is None: + return next(iter(self.databases.values())) + return self.databases[name] + def add_database(self, name, db): self.databases[name] = db diff --git a/docs/internals.rst b/docs/internals.rst index ea015dbc..886cb7e7 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -44,6 +44,16 @@ This method lets you read plugin configuration values that were set in ``metadat Renders a `Jinja template `__ using Datasette's preconfigured instance of Jinja and returns the 
resulting string. The template will have access to Datasette's default template functions and any functions that have been made available by other plugins. +.. _datasette_get_database: + +.get_database(name) +------------------- + +``name`` - string, optional + The name of the database - optional. + +Returns the specified database object. Raises a ``KeyError`` if the database does not exist. Call this method without an argument to return the first connected database. + .. _datasette_add_database: .add_database(name, db) diff --git a/docs/plugins.rst b/docs/plugins.rst index b27daf3f..f08f1217 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -811,7 +811,7 @@ Here is a more complex example: .. code-block:: python async def render_demo(datasette, columns, rows): - db = next(iter(datasette.databases.values())) + db = datasette.get_database() result = await db.execute("select sqlite_version()") first_row = " | ".join(columns) lines = [first_row] diff --git a/tests/test_database.py b/tests/test_database.py index 1f1a3a7e..bd7e7666 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -1,3 +1,6 @@ +""" +Tests for the datasette.database.Database class +""" from datasette.database import Results, MultipleValues from datasette.utils import sqlite3 from .fixtures import app_client diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py new file mode 100644 index 00000000..4993250d --- /dev/null +++ b/tests/test_internals_datasette.py @@ -0,0 +1,23 @@ +""" +Tests for the datasette.app.Datasette class +""" +from .fixtures import app_client +import pytest + + +@pytest.fixture +def datasette(app_client): + return app_client.ds + + +def test_get_database(datasette): + db = datasette.get_database("fixtures") + assert "fixtures" == db.name + with pytest.raises(KeyError): + datasette.get_database("missing") + + +def test_get_database_no_argument(datasette): + # Returns the first available database: + db = datasette.get_database() + assert "fixtures" == db.name From ca56c226a9f1b02e871d7d7b392619a805b7f1ed Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 07:33:02 -0700 Subject: [PATCH 0253/2113] Renamed test_database.py to test_internals_database.py Also added a db fixture to remove some boilerplate. 
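The new fixture is a thin wrapper around the ``datasette.get_database()`` method introduced in the previous commit - roughly this sketch, where ``app_client`` is the test suite's existing fixture::

    import pytest


    @pytest.fixture
    def db(app_client):
        # Look the fixtures database up by name; calling
        # app_client.ds.get_database() with no argument would
        # return the first connected database instead
        return app_client.ds.get_database("fixtures")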
--- ...database.py => test_internals_database.py} | 45 +++++++------------ 1 file changed, 17 insertions(+), 28 deletions(-) rename tests/{test_database.py => test_internals_database.py} (80%) diff --git a/tests/test_database.py b/tests/test_internals_database.py similarity index 80% rename from tests/test_database.py rename to tests/test_internals_database.py index bd7e7666..fde7ad2c 100644 --- a/tests/test_database.py +++ b/tests/test_internals_database.py @@ -9,17 +9,20 @@ import time import uuid +@pytest.fixture +def db(app_client): + return app_client.ds.get_database("fixtures") + + @pytest.mark.asyncio -async def test_execute(app_client): - db = app_client.ds.databases["fixtures"] +async def test_execute(db): results = await db.execute("select * from facetable") assert isinstance(results, Results) assert 15 == len(results) @pytest.mark.asyncio -async def test_results_first(app_client): - db = app_client.ds.databases["fixtures"] +async def test_results_first(db): assert None is (await db.execute("select * from facetable where pk > 100")).first() results = await db.execute("select * from facetable") row = results.first() @@ -35,8 +38,7 @@ async def test_results_first(app_client): ], ) @pytest.mark.asyncio -async def test_results_single_value(app_client, query, expected): - db = app_client.ds.databases["fixtures"] +async def test_results_single_value(db, query, expected): results = await db.execute(query) if expected: assert expected == results.single_value() @@ -46,9 +48,7 @@ async def test_results_single_value(app_client, query, expected): @pytest.mark.asyncio -async def test_execute_fn(app_client): - db = app_client.ds.databases["fixtures"] - +async def test_execute_fn(db): def get_1_plus_1(conn): return conn.execute("select 1 + 1").fetchall()[0][0] @@ -63,16 +63,14 @@ async def test_execute_fn(app_client): ), ) @pytest.mark.asyncio -async def test_table_exists(app_client, tables, exists): - db = app_client.ds.databases["fixtures"] +async def test_table_exists(db, tables, exists): for table in tables: actual = await db.table_exists(table) assert exists == actual @pytest.mark.asyncio -async def test_get_all_foreign_keys(app_client): - db = app_client.ds.databases["fixtures"] +async def test_get_all_foreign_keys(db): all_foreign_keys = await db.get_all_foreign_keys() assert { "incoming": [], @@ -102,8 +100,7 @@ async def test_get_all_foreign_keys(app_client): @pytest.mark.asyncio -async def test_table_names(app_client): - db = app_client.ds.databases["fixtures"] +async def test_table_names(db): table_names = await db.table_names() assert [ "simple_primary_key", @@ -139,8 +136,7 @@ async def test_table_names(app_client): @pytest.mark.asyncio -async def test_execute_write_block_true(app_client): - db = app_client.ds.databases["fixtures"] +async def test_execute_write_block_true(db): await db.execute_write( "update roadside_attractions set name = ? where pk = ?", ["Mystery!", 1], @@ -151,8 +147,7 @@ async def test_execute_write_block_true(app_client): @pytest.mark.asyncio -async def test_execute_write_block_false(app_client): - db = app_client.ds.databases["fixtures"] +async def test_execute_write_block_false(db): await db.execute_write( "update roadside_attractions set name = ? 
where pk = ?", ["Mystery!", 1], ) @@ -162,9 +157,7 @@ async def test_execute_write_block_false(app_client): @pytest.mark.asyncio -async def test_execute_write_fn_block_false(app_client): - db = app_client.ds.databases["fixtures"] - +async def test_execute_write_fn_block_false(db): def write_fn(conn): with conn: conn.execute("delete from roadside_attractions where pk = 1;") @@ -177,9 +170,7 @@ async def test_execute_write_fn_block_false(app_client): @pytest.mark.asyncio -async def test_execute_write_fn_block_true(app_client): - db = app_client.ds.databases["fixtures"] - +async def test_execute_write_fn_block_true(db): def write_fn(conn): with conn: conn.execute("delete from roadside_attractions where pk = 1;") @@ -191,9 +182,7 @@ async def test_execute_write_fn_block_true(app_client): @pytest.mark.asyncio -async def test_execute_write_fn_exception(app_client): - db = app_client.ds.databases["fixtures"] - +async def test_execute_write_fn_exception(db): def write_fn(conn): assert False From 012c76901af65442e90eac4b36db43455e3c922f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 07:38:46 -0700 Subject: [PATCH 0254/2113] _ prefix for many private methods of Datasette, refs #576 --- datasette/app.py | 28 ++++++++++++++-------------- datasette/database.py | 2 +- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 30eb3dba..4b9807b0 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -279,7 +279,7 @@ class Datasette: # pylint: disable=no-member pm.hook.prepare_jinja2_environment(env=self.jinja_env) - self.register_renderers() + self._register_renderers() def get_database(self, name=None): if name is None: @@ -392,7 +392,7 @@ class Datasette: } ) - def prepare_connection(self, conn, database): + def _prepare_connection(self, conn, database): conn.row_factory = sqlite3.Row conn.text_factory = lambda x: str(x, "utf-8", "replace") for name, num_args, func in self.sqlite_functions: @@ -468,12 +468,12 @@ class Datasette: url = "https://" + url[len("http://") :] return url - def register_custom_units(self): + def _register_custom_units(self): "Register any custom units defined in the metadata.json with Pint" for unit in self.metadata("custom_units") or []: ureg.define(unit) - def connected_databases(self): + def _connected_databases(self): return [ { "name": d.name, @@ -486,9 +486,9 @@ class Datasette: for d in sorted(self.databases.values(), key=lambda d: d.name) ] - def versions(self): + def _versions(self): conn = sqlite3.connect(":memory:") - self.prepare_connection(conn, ":memory:") + self._prepare_connection(conn, ":memory:") sqlite_version = conn.execute("select sqlite_version()").fetchone()[0] sqlite_extensions = {} for extension, testsql, hasversion in ( @@ -534,7 +534,7 @@ class Datasette: }, } - def plugins(self, show_all=False): + def _plugins(self, show_all=False): ps = list(get_plugins()) if not show_all: ps = [p for p in ps if p["name"] not in DEFAULT_PLUGINS] @@ -548,7 +548,7 @@ class Datasette: for p in ps ] - def threads(self): + def _threads(self): threads = list(threading.enumerate()) d = { "num_threads": len(threads), @@ -576,7 +576,7 @@ class Datasette: .get(table, {}) ) - def register_renderers(self): + def _register_renderers(self): """ Register output renderers which output data in custom formats. 
""" # Built-in renderers self.renderers["json"] = (json_renderer, lambda: True) @@ -724,11 +724,11 @@ class Datasette: r"/-/metadata(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "versions.json", self.versions), + JsonDataView.as_asgi(self, "versions.json", self._versions), r"/-/versions(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "plugins.json", self.plugins), + JsonDataView.as_asgi(self, "plugins.json", self._plugins), r"/-/plugins(?P(\.json)?)$", ) add_route( @@ -736,11 +736,11 @@ class Datasette: r"/-/config(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "threads.json", self.threads), + JsonDataView.as_asgi(self, "threads.json", self._threads), r"/-/threads(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "databases.json", self.connected_databases), + JsonDataView.as_asgi(self, "databases.json", self._connected_databases), r"/-/databases(?P(\.json)?)$", ) add_route( @@ -765,7 +765,7 @@ class Datasette: + renderer_regex + r")?$", ) - self.register_custom_units() + self._register_custom_units() async def setup_db(): # First time server starts up, calculate table counts for immutable databases diff --git a/datasette/database.py b/datasette/database.py index e6154caa..89bf47f4 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -104,7 +104,7 @@ class Database: conn = getattr(connections, self.name, None) if not conn: conn = self.connect() - self.ds.prepare_connection(conn, self.name) + self.ds._prepare_connection(conn, self.name) setattr(connections, self.name, conn) return fn(conn) From de1cde65a67cf9acb227b4df67230b47fdfc9a0e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 10:45:11 -0700 Subject: [PATCH 0255/2113] Moved request tests to test_internals_request.py --- tests/test_internals_request.py | 42 +++++++++++++++++++++++++++++++++ tests/test_utils.py | 40 ------------------------------- 2 files changed, 42 insertions(+), 40 deletions(-) create mode 100644 tests/test_internals_request.py diff --git a/tests/test_internals_request.py b/tests/test_internals_request.py new file mode 100644 index 00000000..5c9b254b --- /dev/null +++ b/tests/test_internals_request.py @@ -0,0 +1,42 @@ +from datasette.utils.asgi import Request +import pytest + + +@pytest.mark.asyncio +async def test_request_post_vars(): + scope = { + "http_version": "1.1", + "method": "POST", + "path": "/", + "raw_path": b"/", + "query_string": b"", + "scheme": "http", + "type": "http", + "headers": [[b"content-type", b"application/x-www-form-urlencoded"]], + } + + async def receive(): + return {"type": "http.request", "body": b"foo=bar&baz=1", "more_body": False} + + request = Request(scope, receive) + assert {"foo": "bar", "baz": "1"} == await request.post_vars() + + +def test_request_args(): + request = Request.fake("/foo?multi=1&multi=2&single=3") + assert "1" == request.args.get("multi") + assert "3" == request.args.get("single") + assert "1" == request.args["multi"] + assert "3" == request.args["single"] + assert ["1", "2"] == request.args.getlist("multi") + assert [] == request.args.getlist("missing") + assert "multi" in request.args + assert "single" in request.args + assert "missing" not in request.args + expected = ["multi", "single"] + assert expected == list(request.args.keys()) + for i, key in enumerate(request.args): + assert expected[i] == key + assert 2 == len(request.args) + with pytest.raises(KeyError): + request.args["missing"] diff --git a/tests/test_utils.py b/tests/test_utils.py index 9d6f45b0..01a10468 100644 --- 
a/tests/test_utils.py +++ b/tests/test_utils.py @@ -428,46 +428,6 @@ def test_check_connection_passes(): utils.check_connection(conn) -@pytest.mark.asyncio -async def test_request_post_vars(): - scope = { - "http_version": "1.1", - "method": "POST", - "path": "/", - "raw_path": b"/", - "query_string": b"", - "scheme": "http", - "type": "http", - "headers": [[b"content-type", b"application/x-www-form-urlencoded"]], - } - - async def receive(): - return {"type": "http.request", "body": b"foo=bar&baz=1", "more_body": False} - - request = Request(scope, receive) - assert {"foo": "bar", "baz": "1"} == await request.post_vars() - - -def test_request_args(): - request = Request.fake("/foo?multi=1&multi=2&single=3") - assert "1" == request.args.get("multi") - assert "3" == request.args.get("single") - assert "1" == request.args["multi"] - assert "3" == request.args["single"] - assert ["1", "2"] == request.args.getlist("multi") - assert [] == request.args.getlist("missing") - assert "multi" in request.args - assert "single" in request.args - assert "missing" not in request.args - expected = ["multi", "single"] - assert expected == list(request.args.keys()) - for i, key in enumerate(request.args): - assert expected[i] == key - assert 2 == len(request.args) - with pytest.raises(KeyError): - request.args["missing"] - - def test_call_with_supported_arguments(): def foo(a, b): return "{}+{}".format(a, b) From 5ae14c9f20e0dc59c588f0e93eedfefe0f0f3e8e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 10:54:22 -0700 Subject: [PATCH 0256/2113] Improved documentation for RequestParameters class --- docs/internals.rst | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/docs/internals.rst b/docs/internals.rst index 886cb7e7..ca725cc4 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -278,16 +278,27 @@ The object also has one awaitable method: The RequestParameters class --------------------------- -This class, returned by ``request.args``, is a dictionary-like object. +``request.args`` is a ``RequestParameters`` object - a dictionary-like object which provides access to querystring parameters that may have multiple values. -Consider the querystring ``?foo=1&foo=2``. This will produce a ``request.args`` that looks like this:: +Consider the querystring ``?foo=1&foo=2&bar=3`` - with two values for ``foo`` and one value for ``bar``. - RequestParameters({"foo": ["1", "2"]}) +``request.args[key]`` - string + Returns the first value for that key, or raises a ``KeyError`` if the key is missing. For the above example ``request.args["foo"]`` would return ``"1"``. -``request.args["foo"]`` returns the first value, ``"1"`` - or raises ``KeyError`` if that key is missing. +``request.args.get(key)`` - string or None + Returns the first value for that key, or ``None`` if the key is missing. Pass a second argument to specify a different default, e.g. ``q = request.args.get("q", "")``. -``request.args.get("foo")`` returns ``"1"`` - or ``None`` if the key is missing. A second argument can be used to specify a different default value. +``request.args.getlist(key)`` - list of strings + Returns the list of strings for that key. ``request.args.getlist("foo")`` would return ``["1", "2"]`` in the above example. ``request.args.getlist("bar")`` would return ``["3"]``. If the key is missing an empty list will be returned. -``request.args.getlist("foo")`` returns the full list, ``["1", "2"]``. If you call it on a missing key it will return ``[]``. 
+``request.args.keys()`` - list of strings + Returns the list of available keys - for the example this would be ``["foo", "bar"]``. -You can use ``if key in request.args`` to check if a key is present. ``for key in request.args`` will iterate through the keys, or you can use ``request.args.keys()`` to get all of the keys. +``key in request.args`` - True or False + You can use ``if key in request.args`` to check if a key is present. + +``for key in request.args`` - iterator + This lets you loop through every available key. + +``len(request.args)`` - integer + Returns the number of keys. From 3c5afaeb231c94a55309f1c0187ff6dedd5b5fb8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 11:06:13 -0700 Subject: [PATCH 0257/2113] Re-arranged internals documentation Request is more useful to most people than Database. --- docs/internals.rst | 136 ++++++++++++++++++++++----------------------- 1 file changed, 68 insertions(+), 68 deletions(-) diff --git a/docs/internals.rst b/docs/internals.rst index ca725cc4..4db710c0 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -3,7 +3,74 @@ Internals for plugins ===================== -Many :ref:`plugin_hooks` are passed objects that provide access to internal Datasette functionality. The interface to these objects should not be considered stable (at least until Datasette 1.0) with the exception of methods that are documented on this page. +Many :ref:`plugin_hooks` are passed objects that provide access to internal Datasette functionality. The interface to these objects should not be considered stable with the exception of methods that are documented here. + +.. _internals_request: + +Request object +~~~~~~~~~~~~~~ + +The request object is passed to various plugin hooks. It represents an incoming HTTP request. It has the following properties: + +``.scope`` - dictionary + The ASGI scope that was used to construct this request, described in the `ASGI HTTP connection scope `__ specification. + +``.method`` - string + The HTTP method for this request, usually ``GET`` or ``POST``. + +``.url`` - string + The full URL for this request, e.g. ``https://latest.datasette.io/fixtures``. + +``.scheme`` - string + The request scheme - usually ``https`` or ``http``. + +``.headers`` - dictionary (str -> str) + A dictionary of incoming HTTP request headers. + +``.host`` - string + The host header from the incoming request, e.g. ``latest.datasette.io`` or ``localhost``. + +``.path`` - string + The path of the request, e.g. ``/fixtures``. + +``.query_string`` - string + The querystring component of the request, without the ``?`` - e.g. ``name__contains=sam&age__gt=10``. + +``.args`` - RequestParameters + An object representing the parsed querystring parameters, see below. + +The object also has one awaitable method: + +``await request.post_vars()`` - dictionary + Returns a dictionary of form variables that were submitted in the request body via ``POST``. + +The RequestParameters class +--------------------------- + +``request.args`` is a ``RequestParameters`` object - a dictionary-like object which provides access to querystring parameters that may have multiple values. + +Consider the querystring ``?foo=1&foo=2&bar=3`` - with two values for ``foo`` and one value for ``bar``. + +``request.args[key]`` - string + Returns the first value for that key, or raises a ``KeyError`` if the key is missing. For the above example ``request.args["foo"]`` would return ``"1"``. 
+ +``request.args.get(key)`` - string or None + Returns the first value for that key, or ``None`` if the key is missing. Pass a second argument to specify a different default, e.g. ``q = request.args.get("q", "")``. + +``request.args.getlist(key)`` - list of strings + Returns the list of strings for that key. ``request.args.getlist("foo")`` would return ``["1", "2"]`` in the above example. ``request.args.getlist("bar")`` would return ``["3"]``. If the key is missing an empty list will be returned. + +``request.args.keys()`` - list of strings + Returns the list of available keys - for the example this would be ``["foo", "bar"]``. + +``key in request.args`` - True or False + You can use ``if key in request.args`` to check if a key is present. + +``for key in request.args`` - iterator + This lets you loop through every available key. + +``len(request.args)`` - integer + Returns the number of keys. .. _internals_datasette: @@ -235,70 +302,3 @@ Here's an example of ``block=True`` in action: num_rows_left = await database.execute_write_fn(my_action, block=True) except Exception as e: print("An error occurred:", e) - -.. _internals_request: - -Request object -~~~~~~~~~~~~~~ - -The request object is passed to various plugin hooks. It represents an incoming HTTP request. It has the following properties: - -``.scope`` - dictionary - The ASGI scope that was used to construct this request, described in the `ASGI HTTP connection scope `__ specification. - -``.method`` - string - The HTTP method for this request, usually ``GET`` or ``POST``. - -``.url`` - string - The full URL for this request, e.g. ``https://latest.datasette.io/fixtures``. - -``.scheme`` - string - The request scheme - usually ``https`` or ``http``. - -``.headers`` - dictionary (str -> str) - A dictionary of incoming HTTP request headers. - -``.host`` - string - The host header from the incoming request, e.g. ``latest.datasette.io`` or ``localhost``. - -``.path`` - string - The path of the request, e.g. ``/fixtures``. - -``.query_string`` - string - The querystring component of the request, without the ``?`` - e.g. ``name__contains=sam&age__gt=10``. - -``.args`` - RequestParameters - An object representing the parsed querystring parameters, see below. - -The object also has one awaitable method: - -``await request.post_vars()`` - dictionary - Returns a dictionary of form variables that were submitted in the request body via ``POST``. - -The RequestParameters class ---------------------------- - -``request.args`` is a ``RequestParameters`` object - a dictionary-like object which provides access to querystring parameters that may have multiple values. - -Consider the querystring ``?foo=1&foo=2&bar=3`` - with two values for ``foo`` and one value for ``bar``. - -``request.args[key]`` - string - Returns the first value for that key, or raises a ``KeyError`` if the key is missing. For the above example ``request.args["foo"]`` would return ``"1"``. - -``request.args.get(key)`` - string or None - Returns the first value for that key, or ``None`` if the key is missing. Pass a second argument to specify a different default, e.g. ``q = request.args.get("q", "")``. - -``request.args.getlist(key)`` - list of strings - Returns the list of strings for that key. ``request.args.getlist("foo")`` would return ``["1", "2"]`` in the above example. ``request.args.getlist("bar")`` would return ``["3"]``. If the key is missing an empty list will be returned. 
- -``request.args.keys()`` - list of strings - Returns the list of available keys - for the example this would be ``["foo", "bar"]``. - -``key in request.args`` - True or False - You can use ``if key in request.args`` to check if a key is present. - -``for key in request.args`` - iterator - This lets you loop through every available key. - -``len(request.args)`` - integer - Returns the number of keys. From 4d798ca0e3df246bd47f0600cc7b5118ba33ac16 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 11:17:20 -0700 Subject: [PATCH 0258/2113] Added test for db.mtime_ns --- datasette/database.py | 4 +++- tests/test_internals_database.py | 12 +++++++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 89bf47f4..ed119542 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -33,7 +33,7 @@ class Database: self.cached_table_counts = None self._write_thread = None self._write_queue = None - if not self.is_mutable: + if not self.is_mutable and not self.is_memory: p = Path(path) self.hash = inspect_hash(p) self.cached_size = p.stat().st_size @@ -197,6 +197,8 @@ class Database: @property def mtime_ns(self): + if self.is_memory: + return None return Path(self.path).stat().st_mtime_ns @property diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index fde7ad2c..5d5520dd 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -1,7 +1,7 @@ """ Tests for the datasette.database.Database class """ -from datasette.database import Results, MultipleValues +from datasette.database import Database, Results, MultipleValues from datasette.utils import sqlite3 from .fixtures import app_client import pytest @@ -188,3 +188,13 @@ async def test_execute_write_fn_exception(db): with pytest.raises(AssertionError): await db.execute_write_fn(write_fn, block=True) + + +@pytest.mark.asyncio +async def test_mtime_ns(db): + assert isinstance(db.mtime_ns, int) + + +def test_mtime_ns_is_none_for_memory(app_client): + memory_db = Database(app_client.ds, is_memory=True) + assert None is memory_db.mtime_ns From 124acf34a678f0af438dc31a2dceebf28612f249 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 11:39:46 -0700 Subject: [PATCH 0259/2113] Removed db.get_outbound_foreign_keys method It duplicated the functionality of db.foreign_keys_for_table. 
---
 datasette/database.py       | 5 -----
 datasette/utils/__init__.py | 2 +-
 datasette/views/table.py    | 4 +---
 3 files changed, 2 insertions(+), 9 deletions(-)

diff --git a/datasette/database.py b/datasette/database.py
index ed119542..ab3c82c9 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -319,11 +319,6 @@ class Database:
     async def get_all_foreign_keys(self):
         return await self.execute_fn(get_all_foreign_keys)
 
-    async def get_outbound_foreign_keys(self, table):
-        return await self.execute_fn(
-            lambda conn: get_outbound_foreign_keys(conn, table)
-        )
-
     async def get_table_definition(self, table, type_="table"):
         table_definition_rows = list(
             await self.execute(

diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index bf965413..2eb31502 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -428,7 +428,7 @@ def get_outbound_foreign_keys(conn, table):
         if info is not None:
             id, seq, table_name, from_, to_, on_update, on_delete, match = info
             fks.append(
-                {"other_table": table_name, "column": from_, "other_column": to_}
+                {"column": from_, "other_table": table_name, "other_column": to_}
             )
     return fks

diff --git a/datasette/views/table.py b/datasette/views/table.py
index a629346f..2e9515c3 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -336,9 +336,7 @@ class TableView(RowTableShared):
                     through_table = through_data["table"]
                     other_column = through_data["column"]
                     value = through_data["value"]
-                    outgoing_foreign_keys = await db.get_outbound_foreign_keys(
-                        through_table
-                    )
+                    outgoing_foreign_keys = await db.foreign_keys_for_table(through_table)
                     try:
                         fk_to_us = [
                             fk for fk in outgoing_foreign_keys if fk["other_table"] == table

From c4fbe50676929b512940aab90de590a78ac5d7fc Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 11:40:30 -0700
Subject: [PATCH 0260/2113] Documentation for Database introspection methods, closes #684

Refs #576

---
 docs/internals.rst | 68 ++++++++++++++++++++++++++++++++++++++++++++++
 docs/metadata.rst  |  2 ++
 2 files changed, 70 insertions(+)

diff --git a/docs/internals.rst b/docs/internals.rst
index 4db710c0..e9ba9567 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -302,3 +302,71 @@ Here's an example of ``block=True`` in action:
             num_rows_left = await database.execute_write_fn(my_action, block=True)
         except Exception as e:
             print("An error occurred:", e)
+
+Database introspection
+----------------------
+
+The ``Database`` class also provides properties and methods for introspecting the database.
+
+``db.name`` - string
+    The name of the database - usually the filename without the ``.db`` extension.
+
+``db.size`` - integer
+    The size of the database file in bytes. 0 for ``:memory:`` databases.
+
+``db.mtime_ns`` - integer or None
+    The last modification time of the database file in nanoseconds since the epoch. ``None`` for ``:memory:`` databases.
+
+``await db.table_exists(table)`` - boolean
+    Check if a table called ``table`` exists.
+
+``await db.table_names()`` - list of strings
+    List of names of tables in the database.
+
+``await db.view_names()`` - list of strings
+    List of names of views in the database.
+
+``await db.table_columns(table)`` - list of strings
+    Names of columns in a specific table.
+
+``await db.primary_keys(table)`` - list of strings
+    Names of the columns that are part of the primary key for this table.
+
+``await db.fts_table(table)`` - string or None
+    The name of the FTS table associated with this table, if one exists.
+
+``await db.label_column_for_table(table)`` - string or None
+    The label column that is associated with this table - either automatically detected or using the ``"label_column"`` key from :ref:`metadata`, see :ref:`label_columns`.
+
+``await db.foreign_keys_for_table(table)`` - list of dictionaries
+    Details of columns in this table which are foreign keys to other tables. A list of dictionaries where each dictionary is shaped like this: ``{"column": string, "other_table": string, "other_column": string}``.
+
+``await db.hidden_table_names()`` - list of strings
+    List of tables which Datasette "hides" by default - usually these are tables associated with SQLite's full-text search feature, the SpatiaLite extension or tables hidden using the :ref:`metadata_hiding_tables` feature.
+
+``await db.get_table_definition(table)`` - string
+    Returns the SQL definition for the table - the ``CREATE TABLE`` statement and any associated ``CREATE INDEX`` statements.
+
+``await db.get_view_definition(view)`` - string
+    Returns the SQL definition of the named view.
+
+``await db.get_all_foreign_keys()`` - dictionary
+    Dictionary representing both incoming and outgoing foreign keys for this table. It has two keys, ``"incoming"`` and ``"outgoing"``, each of which is a list of dictionaries with keys ``"column"``, ``"other_table"`` and ``"other_column"``. For example:
+
+    .. code-block:: json
+
+        {
+            "incoming": [],
+            "outgoing": [
+                {
+                    "other_table": "attraction_characteristic",
+                    "column": "characteristic_id",
+                    "other_column": "pk"
+                },
+                {
+                    "other_table": "roadside_attractions",
+                    "column": "attraction_id",
+                    "other_column": "pk"
+                }
+            ]
+        }

diff --git a/docs/metadata.rst b/docs/metadata.rst
index 024af01e..471a52e3 100644
--- a/docs/metadata.rst
+++ b/docs/metadata.rst
@@ -256,6 +256,8 @@ used for the link label with the ``label_column`` property:
     }
 }
 
+.. _metadata_hiding_tables:
+
 Hiding tables
 -------------

From 060a56735c1d3bde0a4c7674e82b5f45bef34dee Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 13:24:00 -0700
Subject: [PATCH 0261/2113] actor_from_request and permission_allowed hookspecs, refs #699

---
 datasette/hookspecs.py | 10 ++++++++++
 docs/plugins.rst       | 37 +++++++++++++++++++++++++++++++++++++
 2 files changed, 47 insertions(+)

diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py
index c2fc0126..65c1c859 100644
--- a/datasette/hookspecs.py
+++ b/datasette/hookspecs.py
@@ -58,3 +58,13 @@ def register_output_renderer(datasette):
 @hookspec
 def register_facet_classes():
     "Register Facet subclasses"
+
+
+@hookspec
+def actor_from_request(datasette, request):
+    "Return an actor dictionary based on the incoming request"
+
+
+@hookspec
+def permission_allowed(actor, action, resource_type, resource_identifier):
+    "Check if actor is allowed to perform this action - return True, False or None"

diff --git a/docs/plugins.rst b/docs/plugins.rst
index f08f1217..09e8f5e3 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -941,3 +941,40 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att
 
     return wrap_with_databases_header
 
 Examples: `datasette-auth-github `_, `datasette-search-all `_, `datasette-media `_
+
+.. _plugin_actor_from_request:
+
+actor_from_request(datasette, request)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``datasette`` - :ref:`internals_datasette`
+    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.
+
+``request`` - object
+    The current HTTP :ref:`internals_request`.
+
+This is part of Datasette's authentication and permissions system. The function should attempt to authenticate an actor (either a user or an API actor of some sort) based on information in the request.
+
+If it cannot authenticate an actor, it should return ``None``. Otherwise it should return a dictionary representing that actor.
+
+.. _plugin_permission_allowed:
+
+permission_allowed(datasette, actor, action, resource_type, resource_identifier)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``datasette`` - :ref:`internals_datasette`
+    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.
+
+``actor`` - dictionary
+    The current actor, as decided by :ref:`plugin_actor_from_request`.
+
+``action`` - string
+    The action to be performed, e.g. ``"edit-table"``.
+
+``resource_type`` - string
+    The type of resource being acted on, e.g. ``"table"``.
+
+``resource_identifier`` - string
+    An identifier for the individual resource, e.g. the name of the table.
+
+Called to check that an actor has permission to perform an action on a resource. Can return ``True`` if the action is allowed, ``False`` if the action is not allowed or ``None`` if the plugin does not have an opinion one way or the other.

From 461c82838d65dd9f61c5be725343a82c61b5c3f3 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 15:06:33 -0700
Subject: [PATCH 0262/2113] Implemented actor_from_request with tests, refs #699

Also added datasette argument to permission_allowed hook

---
 datasette/app.py             | 13 ++++++++++++-
 datasette/hookspecs.py       |  2 +-
 docs/plugins.rst             | 23 +++++++++++++++++++++
 tests/plugins/my_plugin.py   |  8 ++++++++
 tests/plugins/my_plugin_2.py | 12 ++++++++++++
 tests/test_plugins.py        | 24 ++++++++++++++++++++++++
 6 files changed, 80 insertions(+), 2 deletions(-)

diff --git a/datasette/app.py b/datasette/app.py
index 4b9807b0..3f2876ec 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -798,7 +798,18 @@ class DatasetteRouter(AsgiRouter):
             and scope.get("scheme") != "https"
         ):
             scope = dict(scope, scheme="https")
-        return await super().route_path(scope, receive, send, path)
+        # Handle authentication
+        actor = None
+        for actor in pm.hook.actor_from_request(
+            datasette=self.ds, request=Request(scope, receive)
+        ):
+            if callable(actor):
+                actor = actor()
+            if asyncio.iscoroutine(actor):
+                actor = await actor
+            if actor:
+                break
+        return await super().route_path(dict(scope, actor=actor), receive, send, path)
 
     async def handle_404(self, scope, receive, send, exception=None):

diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py
index 65c1c859..71d06661 100644
--- a/datasette/hookspecs.py
+++ b/datasette/hookspecs.py
@@ -66,5 +66,5 @@ def actor_from_request(datasette, request):
 
 @hookspec
-def permission_allowed(actor, action, resource_type, resource_identifier):
+def permission_allowed(datasette, actor, action, resource_type, resource_identifier):
     "Check if actor is allowed to perform this action - return True, False or None"

diff --git a/docs/plugins.rst b/docs/plugins.rst
index 09e8f5e3..fb2843f4 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -957,6 +957,29 @@ This is part of Datasette's authentication and permissions system. The function
 
 If it cannot authenticate an actor, it should return ``None``.
Otherwise it should return a dictionary representing that actor. +Instead of returning a dictionary, this function can return an awaitable function which itself returns either ``None`` or a dictionary. This is useful for authentication functions that need to make a database query - for example: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def actor_from_request(datasette, request): + async def inner(): + token = request.args.get("_token") + if not token: + return None + # Look up ?_token=xxx in sessions table + result = await datasette.get_database().execute( + "select count(*) from sessions where token = ?", [token] + ) + if result.first()[0]: + return {"token": token} + else: + return None + + return inner + .. _plugin_permission_allowed: permission_allowed(datasette, actor, action, resource_type, resource_identifier) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 10d7e7e6..305cb3b7 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -126,3 +126,11 @@ class DummyFacet(Facet): facet_results = {} facets_timed_out = [] return facet_results, facets_timed_out + + +@hookimpl +def actor_from_request(datasette, request): + if request.args.get("_bot"): + return {"id": "bot"} + else: + return None diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index c9e7c78f..0a5cbba5 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -95,3 +95,15 @@ def asgi_wrapper(datasette): return add_x_databases_header return wrap_with_databases_header + + +@hookimpl +def actor_from_request(datasette, request): + async def inner(): + if request.args.get("_bot2"): + result = await datasette.get_database().execute("select 1 + 1") + return {"id": "bot2", "1+1": result.first()[0]} + else: + return None + + return inner diff --git a/tests/test_plugins.py b/tests/test_plugins.py index a34328a9..3ad26986 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -503,3 +503,27 @@ def test_register_facet_classes(app_client): "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet=pk3", }, ] == data["suggested_facets"] + + +def test_actor_from_request(app_client): + app_client.get("/") + # Should have no actor + assert None == app_client.ds._last_request.scope["actor"] + app_client.get("/?_bot=1") + # Should have bot actor + assert {"id": "bot"} == app_client.ds._last_request.scope["actor"] + + +def test_actor_from_request_async(app_client): + app_client.get("/") + # Should have no actor + assert None == app_client.ds._last_request.scope["actor"] + app_client.get("/?_bot2=1") + # Should have bot2 actor + assert {"id": "bot2", "1+1": 2} == app_client.ds._last_request.scope["actor"] + + +@pytest.mark.xfail +def test_permission_allowed(app_client): + # TODO + assert False From 9315bacf6f63e20781d21d170e55a55b2c54fcdd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 15:24:43 -0700 Subject: [PATCH 0263/2113] Implemented datasette.permission_allowed(), refs #699 --- datasette/app.py | 19 +++++++++++++++++++ docs/internals.rst | 19 +++++++++++++++++++ tests/plugins/my_plugin.py | 8 ++++++++ tests/plugins/my_plugin_2.py | 13 +++++++++++++ tests/test_plugins.py | 20 ++++++++++++++++---- 5 files changed, 75 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 3f2876ec..773dee31 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -406,6 +406,25 @@ class Datasette: # pylint: disable=no-member 
pm.hook.prepare_connection(conn=conn, database=database, datasette=self)
 
+    async def permission_allowed(
+        self, actor, action, resource_type=None, resource_identifier=None, default=False
+    ):
+        "Check permissions using the permission_allowed plugin hook"
+        for check in pm.hook.permission_allowed(
+            datasette=self,
+            actor=actor,
+            action=action,
+            resource_type=resource_type,
+            resource_identifier=resource_identifier,
+        ):
+            if callable(check):
+                check = check()
+            if asyncio.iscoroutine(check):
+                check = await check
+            if check is not None:
+                return check
+        return default
+
     async def execute(
         self,
         db_name,
diff --git a/docs/internals.rst b/docs/internals.rst
index e9ba9567..2ba70722 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -111,6 +111,25 @@ This method lets you read plugin configuration values that were set in ``metadat
 
 Renders a `Jinja template `__ using Datasette's preconfigured instance of Jinja and returns the resulting string. The template will have access to Datasette's default template functions and any functions that have been made available by other plugins.
 
+await .permission_allowed(actor, action, resource_type=None, resource_identifier=None, default=False)
+-----------------------------------------------------------------------------------------------------
+
+``actor`` - dictionary
+    The authenticated actor. This is usually ``request.scope.get("actor")``.
+
+``action`` - string
+    The name of the action that is being permission checked.
+
+``resource_type`` - string, optional
+    The type of resource being checked, e.g. ``"table"``.
+
+``resource_identifier`` - string, optional
+    The resource identifier, e.g. the name of the table.
+
+Check if the given actor has permission to perform the given action on the given resource. This uses plugins that implement the :ref:`plugin_permission_allowed` plugin hook to decide if the action is allowed or not.
+
+If none of the plugins express an opinion, the return value will be the ``default`` argument. This defaults to deny, but you can pass ``default=True`` to default to allow instead.
+
 ..
_datasette_get_database: .get_database(name) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 305cb3b7..46893710 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -134,3 +134,11 @@ def actor_from_request(datasette, request): return {"id": "bot"} else: return None + + +@hookimpl +def permission_allowed(actor, action): + if action == "this_is_allowed": + return True + elif action == "this_is_denied": + return False diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index 0a5cbba5..039112f4 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -107,3 +107,16 @@ def actor_from_request(datasette, request): return None return inner + + +@hookimpl +def permission_allowed(datasette, actor, action): + # Testing asyncio version of permission_allowed + async def inner(): + assert 2 == (await datasette.get_database().execute("select 1 + 1")).first()[0] + if action == "this_is_allowed_async": + return True + elif action == "this_is_denied_async": + return False + + return inner diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 3ad26986..e123b7a0 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -523,7 +523,19 @@ def test_actor_from_request_async(app_client): assert {"id": "bot2", "1+1": 2} == app_client.ds._last_request.scope["actor"] -@pytest.mark.xfail -def test_permission_allowed(app_client): - # TODO - assert False +@pytest.mark.asyncio +@pytest.mark.parametrize( + "action,expected", + [ + ("this_is_allowed", True), + ("this_is_denied", False), + ("this_is_allowed_async", True), + ("this_is_denied_async", False), + ("no_match", None), + ], +) +async def test_permission_allowed(app_client, action, expected): + actual = await app_client.ds.permission_allowed( + {"id": "actor"}, action, default=None + ) + assert expected == actual From 1fc6ceefb9eddd29844e7bfe3e06a83df6ce3dc4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 18:51:00 -0700 Subject: [PATCH 0264/2113] Added /-/actor.json - refs #699 Also added JSON highlighting to introspection documentation. --- datasette/app.py | 7 ++++++ datasette/views/special.py | 8 +++++-- docs/introspection.rst | 44 ++++++++++++++++++++++++++++++++------ tests/test_plugins.py | 7 ++++++ 4 files changed, 57 insertions(+), 9 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 773dee31..37b4ed3d 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -586,6 +586,9 @@ class Datasette: ) return d + def _actor(self, request): + return {"actor": request.scope.get("actor", None)} + def table_metadata(self, database, table): "Fetch table-specific metadata." 
return ( @@ -762,6 +765,10 @@ class Datasette: JsonDataView.as_asgi(self, "databases.json", self._connected_databases), r"/-/databases(?P(\.json)?)$", ) + add_route( + JsonDataView.as_asgi(self, "actor.json", self._actor, needs_request=True), + r"/-/actor(?P(\.json)?)$", + ) add_route( PatternPortfolioView.as_asgi(self), r"/-/patterns$", ) diff --git a/datasette/views/special.py b/datasette/views/special.py index dfe5ea8c..840473a7 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -6,13 +6,17 @@ from .base import BaseView class JsonDataView(BaseView): name = "json_data" - def __init__(self, datasette, filename, data_callback): + def __init__(self, datasette, filename, data_callback, needs_request=False): self.ds = datasette self.filename = filename self.data_callback = data_callback + self.needs_request = needs_request async def get(self, request, as_format): - data = self.data_callback() + if self.needs_request: + data = self.data_callback(request) + else: + data = self.data_callback() if as_format: headers = {} if self.ds.cors: diff --git a/docs/introspection.rst b/docs/introspection.rst index 3cd4a40f..e5d08dbc 100644 --- a/docs/introspection.rst +++ b/docs/introspection.rst @@ -10,7 +10,9 @@ Each of these pages can be viewed in your browser. Add ``.json`` to the URL to g /-/metadata ----------- -Shows the contents of the ``metadata.json`` file that was passed to ``datasette serve``, if any. `Metadata example `_:: +Shows the contents of the ``metadata.json`` file that was passed to ``datasette serve``, if any. `Metadata example `_: + +.. code-block:: json { "license": "CC Attribution 4.0 License", @@ -18,7 +20,9 @@ Shows the contents of the ``metadata.json`` file that was passed to ``datasette "source": "fivethirtyeight/data on GitHub", "source_url": "https://github.com/fivethirtyeight/data", "title": "Five Thirty Eight", - "databases": {...} + "databases": { + + } } .. _JsonDataView_versions: @@ -26,7 +30,9 @@ Shows the contents of the ``metadata.json`` file that was passed to ``datasette /-/versions ----------- -Shows the version of Datasette, Python and SQLite. `Versions example `_:: +Shows the version of Datasette, Python and SQLite. `Versions example `_: + +.. code-block:: json { "datasette": { @@ -63,7 +69,9 @@ Shows the version of Datasette, Python and SQLite. `Versions example `_:: +Shows a list of currently installed plugins and their versions. `Plugins example `_: + +.. code-block:: json [ { @@ -79,7 +87,9 @@ Shows a list of currently installed plugins and their versions. `Plugins example /-/config --------- -Shows the :ref:`config` options for this instance of Datasette. `Config example `_:: +Shows the :ref:`config` options for this instance of Datasette. `Config example `_: + +.. code-block:: json { "default_facet_size": 30, @@ -95,7 +105,9 @@ Shows the :ref:`config` options for this instance of Datasette. `Config example /-/databases ------------ -Shows currently attached databases. `Databases example `_:: +Shows currently attached databases. `Databases example `_: + +.. code-block:: json [ { @@ -113,7 +125,9 @@ Shows currently attached databases. `Databases example `_:: +Shows details of threads and ``asyncio`` tasks. `Threads example `_: + +.. code-block:: json { "num_threads": 2, @@ -136,3 +150,19 @@ Shows details of threads and ``asyncio`` tasks. `Threads example wait_for=()]>>" ] } + +.. _JsonDataView_actor: + +/-/actor +-------- + +Shows the currently authenticated actor. Useful for debugging Datasette authentication plugins. + +.. 
code-block:: json + + { + "actor": { + "id": 1, + "username": "some-user" + } + } diff --git a/tests/test_plugins.py b/tests/test_plugins.py index e123b7a0..7a3fb49a 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -539,3 +539,10 @@ async def test_permission_allowed(app_client, action, expected): {"id": "actor"}, action, default=None ) assert expected == actual + + +def test_actor_json(app_client): + assert {"actor": None} == app_client.get("/-/actor.json").json + assert {"actor": {"id": "bot2", "1+1": 2}} == app_client.get( + "/-/actor.json/?_bot2=1" + ).json From fa27e44fe09f57dcb87157be97f15b6add7f14ad Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 31 May 2020 15:42:08 -0700 Subject: [PATCH 0265/2113] datasette.sign() and datasette.unsign() methods, refs #785 --- datasette/app.py | 9 +++++++++ datasette/cli.py | 7 +++++++ docs/datasette-serve-help.txt | 3 +++ docs/internals.rst | 28 ++++++++++++++++++++++++++++ setup.py | 1 + tests/test_cli.py | 1 + tests/test_internals_datasette.py | 12 ++++++++++++ 7 files changed, 61 insertions(+) diff --git a/datasette/app.py b/datasette/app.py index 37b4ed3d..5e3d3af5 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -14,6 +14,7 @@ from pathlib import Path import click from markupsafe import Markup +from itsdangerous import URLSafeSerializer import jinja2 from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader, escape from jinja2.environment import Template @@ -163,12 +164,14 @@ class Datasette: static_mounts=None, memory=False, config=None, + secret=None, version_note=None, config_dir=None, ): assert config_dir is None or isinstance( config_dir, Path ), "config_dir= should be a pathlib.Path" + self._secret = secret or os.urandom(32).hex() self.files = tuple(files) + tuple(immutables or []) if config_dir: self.files += tuple([str(p) for p in config_dir.glob("*.db")]) @@ -281,6 +284,12 @@ class Datasette: self._register_renderers() + def sign(self, value, namespace="default"): + return URLSafeSerializer(self._secret, namespace).dumps(value) + + def unsign(self, signed, namespace="default"): + return URLSafeSerializer(self._secret, namespace).loads(signed) + def get_database(self, name=None): if name is None: return next(iter(self.databases.values())) diff --git a/datasette/cli.py b/datasette/cli.py index c59fb6e0..dba3a612 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -299,6 +299,11 @@ def package( help="Set config option using configname:value datasette.readthedocs.io/en/latest/config.html", multiple=True, ) +@click.option( + "--secret", + help="Secret used for signing secure values, such as signed cookies", + envvar="DATASETTE_SECRET", +) @click.option("--version-note", help="Additional note to show on /-/versions") @click.option("--help-config", is_flag=True, help="Show available config options") def serve( @@ -317,6 +322,7 @@ def serve( static, memory, config, + secret, version_note, help_config, return_instance=False, @@ -362,6 +368,7 @@ def serve( static_mounts=static, config=dict(config), memory=memory, + secret=secret, version_note=version_note, ) diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index 5265c294..ab27714a 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -29,6 +29,9 @@ Options: --config CONFIG Set config option using configname:value datasette.readthedocs.io/en/latest/config.html + --secret TEXT Secret used for signing secure values, such as signed + cookies + --version-note TEXT Additional note to show on 
/-/versions
   --help-config        Show available config options
   --help               Show this message and exit.
diff --git a/docs/internals.rst b/docs/internals.rst
index 2ba70722..68a35312 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -183,6 +183,34 @@ Use ``is_memory`` if the connection is to an in-memory SQLite database.
 
 This removes a database that has been previously added. ``name=`` is the unique name of that database, also used in the URL for it.
 
+.. _datasette_sign:
+
+.sign(value, namespace="default")
+---------------------------------
+
+``value`` - any serializable type
+    The value to be signed.
+
+``namespace`` - string, optional
+    An alternative namespace, see the `itsdangerous salt documentation `__.
+
+Utility method for signing values, such that you can safely pass data to and from an untrusted environment. This is a wrapper around the `itsdangerous `__ library.
+
+This method returns a signed string, which can be decoded and verified using :ref:`datasette_unsign`.
+
+.. _datasette_unsign:
+
+.unsign(signed, namespace="default")
+------------------------------------
+
+``signed`` - string
+    The signed string that was created using :ref:`datasette_sign`.
+
+``namespace`` - string, optional
+    The alternative namespace, if one was used.
+
+Returns the original, decoded object that was passed to :ref:`datasette_sign`. If the signature is not valid this raises an ``itsdangerous.BadSignature`` exception.
+
 .. _internals_database:
 
 Database class
diff --git a/setup.py b/setup.py
index d9c70de5..93628266 100644
--- a/setup.py
+++ b/setup.py
@@ -55,6 +55,7 @@ setup(
         "janus>=0.4,<0.6",
         "PyYAML~=5.3",
         "mergedeep>=1.1.1,<1.4.0",
+        "itsdangerous~=1.1",
     ],
     entry_points="""
        [console_scripts]
diff --git a/tests/test_cli.py b/tests/test_cli.py
index ac5746c6..f52f17b4 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -75,6 +75,7 @@ def test_metadata_yaml():
         static=[],
         memory=False,
         config=[],
+        secret=None,
         version_note=None,
         help_config=False,
         return_instance=True,
diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py
index 4993250d..0be0b932 100644
--- a/tests/test_internals_datasette.py
+++ b/tests/test_internals_datasette.py
@@ -1,6 +1,7 @@
 """
 Tests for the datasette.app.Datasette class
 """
+from itsdangerous import BadSignature
 from .fixtures import app_client
 import pytest
 
@@ -21,3 +22,14 @@ def test_get_database_no_argument(datasette):
     # Returns the first available database:
     db = datasette.get_database()
     assert "fixtures" == db.name
+
+
+@pytest.mark.parametrize("value", ["hello", 123, {"key": "value"}])
+@pytest.mark.parametrize("namespace", [None, "two"])
+def test_sign_unsign(datasette, value, namespace):
+    extra_args = [namespace] if namespace else []
+    signed = datasette.sign(value, *extra_args)
+    assert value != signed
+    assert value == datasette.unsign(signed, *extra_args)
+    with pytest.raises(BadSignature):
+        datasette.unsign(signed[:-1] + ("!" if signed[-1] != "!"
else ":")) From 7690d5ba40fda37ba4ba38ad56fe06c3aed071de Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 31 May 2020 17:18:06 -0700 Subject: [PATCH 0266/2113] Docs for --secret/DATASETTE_SECRET - closes #785 --- docs/config.rst | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/docs/config.rst b/docs/config.rst index d8c2f550..da93e40a 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -288,3 +288,30 @@ For example, if you are sending traffic from ``https://www.example.com/tools/dat You can do that like so:: datasette mydatabase.db --config base_url:/tools/datasette/ + +.. _config_secret: + +Configuring the secret +---------------------- + +Datasette uses a secret string to sign secure values such as cookies. + +If you do not provide a secret, Datasette will create one when it starts up. This secret will reset every time the Datasette server restarts though, so things like authentication cookies will not stay valid between restarts. + +You can pass a secret to Datasette in two ways: with the ``--secret`` command-line option or by setting a ``DATASETTE_SECRET`` environment variable. + +:: + + $ datasette mydb.db --secret=SECRET_VALUE_HERE + +Or:: + + $ export DATASETTE_SECRET=SECRET_VALUE_HERE + $ datasette mydb.db + +One way to generate a secure random secret is to use Python like this:: + + $ python3 -c 'import os; print(os.urandom(32).hex())' + cdb19e94283a20f9d42cca50c5a4871c0aa07392db308755d60a1a5b9bb0fa52 + +Plugin authors make use of this signing mechanism in their plugins using :ref:`datasette_sign` and :ref:`datasette_unsign`. From 9f3d4aba31baf1e2de1910a40bc9663ef53b94e9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 31 May 2020 18:03:17 -0700 Subject: [PATCH 0267/2113] --root option and /-/auth-token view, refs #784 --- datasette/app.py | 6 +++++- datasette/cli.py | 8 ++++++++ datasette/plugins.py | 1 + datasette/views/special.py | 32 +++++++++++++++++++++++++++++++- docs/datasette-serve-help.txt | 1 + tests/fixtures.py | 19 +++++++++++++++---- tests/test_auth.py | 25 +++++++++++++++++++++++++ tests/test_cli.py | 1 + tests/test_docs.py | 4 ++-- 9 files changed, 89 insertions(+), 8 deletions(-) create mode 100644 tests/test_auth.py diff --git a/datasette/app.py b/datasette/app.py index 5e3d3af5..6b39ce12 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -24,7 +24,7 @@ import uvicorn from .views.base import DatasetteError, ureg, AsgiRouter from .views.database import DatabaseDownload, DatabaseView from .views.index import IndexView -from .views.special import JsonDataView, PatternPortfolioView +from .views.special import JsonDataView, PatternPortfolioView, AuthTokenView from .views.table import RowView, TableView from .renderer import json_renderer from .database import Database, QueryInterrupted @@ -283,6 +283,7 @@ class Datasette: pm.hook.prepare_jinja2_environment(env=self.jinja_env) self._register_renderers() + self._root_token = os.urandom(32).hex() def sign(self, value, namespace="default"): return URLSafeSerializer(self._secret, namespace).dumps(value) @@ -778,6 +779,9 @@ class Datasette: JsonDataView.as_asgi(self, "actor.json", self._actor, needs_request=True), r"/-/actor(?P(\.json)?)$", ) + add_route( + AuthTokenView.as_asgi(self), r"/-/auth-token$", + ) add_route( PatternPortfolioView.as_asgi(self), r"/-/patterns$", ) diff --git a/datasette/cli.py b/datasette/cli.py index dba3a612..23f9e36b 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -304,6 +304,11 @@ def package( help="Secret used for signing secure 
values, such as signed cookies", envvar="DATASETTE_SECRET", ) +@click.option( + "--root", + help="Output URL that sets a cookie authenticating the root user", + is_flag=True, +) @click.option("--version-note", help="Additional note to show on /-/versions") @click.option("--help-config", is_flag=True, help="Show available config options") def serve( @@ -323,6 +328,7 @@ def serve( memory, config, secret, + root, version_note, help_config, return_instance=False, @@ -387,6 +393,8 @@ def serve( asyncio.get_event_loop().run_until_complete(check_databases(ds)) # Start the server + if root: + print("http://{}:{}/-/auth-token?token={}".format(host, port, ds._root_token)) uvicorn.run(ds.app(), host=host, port=port, log_level="info") diff --git a/datasette/plugins.py b/datasette/plugins.py index 6c9677d0..487fce4d 100644 --- a/datasette/plugins.py +++ b/datasette/plugins.py @@ -9,6 +9,7 @@ DEFAULT_PLUGINS = ( "datasette.publish.cloudrun", "datasette.facets", "datasette.sql_functions", + "datasette.actor_auth_cookie", ) pm = pluggy.PluginManager("datasette") diff --git a/datasette/views/special.py b/datasette/views/special.py index 840473a7..910193e8 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -1,6 +1,8 @@ import json from datasette.utils.asgi import Response from .base import BaseView +from http.cookies import SimpleCookie +import secrets class JsonDataView(BaseView): @@ -45,4 +47,32 @@ class PatternPortfolioView(BaseView): self.ds = datasette async def get(self, request): - return await self.render(["patterns.html"], request=request,) + return await self.render(["patterns.html"], request=request) + + +class AuthTokenView(BaseView): + name = "auth_token" + + def __init__(self, datasette): + self.ds = datasette + + async def get(self, request): + token = request.args.get("token") or "" + if not self.ds._root_token: + return Response("Root token has already been used", status=403) + if secrets.compare_digest(token, self.ds._root_token): + self.ds._root_token = None + cookie = SimpleCookie() + cookie["ds_actor"] = self.ds.sign({"id": "root"}, "actor") + cookie["ds_actor"]["path"] = "/" + response = Response( + body="", + status=302, + headers={ + "Location": "/", + "set-cookie": cookie.output(header="").lstrip(), + }, + ) + return response + else: + return Response("Invalid token", status=403) diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index ab27714a..183ecc14 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -32,6 +32,7 @@ Options: --secret TEXT Secret used for signing secure values, such as signed cookies + --root Output URL that sets a cookie authenticating the root user --version-note TEXT Additional note to show on /-/versions --help-config Show available config options --help Show this message and exit. 
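The cookie signing used by ``/-/auth-token`` is easier to see in isolation. Below is a minimal sketch of the same scheme, assuming the itsdangerous 1.x API that this patch depends on; the secret value shown is a placeholder, not anything from the patch:

.. code-block:: python

    from itsdangerous import BadSignature, URLSafeSerializer

    secret = "placeholder-secret"  # stands in for --secret / DATASETTE_SECRET
    # The second argument is the namespace, used as the itsdangerous salt,
    # so a value signed as an "actor" cannot be replayed in another context
    serializer = URLSafeSerializer(secret, "actor")

    cookie_value = serializer.dumps({"id": "root"})  # what AuthTokenView sets
    try:
        actor = serializer.loads(cookie_value)  # what a cookie reader does
    except BadSignature:
        actor = None
    assert actor == {"id": "root"}

This round trip is exactly what ``datasette.sign()`` and ``datasette.unsign()`` wrap, which is also why a stable ``--secret`` is needed for cookies to survive server restarts.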
diff --git a/tests/fixtures.py b/tests/fixtures.py index 9479abf6..b2cfd3d6 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -2,6 +2,7 @@ from datasette.app import Datasette from datasette.utils import sqlite3 from asgiref.testing import ApplicationCommunicator from asgiref.sync import async_to_sync +from http.cookies import SimpleCookie import itertools import json import os @@ -44,10 +45,14 @@ class TestClient: self.asgi_app = asgi_app @async_to_sync - async def get(self, path, allow_redirects=True, redirect_count=0, method="GET"): - return await self._get(path, allow_redirects, redirect_count, method) + async def get( + self, path, allow_redirects=True, redirect_count=0, method="GET", cookies=None + ): + return await self._get(path, allow_redirects, redirect_count, method, cookies) - async def _get(self, path, allow_redirects=True, redirect_count=0, method="GET"): + async def _get( + self, path, allow_redirects=True, redirect_count=0, method="GET", cookies=None + ): query_string = b"" if "?" in path: path, _, query_string = path.partition("?") @@ -56,6 +61,12 @@ class TestClient: raw_path = path.encode("latin-1") else: raw_path = quote(path, safe="/:,").encode("latin-1") + headers = [[b"host", b"localhost"]] + if cookies: + sc = SimpleCookie() + for key, value in cookies.items(): + sc[key] = value + headers.append([b"cookie", sc.output(header="").encode("utf-8")]) scope = { "type": "http", "http_version": "1.0", @@ -63,7 +74,7 @@ class TestClient: "path": unquote(path), "raw_path": raw_path, "query_string": query_string, - "headers": [[b"host", b"localhost"]], + "headers": headers, } instance = ApplicationCommunicator(self.asgi_app, scope) await instance.send_input({"type": "http.request"}) diff --git a/tests/test_auth.py b/tests/test_auth.py new file mode 100644 index 00000000..6b69ab93 --- /dev/null +++ b/tests/test_auth.py @@ -0,0 +1,25 @@ +from .fixtures import app_client + + +def test_auth_token(app_client): + "The /-/auth-token endpoint sets the correct cookie" + assert app_client.ds._root_token is not None + path = "/-/auth-token?token={}".format(app_client.ds._root_token) + response = app_client.get(path, allow_redirects=False,) + assert 302 == response.status + assert "/" == response.headers["Location"] + set_cookie = response.headers["set-cookie"] + assert set_cookie.endswith("; Path=/") + assert set_cookie.startswith("ds_actor=") + cookie_value = set_cookie.split("ds_actor=")[1].split("; Path=/")[0] + assert {"id": "root"} == app_client.ds.unsign(cookie_value, "actor") + # Check that a second with same token fails + assert app_client.ds._root_token is None + assert 403 == app_client.get(path, allow_redirects=False,).status + + +def test_actor_cookie(app_client): + "A valid actor cookie sets request.scope['actor']" + cookie = app_client.ds.sign({"id": "test"}, "actor") + response = app_client.get("/", cookies={"ds_actor": cookie}) + assert {"id": "test"} == app_client.ds._last_request.scope["actor"] diff --git a/tests/test_cli.py b/tests/test_cli.py index f52f17b4..529661ce 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -76,6 +76,7 @@ def test_metadata_yaml(): memory=False, config=[], secret=None, + root=False, version_note=None, help_config=False, return_instance=True, diff --git a/tests/test_docs.py b/tests/test_docs.py index 77c2a611..09c00ddf 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -65,8 +65,8 @@ def documented_views(): first_word = label.split("_")[0] if first_word.endswith("View"): view_labels.add(first_word) - # We deliberately 
don't document this one: - view_labels.add("PatternPortfolioView") + # We deliberately don't document these: + view_labels.update(("PatternPortfolioView", "AuthTokenView")) return view_labels From 57cf5139c552cb7feab9947daa949ca434cc0a66 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 31 May 2020 18:06:16 -0700 Subject: [PATCH 0268/2113] Default actor_from_request hook supporting ds_actor signed cookie Refs #784, refs #699 --- datasette/actor_auth_cookie.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 datasette/actor_auth_cookie.py diff --git a/datasette/actor_auth_cookie.py b/datasette/actor_auth_cookie.py new file mode 100644 index 00000000..41f33fe9 --- /dev/null +++ b/datasette/actor_auth_cookie.py @@ -0,0 +1,18 @@ +from datasette import hookimpl +from itsdangerous import BadSignature +from http.cookies import SimpleCookie + + +@hookimpl +def actor_from_request(datasette, request): + cookies = SimpleCookie() + cookies.load( + dict(request.scope.get("headers") or []).get(b"cookie", b"").decode("utf-8") + ) + if "ds_actor" not in cookies: + return None + ds_actor = cookies["ds_actor"].value + try: + return datasette.unsign(ds_actor, "actor") + except BadSignature: + return None From dfdbdf378aba9afb66666f66b78df2f2069d2595 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 31 May 2020 22:00:36 -0700 Subject: [PATCH 0269/2113] Added /-/permissions debug tool, closes #788 Also started the authentication.rst docs page, refs #786. Part of authentication work, refs #699. --- datasette/app.py | 32 +++++++++++-- datasette/default_permissions.py | 7 +++ datasette/plugins.py | 1 + datasette/templates/permissions_debug.html | 55 ++++++++++++++++++++++ datasette/views/special.py | 18 +++++++ docs/authentication.rst | 18 +++++++ docs/index.rst | 1 + tests/test_auth.py | 23 +++++++++ 8 files changed, 152 insertions(+), 3 deletions(-) create mode 100644 datasette/default_permissions.py create mode 100644 datasette/templates/permissions_debug.html create mode 100644 docs/authentication.rst diff --git a/datasette/app.py b/datasette/app.py index 6b39ce12..b8a5e23d 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1,5 +1,6 @@ import asyncio import collections +import datetime import hashlib import itertools import json @@ -24,7 +25,12 @@ import uvicorn from .views.base import DatasetteError, ureg, AsgiRouter from .views.database import DatabaseDownload, DatabaseView from .views.index import IndexView -from .views.special import JsonDataView, PatternPortfolioView, AuthTokenView +from .views.special import ( + JsonDataView, + PatternPortfolioView, + AuthTokenView, + PermissionsDebugView, +) from .views.table import RowView, TableView from .renderer import json_renderer from .database import Database, QueryInterrupted @@ -283,6 +289,7 @@ class Datasette: pm.hook.prepare_jinja2_environment(env=self.jinja_env) self._register_renderers() + self.permission_checks = collections.deque(maxlen=30) self._root_token = os.urandom(32).hex() def sign(self, value, namespace="default"): @@ -420,6 +427,7 @@ class Datasette: self, actor, action, resource_type=None, resource_identifier=None, default=False ): "Check permissions using the permissions_allowed plugin hook" + result = None for check in pm.hook.permission_allowed( datasette=self, actor=actor, @@ -432,8 +440,23 @@ class Datasette: if asyncio.iscoroutine(check): check = await check if check is not None: - return check - return default + result = check + used_default = False + if result is None: + result = default + 
used_default = True + self.permission_checks.append( + { + "when": datetime.datetime.utcnow().isoformat(), + "actor": actor, + "action": action, + "resource_type": resource_type, + "resource_identifier": resource_identifier, + "used_default": used_default, + "result": result, + } + ) + return result async def execute( self, @@ -782,6 +805,9 @@ class Datasette: add_route( AuthTokenView.as_asgi(self), r"/-/auth-token$", ) + add_route( + PermissionsDebugView.as_asgi(self), r"/-/permissions$", + ) add_route( PatternPortfolioView.as_asgi(self), r"/-/patterns$", ) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py new file mode 100644 index 00000000..0b0d17f9 --- /dev/null +++ b/datasette/default_permissions.py @@ -0,0 +1,7 @@ +from datasette import hookimpl + + +@hookimpl +def permission_allowed(actor, action, resource_type, resource_identifier): + if actor and actor.get("id") == "root" and action == "permissions-debug": + return True diff --git a/datasette/plugins.py b/datasette/plugins.py index 487fce4d..26d4fd63 100644 --- a/datasette/plugins.py +++ b/datasette/plugins.py @@ -10,6 +10,7 @@ DEFAULT_PLUGINS = ( "datasette.facets", "datasette.sql_functions", "datasette.actor_auth_cookie", + "datasette.default_permissions", ) pm = pluggy.PluginManager("datasette") diff --git a/datasette/templates/permissions_debug.html b/datasette/templates/permissions_debug.html new file mode 100644 index 00000000..fb098c5c --- /dev/null +++ b/datasette/templates/permissions_debug.html @@ -0,0 +1,55 @@ +{% extends "base.html" %} + +{% block title %}Debug permissions{% endblock %} + +{% block extra_head %} + +{% endblock %} + +{% block nav %} +

+    <p class="crumbs">
+        <a href="/">home</a>
+    </p>
+    {{ super() }}
+{% endblock %}
+
+{% block content %}
+
+<h1>Recent permissions checks</h1>
+
+{% for check in permission_checks %}
+    <div class="check">
+        <p>
+            <strong class="check-action">{{ check.action }}</strong>
+            checked at
+            {{ check.when }}
+            {% if check.result %}
+                <span class="check-result-true">&#x2713;</span>
+            {% else %}
+                <span class="check-result-false">&#x2717;</span>
+            {% endif %}
+            {% if check.used_default %}
+                <span class="check-used-default">(used default)</span>
+            {% endif %}
+        </p>
+        <p>Actor: {{ check.actor|tojson }}</p>
+        {% if check.resource_type %}
+            <p>Resource: {{ check.resource_type }}: {{ check.resource_identifier }}</p>
+        {% endif %}
+    </div>
    +{% endfor %} + +{% endblock %} diff --git a/datasette/views/special.py b/datasette/views/special.py index 910193e8..b75355fb 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -76,3 +76,21 @@ class AuthTokenView(BaseView): return response else: return Response("Invalid token", status=403) + + +class PermissionsDebugView(BaseView): + name = "permissions_debug" + + def __init__(self, datasette): + self.ds = datasette + + async def get(self, request): + if not await self.ds.permission_allowed( + request.scope.get("actor"), "permissions-debug" + ): + return Response("Permission denied", status=403) + return await self.render( + ["permissions_debug.html"], + request, + {"permission_checks": reversed(self.ds.permission_checks)}, + ) diff --git a/docs/authentication.rst b/docs/authentication.rst new file mode 100644 index 00000000..0a9a4c0d --- /dev/null +++ b/docs/authentication.rst @@ -0,0 +1,18 @@ +.. _authentication: + +================================ + Authentication and permissions +================================ + +Datasette's authentication system is currently under construction. Follow `issue 699 `__ to track the development of this feature. + +.. _PermissionsDebugView: + +Permissions Debug +================= + +The debug tool at ``/-/permissions`` is only available to the root user. + +It shows the thirty most recent permission checks that have been carried out by the Datasette instance. + +This is designed to help administrators and plugin authors understand exactly how permission checks are being carried out, in order to effectively configure Datasette's permission system. diff --git a/docs/index.rst b/docs/index.rst index 2390e263..03988c8e 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -40,6 +40,7 @@ Contents publish json_api sql_queries + authentication performance csv_export facets diff --git a/tests/test_auth.py b/tests/test_auth.py index 6b69ab93..ddf328af 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,4 +1,5 @@ from .fixtures import app_client +from bs4 import BeautifulSoup as Soup def test_auth_token(app_client): @@ -23,3 +24,25 @@ def test_actor_cookie(app_client): cookie = app_client.ds.sign({"id": "test"}, "actor") response = app_client.get("/", cookies={"ds_actor": cookie}) assert {"id": "test"} == app_client.ds._last_request.scope["actor"] + + +def test_permissions_debug(app_client): + assert 403 == app_client.get("/-/permissions").status + # With the cookie it should work + cookie = app_client.ds.sign({"id": "root"}, "actor") + response = app_client.get("/-/permissions", cookies={"ds_actor": cookie}) + # Should show one failure and one success + soup = Soup(response.body, "html.parser") + check_divs = soup.findAll("div", {"class": "check"}) + checks = [ + { + "action": div.select_one(".check-action").text, + "result": bool(div.select(".check-result-true")), + "used_default": bool(div.select(".check-used-default")), + } + for div in check_divs + ] + assert [ + {"action": "permissions-debug", "result": True, "used_default": False}, + {"action": "permissions-debug", "result": False, "used_default": True}, + ] == checks From b4cd8797b8592a8bf060a76eb7227f3f1ba61d32 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 2 Jun 2020 10:43:50 -0700 Subject: [PATCH 0270/2113] permission_checks is now _permission_checks --- datasette/app.py | 4 ++-- datasette/views/special.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index b8a5e23d..e3ad5fc7 100644 --- 
a/datasette/app.py +++ b/datasette/app.py @@ -289,7 +289,7 @@ class Datasette: pm.hook.prepare_jinja2_environment(env=self.jinja_env) self._register_renderers() - self.permission_checks = collections.deque(maxlen=30) + self._permission_checks = collections.deque(maxlen=30) self._root_token = os.urandom(32).hex() def sign(self, value, namespace="default"): @@ -445,7 +445,7 @@ class Datasette: if result is None: result = default used_default = True - self.permission_checks.append( + self._permission_checks.append( { "when": datetime.datetime.utcnow().isoformat(), "actor": actor, diff --git a/datasette/views/special.py b/datasette/views/special.py index b75355fb..811ed4cb 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -92,5 +92,5 @@ class PermissionsDebugView(BaseView): return await self.render( ["permissions_debug.html"], request, - {"permission_checks": reversed(self.ds.permission_checks)}, + {"permission_checks": reversed(self.ds._permission_checks)}, ) From 1d0bea157ac7074f23229af247565a78fa71c03f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 2 Jun 2020 14:06:53 -0700 Subject: [PATCH 0271/2113] New request.cookies property --- datasette/actor_auth_cookie.py | 9 ++------- datasette/utils/asgi.py | 7 +++++++ docs/internals.rst | 3 +++ 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/datasette/actor_auth_cookie.py b/datasette/actor_auth_cookie.py index 41f33fe9..f3a0f306 100644 --- a/datasette/actor_auth_cookie.py +++ b/datasette/actor_auth_cookie.py @@ -5,14 +5,9 @@ from http.cookies import SimpleCookie @hookimpl def actor_from_request(datasette, request): - cookies = SimpleCookie() - cookies.load( - dict(request.scope.get("headers") or []).get(b"cookie", b"").decode("utf-8") - ) - if "ds_actor" not in cookies: + if "ds_actor" not in request.cookies: return None - ds_actor = cookies["ds_actor"].value try: - return datasette.unsign(ds_actor, "actor") + return datasette.unsign(request.cookies["ds_actor"], "actor") except BadSignature: return None diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 24398b77..960532ca 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -4,6 +4,7 @@ from mimetypes import guess_type from urllib.parse import parse_qs, urlunparse, parse_qsl from pathlib import Path from html import escape +from http.cookies import SimpleCookie import re import aiofiles @@ -44,6 +45,12 @@ class Request: def host(self): return self.headers.get("host") or "localhost" + @property + def cookies(self): + cookies = SimpleCookie() + cookies.load(self.headers.get("cookie", "")) + return {key: value.value for key, value in cookies.items()} + @property def path(self): if self.scope.get("raw_path") is not None: diff --git a/docs/internals.rst b/docs/internals.rst index 68a35312..b3ad623f 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -27,6 +27,9 @@ The request object is passed to various plugin hooks. It represents an incoming ``.headers`` - dictionary (str -> str) A dictionary of incoming HTTP request headers. +``.cookies`` - dictionary (str -> str) + A dictionary of incoming cookies + ``.host`` - string The host header from the incoming request, e.g. ``latest.datasette.io`` or ``localhost``. 
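Combined with the ``actor_from_request`` hook, the new ``request.cookies`` property makes cookie-reading plugins noticeably shorter. A hypothetical sketch, with the cookie name invented for illustration:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def actor_from_request(datasette, request):
        # request.cookies is the plain dict built by the new property,
        # so there is no need to parse the cookie header by hand
        token = request.cookies.get("session-token")  # hypothetical cookie
        if token:
            return {"id": "session-user", "token": token}
        return None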
From 4fa7cf68536628344356d3ef8c92c25c249067a0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 2 Jun 2020 14:08:12 -0700 Subject: [PATCH 0272/2113] Flash messages mechanism, closes #790 --- datasette/app.py | 42 +++++++++++++++++++++++ datasette/static/app.css | 16 +++++++++ datasette/templates/base.html | 8 +++++ datasette/templates/messages_debug.html | 26 ++++++++++++++ datasette/utils/asgi.py | 4 +-- datasette/views/base.py | 16 +++++++++ datasette/views/special.py | 24 +++++++++++++ docs/internals.rst | 18 ++++++++++ docs/introspection.rst | 8 +++++ tests/fixtures.py | 6 ++++ tests/plugins/messages_output_renderer.py | 21 ++++++++++++ tests/test_api.py | 1 + tests/test_auth.py | 6 +--- tests/test_messages.py | 28 +++++++++++++++ 14 files changed, 217 insertions(+), 7 deletions(-) create mode 100644 datasette/templates/messages_debug.html create mode 100644 tests/plugins/messages_output_renderer.py create mode 100644 tests/test_messages.py diff --git a/datasette/app.py b/datasette/app.py index e3ad5fc7..41c73900 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -2,6 +2,7 @@ import asyncio import collections import datetime import hashlib +from http.cookies import SimpleCookie import itertools import json import os @@ -30,6 +31,7 @@ from .views.special import ( PatternPortfolioView, AuthTokenView, PermissionsDebugView, + MessagesDebugView, ) from .views.table import RowView, TableView from .renderer import json_renderer @@ -156,6 +158,11 @@ async def favicon(scope, receive, send): class Datasette: + # Message constants: + INFO = 1 + WARNING = 2 + ERROR = 3 + def __init__( self, files, @@ -423,6 +430,38 @@ class Datasette: # pylint: disable=no-member pm.hook.prepare_connection(conn=conn, database=database, datasette=self) + def add_message(self, request, message, type=INFO): + if not hasattr(request, "_messages"): + request._messages = [] + request._messages_should_clear = False + request._messages.append((message, type)) + + def _write_messages_to_response(self, request, response): + if getattr(request, "_messages", None): + # Set those messages + cookie = SimpleCookie() + cookie["ds_messages"] = self.sign(request._messages, "messages") + cookie["ds_messages"]["path"] = "/" + # TODO: Co-exist with existing set-cookie headers + assert "set-cookie" not in response.headers + response.headers["set-cookie"] = cookie.output(header="").lstrip() + elif getattr(request, "_messages_should_clear", False): + cookie = SimpleCookie() + cookie["ds_messages"] = "" + cookie["ds_messages"]["path"] = "/" + # TODO: Co-exist with existing set-cookie headers + assert "set-cookie" not in response.headers + response.headers["set-cookie"] = cookie.output(header="").lstrip() + + def _show_messages(self, request): + if getattr(request, "_messages", None): + request._messages_should_clear = True + messages = request._messages + request._messages = [] + return messages + else: + return [] + async def permission_allowed( self, actor, action, resource_type=None, resource_identifier=None, default=False ): @@ -808,6 +847,9 @@ class Datasette: add_route( PermissionsDebugView.as_asgi(self), r"/-/permissions$", ) + add_route( + MessagesDebugView.as_asgi(self), r"/-/messages$", + ) add_route( PatternPortfolioView.as_asgi(self), r"/-/patterns$", ) diff --git a/datasette/static/app.css b/datasette/static/app.css index 92f268ae..774a2235 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -351,3 +351,19 @@ p.zero-results { .type-float, .type-int { color: #666; } + +.message-info { + padding: 
1em; + border: 1px solid green; + background-color: #c7fbc7; +} +.message-warning { + padding: 1em; + border: 1px solid #ae7100; + background-color: #fbdda5; +} +.message-error { + padding: 1em; + border: 1px solid red; + background-color: pink; +} diff --git a/datasette/templates/base.html b/datasette/templates/base.html index d9fd945b..9b871d03 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -17,6 +17,14 @@
+{% block messages %}
+{% if show_messages %}
+    {% for message, message_type in show_messages() %}
+        <div class="message-{% if message_type == 1 %}info{% elif message_type == 2 %}warning{% else %}error{% endif %}">{{ message }}</div>
+    {% endfor %}
+{% endif %}
+{% endblock %}
+
 {% block content %}
 {% endblock %}
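For context, this is roughly how plugin or view code queues a message for the block above. A hedged sketch, since the patch itself only exercises the API through the debug view below:

.. code-block:: python

    from datasette.utils.asgi import Response


    async def save_note(datasette, request):
        # Hypothetical handler: queue a flash message, then redirect.
        # The message travels in the signed ds_messages cookie and is
        # rendered by the new messages block on the next page load.
        datasette.add_message(request, "Note saved", datasette.INFO)
        return Response.redirect("/")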
diff --git a/datasette/templates/messages_debug.html b/datasette/templates/messages_debug.html
new file mode 100644
index 00000000..b2e1bc7c
--- /dev/null
+++ b/datasette/templates/messages_debug.html
@@ -0,0 +1,26 @@
+{% extends "base.html" %}
+
+{% block title %}Debug messages{% endblock %}
+
+{% block content %}
+
+<h1>Debug messages</h1>
+
+<form action="/-/messages" method="post">
+    <p>Set a message:</p>
+    <p>
+        <input type="text" name="message">
+        <select name="message_type">
+            <option value="INFO">INFO</option>
+            <option value="WARNING">WARNING</option>
+            <option value="ERROR">ERROR</option>
+            <option value="all">all three</option>
+        </select>
+        <input type="submit" value="Set message">
+    </p>
+</form>
    + +{% endblock %} diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 960532ca..5682da48 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -180,9 +180,9 @@ class AsgiLifespan: class AsgiView: - def dispatch_request(self, request, *args, **kwargs): + async def dispatch_request(self, request, *args, **kwargs): handler = getattr(self, request.method.lower(), None) - return handler(request, *args, **kwargs) + return await handler(request, *args, **kwargs) @classmethod def as_asgi(cls, *class_args, **class_kwargs): diff --git a/datasette/views/base.py b/datasette/views/base.py index 06b78d5f..2402406a 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -1,6 +1,7 @@ import asyncio import csv import itertools +from itsdangerous import BadSignature import json import re import time @@ -73,6 +74,20 @@ class BaseView(AsgiView): def database_color(self, database): return "ff0000" + async def dispatch_request(self, request, *args, **kwargs): + # Populate request_messages if ds_messages cookie is present + if self.ds: + try: + request._messages = self.ds.unsign( + request.cookies.get("ds_messages", ""), "messages" + ) + except BadSignature: + pass + response = await super().dispatch_request(request, *args, **kwargs) + if self.ds: + self.ds._write_messages_to_response(request, response) + return response + async def render(self, templates, request, context=None): context = context or {} template = self.ds.jinja_env.select_template(templates) @@ -81,6 +96,7 @@ class BaseView(AsgiView): **{ "database_url": self.database_url, "database_color": self.database_color, + "show_messages": lambda: self.ds._show_messages(request), "select_templates": [ "{}{}".format( "*" if template_name == template.name else "", template_name diff --git a/datasette/views/special.py b/datasette/views/special.py index 811ed4cb..37c04697 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -94,3 +94,27 @@ class PermissionsDebugView(BaseView): request, {"permission_checks": reversed(self.ds._permission_checks)}, ) + + +class MessagesDebugView(BaseView): + name = "messages_debug" + + def __init__(self, datasette): + self.ds = datasette + + async def get(self, request): + return await self.render(["messages_debug.html"], request) + + async def post(self, request): + post = await request.post_vars() + message = post.get("message", "") + message_type = post.get("message_type") or "INFO" + assert message_type in ("INFO", "WARNING", "ERROR", "all") + datasette = self.ds + if message_type == "all": + datasette.add_message(request, message, datasette.INFO) + datasette.add_message(request, message, datasette.WARNING) + datasette.add_message(request, message, datasette.ERROR) + else: + datasette.add_message(request, message, getattr(datasette, message_type)) + return Response.redirect("/") diff --git a/docs/internals.rst b/docs/internals.rst index b3ad623f..4d51d614 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -214,6 +214,24 @@ This method returns a signed string, which can be decoded and verified using :re Returns the original, decoded object that was passed to :ref:`datasette_sign`. If the signature is not valid this raises a ``itsdangerous.BadSignature`` exception. +.. 
_datasette_add_message: + +.add_message(request, message, message_type=datasette.INFO) +----------------------------------------------------------- + +``request`` - Request + The current Request object + +``message`` - string + The message string + +``message_type`` - constant, optional + The message type - ``datasette.INFO``, ``datasette.WARNING`` or ``datasette.ERROR`` + +Datasette's flash messaging mechanism allows you to add a message that will be displayed to the user on the next page that they visit. Messages are persisted in a ``ds_messages`` cookie. This method adds a message to that cookie. + +You can try out these messages (including the different visual styling of the three message types) using the ``/-/messages`` debugging tool. + .. _internals_database: Database class diff --git a/docs/introspection.rst b/docs/introspection.rst index e5d08dbc..084ee144 100644 --- a/docs/introspection.rst +++ b/docs/introspection.rst @@ -166,3 +166,11 @@ Shows the currently authenticated actor. Useful for debugging Datasette authenti "username": "some-user" } } + + +.. _MessagesDebugView: + +/-/messages +----------- + +The debug tool at ``/-/messages`` can be used to set flash messages to try out that feature. See :ref:`datasette_add_message` for details of this feature. diff --git a/tests/fixtures.py b/tests/fixtures.py index b2cfd3d6..daff0168 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -29,6 +29,12 @@ class TestResponse: self.headers = headers self.body = body + @property + def cookies(self): + cookie = SimpleCookie() + cookie.load(self.headers.get("set-cookie") or "") + return {key: value.value for key, value in cookie.items()} + @property def json(self): return json.loads(self.text) diff --git a/tests/plugins/messages_output_renderer.py b/tests/plugins/messages_output_renderer.py new file mode 100644 index 00000000..6b52f801 --- /dev/null +++ b/tests/plugins/messages_output_renderer.py @@ -0,0 +1,21 @@ +from datasette import hookimpl + + +def render_message_debug(datasette, request): + if request.args.get("add_msg"): + msg_type = request.args.get("type", "INFO") + datasette.add_message( + request, request.args["add_msg"], getattr(datasette, msg_type) + ) + return {"body": "Hello from message debug"} + + +@hookimpl +def register_output_renderer(datasette): + return [ + { + "extension": "message", + "render": render_message_debug, + "can_render": lambda: False, + } + ] diff --git a/tests/test_api.py b/tests/test_api.py index d7e7c03f..a5c6f6a2 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1262,6 +1262,7 @@ def test_plugins_json(app_client): expected = [ {"name": name, "static": False, "templates": False, "version": None} for name in ( + "messages_output_renderer.py", "my_plugin.py", "my_plugin_2.py", "register_output_renderer.py", diff --git a/tests/test_auth.py b/tests/test_auth.py index ddf328af..ac8d7abe 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -9,11 +9,7 @@ def test_auth_token(app_client): response = app_client.get(path, allow_redirects=False,) assert 302 == response.status assert "/" == response.headers["Location"] - set_cookie = response.headers["set-cookie"] - assert set_cookie.endswith("; Path=/") - assert set_cookie.startswith("ds_actor=") - cookie_value = set_cookie.split("ds_actor=")[1].split("; Path=/")[0] - assert {"id": "root"} == app_client.ds.unsign(cookie_value, "actor") + assert {"id": "root"} == app_client.ds.unsign(response.cookies["ds_actor"], "actor") # Check that a second with same token fails assert 
app_client.ds._root_token is None assert 403 == app_client.get(path, allow_redirects=False,).status diff --git a/tests/test_messages.py b/tests/test_messages.py new file mode 100644 index 00000000..d17e015c --- /dev/null +++ b/tests/test_messages.py @@ -0,0 +1,28 @@ +from .fixtures import app_client +import pytest + + +@pytest.mark.parametrize( + "qs,expected", + [ + ("add_msg=added-message", [["added-message", 1]]), + ("add_msg=added-warning&type=WARNING", [["added-warning", 2]]), + ("add_msg=added-error&type=ERROR", [["added-error", 3]]), + ], +) +def test_add_message_sets_cookie(app_client, qs, expected): + response = app_client.get("/fixtures.message?{}".format(qs)) + signed = response.cookies["ds_messages"] + decoded = app_client.ds.unsign(signed, "messages") + assert expected == decoded + + +def test_messages_are_displayed_and_cleared(app_client): + # First set the message cookie + set_msg_response = app_client.get("/fixtures.message?add_msg=xmessagex") + # Now access a page that displays messages + response = app_client.get("/", cookies=set_msg_response.cookies) + # Messages should be in that HTML + assert "xmessagex" in response.text + # Cookie should have been set that clears messages + assert "" == response.cookies["ds_messages"] From 5278c04682929f0b155102827f9150c7b2112215 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 2 Jun 2020 14:29:12 -0700 Subject: [PATCH 0273/2113] More consistent use of response.text/response.json in tests, closes #792 --- tests/test_api.py | 10 ++-------- tests/test_config_dir.py | 13 ++++++------- tests/test_csv.py | 2 +- tests/test_html.py | 4 +--- tests/test_plugins.py | 10 ++++------ 5 files changed, 14 insertions(+), 25 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index a5c6f6a2..7ed4cced 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1762,16 +1762,10 @@ def test_common_prefix_database_names(app_client_conflicting_database_names): # https://github.com/simonw/datasette/issues/597 assert ["fixtures", "foo", "foo-bar"] == [ d["name"] - for d in json.loads( - app_client_conflicting_database_names.get("/-/databases.json").body.decode( - "utf8" - ) - ) + for d in app_client_conflicting_database_names.get("/-/databases.json").json ] for db_name, path in (("foo", "/foo.json"), ("foo-bar", "/foo-bar.json")): - data = json.loads( - app_client_conflicting_database_names.get(path).body.decode("utf8") - ) + data = app_client_conflicting_database_names.get(path).json assert db_name == data["database"] diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index 50e67f80..490b1f1d 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -84,21 +84,20 @@ def config_dir_client(tmp_path_factory): def test_metadata(config_dir_client): response = config_dir_client.get("/-/metadata.json") assert 200 == response.status - assert METADATA == json.loads(response.text) + assert METADATA == response.json def test_config(config_dir_client): response = config_dir_client.get("/-/config.json") assert 200 == response.status - config = json.loads(response.text) - assert 60 == config["default_cache_ttl"] - assert not config["allow_sql"] + assert 60 == response.json["default_cache_ttl"] + assert not response.json["allow_sql"] def test_plugins(config_dir_client): response = config_dir_client.get("/-/plugins.json") assert 200 == response.status - assert "hooray.py" in {p["name"] for p in json.loads(response.text)} + assert "hooray.py" in {p["name"] for p in response.json} def 
test_templates_and_plugin(config_dir_client): @@ -123,7 +122,7 @@ def test_static_directory_browsing_not_allowed(config_dir_client): def test_databases(config_dir_client): response = config_dir_client.get("/-/databases.json") assert 200 == response.status - databases = json.loads(response.text) + databases = response.json assert 2 == len(databases) databases.sort(key=lambda d: d["name"]) assert "demo" == databases[0]["name"] @@ -141,4 +140,4 @@ def test_metadata_yaml(tmp_path_factory, filename): client.ds = ds response = client.get("/-/metadata.json") assert 200 == response.status - assert {"title": "Title from metadata"} == json.loads(response.text) + assert {"title": "Title from metadata"} == response.json diff --git a/tests/test_csv.py b/tests/test_csv.py index 1030c2bb..42022726 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -101,7 +101,7 @@ def test_csv_with_non_ascii_characters(app_client): ) assert response.status == 200 assert "text/plain; charset=utf-8" == response.headers["content-type"] - assert "text,number\r\n𝐜𝐢𝐭𝐢𝐞𝐬,1\r\nbob,2\r\n" == response.body.decode("utf8") + assert "text,number\r\n𝐜𝐢𝐭𝐢𝐞𝐬,1\r\nbob,2\r\n" == response.text def test_max_csv_mb(app_client_csv_max_mb_one): diff --git a/tests/test_html.py b/tests/test_html.py index e602bf0e..2d2a141a 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -606,9 +606,7 @@ def test_row_html_simple_primary_key(app_client): def test_table_not_exists(app_client): - assert "Table not found: blah" in app_client.get("/fixtures/blah").body.decode( - "utf8" - ) + assert "Table not found: blah" in app_client.get("/fixtures/blah").text def test_table_html_no_primary_key(app_client): diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 7a3fb49a..f69e7fa7 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -218,7 +218,7 @@ def test_plugin_config_file(app_client): ) def test_plugins_extra_body_script(app_client, path, expected_extra_body_script): r = re.compile(r"") - json_data = r.search(app_client.get(path).body.decode("utf8")).group(1) + json_data = r.search(app_client.get(path).text).group(1) actual_data = json.loads(json_data) assert expected_extra_body_script == actual_data @@ -331,7 +331,7 @@ def view_names_client(tmp_path_factory): def test_view_names(view_names_client, path, view_name): response = view_names_client.get(path) assert response.status == 200 - assert "view_name:{}".format(view_name) == response.body.decode("utf8") + assert "view_name:{}".format(view_name) == response.text def test_register_output_renderer_no_parameters(app_client): @@ -345,8 +345,7 @@ def test_register_output_renderer_all_parameters(app_client): assert 200 == response.status # Lots of 'at 0x103a4a690' in here - replace those so we can do # an easy comparison - body = response.body.decode("utf-8") - body = at_memory_re.sub(" at 0xXXX", body) + body = at_memory_re.sub(" at 0xXXX", response.text) assert { "1+1": 2, "datasette": "", @@ -468,7 +467,6 @@ def test_register_facet_classes(app_client): response = app_client.get( "/fixtures/compound_three_primary_keys.json?_dummy_facet=1" ) - data = json.loads(response.body) assert [ { "name": "pk1", @@ -502,7 +500,7 @@ def test_register_facet_classes(app_client): "name": "pk3", "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet=pk3", }, - ] == data["suggested_facets"] + ] == response.json["suggested_facets"] def test_actor_from_request(app_client): From a7137dfe069e5fceca56f78631baebd4a6a19967 Mon Sep 17 00:00:00 2001 From: 
Simon Willison Date: Tue, 2 Jun 2020 14:49:28 -0700 Subject: [PATCH 0274/2113] /-/plugins now shows details of hooks, closes #794 Also added /-/plugins?all=1 parameter to see default plugins. --- datasette/app.py | 9 ++++--- datasette/plugins.py | 1 + docs/introspection.rst | 5 +++- tests/test_api.py | 61 ++++++++++++++++++++++++++++++++++++------ 4 files changed, 64 insertions(+), 12 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 41c73900..22fb04c6 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -625,9 +625,9 @@ class Datasette: }, } - def _plugins(self, show_all=False): + def _plugins(self, request): ps = list(get_plugins()) - if not show_all: + if not request.args.get("all"): ps = [p for p in ps if p["name"] not in DEFAULT_PLUGINS] return [ { @@ -635,6 +635,7 @@ class Datasette: "static": p["static_path"] is not None, "templates": p["templates_path"] is not None, "version": p.get("version"), + "hooks": p["hooks"], } for p in ps ] @@ -822,7 +823,9 @@ class Datasette: r"/-/versions(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "plugins.json", self._plugins), + JsonDataView.as_asgi( + self, "plugins.json", self._plugins, needs_request=True + ), r"/-/plugins(?P(\.json)?)$", ) add_route( diff --git a/datasette/plugins.py b/datasette/plugins.py index 26d4fd63..b35b750f 100644 --- a/datasette/plugins.py +++ b/datasette/plugins.py @@ -49,6 +49,7 @@ def get_plugins(): "name": plugin.__name__, "static_path": static_path, "templates_path": templates_path, + "hooks": [h.name for h in pm.get_hookcallers(plugin)], } distinfo = plugin_to_distinfo.get(plugin) if distinfo: diff --git a/docs/introspection.rst b/docs/introspection.rst index 084ee144..08006529 100644 --- a/docs/introspection.rst +++ b/docs/introspection.rst @@ -78,10 +78,13 @@ Shows a list of currently installed plugins and their versions. `Plugins example "name": "datasette_cluster_map", "static": true, "templates": false, - "version": "0.4" + "version": "0.10", + "hooks": ["extra_css_urls", "extra_js_urls", "extra_body_script"] } ] +Add ``?all=1`` to include details of the default plugins baked into Datasette. + .. 
_JsonDataView_config: /-/config diff --git a/tests/test_api.py b/tests/test_api.py index 7ed4cced..4b752f31 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1260,14 +1260,59 @@ def test_threads_json(app_client): def test_plugins_json(app_client): response = app_client.get("/-/plugins.json") expected = [ - {"name": name, "static": False, "templates": False, "version": None} - for name in ( - "messages_output_renderer.py", - "my_plugin.py", - "my_plugin_2.py", - "register_output_renderer.py", - "view_name.py", - ) + { + "name": "messages_output_renderer.py", + "static": False, + "templates": False, + "version": None, + "hooks": ["register_output_renderer"], + }, + { + "name": "my_plugin.py", + "static": False, + "templates": False, + "version": None, + "hooks": [ + "actor_from_request", + "extra_body_script", + "extra_css_urls", + "extra_js_urls", + "extra_template_vars", + "permission_allowed", + "prepare_connection", + "prepare_jinja2_environment", + "register_facet_classes", + "render_cell", + ], + }, + { + "name": "my_plugin_2.py", + "static": False, + "templates": False, + "version": None, + "hooks": [ + "actor_from_request", + "asgi_wrapper", + "extra_js_urls", + "extra_template_vars", + "permission_allowed", + "render_cell", + ], + }, + { + "name": "register_output_renderer.py", + "static": False, + "templates": False, + "version": None, + "hooks": ["register_output_renderer"], + }, + { + "name": "view_name.py", + "static": False, + "templates": False, + "version": None, + "hooks": ["extra_template_vars"], + }, ] assert expected == sorted(response.json, key=lambda p: p["name"]) From 3c5e4f266dfa07bd0bbb530d17019207f787d806 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 2 Jun 2020 15:34:50 -0700 Subject: [PATCH 0275/2113] Added messages to pattern portfolio, refs #790 --- datasette/templates/patterns.html | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html index 9ea4ae42..73443ac2 100644 --- a/datasette/templates/patterns.html +++ b/datasette/templates/patterns.html @@ -20,6 +20,12 @@ attraction_characteristic

    +

    Messages

    +
    +

    Example message

    +

    Example message

    +

    Example message

    +

    .bd for /

    Datasette Fixtures

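The test_messages.py tests added earlier in this series pin down the contract behind the pattern portfolio markup above: messages are ``[message, type]`` pairs (1 for INFO, 2 for WARNING, 3 for ERROR), signed into a ``ds_messages`` cookie, and cleared by resetting that cookie to an empty string once they have been displayed. The following is a minimal sketch of that round-trip, using ``itsdangerous.URLSafeSerializer`` (already a Datasette dependency) as a stand-in for the internal ``ds.sign()``/``ds.unsign()`` helpers; the secret and salt values here are illustrative assumptions, not Datasette's actual configuration:

.. code-block:: python

    # Sketch of the ds_messages round-trip. URLSafeSerializer stands in for
    # Datasette's internal sign()/unsign(); secret and salt are assumed values.
    from itsdangerous import URLSafeSerializer

    INFO, WARNING, ERROR = 1, 2, 3
    serializer = URLSafeSerializer("secret-key", salt="messages")

    # Setting a message: serialize [message, type] pairs into a signed cookie
    cookie_value = serializer.dumps([["added-warning", WARNING]])

    # Displaying messages: verify the signature and deserialize...
    assert serializer.loads(cookie_value) == [["added-warning", 2]]
    # ...then clear them by setting ds_messages to "", which is what
    # test_messages_are_displayed_and_cleared asserts.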
    From 9690ce606823bbfceb0c50d59e03adf7bb1a8475 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 2 Jun 2020 17:05:33 -0700 Subject: [PATCH 0276/2113] More efficient modification of scope --- datasette/app.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 22fb04c6..f9bf91a8 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -901,13 +901,14 @@ class DatasetteRouter(AsgiRouter): base_url = self.ds.config("base_url") if base_url != "/" and path.startswith(base_url): path = "/" + path[len(base_url) :] + scope_modifications = {} # Apply force_https_urls, if set if ( self.ds.config("force_https_urls") and scope["type"] == "http" and scope.get("scheme") != "https" ): - scope = dict(scope, scheme="https") + scope_modifications["scheme"] = "https" # Handle authentication actor = None for actor in pm.hook.actor_from_request( @@ -919,7 +920,10 @@ actor = await actor if actor: break - return await super().route_path(dict(scope, actor=actor), receive, send, path) + scope_modifications["actor"] = actor + return await super().route_path( + dict(scope, **scope_modifications), receive, send, path + ) async def handle_404(self, scope, receive, send, exception=None): # If URL has a trailing slash, redirect to URL without it From 0934844c0b6d124163d0185fb6a41ba5a71433da Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 3 Jun 2020 06:48:39 -0700 Subject: [PATCH 0277/2113] request.post_vars() no longer discards empty values --- datasette/utils/asgi.py | 2 +- tests/test_internals_request.py | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 5682da48..c7810a50 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -80,7 +80,7 @@ class Request: body += message.get("body", b"") more_body = message.get("more_body", False) - return dict(parse_qsl(body.decode("utf-8"))) + return dict(parse_qsl(body.decode("utf-8"), keep_blank_values=True)) @classmethod def fake(cls, path_with_query_string, method="GET", scheme="http"): diff --git a/tests/test_internals_request.py b/tests/test_internals_request.py index 5c9b254b..433b23d5 100644 --- a/tests/test_internals_request.py +++ b/tests/test_internals_request.py @@ -16,10 +16,14 @@ async def test_request_post_vars(): } async def receive(): - return {"type": "http.request", "body": b"foo=bar&baz=1", "more_body": False} + return { + "type": "http.request", + "body": b"foo=bar&baz=1&empty=", + "more_body": False, + } request = Request(scope, receive) - assert {"foo": "bar", "baz": "1"} == await request.post_vars() + assert {"foo": "bar", "baz": "1", "empty": ""} == await request.post_vars() def test_request_args(): From aa82d0370463580f2cb10d9617f1bcbe45cc994a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 3 Jun 2020 08:16:50 -0700 Subject: [PATCH 0278/2113] Basic writable canned queries Refs #698. First working version of this feature. 
* request.post_vars() no longer discards empty values --- datasette/templates/query.html | 7 ++- datasette/views/database.py | 60 +++++++++++++++++++++-- datasette/views/table.py | 18 +++++++ docs/sql_queries.rst | 65 +++++++++++++++++++++++-- tests/fixtures.py | 37 +++++++++++--- tests/test_canned_write.py | 88 ++++++++++++++++++++++++++++++++++ 6 files changed, 256 insertions(+), 19 deletions(-) create mode 100644 tests/test_canned_write.py diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 2c8c05a0..52896e96 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -27,11 +27,12 @@ {% endblock %} {% block content %} +

    {{ metadata.title or database }}

    {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} -
    +

    Custom SQL query{% if display_rows %} returning {% if truncated %}more than {% endif %}{{ "{:,}".format(display_rows|length) }} row{% if display_rows|length == 1 %}{% else %}s{% endif %}{% endif %} {% if hide_sql %}(show){% else %}(hide){% endif %}

    {% if not hide_sql %} {% if editable and config.allow_sql %} @@ -74,7 +75,9 @@ {% else %} -

    0 results

    + {% if not canned_write %} +

    0 results

    + {% endif %} {% endif %} {% include "_codemirror_foot.html" %} diff --git a/datasette/views/database.py b/datasette/views/database.py index 15545fb8..558dd0f0 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -106,6 +106,8 @@ class QueryView(DataView): canned_query=None, metadata=None, _size=None, + named_parameters=None, + write=False, ): params = {key: request.args.get(key) for key in request.args} if "sql" in params: @@ -113,7 +115,7 @@ class QueryView(DataView): if "_shape" in params: params.pop("_shape") # Extract any :named parameters - named_parameters = self.re_named_parameter.findall(sql) + named_parameters = named_parameters or self.re_named_parameter.findall(sql) named_parameter_values = { named_parameter: params.get(named_parameter) or "" for named_parameter in named_parameters @@ -129,12 +131,60 @@ class QueryView(DataView): extra_args["custom_time_limit"] = int(params["_timelimit"]) if _size: extra_args["page_size"] = _size - results = await self.ds.execute( - database, sql, params, truncate=True, **extra_args - ) - columns = [r[0] for r in results.description] templates = ["query-{}.html".format(to_css_class(database)), "query.html"] + + # Execute query - as write or as read + if write: + if request.method == "POST": + params = await request.post_vars() + try: + cursor = await self.ds.databases[database].execute_write( + sql, params, block=True + ) + message = metadata.get( + "on_success_message" + ) or "Query executed, {} row{} affected".format( + cursor.rowcount, "" if cursor.rowcount == 1 else "s" + ) + message_type = self.ds.INFO + redirect_url = metadata.get("on_success_redirect") + except Exception as e: + message = metadata.get("on_error_message") or str(e) + message_type = self.ds.ERROR + redirect_url = metadata.get("on_error_redirect") + self.ds.add_message(request, message, message_type) + return self.redirect(request, redirect_url or request.path) + else: + + async def extra_template(): + return { + "request": request, + "path_with_added_args": path_with_added_args, + "path_with_removed_args": path_with_removed_args, + "named_parameter_values": named_parameter_values, + "canned_query": canned_query, + "success_message": request.args.get("_success") or "", + "canned_write": True, + } + + return ( + { + "database": database, + "rows": [], + "truncated": False, + "columns": [], + "query": {"sql": sql, "params": params}, + }, + extra_template, + templates, + ) + else: # Not a write + results = await self.ds.execute( + database, sql, params, truncate=True, **extra_args + ) + columns = [r[0] for r in results.description] + if canned_query: templates.insert( 0, diff --git a/datasette/views/table.py b/datasette/views/table.py index 2e9515c3..79bf8b08 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -221,6 +221,22 @@ class RowTableShared(DataView): class TableView(RowTableShared): name = "table" + async def post(self, request, db_name, table_and_format): + # Handle POST to a canned query + canned_query = self.ds.get_canned_query(db_name, table_and_format) + assert canned_query, "You may only POST to a canned query" + return await QueryView(self.ds).data( + request, + db_name, + None, + canned_query["sql"], + metadata=canned_query, + editable=False, + canned_query=table_and_format, + named_parameters=canned_query.get("params"), + write=bool(canned_query.get("write")), + ) + async def data( self, request, @@ -241,6 +257,8 @@ class TableView(RowTableShared): metadata=canned_query, editable=False, canned_query=table, + 
named_parameters=canned_query.get("params"), + write=bool(canned_query.get("write")), ) db = self.ds.databases[database] diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index c3efd930..dc239a84 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -161,11 +161,12 @@ You can set a default fragment hash that will be included in the link to the can { "databases": { - "fixtures": { - "queries": { - "neighborhood_search": { - "sql": "select neighborhood, facet_cities.name, state\nfrom facetable join facet_cities on facetable.city_id = facet_cities.id\nwhere neighborhood like '%' || :text || '%' order by neighborhood;", - "fragment": "fragment-goes-here" + "fixtures": { + "queries": { + "neighborhood_search": { + "sql": "select neighborhood, facet_cities.name, state\nfrom facetable join facet_cities on facetable.city_id = facet_cities.id\nwhere neighborhood like '%' || :text || '%' order by neighborhood;", + "fragment": "fragment-goes-here" + } } } } @@ -173,6 +174,60 @@ You can set a default fragment hash that will be included in the link to the can `See here `__ for a demo of this in action. +.. _canned_queries_writable: + +Writable canned queries +~~~~~~~~~~~~~~~~~~~~~~~ + +Canned queries by default are read-only. You can use the ``"write": true`` key to indicate that a canned query can write to the database. + +.. code-block:: json + + { + "databases": { + "mydatabase": { + "queries": { + "add_name": { + "sql": "INSERT INTO names (name) VALUES (:name)", + "write": true + } + } + } + } + } + +This configuration will create a page at ``/mydatabase/add_name`` displaying a form with a ``name`` field. Submitting that form will execute the configured ``INSERT`` query. + +You can customize how Datasette represents success and errors using the following optional properties: + +- ``on_success_message`` - the message shown when a query is successful +- ``on_success_redirect`` - the path or URL the user is redirected to on success +- ``on_error_message`` - the message shown when a query throws an error +- ``on_error_redirect`` - the path or URL the user is redirected to on error + +For example: + +.. code-block:: json + + { + "databases": { + "mydatabase": { + "queries": { + "add_name": { + "sql": "INSERT INTO names (name) VALUES (:name)", + "write": true, + "on_success_message": "Name inserted", + "on_success_redirect": "/mydatabase/names", + "on_error_message": "Name insert failed", + "on_error_redirect": "/mydatabase" + } + } + } + } + } + +You may wish to use this feature in conjunction with :ref:`authentication`. + .. 
_pagination: Pagination diff --git a/tests/fixtures.py b/tests/fixtures.py index daff0168..78a54c68 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -14,7 +14,7 @@ import string import tempfile import textwrap import time -from urllib.parse import unquote, quote +from urllib.parse import unquote, quote, urlencode # This temp file is used by one of the plugin config tests @@ -54,10 +54,26 @@ class TestClient: async def get( self, path, allow_redirects=True, redirect_count=0, method="GET", cookies=None ): - return await self._get(path, allow_redirects, redirect_count, method, cookies) + return await self._request( + path, allow_redirects, redirect_count, method, cookies + ) - async def _get( - self, path, allow_redirects=True, redirect_count=0, method="GET", cookies=None + @async_to_sync + async def post( + self, path, post_data=None, allow_redirects=True, redirect_count=0, cookies=None + ): + return await self._request( + path, allow_redirects, redirect_count, "POST", cookies, post_data + ) + + async def _request( + self, + path, + allow_redirects=True, + redirect_count=0, + method="GET", + cookies=None, + post_data=None, ): query_string = b"" if "?" in path: @@ -83,7 +99,13 @@ class TestClient: "headers": headers, } instance = ApplicationCommunicator(self.asgi_app, scope) - await instance.send_input({"type": "http.request"}) + + if post_data: + body = urlencode(post_data, doseq=True).encode("utf-8") + await instance.send_input({"type": "http.request", "body": body}) + else: + await instance.send_input({"type": "http.request"}) + # First message back should be response.start with headers and status messages = [] start = await instance.receive_output(2) @@ -110,7 +132,7 @@ class TestClient: redirect_count, self.max_redirects ) location = response.headers["Location"] - return await self._get( + return await self._request( location, allow_redirects=True, redirect_count=redirect_count + 1 ) return response @@ -128,6 +150,7 @@ def make_app_client( inspect_data=None, static_mounts=None, template_dir=None, + metadata=None, ): with tempfile.TemporaryDirectory() as tmpdir: filepath = os.path.join(tmpdir, filename) @@ -161,7 +184,7 @@ def make_app_client( immutables=immutables, memory=memory, cors=cors, - metadata=METADATA, + metadata=metadata or METADATA, plugins_dir=PLUGINS_DIR, config=config, inspect_data=inspect_data, diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py new file mode 100644 index 00000000..52c8aec2 --- /dev/null +++ b/tests/test_canned_write.py @@ -0,0 +1,88 @@ +import pytest +from .fixtures import make_app_client + + +@pytest.fixture +def canned_write_client(): + for client in make_app_client( + extra_databases={"data.db": "create table names (name text)"}, + metadata={ + "databases": { + "data": { + "queries": { + "add_name": { + "sql": "insert into names (name) values (:name)", + "write": True, + "on_success_redirect": "/data/add_name?success", + }, + "add_name_specify_id": { + "sql": "insert into names (rowid, name) values (:rowid, :name)", + "write": True, + "on_error_redirect": "/data/add_name_specify_id?error", + }, + "delete_name": { + "sql": "delete from names where rowid = :rowid", + "write": True, + "on_success_message": "Name deleted", + }, + "update_name": { + "sql": "update names set name = :name where rowid = :rowid", + "params": ["rowid", "name"], + "write": True, + }, + } + } + } + }, + ): + yield client + + +def test_insert(canned_write_client): + response = canned_write_client.post( + "/data/add_name", {"name": "Hello"}, 
allow_redirects=False + ) + assert 302 == response.status + assert "/data/add_name?success" == response.headers["Location"] + messages = canned_write_client.ds.unsign( + response.cookies["ds_messages"], "messages" + ) + assert [["Query executed, 1 row affected", 1]] == messages + + +def test_custom_success_message(canned_write_client): + response = canned_write_client.post( + "/data/delete_name", {"rowid": 1}, allow_redirects=False + ) + assert 302 == response.status + messages = canned_write_client.ds.unsign( + response.cookies["ds_messages"], "messages" + ) + assert [["Name deleted", 1]] == messages + + +def test_insert_error(canned_write_client): + canned_write_client.post("/data/add_name", {"name": "Hello"}) + response = canned_write_client.post( + "/data/add_name_specify_id", + {"rowid": 1, "name": "Should fail"}, + allow_redirects=False, + ) + assert 302 == response.status + assert "/data/add_name_specify_id?error" == response.headers["Location"] + messages = canned_write_client.ds.unsign( + response.cookies["ds_messages"], "messages" + ) + assert [["UNIQUE constraint failed: names.rowid", 3]] == messages + # How about with a custom error message? + canned_write_client.ds._metadata["databases"]["data"]["queries"][ + "add_name_specify_id" + ]["on_error_message"] = "ERROR" + response = canned_write_client.post( + "/data/add_name_specify_id", + {"rowid": 1, "name": "Should fail"}, + allow_redirects=False, + ) + assert [["ERROR", 3]] == canned_write_client.ds.unsign( + response.cookies["ds_messages"], "messages" + ) From 9cb44be42f012a68c8c3904a37008200cc7bb1f4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 3 Jun 2020 14:04:40 -0700 Subject: [PATCH 0279/2113] Docs and tests for "params", closes #797 --- docs/sql_queries.rst | 70 ++++++++++++++++++++++++++++++-------- tests/test_canned_write.py | 7 +++- 2 files changed, 61 insertions(+), 16 deletions(-) diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index dc239a84..aa1edc98 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -121,32 +121,68 @@ Here's an example of a canned query with a named parameter: .. code-block:: sql select neighborhood, facet_cities.name, state - from facetable join facet_cities on facetable.city_id = facet_cities.id - where neighborhood like '%' || :text || '%' order by neighborhood; + from facetable + join facet_cities on facetable.city_id = facet_cities.id + where neighborhood like '%' || :text || '%' + order by neighborhood; -In the canned query JSON it looks like this: +In the canned query metadata (here :ref:`metadata_yaml` as ``metadata.yaml``) it looks like this: + +.. code-block:: yaml + + databases: + fixtures: + queries: + neighborhood_search: + sql: |- + select neighborhood, facet_cities.name, state + from facetable + join facet_cities on facetable.city_id = facet_cities.id + where neighborhood like '%' || :text || '%' + order by neighborhood + title: Search neighborhoods + +Here's the equivalent using JSON (as ``metadata.json``): .. 
code-block:: json { "databases": { - "fixtures": { - "queries": { - "neighborhood_search": { - "sql": "select neighborhood, facet_cities.name, state\nfrom facetable join facet_cities on facetable.city_id = facet_cities.id\nwhere neighborhood like '%' || :text || '%' order by neighborhood;", - "title": "Search neighborhoods", - "description_html": "Demonstrating simple like search" - } - } - } + "fixtures": { + "queries": { + "neighborhood_search": { + "sql": "select neighborhood, facet_cities.name, state\nfrom facetable\n join facet_cities on facetable.city_id = facet_cities.id\nwhere neighborhood like '%' || :text || '%'\norder by neighborhood", + "title": "Search neighborhoods" + } + } + } } } +Note that we are using SQLite string concatenation here - the ``||`` operator - to add wildcard ``%`` characters to the string provided by the user. + You can try this canned query out here: https://latest.datasette.io/fixtures/neighborhood_search?text=town -Note that we are using SQLite string concatenation here - the ``||`` operator - -to add wildcard ``%`` characters to the string provided by the user. +In this example the ``:text`` named parameter is automatically extracted from the query using a regular expression. + +You can alternatively provide an explicit list of named parameters using the ``"params"`` key, like this: + +.. code-block:: yaml + + databases: + fixtures: + queries: + neighborhood_search: + params: + - text + sql: |- + select neighborhood, facet_cities.name, state + from facetable + join facet_cities on facetable.city_id = facet_cities.id + where neighborhood like '%' || :text || '%' + order by neighborhood + title: Search neighborhoods .. _canned_queries_default_fragment: @@ -181,6 +217,8 @@ Writable canned queries Canned queries by default are read-only. You can use the ``"write": true`` key to indicate that a canned query can write to the database. +You may wish to use this feature in conjunction with :ref:`authentication`. + .. code-block:: json { @@ -226,7 +264,9 @@ For example: } } -You may wish to use this feature in conjunction with :ref:`authentication`. +You can use ``"params"`` to explicitly list the named parameters that should be displayed as form fields - otherwise they will be automatically detected. + +You can pre-populate form fields when the page first loads using a querystring, e.g. ``/mydatabase/add_name?name=Prepopulated``. The user will have to submit the form to execute the query. .. 
_pagination: diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index 52c8aec2..692d726e 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -27,7 +27,7 @@ def canned_write_client(): }, "update_name": { "sql": "update names set name = :name where rowid = :rowid", - "params": ["rowid", "name"], + "params": ["rowid", "name", "extra"], "write": True, }, } @@ -86,3 +86,8 @@ def test_insert_error(canned_write_client): assert [["ERROR", 3]] == canned_write_client.ds.unsign( response.cookies["ds_messages"], "messages" ) + + +def test_custom_params(canned_write_client): + response = canned_write_client.get("/data/update_name?extra=foo") + assert '' in response.text From 8524866fdf0b43a68e1ee24c419c80b5cddaaeca Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 4 Jun 2020 16:58:19 -0700 Subject: [PATCH 0280/2113] Link to authentication docs --- docs/plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index fb2843f4..3777bba7 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -953,7 +953,7 @@ actor_from_request(datasette, request) ``request`` - object The current HTTP :ref:`internals_request`. -This is part of Datasette's authentication and permissions system. The function should attempt to authenticate an actor (either a user or an API actor of some sort) based on information in the request. +This is part of Datasette's :ref:`authentication and permissions system `. The function should attempt to authenticate an actor (either a user or an API actor of some sort) based on information in the request. If it cannot authenticate an actor, it should return ``None``. Otherwise it should return a dictionary representing that actor. From 2074efa5a49f72cf1c47c28894de6c0b1f0fb3b1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 4 Jun 2020 18:38:32 -0700 Subject: [PATCH 0281/2113] Another actor_from_request example --- docs/plugins.rst | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/docs/plugins.rst b/docs/plugins.rst index 3777bba7..8004e118 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -957,6 +957,27 @@ This is part of Datasette's :ref:`authentication and permissions system Date: Thu, 4 Jun 2020 20:10:40 -0700 Subject: [PATCH 0282/2113] More things you can do with plugins --- docs/plugins.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/plugins.rst b/docs/plugins.rst index 8004e118..ecc7cbf1 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -18,6 +18,8 @@ Things you can do with plugins include: * Make new custom SQL functions available for use within Datasette, for example `datasette-haversine `__ and `datasette-jellyfish `__. +* Define custom output formats with custom extensions, for example `datasette-atom `__ and + `datasette-ics `__. * Add template functions that can be called within your Jinja custom templates, for example `datasette-render-markdown `__. 
* Customize how database values are rendered in the Datasette interface, for example From 0da7f49b24e429e81317e370cb01de941f1b873e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jun 2020 10:52:50 -0700 Subject: [PATCH 0283/2113] Rename RequestParameters to MultiParams, refs #799 --- datasette/utils/__init__.py | 25 +++++++++++++++++-------- datasette/utils/asgi.py | 4 ++-- datasette/views/table.py | 4 ++-- docs/internals.rst | 10 ++++++---- tests/test_utils.py | 9 +++++++++ 5 files changed, 36 insertions(+), 16 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 2eb31502..083fba0c 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -753,15 +753,24 @@ def escape_fts(query): ) -class RequestParameters: +class MultiParams: def __init__(self, data): - # data is a dictionary of key => [list, of, values] - assert isinstance(data, dict), "data should be a dictionary of key => [list]" - for key in data: - assert isinstance( - data[key], list - ), "data should be a dictionary of key => [list]" - self._data = data + # data is a dictionary of key => [list, of, values] or a list of [["key", "value"]] pairs + if isinstance(data, dict): + for key in data: + assert isinstance( + data[key], list + ), "dictionary data should be a dictionary of key => [list]" + self._data = data + elif isinstance(data, list): + new_data = {} + for item in data: + assert ( + isinstance(item, list) and len(item) == 2 + ), "list data should be a list of [key, value] pairs" + key, value = item + new_data.setdefault(key, []).append(value) + self._data = new_data def __contains__(self, key): return key in self._data diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index c7810a50..ba131dc8 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -1,5 +1,5 @@ import json -from datasette.utils import RequestParameters +from datasette.utils import MultiParams from mimetypes import guess_type from urllib.parse import parse_qs, urlunparse, parse_qsl from pathlib import Path @@ -68,7 +68,7 @@ class Request: @property def args(self): - return RequestParameters(parse_qs(qs=self.query_string)) + return MultiParams(parse_qs(qs=self.query_string)) async def post_vars(self): body = [] diff --git a/datasette/views/table.py b/datasette/views/table.py index 79bf8b08..ec1b6c7c 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -8,7 +8,7 @@ from datasette.plugins import pm from datasette.database import QueryInterrupted from datasette.utils import ( CustomRow, - RequestParameters, + MultiParams, append_querystring, compound_keys_after_sql, escape_sqlite, @@ -286,7 +286,7 @@ class TableView(RowTableShared): order_by = "" # Ensure we don't drop anything with an empty value e.g. ?name__exact= - args = RequestParameters( + args = MultiParams( urllib.parse.parse_qs(request.query_string, keep_blank_values=True) ) diff --git a/docs/internals.rst b/docs/internals.rst index 4d51d614..4b4adc5e 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -39,7 +39,7 @@ The request object is passed to various plugin hooks. It represents an incoming ``.query_string`` - string The querystring component of the request, without the ``?`` - e.g. ``name__contains=sam&age__gt=10``. -``.args`` - RequestParameters +``.args`` - MultiParams An object representing the parsed querystring parameters, see below. 
The object also has one awaitable method: @@ -47,10 +47,12 @@ The object also has one awaitable method: ``await request.post_vars()`` - dictionary Returns a dictionary of form variables that were submitted in the request body via ``POST``. -The RequestParameters class ---------------------------- +.. _internals_multiparams: -``request.args`` is a ``RequestParameters`` object - a dictionary-like object which provides access to querystring parameters that may have multiple values. +The MultiParams class +--------------------- + +``request.args`` is a ``MultiParams`` object - a dictionary-like object which provides access to querystring parameters that may have multiple values. Consider the querystring ``?foo=1&foo=2&bar=3`` - with two values for ``foo`` and one value for ``bar``. diff --git a/tests/test_utils.py b/tests/test_utils.py index 01a10468..ffe14587 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -437,3 +437,12 @@ def test_call_with_supported_arguments(): with pytest.raises(TypeError): utils.call_with_supported_arguments(foo, a=1) + + +def test_multi_params_list(): + p1 = utils.MultiParams([["foo", "bar"], ["foo", "baz"]]) + assert "bar" == p1["foo"] + assert ["bar", "baz"] == p1.getlist("foo") + # Should raise an error if list isn't pairs + with pytest.raises(AssertionError): + utils.MultiParams([["foo", "bar"], ["foo", "baz", "bar"]]) From d96ac1d52cacf34bae09705eb8f9a0e3f81c426b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jun 2020 11:01:06 -0700 Subject: [PATCH 0284/2113] Allow tuples as well as lists in MultiParams, refs #799 --- datasette/utils/__init__.py | 6 +++--- tests/test_utils.py | 22 ++++++++++++++++------ 2 files changed, 19 insertions(+), 9 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 083fba0c..69e288e6 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -759,14 +759,14 @@ class MultiParams: if isinstance(data, dict): for key in data: assert isinstance( - data[key], list + data[key], (list, tuple) ), "dictionary data should be a dictionary of key => [list]" self._data = data - elif isinstance(data, list): + elif isinstance(data, list) or isinstance(data, tuple): new_data = {} for item in data: assert ( - isinstance(item, list) and len(item) == 2 + isinstance(item, (list, tuple)) and len(item) == 2 ), "list data should be a list of [key, value] pairs" key, value = item new_data.setdefault(key, []).append(value) diff --git a/tests/test_utils.py b/tests/test_utils.py index ffe14587..a7968e54 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -439,10 +439,20 @@ def test_call_with_supported_arguments(): utils.call_with_supported_arguments(foo, a=1) -def test_multi_params_list(): - p1 = utils.MultiParams([["foo", "bar"], ["foo", "baz"]]) +@pytest.mark.parametrize("data,should_raise", [ + ([["foo", "bar"], ["foo", "baz"]], False), + ([("foo", "bar"), ("foo", "baz")], False), + ((["foo", "bar"], ["foo", "baz"]), False), + ([["foo", "bar"], ["foo", "baz", "bax"]], True), + ({"foo": ["bar", "baz"]}, False), + ({"foo": ("bar", "baz")}, False), + ({"foo": "bar"}, True), +]) +def test_multi_params(data, should_raise): + if should_raise: + with pytest.raises(AssertionError): + utils.MultiParams(data) + return + p1 = utils.MultiParams(data) assert "bar" == p1["foo"] - assert ["bar", "baz"] == p1.getlist("foo") - # Should raise an error if list isn't pairs - with pytest.raises(AssertionError): - utils.MultiParams([["foo", "bar"], ["foo", "baz", "bar"]]) + assert ["bar", 
"baz"] == list(p1.getlist("foo")) From 84a9c4ff75460f91c049bd30bba3cee1fd89d9e2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jun 2020 12:05:57 -0700 Subject: [PATCH 0285/2113] CSRF protection (#798) Closes #793. * Rename RequestParameters to MultiParams, refs #799 * Allow tuples as well as lists in MultiParams, refs #799 * Use csrftokens when running tests, refs #799 * Use new csrftoken() function, refs https://github.com/simonw/asgi-csrf/issues/7 * Check for Vary: Cookie hedaer, refs https://github.com/simonw/asgi-csrf/issues/8 --- datasette/app.py | 10 ++++++- datasette/templates/messages_debug.html | 3 +- datasette/templates/query.html | 1 + datasette/utils/__init__.py | 3 ++ datasette/views/base.py | 1 + setup.py | 1 + tests/fixtures.py | 38 +++++++++++++++++++++---- tests/test_canned_write.py | 8 ++++-- tests/test_utils.py | 21 ++++++++------ 9 files changed, 67 insertions(+), 19 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f9bf91a8..54cf02f8 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1,4 +1,5 @@ import asyncio +import asgi_csrf import collections import datetime import hashlib @@ -884,7 +885,14 @@ class Datasette: await database.table_counts(limit=60 * 60 * 1000) asgi = AsgiLifespan( - AsgiTracer(DatasetteRouter(self, routes)), on_startup=setup_db + AsgiTracer( + asgi_csrf.asgi_csrf( + DatasetteRouter(self, routes), + signing_secret=self._secret, + cookie_name="ds_csrftoken", + ) + ), + on_startup=setup_db, ) for wrapper in pm.hook.asgi_wrapper(datasette=self): asgi = wrapper(asgi) diff --git a/datasette/templates/messages_debug.html b/datasette/templates/messages_debug.html index b2e1bc7c..e83d2a2f 100644 --- a/datasette/templates/messages_debug.html +++ b/datasette/templates/messages_debug.html @@ -8,7 +8,7 @@

    Set a message:

    - +
    @@ -19,6 +19,7 @@
    +
    diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 52896e96..a7cb6647 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -52,6 +52,7 @@ {% endif %}

    + {% if canned_query %}{% endif %}

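PATCH 0285 wraps Datasette's router in ``asgi-csrf`` with a signing secret and the ``ds_csrftoken`` cookie name, and the template hunks above add a hidden ``csrftoken`` field to each POST form so that submissions carry a token matching that cookie. Below is a minimal sketch of the same wrapping applied to a bare ASGI app, reusing only the ``asgi_csrf.asgi_csrf()`` call shown in the ``app.py`` hunk; the inner application is a hypothetical stand-in, not Datasette code:

.. code-block:: python

    # Sketch: protect a bare ASGI app the way this patch protects
    # DatasetteRouter. Only the asgi_csrf.asgi_csrf() call mirrors the
    # patch; the inner app is hypothetical.
    import asgi_csrf


    async def inner_app(scope, receive, send):
        # A POST only reaches this point if its csrftoken form field
        # matched the signed ds_csrftoken cookie.
        await send(
            {
                "type": "http.response.start",
                "status": 200,
                "headers": [[b"content-type", b"text/plain"]],
            }
        )
        await send({"type": "http.response.body", "body": b"ok"})


    app = asgi_csrf.asgi_csrf(
        inner_app,
        signing_secret="secret-key",
        cookie_name="ds_csrftoken",
    )

Safe methods such as GET pass straight through; only state-changing requests like POST must present a token, which is why the test client below fetches a page first whenever ``csrftoken_from`` is set.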
    diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 69e288e6..059db184 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -772,6 +772,9 @@ class MultiParams: new_data.setdefault(key, []).append(value) self._data = new_data + def __repr__(self): + return "<MultiParams: {}>".format(self._data) + def __contains__(self, key): return key in self._data diff --git a/datasette/views/base.py b/datasette/views/base.py index 2402406a..315c96fe 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -95,6 +95,7 @@ class BaseView(AsgiView): **context, **{ "database_url": self.database_url, + "csrftoken": request.scope["csrftoken"], "database_color": self.database_color, "show_messages": lambda: self.ds._show_messages(request), "select_templates": [ diff --git a/setup.py b/setup.py index 93628266..c0316deb 100644 --- a/setup.py +++ b/setup.py @@ -53,6 +53,7 @@ setup( "uvicorn~=0.11", "aiofiles>=0.4,<0.6", "janus>=0.4,<0.6", + "asgi-csrf>=0.4", "PyYAML~=5.3", "mergedeep>=1.1.1,<1.4.0", "itsdangerous~=1.1", diff --git a/tests/fixtures.py b/tests/fixtures.py index 78a54c68..a64a8295 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -1,5 +1,5 @@ from datasette.app import Datasette -from datasette.utils import sqlite3 +from datasette.utils import sqlite3, MultiParams from asgiref.testing import ApplicationCommunicator from asgiref.sync import async_to_sync from http.cookies import SimpleCookie @@ -60,10 +60,35 @@ class TestClient: @async_to_sync async def post( - self, path, post_data=None, allow_redirects=True, redirect_count=0, cookies=None + self, + path, + post_data=None, + allow_redirects=True, + redirect_count=0, + content_type="application/x-www-form-urlencoded", + cookies=None, + csrftoken_from=None, ): + cookies = cookies or {} + post_data = post_data or {} + # Maybe fetch a csrftoken first + if csrftoken_from is not None: + if csrftoken_from is True: + csrftoken_from = path + token_response = await self._request(csrftoken_from) + # Check this had a Vary: Cookie header + assert "Cookie" == token_response.headers["vary"] + csrftoken = token_response.cookies["ds_csrftoken"] + cookies["ds_csrftoken"] = csrftoken + post_data["csrftoken"] = csrftoken return await self._request( + path, + allow_redirects, + redirect_count, + "POST", + cookies, + post_data, + content_type, ) async def _request( self, path, allow_redirects=True, redirect_count=0, method="GET", cookies=None, post_data=None, + content_type=None, ): query_string = b"" if "?" 
in path: @@ -84,6 +110,8 @@ class TestClient: else: raw_path = quote(path, safe="/:,").encode("latin-1") headers = [[b"host", b"localhost"]] + if content_type: + headers.append((b"content-type", content_type.encode("utf-8"))) if cookies: sc = SimpleCookie() for key, value in cookies.items(): @@ -111,7 +139,7 @@ class TestClient: start = await instance.receive_output(2) messages.append(start) assert start["type"] == "http.response.start" - headers = dict( + response_headers = MultiParams( [(k.decode("utf8"), v.decode("utf8")) for k, v in start["headers"]] ) status = start["status"] @@ -124,7 +152,7 @@ class TestClient: body += message["body"] if not message.get("more_body"): break - response = TestResponse(status, headers, body) + response = TestResponse(status, response_headers, body) if allow_redirects and response.status in (301, 302): assert ( redirect_count < self.max_redirects diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index 692d726e..be838063 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -40,7 +40,7 @@ def canned_write_client(): def test_insert(canned_write_client): response = canned_write_client.post( - "/data/add_name", {"name": "Hello"}, allow_redirects=False + "/data/add_name", {"name": "Hello"}, allow_redirects=False, csrftoken_from=True, ) assert 302 == response.status assert "/data/add_name?success" == response.headers["Location"] @@ -52,7 +52,7 @@ def test_insert(canned_write_client): def test_custom_success_message(canned_write_client): response = canned_write_client.post( - "/data/delete_name", {"rowid": 1}, allow_redirects=False + "/data/delete_name", {"rowid": 1}, allow_redirects=False, csrftoken_from=True ) assert 302 == response.status messages = canned_write_client.ds.unsign( @@ -62,11 +62,12 @@ def test_custom_success_message(canned_write_client): def test_insert_error(canned_write_client): - canned_write_client.post("/data/add_name", {"name": "Hello"}) + canned_write_client.post("/data/add_name", {"name": "Hello"}, csrftoken_from=True) response = canned_write_client.post( "/data/add_name_specify_id", {"rowid": 1, "name": "Should fail"}, allow_redirects=False, + csrftoken_from=True, ) assert 302 == response.status assert "/data/add_name_specify_id?error" == response.headers["Location"] @@ -82,6 +83,7 @@ def test_insert_error(canned_write_client): "/data/add_name_specify_id", {"rowid": 1, "name": "Should fail"}, allow_redirects=False, + csrftoken_from=True, ) assert [["ERROR", 3]] == canned_write_client.ds.unsign( response.cookies["ds_messages"], "messages" diff --git a/tests/test_utils.py b/tests/test_utils.py index a7968e54..cf714215 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -439,15 +439,18 @@ def test_call_with_supported_arguments(): utils.call_with_supported_arguments(foo, a=1) -@pytest.mark.parametrize("data,should_raise", [ - ([["foo", "bar"], ["foo", "baz"]], False), - ([("foo", "bar"), ("foo", "baz")], False), - ((["foo", "bar"], ["foo", "baz"]), False), - ([["foo", "bar"], ["foo", "baz", "bax"]], True), - ({"foo": ["bar", "baz"]}, False), - ({"foo": ("bar", "baz")}, False), - ({"foo": "bar"}, True), -]) +@pytest.mark.parametrize( + "data,should_raise", + [ + ([["foo", "bar"], ["foo", "baz"]], False), + ([("foo", "bar"), ("foo", "baz")], False), + ((["foo", "bar"], ["foo", "baz"]), False), + ([["foo", "bar"], ["foo", "baz", "bax"]], True), + ({"foo": ["bar", "baz"]}, False), + ({"foo": ("bar", "baz")}, False), + ({"foo": "bar"}, True), + ] +) def test_multi_params(data, should_raise): 
if should_raise: with pytest.raises(AssertionError): From 033a1bb22c70a955d9fd1d3b4675a0e2e5c8b8cd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jun 2020 12:06:43 -0700 Subject: [PATCH 0286/2113] Removed rogue print() from test --- tests/test_internals_database.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 5d5520dd..2d288cc8 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -162,7 +162,6 @@ async def test_execute_write_fn_block_false(db): with conn: conn.execute("delete from roadside_attractions where pk = 1;") row = conn.execute("select count(*) from roadside_attractions").fetchone() - print("row = ", row) return row[0] task_id = await db.execute_write_fn(write_fn) From f786033a5f0098371cb1df1ce83959b27c588115 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jun 2020 16:46:37 -0700 Subject: [PATCH 0287/2113] Fixed 'datasette plugins' command, with tests - closes #802 --- datasette/app.py | 4 ++-- datasette/cli.py | 2 +- tests/fixtures.py | 56 +++++++++++++++++++++++++++++++++++++++++++ tests/test_api.py | 58 ++------------------------------------------- tests/test_cli.py | 31 +++++++++++++++++++++++- tests/test_utils.py | 2 +- 6 files changed, 92 insertions(+), 61 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 54cf02f8..444a065a 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -626,9 +626,9 @@ class Datasette: }, } - def _plugins(self, request): + def _plugins(self, request=None, all=False): ps = list(get_plugins()) - if not request.args.get("all"): + if all is False or (request is not None and request.args.get("all")): ps = [p for p in ps if p["name"] not in DEFAULT_PLUGINS] return [ { diff --git a/datasette/cli.py b/datasette/cli.py index 23f9e36b..2e3c8e36 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -126,7 +126,7 @@ pm.hook.publish_subcommand(publish=publish) def plugins(all, plugins_dir): "List currently available plugins" app = Datasette([], plugins_dir=plugins_dir) - click.echo(json.dumps(app.plugins(all), indent=4)) + click.echo(json.dumps(app._plugins(all=all), indent=4)) @cli.command() diff --git a/tests/fixtures.py b/tests/fixtures.py index a64a8295..4ca7b10f 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -22,6 +22,62 @@ TEMP_PLUGIN_SECRET_FILE = os.path.join(tempfile.gettempdir(), "plugin-secret") PLUGINS_DIR = str(pathlib.Path(__file__).parent / "plugins") +EXPECTED_PLUGINS = [ + { + "name": "messages_output_renderer.py", + "static": False, + "templates": False, + "version": None, + "hooks": ["register_output_renderer"], + }, + { + "name": "my_plugin.py", + "static": False, + "templates": False, + "version": None, + "hooks": [ + "actor_from_request", + "extra_body_script", + "extra_css_urls", + "extra_js_urls", + "extra_template_vars", + "permission_allowed", + "prepare_connection", + "prepare_jinja2_environment", + "register_facet_classes", + "render_cell", + ], + }, + { + "name": "my_plugin_2.py", + "static": False, + "templates": False, + "version": None, + "hooks": [ + "actor_from_request", + "asgi_wrapper", + "extra_js_urls", + "extra_template_vars", + "permission_allowed", + "render_cell", + ], + }, + { + "name": "register_output_renderer.py", + "static": False, + "templates": False, + "version": None, + "hooks": ["register_output_renderer"], + }, + { + "name": "view_name.py", + "static": False, + "templates": False, + "version": None, + "hooks": ["extra_template_vars"], + }, +] + 
class TestResponse: def __init__(self, status, headers, body): diff --git a/tests/test_api.py b/tests/test_api.py index 4b752f31..0aa62a95 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -15,6 +15,7 @@ from .fixtures import ( # noqa generate_compound_rows, generate_sortable_rows, make_app_client, + EXPECTED_PLUGINS, METADATA, ) import json @@ -1259,62 +1260,7 @@ def test_threads_json(app_client): def test_plugins_json(app_client): response = app_client.get("/-/plugins.json") - expected = [ - { - "name": "messages_output_renderer.py", - "static": False, - "templates": False, - "version": None, - "hooks": ["register_output_renderer"], - }, - { - "name": "my_plugin.py", - "static": False, - "templates": False, - "version": None, - "hooks": [ - "actor_from_request", - "extra_body_script", - "extra_css_urls", - "extra_js_urls", - "extra_template_vars", - "permission_allowed", - "prepare_connection", - "prepare_jinja2_environment", - "register_facet_classes", - "render_cell", - ], - }, - { - "name": "my_plugin_2.py", - "static": False, - "templates": False, - "version": None, - "hooks": [ - "actor_from_request", - "asgi_wrapper", - "extra_js_urls", - "extra_template_vars", - "permission_allowed", - "render_cell", - ], - }, - { - "name": "register_output_renderer.py", - "static": False, - "templates": False, - "version": None, - "hooks": ["register_output_renderer"], - }, - { - "name": "view_name.py", - "static": False, - "templates": False, - "version": None, - "hooks": ["extra_template_vars"], - }, - ] - assert expected == sorted(response.json, key=lambda p: p["name"]) + assert EXPECTED_PLUGINS == sorted(response.json, key=lambda p: p["name"]) def test_versions_json(app_client): diff --git a/tests/test_cli.py b/tests/test_cli.py index 529661ce..c53e9a3e 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,4 +1,9 @@ -from .fixtures import app_client, make_app_client, TestClient as _TestClient +from .fixtures import ( + app_client, + make_app_client, + TestClient as _TestClient, + EXPECTED_PLUGINS, +) from datasette.cli import cli, serve from click.testing import CliRunner import io @@ -50,6 +55,30 @@ def test_spatialite_error_if_attempt_to_open_spatialite(): assert "trying to load a SpatiaLite database" in result.output +def test_plugins_cli(app_client): + runner = CliRunner() + result1 = runner.invoke(cli, ["plugins"]) + assert sorted(EXPECTED_PLUGINS, key=lambda p: p["name"]) == sorted( + json.loads(result1.output), key=lambda p: p["name"] + ) + # Try with --all + result2 = runner.invoke(cli, ["plugins", "--all"]) + names = [p["name"] for p in json.loads(result2.output)] + # Should have all the EXPECTED_PLUGINS + assert set(names).issuperset(set(p["name"] for p in EXPECTED_PLUGINS)) + # And the following too: + assert set(names).issuperset( + [ + "datasette.sql_functions", + "datasette.actor_auth_cookie", + "datasette.facets", + "datasette.publish.cloudrun", + "datasette.default_permissions", + "datasette.publish.heroku", + ] + ) + + def test_metadata_yaml(): yaml_file = io.StringIO( textwrap.dedent( diff --git a/tests/test_utils.py b/tests/test_utils.py index cf714215..4931ef3b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -449,7 +449,7 @@ def test_call_with_supported_arguments(): ({"foo": ["bar", "baz"]}, False), ({"foo": ("bar", "baz")}, False), ({"foo": "bar"}, True), - ] + ], ) def test_multi_params(data, should_raise): if should_raise: From 75c143a84cee2fad878c6318755582522b9afff3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jun 2020 
16:55:08 -0700 Subject: [PATCH 0288/2113] Fixed /-/plugins?all=1, refs #802 --- datasette/app.py | 7 ++++++- tests/test_api.py | 6 ++++++ tests/test_cli.py | 12 ++---------- 3 files changed, 14 insertions(+), 11 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 444a065a..1624f6ea 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -628,7 +628,12 @@ class Datasette: def _plugins(self, request=None, all=False): ps = list(get_plugins()) - if all is False or (request is not None and request.args.get("all")): + should_show_all = False + if request is not None: + should_show_all = request.args.get("all") + else: + should_show_all = all + if not should_show_all: ps = [p for p in ps if p["name"] not in DEFAULT_PLUGINS] return [ { diff --git a/tests/test_api.py b/tests/test_api.py index 0aa62a95..b35c0a2d 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,3 +1,4 @@ +from datasette.plugins import DEFAULT_PLUGINS from datasette.utils import detect_json1 from .fixtures import ( # noqa app_client, @@ -1261,6 +1262,11 @@ def test_threads_json(app_client): def test_plugins_json(app_client): response = app_client.get("/-/plugins.json") assert EXPECTED_PLUGINS == sorted(response.json, key=lambda p: p["name"]) + # Try with ?all=1 + response = app_client.get("/-/plugins.json?all=1") + names = {p["name"] for p in response.json} + assert names.issuperset(p["name"] for p in EXPECTED_PLUGINS) + assert names.issuperset(DEFAULT_PLUGINS) def test_versions_json(app_client): diff --git a/tests/test_cli.py b/tests/test_cli.py index c53e9a3e..2616f1d1 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -4,6 +4,7 @@ from .fixtures import ( TestClient as _TestClient, EXPECTED_PLUGINS, ) +from datasette.plugins import DEFAULT_PLUGINS from datasette.cli import cli, serve from click.testing import CliRunner import io @@ -67,16 +68,7 @@ def test_plugins_cli(app_client): # Should have all the EXPECTED_PLUGINS assert set(names).issuperset(set(p["name"] for p in EXPECTED_PLUGINS)) # And the following too: - assert set(names).issuperset( - [ - "datasette.sql_functions", - "datasette.actor_auth_cookie", - "datasette.facets", - "datasette.publish.cloudrun", - "datasette.default_permissions", - "datasette.publish.heroku", - ] - ) + assert set(names).issuperset(DEFAULT_PLUGINS) def test_metadata_yaml(): From 9c563d6aed072f14d3d25f58e84659f9caa1a243 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jun 2020 17:15:52 -0700 Subject: [PATCH 0289/2113] Bump asgi-csrf to 0.5.1 for a bug fix Refs https://github.com/simonw/asgi-csrf/issues/10 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index c0316deb..678a022f 100644 --- a/setup.py +++ b/setup.py @@ -53,7 +53,7 @@ setup( "uvicorn~=0.11", "aiofiles>=0.4,<0.6", "janus>=0.4,<0.6", - "asgi-csrf>=0.4", + "asgi-csrf>=0.5.1", "PyYAML~=5.3", "mergedeep>=1.1.1,<1.4.0", "itsdangerous~=1.1", From 30a8132d58a89fed0e034e058b62fab5180fae0f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 11:18:46 -0700 Subject: [PATCH 0290/2113] Docs for authentication + canned query permissions, refs #800 Closes #786 --- docs/authentication.rst | 108 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 106 insertions(+), 2 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 0a9a4c0d..2c07f75a 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -4,14 +4,118 @@ Authentication and permissions ================================ -Datasette's authentication 
system is currently under construction. Follow `issue 699 `__ to track the development of this feature. +Datasette does not require authentication by default. Any visitor to a Datasette instance can explore the full data and execute SQL queries. + +Datasette's plugin system can be used to add many different styles of authentication, such as user accounts, single sign-on or API keys. + +.. _authentication_actor: + +Actors +====== + +Through plugins, Datasette can support both authenticated users (with cookies) and authenticated API agents (via authentication tokens). The word "actor" is used to cover both of these cases. + +Every request to Datasette has an associated actor value. This can be ``None`` for unauthenticated requests, or a JSON compatible Python dictionary for authenticated users or API agents. + +The only required field in an actor is ``"id"``, which must be a string. Plugins may decide to add any other fields to the actor dictionary. + +Plugins can use the :ref:`plugin_actor_from_request` hook to implement custom logic for authenticating an actor based on the incoming HTTP request. + +.. _authentication_root: + +Using the "root" actor +====================== + +Datasette currently leaves almost all forms of authentication to plugins - `datasette-auth-github `__ for example. + +The one exception is the "root" account, which you can sign into while using Datasette on your local machine. This provides access to a small number of debugging features. + +To sign in as root, start Datasette using the ``--root`` command-line option, like this:: + + $ datasette --root + http://127.0.0.1:8001/-/auth-token?token=786fc524e0199d70dc9a581d851f466244e114ca92f33aa3b42a139e9388daa7 + INFO: Started server process [25801] + INFO: Waiting for application startup. + INFO: Application startup complete. + INFO: Uvicorn running on http://127.0.0.1:8001 (Press CTRL+C to quit) + +The URL on the first line includes a one-use token which can be used to sign in as the "root" actor in your browser. Click on that link and then visit ``http://127.0.0.1:8001/-/actor`` to confirm that you are authenticated as an actor that looks like this: + +.. code-block:: json + + { + "id": "root" + } + + +.. _authentication_permissions_canned_queries: + +Setting permissions for canned queries +====================================== + +Datasette's :ref:`canned_queries` default to allowing any user to execute them. + +You can limit who is allowed to execute a specific query with the ``"allow"`` key in the :ref:`metadata` configuration for that query. + +Here's how to restrict access to a write query to just the "root" user: + +.. code-block:: json + + { + "databases": { + "mydatabase": { + "queries": { + "add_name": { + "sql": "INSERT INTO names (name) VALUES (:name)", + "write": true, + "allow": { + "id": ["root"] + } + } + } + } + } + } + +To allow any of the actors with an ``id`` matching a specific list of values, use this: + +.. code-block:: json + + { + "allow": { + "id": ["simon", "cleopaws"] + } + } + +This works for other keys as well. Imagine an actor that looks like this: + +.. code-block:: json + + { + "id": "simon", + "roles": ["staff", "developer"] + } + +You can provide access to any user that has "developer" as one of their roles like so: + +.. code-block:: json + + { + "allow": { + "roles": ["developer"] + } + } + +Note that "roles" is not a concept that is baked into Datasette - it's more of a convention that plugins can choose to implement and act on. + +These keys act as an "or" mechanism. 
An actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block. .. _PermissionsDebugView: Permissions Debug ================= -The debug tool at ``/-/permissions`` is only available to the root user. +The debug tool at ``/-/permissions`` is only available to the :ref:`authenticated root user ` (or any actor granted the ``permissions-debug`` action according to a plugin). It shows the thirty most recent permission checks that have been carried out by the Datasette instance. From d4c7b85f556230923d37ff327a068ed08aa9b62b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 11:23:54 -0700 Subject: [PATCH 0291/2113] Documentation for "id": "*", refs #800 --- docs/authentication.rst | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/authentication.rst b/docs/authentication.rst index 2c07f75a..a90dcc41 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -108,6 +108,16 @@ You can provide access to any user that has "developer" as one of their roles li Note that "roles" is not a concept that is baked into Datasette - it's more of a convention that plugins can choose to implement and act on. +If you want to provide access to any actor with a value for a specific key, use ``"*"``. For example, to specify that a query can be accessed by any logged-in user use this: + +.. code-block:: json + + { + "allow": { + "id": "*" + } + } + These keys act as an "or" mechanism. An actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block. .. _PermissionsDebugView: From 14f6b4d200f24940a795ddc0825319ab2891bde2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 11:39:11 -0700 Subject: [PATCH 0292/2113] actor_matches_allow utility function, refs #800 --- datasette/utils/__init__.py | 19 +++++++++++++++++++ docs/authentication.rst | 18 ++++++++++++++++-- tests/test_utils.py | 27 +++++++++++++++++++++++++++ 3 files changed, 62 insertions(+), 2 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 059db184..eb118f38 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -854,3 +854,22 @@ def call_with_supported_arguments(fn, **kwargs): ) call_with.append(kwargs[parameter]) return fn(*call_with) + + +def actor_matches_allow(actor, allow): + if allow is None: + return True + for key, values in allow.items(): + if values == "*" and key in actor: + return True + if isinstance(values, str): + values = [values] + actor_values = actor.get(key) + if actor_values is None: + return False + if isinstance(actor_values, str): + actor_values = [actor_values] + actor_values = set(actor_values) + if actor_values.intersection(values): + return True + return False diff --git a/docs/authentication.rst b/docs/authentication.rst index a90dcc41..85bbbbbd 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -50,8 +50,8 @@ The URL on the first line includes a one-use token which can be used to sign in .. _authentication_permissions_canned_queries: -Setting permissions for canned queries -====================================== +Permissions for canned queries +============================== Datasette's :ref:`canned_queries` default to allowing any user to execute them. @@ -120,6 +120,20 @@ If you want to provide access to any actor with a value for a specific key, use These keys act as an "or" mechanism. 
An actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block. +.. _authentication_actor_matches_allow: + +actor_matches_allow() +===================== + +Plugins that wish to implement the same permissions scheme as canned queries can take advantage of the ``datasette.utils.actor_matches_allow(actor, allow)`` function: + +.. code-block:: python + + from datasette.utils import actor_matches_allow + + actor_matches_allow({"id": "root"}, {"id": "*"}) + # returns True + .. _PermissionsDebugView: Permissions Debug diff --git a/tests/test_utils.py b/tests/test_utils.py index 4931ef3b..7c24648a 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -459,3 +459,30 @@ def test_multi_params(data, should_raise): p1 = utils.MultiParams(data) assert "bar" == p1["foo"] assert ["bar", "baz"] == list(p1.getlist("foo")) + + +@pytest.mark.parametrize( + "actor,allow,expected", + [ + ({"id": "root"}, None, True), + ({"id": "root"}, {}, False), + # Special "*" value for any key: + ({"id": "root"}, {"id": "*"}, True), + ({}, {"id": "*"}, False), + ({"name": "root"}, {"id": "*"}, False), + # Supports single strings or list of values: + ({"id": "root"}, {"id": "bob"}, False), + ({"id": "root"}, {"id": ["bob"]}, False), + ({"id": "root"}, {"id": "root"}, True), + ({"id": "root"}, {"id": ["root"]}, True), + # Any matching role will work: + ({"id": "garry", "roles": ["staff", "dev"]}, {"roles": ["staff"]}, True), + ({"id": "garry", "roles": ["staff", "dev"]}, {"roles": ["dev"]}, True), + ({"id": "garry", "roles": ["staff", "dev"]}, {"roles": ["otter"]}, False), + ({"id": "garry", "roles": ["staff", "dev"]}, {"roles": ["dev", "otter"]}, True), + ({"id": "garry", "roles": []}, {"roles": ["staff"]}, False), + ({"id": "garry"}, {"roles": ["staff"]}, False), + ], +) +def test_actor_matches_allow(actor, allow, expected): + assert expected == utils.actor_matches_allow(actor, allow) From 3f83d4632a643266f46ccd955d951be7aacbab99 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 12:05:22 -0700 Subject: [PATCH 0293/2113] Respect query permissions on database page, refs #800 --- datasette/templates/database.html | 2 +- datasette/utils/__init__.py | 1 + datasette/views/database.py | 13 ++++++++++++- tests/test_canned_write.py | 31 ++++++++++++++++++++++++++++++- tests/test_utils.py | 3 +++ 5 files changed, 47 insertions(+), 3 deletions(-) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index e47b2418..fc88003c 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -60,7 +60,7 @@

    Queries

    {% endif %} diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index eb118f38..077728f4 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -857,6 +857,7 @@ def call_with_supported_arguments(fn, **kwargs): def actor_matches_allow(actor, allow): + actor = actor or {} if allow is None: return True for key, values in allow.items(): diff --git a/datasette/views/database.py b/datasette/views/database.py index 558dd0f0..abc7d3bb 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -2,6 +2,7 @@ import os import jinja2 from datasette.utils import ( + actor_matches_allow, to_css_class, validate_sql_select, is_url, @@ -53,6 +54,16 @@ class DatabaseView(DataView): ) tables.sort(key=lambda t: (t["hidden"], t["name"])) + canned_queries = [ + dict( + query, + requires_auth=not actor_matches_allow(None, query.get("allow", None)), + ) + for query in self.ds.get_canned_queries(database) + if actor_matches_allow( + request.scope.get("actor", None), query.get("allow", None) + ) + ] return ( { "database": database, @@ -60,7 +71,7 @@ class DatabaseView(DataView): "tables": tables, "hidden_count": len([t for t in tables if t["hidden"]]), "views": views, - "queries": self.ds.get_canned_queries(database), + "queries": canned_queries, }, { "show_hidden": request.args.get("_show_hidden"), diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index be838063..5b5756b0 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -24,6 +24,7 @@ def canned_write_client(): "sql": "delete from names where rowid = :rowid", "write": True, "on_success_message": "Name deleted", + "allow": {"id": "root"}, }, "update_name": { "sql": "update names set name = :name where rowid = :rowid", @@ -52,7 +53,11 @@ def test_insert(canned_write_client): def test_custom_success_message(canned_write_client): response = canned_write_client.post( - "/data/delete_name", {"rowid": 1}, allow_redirects=False, csrftoken_from=True + "/data/delete_name", + {"rowid": 1}, + cookies={"ds_actor": canned_write_client.ds.sign({"id": "root"}, "actor")}, + allow_redirects=False, + csrftoken_from=True, ) assert 302 == response.status messages = canned_write_client.ds.unsign( @@ -93,3 +98,27 @@ def test_insert_error(canned_write_client): def test_custom_params(canned_write_client): response = canned_write_client.get("/data/update_name?extra=foo") assert '' in response.text + + +def test_canned_query_permissions_on_database_page(canned_write_client): + # Without auth only shows three queries + query_names = [ + q["name"] for q in canned_write_client.get("/data.json").json["queries"] + ] + assert ["add_name", "add_name_specify_id", "update_name"] == query_names + + # With auth shows four + response = canned_write_client.get( + "/data.json", + cookies={"ds_actor": canned_write_client.ds.sign({"id": "root"}, "actor")}, + ) + assert 200 == response.status + assert [ + {"name": "add_name", "requires_auth": False}, + {"name": "add_name_specify_id", "requires_auth": False}, + {"name": "delete_name", "requires_auth": True}, + {"name": "update_name", "requires_auth": False}, + ] == [ + {"name": q["name"], "requires_auth": q["requires_auth"]} + for q in response.json["queries"] + ] diff --git a/tests/test_utils.py b/tests/test_utils.py index 7c24648a..975ed0fd 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -466,6 +466,9 @@ def test_multi_params(data, should_raise): [ ({"id": "root"}, None, True), ({"id": "root"}, {}, False), + (None, None, True), + 
(None, {}, False), + (None, {"id": "root"}, False), # Special "*" value for any key: ({"id": "root"}, {"id": "*"}, True), ({}, {"id": "*"}, False), From 070838bfa19b177f59ef3bd8f0139266adecda90 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 12:26:19 -0700 Subject: [PATCH 0294/2113] Better test for Vary header --- tests/fixtures.py | 2 -- tests/test_canned_write.py | 6 ++++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index 4ca7b10f..2268ef4d 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -132,8 +132,6 @@ class TestClient: if csrftoken_from is True: csrftoken_from = path token_response = await self._request(csrftoken_from) - # Check this had a Vary: Cookie header - assert "Cookie" == token_response.headers["vary"] csrftoken = token_response.cookies["ds_csrftoken"] cookies["ds_csrftoken"] = csrftoken post_data["csrftoken"] = csrftoken diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index 5b5756b0..aacc586f 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -100,6 +100,12 @@ def test_custom_params(canned_write_client): assert '' in response.text +def test_vary_header(canned_write_client): + # These forms embed a csrftoken so they should be served with Vary: Cookie + assert "vary" not in canned_write_client.get("/data").headers + assert "Cookie" == canned_write_client.get("/data/update_name").headers["vary"] + + def test_canned_query_permissions_on_database_page(canned_write_client): # Without auth only shows three queries query_names = [ From 966eec7f75d2e1b809b001bb7e82f35d477f77ea Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 12:27:00 -0700 Subject: [PATCH 0295/2113] Check permissions on canned query page, refs #800 --- datasette/views/database.py | 10 +++++++++- tests/test_canned_write.py | 8 ++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index abc7d3bb..4e9a6da7 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -9,7 +9,7 @@ from datasette.utils import ( path_with_added_args, path_with_removed_args, ) -from datasette.utils.asgi import AsgiFileDownload +from datasette.utils.asgi import AsgiFileDownload, Response from datasette.plugins import pm from .base import DatasetteError, DataView @@ -125,6 +125,14 @@ class QueryView(DataView): params.pop("sql") if "_shape" in params: params.pop("_shape") + + # Respect canned query permissions + if canned_query: + if not actor_matches_allow( + request.scope.get("actor", None), metadata.get("allow") + ): + return Response("Permission denied", status=403) + # Extract any :named parameters named_parameters = named_parameters or self.re_named_parameter.findall(sql) named_parameter_values = { diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index aacc586f..73b01e51 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -128,3 +128,11 @@ def test_canned_query_permissions_on_database_page(canned_write_client): {"name": q["name"], "requires_auth": q["requires_auth"]} for q in response.json["queries"] ] + + +def test_canned_query_permissions(canned_write_client): + assert 403 == canned_write_client.get("/data/delete_name").status + assert 200 == canned_write_client.get("/data/update_name").status + cookies = {"ds_actor": canned_write_client.ds.sign({"id": "root"}, "actor")} + assert 200 == canned_write_client.get("/data/delete_name", cookies=cookies).status 
+ assert 200 == canned_write_client.get("/data/update_name", cookies=cookies).status From 3359d54a4eb9c9725c27a85437661b5180c4099a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 12:33:08 -0700 Subject: [PATCH 0296/2113] Use cookies when accessing csrftoken_from --- tests/fixtures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index 2268ef4d..75bd6b94 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -131,7 +131,7 @@ class TestClient: if csrftoken_from is not None: if csrftoken_from is True: csrftoken_from = path - token_response = await self._request(csrftoken_from) + token_response = await self._request(csrftoken_from, cookies=cookies) csrftoken = token_response.cookies["ds_csrftoken"] cookies["ds_csrftoken"] = csrftoken post_data["csrftoken"] = csrftoken From f1daf64e722f9aedc61bea1636a9df715c4c4a8e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 12:46:40 -0700 Subject: [PATCH 0297/2113] Link to canned query permissions documentation --- docs/authentication.rst | 2 +- docs/sql_queries.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 85bbbbbd..8b24a44a 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -53,7 +53,7 @@ The URL on the first line includes a one-use token which can be used to sign in Permissions for canned queries ============================== -Datasette's :ref:`canned_queries` default to allowing any user to execute them. +Datasette's :ref:`canned queries ` default to allowing any user to execute them. You can limit who is allowed to execute a specific query with the ``"allow"`` key in the :ref:`metadata` configuration for that query. diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index aa1edc98..5df8bdb0 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -217,7 +217,7 @@ Writable canned queries Canned queries by default are read-only. You can use the ``"write": true`` key to indicate that a canned query can write to the database. -You may wish to use this feature in conjunction with :ref:`authentication`. +See :ref:`authentication_permissions_canned_queries` for details on how to add permission checks to canned queries, using the ``"allow"`` key. .. code-block:: json From 7dc23cd71aeb5a0e194f25fd1b8e569e3bb2149b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 13:05:09 -0700 Subject: [PATCH 0298/2113] Whitespace --- docs/authentication.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 8b24a44a..730a86c8 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -47,7 +47,6 @@ The URL on the first line includes a one-use token which can be used to sign in "id": "root" } - .. _authentication_permissions_canned_queries: Permissions for canned queries From bd4de0647d660709de122303a1aece3a8ef88394 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 19:09:59 -0700 Subject: [PATCH 0299/2113] Improved permissions documentation --- docs/authentication.rst | 7 +++++++ docs/internals.rst | 2 ++ 2 files changed, 9 insertions(+) diff --git a/docs/authentication.rst b/docs/authentication.rst index 730a86c8..fd70000e 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -47,6 +47,13 @@ The URL on the first line includes a one-use token which can be used to sign in "id": "root" } +.. 
_authentication_permissions: + +Permissions +=========== + +Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. This method is also used by Datasette core code itself, which allows plugins to help make decisions on which actions are allowed by implementing the :ref:`permission_allowed(...) ` plugin hook. + .. _authentication_permissions_canned_queries: Permissions for canned queries diff --git a/docs/internals.rst b/docs/internals.rst index 4b4adc5e..25b2d875 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -116,6 +116,8 @@ This method lets you read plugin configuration values that were set in ``metadat Renders a `Jinja template `__ using Datasette's preconfigured instance of Jinja and returns the resulting string. The template will have access to Datasette's default template functions and any functions that have been made available by other plugins. +.. _datasette_permission_allowed: + await .permission_allowed(actor, action, resource_type=None, resource_identifier=None, default=False) ----------------------------------------------------------------------------------------------------- From 86dec9e8fffd6c4efec928ae9b5713748dec7e74 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 22:30:36 -0700 Subject: [PATCH 0300/2113] Added permission check to every view, closes #808 --- datasette/app.py | 5 ++ datasette/templates/permissions_debug.html | 2 +- datasette/utils/asgi.py | 4 + datasette/views/base.py | 14 ++++ datasette/views/database.py | 8 ++ datasette/views/index.py | 1 + datasette/views/table.py | 5 ++ docs/authentication.rst | 88 ++++++++++++++++++++++ tests/conftest.py | 38 ++++++++++ tests/fixtures.py | 16 ++++ tests/test_api.py | 2 +- tests/test_auth.py | 1 + tests/test_html.py | 38 ++++++++++ 13 files changed, 220 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 1624f6ea..f433a10a 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -49,6 +49,7 @@ from .utils import ( ) from .utils.asgi import ( AsgiLifespan, + Forbidden, NotFound, Request, Response, @@ -1003,6 +1004,10 @@ class DatasetteRouter(AsgiRouter): status = 404 info = {} message = exception.args[0] + elif isinstance(exception, Forbidden): + status = 403 + info = {} + message = exception.args[0] elif isinstance(exception, DatasetteError): status = exception.status info = exception.error_dict diff --git a/datasette/templates/permissions_debug.html b/datasette/templates/permissions_debug.html index fb098c5c..dda57dfa 100644 --- a/datasette/templates/permissions_debug.html +++ b/datasette/templates/permissions_debug.html @@ -47,7 +47,7 @@

    Actor: {{ check.actor|tojson }}

    {% if check.resource_type %} -

    Resource: {{ check.resource_type }}: {{ check.resource_identifier }}

    +

    Resource: {{ check.resource_type }} = {{ check.resource_identifier }}

    {% endif %}
    {% endfor %} diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index ba131dc8..fa78c8df 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -13,6 +13,10 @@ class NotFound(Exception): pass +class Forbidden(Exception): + pass + + class Request: def __init__(self, scope, receive): self.scope = scope diff --git a/datasette/views/base.py b/datasette/views/base.py index 315c96fe..9c2cbbcc 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -29,6 +29,7 @@ from datasette.utils.asgi import ( AsgiWriter, AsgiRouter, AsgiView, + Forbidden, NotFound, Response, ) @@ -63,6 +64,19 @@ class BaseView(AsgiView): response.body = b"" return response + async def check_permission( + self, request, action, resource_type=None, resource_identifier=None + ): + ok = await self.ds.permission_allowed( + request.scope.get("actor"), + action, + resource_type=resource_type, + resource_identifier=resource_identifier, + default=True, + ) + if not ok: + raise Forbidden(action) + def database_url(self, database): db = self.ds.databases[database] base_url = self.ds.config("base_url") diff --git a/datasette/views/database.py b/datasette/views/database.py index 4e9a6da7..eb7c29ca 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -19,6 +19,7 @@ class DatabaseView(DataView): name = "database" async def data(self, request, database, hash, default_labels=False, _size=None): + await self.check_permission(request, "view-database", "database", database) metadata = (self.ds.metadata("databases") or {}).get(database, {}) self.ds.update_with_inherited_metadata(metadata) @@ -89,6 +90,9 @@ class DatabaseDownload(DataView): name = "database_download" async def view_get(self, request, database, hash, correct_hash_present, **kwargs): + await self.check_permission( + request, "view-database-download", "database", database + ) if database not in self.ds.databases: raise DatasetteError("Invalid database", status=404) db = self.ds.databases[database] @@ -128,6 +132,10 @@ class QueryView(DataView): # Respect canned query permissions if canned_query: + await self.check_permission( + request, "view-query", "query", (database, canned_query) + ) + # TODO: fix this to use that permission check if not actor_matches_allow( request.scope.get("actor", None), metadata.get("allow") ): diff --git a/datasette/views/index.py b/datasette/views/index.py index fe88a38c..40c41002 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -22,6 +22,7 @@ class IndexView(BaseView): self.ds = datasette async def get(self, request, as_format): + await self.check_permission(request, "view-index") databases = [] for name, db in self.ds.databases.items(): table_names = await db.table_names() diff --git a/datasette/views/table.py b/datasette/views/table.py index ec1b6c7c..32c7f839 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -267,6 +267,8 @@ class TableView(RowTableShared): if not is_view and not table_exists: raise NotFound("Table not found: {}".format(table)) + await self.check_permission(request, "view-table", "table", (database, table)) + pks = await db.primary_keys(table) table_columns = await db.table_columns(table) @@ -844,6 +846,9 @@ class RowView(RowTableShared): async def data(self, request, database, hash, table, pk_path, default_labels=False): pk_values = urlsafe_components(pk_path) + await self.check_permission( + request, "view-row", "row", tuple([database, table] + list(pk_values)) + ) db = self.ds.databases[database] pks = await 
db.primary_keys(table) use_rowid = not pks diff --git a/docs/authentication.rst b/docs/authentication.rst index fd70000e..b0473ee8 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -150,3 +150,91 @@ The debug tool at ``/-/permissions`` is only available to the :ref:`authenticate It shows the thirty most recent permission checks that have been carried out by the Datasette instance. This is designed to help administrators and plugin authors understand exactly how permission checks are being carried out, in order to effectively configure Datasette's permission system. + + +.. _permissions: + +Permissions +=========== + +This section lists all of the permission checks that are carried out by Datasette core, along with their ``resource_type`` and ``resource_identifier`` if those are passed. + +.. _permissions_view_index: + +view-index +---------- + +Actor is allowed to view the index page, e.g. https://latest.datasette.io/ + + +.. _permissions_view_database: + +view-database +------------- + +Actor is allowed to view a database page, e.g. https://latest.datasette.io/fixtures + +``resource_type`` - string + "database" + +``resource_identifier`` - string + The name of the database + +.. _permissions_view_database_download: + +view-database-download +----------------------- + +Actor is allowed to download a database, e.g. https://latest.datasette.io/fixtures.db + +``resource_type`` - string + "database" + +``resource_identifier`` - string + The name of the database + +.. _permissions_view_table: + +view-table +---------- + +Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.io/fixtures/complex_foreign_keys + +``resource_type`` - string + "table" - even if this is actually a SQL view + +``resource_identifier`` - tuple: (string, string) + The name of the database, then the name of the table + +.. _permissions_view_row: + +view-row +-------- + +Actor is allowed to view a row page, e.g. https://latest.datasette.io/fixtures/compound_primary_key/a,b + +``resource_type`` - string + "row" + +``resource_identifier`` - tuple: (string, string, strings...) + The name of the database, then the name of the table, then the primary key of the row. The primary key may be a single value or multiple values, so the ``resource_identifier`` tuple may be three or more items long. + +.. _permissions_view_query: + +view-query +---------- + +Actor is allowed to view a :ref:`canned query ` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size + +``resource_type`` - string + "query" + +``resource_identifier`` - string + The name of the canned query + +.. _permissions_permissions_debug: + +permissions-debug +----------------- + +Actor is allowed to view the ``/-/permissions`` debug page. diff --git a/tests/conftest.py b/tests/conftest.py index a19ad18d..1921ae3a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,15 @@ import os +import pathlib import pytest +import re + +UNDOCUMENTED_PERMISSIONS = { + "this_is_allowed", + "this_is_denied", + "this_is_allowed_async", + "this_is_denied_async", + "no_match", +} def pytest_configure(config): @@ -39,3 +49,31 @@ def restore_working_directory(tmpdir, request): os.chdir(previous_cwd) request.addfinalizer(return_to_previous) + + +@pytest.fixture(scope="session", autouse=True) +def check_permission_actions_are_documented(): + from datasette.plugins import pm + + content = ( + (pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst") + .open() + .read() + ) + permissions_re = re.compile(r"\.\. 
_permissions_([^\s:]+):") + documented_permission_actions = set(permissions_re.findall(content)).union( + UNDOCUMENTED_PERMISSIONS + ) + + def before(hook_name, hook_impls, kwargs): + if hook_name == "permission_allowed": + action = kwargs.get("action").replace("-", "_") + assert ( + action in documented_permission_actions + ), "Undocumented permission action: {}, resource_type: {}, resource_identifier: {}".format( + action, kwargs["resource_type"], kwargs["resource_identifier"] + ) + + pm.add_hookcall_monitoring( + before=before, after=lambda outcome, hook_name, hook_impls, kwargs: None + ) diff --git a/tests/fixtures.py b/tests/fixtures.py index 75bd6b94..d175dfd5 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -840,3 +840,19 @@ if __name__ == "__main__": sys.argv[0] ) ) + + +def assert_permission_checked( + datasette, action, resource_type=None, resource_identifier=None +): + assert [ + pc + for pc in datasette._permission_checks + if pc["action"] == action + and pc["resource_type"] == resource_type + and pc["resource_identifier"] == resource_identifier + ], """Missing expected permission check: action={}, resource_type={}, resource_identifier={} + Permission checks seen: {} + """.format( + action, resource_type, resource_identifier, datasette._permission_checks + ) diff --git a/tests/test_api.py b/tests/test_api.py index b35c0a2d..555e394a 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1721,7 +1721,7 @@ def test_trace(app_client): assert isinstance(trace["traceback"], list) assert isinstance(trace["database"], str) assert isinstance(trace["sql"], str) - assert isinstance(trace["params"], (list, dict)) + assert isinstance(trace["params"], (list, dict, None.__class__)) @pytest.mark.parametrize( diff --git a/tests/test_auth.py b/tests/test_auth.py index ac8d7abe..40dc2587 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -23,6 +23,7 @@ def test_actor_cookie(app_client): def test_permissions_debug(app_client): + app_client.ds._permission_checks.clear() assert 403 == app_client.get("/-/permissions").status # With the cookie it should work cookie = app_client.ds.sign({"id": "root"}, "actor") diff --git a/tests/test_html.py b/tests/test_html.py index 2d2a141a..3569b92c 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -4,6 +4,7 @@ from .fixtures import ( # noqa app_client_shorter_time_limit, app_client_two_attached_databases, app_client_with_hash, + assert_permission_checked, make_app_client, METADATA, ) @@ -17,6 +18,7 @@ import urllib.parse def test_homepage(app_client_two_attached_databases): response = app_client_two_attached_databases.get("/") + assert_permission_checked(app_client_two_attached_databases.ds, "view-index") assert response.status == 200 assert "text/html; charset=utf-8" == response.headers["content-type"] soup = Soup(response.body, "html.parser") @@ -75,6 +77,12 @@ def test_static_mounts(): def test_memory_database_page(): for client in make_app_client(memory=True): response = client.get("/:memory:") + assert_permission_checked( + client.ds, + "view-database", + resource_type="database", + resource_identifier=":memory:", + ) assert response.status == 200 @@ -87,6 +95,12 @@ def test_database_page_redirects_with_url_hash(app_client_with_hash): def test_database_page(app_client): response = app_client.get("/fixtures") + assert_permission_checked( + app_client.ds, + "view-database", + resource_type="database", + resource_identifier="fixtures", + ) soup = Soup(response.body, "html.parser") queries_ul = soup.find("h2", 
text="Queries").find_next_sibling("ul") assert queries_ul is not None @@ -197,6 +211,12 @@ def test_row_page_does_not_truncate(): for client in make_app_client(config={"truncate_cells_html": 5}): response = client.get("/fixtures/facetable/1") assert response.status == 200 + assert_permission_checked( + client.ds, + "view-row", + resource_type="row", + resource_identifier=("fixtures", "facetable", "1"), + ) table = Soup(response.body, "html.parser").find("table") assert table["class"] == ["rows-and-columns"] assert ["Mission"] == [ @@ -506,6 +526,12 @@ def test_templates_considered(app_client, path, expected_considered): def test_table_html_simple_primary_key(app_client): response = app_client.get("/fixtures/simple_primary_key?_size=3") + assert_permission_checked( + app_client.ds, + "view-table", + resource_type="table", + resource_identifier=("fixtures", "simple_primary_key"), + ) assert response.status == 200 table = Soup(response.body, "html.parser").find("table") assert table["class"] == ["rows-and-columns"] @@ -896,6 +922,12 @@ def test_database_download_allowed_for_immutable(): assert len(soup.findAll("a", {"href": re.compile(r"\.db$")})) # Check we can actually download it assert 200 == client.get("/fixtures.db").status + assert_permission_checked( + client.ds, + "view-database-download", + resource_type="database", + resource_identifier="fixtures", + ) def test_database_download_disallowed_for_mutable(app_client): @@ -991,6 +1023,12 @@ def test_404_content_type(app_client): def test_canned_query_with_custom_metadata(app_client): response = app_client.get("/fixtures/neighborhood_search?text=town") + assert_permission_checked( + app_client.ds, + "view-query", + resource_type="query", + resource_identifier=("fixtures", "neighborhood_search"), + ) assert response.status == 200 soup = Soup(response.body, "html.parser") assert "Search neighborhoods" == soup.find("h1").text From 4340845754e90fe778a7da8668b4fd9bf6ccc2c6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 7 Jun 2020 13:03:08 -0700 Subject: [PATCH 0301/2113] Nested permission checks for all views, refs #811 --- datasette/views/database.py | 10 +++++- datasette/views/index.py | 2 +- datasette/views/table.py | 5 +++ docs/authentication.rst | 21 ++++++++--- tests/fixtures.py | 36 +++++++++++-------- tests/test_html.py | 71 ++++++++++++++++++++++--------------- 6 files changed, 97 insertions(+), 48 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index eb7c29ca..4eae9e33 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -19,6 +19,7 @@ class DatabaseView(DataView): name = "database" async def data(self, request, database, hash, default_labels=False, _size=None): + await self.check_permission(request, "view-instance") await self.check_permission(request, "view-database", "database", database) metadata = (self.ds.metadata("databases") or {}).get(database, {}) self.ds.update_with_inherited_metadata(metadata) @@ -90,6 +91,8 @@ class DatabaseDownload(DataView): name = "database_download" async def view_get(self, request, database, hash, correct_hash_present, **kwargs): + await self.check_permission(request, "view-instance") + await self.check_permission(request, "view-database", "database", database) await self.check_permission( request, "view-database-download", "database", database ) @@ -132,6 +135,8 @@ class QueryView(DataView): # Respect canned query permissions if canned_query: + await self.check_permission(request, "view-instance") + await 
self.check_permission(request, "view-database", "database", database) await self.check_permission( request, "view-query", "query", (database, canned_query) ) @@ -140,7 +145,10 @@ class QueryView(DataView): request.scope.get("actor", None), metadata.get("allow") ): return Response("Permission denied", status=403) - + else: + await self.check_permission(request, "view-instance") + await self.check_permission(request, "view-database", "database", database) + await self.check_permission(request, "execute-query", "database", database) # Extract any :named parameters named_parameters = named_parameters or self.re_named_parameter.findall(sql) named_parameter_values = { diff --git a/datasette/views/index.py b/datasette/views/index.py index 40c41002..5f903474 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -22,7 +22,7 @@ class IndexView(BaseView): self.ds = datasette async def get(self, request, as_format): - await self.check_permission(request, "view-index") + await self.check_permission(request, "view-instance") databases = [] for name, db in self.ds.databases.items(): table_names = await db.table_names() diff --git a/datasette/views/table.py b/datasette/views/table.py index 32c7f839..10d6725a 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -267,6 +267,8 @@ class TableView(RowTableShared): if not is_view and not table_exists: raise NotFound("Table not found: {}".format(table)) + await self.check_permission(request, "view-instance") + await self.check_permission(request, "view-database", "database", database) await self.check_permission(request, "view-table", "table", (database, table)) pks = await db.primary_keys(table) @@ -846,6 +848,9 @@ class RowView(RowTableShared): async def data(self, request, database, hash, table, pk_path, default_labels=False): pk_values = urlsafe_components(pk_path) + await self.check_permission(request, "view-instance") + await self.check_permission(request, "view-database", "database", database) + await self.check_permission(request, "view-table", "table", (database, table)) await self.check_permission( request, "view-row", "row", tuple([database, table] + list(pk_values)) ) diff --git a/docs/authentication.rst b/docs/authentication.rst index b0473ee8..7fa96b35 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -159,12 +159,12 @@ Permissions This section lists all of the permission checks that are carried out by Datasette core, along with their ``resource_type`` and ``resource_identifier`` if those are passed. -.. _permissions_view_index: +.. _permissions_view_instance: -view-index ----------- +view-instance +------------- -Actor is allowed to view the index page, e.g. https://latest.datasette.io/ +Top level permission - Actor is allowed to view any pages within this instance, starting at https://latest.datasette.io/ .. _permissions_view_database: @@ -232,6 +232,19 @@ Actor is allowed to view a :ref:`canned query ` page, e.g. https ``resource_identifier`` - string The name of the canned query +.. _permissions_execute_query: + +execute-query +------------- + +Actor is allowed to run arbitrary SQL queries against a specific database, e.g. https://latest.datasette.io/fixtures?sql=select+100 + +``resource_type`` - string + "database" + +``resource_identifier`` - string + The name of the database + .. 
_permissions_permissions_debug: permissions-debug diff --git a/tests/fixtures.py b/tests/fixtures.py index d175dfd5..f767dc84 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -842,17 +842,25 @@ if __name__ == "__main__": ) -def assert_permission_checked( - datasette, action, resource_type=None, resource_identifier=None -): - assert [ - pc - for pc in datasette._permission_checks - if pc["action"] == action - and pc["resource_type"] == resource_type - and pc["resource_identifier"] == resource_identifier - ], """Missing expected permission check: action={}, resource_type={}, resource_identifier={} - Permission checks seen: {} - """.format( - action, resource_type, resource_identifier, datasette._permission_checks - ) +def assert_permissions_checked(datasette, actions): + # actions is a list of "action" or (action, resource_type, resource_identifier) tuples + for action in actions: + if isinstance(action, str): + resource_type = None + resource_identifier = None + else: + action, resource_type, resource_identifier = action + assert [ + pc + for pc in datasette._permission_checks + if pc["action"] == action + and pc["resource_type"] == resource_type + and pc["resource_identifier"] == resource_identifier + ], """Missing expected permission check: action={}, resource_type={}, resource_identifier={} + Permission checks seen: {} + """.format( + action, + resource_type, + resource_identifier, + json.dumps(list(datasette._permission_checks), indent=4), + ) diff --git a/tests/test_html.py b/tests/test_html.py index 3569b92c..b41c1943 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -4,7 +4,7 @@ from .fixtures import ( # noqa app_client_shorter_time_limit, app_client_two_attached_databases, app_client_with_hash, - assert_permission_checked, + assert_permissions_checked, make_app_client, METADATA, ) @@ -18,7 +18,7 @@ import urllib.parse def test_homepage(app_client_two_attached_databases): response = app_client_two_attached_databases.get("/") - assert_permission_checked(app_client_two_attached_databases.ds, "view-index") + assert_permissions_checked(app_client_two_attached_databases.ds, ["view-instance"]) assert response.status == 200 assert "text/html; charset=utf-8" == response.headers["content-type"] soup = Soup(response.body, "html.parser") @@ -77,11 +77,8 @@ def test_static_mounts(): def test_memory_database_page(): for client in make_app_client(memory=True): response = client.get("/:memory:") - assert_permission_checked( - client.ds, - "view-database", - resource_type="database", - resource_identifier=":memory:", + assert_permissions_checked( + client.ds, ["view-instance", ("view-database", "database", ":memory:")] ) assert response.status == 200 @@ -95,11 +92,8 @@ def test_database_page_redirects_with_url_hash(app_client_with_hash): def test_database_page(app_client): response = app_client.get("/fixtures") - assert_permission_checked( - app_client.ds, - "view-database", - resource_type="database", - resource_identifier="fixtures", + assert_permissions_checked( + app_client.ds, ["view-instance", ("view-database", "database", "fixtures")] ) soup = Soup(response.body, "html.parser") queries_ul = soup.find("h2", text="Queries").find_next_sibling("ul") @@ -211,11 +205,13 @@ def test_row_page_does_not_truncate(): for client in make_app_client(config={"truncate_cells_html": 5}): response = client.get("/fixtures/facetable/1") assert response.status == 200 - assert_permission_checked( + assert_permissions_checked( client.ds, - "view-row", - resource_type="row", - 
resource_identifier=("fixtures", "facetable", "1"), + [ + "view-instance", + ("view-table", "table", ("fixtures", "facetable")), + ("view-row", "row", ("fixtures", "facetable", "1")), + ], ) table = Soup(response.body, "html.parser").find("table") assert table["class"] == ["rows-and-columns"] @@ -526,11 +522,13 @@ def test_templates_considered(app_client, path, expected_considered): def test_table_html_simple_primary_key(app_client): response = app_client.get("/fixtures/simple_primary_key?_size=3") - assert_permission_checked( + assert_permissions_checked( app_client.ds, - "view-table", - resource_type="table", - resource_identifier=("fixtures", "simple_primary_key"), + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("view-table", "table", ("fixtures", "simple_primary_key")), + ], ) assert response.status == 200 table = Soup(response.body, "html.parser").find("table") @@ -887,6 +885,19 @@ def test_database_metadata(app_client): assert_footer_links(soup) +def test_database_query_permission_checks(app_client): + response = app_client.get("/fixtures?sql=select+1") + assert response.status == 200 + assert_permissions_checked( + app_client.ds, + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("execute-query", "database", "fixtures"), + ], + ) + + def test_database_metadata_with_custom_sql(app_client): response = app_client.get("/fixtures?sql=select+*+from+simple_primary_key") assert response.status == 200 @@ -922,11 +933,13 @@ def test_database_download_allowed_for_immutable(): assert len(soup.findAll("a", {"href": re.compile(r"\.db$")})) # Check we can actually download it assert 200 == client.get("/fixtures.db").status - assert_permission_checked( + assert_permissions_checked( client.ds, - "view-database-download", - resource_type="database", - resource_identifier="fixtures", + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("view-database-download", "database", "fixtures"), + ], ) @@ -1023,11 +1036,13 @@ def test_404_content_type(app_client): def test_canned_query_with_custom_metadata(app_client): response = app_client.get("/fixtures/neighborhood_search?text=town") - assert_permission_checked( + assert_permissions_checked( app_client.ds, - "view-query", - resource_type="query", - resource_identifier=("fixtures", "neighborhood_search"), + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("view-query", "query", ("fixtures", "neighborhood_search")), + ], ) assert response.status == 200 soup = Soup(response.body, "html.parser") From a1e801453aaeb540d2aea8cccb90b425af737c44 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 7 Jun 2020 13:20:59 -0700 Subject: [PATCH 0302/2113] Renamed execute-query permission to execute-sql, refs #811 --- datasette/views/database.py | 13 +++---------- docs/authentication.rst | 4 ++-- tests/test_html.py | 2 +- 3 files changed, 6 insertions(+), 13 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index 4eae9e33..961ab61e 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -134,21 +134,14 @@ class QueryView(DataView): params.pop("_shape") # Respect canned query permissions + await self.check_permission(request, "view-instance") + await self.check_permission(request, "view-database", "database", database) if canned_query: - await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", "database", database) await self.check_permission( request, "view-query", "query", 
(database, canned_query) ) - # TODO: fix this to use that permission check - if not actor_matches_allow( - request.scope.get("actor", None), metadata.get("allow") - ): - return Response("Permission denied", status=403) else: - await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", "database", database) - await self.check_permission(request, "execute-query", "database", database) + await self.check_permission(request, "execute-sql", "database", database) # Extract any :named parameters named_parameters = named_parameters or self.re_named_parameter.findall(sql) named_parameter_values = { diff --git a/docs/authentication.rst b/docs/authentication.rst index 7fa96b35..ee8e7125 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -234,8 +234,8 @@ Actor is allowed to view a :ref:`canned query ` page, e.g. https .. _permissions_execute_query: -execute-query -------------- +execute-sql +----------- Actor is allowed to run arbitrary SQL queries against a specific database, e.g. https://latest.datasette.io/fixtures?sql=select+100 diff --git a/tests/test_html.py b/tests/test_html.py index b41c1943..ac7432d7 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -893,7 +893,7 @@ def test_database_query_permission_checks(app_client): [ "view-instance", ("view-database", "database", "fixtures"), - ("execute-query", "database", "fixtures"), + ("execute-sql", "database", "fixtures"), ], ) From 5ed2853cf3432a0f5a3511df8d2ffe9c6c79a584 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 7 Jun 2020 14:01:22 -0700 Subject: [PATCH 0303/2113] Fix permissions documenation test --- docs/authentication.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index ee8e7125..1bf2a1a5 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -232,7 +232,7 @@ Actor is allowed to view a :ref:`canned query ` page, e.g. https ``resource_identifier`` - string The name of the canned query -.. _permissions_execute_query: +.. 
_permissions_execute_sql: execute-sql ----------- From abc733912447f284b38ddc389d18ba0a8cef8bcf Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 7 Jun 2020 14:14:10 -0700 Subject: [PATCH 0304/2113] Nicer pattern for make_app_client() in tests, closes #395 --- tests/fixtures.py | 44 +++++++++++++++++++++++++------------- tests/test_api.py | 10 ++++----- tests/test_canned_write.py | 4 ++-- tests/test_cli.py | 2 +- tests/test_custom_pages.py | 2 +- tests/test_html.py | 28 ++++++++++++------------ tests/test_plugins.py | 8 +++---- 7 files changed, 56 insertions(+), 42 deletions(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index f767dc84..2ac73fb1 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -2,6 +2,7 @@ from datasette.app import Datasette from datasette.utils import sqlite3, MultiParams from asgiref.testing import ApplicationCommunicator from asgiref.sync import async_to_sync +import contextlib from http.cookies import SimpleCookie import itertools import json @@ -220,6 +221,7 @@ class TestClient: return response +@contextlib.contextmanager def make_app_client( sql_time_limit_ms=None, max_returned_rows=None, @@ -281,7 +283,8 @@ def make_app_client( @pytest.fixture(scope="session") def app_client(): - yield from make_app_client() + with make_app_client() as client: + yield client @pytest.fixture(scope="session") @@ -294,64 +297,75 @@ def app_client_no_files(): @pytest.fixture(scope="session") def app_client_two_attached_databases(): - yield from make_app_client( + with make_app_client( extra_databases={"extra database.db": EXTRA_DATABASE_SQL} - ) + ) as client: + yield client @pytest.fixture(scope="session") def app_client_conflicting_database_names(): - yield from make_app_client( + with make_app_client( extra_databases={"foo.db": EXTRA_DATABASE_SQL, "foo-bar.db": EXTRA_DATABASE_SQL} - ) + ) as client: + yield client @pytest.fixture(scope="session") def app_client_two_attached_databases_one_immutable(): - yield from make_app_client( + with make_app_client( is_immutable=True, extra_databases={"extra database.db": EXTRA_DATABASE_SQL} - ) + ) as client: + yield client @pytest.fixture(scope="session") def app_client_with_hash(): - yield from make_app_client(config={"hash_urls": True}, is_immutable=True) + with make_app_client(config={"hash_urls": True}, is_immutable=True) as client: + yield client @pytest.fixture(scope="session") def app_client_shorter_time_limit(): - yield from make_app_client(20) + with make_app_client(20) as client: + yield client @pytest.fixture(scope="session") def app_client_returned_rows_matches_page_size(): - yield from make_app_client(max_returned_rows=50) + with make_app_client(max_returned_rows=50) as client: + yield client @pytest.fixture(scope="session") def app_client_larger_cache_size(): - yield from make_app_client(config={"cache_size_kb": 2500}) + with make_app_client(config={"cache_size_kb": 2500}) as client: + yield client @pytest.fixture(scope="session") def app_client_csv_max_mb_one(): - yield from make_app_client(config={"max_csv_mb": 1}) + with make_app_client(config={"max_csv_mb": 1}) as client: + yield client @pytest.fixture(scope="session") def app_client_with_dot(): - yield from make_app_client(filename="fixtures.dot.db") + with make_app_client(filename="fixtures.dot.db") as client: + yield client @pytest.fixture(scope="session") def app_client_with_cors(): - yield from make_app_client(cors=True) + with make_app_client(cors=True) as client: + yield client @pytest.fixture(scope="session") def 
app_client_immutable_and_inspect_file(): inspect_data = {"fixtures": {"tables": {"sortable": {"count": 100}}}} - yield from make_app_client(is_immutable=True, inspect_data=inspect_data) + with make_app_client(is_immutable=True, inspect_data=inspect_data) as client: + yield client def generate_compound_rows(num): diff --git a/tests/test_api.py b/tests/test_api.py index 555e394a..22378946 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -605,7 +605,7 @@ def test_invalid_custom_sql(app_client): def test_allow_sql_off(): - for client in make_app_client(config={"allow_sql": False}): + with make_app_client(config={"allow_sql": False}) as client: response = client.get("/fixtures.json?sql=select+sleep(0.01)") assert 400 == response.status assert "sql= is not allowed" == response.json["error"] @@ -1107,7 +1107,7 @@ def test_table_filter_extra_where_invalid(app_client): def test_table_filter_extra_where_disabled_if_no_sql_allowed(): - for client in make_app_client(config={"allow_sql": False}): + with make_app_client(config={"allow_sql": False}) as client: response = client.get("/fixtures/facetable.json?_where=neighborhood='Dogpatch'") assert 400 == response.status assert "_where= is not allowed" == response.json["error"] @@ -1528,14 +1528,14 @@ def test_suggested_facets(app_client): def test_allow_facet_off(): - for client in make_app_client(config={"allow_facet": False}): + with make_app_client(config={"allow_facet": False}) as client: assert 400 == client.get("/fixtures/facetable.json?_facet=planet_int").status # Should not suggest any facets either: assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"] def test_suggest_facets_off(): - for client in make_app_client(config={"suggest_facets": False}): + with make_app_client(config={"suggest_facets": False}) as client: # Now suggested_facets should be [] assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"] @@ -1667,7 +1667,7 @@ def test_config_cache_size(app_client_larger_cache_size): def test_config_force_https_urls(): - for client in make_app_client(config={"force_https_urls": True}): + with make_app_client(config={"force_https_urls": True}) as client: response = client.get("/fixtures/facetable.json?_size=3&_facet=state") assert response.json["next_url"].startswith("https://") assert response.json["facet_results"]["state"]["results"][0][ diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index 73b01e51..c217be8f 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -4,7 +4,7 @@ from .fixtures import make_app_client @pytest.fixture def canned_write_client(): - for client in make_app_client( + with make_app_client( extra_databases={"data.db": "create table names (name text)"}, metadata={ "databases": { @@ -35,7 +35,7 @@ def canned_write_client(): } } }, - ): + ) as client: yield client diff --git a/tests/test_cli.py b/tests/test_cli.py index 2616f1d1..6939fe57 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -41,7 +41,7 @@ def test_inspect_cli_writes_to_file(app_client): def test_serve_with_inspect_file_prepopulates_table_counts_cache(): inspect_data = {"fixtures": {"tables": {"hithere": {"count": 44}}}} - for client in make_app_client(inspect_data=inspect_data, is_immutable=True): + with make_app_client(inspect_data=inspect_data, is_immutable=True) as client: assert inspect_data == client.ds.inspect_data db = client.ds.databases["fixtures"] assert {"hithere": 44} == db.cached_table_counts diff --git a/tests/test_custom_pages.py 
b/tests/test_custom_pages.py index c69facb5..4e4b2a67 100644 --- a/tests/test_custom_pages.py +++ b/tests/test_custom_pages.py @@ -27,7 +27,7 @@ def custom_pages_client(tmp_path_factory): nested_dir = pages_dir / "nested" nested_dir.mkdir() (nested_dir / "nest.html").write_text("Nest!", "utf-8") - for client in make_app_client(template_dir=str(template_dir)): + with make_app_client(template_dir=str(template_dir)) as client: yield client diff --git a/tests/test_html.py b/tests/test_html.py index ac7432d7..4e913bcf 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -63,9 +63,9 @@ def test_static(app_client): def test_static_mounts(): - for client in make_app_client( + with make_app_client( static_mounts=[("custom-static", str(pathlib.Path(__file__).parent))] - ): + ) as client: response = client.get("/custom-static/test_html.py") assert response.status == 200 response = client.get("/custom-static/not_exists.py") @@ -75,7 +75,7 @@ def test_static_mounts(): def test_memory_database_page(): - for client in make_app_client(memory=True): + with make_app_client(memory=True) as client: response = client.get("/:memory:") assert_permissions_checked( client.ds, ["view-instance", ("view-database", "database", ":memory:")] @@ -177,7 +177,7 @@ def test_definition_sql(path, expected_definition_sql, app_client): def test_table_cell_truncation(): - for client in make_app_client(config={"truncate_cells_html": 5}): + with make_app_client(config={"truncate_cells_html": 5}) as client: response = client.get("/fixtures/facetable") assert response.status == 200 table = Soup(response.body, "html.parser").find("table") @@ -202,7 +202,7 @@ def test_table_cell_truncation(): def test_row_page_does_not_truncate(): - for client in make_app_client(config={"truncate_cells_html": 5}): + with make_app_client(config={"truncate_cells_html": 5}) as client: response = client.get("/fixtures/facetable/1") assert response.status == 200 assert_permissions_checked( @@ -925,7 +925,7 @@ def test_table_metadata(app_client): def test_database_download_allowed_for_immutable(): - for client in make_app_client(is_immutable=True): + with make_app_client(is_immutable=True) as client: assert not client.ds.databases["fixtures"].is_mutable # Regular page should have a download link response = client.get("/fixtures") @@ -951,7 +951,7 @@ def test_database_download_disallowed_for_mutable(app_client): def test_database_download_disallowed_for_memory(): - for client in make_app_client(memory=True): + with make_app_client(memory=True) as client: # Memory page should NOT have a download link response = client.get("/:memory:") soup = Soup(response.body, "html.parser") @@ -960,7 +960,7 @@ def test_database_download_disallowed_for_memory(): def test_allow_download_off(): - for client in make_app_client(is_immutable=True, config={"allow_download": False}): + with make_app_client(is_immutable=True, config={"allow_download": False}) as client: response = client.get("/fixtures") soup = Soup(response.body, "html.parser") assert not len(soup.findAll("a", {"href": re.compile(r"\.db$")})) @@ -978,7 +978,7 @@ def test_allow_sql_on(app_client): def test_allow_sql_off(): - for client in make_app_client(config={"allow_sql": False}): + with make_app_client(config={"allow_sql": False}) as client: response = client.get("/fixtures") soup = Soup(response.body, "html.parser") assert not len(soup.findAll("textarea", {"name": "sql"})) @@ -1170,9 +1170,9 @@ def test_metadata_json_html(app_client): def test_custom_table_include(): - for client in make_app_client( + 
with make_app_client( template_dir=str(pathlib.Path(__file__).parent / "test_templates") - ): + ) as client: response = client.get("/fixtures/complex_foreign_keys") assert response.status == 200 assert ( @@ -1197,7 +1197,7 @@ def test_zero_results(app_client, path): def test_config_template_debug_on(): - for client in make_app_client(config={"template_debug": True}): + with make_app_client(config={"template_debug": True}) as client: response = client.get("/fixtures/facetable?_context=1") assert response.status == 200 assert response.text.startswith("
    {")
    @@ -1211,7 +1211,7 @@ def test_config_template_debug_off(app_client):
     
     def test_debug_context_includes_extra_template_vars():
         # https://github.com/simonw/datasette/issues/693
    -    for client in make_app_client(config={"template_debug": True}):
    +    with make_app_client(config={"template_debug": True}) as client:
             response = client.get("/fixtures/facetable?_context=1")
             # scope_path is added by PLUGIN1
             assert "scope_path" in response.text
    @@ -1292,7 +1292,7 @@ def test_metadata_sort_desc(app_client):
         ],
     )
     def test_base_url_config(base_url, path):
    -    for client in make_app_client(config={"base_url": base_url}):
    +    with make_app_client(config={"base_url": base_url}) as client:
             response = client.get(base_url + path.lstrip("/"))
             soup = Soup(response.body, "html.parser")
             for el in soup.findAll(["a", "link", "script"]):
    diff --git a/tests/test_plugins.py b/tests/test_plugins.py
    index f69e7fa7..c782b87b 100644
    --- a/tests/test_plugins.py
    +++ b/tests/test_plugins.py
    @@ -229,9 +229,9 @@ def test_plugins_asgi_wrapper(app_client):
     
     
     def test_plugins_extra_template_vars(restore_working_directory):
    -    for client in make_app_client(
    +    with make_app_client(
             template_dir=str(pathlib.Path(__file__).parent / "test_templates")
    -    ):
    +    ) as client:
             response = client.get("/-/metadata")
             assert response.status == 200
             extra_template_vars = json.loads(
    @@ -254,9 +254,9 @@ def test_plugins_extra_template_vars(restore_working_directory):
     
     
     def test_plugins_async_template_function(restore_working_directory):
    -    for client in make_app_client(
    +    with make_app_client(
             template_dir=str(pathlib.Path(__file__).parent / "test_templates")
    -    ):
    +    ) as client:
             response = client.get("/-/metadata")
             assert response.status == 200
             extra_from_awaitable_function = (
    
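Patch 0304 above replaces the old one-iteration ``for client in make_app_client(...)`` idiom with a real context manager built on ``contextlib.contextmanager``. A minimal sketch of the pattern, with a plain dictionary standing in for the ``TestClient`` that the real fixture constructs:

.. code-block:: python

    import contextlib


    @contextlib.contextmanager
    def make_client(name="fixtures"):
        # Setup phase - the real fixture builds a temporary directory,
        # writes the fixtures database and wraps Datasette in a TestClient.
        client = {"name": name, "closed": False}
        try:
            yield client
        finally:
            # Teardown always runs, even if an assertion inside the "with"
            # block raises; code placed after a bare "yield" in the old
            # generator-as-loop idiom only ran if the loop body succeeded.
            client["closed"] = True


    with make_client() as client:
        assert client["name"] == "fixtures"
    assert client["closed"]

The ``with`` form also makes the client's lifetime explicit at every call site, which is why the patch touches so many tests.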
    From ece0ba6f4bc152af6f605fc5f536ffa46af95274 Mon Sep 17 00:00:00 2001
    From: Simon Willison 
    Date: Sun, 7 Jun 2020 14:23:16 -0700
    Subject: [PATCH 0305/2113] Test + default impl for view-query permission, refs
     #811
    
    ---
     datasette/default_permissions.py | 21 ++++++++++++++++++---
     tests/test_permissions.py        | 22 ++++++++++++++++++++++
     2 files changed, 40 insertions(+), 3 deletions(-)
     create mode 100644 tests/test_permissions.py
    
    diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py
    index 0b0d17f9..40ae54ab 100644
    --- a/datasette/default_permissions.py
    +++ b/datasette/default_permissions.py
    @@ -1,7 +1,22 @@
     from datasette import hookimpl
    +from datasette.utils import actor_matches_allow
     
     
     @hookimpl
    -def permission_allowed(actor, action, resource_type, resource_identifier):
    -    if actor and actor.get("id") == "root" and action == "permissions-debug":
    -        return True
    +def permission_allowed(datasette, actor, action, resource_type, resource_identifier):
    +    if action == "permissions-debug":
    +        if actor and actor.get("id") == "root":
    +            return True
    +    elif action == "view-query":
+        # Check if this query has an "allow" block in metadata
    +        assert resource_type == "query"
    +        database, query_name = resource_identifier
    +        queries_metadata = datasette.metadata("queries", database=database)
    +        assert query_name in queries_metadata
    +        if isinstance(queries_metadata[query_name], str):
    +            return True
    +        allow = queries_metadata[query_name].get("allow")
    +        print("checking allow - actor = {}, allow = {}".format(actor, allow))
    +        if allow is None:
    +            return True
    +        return actor_matches_allow(actor, allow)
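The default implementation above delegates the actual matching to
actor_matches_allow() from datasette.utils. A sketch of its behaviour for the
allow shapes exercised by the tests below (the wildcard example comes from
docs/authentication.rst later in this series; this is not the full
implementation):

    from datasette.utils import actor_matches_allow

    actor_matches_allow({"id": "root"}, {"id": "root"})  # True - id matches
    actor_matches_allow({"id": "root"}, {"id": "*"})     # True - "*" wildcard
    actor_matches_allow(None, {})  # False - an empty block denies everyone
    # allow=None never reaches this function here: the hook returns True first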
    diff --git a/tests/test_permissions.py b/tests/test_permissions.py
    new file mode 100644
    index 00000000..c90fdf7a
    --- /dev/null
    +++ b/tests/test_permissions.py
    @@ -0,0 +1,22 @@
    +from .fixtures import make_app_client
    +import pytest
    +
    +
    +@pytest.mark.parametrize(
    +    "allow,expected_anon,expected_auth",
    +    [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),],
    +)
    +def test_execute_sql(allow, expected_anon, expected_auth):
    +    with make_app_client(
    +        metadata={
    +            "databases": {
    +                "fixtures": {"queries": {"q": {"sql": "select 1 + 1", "allow": allow}}}
    +            }
    +        }
    +    ) as client:
    +        anon_response = client.get("/fixtures/q")
    +        assert expected_anon == anon_response.status
    +        auth_response = client.get(
    +            "/fixtures/q", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}
    +        )
    +        assert expected_auth == auth_response.status
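The authenticated half of each test relies on Datasette's signed ds_actor
cookie: sign an actor dictionary in the "actor" namespace and send it with the
request. A sketch using the client yielded by make_app_client; the asserted
status assumes the {"id": "root"} allow block from the parametrized test above:

    # Datasette verifies the cookie signature and exposes the payload
    # as the request's actor
    cookie = client.ds.sign({"id": "root"}, "actor")
    response = client.get("/fixtures/q", cookies={"ds_actor": cookie})
    assert response.status == 200  # root matches the allow block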
    
    From 8571ce388a23dd98adbdc1b7eff6c6eef5a9d1af Mon Sep 17 00:00:00 2001
    From: Simon Willison 
    Date: Sun, 7 Jun 2020 14:30:39 -0700
    Subject: [PATCH 0306/2113] Implemented view-instance permission, refs #811
    
    ---
     datasette/default_permissions.py |  4 ++++
     tests/test_permissions.py        | 20 ++++++++++++++++++++
     2 files changed, 24 insertions(+)
    
    diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py
    index 40ae54ab..ee182c85 100644
    --- a/datasette/default_permissions.py
    +++ b/datasette/default_permissions.py
    @@ -7,6 +7,10 @@ def permission_allowed(datasette, actor, action, resource_type, resource_identif
         if action == "permissions-debug":
             if actor and actor.get("id") == "root":
                 return True
    +    elif action == "view-instance":
    +        allow = datasette.metadata("allow")
    +        if allow is not None:
    +            return actor_matches_allow(actor, allow)
         elif action == "view-query":
         # Check if this query has an "allow" block in metadata
             assert resource_type == "query"
    diff --git a/tests/test_permissions.py b/tests/test_permissions.py
    index c90fdf7a..b5c2e00c 100644
    --- a/tests/test_permissions.py
    +++ b/tests/test_permissions.py
    @@ -20,3 +20,23 @@ def test_execute_sql(allow, expected_anon, expected_auth):
                 "/fixtures/q", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}
             )
             assert expected_auth == auth_response.status
    +
    +
    +@pytest.mark.parametrize(
    +    "allow,expected_anon,expected_auth",
    +    [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),],
    +)
    +def test_view_instance(allow, expected_anon, expected_auth):
    +    with make_app_client(metadata={"allow": allow}) as client:
    +        for path in (
    +            "/",
    +            "/fixtures",
    +            "/fixtures/compound_three_primary_keys",
    +            "/fixtures/compound_three_primary_keys/a,a,a",
    +        ):
    +            anon_response = client.get(path)
    +            assert expected_anon == anon_response.status
    +            auth_response = client.get(
    +                path, cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")},
    +            )
    +            assert expected_auth == auth_response.status
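In metadata terms, the new view-instance branch means a single top-level
"allow" block locks down every page of the instance. The shape the test above
passes to make_app_client, written out as a dict (in a deployment this would
live in metadata.json):

    metadata = {
        # Only the root actor may view the instance; anonymous requests
        # to /, database, table and row pages all receive 403
        "allow": {"id": "root"}
    }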
    
    From cd92e4fe2a47039a8c780e4e7183a0d2e7446884 Mon Sep 17 00:00:00 2001
    From: Simon Willison 
    Date: Sun, 7 Jun 2020 14:33:52 -0700
Subject: [PATCH 0307/2113] Fixed test name - this exercises view-query, not
 execute-sql - refs #811
    
    ---
     tests/test_permissions.py | 2 +-
     1 file changed, 1 insertion(+), 1 deletion(-)
    
    diff --git a/tests/test_permissions.py b/tests/test_permissions.py
    index b5c2e00c..bf66bc9c 100644
    --- a/tests/test_permissions.py
    +++ b/tests/test_permissions.py
    @@ -6,7 +6,7 @@ import pytest
         "allow,expected_anon,expected_auth",
         [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),],
     )
    -def test_execute_sql(allow, expected_anon, expected_auth):
    +def test_view_query(allow, expected_anon, expected_auth):
         with make_app_client(
             metadata={
                 "databases": {
    
    From 613fa551a1be31645deb0ece4b46638c181827e0 Mon Sep 17 00:00:00 2001
    From: Simon Willison 
    Date: Sun, 7 Jun 2020 20:14:27 -0700
    Subject: [PATCH 0308/2113] Removed view-row permission, for the moment - refs
     #811
    
    https://github.com/simonw/datasette/issues/811#issuecomment-640338347
    ---
     datasette/views/table.py |  3 ---
     docs/authentication.rst  | 13 -------------
     tests/test_html.py       |  1 -
     3 files changed, 17 deletions(-)
    
    diff --git a/datasette/views/table.py b/datasette/views/table.py
    index 10d6725a..935fed3d 100644
    --- a/datasette/views/table.py
    +++ b/datasette/views/table.py
    @@ -851,9 +851,6 @@ class RowView(RowTableShared):
             await self.check_permission(request, "view-instance")
             await self.check_permission(request, "view-database", "database", database)
             await self.check_permission(request, "view-table", "table", (database, table))
    -        await self.check_permission(
    -            request, "view-row", "row", tuple([database, table] + list(pk_values))
    -        )
             db = self.ds.databases[database]
             pks = await db.primary_keys(table)
             use_rowid = not pks
    diff --git a/docs/authentication.rst b/docs/authentication.rst
    index 1bf2a1a5..2caca66f 100644
    --- a/docs/authentication.rst
    +++ b/docs/authentication.rst
    @@ -206,19 +206,6 @@ Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.i
     ``resource_identifier`` - tuple: (string, string)
         The name of the database, then the name of the table
     
    -.. _permissions_view_row:
    -
    -view-row
    ---------
    -
    -Actor is allowed to view a row page, e.g. https://latest.datasette.io/fixtures/compound_primary_key/a,b
    -
    -``resource_type`` - string
    -    "row"
    -
    -``resource_identifier`` - tuple: (string, string, strings...)
    -    The name of the database, then the name of the table, then the primary key of the row. The primary key may be a single value or multiple values, so the ``resource_identifier`` tuple may be three or more items long.
    -
     .. _permissions_view_query:
     
     view-query
    diff --git a/tests/test_html.py b/tests/test_html.py
    index 4e913bcf..e05640d7 100644
    --- a/tests/test_html.py
    +++ b/tests/test_html.py
    @@ -210,7 +210,6 @@ def test_row_page_does_not_truncate():
                 [
                     "view-instance",
                     ("view-table", "table", ("fixtures", "facetable")),
    -                ("view-row", "row", ("fixtures", "facetable", "1")),
                 ],
             )
             table = Soup(response.body, "html.parser").find("table")
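With view-row gone, a row page such as /fixtures/facetable/1 performs just the
three checks that remain in RowView.data above (database and table names
filled in for illustration):

    await self.check_permission(request, "view-instance")
    await self.check_permission(request, "view-database", "database", "fixtures")
    await self.check_permission(
        request, "view-table", "table", ("fixtures", "facetable")
    )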
    
    From 9b42e1a4f5902fb7d6ad0111189900e2656ffda3 Mon Sep 17 00:00:00 2001
    From: Simon Willison 
    Date: Sun, 7 Jun 2020 20:50:37 -0700
    Subject: [PATCH 0309/2113] view-database permission
    MIME-Version: 1.0
    Content-Type: text/plain; charset=UTF-8
    Content-Transfer-Encoding: 8bit
    
    Also now using 🔒 to indicate private resources - resources that
    would not be available to the anonymous user. Refs #811
    ---
     datasette/default_permissions.py  |  7 +++++-
     datasette/templates/database.html |  2 +-
     datasette/templates/index.html    |  2 +-
     datasette/views/database.py       |  3 +--
     datasette/views/index.py          | 19 +++++++++++++++-
     tests/test_canned_write.py        | 11 +++++-----
     tests/test_html.py                |  5 +----
     tests/test_permissions.py         | 36 +++++++++++++++++++++++++++++++
     8 files changed, 69 insertions(+), 16 deletions(-)
    
    diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py
    index ee182c85..40be8d34 100644
    --- a/datasette/default_permissions.py
    +++ b/datasette/default_permissions.py
    @@ -11,6 +11,12 @@ def permission_allowed(datasette, actor, action, resource_type, resource_identif
             allow = datasette.metadata("allow")
             if allow is not None:
                 return actor_matches_allow(actor, allow)
    +    elif action == "view-database":
    +        assert resource_type == "database"
    +        database_allow = datasette.metadata("allow", database=resource_identifier)
    +        if database_allow is None:
    +            return True
    +        return actor_matches_allow(actor, database_allow)
         elif action == "view-query":
         # Check if this query has an "allow" block in metadata
             assert resource_type == "query"
    @@ -20,7 +26,6 @@ def permission_allowed(datasette, actor, action, resource_type, resource_identif
             if isinstance(queries_metadata[query_name], str):
                 return True
             allow = queries_metadata[query_name].get("allow")
    -        print("checking allow - actor = {}, allow = {}".format(actor, allow))
             if allow is None:
                 return True
             return actor_matches_allow(actor, allow)
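The new view-database branch consults a per-database "allow" block. The
metadata shape the tests in this patch use, as a dict (metadata.json in a real
deployment):

    metadata = {
        "databases": {
            "fixtures": {
                # Anonymous visitors get 403 for /fixtures and everything
                # under it; the root actor sees it, marked with a padlock
                "allow": {"id": "root"}
            }
        }
    }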
    diff --git a/datasette/templates/database.html b/datasette/templates/database.html
    index fc88003c..eaebfdf7 100644
    --- a/datasette/templates/database.html
    +++ b/datasette/templates/database.html
    @@ -60,7 +60,7 @@
         

    Queries

    {% endif %} diff --git a/datasette/templates/index.html b/datasette/templates/index.html index b394564a..3b8568b3 100644 --- a/datasette/templates/index.html +++ b/datasette/templates/index.html @@ -10,7 +10,7 @@ {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} {% for database in databases %} -

    {{ database.name }}

    +

    {{ database.name }}{% if database.private %} 🔒{% endif %}

    {% if database.show_table_row_counts %}{{ "{:,}".format(database.table_rows_sum) }} rows in {% endif %}{{ database.tables_count }} table{% if database.tables_count != 1 %}s{% endif %}{% if database.tables_count and database.hidden_tables_count %}, {% endif -%} {% if database.hidden_tables_count -%} diff --git a/datasette/views/database.py b/datasette/views/database.py index 961ab61e..4804b2a9 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -58,8 +58,7 @@ class DatabaseView(DataView): tables.sort(key=lambda t: (t["hidden"], t["name"])) canned_queries = [ dict( - query, - requires_auth=not actor_matches_allow(None, query.get("allow", None)), + query, private=not actor_matches_allow(None, query.get("allow", None)), ) for query in self.ds.get_canned_queries(database) if actor_matches_allow( diff --git a/datasette/views/index.py b/datasette/views/index.py index 5f903474..7b88028b 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -2,7 +2,7 @@ import hashlib import json from datasette.utils import CustomJSONEncoder -from datasette.utils.asgi import Response +from datasette.utils.asgi import Response, Forbidden from datasette.version import __version__ from .base import BaseView @@ -25,6 +25,22 @@ class IndexView(BaseView): await self.check_permission(request, "view-instance") databases = [] for name, db in self.ds.databases.items(): + # Check permission + allowed = await self.ds.permission_allowed( + request.scope.get("actor"), + "view-database", + resource_type="database", + resource_identifier=name, + default=True, + ) + if not allowed: + continue + private = not await self.ds.permission_allowed( + None, + "view-database", + resource_type="database", + resource_identifier=name, + ) table_names = await db.table_names() hidden_table_names = set(await db.hidden_table_names()) views = await db.view_names() @@ -95,6 +111,7 @@ class IndexView(BaseView): ), "hidden_tables_count": len(hidden_tables), "views_count": len(views), + "private": private, } ) diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index c217be8f..dc3fba3f 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -120,13 +120,12 @@ def test_canned_query_permissions_on_database_page(canned_write_client): ) assert 200 == response.status assert [ - {"name": "add_name", "requires_auth": False}, - {"name": "add_name_specify_id", "requires_auth": False}, - {"name": "delete_name", "requires_auth": True}, - {"name": "update_name", "requires_auth": False}, + {"name": "add_name", "private": False}, + {"name": "add_name_specify_id", "private": False}, + {"name": "delete_name", "private": True}, + {"name": "update_name", "private": False}, ] == [ - {"name": q["name"], "requires_auth": q["requires_auth"]} - for q in response.json["queries"] + {"name": q["name"], "private": q["private"]} for q in response.json["queries"] ] diff --git a/tests/test_html.py b/tests/test_html.py index e05640d7..3f6dc4df 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -207,10 +207,7 @@ def test_row_page_does_not_truncate(): assert response.status == 200 assert_permissions_checked( client.ds, - [ - "view-instance", - ("view-table", "table", ("fixtures", "facetable")), - ], + ["view-instance", ("view-table", "table", ("fixtures", "facetable")),], ) table = Soup(response.body, "html.parser").find("table") assert table["class"] == ["rows-and-columns"] diff --git a/tests/test_permissions.py b/tests/test_permissions.py index bf66bc9c..21014a25 100644 --- 
a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -40,3 +40,39 @@ def test_view_instance(allow, expected_anon, expected_auth): path, cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, ) assert expected_auth == auth_response.status + + +@pytest.mark.parametrize( + "allow,expected_anon,expected_auth", + [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),], +) +def test_view_database(allow, expected_anon, expected_auth): + with make_app_client( + metadata={"databases": {"fixtures": {"allow": allow}}} + ) as client: + for path in ( + "/fixtures", + "/fixtures/compound_three_primary_keys", + "/fixtures/compound_three_primary_keys/a,a,a", + ): + anon_response = client.get(path) + assert expected_anon == anon_response.status + auth_response = client.get( + path, cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + ) + assert expected_auth == auth_response.status + + +def test_database_list_respects_view_database(): + with make_app_client( + metadata={"databases": {"fixtures": {"allow": {"id": "root"}}}}, + extra_databases={"data.db": "create table names (name text)"}, + ) as client: + anon_response = client.get("/") + assert 'data' in anon_response.text + assert 'fixtures' not in anon_response.text + auth_response = client.get( + "/", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + ) + assert 'data' in auth_response.text + assert 'fixtures 🔒' in auth_response.text From b26292a4582ea7fe16c59d0ac99f3bd8c3d4b1d0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 7 Jun 2020 20:56:49 -0700 Subject: [PATCH 0310/2113] Test that view-query is respected by query list, refs #811 --- datasette/templates/database.html | 2 +- tests/test_permissions.py | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index eaebfdf7..dfafc049 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -60,7 +60,7 @@

    Queries

    {% endif %} diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 21014a25..e66b9291 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -22,6 +22,26 @@ def test_view_query(allow, expected_anon, expected_auth): assert expected_auth == auth_response.status +def test_query_list_respects_view_query(): + with make_app_client( + metadata={ + "databases": { + "fixtures": { + "queries": {"q": {"sql": "select 1 + 1", "allow": {"id": "root"}}} + } + } + } + ) as client: + html_fragment = '
  • q 🔒
  • ' + anon_response = client.get("/fixtures") + assert html_fragment not in anon_response.text + assert '"/fixtures/q"' not in anon_response.text + auth_response = client.get( + "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} + ) + assert html_fragment in auth_response.text + + @pytest.mark.parametrize( "allow,expected_anon,expected_auth", [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),], From 9397d718345c4b35d2a5c55bfcbd1468876b5ab9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 7 Jun 2020 21:47:22 -0700 Subject: [PATCH 0311/2113] Implemented view-table, refs #811 --- datasette/default_permissions.py | 8 ++ datasette/templates/database.html | 2 +- datasette/views/database.py | 16 ++++ tests/test_permissions.py | 123 ++++++++++++++++++++---------- 4 files changed, 108 insertions(+), 41 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 40be8d34..dd1770a3 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -17,6 +17,14 @@ def permission_allowed(datasette, actor, action, resource_type, resource_identif if database_allow is None: return True return actor_matches_allow(actor, database_allow) + elif action == "view-table": + assert resource_type == "table" + database, table = resource_identifier + tables = datasette.metadata("tables", database=database) or {} + table_allow = (tables.get(table) or {}).get("allow") + if table_allow is None: + return True + return actor_matches_allow(actor, table_allow) elif action == "view-query": # Check if this query has a "allow" block in metadata assert resource_type == "query" diff --git a/datasette/templates/database.html b/datasette/templates/database.html index dfafc049..1187267d 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -36,7 +36,7 @@ {% for table in tables %} {% if show_hidden or not table.hidden %}
    -

    {{ table.name }}{% if table.hidden %} (hidden){% endif %}

    +

    {{ table.name }}{% if table.private %} 🔒{% endif %}{% if table.hidden %} (hidden){% endif %}

    {% for column in table.columns[:9] %}{{ column }}{% if not loop.last %}, {% endif %}{% endfor %}{% if table.columns|length > 9 %}...{% endif %}

    {% if table.count is none %}Many rows{% else %}{{ "{:,}".format(table.count) }} row{% if table.count == 1 %}{% else %}s{% endif %}{% endif %}

    diff --git a/datasette/views/database.py b/datasette/views/database.py index 4804b2a9..ba3d22d9 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -42,6 +42,21 @@ class DatabaseView(DataView): tables = [] for table in table_counts: + allowed = await self.ds.permission_allowed( + request.scope.get("actor"), + "view-table", + resource_type="table", + resource_identifier=(database, table), + default=True, + ) + if not allowed: + continue + private = not await self.ds.permission_allowed( + None, + "view-table", + resource_type="table", + resource_identifier=(database, table), + ) table_columns = await db.table_columns(table) tables.append( { @@ -52,6 +67,7 @@ class DatabaseView(DataView): "hidden": table in hidden_table_names, "fts_table": await db.fts_table(table), "foreign_keys": all_foreign_keys[table], + "private": private, } ) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index e66b9291..7c5b02c0 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -2,46 +2,6 @@ from .fixtures import make_app_client import pytest -@pytest.mark.parametrize( - "allow,expected_anon,expected_auth", - [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),], -) -def test_view_query(allow, expected_anon, expected_auth): - with make_app_client( - metadata={ - "databases": { - "fixtures": {"queries": {"q": {"sql": "select 1 + 1", "allow": allow}}} - } - } - ) as client: - anon_response = client.get("/fixtures/q") - assert expected_anon == anon_response.status - auth_response = client.get( - "/fixtures/q", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} - ) - assert expected_auth == auth_response.status - - -def test_query_list_respects_view_query(): - with make_app_client( - metadata={ - "databases": { - "fixtures": { - "queries": {"q": {"sql": "select 1 + 1", "allow": {"id": "root"}}} - } - } - } - ) as client: - html_fragment = '
  • q 🔒
  • ' - anon_response = client.get("/fixtures") - assert html_fragment not in anon_response.text - assert '"/fixtures/q"' not in anon_response.text - auth_response = client.get( - "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} - ) - assert html_fragment in auth_response.text - - @pytest.mark.parametrize( "allow,expected_anon,expected_auth", [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),], @@ -96,3 +56,86 @@ def test_database_list_respects_view_database(): ) assert 'data' in auth_response.text assert 'fixtures 🔒' in auth_response.text + + +@pytest.mark.parametrize( + "allow,expected_anon,expected_auth", + [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),], +) +def test_view_table(allow, expected_anon, expected_auth): + with make_app_client( + metadata={ + "databases": { + "fixtures": { + "tables": {"compound_three_primary_keys": {"allow": allow}} + } + } + } + ) as client: + anon_response = client.get("/fixtures/compound_three_primary_keys") + assert expected_anon == anon_response.status + auth_response = client.get( + "/fixtures/compound_three_primary_keys", + cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + ) + assert expected_auth == auth_response.status + + +def test_table_list_respects_view_table(): + with make_app_client( + metadata={ + "databases": { + "fixtures": { + "tables": {"compound_three_primary_keys": {"allow": {"id": "root"}}} + } + } + } + ) as client: + html_fragment = 'compound_three_primary_keys 🔒' + anon_response = client.get("/fixtures") + assert html_fragment not in anon_response.text + assert '"/fixtures/compound_three_primary_keys"' not in anon_response.text + auth_response = client.get( + "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} + ) + assert html_fragment in auth_response.text + + +@pytest.mark.parametrize( + "allow,expected_anon,expected_auth", + [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),], +) +def test_view_query(allow, expected_anon, expected_auth): + with make_app_client( + metadata={ + "databases": { + "fixtures": {"queries": {"q": {"sql": "select 1 + 1", "allow": allow}}} + } + } + ) as client: + anon_response = client.get("/fixtures/q") + assert expected_anon == anon_response.status + auth_response = client.get( + "/fixtures/q", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} + ) + assert expected_auth == auth_response.status + + +def test_query_list_respects_view_query(): + with make_app_client( + metadata={ + "databases": { + "fixtures": { + "queries": {"q": {"sql": "select 1 + 1", "allow": {"id": "root"}}} + } + } + } + ) as client: + html_fragment = '
  • q 🔒
  • ' + anon_response = client.get("/fixtures") + assert html_fragment not in anon_response.text + assert '"/fixtures/q"' not in anon_response.text + auth_response = client.get( + "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} + ) + assert html_fragment in auth_response.text From e18f8c3f871fe1e9e00554b5c6c75409cc1a5e6d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 06:49:55 -0700 Subject: [PATCH 0312/2113] New check_visibility() utility function, refs #811 --- datasette/utils/__init__.py | 23 +++++++++++++++++++++++ datasette/views/database.py | 35 ++++++++++++++++------------------- datasette/views/index.py | 19 ++++--------------- 3 files changed, 43 insertions(+), 34 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 077728f4..3d964049 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -874,3 +874,26 @@ def actor_matches_allow(actor, allow): if actor_values.intersection(values): return True return False + + +async def check_visibility( + datasette, actor, action, resource_type, resource_identifier, default=True +): + "Returns (visible, private) - visible = can you see it, private = can others see it too" + visible = await datasette.permission_allowed( + actor, + action, + resource_type=resource_type, + resource_identifier=resource_identifier, + default=default, + ) + if not visible: + return (False, False) + private = not await datasette.permission_allowed( + None, + action, + resource_type=resource_type, + resource_identifier=resource_identifier, + default=default, + ) + return visible, private diff --git a/datasette/views/database.py b/datasette/views/database.py index ba3d22d9..afbb6b05 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -3,6 +3,7 @@ import jinja2 from datasette.utils import ( actor_matches_allow, + check_visibility, to_css_class, validate_sql_select, is_url, @@ -42,21 +43,15 @@ class DatabaseView(DataView): tables = [] for table in table_counts: - allowed = await self.ds.permission_allowed( + visible, private = await check_visibility( + self.ds, request.scope.get("actor"), "view-table", - resource_type="table", - resource_identifier=(database, table), - default=True, + "table", + (database, table), ) - if not allowed: + if not visible: continue - private = not await self.ds.permission_allowed( - None, - "view-table", - resource_type="table", - resource_identifier=(database, table), - ) table_columns = await db.table_columns(table) tables.append( { @@ -72,15 +67,17 @@ class DatabaseView(DataView): ) tables.sort(key=lambda t: (t["hidden"], t["name"])) - canned_queries = [ - dict( - query, private=not actor_matches_allow(None, query.get("allow", None)), + canned_queries = [] + for query in self.ds.get_canned_queries(database): + visible, private = await check_visibility( + self.ds, + request.scope.get("actor"), + "view-query", + "query", + (database, query["name"]), ) - for query in self.ds.get_canned_queries(database) - if actor_matches_allow( - request.scope.get("actor", None), query.get("allow", None) - ) - ] + if visible: + canned_queries.append(dict(query, private=private)) return ( { "database": database, diff --git a/datasette/views/index.py b/datasette/views/index.py index 7b88028b..0f7fb613 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -1,7 +1,7 @@ import hashlib import json -from datasette.utils import CustomJSONEncoder +from datasette.utils import check_visibility, CustomJSONEncoder from 
datasette.utils.asgi import Response, Forbidden from datasette.version import __version__ @@ -25,22 +25,11 @@ class IndexView(BaseView): await self.check_permission(request, "view-instance") databases = [] for name, db in self.ds.databases.items(): - # Check permission - allowed = await self.ds.permission_allowed( - request.scope.get("actor"), - "view-database", - resource_type="database", - resource_identifier=name, - default=True, + visible, private = await check_visibility( + self.ds, request.scope.get("actor"), "view-database", "database", name, ) - if not allowed: + if not visible: continue - private = not await self.ds.permission_allowed( - None, - "view-database", - resource_type="database", - resource_identifier=name, - ) table_names = await db.table_names() hidden_table_names = set(await db.hidden_table_names()) views = await db.view_names() From cc218fa9be55842656d030545c308392e3736053 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 07:02:31 -0700 Subject: [PATCH 0313/2113] Move assert_permissions_checked() calls from test_html.py to test_permissions.py, refs #811 --- datasette/app.py | 2 +- tests/test_html.py | 49 ------------------------------------ tests/test_permissions.py | 52 ++++++++++++++++++++++++++++++++++++++- 3 files changed, 52 insertions(+), 51 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f433a10a..23c293c9 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -298,7 +298,7 @@ class Datasette: pm.hook.prepare_jinja2_environment(env=self.jinja_env) self._register_renderers() - self._permission_checks = collections.deque(maxlen=30) + self._permission_checks = collections.deque(maxlen=200) self._root_token = os.urandom(32).hex() def sign(self, value, namespace="default"): diff --git a/tests/test_html.py b/tests/test_html.py index 3f6dc4df..cb0e0c90 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -4,7 +4,6 @@ from .fixtures import ( # noqa app_client_shorter_time_limit, app_client_two_attached_databases, app_client_with_hash, - assert_permissions_checked, make_app_client, METADATA, ) @@ -18,7 +17,6 @@ import urllib.parse def test_homepage(app_client_two_attached_databases): response = app_client_two_attached_databases.get("/") - assert_permissions_checked(app_client_two_attached_databases.ds, ["view-instance"]) assert response.status == 200 assert "text/html; charset=utf-8" == response.headers["content-type"] soup = Soup(response.body, "html.parser") @@ -77,9 +75,6 @@ def test_static_mounts(): def test_memory_database_page(): with make_app_client(memory=True) as client: response = client.get("/:memory:") - assert_permissions_checked( - client.ds, ["view-instance", ("view-database", "database", ":memory:")] - ) assert response.status == 200 @@ -92,9 +87,6 @@ def test_database_page_redirects_with_url_hash(app_client_with_hash): def test_database_page(app_client): response = app_client.get("/fixtures") - assert_permissions_checked( - app_client.ds, ["view-instance", ("view-database", "database", "fixtures")] - ) soup = Soup(response.body, "html.parser") queries_ul = soup.find("h2", text="Queries").find_next_sibling("ul") assert queries_ul is not None @@ -205,10 +197,6 @@ def test_row_page_does_not_truncate(): with make_app_client(config={"truncate_cells_html": 5}) as client: response = client.get("/fixtures/facetable/1") assert response.status == 200 - assert_permissions_checked( - client.ds, - ["view-instance", ("view-table", "table", ("fixtures", "facetable")),], - ) table = Soup(response.body, 
"html.parser").find("table") assert table["class"] == ["rows-and-columns"] assert ["Mission"] == [ @@ -518,14 +506,6 @@ def test_templates_considered(app_client, path, expected_considered): def test_table_html_simple_primary_key(app_client): response = app_client.get("/fixtures/simple_primary_key?_size=3") - assert_permissions_checked( - app_client.ds, - [ - "view-instance", - ("view-database", "database", "fixtures"), - ("view-table", "table", ("fixtures", "simple_primary_key")), - ], - ) assert response.status == 200 table = Soup(response.body, "html.parser").find("table") assert table["class"] == ["rows-and-columns"] @@ -881,19 +861,6 @@ def test_database_metadata(app_client): assert_footer_links(soup) -def test_database_query_permission_checks(app_client): - response = app_client.get("/fixtures?sql=select+1") - assert response.status == 200 - assert_permissions_checked( - app_client.ds, - [ - "view-instance", - ("view-database", "database", "fixtures"), - ("execute-sql", "database", "fixtures"), - ], - ) - - def test_database_metadata_with_custom_sql(app_client): response = app_client.get("/fixtures?sql=select+*+from+simple_primary_key") assert response.status == 200 @@ -929,14 +896,6 @@ def test_database_download_allowed_for_immutable(): assert len(soup.findAll("a", {"href": re.compile(r"\.db$")})) # Check we can actually download it assert 200 == client.get("/fixtures.db").status - assert_permissions_checked( - client.ds, - [ - "view-instance", - ("view-database", "database", "fixtures"), - ("view-database-download", "database", "fixtures"), - ], - ) def test_database_download_disallowed_for_mutable(app_client): @@ -1032,14 +991,6 @@ def test_404_content_type(app_client): def test_canned_query_with_custom_metadata(app_client): response = app_client.get("/fixtures/neighborhood_search?text=town") - assert_permissions_checked( - app_client.ds, - [ - "view-instance", - ("view-database", "database", "fixtures"), - ("view-query", "query", ("fixtures", "neighborhood_search")), - ], - ) assert response.status == 200 soup = Soup(response.body, "html.parser") assert "Search neighborhoods" == soup.find("h1").text diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 7c5b02c0..df905aa1 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -1,4 +1,4 @@ -from .fixtures import make_app_client +from .fixtures import app_client, assert_permissions_checked, make_app_client import pytest @@ -139,3 +139,53 @@ def test_query_list_respects_view_query(): "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} ) assert html_fragment in auth_response.text + + +@pytest.mark.parametrize( + "path,permissions", + [ + ("/", ["view-instance"]), + ("/fixtures", ["view-instance", ("view-database", "database", "fixtures")]), + ( + "/fixtures/facetable/1", + ["view-instance", ("view-table", "table", ("fixtures", "facetable"))], + ), + ( + "/fixtures/simple_primary_key", + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("view-table", "table", ("fixtures", "simple_primary_key")), + ], + ), + ( + "/fixtures?sql=select+1", + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("execute-sql", "database", "fixtures"), + ], + ), + ( + "/fixtures.db", + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("view-database-download", "database", "fixtures"), + ], + ), + ( + "/fixtures/neighborhood_search", + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("view-query", "query", ("fixtures", 
"neighborhood_search")), + ], + ), + ], +) +def test_permissions_checked(app_client, path, permissions): + app_client.ds._permission_checks.clear() + response = app_client.get(path) + assert response.status in (200, 403) + assert_permissions_checked(app_client.ds, permissions) From 1cf86e5eccf3f92b483bacbad860879cf39b0ad6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 07:18:37 -0700 Subject: [PATCH 0314/2113] Show padlock on private index page, refs #811 --- datasette/templates/index.html | 2 +- datasette/views/index.py | 3 +++ tests/test_permissions.py | 6 ++++++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/datasette/templates/index.html b/datasette/templates/index.html index 3b8568b3..5a8dccae 100644 --- a/datasette/templates/index.html +++ b/datasette/templates/index.html @@ -5,7 +5,7 @@ {% block body_class %}index{% endblock %} {% block content %} -

    {{ metadata.title or "Datasette" }}

    +

    {{ metadata.title or "Datasette" }}{% if private %} 🔒{% endif %}

    {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} diff --git a/datasette/views/index.py b/datasette/views/index.py index 0f7fb613..8cbe28f0 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -121,5 +121,8 @@ class IndexView(BaseView): "databases": databases, "metadata": self.ds.metadata(), "datasette_version": __version__, + "private": not await self.ds.permission_allowed( + None, "view-instance" + ), }, ) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index df905aa1..5dcf46ad 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -16,10 +16,16 @@ def test_view_instance(allow, expected_anon, expected_auth): ): anon_response = client.get(path) assert expected_anon == anon_response.status + if allow and path == "/" and anon_response.status == 200: + # Should be no padlock + assert "

    Datasette 🔒

    " not in anon_response.text auth_response = client.get( path, cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, ) assert expected_auth == auth_response.status + # Check for the padlock + if allow and path == "/" and expected_anon == 403 and expected_auth == 200: + assert "

    Datasette 🔒

    " in auth_response.text @pytest.mark.parametrize( From 3ce7f2e7dae010de97b67618c111ea5853164a69 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 07:23:10 -0700 Subject: [PATCH 0315/2113] Show padlock on private database page, refs #811 --- datasette/templates/database.html | 2 +- datasette/views/database.py | 3 +++ tests/test_permissions.py | 10 ++++++++++ 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 1187267d..089142e2 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -18,7 +18,7 @@ {% block content %} -

    {{ metadata.title or database }}

    +

    {{ metadata.title or database }}{% if private %} 🔒{% endif %}

    {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} diff --git a/datasette/views/database.py b/datasette/views/database.py index afbb6b05..2d7e6b31 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -86,6 +86,9 @@ class DatabaseView(DataView): "hidden_count": len([t for t in tables if t["hidden"]]), "views": views, "queries": canned_queries, + "private": not await self.ds.permission_allowed( + None, "view-database", "database", database + ), }, { "show_hidden": request.args.get("_show_hidden"), diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 5dcf46ad..d76d1e15 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -43,10 +43,20 @@ def test_view_database(allow, expected_anon, expected_auth): ): anon_response = client.get(path) assert expected_anon == anon_response.status + if allow and path == "/fixtures" and anon_response.status == 200: + # Should be no padlock + assert ">fixtures 🔒" not in anon_response.text auth_response = client.get( path, cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, ) assert expected_auth == auth_response.status + if ( + allow + and path == "/fixtures" + and expected_anon == 403 + and expected_auth == 200 + ): + assert ">fixtures 🔒" in auth_response.text def test_database_list_respects_view_database(): From 2a8b39800f194925658bd9e1b5e4cc12619d5e9c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 07:50:06 -0700 Subject: [PATCH 0316/2113] Updated tests, refs #811 --- tests/test_api.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/tests/test_api.py b/tests/test_api.py index 22378946..13a98b6a 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -70,6 +70,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "Table With Space In Name", @@ -79,6 +80,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "attraction_characteristic", @@ -97,6 +99,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "binary_data", @@ -106,6 +109,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "complex_foreign_keys", @@ -134,6 +138,7 @@ def test_database_page(app_client): }, ], }, + "private": False, }, { "name": "compound_primary_key", @@ -143,6 +148,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "compound_three_primary_keys", @@ -152,6 +158,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "custom_foreign_key_label", @@ -170,6 +177,7 @@ def test_database_page(app_client): } ], }, + "private": False, }, { "name": "facet_cities", @@ -188,6 +196,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "facetable", @@ -217,6 +226,7 @@ def test_database_page(app_client): } ], }, + "private": False, }, { "name": "foreign_key_references", @@ -240,6 +250,7 @@ def test_database_page(app_client): }, ], }, + "private": False, }, { "name": "infinity", @@ -249,6 +260,7 @@ def 
test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "primary_key_multiple_columns", @@ -267,6 +279,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "primary_key_multiple_columns_explicit_label", @@ -285,6 +298,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "roadside_attraction_characteristics", @@ -308,6 +322,7 @@ def test_database_page(app_client): }, ], }, + "private": False, }, { "name": "roadside_attractions", @@ -326,6 +341,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "searchable", @@ -344,6 +360,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "searchable_tags", @@ -363,6 +380,7 @@ def test_database_page(app_client): }, ], }, + "private": False, }, { "name": "select", @@ -372,6 +390,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "simple_primary_key", @@ -405,6 +424,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "sortable", @@ -422,6 +442,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "table/with/slashes.csv", @@ -431,6 +452,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "tags", @@ -449,6 +471,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "units", @@ -458,6 +481,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "no_primary_key", @@ -467,6 +491,7 @@ def test_database_page(app_client): "hidden": True, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "searchable_fts", @@ -476,6 +501,7 @@ def test_database_page(app_client): "hidden": True, "fts_table": "searchable_fts", "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "searchable_fts_content", @@ -491,6 +517,7 @@ def test_database_page(app_client): "hidden": True, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "searchable_fts_segdir", @@ -507,6 +534,7 @@ def test_database_page(app_client): "hidden": True, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "searchable_fts_segments", @@ -516,6 +544,7 @@ def test_database_page(app_client): "hidden": True, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, ] == data["tables"] @@ -537,6 +566,7 @@ def test_no_files_uses_memory_database(app_client_no_files): "tables_and_views_more": False, "tables_and_views_truncated": [], "views_count": 0, + "private": False, } } == response.json # Try that SQL query From 177059284dc953e6c76f86213aa470db2ff3eaca Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 10:05:32 -0700 Subject: [PATCH 0317/2113] New request.actor property, refs #811 --- datasette/app.py | 2 +- datasette/utils/asgi.py | 4 ++++ datasette/views/base.py | 2 +- datasette/views/database.py | 4 ++-- 
datasette/views/index.py | 2 +- datasette/views/special.py | 2 +- docs/authentication.rst | 2 ++ docs/internals.rst | 5 ++++- 8 files changed, 16 insertions(+), 7 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 23c293c9..87e542c1 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -667,7 +667,7 @@ class Datasette: return d def _actor(self, request): - return {"actor": request.scope.get("actor", None)} + return {"actor": request.actor} def table_metadata(self, database, table): "Fetch table-specific metadata." diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index fa78c8df..bca9c9ab 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -74,6 +74,10 @@ class Request: def args(self): return MultiParams(parse_qs(qs=self.query_string)) + @property + def actor(self): + return self.scope.get("actor", None) + async def post_vars(self): body = [] body = b"" diff --git a/datasette/views/base.py b/datasette/views/base.py index 9c2cbbcc..000d354b 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -68,7 +68,7 @@ class BaseView(AsgiView): self, request, action, resource_type=None, resource_identifier=None ): ok = await self.ds.permission_allowed( - request.scope.get("actor"), + request.actor, action, resource_type=resource_type, resource_identifier=resource_identifier, diff --git a/datasette/views/database.py b/datasette/views/database.py index 2d7e6b31..dee6c9c8 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -45,7 +45,7 @@ class DatabaseView(DataView): for table in table_counts: visible, private = await check_visibility( self.ds, - request.scope.get("actor"), + request.actor, "view-table", "table", (database, table), @@ -71,7 +71,7 @@ class DatabaseView(DataView): for query in self.ds.get_canned_queries(database): visible, private = await check_visibility( self.ds, - request.scope.get("actor"), + request.actor, "view-query", "query", (database, query["name"]), diff --git a/datasette/views/index.py b/datasette/views/index.py index 8cbe28f0..609bfa6a 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -26,7 +26,7 @@ class IndexView(BaseView): databases = [] for name, db in self.ds.databases.items(): visible, private = await check_visibility( - self.ds, request.scope.get("actor"), "view-database", "database", name, + self.ds, request.actor, "view-database", "database", name, ) if not visible: continue diff --git a/datasette/views/special.py b/datasette/views/special.py index 37c04697..b8bd57c6 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -86,7 +86,7 @@ class PermissionsDebugView(BaseView): async def get(self, request): if not await self.ds.permission_allowed( - request.scope.get("actor"), "permissions-debug" + request.actor, "permissions-debug" ): return Response("Permission denied", status=403) return await self.render( diff --git a/docs/authentication.rst b/docs/authentication.rst index 2caca66f..bda6a0b7 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -140,6 +140,8 @@ Plugins that wish to implement the same permissions scheme as canned queries can actor_matches_allow({"id": "root"}, {"id": "*"}) # returns True +The currently authenticated actor is made available to plugins as ``request.actor``. + .. 
_PermissionsDebugView: Permissions Debug diff --git a/docs/internals.rst b/docs/internals.rst index 25b2d875..7498f017 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -42,6 +42,9 @@ The request object is passed to various plugin hooks. It represents an incoming ``.args`` - MultiParams An object representing the parsed querystring parameters, see below. +``.actor`` - dictionary (str -> Any) or None + The currently authenticated actor (see :ref:`actors `), or ``None`` if the request is unauthenticated. + The object also has one awaitable method: ``await request.post_vars()`` - dictionary @@ -122,7 +125,7 @@ await .permission_allowed(actor, action, resource_type=None, resource_identifier ----------------------------------------------------------------------------------------------------- ``actor`` - dictionary - The authenticated actor. This is usually ``request.scope.get("actor")``. + The authenticated actor. This is usually ``request.actor``. ``action`` - string The name of the action that is being permission checked. From ab14b20b248dafbe7f9f9487985614939c83b517 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 10:16:24 -0700 Subject: [PATCH 0318/2113] Get tests working again --- datasette/views/database.py | 6 +----- datasette/views/index.py | 2 +- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index dee6c9c8..6f6404a7 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -44,11 +44,7 @@ class DatabaseView(DataView): tables = [] for table in table_counts: visible, private = await check_visibility( - self.ds, - request.actor, - "view-table", - "table", - (database, table), + self.ds, request.actor, "view-table", "table", (database, table), ) if not visible: continue diff --git a/datasette/views/index.py b/datasette/views/index.py index 609bfa6a..59d3e042 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -122,7 +122,7 @@ class IndexView(BaseView): "metadata": self.ds.metadata(), "datasette_version": __version__, "private": not await self.ds.permission_allowed( - None, "view-instance" + None, "view-instance", default=True ), }, ) From dfff34e1987976e72f58ee7b274952840b1f4b71 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 11:03:33 -0700 Subject: [PATCH 0319/2113] Applied black, refs #811 --- datasette/views/special.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/datasette/views/special.py b/datasette/views/special.py index b8bd57c6..7a5fbe21 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -85,9 +85,7 @@ class PermissionsDebugView(BaseView): self.ds = datasette async def get(self, request): - if not await self.ds.permission_allowed( - request.actor, "permissions-debug" - ): + if not await self.ds.permission_allowed(request.actor, "permissions-debug"): return Response("Permission denied", status=403) return await self.render( ["permissions_debug.html"], From aa420009c08921d0c9a68cf60a57959be0e8a2e5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 11:07:11 -0700 Subject: [PATCH 0320/2113] Show padlock on private table page, refs #811 --- datasette/templates/table.html | 2 +- datasette/views/table.py | 5 +++++ tests/test_permissions.py | 5 +++++ 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/datasette/templates/table.html b/datasette/templates/table.html index fa6766a8..1289e125 100644 --- a/datasette/templates/table.html +++ 
b/datasette/templates/table.html @@ -26,7 +26,7 @@ {% block content %} -

    {{ metadata.title or table }}{% if is_view %} (view){% endif %}

    +

    {{ metadata.title or table }}{% if is_view %} (view){% endif %}{% if private %} 🔒{% endif %}

    {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} diff --git a/datasette/views/table.py b/datasette/views/table.py index 935fed3d..cd952568 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -271,6 +271,10 @@ class TableView(RowTableShared): await self.check_permission(request, "view-database", "database", database) await self.check_permission(request, "view-table", "table", (database, table)) + private = not await self.ds.permission_allowed( + None, "view-table", "table", (database, table), default=True + ) + pks = await db.primary_keys(table) table_columns = await db.table_columns(table) @@ -834,6 +838,7 @@ class TableView(RowTableShared): "suggested_facets": suggested_facets, "next": next_value and str(next_value) or None, "next_url": next_url, + "private": private, }, extra_template, ( diff --git a/tests/test_permissions.py b/tests/test_permissions.py index d76d1e15..733afd5f 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -90,11 +90,16 @@ def test_view_table(allow, expected_anon, expected_auth): ) as client: anon_response = client.get("/fixtures/compound_three_primary_keys") assert expected_anon == anon_response.status + if allow and anon_response.status == 200: + # Should be no padlock + assert ">compound_three_primary_keys 🔒" not in anon_response.text auth_response = client.get( "/fixtures/compound_three_primary_keys", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, ) assert expected_auth == auth_response.status + if allow and expected_anon == 403 and expected_auth == 200: + assert ">compound_three_primary_keys 🔒" in auth_response.text def test_table_list_respects_view_table(): From 9ac27f67fe346e753b562b711a2086e4c616d51d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 11:13:32 -0700 Subject: [PATCH 0321/2113] Show padlock on private query page, refs #811 --- datasette/templates/query.html | 2 +- datasette/views/database.py | 6 ++++++ tests/test_permissions.py | 5 +++++ 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/datasette/templates/query.html b/datasette/templates/query.html index a7cb6647..7771b101 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -28,7 +28,7 @@ {% block content %} -

    {{ metadata.title or database }}

    +

    {{ metadata.title or database }}{% if private %} 🔒{% endif %}

    {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} diff --git a/datasette/views/database.py b/datasette/views/database.py index 6f6404a7..30817106 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -147,10 +147,14 @@ class QueryView(DataView): # Respect canned query permissions await self.check_permission(request, "view-instance") await self.check_permission(request, "view-database", "database", database) + private = False if canned_query: await self.check_permission( request, "view-query", "query", (database, canned_query) ) + private = not await self.ds.permission_allowed( + None, "view-query", "query", (database, canned_query), default=True + ) else: await self.check_permission(request, "execute-sql", "database", database) # Extract any :named parameters @@ -214,6 +218,7 @@ class QueryView(DataView): "truncated": False, "columns": [], "query": {"sql": sql, "params": params}, + "private": private, }, extra_template, templates, @@ -282,6 +287,7 @@ class QueryView(DataView): "truncated": results.truncated, "columns": columns, "query": {"sql": sql, "params": params}, + "private": private, }, extra_template, templates, diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 733afd5f..55b2d673 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -136,10 +136,15 @@ def test_view_query(allow, expected_anon, expected_auth): ) as client: anon_response = client.get("/fixtures/q") assert expected_anon == anon_response.status + if allow and anon_response.status == 200: + # Should be no padlock + assert ">fixtures 🔒" not in anon_response.text auth_response = client.get( "/fixtures/q", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} ) assert expected_auth == auth_response.status + if allow and expected_anon == 403 and expected_auth == 200: + assert ">fixtures 🔒" in auth_response.text def test_query_list_respects_view_query(): From dcec89270a2e3b9fabed93f1d7b9be3ef86e9ed2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 11:20:21 -0700 Subject: [PATCH 0322/2113] View list respects view-table permission, refs #811 Also makes a small change to the /fixtures.json JSON: "views": ["view_name"] Is now: "views": [{"name": "view_name", "private": true}] --- datasette/templates/database.html | 2 +- datasette/views/database.py | 11 ++++++++++- tests/test_permissions.py | 18 +++++++++++++----- 3 files changed, 24 insertions(+), 7 deletions(-) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 089142e2..100faee4 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -51,7 +51,7 @@

    Views

    {% endif %} diff --git a/datasette/views/database.py b/datasette/views/database.py index 30817106..824cb632 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -37,10 +37,19 @@ class DatabaseView(DataView): db = self.ds.databases[database] table_counts = await db.table_counts(5) - views = await db.view_names() hidden_table_names = set(await db.hidden_table_names()) all_foreign_keys = await db.get_all_foreign_keys() + views = [] + for view_name in await db.view_names(): + visible, private = await check_visibility( + self.ds, request.actor, "view-table", "table", (database, view_name), + ) + if visible: + views.append( + {"name": view_name, "private": private,} + ) + tables = [] for table in table_counts: visible, private = await check_visibility( diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 55b2d673..5c338e04 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -107,19 +107,27 @@ def test_table_list_respects_view_table(): metadata={ "databases": { "fixtures": { - "tables": {"compound_three_primary_keys": {"allow": {"id": "root"}}} + "tables": { + "compound_three_primary_keys": {"allow": {"id": "root"}}, + # And a SQL view too: + "paginated_view": {"allow": {"id": "root"}}, + } } } } ) as client: - html_fragment = 'compound_three_primary_keys 🔒' + html_fragments = [ + ">compound_three_primary_keys 🔒", + ">paginated_view 🔒", + ] anon_response = client.get("/fixtures") - assert html_fragment not in anon_response.text - assert '"/fixtures/compound_three_primary_keys"' not in anon_response.text + for html_fragment in html_fragments: + assert html_fragment not in anon_response.text auth_response = client.get( "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} ) - assert html_fragment in auth_response.text + for html_fragment in html_fragments: + assert html_fragment in auth_response.text @pytest.mark.parametrize( From 5598c5de011db95396b65b5c8c251cbe6884d6ae Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 11:34:14 -0700 Subject: [PATCH 0323/2113] Database list on index page respects table/view permissions, refs #811 --- datasette/templates/index.html | 2 +- datasette/views/index.py | 25 ++++++++++++++++++++----- tests/test_permissions.py | 31 +++++++++++++++++++++++++++++++ 3 files changed, 52 insertions(+), 6 deletions(-) diff --git a/datasette/templates/index.html b/datasette/templates/index.html index 5a8dccae..c1adfc59 100644 --- a/datasette/templates/index.html +++ b/datasette/templates/index.html @@ -22,7 +22,7 @@ {% endif %}

    {% for table in database.tables_and_views_truncated %}{{ table.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}

    + }}"{% if table.count %} title="{{ table.count }} rows"{% endif %}>{{ table.name }}{% if table.private %} 🔒{% endif %}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}

    {% endfor %} {% endblock %} diff --git a/datasette/views/index.py b/datasette/views/index.py index 59d3e042..a3e8388c 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -25,14 +25,22 @@ class IndexView(BaseView): await self.check_permission(request, "view-instance") databases = [] for name, db in self.ds.databases.items(): - visible, private = await check_visibility( + visible, database_private = await check_visibility( self.ds, request.actor, "view-database", "database", name, ) if not visible: continue table_names = await db.table_names() hidden_table_names = set(await db.hidden_table_names()) - views = await db.view_names() + + views = [] + for view_name in await db.view_names(): + visible, private = await check_visibility( + self.ds, request.actor, "view-table", "table", (name, view_name), + ) + if visible: + views.append({"name": view_name, "private": private}) + # Perform counts only for immutable or DBS with <= COUNT_TABLE_LIMIT tables table_counts = {} if not db.is_mutable or db.size < COUNT_DB_SIZE_LIMIT: @@ -40,8 +48,14 @@ class IndexView(BaseView): # If any of these are None it means at least one timed out - ignore them all if any(v is None for v in table_counts.values()): table_counts = {} + tables = {} for table in table_names: + visible, private = await check_visibility( + self.ds, request.actor, "view-table", "table", (name, table), + ) + if not visible: + continue table_columns = await db.table_columns(table) tables[table] = { "name": table, @@ -51,6 +65,7 @@ class IndexView(BaseView): "hidden": table in hidden_table_names, "fts_table": await db.fts_table(table), "num_relationships_for_sorting": 0, + "private": private, } if request.args.get("_sort") == "relationships" or not table_counts: @@ -78,8 +93,8 @@ class IndexView(BaseView): # Only add views if this is less than TRUNCATE_AT if len(tables_and_views_truncated) < TRUNCATE_AT: num_views_to_add = TRUNCATE_AT - len(tables_and_views_truncated) - for view_name in views[:num_views_to_add]: - tables_and_views_truncated.append({"name": view_name}) + for view in views[:num_views_to_add]: + tables_and_views_truncated.append(view) databases.append( { @@ -100,7 +115,7 @@ class IndexView(BaseView): ), "hidden_tables_count": len(hidden_tables), "views_count": len(views), - "private": private, + "private": database_private, } ) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 5c338e04..475f93dd 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -74,6 +74,37 @@ def test_database_list_respects_view_database(): assert 'fixtures 🔒' in auth_response.text +def test_database_list_respects_view_table(): + with make_app_client( + metadata={ + "databases": { + "data": { + "tables": { + "names": {"allow": {"id": "root"}}, + "v": {"allow": {"id": "root"}}, + } + } + } + }, + extra_databases={ + "data.db": "create table names (name text); create view v as select * from names" + }, + ) as client: + html_fragments = [ + ">names 🔒", + ">v 🔒", + ] + anon_response_text = client.get("/").text + assert "0 rows in 0 tables" in anon_response_text + for html_fragment in html_fragments: + assert html_fragment not in anon_response_text + auth_response_text = client.get( + "/", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + ).text + for html_fragment in html_fragments: + assert html_fragment in auth_response_text + + @pytest.mark.parametrize( "allow,expected_anon,expected_auth", [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),], From 
c9f1ec616e5a8c83f554baaedd38663569fb9b91 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 11:51:03 -0700 Subject: [PATCH 0324/2113] Removed resource_type from permissions system, closes #817 Refs #811, #699 --- datasette/app.py | 4 +--- datasette/default_permissions.py | 5 +--- datasette/hookspecs.py | 2 +- datasette/templates/permissions_debug.html | 4 ++-- datasette/utils/__init__.py | 16 +++---------- datasette/views/base.py | 5 +--- datasette/views/database.py | 28 ++++++++-------------- datasette/views/index.py | 6 ++--- datasette/views/table.py | 10 ++++---- docs/authentication.rst | 19 ++------------- docs/internals.rst | 7 ++---- docs/plugins.rst | 9 +++---- tests/conftest.py | 4 ++-- tests/fixtures.py | 9 +++---- 14 files changed, 39 insertions(+), 89 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 87e542c1..c12e0af0 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -465,7 +465,7 @@ class Datasette: return [] async def permission_allowed( - self, actor, action, resource_type=None, resource_identifier=None, default=False + self, actor, action, resource_identifier=None, default=False ): "Check permissions using the permissions_allowed plugin hook" result = None @@ -473,7 +473,6 @@ class Datasette: datasette=self, actor=actor, action=action, - resource_type=resource_type, resource_identifier=resource_identifier, ): if callable(check): @@ -491,7 +490,6 @@ class Datasette: "when": datetime.datetime.utcnow().isoformat(), "actor": actor, "action": action, - "resource_type": resource_type, "resource_identifier": resource_identifier, "used_default": used_default, "result": result, diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index dd1770a3..d27704aa 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -3,7 +3,7 @@ from datasette.utils import actor_matches_allow @hookimpl -def permission_allowed(datasette, actor, action, resource_type, resource_identifier): +def permission_allowed(datasette, actor, action, resource_identifier): if action == "permissions-debug": if actor and actor.get("id") == "root": return True @@ -12,13 +12,11 @@ def permission_allowed(datasette, actor, action, resource_type, resource_identif if allow is not None: return actor_matches_allow(actor, allow) elif action == "view-database": - assert resource_type == "database" database_allow = datasette.metadata("allow", database=resource_identifier) if database_allow is None: return True return actor_matches_allow(actor, database_allow) elif action == "view-table": - assert resource_type == "table" database, table = resource_identifier tables = datasette.metadata("tables", database=database) or {} table_allow = (tables.get(table) or {}).get("allow") @@ -27,7 +25,6 @@ def permission_allowed(datasette, actor, action, resource_type, resource_identif return actor_matches_allow(actor, table_allow) elif action == "view-query": # Check if this query has a "allow" block in metadata - assert resource_type == "query" database, query_name = resource_identifier queries_metadata = datasette.metadata("queries", database=database) assert query_name in queries_metadata diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 71d06661..3c202553 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -66,5 +66,5 @@ def actor_from_request(datasette, request): @hookspec -def permission_allowed(datasette, actor, action, resource_type, resource_identifier): +def permission_allowed(datasette, actor, action, 
resource_identifier): "Check if actor is allowed to perfom this action - return True, False or None" diff --git a/datasette/templates/permissions_debug.html b/datasette/templates/permissions_debug.html index dda57dfa..7d3ee712 100644 --- a/datasette/templates/permissions_debug.html +++ b/datasette/templates/permissions_debug.html @@ -46,8 +46,8 @@ {% endif %}

    Actor: {{ check.actor|tojson }}

-{% if check.resource_type %}
-Resource: {{ check.resource_type }} = {{ check.resource_identifier }}
+{% if check.resource_identifier %}
+Resource: {{ check.resource_identifier }}

    {% endif %} {% endfor %} diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 3d964049..257d1285 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -876,24 +876,14 @@ def actor_matches_allow(actor, allow): return False -async def check_visibility( - datasette, actor, action, resource_type, resource_identifier, default=True -): +async def check_visibility(datasette, actor, action, resource_identifier, default=True): "Returns (visible, private) - visible = can you see it, private = can others see it too" visible = await datasette.permission_allowed( - actor, - action, - resource_type=resource_type, - resource_identifier=resource_identifier, - default=default, + actor, action, resource_identifier=resource_identifier, default=default, ) if not visible: return (False, False) private = not await datasette.permission_allowed( - None, - action, - resource_type=resource_type, - resource_identifier=resource_identifier, - default=default, + None, action, resource_identifier=resource_identifier, default=default, ) return visible, private diff --git a/datasette/views/base.py b/datasette/views/base.py index 000d354b..2ca5e86a 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -64,13 +64,10 @@ class BaseView(AsgiView): response.body = b"" return response - async def check_permission( - self, request, action, resource_type=None, resource_identifier=None - ): + async def check_permission(self, request, action, resource_identifier=None): ok = await self.ds.permission_allowed( request.actor, action, - resource_type=resource_type, resource_identifier=resource_identifier, default=True, ) diff --git a/datasette/views/database.py b/datasette/views/database.py index 824cb632..d562ecb1 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -21,7 +21,7 @@ class DatabaseView(DataView): async def data(self, request, database, hash, default_labels=False, _size=None): await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", "database", database) + await self.check_permission(request, "view-database", database) metadata = (self.ds.metadata("databases") or {}).get(database, {}) self.ds.update_with_inherited_metadata(metadata) @@ -43,7 +43,7 @@ class DatabaseView(DataView): views = [] for view_name in await db.view_names(): visible, private = await check_visibility( - self.ds, request.actor, "view-table", "table", (database, view_name), + self.ds, request.actor, "view-table", (database, view_name), ) if visible: views.append( @@ -53,7 +53,7 @@ class DatabaseView(DataView): tables = [] for table in table_counts: visible, private = await check_visibility( - self.ds, request.actor, "view-table", "table", (database, table), + self.ds, request.actor, "view-table", (database, table), ) if not visible: continue @@ -75,11 +75,7 @@ class DatabaseView(DataView): canned_queries = [] for query in self.ds.get_canned_queries(database): visible, private = await check_visibility( - self.ds, - request.actor, - "view-query", - "query", - (database, query["name"]), + self.ds, request.actor, "view-query", (database, query["name"]), ) if visible: canned_queries.append(dict(query, private=private)) @@ -112,10 +108,8 @@ class DatabaseDownload(DataView): async def view_get(self, request, database, hash, correct_hash_present, **kwargs): await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", "database", database) - await self.check_permission( - 
request, "view-database-download", "database", database - ) + await self.check_permission(request, "view-database", database) + await self.check_permission(request, "view-database-download", database) if database not in self.ds.databases: raise DatasetteError("Invalid database", status=404) db = self.ds.databases[database] @@ -155,17 +149,15 @@ class QueryView(DataView): # Respect canned query permissions await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", "database", database) + await self.check_permission(request, "view-database", database) private = False if canned_query: - await self.check_permission( - request, "view-query", "query", (database, canned_query) - ) + await self.check_permission(request, "view-query", (database, canned_query)) private = not await self.ds.permission_allowed( - None, "view-query", "query", (database, canned_query), default=True + None, "view-query", (database, canned_query), default=True ) else: - await self.check_permission(request, "execute-sql", "database", database) + await self.check_permission(request, "execute-sql", database) # Extract any :named parameters named_parameters = named_parameters or self.re_named_parameter.findall(sql) named_parameter_values = { diff --git a/datasette/views/index.py b/datasette/views/index.py index a3e8388c..b2706251 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -26,7 +26,7 @@ class IndexView(BaseView): databases = [] for name, db in self.ds.databases.items(): visible, database_private = await check_visibility( - self.ds, request.actor, "view-database", "database", name, + self.ds, request.actor, "view-database", name, ) if not visible: continue @@ -36,7 +36,7 @@ class IndexView(BaseView): views = [] for view_name in await db.view_names(): visible, private = await check_visibility( - self.ds, request.actor, "view-table", "table", (name, view_name), + self.ds, request.actor, "view-table", (name, view_name), ) if visible: views.append({"name": view_name, "private": private}) @@ -52,7 +52,7 @@ class IndexView(BaseView): tables = {} for table in table_names: visible, private = await check_visibility( - self.ds, request.actor, "view-table", "table", (name, table), + self.ds, request.actor, "view-table", (name, table), ) if not visible: continue diff --git a/datasette/views/table.py b/datasette/views/table.py index cd952568..4cec0cda 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -268,11 +268,11 @@ class TableView(RowTableShared): raise NotFound("Table not found: {}".format(table)) await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", "database", database) - await self.check_permission(request, "view-table", "table", (database, table)) + await self.check_permission(request, "view-database", database) + await self.check_permission(request, "view-table", (database, table)) private = not await self.ds.permission_allowed( - None, "view-table", "table", (database, table), default=True + None, "view-table", (database, table), default=True ) pks = await db.primary_keys(table) @@ -854,8 +854,8 @@ class RowView(RowTableShared): async def data(self, request, database, hash, table, pk_path, default_labels=False): pk_values = urlsafe_components(pk_path) await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", "database", database) - await self.check_permission(request, "view-table", "table", (database, table)) + await 
self.check_permission(request, "view-database", database) + await self.check_permission(request, "view-table", (database, table)) db = self.ds.databases[database] pks = await db.primary_keys(table) use_rowid = not pks diff --git a/docs/authentication.rst b/docs/authentication.rst index bda6a0b7..67112969 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -52,7 +52,7 @@ The URL on the first line includes a one-use token which can be used to sign in Permissions =========== -Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. This method is also used by Datasette core code itself, which allows plugins to help make decisions on which actions are allowed by implementing the :ref:`permission_allowed(...) ` plugin hook. +Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. This method is also used by Datasette core code itself, which allows plugins to help make decisions on which actions are allowed by implementing the :ref:`plugin_permission_allowed` plugin hook. .. _authentication_permissions_canned_queries: @@ -159,7 +159,7 @@ This is designed to help administrators and plugin authors understand exactly ho Permissions =========== -This section lists all of the permission checks that are carried out by Datasette core, along with their ``resource_type`` and ``resource_identifier`` if those are passed. +This section lists all of the permission checks that are carried out by Datasette core, along with the ``resource_identifier`` if it was passed. .. _permissions_view_instance: @@ -176,9 +176,6 @@ view-database Actor is allowed to view a database page, e.g. https://latest.datasette.io/fixtures -``resource_type`` - string - "database" - ``resource_identifier`` - string The name of the database @@ -189,9 +186,6 @@ view-database-download Actor is allowed to download a database, e.g. https://latest.datasette.io/fixtures.db -``resource_type`` - string - "database" - ``resource_identifier`` - string The name of the database @@ -202,9 +196,6 @@ view-table Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.io/fixtures/complex_foreign_keys -``resource_type`` - string - "table" - even if this is actually a SQL view - ``resource_identifier`` - tuple: (string, string) The name of the database, then the name of the table @@ -215,9 +206,6 @@ view-query Actor is allowed to view a :ref:`canned query ` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size -``resource_type`` - string - "query" - ``resource_identifier`` - string The name of the canned query @@ -228,9 +216,6 @@ execute-sql Actor is allowed to run arbitrary SQL queries against a specific database, e.g. https://latest.datasette.io/fixtures?sql=select+100 -``resource_type`` - string - "database" - ``resource_identifier`` - string The name of the database diff --git a/docs/internals.rst b/docs/internals.rst index 7498f017..1d61b6cb 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -121,8 +121,8 @@ Renders a `Jinja template `__ usin .. 
_datasette_permission_allowed: -await .permission_allowed(actor, action, resource_type=None, resource_identifier=None, default=False) ------------------------------------------------------------------------------------------------------ +await .permission_allowed(actor, action, resource_identifier=None, default=False) +--------------------------------------------------------------------------------- ``actor`` - dictionary The authenticated actor. This is usually ``request.actor``. @@ -130,9 +130,6 @@ await .permission_allowed(actor, action, resource_type=None, resource_identifier ``action`` - string The name of the action that is being permission checked. -``resource_type`` - string, optional - The type of resource being checked, e.g. ``"table"``. - ``resource_identifier`` - string, optional The resource identifier, e.g. the name of the table. diff --git a/docs/plugins.rst b/docs/plugins.rst index ecc7cbf1..118fab84 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1005,8 +1005,8 @@ Instead of returning a dictionary, this function can return an awaitable functio .. _plugin_permission_allowed: -permission_allowed(datasette, actor, action, resource_type, resource_identifier) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +permission_allowed(datasette, actor, action, resource_identifier) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. @@ -1017,10 +1017,7 @@ permission_allowed(datasette, actor, action, resource_type, resource_identifier) ``action`` - string The action to be performed, e.g. ``"edit-table"``. -``resource_type`` - string - The type of resource being acted on, e.g. ``"table"``. - -``resource`` - string +``resource_identifier`` - string An identifier for the individual resource, e.g. the name of the table. Called to check that an actor has permission to perform an action on a resource. Can return ``True`` if the action is allowed, ``False`` if the action is not allowed or ``None`` if the plugin does not have an opinion one way or the other. 
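The hook signature above drops ``resource_type`` entirely. As a minimal sketch of a plugin written against this simplified spec (not part of the patch: the ``("fixtures", "secret_table")`` resource is hypothetical, and pluggy lets an implementation declare only the arguments it uses):

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def permission_allowed(actor, action, resource_identifier):
        # Hypothetical rule: hide ("fixtures", "secret_table") from
        # anonymous actors; returning None defers to other plugins
        # and to the default permission rules
        if action == "view-table" and resource_identifier == (
            "fixtures",
            "secret_table",
        ):
            return actor is not None
        return None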
diff --git a/tests/conftest.py b/tests/conftest.py index 1921ae3a..7f1e9387 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -70,8 +70,8 @@ def check_permission_actions_are_documented(): action = kwargs.get("action").replace("-", "_") assert ( action in documented_permission_actions - ), "Undocumented permission action: {}, resource_type: {}, resource_identifier: {}".format( - action, kwargs["resource_type"], kwargs["resource_identifier"] + ), "Undocumented permission action: {}, resource_identifier: {}".format( + action, kwargs["resource_identifier"] ) pm.add_hookcall_monitoring( diff --git a/tests/fixtures.py b/tests/fixtures.py index 2ac73fb1..8210d34f 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -857,24 +857,21 @@ if __name__ == "__main__": def assert_permissions_checked(datasette, actions): - # actions is a list of "action" or (action, resource_type, resource_identifier) tuples + # actions is a list of "action" or (action, resource_identifier) tuples for action in actions: if isinstance(action, str): - resource_type = None resource_identifier = None else: - action, resource_type, resource_identifier = action + action, resource_identifier = action assert [ pc for pc in datasette._permission_checks if pc["action"] == action - and pc["resource_type"] == resource_type and pc["resource_identifier"] == resource_identifier - ], """Missing expected permission check: action={}, resource_type={}, resource_identifier={} + ], """Missing expected permission check: action={}, resource_identifier={} Permission checks seen: {} """.format( action, - resource_type, resource_identifier, json.dumps(list(datasette._permission_checks), indent=4), ) From 799c5d53570d773203527f19530cf772dc2eeb24 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 11:59:11 -0700 Subject: [PATCH 0325/2113] Renamed resource_identifier to resource, refs #817 --- datasette/app.py | 11 +++-------- datasette/default_permissions.py | 8 ++++---- datasette/hookspecs.py | 2 +- datasette/templates/permissions_debug.html | 4 ++-- datasette/utils/__init__.py | 6 +++--- datasette/views/base.py | 7 ++----- datasette/views/database.py | 2 +- docs/authentication.rst | 12 ++++++------ docs/internals.rst | 10 ++++++---- docs/plugins.rst | 6 ++++-- tests/conftest.py | 4 ++-- tests/fixtures.py | 15 ++++++--------- 12 files changed, 40 insertions(+), 47 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index c12e0af0..2f89d17c 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -464,16 +464,11 @@ class Datasette: else: return [] - async def permission_allowed( - self, actor, action, resource_identifier=None, default=False - ): + async def permission_allowed(self, actor, action, resource=None, default=False): "Check permissions using the permissions_allowed plugin hook" result = None for check in pm.hook.permission_allowed( - datasette=self, - actor=actor, - action=action, - resource_identifier=resource_identifier, + datasette=self, actor=actor, action=action, resource=resource, ): if callable(check): check = check() @@ -490,7 +485,7 @@ class Datasette: "when": datetime.datetime.utcnow().isoformat(), "actor": actor, "action": action, - "resource_identifier": resource_identifier, + "resource": resource, "used_default": used_default, "result": result, } diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index d27704aa..e989c0fa 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -3,7 +3,7 @@ from datasette.utils import 
actor_matches_allow @hookimpl -def permission_allowed(datasette, actor, action, resource_identifier): +def permission_allowed(datasette, actor, action, resource): if action == "permissions-debug": if actor and actor.get("id") == "root": return True @@ -12,13 +12,12 @@ def permission_allowed(datasette, actor, action, resource_identifier): if allow is not None: return actor_matches_allow(actor, allow) elif action == "view-database": - database_allow = datasette.metadata("allow", database=resource_identifier) + database_allow = datasette.metadata("allow", database=resource) if database_allow is None: return True return actor_matches_allow(actor, database_allow) elif action == "view-table": - database, table = resource_identifier + database, table = resource tables = datasette.metadata("tables", database=database) or {} table_allow = (tables.get(table) or {}).get("allow") if table_allow is None: @@ -25,7 +25,7 @@ def permission_allowed(datasette, actor, action, resource_identifier): return actor_matches_allow(actor, table_allow) elif action == "view-query": # Check if this query has a "allow" block in metadata - database, query_name = resource_identifier + database, query_name = resource queries_metadata = datasette.metadata("queries", database=database) assert query_name in queries_metadata if isinstance(queries_metadata[query_name], str): diff --git a/datasette/hookspecs.py index 3c202553..d5fd232f 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -66,5 +66,5 @@ def actor_from_request(datasette, request): @hookspec -def permission_allowed(datasette, actor, action, resource_identifier): +def permission_allowed(datasette, actor, action, resource): "Check if actor is allowed to perform this action - return True, False or None" diff --git a/datasette/templates/permissions_debug.html index 7d3ee712..d898ea8c 100644 --- a/datasette/templates/permissions_debug.html +++ b/datasette/templates/permissions_debug.html @@ -46,8 +46,8 @@ {% endif %}

    Actor: {{ check.actor|tojson }}

-{% if check.resource_identifier %}
-Resource: {{ check.resource_identifier }}
+{% if check.resource %}
+Resource: {{ check.resource }}

    {% endif %} {% endfor %} diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 257d1285..7c1f34e0 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -876,14 +876,14 @@ def actor_matches_allow(actor, allow): return False -async def check_visibility(datasette, actor, action, resource_identifier, default=True): +async def check_visibility(datasette, actor, action, resource, default=True): "Returns (visible, private) - visible = can you see it, private = can others see it too" visible = await datasette.permission_allowed( - actor, action, resource_identifier=resource_identifier, default=default, + actor, action, resource=resource, default=default, ) if not visible: return (False, False) private = not await datasette.permission_allowed( - None, action, resource_identifier=resource_identifier, default=default, + None, action, resource=resource, default=default, ) return visible, private diff --git a/datasette/views/base.py b/datasette/views/base.py index 2ca5e86a..f327c6cd 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -64,12 +64,9 @@ class BaseView(AsgiView): response.body = b"" return response - async def check_permission(self, request, action, resource_identifier=None): + async def check_permission(self, request, action, resource=None): ok = await self.ds.permission_allowed( - request.actor, - action, - resource_identifier=resource_identifier, - default=True, + request.actor, action, resource=resource, default=True, ) if not ok: raise Forbidden(action) diff --git a/datasette/views/database.py b/datasette/views/database.py index d562ecb1..e1b29c27 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -88,7 +88,7 @@ class DatabaseView(DataView): "views": views, "queries": canned_queries, "private": not await self.ds.permission_allowed( - None, "view-database", "database", database + None, "view-database", database ), }, { diff --git a/docs/authentication.rst b/docs/authentication.rst index 67112969..f5209dfc 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -159,7 +159,7 @@ This is designed to help administrators and plugin authors understand exactly ho Permissions =========== -This section lists all of the permission checks that are carried out by Datasette core, along with the ``resource_identifier`` if it was passed. +This section lists all of the permission checks that are carried out by Datasette core, along with the ``resource`` if it was passed. .. _permissions_view_instance: @@ -176,7 +176,7 @@ view-database Actor is allowed to view a database page, e.g. https://latest.datasette.io/fixtures -``resource_identifier`` - string +``resource`` - string The name of the database .. _permissions_view_database_download: @@ -186,7 +186,7 @@ view-database-download Actor is allowed to download a database, e.g. https://latest.datasette.io/fixtures.db -``resource_identifier`` - string +``resource`` - string The name of the database .. _permissions_view_table: @@ -196,7 +196,7 @@ view-table Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.io/fixtures/complex_foreign_keys -``resource_identifier`` - tuple: (string, string) +``resource`` - tuple: (string, string) The name of the database, then the name of the table .. _permissions_view_query: @@ -206,7 +206,7 @@ view-query Actor is allowed to view a :ref:`canned query ` page, e.g. 
https://latest.datasette.io/fixtures/pragma_cache_size -``resource_identifier`` - string +``resource`` - string The name of the canned query .. _permissions_execute_sql: @@ -216,7 +216,7 @@ execute-sql Actor is allowed to run arbitrary SQL queries against a specific database, e.g. https://latest.datasette.io/fixtures?sql=select+100 -``resource_identifier`` - string +``resource`` - string The name of the database .. _permissions_permissions_debug: diff --git a/docs/internals.rst b/docs/internals.rst index 1d61b6cb..83dbd897 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -121,8 +121,8 @@ Renders a `Jinja template `__ usin .. _datasette_permission_allowed: -await .permission_allowed(actor, action, resource_identifier=None, default=False) ---------------------------------------------------------------------------------- +await .permission_allowed(actor, action, resource=None, default=False) +---------------------------------------------------------------------- ``actor`` - dictionary The authenticated actor. This is usually ``request.actor``. @@ -130,13 +130,15 @@ await .permission_allowed(actor, action, resource_identifier=None, default=False ``action`` - string The name of the action that is being permission checked. -``resource_identifier`` - string, optional - The resource identifier, e.g. the name of the table. +``resource`` - string, optional + The resource, e.g. the name of the table. Only some permissions apply to a resource. Check if the given actor has permission to perform the given action on the given resource. This uses plugins that implement the :ref:`plugin_permission_allowed` plugin hook to decide if the action is allowed or not. If none of the plugins express an opinion, the return value will be the ``default`` argument. This is deny, but you can pass ``default=True`` to default allow instead. +See :ref:`permissions` for a full list of permissions included in Datasette core. + .. _datasette_get_database: .get_database(name) diff --git a/docs/plugins.rst b/docs/plugins.rst index 118fab84..56041d0c 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1005,7 +1005,7 @@ Instead of returning a dictionary, this function can return an awaitable functio .. _plugin_permission_allowed: -permission_allowed(datasette, actor, action, resource_identifier) +permission_allowed(datasette, actor, action, resource) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ``datasette`` - :ref:`internals_datasette` @@ -1017,7 +1017,9 @@ permission_allowed(datasette, actor, action, resource_identifier) ``action`` - string The action to be performed, e.g. ``"edit-table"``. -``resource_identifier`` - string +``resource`` - string or None An identifier for the individual resource, e.g. the name of the table. Called to check that an actor has permission to perform an action on a resource. Can return ``True`` if the action is allowed, ``False`` if the action is not allowed or ``None`` if the plugin does not have an opinion one way or the other. + +See :ref:`permissions` for a full list of permissions included in Datasette core. 
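For callers of the internals API the rename just means passing ``resource=`` as the keyword argument. A hedged sketch of caller code under this patch (the ``("fixtures", "facetable")`` tuple is illustrative; ``default=True`` mirrors how ``check_permission`` invokes the method):

.. code-block:: python

    async def can_view_facetable(datasette, actor):
        # ("fixtures", "facetable") is an illustrative (database, table) pair
        return await datasette.permission_allowed(
            actor,
            "view-table",
            resource=("fixtures", "facetable"),
            default=True,
        )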
diff --git a/tests/conftest.py b/tests/conftest.py index 7f1e9387..320aa45b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -70,8 +70,8 @@ def check_permission_actions_are_documented(): action = kwargs.get("action").replace("-", "_") assert ( action in documented_permission_actions - ), "Undocumented permission action: {}, resource_identifier: {}".format( - action, kwargs["resource_identifier"] + ), "Undocumented permission action: {}, resource: {}".format( + action, kwargs["resource"] ) pm.add_hookcall_monitoring( diff --git a/tests/fixtures.py b/tests/fixtures.py index 8210d34f..e9175b57 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -857,21 +857,18 @@ if __name__ == "__main__": def assert_permissions_checked(datasette, actions): - # actions is a list of "action" or (action, resource_identifier) tuples + # actions is a list of "action" or (action, resource) tuples for action in actions: if isinstance(action, str): - resource_identifier = None + resource = None else: - action, resource_identifier = action + action, resource = action assert [ pc for pc in datasette._permission_checks - if pc["action"] == action - and pc["resource_identifier"] == resource_identifier - ], """Missing expected permission check: action={}, resource_identifier={} + if pc["action"] == action and pc["resource"] == resource + ], """Missing expected permission check: action={}, resource={} Permission checks seen: {} """.format( - action, - resource_identifier, - json.dumps(list(datasette._permission_checks), indent=4), + action, resource, json.dumps(list(datasette._permission_checks), indent=4), ) From 040fc0546f1ad602125ecdc27d9d013d830aa808 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 12:02:56 -0700 Subject: [PATCH 0326/2113] Updated tests, refs #817 --- tests/test_permissions.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 475f93dd..90ba1494 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -210,41 +210,41 @@ def test_query_list_respects_view_query(): "path,permissions", [ ("/", ["view-instance"]), - ("/fixtures", ["view-instance", ("view-database", "database", "fixtures")]), + ("/fixtures", ["view-instance", ("view-database", "fixtures")]), ( "/fixtures/facetable/1", - ["view-instance", ("view-table", "table", ("fixtures", "facetable"))], + ["view-instance", ("view-table", ("fixtures", "facetable"))], ), ( "/fixtures/simple_primary_key", [ "view-instance", - ("view-database", "database", "fixtures"), - ("view-table", "table", ("fixtures", "simple_primary_key")), + ("view-database", "fixtures"), + ("view-table", ("fixtures", "simple_primary_key")), ], ), ( "/fixtures?sql=select+1", [ "view-instance", - ("view-database", "database", "fixtures"), - ("execute-sql", "database", "fixtures"), + ("view-database", "fixtures"), + ("execute-sql", "fixtures"), ], ), ( "/fixtures.db", [ "view-instance", - ("view-database", "database", "fixtures"), - ("view-database-download", "database", "fixtures"), + ("view-database", "fixtures"), + ("view-database-download", "fixtures"), ], ), ( "/fixtures/neighborhood_search", [ "view-instance", - ("view-database", "database", "fixtures"), - ("view-query", "query", ("fixtures", "neighborhood_search")), + ("view-database", "fixtures"), + ("view-query", ("fixtures", "neighborhood_search")), ], ), ], From c7d145e016522dd6ee229d4d0b3ba79a7a8877c1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 12:06:05 
-0700 Subject: [PATCH 0327/2113] Updated example for extra_template_vars hook, closes #816 --- docs/plugins.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index 56041d0c..6b1e60f2 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -689,14 +689,14 @@ Function that returns an awaitable function that returns a dictionary Datasette runs Jinja2 in `async mode `__, which means you can add awaitable functions to the template scope and they will be automatically awaited when they are rendered by the template. -Here's an example plugin that returns an authentication object from the ASGI scope: +Here's an example plugin that adds a ``"user_agent"`` variable to the template context containing the current request's User-Agent header: .. code-block:: python @hookimpl def extra_template_vars(request): return { - "auth": request.scope.get("auth") + "user_agent": request.headers.get("user-agent") } This example returns an awaitable function which adds a list of ``hidden_table_names`` to the context: From 54370853828bdf87ca844fd0fc00900e0e2e659d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 12:32:27 -0700 Subject: [PATCH 0328/2113] Documentation for allow blocks on more stuff, closes #811 --- docs/authentication.rst | 121 ++++++++++++++++++++++++++++++++-------- docs/sql_queries.rst | 2 +- 2 files changed, 100 insertions(+), 23 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index f5209dfc..a6c4ee79 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -15,7 +15,7 @@ Actors Through plugins, Datasette can support both authenticated users (with cookies) and authenticated API agents (via authentication tokens). The word "actor" is used to cover both of these cases. -Every request to Datasette has an associated actor value. This can be ``None`` for unauthenticated requests, or a JSON compatible Python dictionary for authenticated users or API agents. +Every request to Datasette has an associated actor value, available in the code as ``request.actor``. This can be ``None`` for unauthenticated requests, or a JSON compatible Python dictionary for authenticated users or API agents. The only required field in an actor is ``"id"``, which must be a string. Plugins may decide to add any other fields to the actor dictionary. @@ -24,7 +24,7 @@ Plugins can use the :ref:`plugin_actor_from_request` hook to implement custom lo .. _authentication_root: Using the "root" actor -====================== +---------------------- Datasette currently leaves almost all forms of authentication to plugins - `datasette-auth-github `__ for example. @@ -49,37 +49,40 @@ The URL on the first line includes a one-use token which can be used to sign in .. _authentication_permissions: -Permissions -=========== +Checking permission +=================== Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. This method is also used by Datasette core code itself, which allows plugins to help make decisions on which actions are allowed by implementing the :ref:`plugin_permission_allowed` plugin hook. -.. _authentication_permissions_canned_queries: +.. _authentication_permissions_metadata: -Permissions for canned queries -============================== +Configuring permissions in metadata.json +======================================== -Datasette's :ref:`canned queries ` default to allowing any user to execute them. 
+You can limit who is allowed to view different parts of your Datasette instance using ``"allow"`` keys in your :ref:`metadata` configuration. -You can limit who is allowed to execute a specific query with the ``"allow"`` key in the :ref:`metadata` configuration for that query. +You can control the following: -Here's how to restrict access to a write query to just the "root" user: +* Access to the entire Datasette instance +* Access to specific databases +* Access to specific tables and views +* Access to specific :ref:`canned_queries` + +If a user cannot access a specific database, they will not be able to access tables, views or queries within that database. If a user cannot access the instance they will not be able to access any of the databases, tables, views or queries. + +.. _authentication_permissions_instance: + +Controlling access to an instance +--------------------------------- + +Here's how to restrict access to your entire Datasette instance to just the ``"id": "root"`` user: .. code-block:: json { - "databases": { - "mydatabase": { - "queries": { - "add_name": { - "sql": "INSERT INTO names (name) VALUES (:name)", - "write": true, - "allow": { - "id": ["root"] - } - } - } - } + "title": "My private Datasette instance", + "allow": { + "id": "root" } } @@ -126,6 +129,80 @@ If you want to provide access to any actor with a value for a specific key, use These keys act as an "or" mechanism. A actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block. +.. _authentication_permissions_database: + +Controlling access to specific databases +---------------------------------------- + +To limit access to a specific ``private.db`` database to just authenticated users, use the ``"allow"`` block like this: + +.. code-block:: json + + { + "databases": { + "private": { + "allow": { + "id": "*" + } + } + } + } + +.. _authentication_permissions_table: + +Controlling access to specific tables and views +----------------------------------------------- + +To limit access to the ``users`` table in your ``bakery.db`` database: + +.. code-block:: json + + { + "databases": { + "bakery": { + "tables": { + "users": { + "allow": { + "id": "*" + } + } + } + } + } + } + +This works for SQL views as well - you can treat them as if they are tables. + +.. warning:: + Restricting access to tables and views in this way will NOT prevent users from querying them using arbitrary SQL queries. + + If you are restricting access to specific tables you should also use the ``"allow_sql"`` block to prevent users from accessing + +.. _authentication_permissions_table: + +Controlling access to specific canned queries +--------------------------------------------- + +To limit access to the ``add_name`` canned query in your ``dogs.db`` database to just the :ref:`root user`: + +.. code-block:: json + + { + "databases": { + "dogs": { + "queries": { + "add_name": { + "sql": "INSERT INTO names (name) VALUES (:name)", + "write": true, + "allow": { + "id": ["root"] + } + } + } + } + } + } + .. _authentication_actor_matches_allow: actor_matches_allow() diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index 5df8bdb0..5295a2e0 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -217,7 +217,7 @@ Writable canned queries Canned queries by default are read-only. You can use the ``"write": true`` key to indicate that a canned query can write to the database. 
-See :ref:`authentication_permissions_canned_queries` for details on how to add permission checks to canned queries, using the ``"allow"`` key. +See :ref:`authentication_permissions_metadata` for details on how to add permission checks to canned queries, using the ``"allow"`` key. .. code-block:: json From 8205d58316ced1d5ae589b29a5a1b5ecb6257ab0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 13:10:40 -0700 Subject: [PATCH 0329/2113] Corrected documentation for resource in view-query --- docs/authentication.rst | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index a6c4ee79..88808428 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -245,7 +245,6 @@ view-instance Top level permission - Actor is allowed to view any pages within this instance, starting at https://latest.datasette.io/ - .. _permissions_view_database: view-database @@ -283,8 +282,8 @@ view-query Actor is allowed to view a :ref:`canned query ` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size -``resource`` - string - The name of the canned query +``resource`` - tuple: (string, string) + The name of the database, then the name of the canned query .. _permissions_execute_sql: From e0a4664fbab5556454dac7f3c798253a34db2928 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 15:09:57 -0700 Subject: [PATCH 0330/2113] Better example plugin for permission_allowed Also fixed it so default permission checks run after plugin permission checks, refs #818 --- datasette/default_permissions.py | 2 +- docs/authentication.rst | 4 ++-- docs/plugins.rst | 40 ++++++++++++++++++++++++++++++-- 3 files changed, 41 insertions(+), 5 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index e989c0fa..a2f4a315 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -2,7 +2,7 @@ from datasette import hookimpl from datasette.utils import actor_matches_allow -@hookimpl +@hookimpl(tryfirst=True) def permission_allowed(datasette, actor, action, resource): if action == "permissions-debug": if actor and actor.get("id") == "root": diff --git a/docs/authentication.rst b/docs/authentication.rst index 88808428..34d46511 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -174,11 +174,11 @@ To limit access to the ``users`` table in your ``bakery.db`` database: This works for SQL views as well - you can treat them as if they are tables. .. warning:: - Restricting access to tables and views in this way will NOT prevent users from querying them using arbitrary SQL queries. + Restricting access to tables and views in this way will NOT prevent users from querying them using arbitrary SQL queries, `like this `__ for example. If you are restricting access to specific tables you should also use the ``"allow_sql"`` block to prevent users from accessing -.. _authentication_permissions_table: +.. _authentication_permissions_query: Controlling access to specific canned queries --------------------------------------------- diff --git a/docs/plugins.rst b/docs/plugins.rst index 6b1e60f2..73d2eabd 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1006,7 +1006,7 @@ Instead of returning a dictionary, this function can return an awaitable functio .. 
_plugin_permission_allowed: permission_allowed(datasette, actor, action, resource) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. @@ -1022,4 +1022,42 @@ permission_allowed(datasette, actor, action, resource) Called to check that an actor has permission to perform an action on a resource. Can return ``True`` if the action is allowed, ``False`` if the action is not allowed or ``None`` if the plugin does not have an opinion one way or the other. -See :ref:`permissions` for a full list of permissions included in Datasette core. +Here's an example plugin which randomly selects if a permission should be allowed or denied, except for ``view-instance`` which always uses the default permission scheme instead. + +.. code-block:: python + + from datasette import hookimpl + import random + + @hookimpl + def permission_allowed(action): + if action != "view-instance": + # Return True or False at random + return random.random() > 0.5 + # Returning None falls back to default permissions + +This function can alternatively return an awaitable function which itself returns ``True``, ``False`` or ``None``. You can use this option if you need to execute additional database queries using ``await datasette.execute(...)``. + +Here's an example that allows users to view the ``admin_log`` table only if their actor ``id`` is present in the ``admin_users`` table. It also disallows arbitrary SQL queries for the ``staff.db`` database for all users. + +.. code-block:: python + + @hookimpl + def permission_allowed(datasette, actor, action, resource): + async def inner(): + if action == "execute-sql" and resource == "staff": + return False + if action == "view-table" and resource == ("staff", "admin_log"): + if not actor: + return False + user_id = actor["id"] + results = await datasette.get_database("staff").execute( + "select count(*) from admin_users where user_id = :user_id", + {"user_id": user_id}, + ) + # execute() returns a Results object - check the count itself + return results.rows[0][0] > 0 + + return inner + +See :ref:`permissions` for a full list of permissions that are included in Datasette core. From 49d6d2f7b0f6cb02e25022e1c9403811f1fa0a7c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 17:05:44 -0700 Subject: [PATCH 0331/2113] allow_sql block to control execute-sql permission in metadata.json, closes #813 Also removed the --config allow_sql:0 mechanism in favour of the new allow_sql block.
--- datasette/app.py | 1 - datasette/default_permissions.py | 8 ++++++++ datasette/templates/database.html | 2 +- datasette/templates/query.html | 2 +- datasette/templates/table.html | 2 +- datasette/views/database.py | 8 ++++++-- datasette/views/table.py | 9 +++++++-- docs/authentication.rst | 33 ++++++++++++++++++++++++++++++- docs/config.rst | 9 --------- docs/json_api.rst | 2 +- docs/pages.rst | 2 +- docs/sql_queries.rst | 4 ++-- tests/test_api.py | 12 ++--------- tests/test_config_dir.py | 3 --- tests/test_html.py | 10 +--------- tests/test_permissions.py | 29 +++++++++++++++++++++++++++ 16 files changed, 92 insertions(+), 44 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 2f89d17c..a7c3c66a 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -110,7 +110,6 @@ CONFIG_OPTIONS = ( "Allow users to download the original SQLite database files", ), ConfigOption("suggest_facets", True, "Calculate and display suggested facets"), - ConfigOption("allow_sql", True, "Allow arbitrary SQL queries via ?sql= parameter"), ConfigOption( "default_cache_ttl", 5, diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index a2f4a315..e750acbf 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -34,3 +34,11 @@ def permission_allowed(datasette, actor, action, resource): if allow is None: return True return actor_matches_allow(actor, allow) + elif action == "execute-sql": + # Use allow_sql block from database block, or from top-level + database_allow_sql = datasette.metadata("allow_sql", database=resource) + if database_allow_sql is None: + database_allow_sql = datasette.metadata("allow_sql") + if database_allow_sql is None: + return True + return actor_matches_allow(actor, database_allow_sql) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 100faee4..5ae51ef7 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -22,7 +22,7 @@ {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} -{% if config.allow_sql %} +{% if allow_execute_sql %}

    Custom SQL query

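The ``allow_execute_sql`` variable used in this template is resolved from the ``allow_sql`` metadata block, which the ``default_permissions.py`` hunk earlier in this patch checks via ``actor_matches_allow()``. A quick illustrative sketch of that helper with an ``allow_sql``-shaped block (the actors are made up; the results shown are expectations, not output captured from this patch):

.. code-block:: python

    from datasette.utils import actor_matches_allow

    allow_sql = {"id": "root"}  # same shape as an "allow_sql" metadata block
    print(actor_matches_allow({"id": "root"}, allow_sql))   # expected: True
    print(actor_matches_allow({"id": "alice"}, allow_sql))  # expected: False
    print(actor_matches_allow(None, allow_sql))             # anonymous: False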
    diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 7771b101..c65953fb 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -35,7 +35,7 @@

    Custom SQL query{% if display_rows %} returning {% if truncated %}more than {% endif %}{{ "{:,}".format(display_rows|length) }} row{% if display_rows|length == 1 %}{% else %}s{% endif %}{% endif %} {% if hide_sql %}(show){% else %}(hide){% endif %}

{% if not hide_sql %}
-{% if editable and config.allow_sql %}
+{% if editable and allow_execute_sql %}

    {% else %}
    {% if query %}{{ query.sql }}{% endif %}
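The same ``execute-sql`` permission now also gates the ``?_where=`` table parameter (see the ``views/table.py`` hunk below). A hedged sketch of exercising that with the ``make_app_client`` helper from this patch's test suite, mirroring the updated assertion in ``tests/test_api.py``:

.. code-block:: python

    from tests.fixtures import make_app_client

    # An empty allow_sql block denies SQL execution to everyone,
    # so _where= should now be rejected with a 403
    with make_app_client(metadata={"allow_sql": {}}) as client:
        response = client.get(
            "/fixtures/facetable.json?_where=neighborhood='Dogpatch'"
        )
        assert response.status == 403
        assert response.json["error"] == "_where= is not allowed"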
    diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 1289e125..373fd576 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -109,7 +109,7 @@ {% endif %} -{% if query.sql and config.allow_sql %} +{% if query.sql and allow_execute_sql %}

    View and edit SQL

    {% endif %} diff --git a/datasette/views/database.py b/datasette/views/database.py index e1b29c27..ee99bc2d 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -26,8 +26,6 @@ class DatabaseView(DataView): self.ds.update_with_inherited_metadata(metadata) if request.args.get("sql"): - if not self.ds.config("allow_sql"): - raise DatasetteError("sql= is not allowed", status=400) sql = request.args.get("sql") validate_sql_select(sql) return await QueryView(self.ds).data( @@ -90,6 +88,9 @@ class DatabaseView(DataView): "private": not await self.ds.permission_allowed( None, "view-database", database ), + "allow_execute_sql": await self.ds.permission_allowed( + request.actor, "execute-sql", database, default=True + ), }, { "show_hidden": request.args.get("_show_hidden"), @@ -289,6 +290,9 @@ class QueryView(DataView): "columns": columns, "query": {"sql": sql, "params": params}, "private": private, + "allow_execute_sql": await self.ds.permission_allowed( + request.actor, "execute-sql", database, default=True + ), }, extra_template, templates, diff --git a/datasette/views/table.py b/datasette/views/table.py index 4cec0cda..91245293 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -342,8 +342,10 @@ class TableView(RowTableShared): extra_wheres_for_ui = [] # Add _where= from querystring if "_where" in request.args: - if not self.ds.config("allow_sql"): - raise DatasetteError("_where= is not allowed", status=400) + if not await self.ds.permission_allowed( + request.actor, "execute-sql", resource=database, default=True, + ): + raise DatasetteError("_where= is not allowed", status=403) else: where_clauses.extend(request.args.getlist("_where")) extra_wheres_for_ui = [ @@ -839,6 +841,9 @@ class TableView(RowTableShared): "next": next_value and str(next_value) or None, "next_url": next_url, "private": private, + "allow_execute_sql": await self.ds.permission_allowed( + request.actor, "execute-sql", database, default=True + ), }, extra_template, ( diff --git a/docs/authentication.rst b/docs/authentication.rst index 34d46511..f7281db4 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -176,7 +176,7 @@ This works for SQL views as well - you can treat them as if they are tables. .. warning:: Restricting access to tables and views in this way will NOT prevent users from querying them using arbitrary SQL queries, `like this `__ for example. - If you are restricting access to specific tables you should also use the ``"allow_sql"`` block to prevent users from accessing + If you are restricting access to specific tables you should also use the ``"allow_sql"`` block to prevent users from bypassing the limit with their own SQL queries - see :ref:`authentication_permissions_execute_sql`. .. _authentication_permissions_query: @@ -203,6 +203,37 @@ To limit access to the ``add_name`` canned query in your ``dogs.db`` database to } } +.. _authentication_permissions_execute_sql: + +Controlling the ability to execute arbitrary SQL +------------------------------------------------ + +The ``"allow_sql"`` block can be used to control who is allowed to execute arbitrary SQL queries, both using the form on the database page e.g. https://latest.datasette.io/fixtures or by appending a ``?_where=`` parameter to the table page as seen on https://latest.datasette.io/fixtures/facetable?_where=city_id=1. + +To enable just the :ref:`root user` to execute SQL for all databases in your instance, use the following: + +.. 
code-block:: json + + { + "allow_sql": { + "id": "root" + } + } + +To limit this ability for just one specific database, use this: + +.. code-block:: json + + { + "databases": { + "mydatabase": { + "allow_sql": { + "id": "root" + } + } + } + } + .. _authentication_actor_matches_allow: actor_matches_allow() diff --git a/docs/config.rst b/docs/config.rst index da93e40a..56b38613 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -150,15 +150,6 @@ Should users be able to download the original SQLite database using a link on th datasette mydatabase.db --config allow_download:off -.. _config_allow_sql: - -allow_sql -~~~~~~~~~ - -Enable/disable the ability for users to run custom SQL directly against a database. To disable this feature, run:: - - datasette mydatabase.db --config allow_sql:off - .. _config_default_cache_ttl: default_cache_ttl diff --git a/docs/json_api.rst b/docs/json_api.rst index 7d37d425..af98eecd 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -291,7 +291,7 @@ Special table arguments though this could potentially result in errors if the wrong syntax is used. ``?_where=SQL-fragment`` - If the :ref:`config_allow_sql` config option is enabled, this parameter + If the :ref:`permissions_execute_sql` permission is enabled, this parameter can be used to pass one or more additional SQL fragments to be used in the `WHERE` clause of the SQL used to query the table. diff --git a/docs/pages.rst b/docs/pages.rst index f220f94d..ce8f5d06 100644 --- a/docs/pages.rst +++ b/docs/pages.rst @@ -29,7 +29,7 @@ Database ======== Each database has a page listing the tables, views and canned queries -available for that database. If the :ref:`config_allow_sql` config option is enabled (it's turned on by default) there will also be an interface for executing arbitrary SQL select queries against the data. +available for that database. If the :ref:`permissions_execute_sql` permission is enabled (it's on by default) there will also be an interface for executing arbitrary SQL select queries against the data. Examples: diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index 5295a2e0..db72deb7 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -12,8 +12,8 @@ you like. You can also construct queries using the filter interface on the tables page, then click "View and edit SQL" to open that query in the custom SQL editor. -Note that this interface is only available if the :ref:`config_allow_sql` option -has not been disabled. +Note that this interface is only available if the :ref:`permissions_execute_sql` +permission is allowed. 
Any Datasette SQL query is reflected in the URL of the page, allowing you to bookmark them, share them with others and navigate through previous queries diff --git a/tests/test_api.py b/tests/test_api.py index 13a98b6a..1a54edec 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -634,13 +634,6 @@ def test_invalid_custom_sql(app_client): assert "Statement must be a SELECT" == response.json["error"] -def test_allow_sql_off(): - with make_app_client(config={"allow_sql": False}) as client: - response = client.get("/fixtures.json?sql=select+sleep(0.01)") - assert 400 == response.status - assert "sql= is not allowed" == response.json["error"] - - def test_table_json(app_client): response = app_client.get("/fixtures/simple_primary_key.json?_shape=objects") assert response.status == 200 @@ -1137,9 +1130,9 @@ def test_table_filter_extra_where_invalid(app_client): def test_table_filter_extra_where_disabled_if_no_sql_allowed(): - with make_app_client(config={"allow_sql": False}) as client: + with make_app_client(metadata={"allow_sql": {}}) as client: response = client.get("/fixtures/facetable.json?_where=neighborhood='Dogpatch'") - assert 400 == response.status + assert 403 == response.status assert "_where= is not allowed" == response.json["error"] @@ -1325,7 +1318,6 @@ def test_config_json(app_client): "allow_download": True, "allow_facet": True, "suggest_facets": True, - "allow_sql": True, "default_cache_ttl": 5, "default_cache_ttl_hashed": 365 * 24 * 60 * 60, "num_sql_threads": 3, diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index 490b1f1d..b1f6994f 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -10,7 +10,6 @@ from datasette import hookimpl @hookimpl def extra_template_vars(): - print("this is template vars") return { "from_plugin": "hooray" } @@ -18,7 +17,6 @@ def extra_template_vars(): METADATA = {"title": "This is from metadata"} CONFIG = { "default_cache_ttl": 60, - "allow_sql": False, } CSS = """ body { margin-top: 3em} @@ -91,7 +89,6 @@ def test_config(config_dir_client): response = config_dir_client.get("/-/config.json") assert 200 == response.status assert 60 == response.json["default_cache_ttl"] - assert not response.json["allow_sql"] def test_plugins(config_dir_client): diff --git a/tests/test_html.py b/tests/test_html.py index cb0e0c90..e6933dfe 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -924,16 +924,8 @@ def test_allow_download_off(): assert 403 == response.status -def test_allow_sql_on(app_client): - response = app_client.get("/fixtures") - soup = Soup(response.body, "html.parser") - assert len(soup.findAll("textarea", {"name": "sql"})) - response = app_client.get("/fixtures/sortable") - assert b"View and edit SQL" in response.body - - def test_allow_sql_off(): - with make_app_client(config={"allow_sql": False}) as client: + with make_app_client(metadata={"allow_sql": {}}) as client: response = client.get("/fixtures") soup = Soup(response.body, "html.parser") assert not len(soup.findAll("textarea", {"name": "sql"})) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 90ba1494..d8c98825 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -186,6 +186,35 @@ def test_view_query(allow, expected_anon, expected_auth): assert ">fixtures 🔒" in auth_response.text +@pytest.mark.parametrize( + "metadata", + [ + {"allow_sql": {"id": "root"}}, + {"databases": {"fixtures": {"allow_sql": {"id": "root"}}}}, + ], +) +def test_execute_sql(metadata): + with make_app_client(metadata=metadata) 
as client: + form_fragment = ' Date: Mon, 8 Jun 2020 17:35:23 -0700 Subject: [PATCH 0332/2113] Fixed broken CSS on 404 page, closes #777 --- datasette/app.py | 11 ++++++++++- tests/test_html.py | 12 ++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index a7c3c66a..d562e611 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1015,7 +1015,16 @@ class DatasetteRouter(AsgiRouter): templates = ["500.html"] if status != 500: templates = ["{}.html".format(status)] + templates - info.update({"ok": False, "error": message, "status": status, "title": title}) + info.update( + { + "ok": False, + "error": message, + "status": status, + "title": title, + "base_url": self.ds.config("base_url"), + "app_css_hash": self.ds.app_css_hash(), + } + ) headers = {} if self.ds.cors: headers["Access-Control-Allow-Origin"] = "*" diff --git a/tests/test_html.py b/tests/test_html.py index e6933dfe..f9b18daa 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -965,6 +965,18 @@ def inner_html(soup): return inner_html.strip() +@pytest.mark.parametrize("path", ["/404", "/fixtures/404"]) +def test_404(app_client, path): + response = app_client.get(path) + assert 404 == response.status + assert ( + ' Date: Mon, 8 Jun 2020 19:22:40 -0700 Subject: [PATCH 0333/2113] Fixed test_table_not_exists_json test --- datasette/app.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index d562e611..79f52a54 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1016,14 +1016,7 @@ class DatasetteRouter(AsgiRouter): if status != 500: templates = ["{}.html".format(status)] + templates info.update( - { - "ok": False, - "error": message, - "status": status, - "title": title, - "base_url": self.ds.config("base_url"), - "app_css_hash": self.ds.app_css_hash(), - } + {"ok": False, "error": message, "status": status, "title": title,} ) headers = {} if self.ds.cors: @@ -1033,7 +1026,16 @@ class DatasetteRouter(AsgiRouter): else: template = self.ds.jinja_env.select_template(templates) await asgi_send_html( - send, await template.render_async(info), status=status, headers=headers + send, + await template.render_async( + dict( + info, + base_url=self.ds.config("base_url"), + app_css_hash=self.ds.app_css_hash(), + ) + ), + status=status, + headers=headers, ) From f5e79adf26d0daa3831e3fba022f1b749a9efdee Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 20:12:06 -0700 Subject: [PATCH 0334/2113] register_routes() plugin hook (#819) Fixes #215 --- datasette/app.py | 21 ++++++++++++++++ datasette/hookspecs.py | 5 ++++ datasette/utils/__init__.py | 12 ++++++++- datasette/utils/asgi.py | 2 +- docs/index.rst | 2 +- docs/plugins.rst | 50 ++++++++++++++++++++++++++++++++++++- tests/fixtures.py | 1 + tests/plugins/my_plugin.py | 25 +++++++++++++++++++ tests/test_plugins.py | 15 +++++++++++ 9 files changed, 129 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 79f52a54..120091f7 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -39,6 +39,7 @@ from .renderer import json_renderer from .database import Database, QueryInterrupted from .utils import ( + async_call_with_supported_arguments, escape_css_string, escape_sqlite, format_bytes, @@ -783,6 +784,10 @@ class Datasette: "Returns an ASGI app function that serves the whole of Datasette" routes = [] + for routes_to_add in pm.hook.register_routes(): + for regex, view_fn in routes_to_add: + routes.append((regex, 
wrap_view(view_fn, self))) + def add_route(view, regex): routes.append((regex, view)) @@ -1048,3 +1053,19 @@ def _cleaner_task_str(task): # running at /Users/simonw/Dropbox/Development/datasette/venv-3.7.5/lib/python3.7/site-packages/uvicorn/main.py:361> # Clean up everything up to and including site-packages return _cleaner_task_str_re.sub("", s) + + +def wrap_view(view_fn, datasette): + async def asgi_view_fn(scope, receive, send): + response = await async_call_with_supported_arguments( + view_fn, + scope=scope, + receive=receive, + send=send, + request=Request(scope, receive), + datasette=datasette, + ) + if response is not None: + await response.asgi_send(send) + + return asgi_view_fn diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index d5fd232f..ab3e131c 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -60,6 +60,11 @@ def register_facet_classes(): "Register Facet subclasses" +@hookspec +def register_routes(): + "Register URL routes: return a list of (regex, view_function) pairs" + + @hookspec def actor_from_request(datasette, request): "Return an actor dictionary based on the incoming request" diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 7c1f34e0..49268638 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -842,7 +842,7 @@ def parse_metadata(content): raise BadMetadataError("Metadata is not valid JSON or YAML") -def call_with_supported_arguments(fn, **kwargs): +def _gather_arguments(fn, kwargs): parameters = inspect.signature(fn).parameters.keys() call_with = [] for parameter in parameters: @@ -853,9 +853,19 @@ def call_with_supported_arguments(fn, **kwargs): ) ) call_with.append(kwargs[parameter]) + return call_with + + +def call_with_supported_arguments(fn, **kwargs): + call_with = _gather_arguments(fn, kwargs) return fn(*call_with) +async def async_call_with_supported_arguments(fn, **kwargs): + call_with = _gather_arguments(fn, kwargs) + return await fn(*call_with) + + def actor_matches_allow(actor, allow): actor = actor or {} if allow is None: diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index bca9c9ab..349f2a0a 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -399,7 +399,7 @@ class Response: @classmethod def text(cls, body, status=200, headers=None): return cls( - body, + str(body), status=status, headers=headers, content_type="text/plain; charset=utf-8", diff --git a/docs/index.rst b/docs/index.rst index 03988c8e..5334386f 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -31,7 +31,7 @@ Contents -------- .. toctree:: - :maxdepth: 2 + :maxdepth: 3 getting_started installation diff --git a/docs/plugins.rst b/docs/plugins.rst index 73d2eabd..caca0019 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -835,6 +835,55 @@ And here is an example ``can_render`` function which returns ``True`` only if th Examples: `datasette-atom `_, `datasette-ics `_ +.. _plugin_register_routes: + +register_routes() +~~~~~~~~~~~~~~~~~ + +Register additional view functions to execute for specified URL routes. + +Return a list of ``(regex, async_view_function)`` pairs, something like this: + +.. 
code-block:: python + + from datasette.utils.asgi import Response + import html + + + async def hello_from(scope): + name = scope["url_route"]["kwargs"]["name"] + return Response.html("Hello from {}".format( + html.escape(name) + )) + + + @hookimpl + def register_routes(): + return [ + (r"^/hello-from/(?P.*)$"), hello_from) + ] + +The view functions can take a number of different optional arguments. The corresponding argument will be passed to your function depending on its named parameters - a form of dependency injection. + +The optional view function arguments are as follows: + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + +``request`` - Request object + The current HTTP :ref:`internals_request`. + +``scope`` - dictionary + The incoming ASGI scope dictionary. + +``send`` - function + The ASGI send function. + +``receive`` - function + The ASGI receive function. + +The function can either return a ``Response`` or it can return nothing and instead respond directly to the request using the ASGI ``receive`` function (for advanced uses only). + .. _plugin_register_facet_classes: register_facet_classes() @@ -901,7 +950,6 @@ The plugin hook can then be used to register the new facet class like this: def register_facet_classes(): return [SpecialFacet] - .. _plugin_asgi_wrapper: asgi_wrapper(datasette) diff --git a/tests/fixtures.py b/tests/fixtures.py index e9175b57..a51a869d 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -46,6 +46,7 @@ EXPECTED_PLUGINS = [ "prepare_connection", "prepare_jinja2_environment", "register_facet_classes", + "register_routes", "render_cell", ], }, diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 46893710..57803178 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -1,6 +1,7 @@ from datasette import hookimpl from datasette.facets import Facet from datasette.utils import path_with_added_args +from datasette.utils.asgi import asgi_send_json, Response import base64 import pint import json @@ -142,3 +143,27 @@ def permission_allowed(actor, action): return True elif action == "this_is_denied": return False + + +@hookimpl +def register_routes(): + async def one(datasette): + return Response.text( + (await datasette.get_database().execute("select 1 + 1")).first()[0] + ) + + async def two(request, scope): + name = scope["url_route"]["kwargs"]["name"] + greeting = request.args.get("greeting") + return Response.text("{} {}".format(greeting, name)) + + async def three(scope, send): + await asgi_send_json( + send, {"hello": "world"}, status=200, headers={"x-three": "1"} + ) + + return [ + (r"/one/$", one), + (r"/two/(?P.*)$", two), + (r"/three/$", three), + ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index c782b87b..c7bb4859 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -544,3 +544,18 @@ def test_actor_json(app_client): assert {"actor": {"id": "bot2", "1+1": 2}} == app_client.get( "/-/actor.json/?_bot2=1" ).json + + +@pytest.mark.parametrize( + "path,body", [("/one/", "2"), ("/two/Ray?greeting=Hail", "Hail Ray"),] +) +def test_register_routes(app_client, path, body): + response = app_client.get(path) + assert 200 == response.status + assert body == response.text + + +def test_register_routes_asgi(app_client): + response = app_client.get("/three/") + assert {"hello": "world"} == response.json + assert "1" == response.headers["x-three"] From 
db660db4632409334e646237c3dd214764729cd4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 20:32:10 -0700 Subject: [PATCH 0335/2113] Docs + unit tests for Response, closes #821 --- datasette/utils/asgi.py | 9 ++++++ docs/internals.rst | 48 ++++++++++++++++++++++++++++++++ docs/plugins.rst | 2 +- tests/test_internals_response.py | 28 +++++++++++++++++++ 4 files changed, 86 insertions(+), 1 deletion(-) create mode 100644 tests/test_internals_response.py diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 349f2a0a..9e6c82dd 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -405,6 +405,15 @@ class Response: content_type="text/plain; charset=utf-8", ) + @classmethod + def json(cls, body, status=200, headers=None): + return cls( + json.dumps(body), + status=status, + headers=headers, + content_type="application/json; charset=utf-8", + ) + @classmethod def redirect(cls, path, status=302, headers=None): headers = headers or {} diff --git a/docs/internals.rst b/docs/internals.rst index 83dbd897..b0096cfa 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -80,6 +80,54 @@ Consider the querystring ``?foo=1&foo=2&bar=3`` - with two values for ``foo`` an ``len(request.args)`` - integer Returns the number of keys. +.. _internals_response: + +Response class +~~~~~~~~~~~~~~ + +The ``Response`` class can be returned from view functions that have been registered using the :ref:`plugin_register_routes` hook. + +The ``Response()`` constructor takes the following arguments: + +``body`` - string + The body of the response. + +``status`` - integer (optional) + The HTTP status - defaults to 200. + +``headers`` - dictionary (optional) + A dictionary of extra HTTP headers, e.g. ``{"x-hello": "world"}``. + +``content_type`` - string (optional) + The content-type for the response. Defaults to ``text/plain``. + +For example: + +.. code-block:: python + + from datasette.utils.asgi import Response + + response = Response( + "This is XML", + content_type="application/xml; charset=utf-8" + ) + +The easiest way to create responses is using the ``Response.text(...)``, ``Response.html(...)``, ``Response.json(...)`` or ``Response.redirect(...)`` helper methods: + +.. code-block:: python + + from datasette.utils.asgi import Response + + html_response = Response.html("This is HTML") + json_response = Response.json({"this_is": "json"}) + text_response = Response.text("This will become utf-8 encoded text") + # Redirects are served as 302, unless you pass status=301: + redirect_response = Response.redirect("https://latest.datasette.io/") + +Each of these responses will use the correct corresponding content-type - ``text/html; charset=utf-8``, ``application/json; charset=utf-8`` or ``text/plain; charset=utf-8`` respectively. + +Each of the helper methods take optional ``status=`` and ``headers=`` arguments, documented above. + .. _internals_datasette: Datasette class diff --git a/docs/plugins.rst b/docs/plugins.rst index caca0019..465fcd52 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -882,7 +882,7 @@ The optional view function arguments are as follows: ``receive`` - function The ASGI receive function. -The function can either return a ``Response`` or it can return nothing and instead respond directly to the request using the ASGI ``receive`` function (for advanced uses only). 
+The function can either return a :ref:`internals_response` or it can return nothing and instead respond directly to the request using the ASGI ``send`` function (for advanced uses only). .. _plugin_register_facet_classes: diff --git a/tests/test_internals_response.py b/tests/test_internals_response.py new file mode 100644 index 00000000..7c11f858 --- /dev/null +++ b/tests/test_internals_response.py @@ -0,0 +1,28 @@ +from datasette.utils.asgi import Response + + +def test_response_html(): + response = Response.html("Hello from HTML") + assert 200 == response.status + assert "Hello from HTML" == response.body + assert "text/html; charset=utf-8" == response.content_type + + +def test_response_text(): + response = Response.text("Hello from text") + assert 200 == response.status + assert "Hello from text" == response.body + assert "text/plain; charset=utf-8" == response.content_type + + +def test_response_json(): + response = Response.json({"this_is": "json"}) + assert 200 == response.status + assert '{"this_is": "json"}' == response.body + assert "application/json; charset=utf-8" == response.content_type + + +def test_response_redirect(): + response = Response.redirect("/foo") + assert 302 == response.status + assert "/foo" == response.headers["Location"] From fac8e9381500fc02cec99281122ee8e0c72fabe1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 20:40:00 -0700 Subject: [PATCH 0336/2113] request.url_vars property, closes #822 --- datasette/utils/asgi.py | 4 ++++ docs/internals.rst | 3 +++ docs/plugins.rst | 4 ++-- tests/plugins/my_plugin.py | 4 ++-- tests/test_internals_request.py | 17 +++++++++++++++++ 5 files changed, 28 insertions(+), 4 deletions(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 9e6c82dd..cdd6b148 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -32,6 +32,10 @@ class Request: (self.scheme, self.host, self.path, None, self.query_string, None) ) + @property + def url_vars(self): + return (self.scope.get("url_route") or {}).get("kwargs") or {} + @property def scheme(self): return self.scope.get("scheme") or "http" diff --git a/docs/internals.rst b/docs/internals.rst index b0096cfa..df21eb09 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -42,6 +42,9 @@ The request object is passed to various plugin hooks. It represents an incoming ``.args`` - MultiParams An object representing the parsed querystring parameters, see below. +``.url_vars`` - dictionary (str -> str) + Variables extracted from the URL path, if that path was defined using a regular expression. See :ref:`plugin_register_routes`. + ``.actor`` - dictionary (str -> Any) or None The currently authenticated actor (see :ref:`actors `), or ``None`` if the request is unauthenticated. 
diff --git a/docs/plugins.rst b/docs/plugins.rst index 465fcd52..17fd64df 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -850,8 +850,8 @@ Return a list of ``(regex, async_view_function)`` pairs, something like this: import html - async def hello_from(scope): - name = scope["url_route"]["kwargs"]["name"] + async def hello_from(request): + name = request.url_vars["name"] return Response.html("Hello from {}".format( html.escape(name) )) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 57803178..a0f7441b 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -152,8 +152,8 @@ def register_routes(): (await datasette.get_database().execute("select 1 + 1")).first()[0] ) - async def two(request, scope): - name = scope["url_route"]["kwargs"]["name"] + async def two(request): + name = request.url_vars["name"] greeting = request.args.get("greeting") return Response.text("{} {}".format(greeting, name)) diff --git a/tests/test_internals_request.py b/tests/test_internals_request.py index 433b23d5..8367a693 100644 --- a/tests/test_internals_request.py +++ b/tests/test_internals_request.py @@ -44,3 +44,20 @@ def test_request_args(): assert 2 == len(request.args) with pytest.raises(KeyError): request.args["missing"] + + +def test_request_url_vars(): + scope = { + "http_version": "1.1", + "method": "POST", + "path": "/", + "raw_path": b"/", + "query_string": b"", + "scheme": "http", + "type": "http", + "headers": [[b"content-type", b"application/x-www-form-urlencoded"]], + } + assert {} == Request(scope, None).url_vars + assert {"name": "cleo"} == Request( + dict(scope, url_route={"kwargs": {"name": "cleo"}}), None + ).url_vars From 5a6a73e3190cac103906b479d56129413e5ef190 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 21:37:35 -0700 Subject: [PATCH 0337/2113] Replace os.urandom(32).hex() with secrets.token_hex(32) --- datasette/app.py | 5 +++-- docs/config.rst | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 120091f7..633ca4fe 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -8,6 +8,7 @@ import itertools import json import os import re +import secrets import sys import threading import traceback @@ -186,7 +187,7 @@ class Datasette: assert config_dir is None or isinstance( config_dir, Path ), "config_dir= should be a pathlib.Path" - self._secret = secret or os.urandom(32).hex() + self._secret = secret or secrets.token_hex(32) self.files = tuple(files) + tuple(immutables or []) if config_dir: self.files += tuple([str(p) for p in config_dir.glob("*.db")]) @@ -299,7 +300,7 @@ class Datasette: self._register_renderers() self._permission_checks = collections.deque(maxlen=200) - self._root_token = os.urandom(32).hex() + self._root_token = secrets.token_hex(32) def sign(self, value, namespace="default"): return URLSafeSerializer(self._secret, namespace).dumps(value) diff --git a/docs/config.rst b/docs/config.rst index 56b38613..ab14ea7b 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -302,7 +302,7 @@ Or:: One way to generate a secure random secret is to use Python like this:: - $ python3 -c 'import os; print(os.urandom(32).hex())' + $ python3 -c 'import secrets; print(secrets.token_hex(32))' cdb19e94283a20f9d42cca50c5a4871c0aa07392db308755d60a1a5b9bb0fa52 Plugin authors make use of this signing mechanism in their plugins using :ref:`datasette_sign` and :ref:`datasette_unsign`. 
From eb3ec279becd3b81e5fa509244711548c86f434f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 23:33:06 -0700 Subject: [PATCH 0338/2113] Test for anonymous: true, refs #825 --- tests/test_utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_utils.py b/tests/test_utils.py index 975ed0fd..4bade18b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -466,6 +466,7 @@ def test_multi_params(data, should_raise): [ ({"id": "root"}, None, True), ({"id": "root"}, {}, False), + ({"anonymous": True}, {"anonymous": True}, True), (None, None, True), (None, {}, False), (None, {"id": "root"}, False), From fec750435d405ac06cb61a5ddeda7317ef24843a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 07:01:23 -0700 Subject: [PATCH 0339/2113] Support anonymous: true in actor_matches_allow, refs #825 --- datasette/utils/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 49268638..d8cde95a 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -873,12 +873,12 @@ def actor_matches_allow(actor, allow): for key, values in allow.items(): if values == "*" and key in actor: return True - if isinstance(values, str): + if not isinstance(values, list): values = [values] actor_values = actor.get(key) if actor_values is None: return False - if isinstance(actor_values, str): + if not isinstance(actor_values, list): actor_values = [actor_values] actor_values = set(actor_values) if actor_values.intersection(values): From eefeafaa27a16af3bcb3150b4fe1ef6ee8d5c19f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 07:09:39 -0700 Subject: [PATCH 0340/2113] Removed unused import --- datasette/views/database.py | 1 - 1 file changed, 1 deletion(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index ee99bc2d..4fab2cfb 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -2,7 +2,6 @@ import os import jinja2 from datasette.utils import ( - actor_matches_allow, check_visibility, to_css_class, validate_sql_select, From fa87d16612ff671683f35ecc5f5e36af007599e4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 07:10:46 -0700 Subject: [PATCH 0341/2113] Clearer docs for actor_matches_allow --- datasette/utils/__init__.py | 3 ++- docs/authentication.rst | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index d8cde95a..5873fcaa 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -867,7 +867,8 @@ async def async_call_with_supported_arguments(fn, **kwargs): def actor_matches_allow(actor, allow): - actor = actor or {} + if actor is None: + actor = {"anonymous": True} if allow is None: return True for key, values in allow.items(): diff --git a/docs/authentication.rst b/docs/authentication.rst index f7281db4..04564886 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -239,7 +239,7 @@ To limit this ability for just one specific database, use this: actor_matches_allow() ===================== -Plugins that wish to implement the same permissions scheme as canned queries can take advantage of the ``datasette.utils.actor_matches_allow(actor, allow)`` function: +Plugins that wish to implement this same ``"allow"`` block permissions scheme can take advantage of the ``datasette.utils.actor_matches_allow(actor, allow)`` function: .. 
code-block:: python From 3aa87eeaf21083e32d9e02bd857fd44707dc4113 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 07:58:12 -0700 Subject: [PATCH 0342/2113] Documentation no loger suggests that actor["id"] is required, closes #823 --- docs/authentication.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 04564886..153466ad 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -17,7 +17,7 @@ Through plugins, Datasette can support both authenticated users (with cookies) a Every request to Datasette has an associated actor value, available in the code as ``request.actor``. This can be ``None`` for unauthenticated requests, or a JSON compatible Python dictionary for authenticated users or API agents. -The only required field in an actor is ``"id"``, which must be a string. Plugins may decide to add any other fields to the actor dictionary. +The actor dictionary can be any shape - the design of that data structure is left up to the plugins. A useful convention is to include an ``"id"`` string, as demonstrated by the "root" actor below. Plugins can use the :ref:`plugin_actor_from_request` hook to implement custom logic for authenticating an actor based on the incoming HTTP request. From 70dd14876e305ddb15263ec0687e23bef5b1ab78 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 09:04:46 -0700 Subject: [PATCH 0343/2113] Improved documentation for permissions, refs #699 --- docs/authentication.rst | 37 +++++++++++++++++++++++++++++-------- docs/sql_queries.rst | 6 ++++++ 2 files changed, 35 insertions(+), 8 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 153466ad..e26c8fc5 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -4,7 +4,7 @@ Authentication and permissions ================================ -Datasette does not require authentication by default. Any visitor to a Datasette instance can explore the full data and execute SQL queries. +Datasette does not require authentication by default. Any visitor to a Datasette instance can explore the full data and execute read-only SQL queries. Datasette's plugin system can be used to add many different styles of authentication, such as user accounts, single sign-on or API keys. @@ -49,10 +49,20 @@ The URL on the first line includes a one-use token which can be used to sign in .. _authentication_permissions: -Checking permission -=================== +Permissions +=========== -Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. This method is also used by Datasette core code itself, which allows plugins to help make decisions on which actions are allowed by implementing the :ref:`plugin_permission_allowed` plugin hook. +Datasette has an extensive permissions system built-in, which can be further extended and customized by plugins. + +The key question the permissions system answers is this: + + Is this **actor** allowed to perform this **action**, optionally against this particular **resource**? + +**Actors** are :ref:`described above `. + +An **action** is a string describing the action the actor would like to perfom. A full list is :ref:`provided below ` - examples include ``view-table`` and ``execute-sql``. + +A **resource** is the item the actor wishes to interact with - for example a specific database or table. 
Some actions, such as ``permissions-debug``, are not associated with a particular resource. .. _authentication_permissions_metadata: @@ -115,7 +125,7 @@ You can provide access to any user that has "developer" as one of their roles li } } -Note that "roles" is not a concept that is baked into Datasette - it's more of a convention that plugins can choose to implement and act on. +Note that "roles" is not a concept that is baked into Datasette - it's a convention that plugins can choose to implement and act on. If you want to provide access to any actor with a value for a specific key, use ``"*"``. For example, to spceify that a query can be accessed by any logged-in user use this: @@ -171,7 +181,7 @@ To limit access to the ``users`` table in your ``bakery.db`` database: } } -This works for SQL views as well - you can treat them as if they are tables. +This works for SQL views as well - you can list their names in the ``"tables"`` block above in the same way as regular tables. .. warning:: Restricting access to tables and views in this way will NOT prevent users from querying them using arbitrary SQL queries, `like this `__ for example. @@ -183,6 +193,8 @@ This works for SQL views as well - you can treat them as if they are tables. Controlling access to specific canned queries --------------------------------------------- +:ref:`canned_queries` allow you to configure named SQL queries in your ``metadata.json`` that can be executed by users. These queries can be set up to both read and write to the database, so controlling who can execute them can be important. + To limit access to the ``add_name`` canned query in your ``dogs.db`` database to just the :ref:`root user`: .. code-block:: json @@ -234,6 +246,15 @@ To limit this ability for just one specific database, use this: } } +.. _permissions_plugins: + +Checking permissions in plugins +=============================== + +Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. + +Datasette core performs a number of permission checks, :ref:`documented below `. Plugins can implement the :ref:`plugin_permission_allowed` plugin hook to participate in decisions about whether an actor should be able to perform a specified action. + .. _authentication_actor_matches_allow: actor_matches_allow() @@ -264,8 +285,8 @@ This is designed to help administrators and plugin authors understand exactly ho .. _permissions: -Permissions -=========== +Built-in permissions +==================== This section lists all of the permission checks that are carried out by Datasette core, along with the ``resource`` if it was passed. diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index db72deb7..a73f6bc2 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -1,3 +1,5 @@ +.. _sql: + Running SQL queries =================== @@ -22,6 +24,8 @@ using your browser back button. You can also retrieve the results of any query as JSON by adding ``.json`` to the base URL. +.. _sql_parameters: + Named parameters ---------------- @@ -51,6 +55,8 @@ statements can be used to change database settings at runtime. If you need to include the string "pragma" in a query you can do so safely using a named parameter. +.. 
_sql_views: + Views ----- From 7633b9ab249b2dce5ee0b4fcf9542c13a1703ef0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 10:01:03 -0700 Subject: [PATCH 0344/2113] unauthenticated: true method plus allow block docs, closes #825 --- datasette/utils/__init__.py | 5 +- docs/authentication.rst | 142 +++++++++++++++++++++++++----------- docs/internals.rst | 11 ++- tests/test_auth.py | 24 ------ tests/test_permissions.py | 37 ++++++++++ tests/test_utils.py | 10 ++- 6 files changed, 154 insertions(+), 75 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 5873fcaa..51373c46 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -867,10 +867,11 @@ async def async_call_with_supported_arguments(fn, **kwargs): def actor_matches_allow(actor, allow): - if actor is None: - actor = {"anonymous": True} + if actor is None and allow and allow.get("unauthenticated") is True: + return True if allow is None: return True + actor = actor or {} for key, values in allow.items(): if values == "*" and key in actor: return True diff --git a/docs/authentication.rst b/docs/authentication.rst index e26c8fc5..a9537a20 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -64,6 +64,91 @@ An **action** is a string describing the action the actor would like to perfom. A **resource** is the item the actor wishes to interact with - for example a specific database or table. Some actions, such as ``permissions-debug``, are not associated with a particular resource. +Datasette's built-in view permissions (``view-database``, ``view-table`` etc) default to *allow* - unless you :ref:`configure additional permission rules ` unauthenticated users will be allowed to access content. + +Permissions with potentially harmful effects should default to *deny*. Plugin authors should account for this when designing new plugins - for example, the `datasette-upload-csvs `__ plugin defaults to deny so that installations don't accidentally allow unauthenticated users to create new tables by uploading a CSV file. + +.. _authentication_permissions_allow: + +Defining permissions with "allow" blocks +---------------------------------------- + +The standard way to define permissions in Datasette is to use an ``"allow"`` block. This is a JSON document describing which actors are allowed to perfom a permission. + +The most basic form of allow block is this: + +.. code-block:: json + + { + "allow": { + "id": "root" + } + } + +This will match any actors with an ``"id"`` property of ``"root"`` - for example, an actor that looks like this: + +.. code-block:: json + + { + "id": "root", + "name": "Root User" + } + +Allow keys can provide a list of values. These will match any actor that has any of those values. + +.. code-block:: json + + { + "allow": { + "id": ["simon", "cleopaws"] + } + } + +This will match any actor with an ``"id"`` of either ``"simon"`` or ``"cleopaws"``. + +Actors can have properties that feature a list of values. These will be matched against the list of values in an allow block. Consider the following actor: + +.. code-block:: json + + { + "id": "simon", + "roles": ["staff", "developer"] + } + +This allow block will provide access to any actor that has ``"developer"`` as one of their roles: + +.. code-block:: json + + { + "allow": { + "roles": ["developer"] + } + } + +Note that "roles" is not a concept that is baked into Datasette - it's a convention that plugins can choose to implement and act on. 
+ +If you want to provide access to any actor with a value for a specific key, use ``"*"``. For example, to match any logged-in user specify the following: + +.. code-block:: json + + { + "allow": { + "id": "*" + } + } + +You can specify that unauthenticated actors (from anynomous HTTP requests) should be allowed access using the special ``"unauthenticated": true`` key in an allow block: + +.. code-block:: json + + { + "allow": { + "unauthenticated": true + } + } + +Allow keys act as an "or" mechanism. An actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block. + .. _authentication_permissions_metadata: Configuring permissions in metadata.json @@ -96,49 +181,6 @@ Here's how to restrict access to your entire Datasette instance to just the ``"i } } -To allow any of the actors with an ``id`` matching a specific list of values, use this: - -.. code-block:: json - - { - "allow": { - "id": ["simon", "cleopaws"] - } - } - -This works for other keys as well. Imagine an actor that looks like this: - -.. code-block:: json - - { - "id": "simon", - "roles": ["staff", "developer"] - } - -You can provide access to any user that has "developer" as one of their roles like so: - -.. code-block:: json - - { - "allow": { - "roles": ["developer"] - } - } - -Note that "roles" is not a concept that is baked into Datasette - it's a convention that plugins can choose to implement and act on. - -If you want to provide access to any actor with a value for a specific key, use ``"*"``. For example, to spceify that a query can be accessed by any logged-in user use this: - -.. code-block:: json - - { - "allow": { - "id": "*" - } - } - -These keys act as an "or" mechanism. A actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block. - .. _authentication_permissions_database: Controlling access to specific databases @@ -297,6 +339,8 @@ view-instance Top level permission - Actor is allowed to view any pages within this instance, starting at https://latest.datasette.io/ +Default *allow*. + .. _permissions_view_database: view-database @@ -307,6 +351,8 @@ Actor is allowed to view a database page, e.g. https://latest.datasette.io/fixtu ``resource`` - string The name of the database +Default *allow*. + .. _permissions_view_database_download: view-database-download @@ -317,6 +363,8 @@ Actor is allowed to download a database, e.g. https://latest.datasette.io/fixtur ``resource`` - string The name of the database +Default *allow*. + .. _permissions_view_table: view-table @@ -327,6 +375,8 @@ Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.i ``resource`` - tuple: (string, string) The name of the database, then the name of the table +Default *allow*. + .. _permissions_view_query: view-query @@ -337,6 +387,8 @@ Actor is allowed to view a :ref:`canned query ` page, e.g. https ``resource`` - tuple: (string, string) The name of the database, then the name of the canned query +Default *allow*. + .. _permissions_execute_sql: execute-sql @@ -347,9 +399,13 @@ Actor is allowed to run arbitrary SQL queries against a specific database, e.g. ``resource`` - string The name of the database +Default *allow*. + .. _permissions_permissions_debug: permissions-debug ----------------- Actor is allowed to view the ``/-/permissions`` debug page. + +Default *deny*. 
\ No newline at end of file diff --git a/docs/internals.rst b/docs/internals.rst index df21eb09..8136d8ac 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -184,11 +184,16 @@ await .permission_allowed(actor, action, resource=None, default=False) ``resource`` - string, optional The resource, e.g. the name of the table. Only some permissions apply to a resource. -Check if the given actor has permission to perform the given action on the given resource. This uses plugins that implement the :ref:`plugin_permission_allowed` plugin hook to decide if the action is allowed or not. +``default`` - optional, True or False + Should this permission check be default allow or default deny. -If none of the plugins express an opinion, the return value will be the ``default`` argument. This is deny, but you can pass ``default=True`` to default allow instead. +Check if the given actor has :ref:`permission ` to perform the given action on the given resource. -See :ref:`permissions` for a full list of permissions included in Datasette core. +Some permission checks are carried out against :ref:`rules defined in metadata.json `, while other custom permissions may be decided by plugins that implement the :ref:`plugin_permission_allowed` plugin hook. + +If neither ``metadata.json`` nor any of the plugins provide an answer to the permission query the ``default`` argument will be returned. + +See :ref:`permissions` for a full list of permission actions included in Datasette core. .. _datasette_get_database: diff --git a/tests/test_auth.py b/tests/test_auth.py index 40dc2587..0e5563a3 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,5 +1,4 @@ from .fixtures import app_client -from bs4 import BeautifulSoup as Soup def test_auth_token(app_client): @@ -20,26 +19,3 @@ def test_actor_cookie(app_client): cookie = app_client.ds.sign({"id": "test"}, "actor") response = app_client.get("/", cookies={"ds_actor": cookie}) assert {"id": "test"} == app_client.ds._last_request.scope["actor"] - - -def test_permissions_debug(app_client): - app_client.ds._permission_checks.clear() - assert 403 == app_client.get("/-/permissions").status - # With the cookie it should work - cookie = app_client.ds.sign({"id": "root"}, "actor") - response = app_client.get("/-/permissions", cookies={"ds_actor": cookie}) - # Should show one failure and one success - soup = Soup(response.body, "html.parser") - check_divs = soup.findAll("div", {"class": "check"}) - checks = [ - { - "action": div.select_one(".check-action").text, - "result": bool(div.select(".check-result-true")), - "used_default": bool(div.select(".check-used-default")), - } - for div in check_divs - ] - assert [ - {"action": "permissions-debug", "result": True, "used_default": False}, - {"action": "permissions-debug", "result": False, "used_default": True}, - ] == checks diff --git a/tests/test_permissions.py b/tests/test_permissions.py index d8c98825..c088facd 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -1,4 +1,5 @@ from .fixtures import app_client, assert_permissions_checked, make_app_client +from bs4 import BeautifulSoup as Soup import pytest @@ -283,3 +284,39 @@ def test_permissions_checked(app_client, path, permissions): response = app_client.get(path) assert response.status in (200, 403) assert_permissions_checked(app_client.ds, permissions) + + +def test_permissions_debug(app_client): + app_client.ds._permission_checks.clear() + assert 403 == app_client.get("/-/permissions").status + # With the cookie it should work + cookie = 
app_client.ds.sign({"id": "root"}, "actor") + response = app_client.get("/-/permissions", cookies={"ds_actor": cookie}) + # Should show one failure and one success + soup = Soup(response.body, "html.parser") + check_divs = soup.findAll("div", {"class": "check"}) + checks = [ + { + "action": div.select_one(".check-action").text, + "result": bool(div.select(".check-result-true")), + "used_default": bool(div.select(".check-used-default")), + } + for div in check_divs + ] + assert [ + {"action": "permissions-debug", "result": True, "used_default": False}, + {"action": "permissions-debug", "result": False, "used_default": True}, + ] == checks + + +@pytest.mark.parametrize("allow,expected", [ + ({"id": "root"}, 403), + ({"id": "root", "unauthenticated": True}, 200), +]) +def test_allow_unauthenticated(allow, expected): + with make_app_client( + metadata={ + "allow": allow + } + ) as client: + assert expected == client.get("/").status diff --git a/tests/test_utils.py b/tests/test_utils.py index 4bade18b..0ffe8ae6 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -464,12 +464,16 @@ def test_multi_params(data, should_raise): @pytest.mark.parametrize( "actor,allow,expected", [ - ({"id": "root"}, None, True), - ({"id": "root"}, {}, False), - ({"anonymous": True}, {"anonymous": True}, True), (None, None, True), (None, {}, False), (None, {"id": "root"}, False), + ({"id": "root"}, None, True), + ({"id": "root"}, {}, False), + ({"id": "simon", "staff": True}, {"staff": True}, True), + ({"id": "simon", "staff": False}, {"staff": True}, False), + # Special case for "unauthenticated": true + (None, {"unauthenticated": True}, True), + (None, {"unauthenticated": False}, False), # Special "*" value for any key: ({"id": "root"}, {"id": "*"}, True), ({}, {"id": "*"}, False), From 5ef3b7b0c9b9e318af711bbd03e84af2abffdc29 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 12:25:44 -0700 Subject: [PATCH 0345/2113] Applied Black Refs #825 --- tests/test_permissions.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index c088facd..477b8160 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -309,14 +309,10 @@ def test_permissions_debug(app_client): ] == checks -@pytest.mark.parametrize("allow,expected", [ - ({"id": "root"}, 403), - ({"id": "root", "unauthenticated": True}, 200), -]) +@pytest.mark.parametrize( + "allow,expected", + [({"id": "root"}, 403), ({"id": "root", "unauthenticated": True}, 200),], +) def test_allow_unauthenticated(allow, expected): - with make_app_client( - metadata={ - "allow": allow - } - ) as client: + with make_app_client(metadata={"allow": allow}) as client: assert expected == client.get("/").status From 56eb80a45925d804b443701e2c86315f194b5f7d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 12:32:52 -0700 Subject: [PATCH 0346/2113] Documented CSRF protection, closes #827 --- docs/internals.rst | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/docs/internals.rst b/docs/internals.rst index 8136d8ac..d92c985f 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -51,7 +51,7 @@ The request object is passed to various plugin hooks. It represents an incoming The object also has one awaitable method: ``await request.post_vars()`` - dictionary - Returns a dictionary of form variables that were submitted in the request body via ``POST``. 
+ Returns a dictionary of form variables that were submitted in the request body via ``POST``. Don't forget to read about :ref:`internals_csrf`! .. _internals_multiparams: @@ -500,3 +500,17 @@ The ``Database`` class also provides properties and methods for introspecting th } ] } + + +.. _internals_csrf: + +CSRF protection +~~~~~~~~~~~~~~~ + +Datasette uses `asgi-csrf `__ to guard against CSRF attacks on form POST submissions. Users receive a ``ds_csrftoken`` cookie which is compared against the ``csrftoken`` form field (or ``x-csrftoken`` HTTP header) for every incoming request. + +If your plugin implements a ```` anywhere you will need to include that token. You can do so with the following template snippet: + +.. code-block:: html + + From f240970b834d595947c8d27d46d1f19b9119376d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 12:57:54 -0700 Subject: [PATCH 0347/2113] Fixed tests/fixtures.py, closes #804 --- docs/contributing.rst | 13 +++++- tests/fixtures.py | 97 ++++++++++++++++++++++++------------------- 2 files changed, 65 insertions(+), 45 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index da4dc35a..9c44d177 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -70,11 +70,20 @@ You can also use the ``fixtures.py`` script to recreate the testing version of ` python tests/fixtures.py fixtures.db fixtures-metadata.json -(You may need to delete ``fixtures.db`` before running this command.) +Or to output the plugins used by the tests, run this:: + + python tests/fixtures.py fixtures.db fixtures-metadata.json fixtures-plugins + Test tables written to fixtures.db + - metadata written to fixtures-metadata.json + Wrote plugin: fixtures-plugins/register_output_renderer.py + Wrote plugin: fixtures-plugins/view_name.py + Wrote plugin: fixtures-plugins/my_plugin.py + Wrote plugin: fixtures-plugins/messages_output_renderer.py + Wrote plugin: fixtures-plugins/my_plugin_2.py Then run Datasette like this:: - datasette fixtures.db -m fixtures-metadata.json + datasette fixtures.db -m fixtures-metadata.json --plugins-dir=fixtures-plugins/ .. 
_contributing_documentation: diff --git a/tests/fixtures.py b/tests/fixtures.py index a51a869d..1eb1bb6e 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -2,6 +2,7 @@ from datasette.app import Datasette from datasette.utils import sqlite3, MultiParams from asgiref.testing import ApplicationCommunicator from asgiref.sync import async_to_sync +import click import contextlib from http.cookies import SimpleCookie import itertools @@ -813,49 +814,6 @@ INSERT INTO "searchable_fts" (rowid, text1, text2) SELECT rowid, text1, text2 FROM searchable; """ -if __name__ == "__main__": - # Can be called with data.db OR data.db metadata.json - arg_index = -1 - db_filename = sys.argv[arg_index] - metadata_filename = None - plugins_path = None - if db_filename.endswith("/"): - # It's the plugins dir - plugins_path = db_filename - arg_index -= 1 - db_filename = sys.argv[arg_index] - if db_filename.endswith(".json"): - metadata_filename = db_filename - arg_index -= 1 - db_filename = sys.argv[arg_index] - if db_filename.endswith(".db"): - conn = sqlite3.connect(db_filename) - conn.executescript(TABLES) - for sql, params in TABLE_PARAMETERIZED_SQL: - with conn: - conn.execute(sql, params) - print("Test tables written to {}".format(db_filename)) - if metadata_filename: - open(metadata_filename, "w").write(json.dumps(METADATA)) - print("- metadata written to {}".format(metadata_filename)) - if plugins_path: - path = pathlib.Path(plugins_path) - if not path.exists(): - path.mkdir() - for filename, content in ( - ("my_plugin.py", PLUGIN1), - ("my_plugin_2.py", PLUGIN2), - ): - filepath = path / filename - filepath.write_text(content) - print(" Wrote plugin: {}".format(filepath)) - else: - print( - "Usage: {} db_to_write.db [metadata_to_write.json] [plugins-dir/]".format( - sys.argv[0] - ) - ) - def assert_permissions_checked(datasette, actions): # actions is a list of "action" or (action, resource) tuples @@ -873,3 +831,56 @@ def assert_permissions_checked(datasette, actions): """.format( action, resource, json.dumps(list(datasette._permission_checks), indent=4), ) + + +@click.command() +@click.argument( + "db_filename", + default="fixtures.db", + type=click.Path(file_okay=True, dir_okay=False), +) +@click.argument("metadata", required=False) +@click.argument( + "plugins_path", type=click.Path(file_okay=False, dir_okay=True), required=False +) +@click.option( + "--recreate", + is_flag=True, + default=False, + help="Delete and recreate database if it exists", +) +def cli(db_filename, metadata, plugins_path, recreate): + "Write out the fixtures database used by Datasette's test suite" + if metadata and not metadata.endswith(".json"): + raise click.ClickException("Metadata should end with .json") + if not db_filename.endswith(".db"): + raise click.ClickException("Database file should end with .db") + if pathlib.Path(db_filename).exists(): + if not recreate: + raise click.ClickException( + "{} already exists, use --recreate to reset it".format(db_filename) + ) + else: + pathlib.Path(db_filename).unlink() + conn = sqlite3.connect(db_filename) + conn.executescript(TABLES) + for sql, params in TABLE_PARAMETERIZED_SQL: + with conn: + conn.execute(sql, params) + print("Test tables written to {}".format(db_filename)) + if metadata: + open(metadata, "w").write(json.dumps(METADATA, indent=4)) + print("- metadata written to {}".format(metadata)) + if plugins_path: + path = pathlib.Path(plugins_path) + if not path.exists(): + path.mkdir() + test_plugins = pathlib.Path(__file__).parent / "plugins" + for filepath in 
test_plugins.glob("*.py"): + newpath = path / filepath.name + newpath.write_text(filepath.open().read()) + print(" Wrote plugin: {}".format(newpath)) + + +if __name__ == "__main__": + cli() From 008e2f63c217aa066027a872ee706b07bd084857 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 15:19:37 -0700 Subject: [PATCH 0348/2113] response.set_cookie(), closes #795 --- datasette/actor_auth_cookie.py | 1 - datasette/app.py | 15 ++------- datasette/utils/asgi.py | 53 +++++++++++++++++++++++++++++--- datasette/views/special.py | 14 ++------- docs/internals.rst | 30 ++++++++++++++++++ tests/test_internals_response.py | 26 ++++++++++++++++ 6 files changed, 108 insertions(+), 31 deletions(-) diff --git a/datasette/actor_auth_cookie.py b/datasette/actor_auth_cookie.py index f3a0f306..a2aa6889 100644 --- a/datasette/actor_auth_cookie.py +++ b/datasette/actor_auth_cookie.py @@ -1,6 +1,5 @@ from datasette import hookimpl from itsdangerous import BadSignature -from http.cookies import SimpleCookie @hookimpl diff --git a/datasette/app.py b/datasette/app.py index 633ca4fe..71fa9afb 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -3,7 +3,6 @@ import asgi_csrf import collections import datetime import hashlib -from http.cookies import SimpleCookie import itertools import json import os @@ -442,19 +441,9 @@ class Datasette: def _write_messages_to_response(self, request, response): if getattr(request, "_messages", None): # Set those messages - cookie = SimpleCookie() - cookie["ds_messages"] = self.sign(request._messages, "messages") - cookie["ds_messages"]["path"] = "/" - # TODO: Co-exist with existing set-cookie headers - assert "set-cookie" not in response.headers - response.headers["set-cookie"] = cookie.output(header="").lstrip() + response.set_cookie("ds_messages", self.sign(request._messages, "messages")) elif getattr(request, "_messages_should_clear", False): - cookie = SimpleCookie() - cookie["ds_messages"] = "" - cookie["ds_messages"]["path"] = "/" - # TODO: Co-exist with existing set-cookie headers - assert "set-cookie" not in response.headers - response.headers["set-cookie"] = cookie.output(header="").lstrip() + response.set_cookie("ds_messages", "", expires=0, max_age=0) def _show_messages(self, request): if getattr(request, "_messages", None): diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index cdd6b148..5a152570 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -4,10 +4,15 @@ from mimetypes import guess_type from urllib.parse import parse_qs, urlunparse, parse_qsl from pathlib import Path from html import escape -from http.cookies import SimpleCookie +from http.cookies import SimpleCookie, Morsel import re import aiofiles +# Workaround for adding samesite support to pre 3.8 python +Morsel._reserved["samesite"] = "SameSite" +# Thanks, Starlette: +# https://github.com/encode/starlette/blob/519f575/starlette/responses.py#L17 + class NotFound(Exception): pass @@ -17,6 +22,9 @@ class Forbidden(Exception): pass +SAMESITE_VALUES = ("strict", "lax", "none") + + class Request: def __init__(self, scope, receive): self.scope = scope @@ -370,20 +378,24 @@ class Response: self.body = body self.status = status self.headers = headers or {} + self._set_cookie_headers = [] self.content_type = content_type async def asgi_send(self, send): headers = {} headers.update(self.headers) headers["content-type"] = self.content_type + raw_headers = [ + [key.encode("utf-8"), value.encode("utf-8")] + for key, value in headers.items() + ] + for set_cookie in 
self._set_cookie_headers: + raw_headers.append([b"set-cookie", set_cookie.encode("utf-8")]) await send( { "type": "http.response.start", "status": self.status, - "headers": [ - [key.encode("utf-8"), value.encode("utf-8")] - for key, value in headers.items() - ], + "headers": raw_headers, } ) body = self.body @@ -391,6 +403,37 @@ class Response: body = body.encode("utf-8") await send({"type": "http.response.body", "body": body}) + def set_cookie( + self, + key, + value="", + max_age=None, + expires=None, + path="/", + domain=None, + secure=False, + httponly=False, + samesite="lax", + ): + assert samesite in SAMESITE_VALUES, "samesite should be one of {}".format( + SAMESITE_VALUES + ) + cookie = SimpleCookie() + cookie[key] = value + for prop_name, prop_value in ( + ("max_age", max_age), + ("expires", expires), + ("path", path), + ("domain", domain), + ("samesite", samesite), + ): + if prop_value is not None: + cookie[key][prop_name.replace("_", "-")] = prop_value + for prop_name, prop_value in (("secure", secure), ("httponly", httponly)): + if prop_value: + cookie[key][prop_name] = True + self._set_cookie_headers.append(cookie.output(header="").strip()) + @classmethod def html(cls, body, status=200, headers=None): return cls( diff --git a/datasette/views/special.py b/datasette/views/special.py index 7a5fbe21..7f4284a1 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -1,7 +1,6 @@ import json from datasette.utils.asgi import Response from .base import BaseView -from http.cookies import SimpleCookie import secrets @@ -62,17 +61,8 @@ class AuthTokenView(BaseView): return Response("Root token has already been used", status=403) if secrets.compare_digest(token, self.ds._root_token): self.ds._root_token = None - cookie = SimpleCookie() - cookie["ds_actor"] = self.ds.sign({"id": "root"}, "actor") - cookie["ds_actor"]["path"] = "/" - response = Response( - body="", - status=302, - headers={ - "Location": "/", - "set-cookie": cookie.output(header="").lstrip(), - }, - ) + response = Response.redirect("/") + response.set_cookie("ds_actor", self.ds.sign({"id": "root"}, "actor")) return response else: return Response("Invalid token", status=403) diff --git a/docs/internals.rst b/docs/internals.rst index d92c985f..7978e3d7 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -131,6 +131,36 @@ Each of these responses will use the correct corresponding content-type - ``text Each of the helper methods take optional ``status=`` and ``headers=`` arguments, documented above. +.. _internals_response_set_cookie: + +Setting cookies with response.set_cookie() +------------------------------------------ + +To set cookies on the response, use the ``response.set_cookie(...)`` method. The method signature looks like this: + +.. code-block:: python + + def set_cookie( + self, + key, + value="", + max_age=None, + expires=None, + path="/", + domain=None, + secure=False, + httponly=False, + samesite="lax", + ): + +You can use this with :ref:`datasette.sign() ` to set signed cookies. Here's how you would set the ``ds_actor`` cookie for use with Datasette :ref:`authentication `: + +.. code-block:: python + + response = Response.redirect("/") + response.set_cookie("ds_actor", datasette.sign({"id": "cleopaws"}, "actor")) + return response + .. 
_internals_datasette: Datasette class diff --git a/tests/test_internals_response.py b/tests/test_internals_response.py index 7c11f858..820b20b2 100644 --- a/tests/test_internals_response.py +++ b/tests/test_internals_response.py @@ -1,4 +1,5 @@ from datasette.utils.asgi import Response +import pytest def test_response_html(): @@ -26,3 +27,28 @@ def test_response_redirect(): response = Response.redirect("/foo") assert 302 == response.status assert "/foo" == response.headers["Location"] + + +@pytest.mark.asyncio +async def test_response_set_cookie(): + events = [] + + async def send(event): + events.append(event) + + response = Response.redirect("/foo") + response.set_cookie("foo", "bar", max_age=10, httponly=True) + await response.asgi_send(send) + + assert [ + { + "type": "http.response.start", + "status": 302, + "headers": [ + [b"Location", b"/foo"], + [b"content-type", b"text/plain"], + [b"set-cookie", b"foo=bar; HttpOnly; Max-Age=10; Path=/; SameSite=lax"], + ], + }, + {"type": "http.response.body", "body": b""}, + ] == events From b5f04f42ab56be90735e1df9660e334089fbd6aa Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 15:32:24 -0700 Subject: [PATCH 0349/2113] ds_actor cookie documentation, closes #826 --- docs/authentication.rst | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index a9537a20..f511e373 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -315,8 +315,8 @@ The currently authenticated actor is made available to plugins as ``request.acto .. _PermissionsDebugView: -Permissions Debug -================= +The permissions debug tool +========================== The debug tool at ``/-/permissions`` is only available to the :ref:`authenticated root user ` (or any actor granted the ``permissions-debug`` action according to a plugin). @@ -324,6 +324,22 @@ It shows the thirty most recent permission checks that have been carried out by This is designed to help administrators and plugin authors understand exactly how permission checks are being carried out, in order to effectively configure Datasette's permission system. +.. _authentication_ds_actor: + +The ds_actor cookie +=================== + +Datasette includes a default authentication plugin which looks for a signed ``ds_actor`` cookie containing a JSON actor dictionary. This is how the :ref:`root actor ` mechanism works. + +Authentication plugins can set signed ``ds_actor`` cookies themselves like so: + +.. code-block:: python + + response = Response.redirect("/") + response.set_cookie("ds_actor", datasette.sign({"id": "cleopaws"}, "actor")) + return response + +Note that you need to pass ``"actor"`` as the namespace to :ref:`datasette_sign`. .. _permissions: From b3919d8059a519eb7709f0b4fa1561fec219bc98 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 16:03:42 -0700 Subject: [PATCH 0350/2113] Mostly complete release notes for 0.44, refs #806 --- docs/changelog.rst | 140 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 140 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 8b6272cb..e4e6057b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,146 @@ Changelog ========= +.. _v0_44: + +0.44 (2020-06-??) +----------------- + +Authentication and permissions, writable canned queries, flash messages, new plugin hooks and more. 
+
+Authentication
+~~~~~~~~~~~~~~
+
+Prior to this release the Datasette ecosystem has treated authentication as exclusively the realm of plugins, most notably through `datasette-auth-github <https://github.com/simonw/datasette-auth-github>`__.
+
+0.44 introduces :ref:`authentication` as a core Datasette concept (`#699 <https://github.com/simonw/datasette/issues/699>`__). This makes it easier for different plugins to share responsibility for authenticating requests - you might have one plugin that handles user accounts and another one that allows automated access via API keys, for example.
+
+You'll need to install plugins if you want full user accounts, but default Datasette can now authenticate a single root user with the new ``--root`` command-line option, which outputs a one-time use URL to :ref:`authenticate as a root actor <authentication_root>` (`#784 <https://github.com/simonw/datasette/issues/784>`__)::
+
+    $ datasette fixtures.db --root
+    http://127.0.0.1:8001/-/auth-token?token=5b632f8cd44b868df625f5a6e2185d88eea5b22237fd3cc8773f107cc4fd6477
+    INFO:     Started server process [14973]
+    INFO:     Waiting for application startup.
+    INFO:     Application startup complete.
+    INFO:     Uvicorn running on http://127.0.0.1:8001 (Press CTRL+C to quit)
+
+Plugins can implement new ways of authenticating users using the new :ref:`plugin_actor_from_request` hook.
+
+Permissions
+~~~~~~~~~~~
+
+Datasette also now has a built-in concept of :ref:`authentication_permissions`. The permissions system answers the following question:
+
+    Is this **actor** allowed to perform this **action**, optionally against this particular **resource**?
+
+You can use the new ``"allow"`` block syntax in ``metadata.json`` (or ``metadata.yaml``) to set required permissions at the instance, database, table or canned query level. For example, to restrict access to the ``fixtures.db`` database to the ``"root"`` user:
+
+.. code-block:: json
+
+    {
+        "databases": {
+            "fixtures": {
+                "allow": {
+                    "id": "root"
+                }
+            }
+        }
+    }
+
+See :ref:`authentication_permissions_allow` for more details.
+
+Plugins can implement their own custom permission checks using the new :ref:`plugin_permission_allowed` hook.
+
+A new debug page at ``/-/permissions`` shows recent permission checks, to help administrators and plugin authors understand exactly what checks are being performed. This tool defaults to only being available to the root user, but can be exposed to other users by plugins that respond to the ``permissions-debug`` permission. (`#788 <https://github.com/simonw/datasette/issues/788>`__)
+
+Writable canned queries
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Datasette's :ref:`canned_queries` feature lets you define SQL queries in ``metadata.json`` which can then be executed by users visiting a specific URL - https://latest.datasette.io/fixtures/neighborhood_search for example.
+
+Canned queries were previously restricted to ``SELECT``, but Datasette 0.44 introduces the ability for canned queries to execute ``INSERT`` or ``UPDATE`` queries as well, using the new ``"write": true`` property (`#800 <https://github.com/simonw/datasette/issues/800>`__):
+
+.. code-block:: json
+
+    {
+        "databases": {
+            "dogs": {
+                "queries": {
+                    "add_name": {
+                        "sql": "INSERT INTO names (name) VALUES (:name)",
+                        "write": true
+                    }
+                }
+            }
+        }
+    }
+
+See :ref:`canned_queries_writable` for more details.
+
+Flash messages
+~~~~~~~~~~~~~~
+
+Writable canned queries needed a mechanism to let the user know that the query has been successfully executed. The new flash messaging system (`#790 <https://github.com/simonw/datasette/issues/790>`__) allows messages to persist in signed cookies which are then displayed to the user on the next page that they visit. Plugins can use this mechanism to display their own messages, see :ref:`datasette_add_message` for details.
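+
+For example, here is a minimal sketch of a plugin that records a message and then redirects - this assumes the ``datasette.add_message()`` method described in :ref:`datasette_add_message` and the ``register_routes()`` hook covered below; the ``/-/set-message`` path is purely hypothetical:
+
+.. code-block:: python
+
+    from datasette import hookimpl
+    from datasette.utils.asgi import Response
+
+
+    async def set_message(datasette, request):
+        # Record a message in the signed ds_messages cookie - it will be
+        # displayed to the user at the top of the next page they visit
+        datasette.add_message(request, "Operation completed!")
+        return Response.redirect("/")
+
+
+    @hookimpl
+    def register_routes():
+        # Hypothetical route, for demonstration purposes only
+        return [(r"^/-/set-message$", set_message)]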
+
+You can try out the new messages using the ``/-/messages`` debug tool, for example at https://latest.datasette.io/-/messages
+
+Signed values and secrets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Both flash messages and user authentication needed a way to sign values and set signed cookies. Two new methods are now available for plugins to take advantage of this mechanism: :ref:`datasette_sign` and :ref:`datasette_unsign`.
+
+Datasette will generate a secret automatically when it starts up, but to avoid resetting the secret (and hence invalidating any cookies) every time the server restarts you should set your own secret. You can pass a secret to Datasette using the new ``--secret`` option or with a ``DATASETTE_SECRET`` environment variable. See :ref:`config_secret` for more details.
+
+Plugins can now sign values and verify their signatures using the :ref:`datasette.sign() <datasette_sign>` and :ref:`datasette.unsign() <datasette_unsign>` methods.
+
+CSRF protection
+~~~~~~~~~~~~~~~
+
+Since writable canned queries are built using POST forms, Datasette now ships with :ref:`internals_csrf` (`#798 <https://github.com/simonw/datasette/issues/798>`__). This applies automatically to any POST request, which means plugins need to include a ``csrftoken`` in any POST forms that they render. They can do that like so:
+
+.. code-block:: html
+
+    <input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
+
+register_routes() plugin hooks
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Plugins can now register new views and routes via the :ref:`plugin_register_routes` plugin hook (`#819 <https://github.com/simonw/datasette/issues/819>`__). View functions can be defined that accept any of the current ``datasette`` object, the current ``request``, or the ASGI ``scope``, ``send`` and ``receive`` objects.
+
+Smaller changes
+~~~~~~~~~~~~~~~
+
+- New internals documentation for :ref:`internals_request` and :ref:`internals_response`. (`#706 <https://github.com/simonw/datasette/issues/706>`__)
+- ``request.url`` now respects the ``force_https_urls`` config setting. (`#781 <https://github.com/simonw/datasette/issues/781>`__)
+- ``request.args.getlist()`` returns ``[]`` if missing. Removed ``request.raw_args`` entirely. (`#774 <https://github.com/simonw/datasette/issues/774>`__)
+- New :ref:`datasette.get_database() ` method.
+- Added ``_`` prefix to many private, undocumented methods of the Datasette class. (`#576 <https://github.com/simonw/datasette/issues/576>`__)
+- Removed the ``db.get_outbound_foreign_keys()`` method which duplicated the behaviour of ``db.foreign_keys_for_table()``.
+- New :ref:`await datasette.permission_allowed() ` method.
+- ``/-/actor`` debugging endpoint for viewing the currently authenticated actor.
+- New ``request.cookies`` property.
+- ``/-/plugins`` endpoint now shows a list of hooks implemented by each plugin, e.g. https://latest.datasette.io/-/plugins?all=1
+- ``request.post_vars()`` method no longer discards empty values.
+- New "params" canned query key for explicitly setting named parameters, see :ref:`canned_queries_named_parameters`. (`#797 <https://github.com/simonw/datasette/issues/797>`__)
+- ``request.args`` is now a :ref:`MultiParams ` object.
+- Fixed a bug with the ``datasette plugins`` command. (`#802 <https://github.com/simonw/datasette/issues/802>`__)
+- Nicer pattern for using ``make_app_client()`` in tests. (`#395 <https://github.com/simonw/datasette/issues/395>`__)
+- New ``request.actor`` property.
+- Fixed broken CSS on nested 404 pages. (`#777 <https://github.com/simonw/datasette/issues/777>`__)
+- New ``request.url_vars`` property. (`#822 <https://github.com/simonw/datasette/issues/822>`__)
+- Fixed a bug with the ``python tests/fixtures.py`` command for outputting Datasette's testing fixtures database and plugins. (`#804 <https://github.com/simonw/datasette/issues/804>`__)
+
+The road to Datasette 1.0
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+I've assembled a `milestone for Datasette 1.0 `__.
The focus of the 1.0 release will be the following:
+
+- Signify confidence in the quality/stability of Datasette
+- Give plugin authors confidence that their plugins will work for the whole 1.x release cycle
+- Provide the same confidence to developers building against Datasette JSON APIs
+
+If you have thoughts about what you would like to see for Datasette 1.0 you can join `the conversation on issue #519 <https://github.com/simonw/datasette/issues/519>`__.
+
 .. _v0_43:

 0.43 (2020-05-28)
From d94fc39e33b5eccae853e62f54bd8cc8e74688ff Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 9 Jun 2020 16:43:58 -0700
Subject: [PATCH 0351/2113] Crafty JavaScript trick for generating commit
 references

---
 docs/contributing.rst | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/docs/contributing.rst b/docs/contributing.rst
index 9c44d177..6562afc8 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -156,6 +156,18 @@ To release a new version, first create a commit that updates :ref:`the changelog

 Referencing the issues that are part of the release in the commit message ensures the name of the release shows up on those issue pages, e.g. `here `__.

+You can generate the list of issue references for a specific release by pasting the following into the browser devtools while looking at the :ref:`changelog` page (replace ``v0-44`` with the most recent version):
+
+.. code-block:: javascript
+
+    [
+        ...new Set(
+            Array.from(
+                document.getElementById("v0-44").querySelectorAll("a[href*=issues]")
+            ).map((a) => "#" + a.href.split("/issues/")[1])
+        ),
+    ].sort().join(", ");
+
 For non-bugfix releases you may want to update the news section of ``README.md`` as part of the same commit.

 To tag and push the release, run the following::
From f3951539f1750698976359411e19c1ccb79210ed Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 9 Jun 2020 18:19:11 -0700
Subject: [PATCH 0352/2113] Hopefully fix horizontal scroll with changelog on
 mobile

---
 docs/changelog.rst | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/docs/changelog.rst b/docs/changelog.rst
index e4e6057b..911fb1b6 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -1051,9 +1051,7 @@ request all rows where that column is less than 50 meters or more than 20 feet f
 - Fix SQLite error when loading rows with no incoming FKs. [Russ Garrett]

-  This fixes ``ERROR: conn=, sql
-  = 'select ', params = {'id': '1'}`` caused by an invalid query when
-  loading incoming FKs.
+  This fixes an error caused by an invalid query when loading incoming FKs.

   The error was ignored due to async but it still got printed to the console.
From d828abaddec0dce3ec4b4eeddc3a74384e52cf34 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 9 Jun 2020 21:20:07 -0700
Subject: [PATCH 0353/2113] Fix horizontal scrollbar on changelog, refs #828

---
 docs/_static/css/custom.css | 3 +++
 docs/conf.py                | 5 +++++
 2 files changed, 8 insertions(+)
 create mode 100644 docs/_static/css/custom.css

diff --git a/docs/_static/css/custom.css b/docs/_static/css/custom.css
new file mode 100644
index 00000000..d7c2f164
--- /dev/null
+++ b/docs/_static/css/custom.css
@@ -0,0 +1,3 @@
+a.external {
+    overflow-wrap: anywhere;
+}
diff --git a/docs/conf.py b/docs/conf.py
index 5e0bb328..b273afca 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -99,6 +99,11 @@ html_theme = "sphinx_rtd_theme"
 # so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"] +html_css_files = [ + "css/custom.css", +] + + # Custom sidebar templates, must be a dictionary that maps document names # to template names. # From 57e812d5de9663a3c177e0344f4d1e552a74d484 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 10 Jun 2020 12:39:54 -0700 Subject: [PATCH 0354/2113] ds_author cookie can now expire, closes #829 Refs https://github.com/simonw/datasette-auth-github/issues/62#issuecomment-642152076 --- datasette/actor_auth_cookie.py | 13 ++++++++- datasette/views/special.py | 4 ++- docs/authentication.rst | 48 ++++++++++++++++++++++++++++++++-- docs/internals.rst | 4 +-- setup.py | 1 + tests/fixtures.py | 3 +++ tests/test_auth.py | 21 +++++++++++++-- tests/test_canned_write.py | 6 ++--- tests/test_permissions.py | 20 +++++++------- 9 files changed, 99 insertions(+), 21 deletions(-) diff --git a/datasette/actor_auth_cookie.py b/datasette/actor_auth_cookie.py index a2aa6889..15ecd331 100644 --- a/datasette/actor_auth_cookie.py +++ b/datasette/actor_auth_cookie.py @@ -1,5 +1,7 @@ from datasette import hookimpl from itsdangerous import BadSignature +import baseconv +import time @hookimpl @@ -7,6 +9,15 @@ def actor_from_request(datasette, request): if "ds_actor" not in request.cookies: return None try: - return datasette.unsign(request.cookies["ds_actor"], "actor") + decoded = datasette.unsign(request.cookies["ds_actor"], "actor") + # If it has "e" and "a" keys process the "e" expiry + if not isinstance(decoded, dict) or "a" not in decoded: + return None + expires_at = decoded.get("e") + if expires_at: + timestamp = int(baseconv.base62.decode(expires_at)) + if time.time() > timestamp: + return None + return decoded["a"] except BadSignature: return None diff --git a/datasette/views/special.py b/datasette/views/special.py index 7f4284a1..dc6a25dc 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -62,7 +62,9 @@ class AuthTokenView(BaseView): if secrets.compare_digest(token, self.ds._root_token): self.ds._root_token = None response = Response.redirect("/") - response.set_cookie("ds_actor", self.ds.sign({"id": "root"}, "actor")) + response.set_cookie( + "ds_actor", self.ds.sign({"a": {"id": "root"}}, "actor") + ) return response else: return Response("Invalid token", status=403) diff --git a/docs/authentication.rst b/docs/authentication.rst index f511e373..9b66132a 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -336,11 +336,55 @@ Authentication plugins can set signed ``ds_actor`` cookies themselves like so: .. code-block:: python response = Response.redirect("/") - response.set_cookie("ds_actor", datasette.sign({"id": "cleopaws"}, "actor")) - return response + response.set_cookie("ds_actor", datasette.sign({ + "a": { + "id": "cleopaws" + } + }, "actor")) Note that you need to pass ``"actor"`` as the namespace to :ref:`datasette_sign`. +The shape of data encoded in the cookie is as follows:: + + { + "a": {... actor ...} + } + +.. _authentication_ds_actor_expiry: + +Including an expiry time +------------------------ + +``ds_actor`` cookies can optionally include a signed expiry timestamp, after which the cookies will no longer be valid. Authentication plugins may chose to use this mechanism to limit the lifetime of the cookie. For example, if a plugin implements single-sign-on against another source it may decide to set short-lived cookies so that if the user is removed from the SSO system their existing Datasette cookies will stop working shortly afterwards. 
+ +To include an expiry, add a ``"e"`` key to the cookie value containing a `base62-encoded integer `__ representing the timestamp when the cookie should expire. For example, here's how to set a cookie that expires after 24 hours: + +.. code-block:: python + + import time + import baseconv + + expires_at = int(time.time()) + (24 * 60 * 60) + + response = Response.redirect("/") + response.set_cookie("ds_actor", datasette.sign({ + "a": { + "id": "cleopaws" + }, + "e": baseconv.base62.encode(expires_at), + }, "actor")) + +The resulting cookie will encode data that looks something like this: + +.. code-block:: json + + { + "a": { + "id": "cleopaws" + }, + "e": "1jjSji" + } + .. _permissions: Built-in permissions diff --git a/docs/internals.rst b/docs/internals.rst index 7978e3d7..d75544e1 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -153,12 +153,12 @@ To set cookies on the response, use the ``response.set_cookie(...)`` method. The samesite="lax", ): -You can use this with :ref:`datasette.sign() ` to set signed cookies. Here's how you would set the ``ds_actor`` cookie for use with Datasette :ref:`authentication `: +You can use this with :ref:`datasette.sign() ` to set signed cookies. Here's how you would set the :ref:`ds_actor cookie ` for use with Datasette :ref:`authentication `: .. code-block:: python response = Response.redirect("/") - response.set_cookie("ds_actor", datasette.sign({"id": "cleopaws"}, "actor")) + response.set_cookie("ds_actor", datasette.sign({"a": {"id": "cleopaws"}}, "actor")) return response .. _internals_datasette: diff --git a/setup.py b/setup.py index 678a022f..45af0253 100644 --- a/setup.py +++ b/setup.py @@ -57,6 +57,7 @@ setup( "PyYAML~=5.3", "mergedeep>=1.1.1,<1.4.0", "itsdangerous~=1.1", + "python-baseconv==1.2.2", ], entry_points=""" [console_scripts] diff --git a/tests/fixtures.py b/tests/fixtures.py index 1eb1bb6e..a846999b 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -109,6 +109,9 @@ class TestClient: def __init__(self, asgi_app): self.asgi_app = asgi_app + def actor_cookie(self, actor): + return self.ds.sign({"a": actor}, "actor") + @async_to_sync async def get( self, path, allow_redirects=True, redirect_count=0, method="GET", cookies=None diff --git a/tests/test_auth.py b/tests/test_auth.py index 0e5563a3..5e847445 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,4 +1,7 @@ from .fixtures import app_client +import baseconv +import pytest +import time def test_auth_token(app_client): @@ -8,7 +11,9 @@ def test_auth_token(app_client): response = app_client.get(path, allow_redirects=False,) assert 302 == response.status assert "/" == response.headers["Location"] - assert {"id": "root"} == app_client.ds.unsign(response.cookies["ds_actor"], "actor") + assert {"a": {"id": "root"}} == app_client.ds.unsign( + response.cookies["ds_actor"], "actor" + ) # Check that a second with same token fails assert app_client.ds._root_token is None assert 403 == app_client.get(path, allow_redirects=False,).status @@ -16,6 +21,18 @@ def test_auth_token(app_client): def test_actor_cookie(app_client): "A valid actor cookie sets request.scope['actor']" - cookie = app_client.ds.sign({"id": "test"}, "actor") + cookie = app_client.actor_cookie({"id": "test"}) response = app_client.get("/", cookies={"ds_actor": cookie}) assert {"id": "test"} == app_client.ds._last_request.scope["actor"] + + +@pytest.mark.parametrize( + "offset,expected", [((24 * 60 * 60), {"id": "test"}), (-(24 * 60 * 60), None),] +) +def test_actor_cookie_that_expires(app_client, 
offset, expected): + expires_at = int(time.time()) + offset + cookie = app_client.ds.sign( + {"a": {"id": "test"}, "e": baseconv.base62.encode(expires_at)}, "actor" + ) + response = app_client.get("/", cookies={"ds_actor": cookie}) + assert expected == app_client.ds._last_request.scope["actor"] diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index dc3fba3f..4257806e 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -55,7 +55,7 @@ def test_custom_success_message(canned_write_client): response = canned_write_client.post( "/data/delete_name", {"rowid": 1}, - cookies={"ds_actor": canned_write_client.ds.sign({"id": "root"}, "actor")}, + cookies={"ds_actor": canned_write_client.actor_cookie({"id": "root"})}, allow_redirects=False, csrftoken_from=True, ) @@ -116,7 +116,7 @@ def test_canned_query_permissions_on_database_page(canned_write_client): # With auth shows four response = canned_write_client.get( "/data.json", - cookies={"ds_actor": canned_write_client.ds.sign({"id": "root"}, "actor")}, + cookies={"ds_actor": canned_write_client.actor_cookie({"id": "root"})}, ) assert 200 == response.status assert [ @@ -132,6 +132,6 @@ def test_canned_query_permissions_on_database_page(canned_write_client): def test_canned_query_permissions(canned_write_client): assert 403 == canned_write_client.get("/data/delete_name").status assert 200 == canned_write_client.get("/data/update_name").status - cookies = {"ds_actor": canned_write_client.ds.sign({"id": "root"}, "actor")} + cookies = {"ds_actor": canned_write_client.actor_cookie({"id": "root"})} assert 200 == canned_write_client.get("/data/delete_name", cookies=cookies).status assert 200 == canned_write_client.get("/data/update_name", cookies=cookies).status diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 477b8160..1be9529a 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -21,7 +21,7 @@ def test_view_instance(allow, expected_anon, expected_auth): # Should be no padlock assert "
<h1>Datasette 🔒</h1>
    " not in anon_response.text auth_response = client.get( - path, cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + path, cookies={"ds_actor": client.actor_cookie({"id": "root"})}, ) assert expected_auth == auth_response.status # Check for the padlock @@ -48,7 +48,7 @@ def test_view_database(allow, expected_anon, expected_auth): # Should be no padlock assert ">fixtures 🔒" not in anon_response.text auth_response = client.get( - path, cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + path, cookies={"ds_actor": client.actor_cookie({"id": "root"})}, ) assert expected_auth == auth_response.status if ( @@ -69,7 +69,7 @@ def test_database_list_respects_view_database(): assert 'data' in anon_response.text assert 'fixtures' not in anon_response.text auth_response = client.get( - "/", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + "/", cookies={"ds_actor": client.actor_cookie({"id": "root"})}, ) assert 'data' in auth_response.text assert 'fixtures 🔒' in auth_response.text @@ -100,7 +100,7 @@ def test_database_list_respects_view_table(): for html_fragment in html_fragments: assert html_fragment not in anon_response_text auth_response_text = client.get( - "/", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + "/", cookies={"ds_actor": client.actor_cookie({"id": "root"})}, ).text for html_fragment in html_fragments: assert html_fragment in auth_response_text @@ -127,7 +127,7 @@ def test_view_table(allow, expected_anon, expected_auth): assert ">compound_three_primary_keys 🔒" not in anon_response.text auth_response = client.get( "/fixtures/compound_three_primary_keys", - cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + cookies={"ds_actor": client.actor_cookie({"id": "root"})}, ) assert expected_auth == auth_response.status if allow and expected_anon == 403 and expected_auth == 200: @@ -156,7 +156,7 @@ def test_table_list_respects_view_table(): for html_fragment in html_fragments: assert html_fragment not in anon_response.text auth_response = client.get( - "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} + "/fixtures", cookies={"ds_actor": client.actor_cookie({"id": "root"})} ) for html_fragment in html_fragments: assert html_fragment in auth_response.text @@ -180,7 +180,7 @@ def test_view_query(allow, expected_anon, expected_auth): # Should be no padlock assert ">fixtures 🔒" not in anon_response.text auth_response = client.get( - "/fixtures/q", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} + "/fixtures/q", cookies={"ds_actor": client.actor_cookie({"id": "root"})} ) assert expected_auth == auth_response.status if allow and expected_anon == 403 and expected_auth == 200: @@ -206,7 +206,7 @@ def test_execute_sql(metadata): assert 403 == client.get("/fixtures/facet_cities?_where=id=3").status # But for logged in user all of these should work: - cookies = {"ds_actor": client.ds.sign({"id": "root"}, "actor")} + cookies = {"ds_actor": client.actor_cookie({"id": "root"})} response_text = client.get("/fixtures", cookies=cookies).text assert form_fragment in response_text assert 200 == client.get("/fixtures?sql=select+1", cookies=cookies).status @@ -231,7 +231,7 @@ def test_query_list_respects_view_query(): assert html_fragment not in anon_response.text assert '"/fixtures/q"' not in anon_response.text auth_response = client.get( - "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} + "/fixtures", cookies={"ds_actor": client.actor_cookie({"id": "root"})} ) assert 
html_fragment in auth_response.text @@ -290,7 +290,7 @@ def test_permissions_debug(app_client): app_client.ds._permission_checks.clear() assert 403 == app_client.get("/-/permissions").status # With the cookie it should work - cookie = app_client.ds.sign({"id": "root"}, "actor") + cookie = app_client.actor_cookie({"id": "root"}) response = app_client.get("/-/permissions", cookies={"ds_actor": cookie}) # Should show one failure and one success soup = Soup(response.body, "html.parser") From 9f236c4c00689a022fd1d508f2b809ee2305927f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 10 Jun 2020 13:06:46 -0700 Subject: [PATCH 0355/2113] Warn that register_facet_classes may change, refs #830 Also documented policy that plugin hooks should not be shipped without a real example. Refs #818 --- docs/contributing.rst | 1 + docs/plugins.rst | 3 +++ 2 files changed, 4 insertions(+) diff --git a/docs/contributing.rst b/docs/contributing.rst index 6562afc8..ba52839c 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -12,6 +12,7 @@ General guidelines * **master should always be releasable**. Incomplete features should live in branches. This ensures that any small bug fixes can be quickly released. * **The ideal commit** should bundle together the implementation, unit tests and associated documentation updates. The commit message should link to an associated issue. +* **New plugin hooks** should only be shipped if accompanied by a separate release of a non-demo plugin that uses them. .. _devenvironment: diff --git a/docs/plugins.rst b/docs/plugins.rst index 17fd64df..a28092a3 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -891,6 +891,9 @@ register_facet_classes() Return a list of additional Facet subclasses to be registered. +.. warning:: + The design of this plugin hook is unstable and may change. See `issue 830 `__. + Each Facet subclass implements a new type of facet operation. The class should look like this: .. code-block:: python From 198545733b7a34d7b36ab6510ed30fb7687bcc7e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 10 Jun 2020 16:56:53 -0700 Subject: [PATCH 0356/2113] Document that "allow": {} denies all https://github.com/simonw/datasette/issues/831#issuecomment-642324847 --- docs/authentication.rst | 19 +++++++++++++++++++ tests/test_utils.py | 11 +++++++---- 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 9b66132a..0da5a38b 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -94,6 +94,14 @@ This will match any actors with an ``"id"`` property of ``"root"`` - for example "name": "Root User" } +An allow block can specify "no-one is allowed to do this" using an empty ``{}``: + +.. code-block:: json + + { + "allow": {} + } + Allow keys can provide a list of values. These will match any actor that has any of those values. .. code-block:: json @@ -181,6 +189,17 @@ Here's how to restrict access to your entire Datasette instance to just the ``"i } } +To deny access to all users, you can use ``"allow": {}``: + +.. code-block:: json + + { + "title": "My entirely inaccessible instance", + "allow": {} + } + +One reason to do this is if you are using a Datasette plugin - such as `datasette-permissions-sql `__ - to control permissions instead. + .. 
_authentication_permissions_database: Controlling access to specific databases diff --git a/tests/test_utils.py b/tests/test_utils.py index 0ffe8ae6..b490953f 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -464,16 +464,19 @@ def test_multi_params(data, should_raise): @pytest.mark.parametrize( "actor,allow,expected", [ + # Default is to allow: (None, None, True), + # {} means deny-all: (None, {}, False), - (None, {"id": "root"}, False), - ({"id": "root"}, None, True), ({"id": "root"}, {}, False), - ({"id": "simon", "staff": True}, {"staff": True}, True), - ({"id": "simon", "staff": False}, {"staff": True}, False), # Special case for "unauthenticated": true (None, {"unauthenticated": True}, True), (None, {"unauthenticated": False}, False), + # Match on just one property: + (None, {"id": "root"}, False), + ({"id": "root"}, None, True), + ({"id": "simon", "staff": True}, {"staff": True}, True), + ({"id": "simon", "staff": False}, {"staff": True}, False), # Special "*" value for any key: ({"id": "root"}, {"id": "*"}, True), ({}, {"id": "*"}, False), From ce4958018ede00fbdadf0c37a99889b6901bfb9b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 10 Jun 2020 17:10:28 -0700 Subject: [PATCH 0357/2113] Clarify that view-query also lets you execute writable queries --- docs/authentication.rst | 2 +- docs/sql_queries.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 0da5a38b..6a526f34 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -461,7 +461,7 @@ Default *allow*. view-query ---------- -Actor is allowed to view a :ref:`canned query ` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size +Actor is allowed to view (and execute) a :ref:`canned query ` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size - this includes executing :ref:`canned_queries_writable`. ``resource`` - tuple: (string, string) The name of the database, then the name of the canned query diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index a73f6bc2..6cc32da1 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -223,7 +223,7 @@ Writable canned queries Canned queries by default are read-only. You can use the ``"write": true`` key to indicate that a canned query can write to the database. -See :ref:`authentication_permissions_metadata` for details on how to add permission checks to canned queries, using the ``"allow"`` key. +See :ref:`authentication_permissions_query` for details on how to add permission checks to canned queries, using the ``"allow"`` key. .. 
code-block:: json From 371170eee8d1659437e42c8ee267cb4b2abcffb5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 08:44:44 -0700 Subject: [PATCH 0358/2113] publish heroku now deploys with Python 3.8.3 --- datasette/publish/heroku.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index 4db81d8e..7adf9d92 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -167,7 +167,7 @@ def temporary_heroku_directory( if metadata_content: open("metadata.json", "w").write(json.dumps(metadata_content, indent=2)) - open("runtime.txt", "w").write("python-3.8.0") + open("runtime.txt", "w").write("python-3.8.3") if branch: install = [ From 98632f0a874b7b9dac6abf0abb9fdb7e2839a4d3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 09:02:03 -0700 Subject: [PATCH 0359/2113] --secret command for datasette publish Closes #787 --- datasette/cli.py | 28 +++++++++++++++--------- datasette/publish/cloudrun.py | 2 ++ datasette/publish/common.py | 7 ++++++ datasette/publish/heroku.py | 3 +++ datasette/utils/__init__.py | 7 +++++- docs/datasette-package-help.txt | 3 +++ docs/datasette-publish-cloudrun-help.txt | 3 +++ docs/datasette-publish-heroku-help.txt | 3 +++ docs/plugins.rst | 1 + tests/test_package.py | 8 ++++--- tests/test_publish_cloudrun.py | 3 +++ tests/test_utils.py | 4 ++++ 12 files changed, 58 insertions(+), 14 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 2e3c8e36..ff9a2d5c 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -165,6 +165,12 @@ def plugins(all, plugins_dir): ) @click.option("--spatialite", is_flag=True, help="Enable SpatialLite extension") @click.option("--version-note", help="Additional note to show on /-/versions") +@click.option( + "--secret", + help="Secret used for signing secure values, such as signed cookies", + envvar="DATASETTE_PUBLISH_SECRET", + default=lambda: os.urandom(32).hex(), +) @click.option( "-p", "--port", default=8001, help="Port to run the server on, defaults to 8001", ) @@ -187,6 +193,7 @@ def package( install, spatialite, version_note, + secret, port, **extra_metadata ): @@ -203,16 +210,17 @@ def package( with temporary_docker_directory( files, "datasette", - metadata, - extra_options, - branch, - template_dir, - plugins_dir, - static, - install, - spatialite, - version_note, - extra_metadata, + metadata=metadata, + extra_options=extra_options, + branch=branch, + template_dir=template_dir, + plugins_dir=plugins_dir, + static=static, + install=install, + spatialite=spatialite, + version_note=version_note, + secret=secret, + extra_metadata=extra_metadata, port=port, ): args = ["docker", "build"] diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 8271209a..8f99dc2e 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -47,6 +47,7 @@ def publish_subcommand(publish): install, plugin_secret, version_note, + secret, title, license, license_url, @@ -120,6 +121,7 @@ def publish_subcommand(publish): install, spatialite, version_note, + secret, extra_metadata, environment_variables, ): diff --git a/datasette/publish/common.py b/datasette/publish/common.py index 2911029d..49a4798e 100644 --- a/datasette/publish/common.py +++ b/datasette/publish/common.py @@ -1,5 +1,6 @@ from ..utils import StaticMount import click +import os import shutil import sys @@ -52,6 +53,12 @@ def add_common_publish_arguments_and_options(subcommand): click.option( "--version-note", 
help="Additional note to show on /-/versions" ), + click.option( + "--secret", + help="Secret used for signing secure values, such as signed cookies", + envvar="DATASETTE_PUBLISH_SECRET", + default=lambda: os.urandom(32).hex(), + ), click.option("--title", help="Title for metadata"), click.option("--license", help="License label for metadata"), click.option("--license_url", help="License URL for metadata"), diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index 7adf9d92..6cda68da 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -35,6 +35,7 @@ def publish_subcommand(publish): install, plugin_secret, version_note, + secret, title, license, license_url, @@ -100,6 +101,7 @@ def publish_subcommand(publish): static, install, version_note, + secret, extra_metadata, ): app_name = None @@ -144,6 +146,7 @@ def temporary_heroku_directory( static, install, version_note, + secret, extra_metadata=None, ): extra_metadata = extra_metadata or {} diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 51373c46..5090f67e 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -278,10 +278,13 @@ def make_dockerfile( install, spatialite, version_note, + secret, environment_variables=None, port=8001, ): cmd = ["datasette", "serve", "--host", "0.0.0.0"] + environment_variables = environment_variables or {} + environment_variables["DATASETTE_SECRET"] = secret for filename in files: cmd.extend(["-i", filename]) cmd.extend(["--cors", "--inspect-file", "inspect-data.json"]) @@ -324,7 +327,7 @@ CMD {cmd}""".format( environment_variables="\n".join( [ "ENV {} '{}'".format(key, value) - for key, value in (environment_variables or {}).items() + for key, value in environment_variables.items() ] ), files=" ".join(files), @@ -348,6 +351,7 @@ def temporary_docker_directory( install, spatialite, version_note, + secret, extra_metadata=None, environment_variables=None, port=8001, @@ -381,6 +385,7 @@ def temporary_docker_directory( install, spatialite, version_note, + secret, environment_variables, port=port, ) diff --git a/docs/datasette-package-help.txt b/docs/datasette-package-help.txt index 326b66cb..1b14f908 100644 --- a/docs/datasette-package-help.txt +++ b/docs/datasette-package-help.txt @@ -17,6 +17,9 @@ Options: --install TEXT Additional packages (e.g. 
plugins) to install --spatialite Enable SpatialLite extension --version-note TEXT Additional note to show on /-/versions + --secret TEXT Secret used for signing secure values, such as signed + cookies + -p, --port INTEGER Port to run the server on, defaults to 8001 --title TEXT Title for metadata --license TEXT License label for metadata diff --git a/docs/datasette-publish-cloudrun-help.txt b/docs/datasette-publish-cloudrun-help.txt index 98fc9c71..a625bd10 100644 --- a/docs/datasette-publish-cloudrun-help.txt +++ b/docs/datasette-publish-cloudrun-help.txt @@ -15,6 +15,9 @@ Options: datasette-auth-github client_id xxx --version-note TEXT Additional note to show on /-/versions + --secret TEXT Secret used for signing secure values, such as signed + cookies + --title TEXT Title for metadata --license TEXT License label for metadata --license_url TEXT License URL for metadata diff --git a/docs/datasette-publish-heroku-help.txt b/docs/datasette-publish-heroku-help.txt index ec157753..b2caa2cc 100644 --- a/docs/datasette-publish-heroku-help.txt +++ b/docs/datasette-publish-heroku-help.txt @@ -15,6 +15,9 @@ Options: datasette-auth-github client_id xxx --version-note TEXT Additional note to show on /-/versions + --secret TEXT Secret used for signing secure values, such as signed + cookies + --title TEXT Title for metadata --license TEXT License label for metadata --license_url TEXT License URL for metadata diff --git a/docs/plugins.rst b/docs/plugins.rst index a28092a3..989cf672 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -536,6 +536,7 @@ Let's say you want to build a plugin that adds a ``datasette publish my_hosting_ install, plugin_secret, version_note, + secret, title, license, license_url, diff --git a/tests/test_package.py b/tests/test_package.py index f0cbe88f..3248b3a4 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -15,7 +15,7 @@ FROM python:3.8 COPY . /app WORKDIR /app - +ENV DATASETTE_SECRET 'sekrit' RUN pip install -U datasette RUN datasette inspect test.db --inspect-file inspect-data.json ENV PORT {port} @@ -33,7 +33,7 @@ def test_package(mock_call, mock_which): mock_call.side_effect = capture with runner.isolated_filesystem(): open("test.db", "w").write("data") - result = runner.invoke(cli.cli, ["package", "test.db"]) + result = runner.invoke(cli.cli, ["package", "test.db", "--secret", "sekrit"]) assert 0 == result.exit_code mock_call.assert_has_calls([mock.call(["docker", "build", "."])]) assert EXPECTED_DOCKERFILE.format(port=8001) == capture.captured @@ -48,6 +48,8 @@ def test_package_with_port(mock_call, mock_which): runner = CliRunner() with runner.isolated_filesystem(): open("test.db", "w").write("data") - result = runner.invoke(cli.cli, ["package", "test.db", "-p", "8080"]) + result = runner.invoke( + cli.cli, ["package", "test.db", "-p", "8080", "--secret", "sekrit"] + ) assert 0 == result.exit_code assert EXPECTED_DOCKERFILE.format(port=8080) == capture.captured diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index 55c207c7..c3ed1f90 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -172,6 +172,8 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which): "client_id", "x-client-id", "--show-files", + "--secret", + "x-secret", ], ) dockerfile = ( @@ -184,6 +186,7 @@ COPY . 
/app
 WORKDIR /app

 ENV DATASETTE_AUTH_GITHUB_CLIENT_ID 'x-client-id'
+ENV DATASETTE_SECRET 'x-secret'
 RUN pip install -U datasette
 RUN datasette inspect test.db --inspect-file inspect-data.json
 ENV PORT 8001
diff --git a/tests/test_utils.py b/tests/test_utils.py
index b490953f..d613e999 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -247,6 +247,7 @@ def test_temporary_docker_directory_uses_hard_link():
         install=[],
         spatialite=False,
         version_note=None,
+        secret="secret",
     ) as temp_docker:
         hello = os.path.join(temp_docker, "hello")
         assert "world" == open(hello).read()
@@ -274,6 +275,7 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link):
         install=[],
         spatialite=False,
         version_note=None,
+        secret=None,
     ) as temp_docker:
         hello = os.path.join(temp_docker, "hello")
         assert "world" == open(hello).read()
@@ -297,11 +299,13 @@ def test_temporary_docker_directory_quotes_args():
         install=[],
         spatialite=False,
         version_note="$PWD",
+        secret="secret",
     ) as temp_docker:
         df = os.path.join(temp_docker, "Dockerfile")
         df_contents = open(df).read()
         assert "'$PWD'" in df_contents
         assert "'--$HOME'" in df_contents
+        assert "ENV DATASETTE_SECRET 'secret'" in df_contents


 def test_compound_keys_after_sql():
From fcc7cd6379ab62b5c2440d26935659a797133030 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 11 Jun 2020 09:04:32 -0700
Subject: [PATCH 0360/2113] rST formatting

---
 docs/publish.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/publish.rst b/docs/publish.rst
index c1024bd7..6eff74d0 100644
--- a/docs/publish.rst
+++ b/docs/publish.rst
@@ -139,7 +139,7 @@ You can now run the resulting container like so::

 This exposes port 8001 inside the container as port 8081 on your host machine, so you can access the application at ``http://localhost:8081/``

-You can customize the port that is exposed by the container using the ``--port`` option:
+You can customize the port that is exposed by the container using the ``--port`` option::

     datasette package mydatabase.db --port 8080
From 09bf3c63225babe8e28cde880ca4399ca7dbd78b Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 11 Jun 2020 09:14:30 -0700
Subject: [PATCH 0361/2113] Documentation for publish --secret, refs #787

---
 docs/config.rst  | 13 +++++++++++++
 docs/publish.rst |  2 ++
 2 files changed, 15 insertions(+)

diff --git a/docs/config.rst b/docs/config.rst
index ab14ea7b..bbbea822 100644
--- a/docs/config.rst
+++ b/docs/config.rst
@@ -306,3 +306,16 @@ One way to generate a secure random secret is to use Python like this::
     cdb19e94283a20f9d42cca50c5a4871c0aa07392db308755d60a1a5b9bb0fa52

 Plugin authors make use of this signing mechanism in their plugins using :ref:`datasette_sign` and :ref:`datasette_unsign`.
+
+.. _config_publish_secrets:
+
+Using secrets with datasette publish
+------------------------------------
+
+The :ref:`cli_publish` and :ref:`cli_package` commands both generate a secret for you automatically when Datasette is deployed.
+
+This means that every time you deploy a new version of a Datasette project, a new secret will be generated. This will cause signed cookies to become invalid on every fresh deploy.
+ +You can fix this by creating a secret that will be used for multiple deploys and passing it using the ``--secret`` option:: + + datasette publish cloudrun mydb.db --service=my-service --secret=cdb19e94283a20f9d42cca5 diff --git a/docs/publish.rst b/docs/publish.rst index 6eff74d0..ebaf826a 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -100,6 +100,8 @@ If a plugin has any :ref:`plugins_configuration_secret` you can use the ``--plug --plugin-secret datasette-auth-github client_id your_client_id \ --plugin-secret datasette-auth-github client_secret your_client_secret +.. _cli_package: + datasette package ================= From 29c5ff493ad7918b8fc44ea7920b41530e56dd5d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 15:14:51 -0700 Subject: [PATCH 0362/2113] view-instance permission for debug URLs, closes #833 --- datasette/views/special.py | 8 ++++++-- tests/test_permissions.py | 30 ++++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 2 deletions(-) diff --git a/datasette/views/special.py b/datasette/views/special.py index dc6a25dc..6fcb6b5e 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -14,6 +14,7 @@ class JsonDataView(BaseView): self.needs_request = needs_request async def get(self, request, as_format): + await self.check_permission(request, "view-instance") if self.needs_request: data = self.data_callback(request) else: @@ -46,6 +47,7 @@ class PatternPortfolioView(BaseView): self.ds = datasette async def get(self, request): + await self.check_permission(request, "view-instance") return await self.render(["patterns.html"], request=request) @@ -77,8 +79,8 @@ class PermissionsDebugView(BaseView): self.ds = datasette async def get(self, request): - if not await self.ds.permission_allowed(request.actor, "permissions-debug"): - return Response("Permission denied", status=403) + await self.check_permission(request, "view-instance") + await self.check_permission(request, "permissions-debug") return await self.render( ["permissions_debug.html"], request, @@ -93,9 +95,11 @@ class MessagesDebugView(BaseView): self.ds = datasette async def get(self, request): + await self.check_permission(request, "view-instance") return await self.render(["messages_debug.html"], request) async def post(self, request): + await self.check_permission(request, "view-instance") post = await request.post_vars() message = post.get("message", "") message_type = post.get("message_type") or "INFO" diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 1be9529a..fcc1b5ed 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -316,3 +316,33 @@ def test_permissions_debug(app_client): def test_allow_unauthenticated(allow, expected): with make_app_client(metadata={"allow": allow}) as client: assert expected == client.get("/").status + + +@pytest.fixture(scope="session") +def view_instance_client(): + with make_app_client(metadata={"allow": {}}) as client: + yield client + + +@pytest.mark.parametrize( + "path", + [ + "/", + "/fixtures", + "/fixtures/facetable", + "/-/metadata", + "/-/versions", + "/-/plugins", + "/-/config", + "/-/threads", + "/-/databases", + "/-/actor", + "/-/permissions", + "/-/messages", + "/-/patterns", + ], +) +def test_view_instance(path, view_instance_client): + assert 403 == view_instance_client.get(path).status + if path not in ("/-/permissions", "/-/messages", "/-/patterns"): + assert 403 == view_instance_client.get(path + ".json").status From f39f11133126158e28780dee91bb9c7719ef5875 Mon Sep 17 
00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 15:47:19 -0700 Subject: [PATCH 0363/2113] Fixed actor_matches_allow bug, closes #836 --- datasette/utils/__init__.py | 2 +- tests/test_utils.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 5090f67e..69cfa400 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -884,7 +884,7 @@ def actor_matches_allow(actor, allow): values = [values] actor_values = actor.get(key) if actor_values is None: - return False + continue if not isinstance(actor_values, list): actor_values = [actor_values] actor_values = set(actor_values) diff --git a/tests/test_utils.py b/tests/test_utils.py index d613e999..da1d298b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -497,6 +497,8 @@ def test_multi_params(data, should_raise): ({"id": "garry", "roles": ["staff", "dev"]}, {"roles": ["dev", "otter"]}, True), ({"id": "garry", "roles": []}, {"roles": ["staff"]}, False), ({"id": "garry"}, {"roles": ["staff"]}, False), + # Any single matching key works: + ({"id": "root"}, {"bot_id": "my-bot", "id": ["root"]}, True), ], ) def test_actor_matches_allow(actor, allow, expected): From fba8ff6e76253af2b03749ed8dd6e28985a7fb8f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 17:21:48 -0700 Subject: [PATCH 0364/2113] "$env": "X" mechanism now works with nested lists, closes #837 --- datasette/app.py | 14 ++------------ datasette/utils/__init__.py | 16 ++++++++++++++++ docs/changelog.rst | 2 ++ tests/fixtures.py | 1 + tests/test_plugins.py | 13 +++++++++++++ tests/test_utils.py | 14 ++++++++++++++ 6 files changed, 48 insertions(+), 12 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 71fa9afb..ebab3bee 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -45,6 +45,7 @@ from .utils import ( format_bytes, module_from_path, parse_metadata, + resolve_env_secrets, sqlite3, to_css_class, ) @@ -367,18 +368,7 @@ class Datasette: return None plugin_config = plugins.get(plugin_name) # Resolve any $file and $env keys - if isinstance(plugin_config, dict): - # Create a copy so we don't mutate the version visible at /-/metadata.json - plugin_config_copy = dict(plugin_config) - for key, value in plugin_config_copy.items(): - if isinstance(value, dict): - if list(value.keys()) == ["$env"]: - plugin_config_copy[key] = os.environ.get( - list(value.values())[0] - ) - elif list(value.keys()) == ["$file"]: - plugin_config_copy[key] = open(list(value.values())[0]).read() - return plugin_config_copy + plugin_config = resolve_env_secrets(plugin_config, os.environ) return plugin_config def app_css_hash(self): diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 69cfa400..ae7bbdb5 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -904,3 +904,19 @@ async def check_visibility(datasette, actor, action, resource, default=True): None, action, resource=resource, default=default, ) return visible, private + + +def resolve_env_secrets(config, environ): + 'Create copy that recursively replaces {"$env": "NAME"} with values from environ' + if isinstance(config, dict): + if list(config.keys()) == ["$env"]: + return environ.get(list(config.values())[0]) + else: + return { + key: resolve_env_secrets(value, environ) + for key, value in config.items() + } + elif isinstance(config, list): + return [resolve_env_secrets(value, environ) for value in config] + else: + return config diff --git 
a/docs/changelog.rst b/docs/changelog.rst
index 911fb1b6..3a01d05e 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -94,6 +94,8 @@ Both flash messages and user authentication needed a way to sign values and set

 Datasette will generate a secret automatically when it starts up, but to avoid resetting the secret (and hence invalidating any cookies) every time the server restarts you should set your own secret. You can pass a secret to Datasette using the new ``--secret`` option or with a ``DATASETTE_SECRET`` environment variable. See :ref:`config_secret` for more details.

+You can also set a secret when you deploy Datasette using ``datasette publish`` or ``datasette package`` - see :ref:`config_publish_secrets`.
+
 Plugins can now sign values and verify their signatures using the :ref:`datasette.sign() <datasette_sign>` and :ref:`datasette.unsign() <datasette_unsign>` methods.

 CSRF protection
diff --git a/tests/fixtures.py b/tests/fixtures.py
index a846999b..907bf895 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -408,6 +408,7 @@ METADATA = {
     "plugins": {
         "name-of-plugin": {"depth": "root"},
         "env-plugin": {"foo": {"$env": "FOO_ENV"}},
+        "env-plugin-list": [{"in_a_list": {"$env": "FOO_ENV"}}],
         "file-plugin": {"foo": {"$file": TEMP_PLUGIN_SECRET_FILE}},
     },
     "databases": {
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index c7bb4859..0fae3740 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -173,6 +173,19 @@ def test_plugin_config_env(app_client):
     del os.environ["FOO_ENV"]


+def test_plugin_config_env_from_list(app_client):
+    os.environ["FOO_ENV"] = "FROM_ENVIRONMENT"
+    assert [{"in_a_list": "FROM_ENVIRONMENT"}] == app_client.ds.plugin_config(
+        "env-plugin-list"
+    )
+    # Ensure secrets aren't visible in /-/metadata.json
+    metadata = app_client.get("/-/metadata.json")
+    assert [{"in_a_list": {"$env": "FOO_ENV"}}] == metadata.json["plugins"][
+        "env-plugin-list"
+    ]
+    del os.environ["FOO_ENV"]
+
+
 def test_plugin_config_file(app_client):
     open(TEMP_PLUGIN_SECRET_FILE, "w").write("FROM_FILE")
     assert {"foo": "FROM_FILE"} == app_client.ds.plugin_config("file-plugin")
diff --git a/tests/test_utils.py b/tests/test_utils.py
index da1d298b..80c6f223 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -503,3 +503,17 @@ def test_multi_params(data, should_raise):
 )
 def test_actor_matches_allow(actor, allow, expected):
     assert expected == utils.actor_matches_allow(actor, allow)
+
+
+@pytest.mark.parametrize(
+    "config,expected",
+    [
+        ({"foo": "bar"}, {"foo": "bar"}),
+        ({"$env": "FOO"}, "x"),
+        ({"k": {"$env": "FOO"}}, {"k": "x"}),
+        ([{"k": {"$env": "FOO"}}, {"z": {"$env": "FOO"}}], [{"k": "x"}, {"z": "x"}]),
+        ({"k": [{"in_a_list": {"$env": "FOO"}}]}, {"k": [{"in_a_list": "x"}]}),
+    ],
+)
+def test_resolve_env_secrets(config, expected):
+    assert expected == utils.resolve_env_secrets(config, {"FOO": "x"})
From 308bcc8805236b8eb5a08d8045c84f68bd0ddf0e Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 11 Jun 2020 17:25:12 -0700
Subject: [PATCH 0365/2113] Fixed test_permissions_debug

---
 datasette/views/special.py | 3 ++-
 tests/test_permissions.py  | 2 ++
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/datasette/views/special.py b/datasette/views/special.py
index 6fcb6b5e..6c378995 100644
--- a/datasette/views/special.py
+++ b/datasette/views/special.py
@@ -80,7 +80,8 @@ class PermissionsDebugView(BaseView):

     async def get(self, request):
         await self.check_permission(request, "view-instance")
-        await self.check_permission(request, "permissions-debug")
+        if not await
self.ds.permission_allowed(request.actor, "permissions-debug"): + return Response("Permission denied", status=403) return await self.render( ["permissions_debug.html"], request, diff --git a/tests/test_permissions.py b/tests/test_permissions.py index fcc1b5ed..241dd2e5 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -305,7 +305,9 @@ def test_permissions_debug(app_client): ] assert [ {"action": "permissions-debug", "result": True, "used_default": False}, + {"action": "view-instance", "result": True, "used_default": True}, {"action": "permissions-debug", "result": False, "used_default": True}, + {"action": "view-instance", "result": True, "used_default": True}, ] == checks From 1d2e8e09a00a4b695317627483f352464ea8a105 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 17:33:16 -0700 Subject: [PATCH 0366/2113] Some last touches to the 0.44 release notes, refs #806 --- docs/changelog.rst | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 3a01d05e..aca8f8c2 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -107,6 +107,13 @@ Since writable canned queries are built using POST forms, Datasette now ships wi +Cookie methods +~~~~~~~~~~~~~~ + +Plugins can now use the new :ref:`response.set_cookie() ` method to set cookies. + +A new ``request.cookies`` method on the :ref:`internals_request` can be used to read incoming cookies. + register_routes() plugin hooks ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -134,6 +141,9 @@ Smaller changes - Fixed broken CSS on nested 404 pages. (`#777 `__) - New ``request.url_vars`` property. (`#822 `__) - Fixed a bug with the ``python tests/fixtures.py`` command for outputting Datasette's testing fixtures database and plugins. (`#804 `__) +- ``datasette publish heroku`` now deploys using Python 3.8.3. +- Added a warning that the :ref:`plugin_register_facet_classes` hook is unstable and may change in the future. (`#830 `__) +- The ``{"$env": "ENVIRONMENT_VARIABLE"}`` mechanism (see :ref:`plugins_configuration_secret`) now works with variables inside nested lists. (`#837 `__) The road to Datasette 1.0 ~~~~~~~~~~~~~~~~~~~~~~~~~ From 793a52b31771280a6c8660efb9e48b9b763477ff Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 17:43:51 -0700 Subject: [PATCH 0367/2113] Link to datasette-auth-tokens and datasette-permissions-sql in docs, refs #806 --- docs/authentication.rst | 4 ++-- docs/changelog.rst | 4 ++-- docs/ecosystem.rst | 10 ++++++++++ docs/internals.rst | 2 +- docs/plugins.rst | 17 ++++++++++------- 5 files changed, 25 insertions(+), 12 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 6a526f34..2a6fa9bc 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -19,7 +19,7 @@ Every request to Datasette has an associated actor value, available in the code The actor dictionary can be any shape - the design of that data structure is left up to the plugins. A useful convention is to include an ``"id"`` string, as demonstrated by the "root" actor below. -Plugins can use the :ref:`plugin_actor_from_request` hook to implement custom logic for authenticating an actor based on the incoming HTTP request. +Plugins can use the :ref:`plugin_hook_actor_from_request` hook to implement custom logic for authenticating an actor based on the incoming HTTP request. .. 
_authentication_root: @@ -314,7 +314,7 @@ Checking permissions in plugins Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. -Datasette core performs a number of permission checks, :ref:`documented below `. Plugins can implement the :ref:`plugin_permission_allowed` plugin hook to participate in decisions about whether an actor should be able to perform a specified action. +Datasette core performs a number of permission checks, :ref:`documented below `. Plugins can implement the :ref:`plugin_hook_permission_allowed` plugin hook to participate in decisions about whether an actor should be able to perform a specified action. .. _authentication_actor_matches_allow: diff --git a/docs/changelog.rst b/docs/changelog.rst index aca8f8c2..3a7f9562 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -27,7 +27,7 @@ You'll need to install plugins if you want full user accounts, but default Datas INFO: Application startup complete. INFO: Uvicorn running on http://127.0.0.1:8001 (Press CTRL+C to quit) -Plugins can implement new ways of authenticating users using the new :ref:`plugin_actor_from_request` hook. +Plugins can implement new ways of authenticating users using the new :ref:`plugin_hook_actor_from_request` hook. Permissions ~~~~~~~~~~~ @@ -52,7 +52,7 @@ You can use the new ``"allow"`` block syntax in ``metadata.json`` (or ``metadata See :ref:`authentication_permissions_allow` for more details. -Plugins can implement their own custom permission checks using the new :ref:`plugin_permission_allowed` hook. +Plugins can implement their own custom permission checks using the new :ref:`plugin_hook_permission_allowed` hook. A new debug page at ``/-/permissions`` shows recent permission checks, to help administrators and plugin authors understand exactly what checks are being performed. This tool defaults to only being available to the root user, but can be exposed to other users by plugins that respond to the ``permissions-debug`` permission. (`#788 `__) diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst index 4777cc16..dcb5a887 100644 --- a/docs/ecosystem.rst +++ b/docs/ecosystem.rst @@ -87,6 +87,16 @@ datasette-auth-github `datasette-auth-github `__ adds an authentication layer to Datasette. Users will have to sign in using their GitHub account before they can view data or interact with Datasette. You can also use it to restrict access to specific GitHub users, or to members of specified GitHub `organizations `__ or `teams `__. +datasette-auth-tokens +--------------------- + +`datasette-auth-tokens `__ provides a mechanism for creating secret API tokens that can then be used with Datasette's :ref:`authentication` system. + +datasette-permissions-sql +--------------------- + +`datasette-permissions-sql `__ lets you configure Datasette permissions checks to use custom SQL queries, which means you can make permission decisions based on data contained within your databases. + datasette-upload-csvs --------------------- diff --git a/docs/internals.rst b/docs/internals.rst index d75544e1..ab9da410 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -219,7 +219,7 @@ await .permission_allowed(actor, action, resource=None, default=False) Check if the given actor has :ref:`permission ` to perform the given action on the given resource. 
-Some permission checks are carried out against :ref:`rules defined in metadata.json `, while other custom permissions may be decided by plugins that implement the :ref:`plugin_permission_allowed` plugin hook. +Some permission checks are carried out against :ref:`rules defined in metadata.json `, while other custom permissions may be decided by plugins that implement the :ref:`plugin_hook_permission_allowed` plugin hook. If neither ``metadata.json`` nor any of the plugins provide an answer to the permission query the ``default`` argument will be returned. diff --git a/docs/plugins.rst b/docs/plugins.rst index 989cf672..608f93da 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -25,9 +25,8 @@ Things you can do with plugins include: * Customize how database values are rendered in the Datasette interface, for example `datasette-render-binary `__ and `datasette-pretty-json `__. -* Wrap the entire Datasette application in custom ASGI middleware to add new pages - or implement authentication, for example - `datasette-auth-github `__. +* Customize how Datasette's authentication and permissions systems work, for example `datasette-auth-tokens `__ and + `datasette-permissions-sql `__. .. _plugins_installing: @@ -996,7 +995,7 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att Examples: `datasette-auth-github `_, `datasette-search-all `_, `datasette-media `_ -.. _plugin_actor_from_request: +.. _plugin_hook_actor_from_request: actor_from_request(datasette, request) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1055,7 +1054,9 @@ Instead of returning a dictionary, this function can return an awaitable functio return inner -.. _plugin_permission_allowed: +Example: `datasette-auth-tokens `_ + +.. _plugin_hook_permission_allowed: permission_allowed(datasette, actor, action, resource) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1064,7 +1065,7 @@ permission_allowed(datasette, actor, action, resource) You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. ``actor`` - dictionary - The current actor, as decided by :ref:`plugin_actor_from_request`. + The current actor, as decided by :ref:`plugin_hook_actor_from_request`. ``action`` - string The action to be performed, e.g. ``"edit-table"``. @@ -1110,4 +1111,6 @@ Here's an example that allows users to view the ``admin_log`` table only if thei return inner -See :ref:`permissions` for a full list of permissions that are included in Datasette core. +See :ref:`built-in permissions ` for a full list of permissions that are included in Datasette core. 
+ +Example: `datasette-permissions-sql `_ From 9ae0d483ead93c0832142e5dc85959ae3c8f73ea Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 17:48:20 -0700 Subject: [PATCH 0368/2113] Get "$file": "../path" mechanism working again, closes #839 --- datasette/utils/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index ae7bbdb5..14060669 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -911,6 +911,8 @@ def resolve_env_secrets(config, environ): if isinstance(config, dict): if list(config.keys()) == ["$env"]: return environ.get(list(config.values())[0]) + elif list(config.keys()) == ["$file"]: + return open(list(config.values())[0]).read() else: return { key: resolve_env_secrets(value, environ) From b906030235efbdff536405d66078f4868ce0d3bd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 18:19:30 -0700 Subject: [PATCH 0369/2113] Release Datasette 0.44 Refs #395, #519, #576, #699, #706, #774, #777, #781, #784, #788, #790, #797, #798, #800, #802, #804, #819, #822, #825, #826, #827, #828, #829, #830, #833, #836, #837, #839 Closes #806. --- README.md | 1 + docs/changelog.rst | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 90df75de..925d68d2 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover ## News + * 11th June 2020: [Datasette 0.44](http://datasette.readthedocs.io/en/latest/changelog.html#v0-44) - [Authentication and permissions](https://datasette.readthedocs.io/en/latest/authentication.html), [writable canned queries](https://datasette.readthedocs.io/en/latest/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more. * 28th May 2020: [Datasette 0.43](http://datasette.readthedocs.io/en/latest/changelog.html#v0-43) - Redesigned [register_output_renderer](https://datasette.readthedocs.io/en/latest/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. * 8th May 2020: [Datasette 0.42](http://datasette.readthedocs.io/en/latest/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. * 6th May 2020: [Datasette 0.41](http://datasette.readthedocs.io/en/latest/changelog.html#v0-41) - New mechanism for [creating custom pages](https://datasette.readthedocs.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://datasette.readthedocs.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements. diff --git a/docs/changelog.rst b/docs/changelog.rst index 3a7f9562..b1e95bb7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,7 +6,7 @@ Changelog .. _v0_44: -0.44 (2020-06-??) +0.44 (2020-06-11) ----------------- Authentication and permissions, writable canned queries, flash messages, new plugin hooks and more. 
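A short sketch pulling together the two utility changes shipped in 0.44 above - this assumes only the ``resolve_env_secrets()`` and ``actor_matches_allow()`` implementations committed in patches 0363 and 0364, while the ``DB_PASSWORD`` variable and the config shape are invented for illustration:

.. code-block:: python

    import os

    from datasette.utils import actor_matches_allow, resolve_env_secrets

    # {"$env": "NAME"} values are replaced from the environment, and the
    # replacement now also recurses into nested lists (issue #837)
    os.environ["DB_PASSWORD"] = "s3cret"  # hypothetical variable for this sketch
    config = {"plugins": [{"password": {"$env": "DB_PASSWORD"}}]}
    assert resolve_env_secrets(config, os.environ) == {
        "plugins": [{"password": "s3cret"}]
    }

    # Since the #836 fix, a key missing from the actor no longer short-circuits
    # the check to False - any single matching key still grants access
    assert actor_matches_allow({"id": "root"}, {"bot_id": "my-bot", "id": ["root"]})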
From 09a3479a5402df96489ed6cab6cc9fd674bf3433 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 10:55:41 -0700 Subject: [PATCH 0370/2113] New "startup" plugin hook, closes #834 --- datasette/app.py | 7 +++++++ datasette/cli.py | 3 +++ datasette/hookspecs.py | 5 +++++ docs/plugins.rst | 33 +++++++++++++++++++++++++++++++++ tests/fixtures.py | 1 + tests/plugins/my_plugin.py | 5 +++++ tests/test_cli.py | 1 + tests/test_plugins.py | 6 ++++++ 8 files changed, 61 insertions(+) diff --git a/datasette/app.py b/datasette/app.py index ebab3bee..ca2efa91 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -302,6 +302,13 @@ class Datasette: self._permission_checks = collections.deque(maxlen=200) self._root_token = secrets.token_hex(32) + async def invoke_startup(self): + for hook in pm.hook.startup(datasette=self): + if callable(hook): + hook = hook() + if asyncio.iscoroutine(hook): + hook = await hook + def sign(self, value, namespace="default"): return URLSafeSerializer(self._secret, namespace).dumps(value) diff --git a/datasette/cli.py b/datasette/cli.py index ff9a2d5c..bba72484 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -397,6 +397,9 @@ def serve( # Private utility mechanism for writing unit tests return ds + # Run the "startup" plugin hooks + asyncio.get_event_loop().run_until_complete(ds.invoke_startup()) + # Run async sanity checks - but only if we're not under pytest asyncio.get_event_loop().run_until_complete(check_databases(ds)) diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index ab3e131c..9fceee41 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -5,6 +5,11 @@ hookspec = HookspecMarker("datasette") hookimpl = HookimplMarker("datasette") +@hookspec +def startup(datasette): + "Fires directly after Datasette first starts running" + + @hookspec def asgi_wrapper(datasette): "Returns an ASGI middleware callable to wrap our ASGI application with" diff --git a/docs/plugins.rst b/docs/plugins.rst index 608f93da..289be649 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -995,6 +995,39 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att Examples: `datasette-auth-github `_, `datasette-search-all `_, `datasette-media `_ +.. _plugin_hook_startup: + +startup(datasette) +~~~~~~~~~~~~~~~~~~ + +This hook fires when the Datasette application server first starts up. You can implement a regular function, for example to validate required plugin configuration: + +.. code-block:: python + + @hookimpl + def startup(datasette): + config = datasette.plugin_config("my-plugin") or {} + assert "required-setting" in config, "my-plugin requires setting required-setting" + +Or you can return an async function which will be awaited on startup. Use this option if you need to make any database queries: + + @hookimpl + def startup(datasette): + async def inner(): + db = datasette.get_database() + if "my_table" not in await db.table_names(): + await db.execute_write(""" + create table my_table (mycol text) + """, block=True) + return inner + + +Potential use-cases: + +* Run some initialization code for the plugin +* Create database tables that a plugin needs +* Validate the metadata configuration for a plugin on startup, and raise an error if it is invalid + .. 
_plugin_hook_actor_from_request: actor_from_request(datasette, request) diff --git a/tests/fixtures.py b/tests/fixtures.py index 907bf895..09819575 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -49,6 +49,7 @@ EXPECTED_PLUGINS = [ "register_facet_classes", "register_routes", "render_cell", + "startup", ], }, { diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index a0f7441b..3f019a84 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -167,3 +167,8 @@ def register_routes(): (r"/two/(?P.*)$", two), (r"/three/$", three), ] + + +@hookimpl +def startup(datasette): + datasette._startup_hook_fired = True diff --git a/tests/test_cli.py b/tests/test_cli.py index 6939fe57..90aa990d 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -10,6 +10,7 @@ from click.testing import CliRunner import io import json import pathlib +import pytest import textwrap diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 0fae3740..c0a7438f 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -572,3 +572,9 @@ def test_register_routes_asgi(app_client): response = app_client.get("/three/") assert {"hello": "world"} == response.json assert "1" == response.headers["x-three"] + + +@pytest.mark.asyncio +async def test_startup(app_client): + await app_client.ds.invoke_startup() + assert app_client.ds._startup_hook_fired From 72ae975156a09619a808cdd03fddddcf62e6f533 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 10:58:32 -0700 Subject: [PATCH 0371/2113] Added test for async startup hook, refs #834 --- tests/plugins/my_plugin_2.py | 8 ++++++++ tests/test_plugins.py | 1 + 2 files changed, 9 insertions(+) diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index 039112f4..bdfaea8d 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -120,3 +120,11 @@ def permission_allowed(datasette, actor, action): return False return inner + + +@hookimpl +def startup(datasette): + async def inner(): + result = await datasette.get_database().execute("select 1 + 1") + datasette._startup_hook_calculation = result.first()[0] + return inner diff --git a/tests/test_plugins.py b/tests/test_plugins.py index c0a7438f..bc759385 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -578,3 +578,4 @@ def test_register_routes_asgi(app_client): async def test_startup(app_client): await app_client.ds.invoke_startup() assert app_client.ds._startup_hook_fired + assert 2 == app_client.ds._startup_hook_calculation From ae99af25361c9248c721153922c623bd5f440159 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 10:59:35 -0700 Subject: [PATCH 0372/2113] Fixed rST code formatting, refs #834 --- docs/plugins.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index 289be649..8add7352 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1011,6 +1011,8 @@ This hook fires when the Datasette application server first starts up. You can i Or you can return an async function which will be awaited on startup. Use this option if you need to make any database queries: +.. code-block:: python + @hookimpl def startup(datasette): async def inner(): @@ -1021,7 +1023,6 @@ Or you can return an async function which will be awaited on startup. 
Use this o """, block=True) return inner - Potential use-cases: * Run some initialization code for the plugin From d60bd6ad13ef908d7e66a677caee20536f3fb277 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 11:15:33 -0700 Subject: [PATCH 0373/2113] Update plugin tests, refs #834 --- tests/fixtures.py | 1 + tests/plugins/my_plugin_2.py | 1 + 2 files changed, 2 insertions(+) diff --git a/tests/fixtures.py b/tests/fixtures.py index 09819575..e2f90f09 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -64,6 +64,7 @@ EXPECTED_PLUGINS = [ "extra_template_vars", "permission_allowed", "render_cell", + "startup", ], }, { diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index bdfaea8d..f4a082a0 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -127,4 +127,5 @@ def startup(datasette): async def inner(): result = await datasette.get_database().execute("select 1 + 1") datasette._startup_hook_calculation = result.first()[0] + return inner From 0e49842e227a0f1f69d48108c87d17fe0379e548 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 11:29:14 -0700 Subject: [PATCH 0374/2113] datasette/actor_auth_cookie.py coverage to 100%, refs #841 --- tests/test_auth.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/test_auth.py b/tests/test_auth.py index 5e847445..bb4bee4b 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -26,6 +26,17 @@ def test_actor_cookie(app_client): assert {"id": "test"} == app_client.ds._last_request.scope["actor"] +def test_actor_cookie_invalid(app_client): + cookie = app_client.actor_cookie({"id": "test"}) + # Break the signature + response = app_client.get("/", cookies={"ds_actor": cookie[:-1] + "."}) + assert None == app_client.ds._last_request.scope["actor"] + # Break the cookie format + cookie = app_client.ds.sign({"b": {"id": "test"}}, "actor") + response = app_client.get("/", cookies={"ds_actor": cookie}) + assert None == app_client.ds._last_request.scope["actor"] + + @pytest.mark.parametrize( "offset,expected", [((24 * 60 * 60), {"id": "test"}), (-(24 * 60 * 60), None),] ) From 80c18a18fc444b89cc12b73599d56e091f3a3c87 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 13:48:23 -0700 Subject: [PATCH 0375/2113] Configure code coverage, refs #841, #843 --- .coveragerc | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .coveragerc diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 00000000..6ca0fac8 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,2 @@ +[run] +omit = datasette/_version.py, datasette/utils/shutil_backport.py From cf7a2bdb404734910ec07abc7571351a2d934828 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 14:36:49 -0700 Subject: [PATCH 0376/2113] Action to run tests and upload coverage to codecov.io Closes #843. 
--- .github/workflows/test-coverage.yml | 41 +++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 .github/workflows/test-coverage.yml diff --git a/.github/workflows/test-coverage.yml b/.github/workflows/test-coverage.yml new file mode 100644 index 00000000..99c0526a --- /dev/null +++ b/.github/workflows/test-coverage.yml @@ -0,0 +1,41 @@ +name: Calculate test coverage + +on: + push: + branches: + - master + pull_request: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + steps: + - name: Check out datasette + uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v1 + with: + python-version: 3.8 + - uses: actions/cache@v2 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install Python dependencies + run: | + python -m pip install -e .[test] + python -m pip install pytest-cov + - name: Run tests + run: |- + ls -lah + cat .coveragerc + pytest --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term + ls -lah + - name: Upload coverage report + uses: codecov/codecov-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + file: coverage.xml From 0c27f10f9d2124f0f534c25612b58be20441c9d8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 16:41:26 -0700 Subject: [PATCH 0377/2113] Updated plugin examples to include datasette-psutil --- docs/plugins.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index 8add7352..113e6b24 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -884,6 +884,8 @@ The optional view function arguments are as follows: The function can either return a :ref:`internals_response` or it can return nothing and instead respond directly to the request using the ASGI ``send`` function (for advanced uses only). +Examples: `datasette-auth-github `__, `datasette-psutil `__ + .. _plugin_register_facet_classes: register_facet_classes() @@ -993,7 +995,7 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att return add_x_databases_header return wrap_with_databases_header -Examples: `datasette-auth-github `_, `datasette-search-all `_, `datasette-media `_ +Examples: `datasette-search-all `_, `datasette-media `_ .. 
_plugin_hook_startup: From a4ad5a504c161bc3b1caaa40b22e46d600f7d4fc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 17:26:02 -0700 Subject: [PATCH 0378/2113] Workaround for 'Too many open files' in test runs, refs #846 --- tests/fixtures.py | 3 +++ tests/test_api.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index e2f90f09..a4a96919 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -268,6 +268,9 @@ def make_app_client( "default_page_size": 50, "max_returned_rows": max_returned_rows or 100, "sql_time_limit_ms": sql_time_limit_ms or 200, + # Default is 3 but this results in "too many open files" + # errors when running the full test suite: + "num_sql_threads": 1, } ) ds = Datasette( diff --git a/tests/test_api.py b/tests/test_api.py index 1a54edec..322a0001 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1320,7 +1320,7 @@ def test_config_json(app_client): "suggest_facets": True, "default_cache_ttl": 5, "default_cache_ttl_hashed": 365 * 24 * 60 * 60, - "num_sql_threads": 3, + "num_sql_threads": 1, "cache_size_kb": 0, "allow_csv_stream": True, "max_csv_mb": 100, From d2aef9f7ef30fa20b1450cd181cf803f44fb4e21 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 09:21:15 -0700 Subject: [PATCH 0379/2113] Test illustrating POST against register_routes(), closes #853 --- tests/plugins/my_plugin.py | 7 +++++++ tests/test_plugins.py | 7 +++++++ 2 files changed, 14 insertions(+) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 3f019a84..72736e84 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -162,10 +162,17 @@ def register_routes(): send, {"hello": "world"}, status=200, headers={"x-three": "1"} ) + async def post(request): + if request.method == "GET": + return Response.html(request.scope["csrftoken"]()) + else: + return Response.json(await request.post_vars()) + return [ (r"/one/$", one), (r"/two/(?P.*)$", two), (r"/three/$", three), + (r"/post/$", post), ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index bc759385..e3a234f2 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -568,6 +568,13 @@ def test_register_routes(app_client, path, body): assert body == response.text +def test_register_routes_post(app_client): + response = app_client.post("/post/", {"this is": "post data"}, csrftoken_from=True) + assert 200 == response.status + assert "csrftoken" in response.json + assert "post data" == response.json["this is"] + + def test_register_routes_asgi(app_client): response = app_client.get("/three/") assert {"hello": "world"} == response.json From 6151c25a5a8d566c109af296244b9267c536bd9a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 11:37:28 -0700 Subject: [PATCH 0380/2113] Respect existing scope["actor"] if set, closes #854 --- datasette/app.py | 3 ++- tests/fixtures.py | 1 + tests/plugins/my_plugin.py | 14 ++++++++++++++ tests/test_plugins.py | 5 +++++ 4 files changed, 22 insertions(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index ca2efa91..c684eabc 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -908,6 +908,7 @@ class DatasetteRouter(AsgiRouter): ): scope_modifications["scheme"] = "https" # Handle authentication + default_actor = scope.get("actor") or None actor = None for actor in pm.hook.actor_from_request( datasette=self.ds, request=Request(scope, receive) @@ -918,7 +919,7 @@ class DatasetteRouter(AsgiRouter): actor = await actor if actor: break 
- scope_modifications["actor"] = actor + scope_modifications["actor"] = actor or default_actor return await super().route_path( dict(scope, **scope_modifications), receive, send, path ) diff --git a/tests/fixtures.py b/tests/fixtures.py index a4a96919..612bee99 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -39,6 +39,7 @@ EXPECTED_PLUGINS = [ "version": None, "hooks": [ "actor_from_request", + "asgi_wrapper", "extra_body_script", "extra_css_urls", "extra_js_urls", diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 72736e84..a86e3cbf 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -137,6 +137,20 @@ def actor_from_request(datasette, request): return None +@hookimpl +def asgi_wrapper(): + def wrap(app): + async def maybe_set_actor_in_scope(scope, receive, send): + if b"_actor_in_scope" in scope["query_string"]: + scope = dict(scope, actor={"id": "from-scope"}) + print(scope) + await app(scope, receive, send) + + return maybe_set_actor_in_scope + + return wrap + + @hookimpl def permission_allowed(actor, action): if action == "this_is_allowed": diff --git a/tests/test_plugins.py b/tests/test_plugins.py index e3a234f2..245c60f7 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -534,6 +534,11 @@ def test_actor_from_request_async(app_client): assert {"id": "bot2", "1+1": 2} == app_client.ds._last_request.scope["actor"] +def test_existing_scope_actor_respected(app_client): + app_client.get("/?_actor_in_scope=1") + assert {"id": "from-scope"} == app_client.ds._last_request.scope["actor"] + + @pytest.mark.asyncio @pytest.mark.parametrize( "action,expected", From 13216cb6bd715b3068b917bdeb1f1f24d159c34c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 13:40:33 -0700 Subject: [PATCH 0381/2113] Don't push alpha/beta tagged releases to Docker Hub Refs #807 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 5e328d7a..5aafe398 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,7 +32,7 @@ jobs: branch: master tags: true - stage: publish docker image - if: tag IS present + if: (tag IS present) AND NOT (tag =~ [ab]) python: 3.6 script: # Build and release to Docker Hub From c81f637d862a6b13ac4b07cef5a493b62e079c81 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 13:49:52 -0700 Subject: [PATCH 0382/2113] Documentation for alpha/beta release process, refs #807 --- docs/contributing.rst | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/docs/contributing.rst b/docs/contributing.rst index ba52839c..75c1c3b2 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -147,6 +147,8 @@ We increment ``minor`` for new features. We increment ``patch`` for bugfix releases. +:ref:`contributing_release_alpha_beta` may have an additional ``a0`` or ``b0`` prefix - the integer component will be incremented with each subsequent alpha or beta. + To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. 
An example `commit can be seen here `__:: # Update changelog @@ -180,3 +182,14 @@ Final steps once the release has deployed to https://pypi.org/project/datasette/ * Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases - you can convert the release notes to Markdown by copying and pasting the rendered HTML into this tool: https://euangoddard.github.io/clipboard2markdown/ * Manually kick off a build of the `stable` branch on Read The Docs: https://readthedocs.org/projects/datasette/builds/ +.. _contributing_release_alpha_beta: + +Alpha and beta releases +----------------------- + +Alpha and beta releases are published to preview upcoming features that may not yet be stable - in particular to preview new plugin hooks. + +You are welcome to try these out, but please be aware that details may change before the final release. + +Please join `discussions on the issue tracker `__ to share your thoughts and experiences with alpha and beta features that you try out. From dda932d818b34ccab11730a76554f0a3748d8348 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 13:58:09 -0700 Subject: [PATCH 0383/2113] Release notes for 0.45a0 Refs #834 #846 #854 #807 --- docs/changelog.rst | 12 ++++++++++++ docs/contributing.rst | 4 ++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b1e95bb7..705ba4d4 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,18 @@ Changelog ========= +.. _v0_45 alpha: + +0.45a0 (2020-06-18) +------------------- + +.. warning:: This is an **alpha** release. See :ref:`contributing_alpha_beta`. + +- New :ref:`plugin_hook_startup` plugin hook. (`#834 `__) +- Workaround for "Too many open files" error in test runs. (`#846 `__) +- Respect existing ``scope["actor"]`` if already set by ASGI middleware. (`#854 `__) +- New process for shipping :ref:`contributing_alpha_beta`. 
_contributing_alpha_beta: Alpha and beta releases ----------------------- From d2f387591bdda3949162e1802816be6ca1bb777a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 14:01:36 -0700 Subject: [PATCH 0384/2113] Better rST label for alpha release, refs #807 --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 705ba4d4..e117663f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,7 +4,7 @@ Changelog ========= -.. _v0_45 alpha: +.. _v0_45a0: 0.45a0 (2020-06-18) ------------------- From 6c2634583627bfab750c115cb13850252821d637 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 16:22:33 -0700 Subject: [PATCH 0385/2113] New plugin hook: canned_queries(), refs #852 --- datasette/app.py | 26 +++++++---- datasette/default_permissions.py | 75 ++++++++++++++++---------------- datasette/hookspecs.py | 5 +++ datasette/views/database.py | 4 +- datasette/views/table.py | 6 ++- docs/plugins.rst | 67 ++++++++++++++++++++++++++++ tests/fixtures.py | 2 + tests/plugins/my_plugin.py | 9 ++++ tests/plugins/my_plugin_2.py | 14 ++++++ tests/test_canned_write.py | 10 ++++- tests/test_html.py | 2 + tests/test_plugins.py | 31 +++++++++++++ 12 files changed, 202 insertions(+), 49 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index c684eabc..e131ba46 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -387,18 +387,28 @@ class Datasette: ).hexdigest()[:6] return self._app_css_hash - def get_canned_queries(self, database_name): + async def get_canned_queries(self, database_name, actor): queries = self.metadata("queries", database=database_name, fallback=False) or {} - names = queries.keys() - return [self.get_canned_query(database_name, name) for name in names] + for more_queries in pm.hook.canned_queries( + datasette=self, database=database_name, actor=actor, + ): + if callable(more_queries): + more_queries = more_queries() + if asyncio.iscoroutine(more_queries): + more_queries = await more_queries + queries.update(more_queries or {}) + # Fix any {"name": "select ..."} queries to be {"name": {"sql": "select ..."}} + for key in queries: + if not isinstance(queries[key], dict): + queries[key] = {"sql": queries[key]} + # Also make sure "name" is available: + queries[key]["name"] = key + return queries - def get_canned_query(self, database_name, query_name): - queries = self.metadata("queries", database=database_name, fallback=False) or {} + async def get_canned_query(self, database_name, query_name, actor): + queries = await self.get_canned_queries(database_name, actor) query = queries.get(query_name) if query: - if not isinstance(query, dict): - query = {"sql": query} - query["name"] = query_name return query def update_with_inherited_metadata(self, metadata): diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index e750acbf..0929a17a 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -4,41 +4,42 @@ from datasette.utils import actor_matches_allow @hookimpl(tryfirst=True) def permission_allowed(datasette, actor, action, resource): - if action == "permissions-debug": - if actor and actor.get("id") == "root": - return True - elif action == "view-instance": - allow = datasette.metadata("allow") - if allow is not None: + async def inner(): + if action == "permissions-debug": + if actor and actor.get("id") == "root": + return True + elif action == "view-instance": + allow = datasette.metadata("allow") + if allow 
is not None: + return actor_matches_allow(actor, allow) + elif action == "view-database": + database_allow = datasette.metadata("allow", database=resource) + if database_allow is None: + return True + return actor_matches_allow(actor, database_allow) + elif action == "view-table": + database, table = resource + tables = datasette.metadata("tables", database=database) or {} + table_allow = (tables.get(table) or {}).get("allow") + if table_allow is None: + return True + return actor_matches_allow(actor, table_allow) + elif action == "view-query": + # Check if this query has a "allow" block in metadata + database, query_name = resource + query = await datasette.get_canned_query(database, query_name, actor) + assert query is not None + allow = query.get("allow") + if allow is None: + return True return actor_matches_allow(actor, allow) - elif action == "view-database": - database_allow = datasette.metadata("allow", database=resource) - if database_allow is None: - return True - return actor_matches_allow(actor, database_allow) - elif action == "view-table": - database, table = resource - tables = datasette.metadata("tables", database=database) or {} - table_allow = (tables.get(table) or {}).get("allow") - if table_allow is None: - return True - return actor_matches_allow(actor, table_allow) - elif action == "view-query": - # Check if this query has a "allow" block in metadata - database, query_name = resource - queries_metadata = datasette.metadata("queries", database=database) - assert query_name in queries_metadata - if isinstance(queries_metadata[query_name], str): - return True - allow = queries_metadata[query_name].get("allow") - if allow is None: - return True - return actor_matches_allow(actor, allow) - elif action == "execute-sql": - # Use allow_sql block from database block, or from top-level - database_allow_sql = datasette.metadata("allow_sql", database=resource) - if database_allow_sql is None: - database_allow_sql = datasette.metadata("allow_sql") - if database_allow_sql is None: - return True - return actor_matches_allow(actor, database_allow_sql) + elif action == "execute-sql": + # Use allow_sql block from database block, or from top-level + database_allow_sql = datasette.metadata("allow_sql", database=resource) + if database_allow_sql is None: + database_allow_sql = datasette.metadata("allow_sql") + if database_allow_sql is None: + return True + return actor_matches_allow(actor, database_allow_sql) + + return inner diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 9fceee41..91feb49b 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -78,3 +78,8 @@ def actor_from_request(datasette, request): @hookspec def permission_allowed(datasette, actor, action, resource): "Check if actor is allowed to perfom this action - return True, False or None" + + +@hookspec +def canned_queries(datasette, database, actor): + "Return a dictonary of canned query definitions or an awaitable function that returns them" diff --git a/datasette/views/database.py b/datasette/views/database.py index 4fab2cfb..ad28fb63 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -70,7 +70,9 @@ class DatabaseView(DataView): tables.sort(key=lambda t: (t["hidden"], t["name"])) canned_queries = [] - for query in self.ds.get_canned_queries(database): + for query in ( + await self.ds.get_canned_queries(database, request.actor) + ).values(): visible, private = await check_visibility( self.ds, request.actor, "view-query", (database, query["name"]), ) diff --git 
a/datasette/views/table.py b/datasette/views/table.py index 91245293..1a55a495 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -223,7 +223,9 @@ class TableView(RowTableShared): async def post(self, request, db_name, table_and_format): # Handle POST to a canned query - canned_query = self.ds.get_canned_query(db_name, table_and_format) + canned_query = await self.ds.get_canned_query( + db_name, table_and_format, request.actor + ) assert canned_query, "You may only POST to a canned query" return await QueryView(self.ds).data( request, @@ -247,7 +249,7 @@ class TableView(RowTableShared): _next=None, _size=None, ): - canned_query = self.ds.get_canned_query(database, table) + canned_query = await self.ds.get_canned_query(database, table, request.actor) if canned_query: return await QueryView(self.ds).data( request, diff --git a/docs/plugins.rst b/docs/plugins.rst index 113e6b24..8444516c 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1031,6 +1031,73 @@ Potential use-cases: * Create database tables that a plugin needs * Validate the metadata configuration for a plugin on startup, and raise an error if it is invalid +.. _plugin_hook_canned_queries: + +canned_queries(datasette, database, actor) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + +``database`` - string + The name of the database. + +``actor`` - dictionary or None + The currently authenticated :ref:`authentication_actor`. + +Use this hook to return a dictionary of additional :ref:`canned query ` definitions for the specified database. The return value should be the same shape as the JSON described in the :ref:`canned query ` documentation. + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def canned_queries(datasette, database): + if database == "mydb": + return { + "my_query": { + "sql": "select * from my_table where id > :min_id" + } + } + +The hook can alternatively return an awaitable function that returns a dictionary. Here's an example that returns queries that have been stored in the ``saved_queries`` database table, if one exists: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def canned_queries(datasette, database): + async def inner(): + db = datasette.get_database(database) + if await db.table_exists("saved_queries"): + results = await db.execute("select name, sql from saved_queries") + return {result["name"]: { + "sql": result["sql"] + } for result in results} + return inner + +The actor parameter can be used to include the currently authenticated actor in your decision. Here's an example that returns saved queries that were saved by that actor: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def canned_queries(datasette, database, actor): + async def inner(): + db = datasette.get_database(database) + if actor is not None and await db.table_exists("saved_queries"): + results = await db.execute( + "select name, sql from saved_queries where actor_id = :id", { + "id": actor["id"] + } + ) + return {result["name"]: { + "sql": result["sql"] + } for result in results} + return inner + .. 
_plugin_hook_actor_from_request: actor_from_request(datasette, request) diff --git a/tests/fixtures.py b/tests/fixtures.py index 612bee99..9b28c283 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -40,6 +40,7 @@ EXPECTED_PLUGINS = [ "hooks": [ "actor_from_request", "asgi_wrapper", + "canned_queries", "extra_body_script", "extra_css_urls", "extra_js_urls", @@ -61,6 +62,7 @@ EXPECTED_PLUGINS = [ "hooks": [ "actor_from_request", "asgi_wrapper", + "canned_queries", "extra_js_urls", "extra_template_vars", "permission_allowed", diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index a86e3cbf..7ed26908 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -193,3 +193,12 @@ def register_routes(): @hookimpl def startup(datasette): datasette._startup_hook_fired = True + + +@hookimpl +def canned_queries(datasette, database, actor): + return { + "from_hook": "select 1, '{}' as actor_id".format( + actor["id"] if actor else "null" + ) + } diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index f4a082a0..556c8090 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -129,3 +129,17 @@ def startup(datasette): datasette._startup_hook_calculation = result.first()[0] return inner + + +@hookimpl +def canned_queries(datasette, database): + async def inner(): + return { + "from_async_hook": "select {}".format( + ( + await datasette.get_database(database).execute("select 1 + 1") + ).first()[0] + ) + } + + return inner diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index 4257806e..c36baa09 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -111,7 +111,13 @@ def test_canned_query_permissions_on_database_page(canned_write_client): query_names = [ q["name"] for q in canned_write_client.get("/data.json").json["queries"] ] - assert ["add_name", "add_name_specify_id", "update_name"] == query_names + assert [ + "add_name", + "add_name_specify_id", + "update_name", + "from_async_hook", + "from_hook", + ] == query_names # With auth shows four response = canned_write_client.get( @@ -124,6 +130,8 @@ def test_canned_query_permissions_on_database_page(canned_write_client): {"name": "add_name_specify_id", "private": False}, {"name": "delete_name", "private": True}, {"name": "update_name", "private": False}, + {"name": "from_async_hook", "private": False}, + {"name": "from_hook", "private": False}, ] == [ {"name": q["name"], "private": q["private"]} for q in response.json["queries"] ] diff --git a/tests/test_html.py b/tests/test_html.py index f9b18daa..7bc935b0 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -97,6 +97,8 @@ def test_database_page(app_client): ), ("/fixtures/pragma_cache_size", "pragma_cache_size"), ("/fixtures/neighborhood_search#fragment-goes-here", "Search neighborhoods"), + ("/fixtures/from_async_hook", "from_async_hook"), + ("/fixtures/from_hook", "from_hook"), ] == [(a["href"], a.text) for a in queries_ul.find_all("a")] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 245c60f7..4f44430e 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -591,3 +591,34 @@ async def test_startup(app_client): await app_client.ds.invoke_startup() assert app_client.ds._startup_hook_fired assert 2 == app_client.ds._startup_hook_calculation + + +def test_canned_queries(app_client): + queries = app_client.get("/fixtures.json").json["queries"] + queries_by_name = {q["name"]: q for q in queries} + assert { + "sql": "select 2", + "name": 
"from_async_hook", + "private": False, + } == queries_by_name["from_async_hook"] + assert { + "sql": "select 1, 'null' as actor_id", + "name": "from_hook", + "private": False, + } == queries_by_name["from_hook"] + + +def test_canned_queries_non_async(app_client): + response = app_client.get("/fixtures/from_hook.json?_shape=array") + assert [{"1": 1, "actor_id": "null"}] == response.json + + +def test_canned_queries_async(app_client): + response = app_client.get("/fixtures/from_async_hook.json?_shape=array") + assert [{"2": 2}] == response.json + + +def test_canned_queries_actor(app_client): + assert [{"1": 1, "actor_id": "bot"}] == app_client.get( + "/fixtures/from_hook.json?_bot=1&_shape=array" + ).json From 9216127ace8d80493f743a4ef4c469f83a3b81ce Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 16:39:43 -0700 Subject: [PATCH 0386/2113] Documentation tweak, refs #852 --- docs/plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index 8444516c..dce1bdf0 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1043,7 +1043,7 @@ canned_queries(datasette, database, actor) The name of the database. ``actor`` - dictionary or None - The currently authenticated :ref:`authentication_actor`. + The currently authenticated :ref:`actor `. Ues this hook to return a dictionary of additional :ref:`canned query ` definitions for the specified database. The return value should be the same shape as the JSON described in the :ref:`canned query ` documentation. From 0807c4200f6b31c804c476eb546ead3f875a2ecc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 16:40:45 -0700 Subject: [PATCH 0387/2113] Release notes for 0.45a1, refs #852 --- docs/changelog.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index e117663f..6f3af8ce 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,14 +4,15 @@ Changelog ========= -.. _v0_45a0: +.. _v0_45a1: -0.45a0 (2020-06-18) +0.45a1 (2020-06-18) ------------------- .. warning:: This is an **alpha** release. See :ref:`contributing_alpha_beta`. - New :ref:`plugin_hook_startup` plugin hook. (`#834 `__) +- New :ref:`plugin_hook_canned_queries` plugin hook. (`#852 `__) - Workaround for "Too many open files" error in test runs. (`#846 `__) - Respect existing ``scope["actor"]`` if already set by ASGI middleware. (`#854 `__) - New process for shipping :ref:`contributing_alpha_beta`. 
(`#807 `__) From b59b92b1b0517cf18fa748ff9d0a0bf86298dd43 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 16:52:06 -0700 Subject: [PATCH 0388/2113] Fix for tests - order was inconsistent, refs #852 --- tests/test_canned_write.py | 20 ++++++++++++-------- tests/test_html.py | 8 +++++--- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index c36baa09..e33eed69 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -108,16 +108,16 @@ def test_vary_header(canned_write_client): def test_canned_query_permissions_on_database_page(canned_write_client): # Without auth only shows three queries - query_names = [ + query_names = { q["name"] for q in canned_write_client.get("/data.json").json["queries"] - ] - assert [ + } + assert { "add_name", "add_name_specify_id", "update_name", "from_async_hook", "from_hook", - ] == query_names + } == query_names # With auth shows four response = canned_write_client.get( @@ -129,12 +129,16 @@ def test_canned_query_permissions_on_database_page(canned_write_client): {"name": "add_name", "private": False}, {"name": "add_name_specify_id", "private": False}, {"name": "delete_name", "private": True}, - {"name": "update_name", "private": False}, {"name": "from_async_hook", "private": False}, {"name": "from_hook", "private": False}, - ] == [ - {"name": q["name"], "private": q["private"]} for q in response.json["queries"] - ] + {"name": "update_name", "private": False}, + ] == sorted( + [ + {"name": q["name"], "private": q["private"]} + for q in response.json["queries"] + ], + key=lambda q: q["name"], + ) def test_canned_query_permissions(canned_write_client): diff --git a/tests/test_html.py b/tests/test_html.py index 7bc935b0..1c7dce90 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -95,11 +95,13 @@ def test_database_page(app_client): "/fixtures/%F0%9D%90%9C%F0%9D%90%A2%F0%9D%90%AD%F0%9D%90%A2%F0%9D%90%9E%F0%9D%90%AC", "𝐜𝐢𝐭𝐢𝐞𝐬", ), - ("/fixtures/pragma_cache_size", "pragma_cache_size"), - ("/fixtures/neighborhood_search#fragment-goes-here", "Search neighborhoods"), ("/fixtures/from_async_hook", "from_async_hook"), ("/fixtures/from_hook", "from_hook"), - ] == [(a["href"], a.text) for a in queries_ul.find_all("a")] + ("/fixtures/neighborhood_search#fragment-goes-here", "Search neighborhoods"), + ("/fixtures/pragma_cache_size", "pragma_cache_size"), + ] == sorted( + [(a["href"], a.text) for a in queries_ul.find_all("a")], key=lambda p: p[0] + ) def test_invalid_custom_sql(app_client): From 64cc536b89b988b17e3ab853e4c64d9706543116 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 17:03:23 -0700 Subject: [PATCH 0389/2113] Don't include prereleases in changelog badge --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 925d68d2..42eaaa81 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Datasette [![PyPI](https://img.shields.io/pypi/v/datasette.svg)](https://pypi.org/project/datasette/) -[![Changelog](https://img.shields.io/github/v/release/simonw/datasette?include_prereleases&label=changelog)](https://datasette.readthedocs.io/en/stable/changelog.html) +[![Changelog](https://img.shields.io/github/v/release/simonw/datasette?label=changelog)](https://datasette.readthedocs.io/en/stable/changelog.html) [![Python 3.x](https://img.shields.io/pypi/pyversions/datasette.svg?logo=python&logoColor=white)](https://pypi.org/project/datasette/) [![Travis 
CI](https://travis-ci.org/simonw/datasette.svg?branch=master)](https://travis-ci.org/simonw/datasette) [![Documentation Status](https://readthedocs.org/projects/datasette/badge/?version=latest)](http://datasette.readthedocs.io/en/latest/?badge=latest) From 55a6ffb93c57680e71a070416baae1129a0243b8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 19 Jun 2020 20:08:30 -0700 Subject: [PATCH 0390/2113] Link to datasette-saved-queries plugin, closes #852 --- docs/changelog.rst | 2 +- docs/ecosystem.rst | 6 +++++- docs/plugins.rst | 6 +++++- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 6f3af8ce..d580f03e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -12,7 +12,7 @@ Changelog .. warning:: This is an **alpha** release. See :ref:`contributing_alpha_beta`. - New :ref:`plugin_hook_startup` plugin hook. (`#834 `__) -- New :ref:`plugin_hook_canned_queries` plugin hook. (`#852 `__) +- New :ref:`plugin_hook_canned_queries` plugin hook. See `datasette-saved-queries `__ for an example of this hook in action. (`#852 `__) - Workaround for "Too many open files" error in test runs. (`#846 `__) - Respect existing ``scope["actor"]`` if already set by ASGI middleware. (`#854 `__) - New process for shipping :ref:`contributing_alpha_beta`. (`#807 `__) diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst index dcb5a887..f2da885c 100644 --- a/docs/ecosystem.rst +++ b/docs/ecosystem.rst @@ -157,12 +157,16 @@ datasette-leaflet-geojson `datasette-leaflet-geojson `__ looks out for columns containing GeoJSON formatted geographical information and displays them on a `Leaflet-powered `__ map. - datasette-pretty-json --------------------- `datasette-pretty-json `__ seeks out JSON values in Datasette's table browsing interface and pretty-prints them, making them easier to read. +datasette-saved-queries +----------------------- + +`datasette-saved-queries `__ lets users interactively save queries to a ``saved_queries`` table. They are then made available as additional :ref:`canned queries `. + datasette-haversine ------------------- diff --git a/docs/plugins.rst b/docs/plugins.rst index dce1bdf0..d2743419 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1028,9 +1028,11 @@ Or you can return an async function which will be awaited on startup. Use this o Potential use-cases: * Run some initialization code for the plugin -* Create database tables that a plugin needs +* Create database tables that a plugin needs on startup * Validate the metadata configuration for a plugin on startup, and raise an error if it is invalid +Example: `datasette-saved-queries `__ + .. _plugin_hook_canned_queries: canned_queries(datasette, database, actor) @@ -1098,6 +1100,8 @@ The actor parameter can be used to include the currently authenticated actor in } for result in results} return inner +Example: `datasette-saved-queries `__ + .. _plugin_hook_actor_from_request: actor_from_request(datasette, request) From d1640ba76b8f10830c56d8289f476fefde3bd1fb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 20 Jun 2020 08:48:39 -0700 Subject: [PATCH 0391/2113] Don't show prereleases on changelog badge --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 5334386f..fa5d7f87 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -6,7 +6,7 @@ datasette| .. |PyPI| image:: https://img.shields.io/pypi/v/datasette.svg :target: https://pypi.org/project/datasette/ -.. |Changelog| image:: https://img.shields.io/github/v/release/simonw/datasette?include_prereleases&label=changelog +.. |Changelog| image:: https://img.shields.io/github/v/release/simonw/datasette?label=changelog :target: https://datasette.readthedocs.io/en/stable/changelog.html .. |Python 3.x| image:: https://img.shields.io/pypi/pyversions/datasette.svg?logo=python&logoColor=white :target: https://pypi.org/project/datasette/ From 84cbf1766083a785f5ce5154d0805654a5314d10 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 20 Jun 2020 10:40:05 -0700 Subject: [PATCH 0392/2113] News: A cookiecutter template for writing Datasette plugins --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 42eaaa81..84d1dcd4 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover ## News + * 20th June 2020: [A cookiecutter template for writing Datasette plugins](https://simonwillison.net/2020/Jun/20/cookiecutter-plugins/) * 11th June 2020: [Datasette 0.44](http://datasette.readthedocs.io/en/latest/changelog.html#v0-44) - [Authentication and permissions](https://datasette.readthedocs.io/en/latest/authentication.html), [writable canned queries](https://datasette.readthedocs.io/en/latest/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more. * 28th May 2020: [Datasette 0.43](http://datasette.readthedocs.io/en/latest/changelog.html#v0-43) - Redesigned [register_output_renderer](https://datasette.readthedocs.io/en/latest/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. * 8th May 2020: [Datasette 0.42](http://datasette.readthedocs.io/en/latest/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. From e4216ff5035f57f2fb66031f105e41c3b9728bc1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 21 Jun 2020 14:55:17 -0700 Subject: [PATCH 0393/2113] Fixed rST warning --- docs/ecosystem.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst index f2da885c..7c8959dd 100644 --- a/docs/ecosystem.rst +++ b/docs/ecosystem.rst @@ -93,7 +93,7 @@ datasette-auth-tokens `datasette-auth-tokens `__ provides a mechanism for creating secret API tokens that can then be used with Datasette's :ref:`authentication` system. datasette-permissions-sql ---------------------- +------------------------- `datasette-permissions-sql `__ lets you configure Datasette permissions checks to use custom SQL queries, which means you can make permission decisions based on data contained within your databases. 
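A minimal sketch of a plugin built on the ``canned_queries()`` hook covered in the patches above - the query names and SQL are invented for illustration, and it relies on the normalization in patch 0385's ``get_canned_queries()``, which wraps plain string values as ``{"sql": "..."}`` automatically:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def canned_queries(datasette, database, actor):
        # A plain string value is normalized to {"sql": "..."} by Datasette
        queries = {"count_sqlite_master": "select count(*) from sqlite_master"}
        # The actor argument makes per-user queries possible
        if actor and actor.get("id") == "root":
            queries["root_only"] = {"sql": "select 'hello root'"}
        return queries

Installed like any other Datasette plugin, these queries would then be listed on the database page alongside any queries defined in ``metadata.json``.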
|Changelog| image:: https://img.shields.io/github/v/release/simonw/datasette?include_prereleases&label=changelog
+.. |Changelog| image:: https://img.shields.io/github/v/release/simonw/datasette?label=changelog
    :target: https://datasette.readthedocs.io/en/stable/changelog.html
 .. |Python 3.x| image:: https://img.shields.io/pypi/pyversions/datasette.svg?logo=python&logoColor=white
    :target: https://pypi.org/project/datasette/
From 84cbf1766083a785f5ce5154d0805654a5314d10 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 20 Jun 2020 10:40:05 -0700
Subject: [PATCH 0392/2113] News: A cookiecutter template for writing Datasette plugins

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index 42eaaa81..84d1dcd4 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
 ## News
 
+* 20th June 2020: [A cookiecutter template for writing Datasette plugins](https://simonwillison.net/2020/Jun/20/cookiecutter-plugins/)
 * 11th June 2020: [Datasette 0.44](http://datasette.readthedocs.io/en/latest/changelog.html#v0-44) - [Authentication and permissions](https://datasette.readthedocs.io/en/latest/authentication.html), [writable canned queries](https://datasette.readthedocs.io/en/latest/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more.
 * 28th May 2020: [Datasette 0.43](http://datasette.readthedocs.io/en/latest/changelog.html#v0-43) - Redesigned [register_output_renderer](https://datasette.readthedocs.io/en/latest/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes.
 * 8th May 2020: [Datasette 0.42](http://datasette.readthedocs.io/en/latest/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database.
From e4216ff5035f57f2fb66031f105e41c3b9728bc1 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 21 Jun 2020 14:55:17 -0700
Subject: [PATCH 0393/2113] Fixed rST warning

---
 docs/ecosystem.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst
index f2da885c..7c8959dd 100644
--- a/docs/ecosystem.rst
+++ b/docs/ecosystem.rst
@@ -93,7 +93,7 @@ datasette-auth-tokens
 `datasette-auth-tokens `__ provides a mechanism for creating secret API tokens that can then be used with Datasette's :ref:`authentication` system.
 
 datasette-permissions-sql
----------------------
+-------------------------
 
 `datasette-permissions-sql `__ lets you configure Datasette permissions checks to use custom SQL queries, which means you can make permission decisions based on data contained within your databases. 
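The warning fixed in the patch above comes from reStructuredText's rule that a section underline must be at least as long as its heading text. A check along the following lines could be run over the docs to catch these before Sphinx does. This is a minimal sketch only: the script, its regex heuristic and its output format are illustrative assumptions, not part of the Datasette repository.

.. code-block:: python

    import re
    import sys

    # An rST section underline: one punctuation character repeated for the
    # whole line, e.g. "-----" or "~~~~~".
    UNDERLINE = re.compile(r"^([=\-~^\"'#*+])\1+$")


    def short_underlines(path):
        lines = open(path, encoding="utf-8").read().splitlines()
        for i in range(1, len(lines)):
            heading, underline = lines[i - 1], lines[i]
            # Flag underlines shorter than the heading above them - the
            # condition behind Sphinx's "Title underline too short" warning
            if heading.strip() and UNDERLINE.match(underline) and len(underline) < len(heading):
                yield i + 1, heading


    if __name__ == "__main__":
        for path in sys.argv[1:]:
            for lineno, heading in short_underlines(path):
                print("{}:{}: underline too short for {!r}".format(path, lineno, heading))

Run against the pre-fix ``docs/ecosystem.rst``, a check like this would flag the ``datasette-permissions-sql`` heading, whose underline was a few dashes shorter than the heading itself.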
From 36e77e100632573e1cf907aba9462debac7928e9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 21 Jun 2020 17:33:48 -0700 Subject: [PATCH 0394/2113] Move plugin hooks docs to plugin_hooks.rst, refs #687 --- docs/index.rst | 1 + docs/plugin_hooks.rst | 888 +++++++++++++++++++++++++++++++++++++++++ docs/plugins.rst | 889 ------------------------------------------ 3 files changed, 889 insertions(+), 889 deletions(-) create mode 100644 docs/plugin_hooks.rst diff --git a/docs/index.rst b/docs/index.rst index fa5d7f87..20a55b2c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -51,6 +51,7 @@ Contents introspection custom_templates plugins + plugin_hooks internals contributing changelog diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst new file mode 100644 index 00000000..19f076b9 --- /dev/null +++ b/docs/plugin_hooks.rst @@ -0,0 +1,888 @@ +.. _plugin_hooks: + +Plugin hooks +============ + +When you implement a plugin hook you can accept any or all of the parameters that are documented as being passed to that hook. For example, you can implement a ``render_cell`` plugin hook like this even though the hook definition defines more parameters than just ``value`` and ``column``: + +.. code-block:: python + + @hookimpl + def render_cell(value, column): + if column == "stars": + return "*" * int(value) + +The full list of available plugin hooks is as follows. + +.. _plugin_hook_prepare_connection: + +prepare_connection(conn, database, datasette) +--------------------------------------------- + +``conn`` - sqlite3 connection object + The connection that is being opened + +``database`` - string + The name of the database + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` + +This hook is called when a new SQLite database connection is created. You can +use it to `register custom SQL functions `_, +aggregates and collations. For example: + +.. code-block:: python + + from datasette import hookimpl + import random + + @hookimpl + def prepare_connection(conn): + conn.create_function('random_integer', 2, random.randint) + +This registers a SQL function called ``random_integer`` which takes two +arguments and can be called like this:: + + select random_integer(1, 10); + +Examples: `datasette-jellyfish `_, `datasette-jq `_, `datasette-haversine `__, `datasette-rure `__ + +.. _plugin_hook_prepare_jinja2_environment: + +prepare_jinja2_environment(env) +------------------------------- + +``env`` - jinja2 Environment + The template environment that is being prepared + +This hook is called with the Jinja2 environment that is used to evaluate +Datasette HTML templates. You can use it to do things like `register custom +template filters `_, for +example: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def prepare_jinja2_environment(env): + env.filters['uppercase'] = lambda u: u.upper() + +You can now use this filter in your custom templates like so:: + + Table name: {{ table|uppercase }} + +.. _plugin_hook_extra_css_urls: + +extra_css_urls(template, database, table, datasette) +---------------------------------------------------- + +``template`` - string + The template that is being rendered, e.g. 
``database.html`` + +``database`` - string or None + The name of the database + +``table`` - string or None + The name of the table + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` + +Return a list of extra CSS URLs that should be included on the page. These can +take advantage of the CSS class hooks described in :ref:`customization`. + +This can be a list of URLs: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def extra_css_urls(): + return [ + 'https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css' + ] + +Or a list of dictionaries defining both a URL and an +`SRI hash `_: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def extra_css_urls(): + return [{ + 'url': 'https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css', + 'sri': 'sha384-9gVQ4dYFwwWSjIDZnLEWnxCjeSWFphJiwGPXr1jddIhOegiu1FwO5qRGvFXOdJZ4', + }] + +Examples: `datasette-cluster-map `_, `datasette-vega `_ + +.. _plugin_hook_extra_js_urls: + +extra_js_urls(template, database, table, datasette) +--------------------------------------------------- + +Same arguments as ``extra_css_urls``. + +This works in the same way as ``extra_css_urls()`` but for JavaScript. You can +return either a list of URLs or a list of dictionaries: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def extra_js_urls(): + return [{ + 'url': 'https://code.jquery.com/jquery-3.3.1.slim.min.js', + 'sri': 'sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo', + }] + +You can also return URLs to files from your plugin's ``static/`` directory, if +you have one: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def extra_js_urls(): + return [ + '/-/static-plugins/your-plugin/app.js' + ] + +Examples: `datasette-cluster-map `_, `datasette-vega `_ + +.. _plugin_hook_publish_subcommand: + +publish_subcommand(publish) +--------------------------- + +``publish`` - Click publish command group + The Click command group for the ``datasette publish`` subcommand + +This hook allows you to create new providers for the ``datasette publish`` +command. Datasette uses this hook internally to implement the default ``now`` +and ``heroku`` subcommands, so you can read +`their source `_ +to see examples of this hook in action. + +Let's say you want to build a plugin that adds a ``datasette publish my_hosting_provider --api_key=xxx mydatabase.db`` publish command. Your implementation would start like this: + +.. code-block:: python + + from datasette import hookimpl + from datasette.publish.common import add_common_publish_arguments_and_options + import click + + + @hookimpl + def publish_subcommand(publish): + @publish.command() + @add_common_publish_arguments_and_options + @click.option( + "-k", + "--api_key", + help="API key for talking to my hosting provider", + ) + def my_hosting_provider( + files, + metadata, + extra_options, + branch, + template_dir, + plugins_dir, + static, + install, + plugin_secret, + version_note, + secret, + title, + license, + license_url, + source, + source_url, + about, + about_url, + api_key, + ): + # Your implementation goes here + +Examples: `datasette-publish-fly `_, `datasette-publish-now `_ + +.. 
_plugin_hook_render_cell:
+
+render_cell(value, column, table, database, datasette)
+------------------------------------------------------
+
+Lets you customize the display of values within table cells in the HTML table view.
+
+``value`` - string, integer or None
+    The value that was loaded from the database
+
+``column`` - string
+    The name of the column being rendered
+
+``table`` - string or None
+    The name of the table - or ``None`` if this is a custom SQL query
+
+``database`` - string
+    The name of the database
+
+``datasette`` - :ref:`internals_datasette`
+    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``
+
+If your hook returns ``None``, it will be ignored. Use this to indicate that your hook is not able to custom render this particular value.
+
+If the hook returns a string, that string will be rendered in the table cell.
+
+If you want to return HTML markup you can do so by returning a ``jinja2.Markup`` object.
+
+Datasette will loop through all available ``render_cell`` hooks and display the value returned by the first one that does not return ``None``.
+
+Here is an example of a custom ``render_cell()`` plugin which looks for values that are a JSON string matching the following format::
+
+    {"href": "https://www.example.com/", "label": "Name"}
+
+If the value matches that pattern, the plugin returns an HTML link element:
+
+.. code-block:: python
+
+    from datasette import hookimpl
+    import jinja2
+    import json
+
+
+    @hookimpl
+    def render_cell(value):
+        # Render {"href": "...", "label": "..."} as link
+        if not isinstance(value, str):
+            return None
+        stripped = value.strip()
+        if not (stripped.startswith("{") and stripped.endswith("}")):
+            return None
+        try:
+            data = json.loads(value)
+        except ValueError:
+            return None
+        if not isinstance(data, dict):
+            return None
+        if set(data.keys()) != {"href", "label"}:
+            return None
+        href = data["href"]
+        if not (
+            href.startswith("/") or href.startswith("http://")
+            or href.startswith("https://")
+        ):
+            return None
+        return jinja2.Markup('<a href="{href}">{label}</a>'.format(
+            href=jinja2.escape(data["href"]),
+            label=jinja2.escape(data["label"] or "") or "&nbsp;"
+        ))
+
+Examples: `datasette-render-binary `_, `datasette-render-markdown `_
+
+.. 
_plugin_hook_extra_body_script: + +extra_body_script(template, database, table, view_name, datasette) +------------------------------------------------------------------ + +Extra JavaScript to be added to a ``") json_data = r.search(app_client.get(path).text).group(1) actual_data = json.loads(json_data) assert expected_extra_body_script == actual_data -def test_plugins_asgi_wrapper(app_client): +def test_hook_asgi_wrapper(app_client): response = app_client.get("/fixtures") assert "fixtures" == response.headers["x-databases"] -def test_plugins_extra_template_vars(restore_working_directory): +def test_hook_extra_template_vars(restore_working_directory): with make_app_client( template_dir=str(pathlib.Path(__file__).parent / "test_templates") ) as client: @@ -380,13 +380,13 @@ def test_view_names(view_names_client, path, view_name): assert "view_name:{}".format(view_name) == response.text -def test_register_output_renderer_no_parameters(app_client): +def test_hook_register_output_renderer_no_parameters(app_client): response = app_client.get("/fixtures/facetable.testnone") assert 200 == response.status assert b"Hello" == response.body -def test_register_output_renderer_all_parameters(app_client): +def test_hook_register_output_renderer_all_parameters(app_client): response = app_client.get("/fixtures/facetable.testall") assert 200 == response.status # Lots of 'at 0x103a4a690' in here - replace those so we can do @@ -436,19 +436,19 @@ def test_register_output_renderer_all_parameters(app_client): assert "pragma_cache_size" == json.loads(query_response.body)["query_name"] -def test_register_output_renderer_custom_status_code(app_client): +def test_hook_register_output_renderer_custom_status_code(app_client): response = app_client.get("/fixtures/pragma_cache_size.testall?status_code=202") assert 202 == response.status -def test_register_output_renderer_custom_content_type(app_client): +def test_hook_register_output_renderer_custom_content_type(app_client): response = app_client.get( "/fixtures/pragma_cache_size.testall?content_type=text/blah" ) assert "text/blah" == response.headers["content-type"] -def test_register_output_renderer_custom_headers(app_client): +def test_hook_register_output_renderer_custom_headers(app_client): response = app_client.get( "/fixtures/pragma_cache_size.testall?header=x-wow:1&header=x-gosh:2" ) @@ -456,7 +456,7 @@ def test_register_output_renderer_custom_headers(app_client): assert "2" == response.headers["x-gosh"] -def test_register_output_renderer_can_render(app_client): +def test_hook_register_output_renderer_can_render(app_client): response = app_client.get("/fixtures/facetable?_no_can_render=1") assert response.status == 200 links = ( @@ -492,7 +492,7 @@ def test_register_output_renderer_can_render(app_client): @pytest.mark.asyncio -async def test_prepare_jinja2_environment(app_client): +async def test_hook_prepare_jinja2_environment(app_client): template = app_client.ds.jinja_env.from_string( "Hello there, {{ a|format_numeric }}", {"a": 3412341} ) @@ -500,7 +500,7 @@ async def test_prepare_jinja2_environment(app_client): assert "Hello there, 3,412,341" == rendered -def test_publish_subcommand(): +def test_hook_publish_subcommand(): # This is hard to test properly, because publish subcommand plugins # cannot be loaded using the --plugins-dir mechanism - they need # to be installed using "pip install". 
So I'm cheating and taking @@ -509,7 +509,7 @@ def test_publish_subcommand(): assert ["cloudrun", "heroku"] == cli.publish.list_commands({}) -def test_register_facet_classes(app_client): +def test_hook_register_facet_classes(app_client): response = app_client.get( "/fixtures/compound_three_primary_keys.json?_dummy_facet=1" ) @@ -549,7 +549,7 @@ def test_register_facet_classes(app_client): ] == response.json["suggested_facets"] -def test_actor_from_request(app_client): +def test_hook_actor_from_request(app_client): app_client.get("/") # Should have no actor assert None == app_client.ds._last_request.scope["actor"] @@ -558,7 +558,7 @@ def test_actor_from_request(app_client): assert {"id": "bot"} == app_client.ds._last_request.scope["actor"] -def test_actor_from_request_async(app_client): +def test_hook_actor_from_request_async(app_client): app_client.get("/") # Should have no actor assert None == app_client.ds._last_request.scope["actor"] @@ -583,7 +583,7 @@ def test_existing_scope_actor_respected(app_client): ("no_match", None), ], ) -async def test_permission_allowed(app_client, action, expected): +async def test_hook_permission_allowed(app_client, action, expected): actual = await app_client.ds.permission_allowed( {"id": "actor"}, action, default=None ) @@ -605,20 +605,20 @@ def test_actor_json(app_client): ("/not-async/", "This was not async"), ], ) -def test_register_routes(app_client, path, body): +def test_hook_register_routes(app_client, path, body): response = app_client.get(path) assert 200 == response.status assert body == response.text -def test_register_routes_post(app_client): +def test_hook_register_routes_post(app_client): response = app_client.post("/post/", {"this is": "post data"}, csrftoken_from=True) assert 200 == response.status assert "csrftoken" in response.json assert "post data" == response.json["this is"] -def test_register_routes_csrftoken(restore_working_directory, tmpdir_factory): +def test_hook_register_routes_csrftoken(restore_working_directory, tmpdir_factory): templates = tmpdir_factory.mktemp("templates") (templates / "csrftoken_form.html").write_text( "CSRFTOKEN: {{ csrftoken() }}", "utf-8" @@ -629,13 +629,13 @@ def test_register_routes_csrftoken(restore_working_directory, tmpdir_factory): assert "CSRFTOKEN: {}".format(expected_token) == response.text -def test_register_routes_asgi(app_client): +def test_hook_register_routes_asgi(app_client): response = app_client.get("/three/") assert {"hello": "world"} == response.json assert "1" == response.headers["x-three"] -def test_register_routes_add_message(app_client): +def test_hook_register_routes_add_message(app_client): response = app_client.get("/add-message/") assert 200 == response.status assert "Added message" == response.text @@ -643,7 +643,7 @@ def test_register_routes_add_message(app_client): assert [["Hello from messages", 1]] == decoded -def test_register_routes_render_message(restore_working_directory, tmpdir_factory): +def test_hook_register_routes_render_message(restore_working_directory, tmpdir_factory): templates = tmpdir_factory.mktemp("templates") (templates / "render_message.html").write_text('{% extends "base.html" %}', "utf-8") with make_app_client(template_dir=templates) as client: @@ -654,13 +654,13 @@ def test_register_routes_render_message(restore_working_directory, tmpdir_factor @pytest.mark.asyncio -async def test_startup(app_client): +async def test_hook_startup(app_client): await app_client.ds.invoke_startup() assert app_client.ds._startup_hook_fired assert 2 == 
app_client.ds._startup_hook_calculation -def test_canned_queries(app_client): +def test_hook_canned_queries(app_client): queries = app_client.get("/fixtures.json").json["queries"] queries_by_name = {q["name"]: q for q in queries} assert { @@ -675,23 +675,23 @@ def test_canned_queries(app_client): } == queries_by_name["from_hook"] -def test_canned_queries_non_async(app_client): +def test_hook_canned_queries_non_async(app_client): response = app_client.get("/fixtures/from_hook.json?_shape=array") assert [{"1": 1, "actor_id": "null"}] == response.json -def test_canned_queries_async(app_client): +def test_hook_canned_queries_async(app_client): response = app_client.get("/fixtures/from_async_hook.json?_shape=array") assert [{"2": 2}] == response.json -def test_canned_queries_actor(app_client): +def test_hook_canned_queries_actor(app_client): assert [{"1": 1, "actor_id": "bot"}] == app_client.get( "/fixtures/from_hook.json?_bot=1&_shape=array" ).json -def test_register_magic_parameters(restore_working_directory): +def test_hook_register_magic_parameters(restore_working_directory): with make_app_client( extra_databases={"data.db": "create table logs (line text)"}, metadata={ @@ -719,7 +719,7 @@ def test_register_magic_parameters(restore_working_directory): assert 4 == new_uuid.count("-") -def test_forbidden(restore_working_directory): +def test_hook_forbidden(restore_working_directory): with make_app_client( extra_databases={"data2.db": "create table logs (line text)"}, metadata={"allow": {}}, From 3a4c8ed36aa97211e46849d32a09f2f386f342dd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 16 Aug 2020 11:09:53 -0700 Subject: [PATCH 0505/2113] Added columns argument to various extra_ plugin hooks, closes #938 --- datasette/app.py | 5 +- datasette/hookspecs.py | 12 +- docs/plugin_hooks.rst | 254 +++++++++++++++++-------------------- tests/plugins/my_plugin.py | 13 +- tests/test_plugins.py | 25 +++- 5 files changed, 159 insertions(+), 150 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 180ba246..2185a3ab 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -713,6 +713,7 @@ class Datasette: template=template.name, database=context.get("database"), table=context.get("table"), + columns=context.get("columns"), view_name=view_name, request=request, datasette=self, @@ -729,6 +730,7 @@ class Datasette: template=template.name, database=context.get("database"), table=context.get("table"), + columns=context.get("columns"), view_name=view_name, request=request, datasette=self, @@ -779,9 +781,10 @@ class Datasette: template=template.name, database=context.get("database"), table=context.get("table"), - datasette=self, + columns=context.get("columns"), view_name=view_name, request=request, + datasette=self, ): if callable(hook): hook = hook() diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 0e9c20cf..f7e90e4e 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -26,22 +26,26 @@ def prepare_jinja2_environment(env): @hookspec -def extra_css_urls(template, database, table, view_name, request, datasette): +def extra_css_urls(template, database, table, columns, view_name, request, datasette): "Extra CSS URLs added by this plugin" @hookspec -def extra_js_urls(template, database, table, view_name, request, datasette): +def extra_js_urls(template, database, table, columns, view_name, request, datasette): "Extra JavaScript URLs added by this plugin" @hookspec -def extra_body_script(template, database, table, view_name, request, datasette): +def 
extra_body_script( + template, database, table, columns, view_name, request, datasette +): "Extra JavaScript code to be included in ") json_data = r.search(app_client.get(path).text).group(1) actual_data = json.loads(json_data) @@ -286,6 +308,7 @@ def test_hook_extra_template_vars(restore_working_directory): assert { "template": "show_json.html", "scope_path": "/-/metadata", + "columns": None, } == extra_template_vars extra_template_vars_from_awaitable = json.loads( Soup(response.body, "html.parser") From 8e7e6458a6787a06a4488798bd643dd7728b8a5b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 16 Aug 2020 11:24:39 -0700 Subject: [PATCH 0506/2113] Fix bug with ?_nl=on and binary data, closes #914 --- datasette/renderer.py | 2 +- tests/fixtures.py | 3 ++- tests/test_api.py | 31 ++++++++++++++++++++++++++++++- tests/test_html.py | 9 +++++++-- 4 files changed, 40 insertions(+), 5 deletions(-) diff --git a/datasette/renderer.py b/datasette/renderer.py index 3f921fe7..27a5092f 100644 --- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -84,7 +84,7 @@ def json_renderer(args, data, view_name): # Handle _nl option for _shape=array nl = args.get("_nl", "") if nl and shape == "array": - body = "\n".join(json.dumps(item) for item in data) + body = "\n".join(json.dumps(item, cls=CustomJSONEncoder) for item in data) content_type = "text/plain" else: body = json.dumps(data, cls=CustomJSONEncoder) diff --git a/tests/fixtures.py b/tests/fixtures.py index 139eff83..5bd063d9 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -663,7 +663,8 @@ CREATE VIEW searchable_view_configured_by_metadata AS ) ) TABLE_PARAMETERIZED_SQL = [ - ("insert into binary_data (data) values (?);", [b"this is binary data"]) + ("insert into binary_data (data) values (?);", [b"\x15\x1c\x02\xc7\xad\x05\xfe"]), + ("insert into binary_data (data) values (?);", [b"\x15\x1c\x03\xc7\xad\x05\xfe"]), ] EXTRA_DATABASE_SQL = """ diff --git a/tests/test_api.py b/tests/test_api.py index 1f93c1a7..22fa87d4 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -105,7 +105,7 @@ def test_database_page(app_client): "name": "binary_data", "columns": ["data"], "primary_keys": [], - "count": 1, + "count": 2, "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, @@ -1793,3 +1793,32 @@ def test_null_foreign_keys_are_not_expanded(app_client): def test_inspect_file_used_for_count(app_client_immutable_and_inspect_file): response = app_client_immutable_and_inspect_file.get("/fixtures/sortable.json") assert response.json["filtered_table_rows_count"] == 100 + + +@pytest.mark.parametrize( + "path,expected_json,expected_text", + [ + ( + "/fixtures/binary_data.json?_shape=array", + [ + {"rowid": 1, "data": {"$base64": True, "encoded": "FRwCx60F/g=="}}, + {"rowid": 2, "data": {"$base64": True, "encoded": "FRwDx60F/g=="}}, + ], + None, + ), + ( + "/fixtures/binary_data.json?_shape=array&_nl=on", + None, + ( + '{"rowid": 1, "data": {"$base64": true, "encoded": "FRwCx60F/g=="}}\n' + '{"rowid": 2, "data": {"$base64": true, "encoded": "FRwDx60F/g=="}}' + ), + ), + ], +) +def test_binary_data_in_json(app_client, path, expected_json, expected_text): + response = app_client.get(path) + if expected_json: + assert response.json == expected_json + else: + assert response.text == expected_text diff --git a/tests/test_html.py b/tests/test_html.py index 89aa4d06..1a12b3ce 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1134,8 +1134,13 @@ def test_binary_data_display(app_client): [ '1', '1', - 
'<Binary\xa0data:\xa019\xa0bytes>', - ] + '<Binary\xa0data:\xa07\xa0bytes>', + ], + [ + '2', + '2', + '<Binary\xa0data:\xa07\xa0bytes>', + ], ] assert expected_tds == [ [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") From 52eabb019d4051084b21524bd0fd9c2731126985 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 16 Aug 2020 11:56:31 -0700 Subject: [PATCH 0507/2113] Release 0.48 Refs #939, #938, #935, #914 --- README.md | 1 + docs/changelog.rst | 12 ++++++++++++ docs/internals.rst | 2 ++ 3 files changed, 15 insertions(+) diff --git a/README.md b/README.md index 9b49cc14..ee3246a5 100644 --- a/README.md +++ b/README.md @@ -23,6 +23,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover ## News + * 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. * 11th August 2020: [Datasette 0.47](https://docs.datasette.io/en/stable/changelog.html#v0-47) - Datasette can now be installed using Homebrew! `brew install simonw/datasette/datasette`. Also new: `datasette install name-of-plugin` and `datasette uninstall name-of-plugin` commands, and `datasette --get '/-/versions.json'` to output the result of Datasette HTTP calls on the command-line. * 9th August 2020: [Datasette 0.46](https://docs.datasette.io/en/stable/changelog.html#v0-46) - security fix relating to CSRF protection for writable canned queries, a new logo, new debugging tools, improved file downloads and more. * 6th August 2020: [GraphQL in Datasette with the new datasette-graphql plugin](https://simonwillison.net/2020/Aug/7/datasette-graphql/) diff --git a/docs/changelog.rst b/docs/changelog.rst index bf53b6f3..d18dae80 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,18 @@ Changelog ========= +.. _v0_48: + +0.48 (2020-08-16) +----------------- + +- Datasette documentation now lives at `docs.datasette.io `__. +- ``db.is_mutable`` property is now documented and tested, see :ref:`internals_database_introspection`. +- The ``extra_template_vars``, ``extra_css_urls``, ``extra_js_urls`` and ``extra_body_script`` plugin hooks now all accept the same arguments. See :ref:`plugin_hook_extra_template_vars` for details. (`#939 `__) +- Those hooks now accept a new ``columns`` argument detailing the table columns that will be rendered on that page. (`#938 `__) +- Fixed bug where plugins calling ``db.execute_write_fn()`` could hang Datasette if the connection failed. (`#935 `__) +- Fixed bug with the ``?_nl=on`` output option and binary data. (`#914 `__) + .. _v0_47_3: 0.47.3 (2020-08-15) diff --git a/docs/internals.rst b/docs/internals.rst index f8d4a136..ff7e883c 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -466,6 +466,8 @@ Here's an example of ``block=True`` in action: except Exception as e: print("An error occurred:", e) +.. 
_internals_database_introspection: + Database introspection ---------------------- From 5e0b72247ecab4ce0fcec599b77a83d73a480872 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 17 Aug 2020 22:09:34 -0700 Subject: [PATCH 0508/2113] Run CI on GitHub Actions, not Travis * Run CI on GitHub Actions, not Travis - refs #940 * Update documentation refs to Travis * Release action now runs parallel tests, then pushes to PyPI, then Docker Hub --- .dockerignore | 1 - .github/workflows/publish.yml | 72 +++++++++++++++++++++++++++++++++++ .github/workflows/test.yml | 29 ++++++++++++++ .travis.yml | 47 ----------------------- README.md | 2 +- docs/contributing.rst | 2 +- docs/index.rst | 4 +- setup.py | 2 +- 8 files changed, 106 insertions(+), 53 deletions(-) create mode 100644 .github/workflows/publish.yml create mode 100644 .github/workflows/test.yml delete mode 100644 .travis.yml diff --git a/.dockerignore b/.dockerignore index 938173e9..490f509e 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,7 +3,6 @@ .eggs .gitignore .ipynb_checkpoints -.travis.yml build *.spec *.egg-info diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000..4e554eda --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,72 @@ +name: Publish Python Package + +on: + release: + types: [created] + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.6, 3.7, 3.8] + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - uses: actions/cache@v2 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install dependencies + run: | + pip install -e '.[test]' + - name: Run tests + run: | + pytest + deploy: + runs-on: ubuntu-latest + needs: [test] + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.8' + - uses: actions/cache@v2 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-publish-pip- + - name: Install dependencies + run: | + pip install setuptools wheel twine + - name: Publish + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} + run: | + python setup.py sdist bdist_wheel + twine upload dist/* + deploy_docker: + runs-on: ubuntu-latest + needs: [deploy] + steps: + - uses: actions/checkout@v2 + - name: Build and push to Docker Hub + env: + DOCKER_USER: ${{ secrets.DOCKER_USER }} + DOCKER_PASS: ${{ secrets.DOCKER_PASS }} + run: |- + docker login -u $DOCKER_USER -p $DOCKER_PASS + export REPO=datasetteproject/datasette + docker build -f Dockerfile -t $REPO::${GITHUB_REF#refs/tags/} . 
+ docker tag $REPO::${GITHUB_REF#refs/tags/} $REPO:latest + docker push $REPO diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000..74e56e13 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,29 @@ +name: Test + +on: [push] + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.6, 3.7, 3.8] + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - uses: actions/cache@v2 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install dependencies + run: | + pip install -e '.[test]' + - name: Run tests + run: | + pytest diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 181bc3f3..00000000 --- a/.travis.yml +++ /dev/null @@ -1,47 +0,0 @@ -language: python -dist: xenial - -branches: - except: - - master - -# 3.6 is listed first so it gets used for the later build stages -python: - - "3.6" - - "3.7" - - "3.8" - -# Executed for 3.5 AND 3.5 as the first "test" stage: -script: - - pip install -U pip wheel - - pip install .[test] - - pytest - -cache: - directories: - - $HOME/.cache/pip - -# This defines further stages that execute after the tests -jobs: - include: - - stage: release tagged version - if: tag IS present - python: 3.6 - deploy: - - provider: pypi - user: simonw - distributions: "sdist bdist_wheel" - password: ${PYPI_PASSWORD} - on: - branch: master - tags: true - - stage: publish docker image - if: (tag IS present) AND NOT (tag =~ [ab]) - python: 3.6 - script: - # Build and release to Docker Hub - - docker login -u $DOCKER_USER -p $DOCKER_PASS - - export REPO=datasetteproject/datasette - - docker build -f Dockerfile -t $REPO:$TRAVIS_TAG . - - docker tag $REPO:$TRAVIS_TAG $REPO:latest - - docker push $REPO diff --git a/README.md b/README.md index ee3246a5..38ea7f79 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ [![PyPI](https://img.shields.io/pypi/v/datasette.svg)](https://pypi.org/project/datasette/) [![Changelog](https://img.shields.io/github/v/release/simonw/datasette?label=changelog)](https://docs.datasette.io/en/stable/changelog.html) [![Python 3.x](https://img.shields.io/pypi/pyversions/datasette.svg?logo=python&logoColor=white)](https://pypi.org/project/datasette/) -[![Travis CI](https://travis-ci.org/simonw/datasette.svg?branch=main)](https://travis-ci.org/simonw/datasette) +[![Tests](https://github.com/simonw/datasette/workflows/Test/badge.svg)](https://github.com/simonw/datasette/actions?query=workflow%3ATest) [![Documentation Status](https://readthedocs.org/projects/datasette/badge/?version=latest)](https://docs.datasette.io/en/latest/?badge=latest) [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/simonw/datasette/blob/main/LICENSE) [![docker: datasette](https://img.shields.io/badge/docker-datasette-blue)](https://hub.docker.com/r/datasetteproject/datasette) diff --git a/docs/contributing.rst b/docs/contributing.rst index 118146cf..95663dbc 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -126,7 +126,7 @@ Now browse to ``http://localhost:8000/`` to view the documentation. Any edits yo Release process --------------- -Datasette releases are performed using tags. 
When a new version tag is pushed to GitHub, a `Travis CI task `__ will perform the following: +Datasette releases are performed using tags. When a new release is published on GitHub, a `GitHub Action workflow `__ will perform the following: * Run the unit tests against all supported Python versions. If the tests pass... * Build a Docker image of the release and push a tag to https://hub.docker.com/r/datasetteproject/datasette diff --git a/docs/index.rst b/docs/index.rst index f9f2f0bb..946fa542 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -10,8 +10,8 @@ datasette| :target: https://docs.datasette.io/en/stable/changelog.html .. |Python 3.x| image:: https://img.shields.io/pypi/pyversions/datasette.svg?logo=python&logoColor=white :target: https://pypi.org/project/datasette/ -.. |Travis CI| image:: https://travis-ci.org/simonw/datasette.svg?branch=main - :target: https://travis-ci.org/simonw/datasette +.. |Tests| image:: https://github.com/simonw/datasette/workflows/Test/badge.svg + :target: https://github.com/simonw/datasette/actions?query=workflow%3ATest .. |License| image:: https://img.shields.io/badge/license-Apache%202.0-blue.svg :target: https://github.com/simonw/datasette/blob/main/LICENSE .. |docker: datasette| image:: https://img.shields.io/badge/docker-datasette-blue diff --git a/setup.py b/setup.py index bbd0aa8b..d9526149 100644 --- a/setup.py +++ b/setup.py @@ -38,7 +38,7 @@ setup( "Live demo": "https://latest.datasette.io/", "Source code": "https://github.com/simonw/datasette", "Issues": "https://github.com/simonw/datasette/issues", - "CI": "https://travis-ci.org/simonw/datasette", + "CI": "https://github.com/simonw/datasette/actions?query=workflow%3ATest", }, packages=find_packages(exclude=("tests",)), package_data={"datasette": ["templates/*.html"]}, From b21ed237ab940768574c834aa5a7130724bd3a2d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 18 Aug 2020 13:49:13 -0700 Subject: [PATCH 0509/2113] publish heroku now deploys with Python 3.8.5 --- datasette/publish/heroku.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index 6cda68da..24393b90 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -170,7 +170,7 @@ def temporary_heroku_directory( if metadata_content: open("metadata.json", "w").write(json.dumps(metadata_content, indent=2)) - open("runtime.txt", "w").write("python-3.8.3") + open("runtime.txt", "w").write("python-3.8.5") if branch: install = [ From 69033c6ec4a76d720e5c866aaa43b175c5ec1d8b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 19 Aug 2020 10:20:41 -0700 Subject: [PATCH 0510/2113] datasette install --upgrade option, closes #945 --- datasette/cli.py | 11 +++++++++-- docs/plugins.rst | 10 +++++++++- tests/test_cli.py | 9 +++++++++ 3 files changed, 27 insertions(+), 3 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index f3455f72..8dbc97c4 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -235,9 +235,16 @@ def package( @cli.command() @click.argument("packages", nargs=-1, required=True) -def install(packages): +@click.option( + "-U", "--upgrade", is_flag=True, help="Upgrade packages to latest version" +) +def install(packages, upgrade): "Install Python packages - e.g. 
Datasette plugins - into the same environment as Datasette" - sys.argv = ["pip", "install"] + list(packages) + args = ["pip", "install"] + if upgrade: + args += ["--upgrade"] + args += list(packages) + sys.argv = args run_module("pip", run_name="__main__") diff --git a/docs/plugins.rst b/docs/plugins.rst index e67c77b3..1c0dd588 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -43,7 +43,15 @@ You can uninstall plugins with ``datasette uninstall``:: datasette uninstall datasette-vega -These ommands are thin wrappers around ``pip install`` and ``pip uninstall``, which ensure they run ``pip`` in the same virtual environment as Datasette itself. +You can upgrade plugins with ``datasette install --upgrade`` or ``datasette install -U``:: + + datasette install -U datasette-vega + +This command can also be used to upgrade Datasette itself to the latest released version:: + + datasette install -U datasette + +These commands are thin wrappers around ``pip install`` and ``pip uninstall``, which ensure they run ``pip`` in the same virtual environment as Datasette itself. One-off plugins using --plugins-dir ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests/test_cli.py b/tests/test_cli.py index 38bb8834..dc5229cd 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -125,6 +125,15 @@ def test_install(run_module): ] +@pytest.mark.parametrize("flag", ["-U", "--upgrade"]) +@mock.patch("datasette.cli.run_module") +def test_install_upgrade(run_module, flag): + runner = CliRunner() + runner.invoke(cli, ["install", flag, "datasette"]) + run_module.assert_called_once_with("pip", run_name="__main__") + assert sys.argv == ["pip", "install", "--upgrade", "datasette"] + + @mock.patch("datasette.cli.run_module") def test_uninstall(run_module): runner = CliRunner() From 86aefc39c5aca01b00dbc57ba386a6743c21fb46 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 19 Aug 2020 10:22:33 -0700 Subject: [PATCH 0511/2113] Fixed undefined reference in index.rst --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 946fa542..db87f029 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,7 +1,7 @@ Datasette ========= -|PyPI| |Changelog| |Python 3.x| |Travis CI| |License| |docker: +|PyPI| |Changelog| |Python 3.x| |Tests| |License| |docker: datasette| .. 
|PyPI| image:: https://img.shields.io/pypi/v/datasette.svg From 799ecae94824640bdff21f86997f69844048d5c3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Aug 2020 21:02:50 -0700 Subject: [PATCH 0512/2113] register_output_renderer can now return Response, closes #953 --- datasette/views/base.py | 18 +++++++++++------- docs/plugin_hooks.rst | 18 +++++++++--------- tests/plugins/register_output_renderer.py | 8 ++++++++ tests/test_html.py | 2 ++ tests/test_plugins.py | 12 ++++++++++++ 5 files changed, 42 insertions(+), 16 deletions(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index a1f38f21..fa730af8 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -455,13 +455,17 @@ class DataView(BaseView): result = await result if result is None: raise NotFound("No data") - - r = Response( - body=result.get("body"), - status=result.get("status_code", 200), - content_type=result.get("content_type", "text/plain"), - headers=result.get("headers"), - ) + if isinstance(result, dict): + r = Response( + body=result.get("body"), + status=result.get("status_code", 200), + content_type=result.get("content_type", "text/plain"), + headers=result.get("headers"), + ) + elif isinstance(result, Response): + r = result + else: + assert False, "{} should be dict or Response".format(result) else: extras = {} if callable(extra_template_data): diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 96a1cd7f..fc710a2b 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -455,7 +455,9 @@ When a request is received, the ``"render"`` callback function is called with ze ``view_name`` - string The name of the current view being called. ``index``, ``database``, ``table``, and ``row`` are the most important ones. -The callback function can return ``None``, if it is unable to render the data, or a dictionary with the following keys: +The callback function can return ``None``, if it is unable to render the data, or a :ref:`internals_response` that will be returned to the caller. + +It can also return a dictionary with the following keys. This format is **deprecated** as-of Datasette 0.49 and will be removed by Datasette 1.0. ``body`` - string or bytes, optional The response body, default empty @@ -474,9 +476,7 @@ A simple example of an output renderer callback function: .. 
code-block:: python def render_demo(): - return { - "body": "Hello World" - } + return Response.text("Hello World") Here is a more complex example: @@ -490,11 +490,11 @@ Here is a more complex example: lines.append("=" * len(first_row)) for row in rows: lines.append(" | ".join(row)) - return { - "body": "\n".join(lines), - "content_type": "text/plain; charset=utf-8", - "headers": {"x-sqlite-version": result.first()[0]}, - } + return Response( + "\n".join(lines), + content_type="text/plain; charset=utf-8", + headers={"x-sqlite-version": result.first()[0]} + ) And here is an example ``can_render`` function which returns ``True`` only if the query results contain the columns ``atom_id``, ``atom_title`` and ``atom_updated``: diff --git a/tests/plugins/register_output_renderer.py b/tests/plugins/register_output_renderer.py index 82b60d01..cfe15215 100644 --- a/tests/plugins/register_output_renderer.py +++ b/tests/plugins/register_output_renderer.py @@ -1,4 +1,5 @@ from datasette import hookimpl +from datasette.utils.asgi import Response import json @@ -56,6 +57,12 @@ def render_test_no_parameters(): return {"body": "Hello"} +async def render_response(request): + if request.args.get("_broken"): + return "this should break" + return Response.json({"this_is": "json"}) + + @hookimpl def register_output_renderer(datasette): return [ @@ -65,4 +72,5 @@ def register_output_renderer(datasette): "can_render": can_render, }, {"extension": "testnone", "callback": render_test_no_parameters}, + {"extension": "testresponse", "render": render_response}, ] diff --git a/tests/test_html.py b/tests/test_html.py index 1a12b3ce..aec4db1d 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -559,6 +559,7 @@ def test_table_csv_json_export_interface(app_client): "simple_primary_key.json?id__gt=2", "simple_primary_key.testall?id__gt=2", "simple_primary_key.testnone?id__gt=2", + "simple_primary_key.testresponse?id__gt=2", "simple_primary_key.csv?id__gt=2&_size=max", "#export", ] @@ -597,6 +598,7 @@ def test_csv_json_export_links_include_labels_if_foreign_keys(app_client): "facetable.json?_labels=on", "facetable.testall?_labels=on", "facetable.testnone?_labels=on", + "facetable.testresponse?_labels=on", "facetable.csv?_labels=on&_size=max", "#export", ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index c535810c..f2017f07 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -479,6 +479,18 @@ def test_hook_register_output_renderer_custom_headers(app_client): assert "2" == response.headers["x-gosh"] +def test_hook_register_output_renderer_returning_response(app_client): + response = app_client.get("/fixtures/facetable.testresponse") + assert 200 == response.status + assert response.json == {"this_is": "json"} + + +def test_hook_register_output_renderer_returning_broken_value(app_client): + response = app_client.get("/fixtures/facetable.testresponse?_broken=1") + assert 500 == response.status + assert "this should break should be dict or Response" in response.text + + def test_hook_register_output_renderer_can_render(app_client): response = app_client.get("/fixtures/facetable?_no_can_render=1") assert response.status == 200 From 7178126d902e2cfca606be0b0cff96c6c679c5b8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 28 Aug 2020 16:12:47 -0700 Subject: [PATCH 0513/2113] Release notes for 0.49a0 Refs #953, #945 --- docs/changelog.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index d18dae80..74426f52 100644 --- 
a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,17 @@ Changelog ========= +.. _v0_49a0: + +0.49a0 (2020-08-28) +------------------- + +.. warning:: This is an **alpha** release. See :ref:`contributing_alpha_beta`. + +- ``register_output_renderer()`` render functions can now return a ``Response``. (`#953 `__) +- New ``--upgrade`` option for ``datasette install``. (`#945 `__) +- ``datasette publish heroku`` now deploys using Python 3.8.5 + .. _v0_48: 0.48 (2020-08-16) From c36e287d71d68ecb2a45e9808eede15f19f931fb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 28 Aug 2020 18:18:52 -0700 Subject: [PATCH 0514/2113] Don't deploy alpha/betas to Docker Hub Refs #956 --- .github/workflows/publish.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 4e554eda..e538a463 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -58,6 +58,8 @@ jobs: deploy_docker: runs-on: ubuntu-latest needs: [deploy] + if: | + !(contains(github.ref, "a") || contains(github.ref, "b")) steps: - uses: actions/checkout@v2 - name: Build and push to Docker Hub From 44cf424a94a85b74552075272660bb96a7432661 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 28 Aug 2020 18:33:05 -0700 Subject: [PATCH 0515/2113] Remove double colon, refs #956 --- .github/workflows/publish.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index e538a463..1a94a6b3 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -69,6 +69,6 @@ jobs: run: |- docker login -u $DOCKER_USER -p $DOCKER_PASS export REPO=datasetteproject/datasette - docker build -f Dockerfile -t $REPO::${GITHUB_REF#refs/tags/} . - docker tag $REPO::${GITHUB_REF#refs/tags/} $REPO:latest + docker build -f Dockerfile -t $REPO:${GITHUB_REF#refs/tags/} . 
+ docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest docker push $REPO From 9dbbfa1f0b5cf07c91ba4c8d7b0145cf0ed4cf0f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 30 Aug 2020 10:39:16 -0700 Subject: [PATCH 0516/2113] Upgrade CodeMirror to 5.57.0, refs #948 --- datasette/static/codemirror-5.31.0-min.css | 2 - datasette/static/codemirror-5.31.0-sql.min.js | 1 - datasette/static/codemirror-5.31.0.js | 9659 ----------------- datasette/static/codemirror-5.57.0-sql.min.js | 5 + datasette/static/codemirror-5.57.0.min.css | 1 + datasette/static/codemirror-5.57.0.min.js | 11 + datasette/templates/_codemirror.html | 6 +- 7 files changed, 20 insertions(+), 9665 deletions(-) delete mode 100644 datasette/static/codemirror-5.31.0-min.css delete mode 100644 datasette/static/codemirror-5.31.0-sql.min.js delete mode 100644 datasette/static/codemirror-5.31.0.js create mode 100644 datasette/static/codemirror-5.57.0-sql.min.js create mode 100644 datasette/static/codemirror-5.57.0.min.css create mode 100644 datasette/static/codemirror-5.57.0.min.js diff --git a/datasette/static/codemirror-5.31.0-min.css b/datasette/static/codemirror-5.31.0-min.css deleted file mode 100644 index 7e162037..00000000 --- a/datasette/static/codemirror-5.31.0-min.css +++ /dev/null @@ -1,2 +0,0 @@ -.CodeMirror{font-family:monospace;height:300px;color:#000;direction:ltr}.CodeMirror-lines{padding:4px 0}.CodeMirror pre{padding:0 4px}.CodeMirror-gutter-filler,.CodeMirror-scrollbar-filler{background-color:#fff}.CodeMirror-gutters{border-right:1px solid #ddd;background-color:#f7f7f7;white-space:nowrap}.CodeMirror-linenumber{padding:0 3px 0 5px;min-width:20px;text-align:right;color:#999;white-space:nowrap}.CodeMirror-guttermarker{color:#000}.CodeMirror-guttermarker-subtle{color:#999}.CodeMirror-cursor{border-left:1px solid #000;border-right:none;width:0}.CodeMirror div.CodeMirror-secondarycursor{border-left:1px solid silver}.cm-fat-cursor .CodeMirror-cursor{width:auto;border:0!important;background:#7e7}.cm-fat-cursor div.CodeMirror-cursors{z-index:1}.cm-fat-cursor-mark{background-color:rgba(20,255,20,.5);-webkit-animation:blink 1.06s steps(1) infinite;-moz-animation:blink 1.06s steps(1) infinite;animation:blink 1.06s steps(1) infinite}.cm-animate-fat-cursor{width:auto;border:0;-webkit-animation:blink 1.06s steps(1) infinite;-moz-animation:blink 1.06s steps(1) infinite;animation:blink 1.06s steps(1) infinite;background-color:#7e7}@-moz-keyframes blink{50%{background-color:transparent}}@-webkit-keyframes blink{50%{background-color:transparent}}@keyframes blink{50%{background-color:transparent}}.cm-tab{display:inline-block;text-decoration:inherit}.CodeMirror-rulers{position:absolute;left:0;right:0;top:-50px;bottom:-20px;overflow:hidden}.CodeMirror-ruler{border-left:1px solid #ccc;top:0;bottom:0;position:absolute}.cm-s-default .cm-header{color:#00f}.cm-s-default .cm-quote{color:#090}.cm-negative{color:#d44}.cm-positive{color:#292}.cm-header,.cm-strong{font-weight:700}.cm-em{font-style:italic}.cm-link{text-decoration:underline}.cm-strikethrough{text-decoration:line-through}.cm-s-default .cm-keyword{color:#708}.cm-s-default .cm-atom{color:#219}.cm-s-default .cm-number{color:#164}.cm-s-default .cm-def{color:#00f}.cm-s-default .cm-variable-2{color:#05a}.cm-s-default .cm-type,.cm-s-default .cm-variable-3{color:#085}.cm-s-default .cm-comment{color:#a50}.cm-s-default .cm-string{color:#a11}.cm-s-default .cm-string-2{color:#f50}.cm-s-default .cm-meta{color:#555}.cm-s-default .cm-qualifier{color:#555}.cm-s-default 
.cm-builtin{color:#30a}.cm-s-default .cm-bracket{color:#997}.cm-s-default .cm-tag{color:#170}.cm-s-default .cm-attribute{color:#00c}.cm-s-default .cm-hr{color:#999}.cm-s-default .cm-link{color:#00c}.cm-s-default .cm-error{color:red}.cm-invalidchar{color:red}.CodeMirror-composing{border-bottom:2px solid}div.CodeMirror span.CodeMirror-matchingbracket{color:#0f0}div.CodeMirror span.CodeMirror-nonmatchingbracket{color:#f22}.CodeMirror-matchingtag{background:rgba(255,150,0,.3)}.CodeMirror-activeline-background{background:#e8f2ff}.CodeMirror{position:relative;overflow:hidden;background:#fff}.CodeMirror-scroll{overflow:scroll!important;margin-bottom:-30px;margin-right:-30px;padding-bottom:30px;height:100%;outline:0;position:relative}.CodeMirror-sizer{position:relative;border-right:30px solid transparent}.CodeMirror-gutter-filler,.CodeMirror-hscrollbar,.CodeMirror-scrollbar-filler,.CodeMirror-vscrollbar{position:absolute;z-index:6;display:none}.CodeMirror-vscrollbar{right:0;top:0;overflow-x:hidden;overflow-y:scroll}.CodeMirror-hscrollbar{bottom:0;left:0;overflow-y:hidden;overflow-x:scroll}.CodeMirror-scrollbar-filler{right:0;bottom:0}.CodeMirror-gutter-filler{left:0;bottom:0}.CodeMirror-gutters{position:absolute;left:0;top:0;min-height:100%;z-index:3}.CodeMirror-gutter{white-space:normal;height:100%;display:inline-block;vertical-align:top;margin-bottom:-30px}.CodeMirror-gutter-wrapper{position:absolute;z-index:4;background:0 0!important;border:none!important}.CodeMirror-gutter-background{position:absolute;top:0;bottom:0;z-index:4}.CodeMirror-gutter-elt{position:absolute;cursor:default;z-index:4}.CodeMirror-gutter-wrapper ::selection{background-color:transparent}.CodeMirror-gutter-wrapper ::-moz-selection{background-color:transparent}.CodeMirror-lines{cursor:text;min-height:1px}.CodeMirror pre{-moz-border-radius:0;-webkit-border-radius:0;border-radius:0;border-width:0;background:0 0;font-family:inherit;font-size:inherit;margin:0;white-space:pre;word-wrap:normal;line-height:inherit;color:inherit;z-index:2;position:relative;overflow:visible;-webkit-tap-highlight-color:transparent;-webkit-font-variant-ligatures:contextual;font-variant-ligatures:contextual}.CodeMirror-wrap pre{word-wrap:break-word;white-space:pre-wrap;word-break:normal}.CodeMirror-linebackground{position:absolute;left:0;right:0;top:0;bottom:0;z-index:0}.CodeMirror-linewidget{position:relative;z-index:2;overflow:auto}.CodeMirror-rtl pre{direction:rtl}.CodeMirror-code{outline:0}.CodeMirror-gutter,.CodeMirror-gutters,.CodeMirror-linenumber,.CodeMirror-scroll,.CodeMirror-sizer{-moz-box-sizing:content-box;box-sizing:content-box}.CodeMirror-measure{position:absolute;width:100%;height:0;overflow:hidden;visibility:hidden}.CodeMirror-cursor{position:absolute;pointer-events:none}.CodeMirror-measure pre{position:static}div.CodeMirror-cursors{visibility:hidden;position:relative;z-index:3}div.CodeMirror-dragcursors{visibility:visible}.CodeMirror-focused div.CodeMirror-cursors{visibility:visible}.CodeMirror-selected{background:#d9d9d9}.CodeMirror-focused .CodeMirror-selected{background:#d7d4f0}.CodeMirror-crosshair{cursor:crosshair}.CodeMirror-line::selection,.CodeMirror-line>span::selection,.CodeMirror-line>span>span::selection{background:#d7d4f0}.CodeMirror-line::-moz-selection,.CodeMirror-line>span::-moz-selection,.CodeMirror-line>span>span::-moz-selection{background:#d7d4f0}.cm-searching{background-color:#ffa;background-color:rgba(255,255,0,.4)}.cm-force-border{padding-right:.1px}@media print{.CodeMirror 
div.CodeMirror-cursors{visibility:hidden}}.cm-tab-wrap-hack:after{content:''}span.CodeMirror-selectedtext{background:0 0} -/*# sourceMappingURL=codemirror.min.css.map */ \ No newline at end of file diff --git a/datasette/static/codemirror-5.31.0-sql.min.js b/datasette/static/codemirror-5.31.0-sql.min.js deleted file mode 100644 index 1f05c0d0..00000000 --- a/datasette/static/codemirror-5.31.0-sql.min.js +++ /dev/null @@ -1 +0,0 @@ -!function(e){"object"==typeof exports&&"object"==typeof module?e(require("../../lib/codemirror")):"function"==typeof define&&define.amd?define(["../../lib/codemirror"],e):e(CodeMirror)}(function(e){"use strict";e.defineMode("sql",function(t,r){function a(e,t){var r=e.next();if(g[r]){var a=g[r](e,t);if(!1!==a)return a}if(p.hexNumber&&("0"==r&&e.match(/^[xX][0-9a-fA-F]+/)||("x"==r||"X"==r)&&e.match(/^'[0-9a-fA-F]+'/)))return"number";if(p.binaryNumber&&(("b"==r||"B"==r)&&e.match(/^'[01]+'/)||"0"==r&&e.match(/^b[01]+/)))return"number";if(r.charCodeAt(0)>47&&r.charCodeAt(0)<58)return e.match(/^[0-9]*(\.[0-9]+)?([eE][-+]?[0-9]+)?/),p.decimallessFloat&&e.match(/^\.(?!\.)/),"number";if("?"==r&&(e.eatSpace()||e.eol()||e.eat(";")))return"variable-3";if("'"==r||'"'==r&&p.doubleQuote)return t.tokenize=n(r),t.tokenize(e,t);if((p.nCharCast&&("n"==r||"N"==r)||p.charsetCast&&"_"==r&&e.match(/[a-z][a-z0-9]*/i))&&("'"==e.peek()||'"'==e.peek()))return"keyword";if(/^[\(\),\;\[\]]/.test(r))return null;if(p.commentSlashSlash&&"/"==r&&e.eat("/"))return e.skipToEnd(),"comment";if(p.commentHash&&"#"==r||"-"==r&&e.eat("-")&&(!p.commentSpaceRequired||e.eat(" ")))return e.skipToEnd(),"comment";if("/"==r&&e.eat("*"))return t.tokenize=i(1),t.tokenize(e,t);if("."!=r){if(m.test(r))return e.eatWhile(m),null;if("{"==r&&(e.match(/^( )*(d|D|t|T|ts|TS)( )*'[^']*'( )*}/)||e.match(/^( )*(d|D|t|T|ts|TS)( )*"[^"]*"( )*}/)))return"number";e.eatWhile(/^[_\w\d]/);var o=e.current().toLowerCase();return b.hasOwnProperty(o)&&(e.match(/^( )+'[^']*'/)||e.match(/^( )+"[^"]*"/))?"number":c.hasOwnProperty(o)?"atom":u.hasOwnProperty(o)?"builtin":d.hasOwnProperty(o)?"keyword":l.hasOwnProperty(o)?"string-2":null}return p.zerolessFloat&&e.match(/^(?:\d+(?:e[+-]?\d+)?)/i)?"number":e.match(/^\.+/)?null:p.ODBCdotTable&&e.match(/^[\w\d_]+/)?"variable-2":void 0}function n(e){return function(t,r){for(var n,i=!1;null!=(n=t.next());){if(n==e&&!i){r.tokenize=a;break}i=!i&&"\\"==n}return"string"}}function i(e){return function(t,r){var n=t.match(/^.*?(\/\*|\*\/)/);return n?"/*"==n[1]?r.tokenize=i(e+1):r.tokenize=e>1?i(e-1):a:t.skipToEnd(),"comment"}}function o(e,t,r){t.context={prev:t.context,indent:e.indentation(),col:e.column(),type:r}}function s(e){e.indent=e.context.indent,e.context=e.context.prev}var l=r.client||{},c=r.atoms||{false:!0,true:!0,null:!0},u=r.builtin||{},d=r.keywords||{},m=r.operatorChars||/^[*+\-%<>!=&|~^]/,p=r.support||{},g=r.hooks||{},b=r.dateSQL||{date:!0,time:!0,timestamp:!0};return{startState:function(){return{tokenize:a,context:null}},token:function(e,t){if(e.sol()&&t.context&&null==t.context.align&&(t.context.align=!1),t.tokenize==a&&e.eatSpace())return null;var r=t.tokenize(e,t);if("comment"==r)return r;t.context&&null==t.context.align&&(t.context.align=!0);var n=e.current();return"("==n?o(e,t,")"):"["==n?o(e,t,"]"):t.context&&t.context.type==n&&s(t),r},indent:function(r,a){var n=r.context;if(!n)return e.Pass;var i=a.charAt(0)==n.type;return 
n.align?n.col+(i?0:1):n.indent+(i?0:t.indentUnit)},blockCommentStart:"/*",blockCommentEnd:"*/",lineComment:p.commentSlashSlash?"//":p.commentHash?"#":"--"}}),function(){function t(e){for(var t;null!=(t=e.next());)if("`"==t&&!e.eat("`"))return"variable-2";return e.backUp(e.current().length-1),e.eatWhile(/\w/)?"variable-2":null}function r(e){return e.eat("@")&&(e.match(/^session\./),e.match(/^local\./),e.match(/^global\./)),e.eat("'")?(e.match(/^.*'/),"variable-2"):e.eat('"')?(e.match(/^.*"/),"variable-2"):e.eat("`")?(e.match(/^.*`/),"variable-2"):e.match(/^[0-9a-zA-Z$\.\_]+/)?"variable-2":null}function a(e){return e.eat("N")?"atom":e.match(/^[a-zA-Z.#!?]/)?"variable-2":null}function n(e){for(var t={},r=e.split(" "),a=0;a!=]/,dateSQL:n("date time timestamp"),support:n("ODBCdotTable doubleQuote binaryNumber hexNumber")}),e.defineMIME("text/x-mssql",{name:"sql",client:n("charset clear connect edit ego exit go help nopager notee nowarning pager print prompt quit rehash source status system tee"),keywords:n(i+"begin trigger proc view index for add constraint key primary foreign collate clustered nonclustered declare exec"),builtin:n("bigint numeric bit smallint decimal smallmoney int tinyint money float real char varchar text nchar nvarchar ntext binary varbinary image cursor timestamp hierarchyid uniqueidentifier sql_variant xml table "),atoms:n("false true null unknown"),operatorChars:/^[*+\-%<>!=]/,dateSQL:n("date datetimeoffset datetime2 smalldatetime datetime time"),hooks:{"@":r}}),e.defineMIME("text/x-mysql",{name:"sql",client:n("charset clear connect edit ego exit go help nopager notee nowarning pager print prompt quit rehash source status system tee"),keywords:n(i+"accessible action add after algorithm all analyze asensitive at authors auto_increment autocommit avg avg_row_length before binary binlog both btree cache call cascade cascaded case catalog_name chain change changed character check checkpoint checksum class_origin client_statistics close coalesce code collate collation collations column columns comment commit committed completion concurrent condition connection consistent constraint contains continue contributors convert cross current current_date current_time current_timestamp current_user cursor data database databases day_hour day_microsecond day_minute day_second deallocate dec declare default delay_key_write delayed delimiter des_key_file describe deterministic dev_pop dev_samp deviance diagnostics directory disable discard distinctrow div dual dumpfile each elseif enable enclosed end ends engine engines enum errors escape escaped even event events every execute exists exit explain extended fast fetch field fields first flush for force foreign found_rows full fulltext function general get global grant grants group group_concat handler hash help high_priority hosts hour_microsecond hour_minute hour_second if ignore ignore_server_ids import index index_statistics infile inner innodb inout insensitive insert_method install interval invoker isolation iterate key keys kill language last leading leave left level limit linear lines list load local localtime localtimestamp lock logs low_priority master master_heartbeat_period master_ssl_verify_server_cert masters match max max_rows maxvalue message_text middleint migrate min min_rows minute_microsecond minute_second mod mode modifies modify mutex mysql_errno natural next no no_write_to_binlog offline offset one online open optimize option optionally out outer outfile pack_keys parser partition partitions password phase plugin 
plugins prepare preserve prev primary privileges procedure processlist profile profiles purge query quick range read read_write reads real rebuild recover references regexp relaylog release remove rename reorganize repair repeatable replace require resignal restrict resume return returns revoke right rlike rollback rollup row row_format rtree savepoint schedule schema schema_name schemas second_microsecond security sensitive separator serializable server session share show signal slave slow smallint snapshot soname spatial specific sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_no_cache sql_small_result sqlexception sqlstate sqlwarning ssl start starting starts status std stddev stddev_pop stddev_samp storage straight_join subclass_origin sum suspend table_name table_statistics tables tablespace temporary terminated to trailing transaction trigger triggers truncate uncommitted undo uninstall unique unlock upgrade usage use use_frm user user_resources user_statistics using utc_date utc_time utc_timestamp value variables varying view views warnings when while with work write xa xor year_month zerofill begin do then else loop repeat"),builtin:n("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text bigint int int1 int2 int3 int4 int8 integer float float4 float8 double char varbinary varchar varcharacter precision date datetime year unsigned signed numeric"),atoms:n("false true null unknown"),operatorChars:/^[*+\-%<>!=&|^]/,dateSQL:n("date time timestamp"),support:n("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber doubleQuote nCharCast charsetCast commentHash commentSpaceRequired"),hooks:{"@":r,"`":t,"\\":a}}),e.defineMIME("text/x-mariadb",{name:"sql",client:n("charset clear connect edit ego exit go help nopager notee nowarning pager print prompt quit rehash source status system tee"),keywords:n(i+"accessible action add after algorithm all always analyze asensitive at authors auto_increment autocommit avg avg_row_length before binary binlog both btree cache call cascade cascaded case catalog_name chain change changed character check checkpoint checksum class_origin client_statistics close coalesce code collate collation collations column columns comment commit committed completion concurrent condition connection consistent constraint contains continue contributors convert cross current current_date current_time current_timestamp current_user cursor data database databases day_hour day_microsecond day_minute day_second deallocate dec declare default delay_key_write delayed delimiter des_key_file describe deterministic dev_pop dev_samp deviance diagnostics directory disable discard distinctrow div dual dumpfile each elseif enable enclosed end ends engine engines enum errors escape escaped even event events every execute exists exit explain extended fast fetch field fields first flush for force foreign found_rows full fulltext function general generated get global grant grants group groupby_concat handler hard hash help high_priority hosts hour_microsecond hour_minute hour_second if ignore ignore_server_ids import index index_statistics infile inner innodb inout insensitive insert_method install interval invoker isolation iterate key keys kill language last leading leave left level limit linear lines list load local localtime localtimestamp lock logs low_priority master master_heartbeat_period master_ssl_verify_server_cert masters match max max_rows maxvalue 
message_text middleint migrate min min_rows minute_microsecond minute_second mod mode modifies modify mutex mysql_errno natural next no no_write_to_binlog offline offset one online open optimize option optionally out outer outfile pack_keys parser partition partitions password persistent phase plugin plugins prepare preserve prev primary privileges procedure processlist profile profiles purge query quick range read read_write reads real rebuild recover references regexp relaylog release remove rename reorganize repair repeatable replace require resignal restrict resume return returns revoke right rlike rollback rollup row row_format rtree savepoint schedule schema schema_name schemas second_microsecond security sensitive separator serializable server session share show shutdown signal slave slow smallint snapshot soft soname spatial specific sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_no_cache sql_small_result sqlexception sqlstate sqlwarning ssl start starting starts status std stddev stddev_pop stddev_samp storage straight_join subclass_origin sum suspend table_name table_statistics tables tablespace temporary terminated to trailing transaction trigger triggers truncate uncommitted undo uninstall unique unlock upgrade usage use use_frm user user_resources user_statistics using utc_date utc_time utc_timestamp value variables varying view views virtual warnings when while with work write xa xor year_month zerofill begin do then else loop repeat"),builtin:n("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text bigint int int1 int2 int3 int4 int8 integer float float4 float8 double char varbinary varchar varcharacter precision date datetime year unsigned signed numeric"),atoms:n("false true null unknown"),operatorChars:/^[*+\-%<>!=&|^]/,dateSQL:n("date time timestamp"),support:n("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber doubleQuote nCharCast charsetCast commentHash commentSpaceRequired"),hooks:{"@":r,"`":t,"\\":a}}),e.defineMIME("text/x-sqlite",{name:"sql",client:n("auth backup bail binary changes check clone databases dbinfo dump echo eqp exit explain fullschema headers help import imposter indexes iotrace limit lint load log mode nullvalue once open output print prompt quit read restore save scanstats schema separator session shell show stats system tables testcase timeout timer trace vfsinfo vfslist vfsname width"),keywords:n(i+"abort action add after all analyze attach autoincrement before begin cascade case cast check collate column commit conflict constraint cross current_date current_time current_timestamp database default deferrable deferred detach each else end escape except exclusive exists explain fail for foreign full glob if ignore immediate index indexed initially inner instead intersect isnull key left limit match natural no notnull null of offset outer plan pragma primary query raise recursive references regexp reindex release rename replace restrict right rollback row savepoint temp temporary then to transaction trigger unique using vacuum view virtual when with without"),builtin:n("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text clob bigint int int2 int8 integer float double char varchar date datetime year unsigned signed numeric real"),atoms:n("null current_date current_time 
current_timestamp"),operatorChars:/^[*+\-%<>!=&|/~]/,dateSQL:n("date time timestamp datetime"),support:n("decimallessFloat zerolessFloat"),identifierQuote:'"',hooks:{"@":r,":":r,"?":r,$:r,'"':function(e){for(var t;null!=(t=e.next());)if('"'==t&&!e.eat('"'))return"variable-2";return e.backUp(e.current().length-1),e.eatWhile(/\w/)?"variable-2":null},"`":t}}),e.defineMIME("text/x-cassandra",{name:"sql",client:{},keywords:n("add all allow alter and any apply as asc authorize batch begin by clustering columnfamily compact consistency count create custom delete desc distinct drop each_quorum exists filtering from grant if in index insert into key keyspace keyspaces level limit local_one local_quorum modify nan norecursive nosuperuser not of on one order password permission permissions primary quorum rename revoke schema select set storage superuser table three to token truncate ttl two type unlogged update use user users using values where with writetime"),builtin:n("ascii bigint blob boolean counter decimal double float frozen inet int list map static text timestamp timeuuid tuple uuid varchar varint"),atoms:n("false true infinity NaN"),operatorChars:/^[<>=]/,dateSQL:{},support:n("commentSlashSlash decimallessFloat"),hooks:{}}),e.defineMIME("text/x-plsql",{name:"sql",client:n("appinfo arraysize autocommit autoprint autorecovery autotrace blockterminator break btitle cmdsep colsep compatibility compute concat copycommit copytypecheck define describe echo editfile embedded escape exec execute feedback flagger flush heading headsep instance linesize lno loboffset logsource long longchunksize markup native newpage numformat numwidth pagesize pause pno recsep recsepchar release repfooter repheader serveroutput shiftinout show showmode size spool sqlblanklines sqlcase sqlcode sqlcontinue sqlnumber sqlpluscompatibility sqlprefix sqlprompt sqlterminator suffix tab term termout time timing trimout trimspool ttitle underline verify version wrap"),keywords:n("abort accept access add all alter and any array arraylen as asc assert assign at attributes audit authorization avg base_table begin between binary_integer body boolean by case cast char char_base check close cluster clusters colauth column comment commit compress connect connected constant constraint crash create current currval cursor data_base database date dba deallocate debugoff debugon decimal declare default definition delay delete desc digits dispose distinct do drop else elseif elsif enable end entry escape exception exception_init exchange exclusive exists exit external fast fetch file for force form from function generic goto grant group having identified if immediate in increment index indexes indicator initial initrans insert interface intersect into is key level library like limited local lock log logging long loop master maxextents maxtrans member minextents minus mislabel mode modify multiset new next no noaudit nocompress nologging noparallel not nowait number_base object of off offline on online only open option or order out package parallel partition pctfree pctincrease pctused pls_integer positive positiven pragma primary prior private privileges procedure public raise range raw read rebuild record ref references refresh release rename replace resource restrict return returning returns reverse revoke rollback row rowid rowlabel rownum rows run savepoint schema segment select separate session set share snapshot some space split sql start statement storage subtype successful synonym tabauth table tables tablespace task terminate 
then to trigger truncate type union unique unlimited unrecoverable unusable update use using validate value values variable view views when whenever where while with work"),builtin:n("abs acos add_months ascii asin atan atan2 average bfile bfilename bigserial bit blob ceil character chartorowid chr clob concat convert cos cosh count dec decode deref dual dump dup_val_on_index empty error exp false float floor found glb greatest hextoraw initcap instr instrb int integer isopen last_day least length lengthb ln lower lpad ltrim lub make_ref max min mlslabel mod months_between natural naturaln nchar nclob new_time next_day nextval nls_charset_decl_len nls_charset_id nls_charset_name nls_initcap nls_lower nls_sort nls_upper nlssort no_data_found notfound null number numeric nvarchar2 nvl others power rawtohex real reftohex round rowcount rowidtochar rowtype rpad rtrim serial sign signtype sin sinh smallint soundex sqlcode sqlerrm sqrt stddev string substr substrb sum sysdate tan tanh to_char text to_date to_label to_multi_byte to_number to_single_byte translate true trunc uid unlogged upper user userenv varchar varchar2 variance varying vsize xml"),operatorChars:/^[*+\-%<>!=~]/,dateSQL:n("date time timestamp"),support:n("doubleQuote nCharCast zerolessFloat binaryNumber hexNumber")}),e.defineMIME("text/x-hive",{name:"sql",keywords:n("select alter $elem$ $key$ $value$ add after all analyze and archive as asc before between binary both bucket buckets by cascade case cast change cluster clustered clusterstatus collection column columns comment compute concatenate continue create cross cursor data database databases dbproperties deferred delete delimited desc describe directory disable distinct distribute drop else enable end escaped exclusive exists explain export extended external false fetch fields fileformat first format formatted from full function functions grant group having hold_ddltime idxproperties if import in index indexes inpath inputdriver inputformat insert intersect into is items join keys lateral left like limit lines load local location lock locks mapjoin materialized minus msck no_drop nocompress not of offline on option or order out outer outputdriver outputformat overwrite partition partitioned partitions percent plus preserve procedure purge range rcfile read readonly reads rebuild recordreader recordwriter recover reduce regexp rename repair replace restrict revoke right rlike row schema schemas semi sequencefile serde serdeproperties set shared show show_database sort sorted ssl statistics stored streamtable table tables tablesample tblproperties temporary terminated textfile then tmp to touch transform trigger true unarchive undo union uniquejoin unlock update use using utc utc_tmestamp view when where while with"),builtin:n("bool boolean long timestamp tinyint smallint bigint int float double date datetime unsigned string array struct map uniontype"),atoms:n("false true null unknown"),operatorChars:/^[*+\-%<>!=]/,dateSQL:n("date timestamp"),support:n("ODBCdotTable doubleQuote binaryNumber hexNumber")}),e.defineMIME("text/x-pgsql",{name:"sql",client:n("source"),keywords:n(i+"a abort abs absent absolute access according action ada add admin after aggregate all allocate also always analyse analyze any are array array_agg array_max_cardinality asensitive assertion assignment asymmetric at atomic attribute attributes authorization avg backward base64 before begin begin_frame begin_partition bernoulli binary bit_length blob blocked bom both breadth c cache call called cardinality 
cascade cascaded case cast catalog catalog_name ceil ceiling chain characteristics characters character_length character_set_catalog character_set_name character_set_schema char_length check checkpoint class class_origin clob close cluster coalesce cobol collate collation collation_catalog collation_name collation_schema collect column columns column_name command_function command_function_code comment comments commit committed concurrently condition condition_number configuration conflict connect connection connection_name constraint constraints constraint_catalog constraint_name constraint_schema constructor contains content continue control conversion convert copy corr corresponding cost covar_pop covar_samp cross csv cube cume_dist current current_catalog current_date current_default_transform_group current_path current_role current_row current_schema current_time current_timestamp current_transform_group_for_type current_user cursor cursor_name cycle data database datalink datetime_interval_code datetime_interval_precision day db deallocate dec declare default defaults deferrable deferred defined definer degree delimiter delimiters dense_rank depth deref derived describe descriptor deterministic diagnostics dictionary disable discard disconnect dispatch dlnewcopy dlpreviouscopy dlurlcomplete dlurlcompleteonly dlurlcompletewrite dlurlpath dlurlpathonly dlurlpathwrite dlurlscheme dlurlserver dlvalue do document domain dynamic dynamic_function dynamic_function_code each element else empty enable encoding encrypted end end-exec end_frame end_partition enforced enum equals escape event every except exception exclude excluding exclusive exec execute exists exp explain expression extension external extract false family fetch file filter final first first_value flag float floor following for force foreign fortran forward found frame_row free freeze fs full function functions fusion g general generated get global go goto grant granted greatest grouping groups handler header hex hierarchy hold hour id identity if ignore ilike immediate immediately immutable implementation implicit import including increment indent index indexes indicator inherit inherits initially inline inner inout input insensitive instance instantiable instead integrity intersect intersection invoker isnull isolation k key key_member key_type label lag language large last last_value lateral lead leading leakproof least left length level library like_regex link listen ln load local localtime localtimestamp location locator lock locked logged lower m map mapping match matched materialized max maxvalue max_cardinality member merge message_length message_octet_length message_text method min minute minvalue mod mode modifies module month more move multiset mumps name names namespace national natural nchar nclob nesting new next nfc nfd nfkc nfkd nil no none normalize normalized nothing notify notnull nowait nth_value ntile null nullable nullif nulls number object occurrences_regex octets octet_length of off offset oids old only open operator option options ordering ordinality others out outer output over overlaps overlay overriding owned owner p pad parallel parameter parameter_mode parameter_name parameter_ordinal_position parameter_specific_catalog parameter_specific_name parameter_specific_schema parser partial partition pascal passing passthrough password percent percentile_cont percentile_disc percent_rank period permission placing plans pli policy portion position position_regex power precedes preceding prepare prepared 
preserve primary prior privileges procedural procedure program public quote range rank read reads reassign recheck recovery recursive ref references referencing refresh regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy regr_syy reindex relative release rename repeatable replace replica requiring reset respect restart restore restrict restricted result return returned_cardinality returned_length returned_octet_length returned_sqlstate returning returns revoke right role rollback rollup routine routine_catalog routine_name routine_schema row rows row_count row_number rule savepoint scale schema schema_name scope scope_catalog scope_name scope_schema scroll search second section security selective self sensitive sequence sequences serializable server server_name session session_user setof sets share show similar simple size skip snapshot some source space specific specifictype specific_name sql sqlcode sqlerror sqlexception sqlstate sqlwarning sqrt stable standalone start state statement static statistics stddev_pop stddev_samp stdin stdout storage strict strip structure style subclass_origin submultiset substring substring_regex succeeds sum symmetric sysid system system_time system_user t tables tablesample tablespace table_name temp template temporary then ties timezone_hour timezone_minute to token top_level_count trailing transaction transactions_committed transactions_rolled_back transaction_active transform transforms translate translate_regex translation treat trigger trigger_catalog trigger_name trigger_schema trim trim_array true truncate trusted type types uescape unbounded uncommitted under unencrypted unique unknown unlink unlisten unlogged unnamed unnest until untyped upper uri usage user user_defined_type_catalog user_defined_type_code user_defined_type_name user_defined_type_schema using vacuum valid validate validator value value_of varbinary variadic var_pop var_samp verbose version versioning view views volatile when whenever whitespace width_bucket window within work wrapper write xmlagg xmlattributes xmlbinary xmlcast xmlcomment xmlconcat xmldeclaration xmldocument xmlelement xmlexists xmlforest xmliterate xmlnamespaces xmlparse xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltext xmlvalidate year yes loop repeat attach path depends detach zone"),builtin:n("bigint int8 bigserial serial8 bit varying varbit boolean bool box bytea character char varchar cidr circle date double precision float8 inet integer int int4 interval json jsonb line lseg macaddr macaddr8 money numeric decimal path pg_lsn point polygon real float4 smallint int2 smallserial serial2 serial serial4 text time without zone with timetz timestamp timestamptz tsquery tsvector txid_snapshot uuid xml"),atoms:n("false true null unknown"),operatorChars:/^[*+\-%<>!=&|^\/#@?~]/,dateSQL:n("date time timestamp"),support:n("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber nCharCast charsetCast")}),e.defineMIME("text/x-gql",{name:"sql",keywords:n("ancestor and asc by contains desc descendant distinct from group has in is limit offset on order select superset where"),atoms:n("false true"),builtin:n("blob datetime first key __key__ string integer double boolean null"),operatorChars:/^[*+\-%<>!=]/}),e.defineMIME("text/x-gpsql",{name:"sql",client:n("source"),keywords:n("abort absolute access action active add admin after aggregate all also alter always analyse analyze and any array as asc assertion assignment asymmetric at authorization backward before begin 
between bigint binary bit boolean both by cache called cascade cascaded case cast chain char character characteristics check checkpoint class close cluster coalesce codegen collate column comment commit committed concurrency concurrently configuration connection constraint constraints contains content continue conversion copy cost cpu_rate_limit create createdb createexttable createrole createuser cross csv cube current current_catalog current_date current_role current_schema current_time current_timestamp current_user cursor cycle data database day deallocate dec decimal declare decode default defaults deferrable deferred definer delete delimiter delimiters deny desc dictionary disable discard distinct distributed do document domain double drop dxl each else enable encoding encrypted end enum errors escape every except exchange exclude excluding exclusive execute exists explain extension external extract false family fetch fields filespace fill filter first float following for force foreign format forward freeze from full function global grant granted greatest group group_id grouping handler hash having header hold host hour identity if ignore ilike immediate immutable implicit in including inclusive increment index indexes inherit inherits initially inline inner inout input insensitive insert instead int integer intersect interval into invoker is isnull isolation join key language large last leading least left level like limit list listen load local localtime localtimestamp location lock log login mapping master match maxvalue median merge minute minvalue missing mode modifies modify month move name names national natural nchar new newline next no nocreatedb nocreateexttable nocreaterole nocreateuser noinherit nologin none noovercommit nosuperuser not nothing notify notnull nowait null nullif nulls numeric object of off offset oids old on only operator option options or order ordered others out outer over overcommit overlaps overlay owned owner parser partial partition partitions passing password percent percentile_cont percentile_disc placing plans position preceding precision prepare prepared preserve primary prior privileges procedural procedure protocol queue quote randomly range read readable reads real reassign recheck recursive ref references reindex reject relative release rename repeatable replace replica reset resource restart restrict returning returns revoke right role rollback rollup rootpartition row rows rule savepoint scatter schema scroll search second security segment select sequence serializable session session_user set setof sets share show similar simple smallint some split sql stable standalone start statement statistics stdin stdout storage strict strip subpartition subpartitions substring superuser symmetric sysid system table tablespace temp template temporary text then threshold ties time timestamp to trailing transaction treat trigger trim true truncate trusted type unbounded uncommitted unencrypted union unique unknown unlisten until update user using vacuum valid validation validator value values varchar variadic varying verbose version view volatile web when where whitespace window with within without work writable write xml xmlattributes xmlconcat xmlelement xmlexists xmlforest xmlparse xmlpi xmlroot xmlserialize year yes zone"),builtin:n("bigint int8 bigserial serial8 bit varying varbit boolean bool box bytea character char varchar cidr circle date double precision float float8 inet integer int int4 interval json jsonb line lseg macaddr macaddr8 money 
numeric decimal path pg_lsn point polygon real float4 smallint int2 smallserial serial2 serial serial4 text time without zone with timetz timestamp timestamptz tsquery tsvector txid_snapshot uuid xml"),atoms:n("false true null unknown"),operatorChars:/^[*+\-%<>!=&|^\/#@?~]/,dateSQL:n("date time timestamp"),support:n("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber nCharCast charsetCast")}),e.defineMIME("text/x-sparksql",{name:"sql",keywords:n("add after all alter analyze and anti archive array as asc at between bucket buckets by cache cascade case cast change clear cluster clustered codegen collection column columns comment commit compact compactions compute concatenate cost create cross cube current current_date current_timestamp database databases datata dbproperties defined delete delimited desc describe dfs directories distinct distribute drop else end escaped except exchange exists explain export extended external false fields fileformat first following for format formatted from full function functions global grant group grouping having if ignore import in index indexes inner inpath inputformat insert intersect interval into is items join keys last lateral lazy left like limit lines list load local location lock locks logical macro map minus msck natural no not null nulls of on option options or order out outer outputformat over overwrite partition partitioned partitions percent preceding principals purge range recordreader recordwriter recover reduce refresh regexp rename repair replace reset restrict revoke right rlike role roles rollback rollup row rows schema schemas select semi separated serde serdeproperties set sets show skewed sort sorted start statistics stored stratify struct table tables tablesample tblproperties temp temporary terminated then to touch transaction transactions transform true truncate unarchive unbounded uncache union unlock unset use using values view when where window with"),builtin:n("tinyint smallint int bigint boolean float double string binary timestamp decimal array map struct uniontype delimited serde sequencefile textfile rcfile inputformat outputformat"),atoms:n("false true null"),operatorChars:/^[*+\-%<>!=~&|^]/,dateSQL:n("date time timestamp"),support:n("ODBCdotTable doubleQuote zerolessFloat")}),e.defineMIME("text/x-esper",{name:"sql",client:n("source"),keywords:n("alter and as asc between by count create delete desc distinct drop from group having in insert into is join like not on or order select set table union update values where limit after all and as at asc avedev avg between by case cast coalesce count create current_timestamp day days delete define desc distinct else end escape events every exists false first from full group having hour hours in inner insert instanceof into irstream is istream join last lastweekday left limit like max match_recognize matches median measures metadatasql min minute minutes msec millisecond milliseconds not null offset on or order outer output partition pattern prev prior regexp retain-union retain-intersection right rstream sec second seconds select set some snapshot sql stddev sum then true unidirectional until update variable weekday when where window"),builtin:{},atoms:n("false true null"),operatorChars:/^[*+\-%<>!=&|^\/#@?~]/,dateSQL:n("time"),support:n("decimallessFloat zerolessFloat binaryNumber hexNumber")})}()});
\ No newline at end of file
diff --git a/datasette/static/codemirror-5.31.0.js b/datasette/static/codemirror-5.31.0.js
deleted file mode 100644
index a0d5d688..00000000
--- a/datasette/static/codemirror-5.31.0.js
+++ /dev/null
@@ -1,9659 +0,0 @@
-// CodeMirror, copyright (c) by Marijn Haverbeke and others
-// Distributed under an MIT license: http://codemirror.net/LICENSE
-
-// This is CodeMirror (http://codemirror.net), a code editor
-// implemented in JavaScript on top of the browser's DOM.
-//
-// You can find some technical background for some of the code below
-// at http://marijnhaverbeke.nl/blog/#cm-internals .
-
-(function (global, factory) {
- typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
- typeof define === 'function' && define.amd ? define(factory) :
- (global.CodeMirror = factory());
-}(this, (function () { 'use strict';
-
-// Kludges for bugs and behavior differences that can't be feature
-// detected are enabled based on userAgent etc sniffing.
-var userAgent = navigator.userAgent;
-var platform = navigator.platform;
-
-var gecko = /gecko\/\d/i.test(userAgent);
-var ie_upto10 = /MSIE \d/.test(userAgent);
-var ie_11up = /Trident\/(?:[7-9]|\d{2,})\..*rv:(\d+)/.exec(userAgent);
-var edge = /Edge\/(\d+)/.exec(userAgent);
-var ie = ie_upto10 || ie_11up || edge;
-var ie_version = ie && (ie_upto10 ? document.documentMode || 6 : +(edge || ie_11up)[1]);
-var webkit = !edge && /WebKit\//.test(userAgent);
-var qtwebkit = webkit && /Qt\/\d+\.\d+/.test(userAgent);
-var chrome = !edge && /Chrome\//.test(userAgent);
-var presto = /Opera\//.test(userAgent);
-var safari = /Apple Computer/.test(navigator.vendor);
-var mac_geMountainLion = /Mac OS X 1\d\D([8-9]|\d\d)\D/.test(userAgent);
-var phantom = /PhantomJS/.test(userAgent);
-
-var ios = !edge && /AppleWebKit/.test(userAgent) && /Mobile\/\w+/.test(userAgent);
-var android = /Android/.test(userAgent);
-// This is woefully incomplete. Suggestions for alternative methods welcome.
-var mobile = ios || android || /webOS|BlackBerry|Opera Mini|Opera Mobi|IEMobile/i.test(userAgent);
-var mac = ios || /Mac/.test(platform);
-var chromeOS = /\bCrOS\b/.test(userAgent);
-var windows = /win/i.test(platform);
-
-var presto_version = presto && userAgent.match(/Version\/(\d*\.\d*)/);
-if (presto_version) { presto_version = Number(presto_version[1]); }
-if (presto_version && presto_version >= 15) { presto = false; webkit = true; }
-// Some browsers use the wrong event properties to signal cmd/ctrl on OS X
-var flipCtrlCmd = mac && (qtwebkit || presto && (presto_version == null || presto_version < 12.11));
-var captureRightClick = gecko || (ie && ie_version >= 9);
-
-function classTest(cls) { return new RegExp("(^|\\s)" + cls + "(?:$|\\s)\\s*") }
-
-var rmClass = function(node, cls) {
- var current = node.className;
- var match = classTest(cls).exec(current);
- if (match) {
- var after = current.slice(match.index + match[0].length);
- node.className = current.slice(0, match.index) + (after ?
match[1] + after : ""); - } -}; - -function removeChildren(e) { - for (var count = e.childNodes.length; count > 0; --count) - { e.removeChild(e.firstChild); } - return e -} - -function removeChildrenAndAdd(parent, e) { - return removeChildren(parent).appendChild(e) -} - -function elt(tag, content, className, style) { - var e = document.createElement(tag); - if (className) { e.className = className; } - if (style) { e.style.cssText = style; } - if (typeof content == "string") { e.appendChild(document.createTextNode(content)); } - else if (content) { for (var i = 0; i < content.length; ++i) { e.appendChild(content[i]); } } - return e -} -// wrapper for elt, which removes the elt from the accessibility tree -function eltP(tag, content, className, style) { - var e = elt(tag, content, className, style); - e.setAttribute("role", "presentation"); - return e -} - -var range; -if (document.createRange) { range = function(node, start, end, endNode) { - var r = document.createRange(); - r.setEnd(endNode || node, end); - r.setStart(node, start); - return r -}; } -else { range = function(node, start, end) { - var r = document.body.createTextRange(); - try { r.moveToElementText(node.parentNode); } - catch(e) { return r } - r.collapse(true); - r.moveEnd("character", end); - r.moveStart("character", start); - return r -}; } - -function contains(parent, child) { - if (child.nodeType == 3) // Android browser always returns false when child is a textnode - { child = child.parentNode; } - if (parent.contains) - { return parent.contains(child) } - do { - if (child.nodeType == 11) { child = child.host; } - if (child == parent) { return true } - } while (child = child.parentNode) -} - -function activeElt() { - // IE and Edge may throw an "Unspecified Error" when accessing document.activeElement. - // IE < 10 will throw when accessed while the page is loading or in an iframe. - // IE > 9 and Edge will throw when accessed in an iframe if document.body is unavailable. - var activeElement; - try { - activeElement = document.activeElement; - } catch(e) { - activeElement = document.body || null; - } - while (activeElement && activeElement.shadowRoot && activeElement.shadowRoot.activeElement) - { activeElement = activeElement.shadowRoot.activeElement; } - return activeElement -} - -function addClass(node, cls) { - var current = node.className; - if (!classTest(cls).test(current)) { node.className += (current ? " " : "") + cls; } -} -function joinClasses(a, b) { - var as = a.split(" "); - for (var i = 0; i < as.length; i++) - { if (as[i] && !classTest(as[i]).test(b)) { b += " " + as[i]; } } - return b -} - -var selectInput = function(node) { node.select(); }; -if (ios) // Mobile Safari apparently has a bug where select() is broken. - { selectInput = function(node) { node.selectionStart = 0; node.selectionEnd = node.value.length; }; } -else if (ie) // Suppress mysterious IE10 errors - { selectInput = function(node) { try { node.select(); } catch(_e) {} }; } - -function bind(f) { - var args = Array.prototype.slice.call(arguments, 1); - return function(){return f.apply(null, args)} -} - -function copyObj(obj, target, overwrite) { - if (!target) { target = {}; } - for (var prop in obj) - { if (obj.hasOwnProperty(prop) && (overwrite !== false || !target.hasOwnProperty(prop))) - { target[prop] = obj[prop]; } } - return target -} - -// Counts the column offset in a string, taking tabs into account. -// Used mostly to find indentation. 
-function countColumn(string, end, tabSize, startIndex, startValue) { - if (end == null) { - end = string.search(/[^\s\u00a0]/); - if (end == -1) { end = string.length; } - } - for (var i = startIndex || 0, n = startValue || 0;;) { - var nextTab = string.indexOf("\t", i); - if (nextTab < 0 || nextTab >= end) - { return n + (end - i) } - n += nextTab - i; - n += tabSize - (n % tabSize); - i = nextTab + 1; - } -} - -var Delayed = function() {this.id = null;}; -Delayed.prototype.set = function (ms, f) { - clearTimeout(this.id); - this.id = setTimeout(f, ms); -}; - -function indexOf(array, elt) { - for (var i = 0; i < array.length; ++i) - { if (array[i] == elt) { return i } } - return -1 -} - -// Number of pixels added to scroller and sizer to hide scrollbar -var scrollerGap = 30; - -// Returned or thrown by various protocols to signal 'I'm not -// handling this'. -var Pass = {toString: function(){return "CodeMirror.Pass"}}; - -// Reused option objects for setSelection & friends -var sel_dontScroll = {scroll: false}; -var sel_mouse = {origin: "*mouse"}; -var sel_move = {origin: "+move"}; - -// The inverse of countColumn -- find the offset that corresponds to -// a particular column. -function findColumn(string, goal, tabSize) { - for (var pos = 0, col = 0;;) { - var nextTab = string.indexOf("\t", pos); - if (nextTab == -1) { nextTab = string.length; } - var skipped = nextTab - pos; - if (nextTab == string.length || col + skipped >= goal) - { return pos + Math.min(skipped, goal - col) } - col += nextTab - pos; - col += tabSize - (col % tabSize); - pos = nextTab + 1; - if (col >= goal) { return pos } - } -} - -var spaceStrs = [""]; -function spaceStr(n) { - while (spaceStrs.length <= n) - { spaceStrs.push(lst(spaceStrs) + " "); } - return spaceStrs[n] -} - -function lst(arr) { return arr[arr.length-1] } - -function map(array, f) { - var out = []; - for (var i = 0; i < array.length; i++) { out[i] = f(array[i], i); } - return out -} - -function insertSorted(array, value, score) { - var pos = 0, priority = score(value); - while (pos < array.length && score(array[pos]) <= priority) { pos++; } - array.splice(pos, 0, value); -} - -function nothing() {} - -function createObj(base, props) { - var inst; - if (Object.create) { - inst = Object.create(base); - } else { - nothing.prototype = base; - inst = new nothing(); - } - if (props) { copyObj(props, inst); } - return inst -} - -var nonASCIISingleCaseWordChar = /[\u00df\u0587\u0590-\u05f4\u0600-\u06ff\u3040-\u309f\u30a0-\u30ff\u3400-\u4db5\u4e00-\u9fcc\uac00-\ud7af]/; -function isWordCharBasic(ch) { - return /\w/.test(ch) || ch > "\x80" && - (ch.toUpperCase() != ch.toLowerCase() || nonASCIISingleCaseWordChar.test(ch)) -} -function isWordChar(ch, helper) { - if (!helper) { return isWordCharBasic(ch) } - if (helper.source.indexOf("\\w") > -1 && isWordCharBasic(ch)) { return true } - return helper.test(ch) -} - -function isEmpty(obj) { - for (var n in obj) { if (obj.hasOwnProperty(n) && obj[n]) { return false } } - return true -} - -// Extending unicode characters. A series of a non-extending char + -// any number of extending chars is treated as a single unit as far -// as editing and measuring is concerned. This is not fully correct, -// since some scripts/fonts/browsers also treat other configurations -// of code points as a group. 
-var extendingChars = /[\u0300-\u036f\u0483-\u0489\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u065e\u0670\u06d6-\u06dc\u06de-\u06e4\u06e7\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0900-\u0902\u093c\u0941-\u0948\u094d\u0951-\u0955\u0962\u0963\u0981\u09bc\u09be\u09c1-\u09c4\u09cd\u09d7\u09e2\u09e3\u0a01\u0a02\u0a3c\u0a41\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a70\u0a71\u0a75\u0a81\u0a82\u0abc\u0ac1-\u0ac5\u0ac7\u0ac8\u0acd\u0ae2\u0ae3\u0b01\u0b3c\u0b3e\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b57\u0b62\u0b63\u0b82\u0bbe\u0bc0\u0bcd\u0bd7\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0cbc\u0cbf\u0cc2\u0cc6\u0ccc\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0d3e\u0d41-\u0d44\u0d4d\u0d57\u0d62\u0d63\u0dca\u0dcf\u0dd2-\u0dd4\u0dd6\u0ddf\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0f18\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86\u0f87\u0f90-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039\u103a\u103d\u103e\u1058\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085\u1086\u108d\u109d\u135f\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u18a9\u1920-\u1922\u1927\u1928\u1932\u1939-\u193b\u1a17\u1a18\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80\u1b81\u1ba2-\u1ba5\u1ba8\u1ba9\u1c2c-\u1c33\u1c36\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1dc0-\u1de6\u1dfd-\u1dff\u200c\u200d\u20d0-\u20f0\u2cef-\u2cf1\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua66f-\ua672\ua67c\ua67d\ua6f0\ua6f1\ua802\ua806\ua80b\ua825\ua826\ua8c4\ua8e0-\ua8f1\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\uaa29-\uaa2e\uaa31\uaa32\uaa35\uaa36\uaa43\uaa4c\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uabe5\uabe8\uabed\udc00-\udfff\ufb1e\ufe00-\ufe0f\ufe20-\ufe26\uff9e\uff9f]/; -function isExtendingChar(ch) { return ch.charCodeAt(0) >= 768 && extendingChars.test(ch) } - -// Returns a number from the range [`0`; `str.length`] unless `pos` is outside that range. -function skipExtendingChars(str, pos, dir) { - while ((dir < 0 ? pos > 0 : pos < str.length) && isExtendingChar(str.charAt(pos))) { pos += dir; } - return pos -} - -// Returns the value from the range [`from`; `to`] that satisfies -// `pred` and is closest to `from`. Assumes that at least `to` -// satisfies `pred`. Supports `from` being greater than `to`. -function findFirst(pred, from, to) { - // At any point we are certain `to` satisfies `pred`, don't know - // whether `from` does. - var dir = from > to ? -1 : 1; - for (;;) { - if (from == to) { return from } - var midF = (from + to) / 2, mid = dir < 0 ? Math.ceil(midF) : Math.floor(midF); - if (mid == from) { return pred(mid) ? from : to } - if (pred(mid)) { to = mid; } - else { from = mid + dir; } - } -} - -// The display handles the DOM integration, both for input reading -// and content drawing. It holds references to DOM nodes and -// display-related state. - -function Display(place, doc, input) { - var d = this; - this.input = input; - - // Covers bottom-right square when both scrollbars are present. - d.scrollbarFiller = elt("div", null, "CodeMirror-scrollbar-filler"); - d.scrollbarFiller.setAttribute("cm-not-content", "true"); - // Covers bottom of gutter when coverGutterNextToScrollbar is on - // and h scrollbar is present. 
- d.gutterFiller = elt("div", null, "CodeMirror-gutter-filler"); - d.gutterFiller.setAttribute("cm-not-content", "true"); - // Will contain the actual code, positioned to cover the viewport. - d.lineDiv = eltP("div", null, "CodeMirror-code"); - // Elements are added to these to represent selection and cursors. - d.selectionDiv = elt("div", null, null, "position: relative; z-index: 1"); - d.cursorDiv = elt("div", null, "CodeMirror-cursors"); - // A visibility: hidden element used to find the size of things. - d.measure = elt("div", null, "CodeMirror-measure"); - // When lines outside of the viewport are measured, they are drawn in this. - d.lineMeasure = elt("div", null, "CodeMirror-measure"); - // Wraps everything that needs to exist inside the vertically-padded coordinate system - d.lineSpace = eltP("div", [d.measure, d.lineMeasure, d.selectionDiv, d.cursorDiv, d.lineDiv], - null, "position: relative; outline: none"); - var lines = eltP("div", [d.lineSpace], "CodeMirror-lines"); - // Moved around its parent to cover visible view. - d.mover = elt("div", [lines], null, "position: relative"); - // Set to the height of the document, allowing scrolling. - d.sizer = elt("div", [d.mover], "CodeMirror-sizer"); - d.sizerWidth = null; - // Behavior of elts with overflow: auto and padding is - // inconsistent across browsers. This is used to ensure the - // scrollable area is big enough. - d.heightForcer = elt("div", null, null, "position: absolute; height: " + scrollerGap + "px; width: 1px;"); - // Will contain the gutters, if any. - d.gutters = elt("div", null, "CodeMirror-gutters"); - d.lineGutter = null; - // Actual scrollable element. - d.scroller = elt("div", [d.sizer, d.heightForcer, d.gutters], "CodeMirror-scroll"); - d.scroller.setAttribute("tabIndex", "-1"); - // The element in which the editor lives. - d.wrapper = elt("div", [d.scrollbarFiller, d.gutterFiller, d.scroller], "CodeMirror"); - - // Work around IE7 z-index bug (not perfect, hence IE7 not really being supported) - if (ie && ie_version < 8) { d.gutters.style.zIndex = -1; d.scroller.style.paddingRight = 0; } - if (!webkit && !(gecko && mobile)) { d.scroller.draggable = true; } - - if (place) { - if (place.appendChild) { place.appendChild(d.wrapper); } - else { place(d.wrapper); } - } - - // Current rendered range (may be bigger than the view window). - d.viewFrom = d.viewTo = doc.first; - d.reportedViewFrom = d.reportedViewTo = doc.first; - // Information about the rendered lines. - d.view = []; - d.renderedView = null; - // Holds info about a single rendered line when it was rendered - // for measurement, while not in view. - d.externalMeasured = null; - // Empty space (in pixels) above the view - d.viewOffset = 0; - d.lastWrapHeight = d.lastWrapWidth = 0; - d.updateLineNumbers = null; - - d.nativeBarWidth = d.barHeight = d.barWidth = 0; - d.scrollbarsClipped = false; - - // Used to only resize the line number gutter when necessary (when - // the amount of lines crosses a boundary that makes its width change) - d.lineNumWidth = d.lineNumInnerWidth = d.lineNumChars = null; - // Set to true when a non-horizontal-scrolling line widget is - // added. As an optimization, line widget aligning is skipped when - // this is false. - d.alignWidgets = false; - - d.cachedCharWidth = d.cachedTextHeight = d.cachedPaddingH = null; - - // Tracks the maximum line length so that the horizontal scrollbar - // can be kept static when scrolling. 
- d.maxLine = null; - d.maxLineLength = 0; - d.maxLineChanged = false; - - // Used for measuring wheel scrolling granularity - d.wheelDX = d.wheelDY = d.wheelStartX = d.wheelStartY = null; - - // True when shift is held down. - d.shift = false; - - // Used to track whether anything happened since the context menu - // was opened. - d.selForContextMenu = null; - - d.activeTouch = null; - - input.init(d); -} - -// Find the line object corresponding to the given line number. -function getLine(doc, n) { - n -= doc.first; - if (n < 0 || n >= doc.size) { throw new Error("There is no line " + (n + doc.first) + " in the document.") } - var chunk = doc; - while (!chunk.lines) { - for (var i = 0;; ++i) { - var child = chunk.children[i], sz = child.chunkSize(); - if (n < sz) { chunk = child; break } - n -= sz; - } - } - return chunk.lines[n] -} - -// Get the part of a document between two positions, as an array of -// strings. -function getBetween(doc, start, end) { - var out = [], n = start.line; - doc.iter(start.line, end.line + 1, function (line) { - var text = line.text; - if (n == end.line) { text = text.slice(0, end.ch); } - if (n == start.line) { text = text.slice(start.ch); } - out.push(text); - ++n; - }); - return out -} -// Get the lines between from and to, as array of strings. -function getLines(doc, from, to) { - var out = []; - doc.iter(from, to, function (line) { out.push(line.text); }); // iter aborts when callback returns truthy value - return out -} - -// Update the height of a line, propagating the height change -// upwards to parent nodes. -function updateLineHeight(line, height) { - var diff = height - line.height; - if (diff) { for (var n = line; n; n = n.parent) { n.height += diff; } } -} - -// Given a line object, find its line number by walking up through -// its parent links. -function lineNo(line) { - if (line.parent == null) { return null } - var cur = line.parent, no = indexOf(cur.lines, line); - for (var chunk = cur.parent; chunk; cur = chunk, chunk = chunk.parent) { - for (var i = 0;; ++i) { - if (chunk.children[i] == cur) { break } - no += chunk.children[i].chunkSize(); - } - } - return no + cur.first -} - -// Find the line at the given vertical position, using the height -// information in the document tree. -function lineAtHeight(chunk, h) { - var n = chunk.first; - outer: do { - for (var i$1 = 0; i$1 < chunk.children.length; ++i$1) { - var child = chunk.children[i$1], ch = child.height; - if (h < ch) { chunk = child; continue outer } - h -= ch; - n += child.chunkSize(); - } - return n - } while (!chunk.lines) - var i = 0; - for (; i < chunk.lines.length; ++i) { - var line = chunk.lines[i], lh = line.height; - if (h < lh) { break } - h -= lh; - } - return n + i -} - -function isLine(doc, l) {return l >= doc.first && l < doc.first + doc.size} - -function lineNumberFor(options, i) { - return String(options.lineNumberFormatter(i + options.firstLineNumber)) -} - -// A Pos instance represents a position within the text. -function Pos(line, ch, sticky) { - if ( sticky === void 0 ) sticky = null; - - if (!(this instanceof Pos)) { return new Pos(line, ch, sticky) } - this.line = line; - this.ch = ch; - this.sticky = sticky; -} - -// Compare two positions, return 0 if they are the same, a negative -// number when a is less, and a positive number otherwise. 
-function cmp(a, b) { return a.line - b.line || a.ch - b.ch } - -function equalCursorPos(a, b) { return a.sticky == b.sticky && cmp(a, b) == 0 } - -function copyPos(x) {return Pos(x.line, x.ch)} -function maxPos(a, b) { return cmp(a, b) < 0 ? b : a } -function minPos(a, b) { return cmp(a, b) < 0 ? a : b } - -// Most of the external API clips given positions to make sure they -// actually exist within the document. -function clipLine(doc, n) {return Math.max(doc.first, Math.min(n, doc.first + doc.size - 1))} -function clipPos(doc, pos) { - if (pos.line < doc.first) { return Pos(doc.first, 0) } - var last = doc.first + doc.size - 1; - if (pos.line > last) { return Pos(last, getLine(doc, last).text.length) } - return clipToLen(pos, getLine(doc, pos.line).text.length) -} -function clipToLen(pos, linelen) { - var ch = pos.ch; - if (ch == null || ch > linelen) { return Pos(pos.line, linelen) } - else if (ch < 0) { return Pos(pos.line, 0) } - else { return pos } -} -function clipPosArray(doc, array) { - var out = []; - for (var i = 0; i < array.length; i++) { out[i] = clipPos(doc, array[i]); } - return out -} - -// Optimize some code when these features are not used. -var sawReadOnlySpans = false; -var sawCollapsedSpans = false; - -function seeReadOnlySpans() { - sawReadOnlySpans = true; -} - -function seeCollapsedSpans() { - sawCollapsedSpans = true; -} - -// TEXTMARKER SPANS - -function MarkedSpan(marker, from, to) { - this.marker = marker; - this.from = from; this.to = to; -} - -// Search an array of spans for a span matching the given marker. -function getMarkedSpanFor(spans, marker) { - if (spans) { for (var i = 0; i < spans.length; ++i) { - var span = spans[i]; - if (span.marker == marker) { return span } - } } -} -// Remove a span from an array, returning undefined if no spans are -// left (we don't store arrays for lines without spans). -function removeMarkedSpan(spans, span) { - var r; - for (var i = 0; i < spans.length; ++i) - { if (spans[i] != span) { (r || (r = [])).push(spans[i]); } } - return r -} -// Add a span to a line. -function addMarkedSpan(line, span) { - line.markedSpans = line.markedSpans ? line.markedSpans.concat([span]) : [span]; - span.marker.attachLine(line); -} - -// Used for the algorithm that adjusts markers for a change in the -// document. These functions cut an array of spans at a given -// character position, returning an array of remaining chunks (or -// undefined if nothing remains). -function markedSpansBefore(old, startCh, isInsert) { - var nw; - if (old) { for (var i = 0; i < old.length; ++i) { - var span = old[i], marker = span.marker; - var startsBefore = span.from == null || (marker.inclusiveLeft ? span.from <= startCh : span.from < startCh); - if (startsBefore || span.from == startCh && marker.type == "bookmark" && (!isInsert || !span.marker.insertLeft)) { - var endsAfter = span.to == null || (marker.inclusiveRight ? span.to >= startCh : span.to > startCh);(nw || (nw = [])).push(new MarkedSpan(marker, span.from, endsAfter ? null : span.to)); - } - } } - return nw -} -function markedSpansAfter(old, endCh, isInsert) { - var nw; - if (old) { for (var i = 0; i < old.length; ++i) { - var span = old[i], marker = span.marker; - var endsAfter = span.to == null || (marker.inclusiveRight ? span.to >= endCh : span.to > endCh); - if (endsAfter || span.from == endCh && marker.type == "bookmark" && (!isInsert || span.marker.insertLeft)) { - var startsBefore = span.from == null || (marker.inclusiveLeft ? 
span.from <= endCh : span.from < endCh);(nw || (nw = [])).push(new MarkedSpan(marker, startsBefore ? null : span.from - endCh, - span.to == null ? null : span.to - endCh)); - } - } } - return nw -} - -// Given a change object, compute the new set of marker spans that -// cover the line in which the change took place. Removes spans -// entirely within the change, reconnects spans belonging to the -// same marker that appear on both sides of the change, and cuts off -// spans partially within the change. Returns an array of span -// arrays with one element for each line in (after) the change. -function stretchSpansOverChange(doc, change) { - if (change.full) { return null } - var oldFirst = isLine(doc, change.from.line) && getLine(doc, change.from.line).markedSpans; - var oldLast = isLine(doc, change.to.line) && getLine(doc, change.to.line).markedSpans; - if (!oldFirst && !oldLast) { return null } - - var startCh = change.from.ch, endCh = change.to.ch, isInsert = cmp(change.from, change.to) == 0; - // Get the spans that 'stick out' on both sides - var first = markedSpansBefore(oldFirst, startCh, isInsert); - var last = markedSpansAfter(oldLast, endCh, isInsert); - - // Next, merge those two ends - var sameLine = change.text.length == 1, offset = lst(change.text).length + (sameLine ? startCh : 0); - if (first) { - // Fix up .to properties of first - for (var i = 0; i < first.length; ++i) { - var span = first[i]; - if (span.to == null) { - var found = getMarkedSpanFor(last, span.marker); - if (!found) { span.to = startCh; } - else if (sameLine) { span.to = found.to == null ? null : found.to + offset; } - } - } - } - if (last) { - // Fix up .from in last (or move them into first in case of sameLine) - for (var i$1 = 0; i$1 < last.length; ++i$1) { - var span$1 = last[i$1]; - if (span$1.to != null) { span$1.to += offset; } - if (span$1.from == null) { - var found$1 = getMarkedSpanFor(first, span$1.marker); - if (!found$1) { - span$1.from = offset; - if (sameLine) { (first || (first = [])).push(span$1); } - } - } else { - span$1.from += offset; - if (sameLine) { (first || (first = [])).push(span$1); } - } - } - } - // Make sure we didn't create any zero-length spans - if (first) { first = clearEmptySpans(first); } - if (last && last != first) { last = clearEmptySpans(last); } - - var newMarkers = [first]; - if (!sameLine) { - // Fill gap with whole-line-spans - var gap = change.text.length - 2, gapMarkers; - if (gap > 0 && first) - { for (var i$2 = 0; i$2 < first.length; ++i$2) - { if (first[i$2].to == null) - { (gapMarkers || (gapMarkers = [])).push(new MarkedSpan(first[i$2].marker, null, null)); } } } - for (var i$3 = 0; i$3 < gap; ++i$3) - { newMarkers.push(gapMarkers); } - newMarkers.push(last); - } - return newMarkers -} - -// Remove spans that are empty and don't have a clearWhenEmpty -// option of false. -function clearEmptySpans(spans) { - for (var i = 0; i < spans.length; ++i) { - var span = spans[i]; - if (span.from != null && span.from == span.to && span.marker.clearWhenEmpty !== false) - { spans.splice(i--, 1); } - } - if (!spans.length) { return null } - return spans -} - -// Used to 'clip' out readOnly ranges when making a change. 
-function removeReadOnlyRanges(doc, from, to) { - var markers = null; - doc.iter(from.line, to.line + 1, function (line) { - if (line.markedSpans) { for (var i = 0; i < line.markedSpans.length; ++i) { - var mark = line.markedSpans[i].marker; - if (mark.readOnly && (!markers || indexOf(markers, mark) == -1)) - { (markers || (markers = [])).push(mark); } - } } - }); - if (!markers) { return null } - var parts = [{from: from, to: to}]; - for (var i = 0; i < markers.length; ++i) { - var mk = markers[i], m = mk.find(0); - for (var j = 0; j < parts.length; ++j) { - var p = parts[j]; - if (cmp(p.to, m.from) < 0 || cmp(p.from, m.to) > 0) { continue } - var newParts = [j, 1], dfrom = cmp(p.from, m.from), dto = cmp(p.to, m.to); - if (dfrom < 0 || !mk.inclusiveLeft && !dfrom) - { newParts.push({from: p.from, to: m.from}); } - if (dto > 0 || !mk.inclusiveRight && !dto) - { newParts.push({from: m.to, to: p.to}); } - parts.splice.apply(parts, newParts); - j += newParts.length - 3; - } - } - return parts -} - -// Connect or disconnect spans from a line. -function detachMarkedSpans(line) { - var spans = line.markedSpans; - if (!spans) { return } - for (var i = 0; i < spans.length; ++i) - { spans[i].marker.detachLine(line); } - line.markedSpans = null; -} -function attachMarkedSpans(line, spans) { - if (!spans) { return } - for (var i = 0; i < spans.length; ++i) - { spans[i].marker.attachLine(line); } - line.markedSpans = spans; -} - -// Helpers used when computing which overlapping collapsed span -// counts as the larger one. -function extraLeft(marker) { return marker.inclusiveLeft ? -1 : 0 } -function extraRight(marker) { return marker.inclusiveRight ? 1 : 0 } - -// Returns a number indicating which of two overlapping collapsed -// spans is larger (and thus includes the other). Falls back to -// comparing ids when the spans cover exactly the same range. -function compareCollapsedMarkers(a, b) { - var lenDiff = a.lines.length - b.lines.length; - if (lenDiff != 0) { return lenDiff } - var aPos = a.find(), bPos = b.find(); - var fromCmp = cmp(aPos.from, bPos.from) || extraLeft(a) - extraLeft(b); - if (fromCmp) { return -fromCmp } - var toCmp = cmp(aPos.to, bPos.to) || extraRight(a) - extraRight(b); - if (toCmp) { return toCmp } - return b.id - a.id -} - -// Find out whether a line ends or starts in a collapsed span. If -// so, return the marker for that span. -function collapsedSpanAtSide(line, start) { - var sps = sawCollapsedSpans && line.markedSpans, found; - if (sps) { for (var sp = (void 0), i = 0; i < sps.length; ++i) { - sp = sps[i]; - if (sp.marker.collapsed && (start ? sp.from : sp.to) == null && - (!found || compareCollapsedMarkers(found, sp.marker) < 0)) - { found = sp.marker; } - } } - return found -} -function collapsedSpanAtStart(line) { return collapsedSpanAtSide(line, true) } -function collapsedSpanAtEnd(line) { return collapsedSpanAtSide(line, false) } - -// Test whether there exists a collapsed span that partially -// overlaps (covers the start or end, but not both) of a new span. -// Such overlap is not allowed. 
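
Concretely (character positions invented for illustration):

    // An existing collapsed span covers ch 2 … 8 of a line.
    // A new span over 4 … 6  → nested inside it: allowed.
    // A new span over 0 … 10 → fully contains it: allowed.
    // A new span over 5 … 12 → covers its end but not its start:
    //                          partial overlap, so the check below
    //                          returns true and the mark is refused.
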
-function conflictingCollapsedRange(doc, lineNo$$1, from, to, marker) { - var line = getLine(doc, lineNo$$1); - var sps = sawCollapsedSpans && line.markedSpans; - if (sps) { for (var i = 0; i < sps.length; ++i) { - var sp = sps[i]; - if (!sp.marker.collapsed) { continue } - var found = sp.marker.find(0); - var fromCmp = cmp(found.from, from) || extraLeft(sp.marker) - extraLeft(marker); - var toCmp = cmp(found.to, to) || extraRight(sp.marker) - extraRight(marker); - if (fromCmp >= 0 && toCmp <= 0 || fromCmp <= 0 && toCmp >= 0) { continue } - if (fromCmp <= 0 && (sp.marker.inclusiveRight && marker.inclusiveLeft ? cmp(found.to, from) >= 0 : cmp(found.to, from) > 0) || - fromCmp >= 0 && (sp.marker.inclusiveRight && marker.inclusiveLeft ? cmp(found.from, to) <= 0 : cmp(found.from, to) < 0)) - { return true } - } } -} - -// A visual line is a line as drawn on the screen. Folding, for -// example, can cause multiple logical lines to appear on the same -// visual line. This finds the start of the visual line that the -// given line is part of (usually that is the line itself). -function visualLine(line) { - var merged; - while (merged = collapsedSpanAtStart(line)) - { line = merged.find(-1, true).line; } - return line -} - -function visualLineEnd(line) { - var merged; - while (merged = collapsedSpanAtEnd(line)) - { line = merged.find(1, true).line; } - return line -} - -// Returns an array of logical lines that continue the visual line -// started by the argument, or undefined if there are no such lines. -function visualLineContinued(line) { - var merged, lines; - while (merged = collapsedSpanAtEnd(line)) { - line = merged.find(1, true).line - ;(lines || (lines = [])).push(line); - } - return lines -} - -// Get the line number of the start of the visual line that the -// given line number is part of. -function visualLineNo(doc, lineN) { - var line = getLine(doc, lineN), vis = visualLine(line); - if (line == vis) { return lineN } - return lineNo(vis) -} - -// Get the line number of the start of the next visual line after -// the given line. -function visualLineEndNo(doc, lineN) { - if (lineN > doc.lastLine()) { return lineN } - var line = getLine(doc, lineN), merged; - if (!lineIsHidden(doc, line)) { return lineN } - while (merged = collapsedSpanAtEnd(line)) - { line = merged.find(1, true).line; } - return lineNo(line) + 1 -} - -// Compute whether a line is hidden. Lines count as hidden when they -// are part of a visual line that starts with another line, or when -// they are entirely covered by collapsed, non-widget span. 
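
A sketch of how these helpers relate (line numbers invented):

    // A collapsed (folded) marker runs from Pos(3, 4) to Pos(5, 2):
    //   visualLine(getLine(doc, 4)) → line 3 (lines 3-5 draw as one visual line)
    //   visualLineNo(doc, 5)        → 3
    //   lineIsHidden(doc, line 3)   → falsy (it starts the visual line)
    //   lineIsHidden(doc, line 4)   → true  (drawn only as part of line 3)
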
-function lineIsHidden(doc, line) { - var sps = sawCollapsedSpans && line.markedSpans; - if (sps) { for (var sp = (void 0), i = 0; i < sps.length; ++i) { - sp = sps[i]; - if (!sp.marker.collapsed) { continue } - if (sp.from == null) { return true } - if (sp.marker.widgetNode) { continue } - if (sp.from == 0 && sp.marker.inclusiveLeft && lineIsHiddenInner(doc, line, sp)) - { return true } - } } -} -function lineIsHiddenInner(doc, line, span) { - if (span.to == null) { - var end = span.marker.find(1, true); - return lineIsHiddenInner(doc, end.line, getMarkedSpanFor(end.line.markedSpans, span.marker)) - } - if (span.marker.inclusiveRight && span.to == line.text.length) - { return true } - for (var sp = (void 0), i = 0; i < line.markedSpans.length; ++i) { - sp = line.markedSpans[i]; - if (sp.marker.collapsed && !sp.marker.widgetNode && sp.from == span.to && - (sp.to == null || sp.to != span.from) && - (sp.marker.inclusiveLeft || span.marker.inclusiveRight) && - lineIsHiddenInner(doc, line, sp)) { return true } - } -} - -// Find the height above the given line. -function heightAtLine(lineObj) { - lineObj = visualLine(lineObj); - - var h = 0, chunk = lineObj.parent; - for (var i = 0; i < chunk.lines.length; ++i) { - var line = chunk.lines[i]; - if (line == lineObj) { break } - else { h += line.height; } - } - for (var p = chunk.parent; p; chunk = p, p = chunk.parent) { - for (var i$1 = 0; i$1 < p.children.length; ++i$1) { - var cur = p.children[i$1]; - if (cur == chunk) { break } - else { h += cur.height; } - } - } - return h -} - -// Compute the character length of a line, taking into account -// collapsed ranges (see markText) that might hide parts, and join -// other lines onto it. -function lineLength(line) { - if (line.height == 0) { return 0 } - var len = line.text.length, merged, cur = line; - while (merged = collapsedSpanAtStart(cur)) { - var found = merged.find(0, true); - cur = found.from.line; - len += found.from.ch - found.to.ch; - } - cur = line; - while (merged = collapsedSpanAtEnd(cur)) { - var found$1 = merged.find(0, true); - len -= cur.text.length - found$1.from.ch; - cur = found$1.to.line; - len += cur.text.length - found$1.to.ch; - } - return len -} - -// Find the longest line in the document. -function findMaxLine(cm) { - var d = cm.display, doc = cm.doc; - d.maxLine = getLine(doc, doc.first); - d.maxLineLength = lineLength(d.maxLine); - d.maxLineChanged = true; - doc.iter(function (line) { - var len = lineLength(line); - if (len > d.maxLineLength) { - d.maxLineLength = len; - d.maxLine = line; - } - }); -} - -// BIDI HELPERS - -function iterateBidiSections(order, from, to, f) { - if (!order) { return f(from, to, "ltr", 0) } - var found = false; - for (var i = 0; i < order.length; ++i) { - var part = order[i]; - if (part.from < to && part.to > from || from == to && part.to == from) { - f(Math.max(part.from, from), Math.min(part.to, to), part.level == 1 ? "rtl" : "ltr", i); - found = true; - } - } - if (!found) { f(from, to, "ltr"); } -} - -var bidiOther = null; -function getBidiPartAt(order, ch, sticky) { - var found; - bidiOther = null; - for (var i = 0; i < order.length; ++i) { - var cur = order[i]; - if (cur.from < ch && cur.to > ch) { return i } - if (cur.to == ch) { - if (cur.from != cur.to && sticky == "before") { found = i; } - else { bidiOther = i; } - } - if (cur.from == ch) { - if (cur.from != cur.to && sticky != "before") { found = i; } - else { bidiOther = i; } - } - } - return found != null ? 
found : bidiOther -} - -// Bidirectional ordering algorithm -// See http://unicode.org/reports/tr9/tr9-13.html for the algorithm -// that this (partially) implements. - -// One-char codes used for character types: -// L (L): Left-to-Right -// R (R): Right-to-Left -// r (AL): Right-to-Left Arabic -// 1 (EN): European Number -// + (ES): European Number Separator -// % (ET): European Number Terminator -// n (AN): Arabic Number -// , (CS): Common Number Separator -// m (NSM): Non-Spacing Mark -// b (BN): Boundary Neutral -// s (B): Paragraph Separator -// t (S): Segment Separator -// w (WS): Whitespace -// N (ON): Other Neutrals - -// Returns null if characters are ordered as they appear -// (left-to-right), or an array of sections ({from, to, level} -// objects) in the order in which they occur visually. -var bidiOrdering = (function() { - // Character types for codepoints 0 to 0xff - var lowTypes = "bbbbbbbbbtstwsbbbbbbbbbbbbbbssstwNN%%%NNNNNN,N,N1111111111NNNNNNNLLLLLLLLLLLLLLLLLLLLLLLLLLNNNNNNLLLLLLLLLLLLLLLLLLLLLLLLLLNNNNbbbbbbsbbbbbbbbbbbbbbbbbbbbbbbbbb,N%%%%NNNNLNNNNN%%11NLNNN1LNNNNNLLLLLLLLLLLLLLLLLLLLLLLNLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLN"; - // Character types for codepoints 0x600 to 0x6f9 - var arabicTypes = "nnnnnnNNr%%r,rNNmmmmmmmmmmmrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrmmmmmmmmmmmmmmmmmmmmmnnnnnnnnnn%nnrrrmrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrmmmmmmmnNmmmmmmrrmmNmmmmrr1111111111"; - function charType(code) { - if (code <= 0xf7) { return lowTypes.charAt(code) } - else if (0x590 <= code && code <= 0x5f4) { return "R" } - else if (0x600 <= code && code <= 0x6f9) { return arabicTypes.charAt(code - 0x600) } - else if (0x6ee <= code && code <= 0x8ac) { return "r" } - else if (0x2000 <= code && code <= 0x200b) { return "w" } - else if (code == 0x200c) { return "b" } - else { return "L" } - } - - var bidiRE = /[\u0590-\u05f4\u0600-\u06ff\u0700-\u08ac]/; - var isNeutral = /[stwN]/, isStrong = /[LRr]/, countsAsLeft = /[Lb1n]/, countsAsNum = /[1n]/; - - function BidiSpan(level, from, to) { - this.level = level; - this.from = from; this.to = to; - } - - return function(str, direction) { - var outerType = direction == "ltr" ? "L" : "R"; - - if (str.length == 0 || direction == "ltr" && !bidiRE.test(str)) { return false } - var len = str.length, types = []; - for (var i = 0; i < len; ++i) - { types.push(charType(str.charCodeAt(i))); } - - // W1. Examine each non-spacing mark (NSM) in the level run, and - // change the type of the NSM to the type of the previous - // character. If the NSM is at the start of the level run, it will - // get the type of sor. - for (var i$1 = 0, prev = outerType; i$1 < len; ++i$1) { - var type = types[i$1]; - if (type == "m") { types[i$1] = prev; } - else { prev = type; } - } - - // W2. Search backwards from each instance of a European number - // until the first strong type (R, L, AL, or sor) is found. If an - // AL is found, change the type of the European number to Arabic - // number. - // W3. Change all ALs to R. - for (var i$2 = 0, cur = outerType; i$2 < len; ++i$2) { - var type$1 = types[i$2]; - if (type$1 == "1" && cur == "r") { types[i$2] = "n"; } - else if (isStrong.test(type$1)) { cur = type$1; if (type$1 == "r") { types[i$2] = "R"; } } - } - - // W4. A single European separator between two European numbers - // changes to a European number. A single common separator between - // two numbers of the same type changes to that type. 
- for (var i$3 = 1, prev$1 = types[0]; i$3 < len - 1; ++i$3) { - var type$2 = types[i$3]; - if (type$2 == "+" && prev$1 == "1" && types[i$3+1] == "1") { types[i$3] = "1"; } - else if (type$2 == "," && prev$1 == types[i$3+1] && - (prev$1 == "1" || prev$1 == "n")) { types[i$3] = prev$1; } - prev$1 = type$2; - } - - // W5. A sequence of European terminators adjacent to European - // numbers changes to all European numbers. - // W6. Otherwise, separators and terminators change to Other - // Neutral. - for (var i$4 = 0; i$4 < len; ++i$4) { - var type$3 = types[i$4]; - if (type$3 == ",") { types[i$4] = "N"; } - else if (type$3 == "%") { - var end = (void 0); - for (end = i$4 + 1; end < len && types[end] == "%"; ++end) {} - var replace = (i$4 && types[i$4-1] == "!") || (end < len && types[end] == "1") ? "1" : "N"; - for (var j = i$4; j < end; ++j) { types[j] = replace; } - i$4 = end - 1; - } - } - - // W7. Search backwards from each instance of a European number - // until the first strong type (R, L, or sor) is found. If an L is - // found, then change the type of the European number to L. - for (var i$5 = 0, cur$1 = outerType; i$5 < len; ++i$5) { - var type$4 = types[i$5]; - if (cur$1 == "L" && type$4 == "1") { types[i$5] = "L"; } - else if (isStrong.test(type$4)) { cur$1 = type$4; } - } - - // N1. A sequence of neutrals takes the direction of the - // surrounding strong text if the text on both sides has the same - // direction. European and Arabic numbers act as if they were R in - // terms of their influence on neutrals. Start-of-level-run (sor) - // and end-of-level-run (eor) are used at level run boundaries. - // N2. Any remaining neutrals take the embedding direction. - for (var i$6 = 0; i$6 < len; ++i$6) { - if (isNeutral.test(types[i$6])) { - var end$1 = (void 0); - for (end$1 = i$6 + 1; end$1 < len && isNeutral.test(types[end$1]); ++end$1) {} - var before = (i$6 ? types[i$6-1] : outerType) == "L"; - var after = (end$1 < len ? types[end$1] : outerType) == "L"; - var replace$1 = before == after ? (before ? "L" : "R") : outerType; - for (var j$1 = i$6; j$1 < end$1; ++j$1) { types[j$1] = replace$1; } - i$6 = end$1 - 1; - } - } - - // Here we depart from the documented algorithm, in order to avoid - // building up an actual levels array. Since there are only three - // levels (0, 1, 2) in an implementation that doesn't take - // explicit embedding into account, we can build up the order on - // the fly, without following the level-based algorithm. 
- var order = [], m; - for (var i$7 = 0; i$7 < len;) { - if (countsAsLeft.test(types[i$7])) { - var start = i$7; - for (++i$7; i$7 < len && countsAsLeft.test(types[i$7]); ++i$7) {} - order.push(new BidiSpan(0, start, i$7)); - } else { - var pos = i$7, at = order.length; - for (++i$7; i$7 < len && types[i$7] != "L"; ++i$7) {} - for (var j$2 = pos; j$2 < i$7;) { - if (countsAsNum.test(types[j$2])) { - if (pos < j$2) { order.splice(at, 0, new BidiSpan(1, pos, j$2)); } - var nstart = j$2; - for (++j$2; j$2 < i$7 && countsAsNum.test(types[j$2]); ++j$2) {} - order.splice(at, 0, new BidiSpan(2, nstart, j$2)); - pos = j$2; - } else { ++j$2; } - } - if (pos < i$7) { order.splice(at, 0, new BidiSpan(1, pos, i$7)); } - } - } - if (direction == "ltr") { - if (order[0].level == 1 && (m = str.match(/^\s+/))) { - order[0].from = m[0].length; - order.unshift(new BidiSpan(0, 0, m[0].length)); - } - if (lst(order).level == 1 && (m = str.match(/\s+$/))) { - lst(order).to -= m[0].length; - order.push(new BidiSpan(0, len - m[0].length, len)); - } - } - - return direction == "rtl" ? order.reverse() : order - } -})(); - -// Get the bidi ordering for the given line (and cache it). Returns -// false for lines that are fully left-to-right, and an array of -// BidiSpan objects otherwise. -function getOrder(line, direction) { - var order = line.order; - if (order == null) { order = line.order = bidiOrdering(line.text, direction); } - return order -} - -// EVENT HANDLING - -// Lightweight event framework. on/off also work on DOM nodes, -// registering native DOM handlers. - -var noHandlers = []; - -var on = function(emitter, type, f) { - if (emitter.addEventListener) { - emitter.addEventListener(type, f, false); - } else if (emitter.attachEvent) { - emitter.attachEvent("on" + type, f); - } else { - var map$$1 = emitter._handlers || (emitter._handlers = {}); - map$$1[type] = (map$$1[type] || noHandlers).concat(f); - } -}; - -function getHandlers(emitter, type) { - return emitter._handlers && emitter._handlers[type] || noHandlers -} - -function off(emitter, type, f) { - if (emitter.removeEventListener) { - emitter.removeEventListener(type, f, false); - } else if (emitter.detachEvent) { - emitter.detachEvent("on" + type, f); - } else { - var map$$1 = emitter._handlers, arr = map$$1 && map$$1[type]; - if (arr) { - var index = indexOf(arr, f); - if (index > -1) - { map$$1[type] = arr.slice(0, index).concat(arr.slice(index + 1)); } - } - } -} - -function signal(emitter, type /*, values...*/) { - var handlers = getHandlers(emitter, type); - if (!handlers.length) { return } - var args = Array.prototype.slice.call(arguments, 2); - for (var i = 0; i < handlers.length; ++i) { handlers[i].apply(null, args); } -} - -// The DOM events that CodeMirror handles can be overridden by -// registering a (non-DOM) handler on the editor for the event name, -// and preventDefault-ing the event in that handler. 
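
A minimal sketch of the plain-object path through on/signal/off above (DOM nodes take the addEventListener branch instead):

    var emitter = {};
    function logChange(value) { console.log("change:", value); }
    on(emitter, "change", logChange);   // stored in emitter._handlers
    signal(emitter, "change", 42);      // calls logChange(42)
    off(emitter, "change", logChange);  // unregisters the handler again
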
-function signalDOMEvent(cm, e, override) { - if (typeof e == "string") - { e = {type: e, preventDefault: function() { this.defaultPrevented = true; }}; } - signal(cm, override || e.type, cm, e); - return e_defaultPrevented(e) || e.codemirrorIgnore -} - -function signalCursorActivity(cm) { - var arr = cm._handlers && cm._handlers.cursorActivity; - if (!arr) { return } - var set = cm.curOp.cursorActivityHandlers || (cm.curOp.cursorActivityHandlers = []); - for (var i = 0; i < arr.length; ++i) { if (indexOf(set, arr[i]) == -1) - { set.push(arr[i]); } } -} - -function hasHandler(emitter, type) { - return getHandlers(emitter, type).length > 0 -} - -// Add on and off methods to a constructor's prototype, to make -// registering events on such objects more convenient. -function eventMixin(ctor) { - ctor.prototype.on = function(type, f) {on(this, type, f);}; - ctor.prototype.off = function(type, f) {off(this, type, f);}; -} - -// Due to the fact that we still support jurassic IE versions, some -// compatibility wrappers are needed. - -function e_preventDefault(e) { - if (e.preventDefault) { e.preventDefault(); } - else { e.returnValue = false; } -} -function e_stopPropagation(e) { - if (e.stopPropagation) { e.stopPropagation(); } - else { e.cancelBubble = true; } -} -function e_defaultPrevented(e) { - return e.defaultPrevented != null ? e.defaultPrevented : e.returnValue == false -} -function e_stop(e) {e_preventDefault(e); e_stopPropagation(e);} - -function e_target(e) {return e.target || e.srcElement} -function e_button(e) { - var b = e.which; - if (b == null) { - if (e.button & 1) { b = 1; } - else if (e.button & 2) { b = 3; } - else if (e.button & 4) { b = 2; } - } - if (mac && e.ctrlKey && b == 1) { b = 3; } - return b -} - -// Detect drag-and-drop -var dragAndDrop = function() { - // There is *some* kind of drag-and-drop support in IE6-8, but I - // couldn't get it to work yet. - if (ie && ie_version < 9) { return false } - var div = elt('div'); - return "draggable" in div || "dragDrop" in div -}(); - -var zwspSupported; -function zeroWidthElement(measure) { - if (zwspSupported == null) { - var test = elt("span", "\u200b"); - removeChildrenAndAdd(measure, elt("span", [test, document.createTextNode("x")])); - if (measure.firstChild.offsetHeight != 0) - { zwspSupported = test.offsetWidth <= 1 && test.offsetHeight > 2 && !(ie && ie_version < 8); } - } - var node = zwspSupported ? elt("span", "\u200b") : - elt("span", "\u00a0", null, "display: inline-block; width: 1px; margin-right: -1px"); - node.setAttribute("cm-text", ""); - return node -} - -// Feature-detect IE's crummy client rect reporting for bidi text -var badBidiRects; -function hasBadBidiRects(measure) { - if (badBidiRects != null) { return badBidiRects } - var txt = removeChildrenAndAdd(measure, document.createTextNode("A\u062eA")); - var r0 = range(txt, 0, 1).getBoundingClientRect(); - var r1 = range(txt, 1, 2).getBoundingClientRect(); - removeChildren(measure); - if (!r0 || r0.left == r0.right) { return false } // Safari returns null in some cases (#2780) - return badBidiRects = (r1.right - r0.right < 3) -} - -// See if "".split is the broken IE version, if so, provide an -// alternative way to split lines. -var splitLinesAuto = "\n\nb".split(/\n/).length != 3 ? function (string) { - var pos = 0, result = [], l = string.length; - while (pos <= l) { - var nl = string.indexOf("\n", pos); - if (nl == -1) { nl = string.length; } - var line = string.slice(pos, string.charAt(nl - 1) == "\r" ? 
nl - 1 : nl); - var rt = line.indexOf("\r"); - if (rt != -1) { - result.push(line.slice(0, rt)); - pos += rt + 1; - } else { - result.push(line); - pos = nl + 1; - } - } - return result -} : function (string) { return string.split(/\r\n?|\n/); }; - -var hasSelection = window.getSelection ? function (te) { - try { return te.selectionStart != te.selectionEnd } - catch(e) { return false } -} : function (te) { - var range$$1; - try {range$$1 = te.ownerDocument.selection.createRange();} - catch(e) {} - if (!range$$1 || range$$1.parentElement() != te) { return false } - return range$$1.compareEndPoints("StartToEnd", range$$1) != 0 -}; - -var hasCopyEvent = (function () { - var e = elt("div"); - if ("oncopy" in e) { return true } - e.setAttribute("oncopy", "return;"); - return typeof e.oncopy == "function" -})(); - -var badZoomedRects = null; -function hasBadZoomedRects(measure) { - if (badZoomedRects != null) { return badZoomedRects } - var node = removeChildrenAndAdd(measure, elt("span", "x")); - var normal = node.getBoundingClientRect(); - var fromRange = range(node, 0, 1).getBoundingClientRect(); - return badZoomedRects = Math.abs(normal.left - fromRange.left) > 1 -} - -// Known modes, by name and by MIME -var modes = {}; -var mimeModes = {}; - -// Extra arguments are stored as the mode's dependencies, which is -// used by (legacy) mechanisms like loadmode.js to automatically -// load a mode. (Preferred mechanism is the require/define calls.) -function defineMode(name, mode) { - if (arguments.length > 2) - { mode.dependencies = Array.prototype.slice.call(arguments, 2); } - modes[name] = mode; -} - -function defineMIME(mime, spec) { - mimeModes[mime] = spec; -} - -// Given a MIME type, a {name, ...options} config object, or a name -// string, return a mode config object. -function resolveMode(spec) { - if (typeof spec == "string" && mimeModes.hasOwnProperty(spec)) { - spec = mimeModes[spec]; - } else if (spec && typeof spec.name == "string" && mimeModes.hasOwnProperty(spec.name)) { - var found = mimeModes[spec.name]; - if (typeof found == "string") { found = {name: found}; } - spec = createObj(found, spec); - spec.name = found.name; - } else if (typeof spec == "string" && /^[\w\-]+\/[\w\-]+\+xml$/.test(spec)) { - return resolveMode("application/xml") - } else if (typeof spec == "string" && /^[\w\-]+\/[\w\-]+\+json$/.test(spec)) { - return resolveMode("application/json") - } - if (typeof spec == "string") { return {name: spec} } - else { return spec || {name: "null"} } -} - -// Given a mode spec (anything that resolveMode accepts), find and -// initialize an actual mode object. -function getMode(options, spec) { - spec = resolveMode(spec); - var mfactory = modes[spec.name]; - if (!mfactory) { return getMode(options, "text/plain") } - var modeObj = mfactory(options, spec); - if (modeExtensions.hasOwnProperty(spec.name)) { - var exts = modeExtensions[spec.name]; - for (var prop in exts) { - if (!exts.hasOwnProperty(prop)) { continue } - if (modeObj.hasOwnProperty(prop)) { modeObj["_" + prop] = modeObj[prop]; } - modeObj[prop] = exts[prop]; - } - } - modeObj.name = spec.name; - if (spec.helperType) { modeObj.helperType = spec.helperType; } - if (spec.modeProps) { for (var prop$1 in spec.modeProps) - { modeObj[prop$1] = spec.modeProps[prop$1]; } } - - return modeObj -} - -// This can be used to attach properties to mode objects from -// outside the actual mode definition. -var modeExtensions = {}; -function extendMode(mode, properties) { - var exts = modeExtensions.hasOwnProperty(mode) ? 
modeExtensions[mode] : (modeExtensions[mode] = {}); - copyObj(properties, exts); -} - -function copyState(mode, state) { - if (state === true) { return state } - if (mode.copyState) { return mode.copyState(state) } - var nstate = {}; - for (var n in state) { - var val = state[n]; - if (val instanceof Array) { val = val.concat([]); } - nstate[n] = val; - } - return nstate -} - -// Given a mode and a state (for that mode), find the inner mode and -// state at the position that the state refers to. -function innerMode(mode, state) { - var info; - while (mode.innerMode) { - info = mode.innerMode(state); - if (!info || info.mode == mode) { break } - state = info.state; - mode = info.mode; - } - return info || {mode: mode, state: state} -} - -function startState(mode, a1, a2) { - return mode.startState ? mode.startState(a1, a2) : true -} - -// STRING STREAM - -// Fed to the mode parsers, provides helper functions to make -// parsers more succinct. - -var StringStream = function(string, tabSize, lineOracle) { - this.pos = this.start = 0; - this.string = string; - this.tabSize = tabSize || 8; - this.lastColumnPos = this.lastColumnValue = 0; - this.lineStart = 0; - this.lineOracle = lineOracle; -}; - -StringStream.prototype.eol = function () {return this.pos >= this.string.length}; -StringStream.prototype.sol = function () {return this.pos == this.lineStart}; -StringStream.prototype.peek = function () {return this.string.charAt(this.pos) || undefined}; -StringStream.prototype.next = function () { - if (this.pos < this.string.length) - { return this.string.charAt(this.pos++) } -}; -StringStream.prototype.eat = function (match) { - var ch = this.string.charAt(this.pos); - var ok; - if (typeof match == "string") { ok = ch == match; } - else { ok = ch && (match.test ? match.test(ch) : match(ch)); } - if (ok) {++this.pos; return ch} -}; -StringStream.prototype.eatWhile = function (match) { - var start = this.pos; - while (this.eat(match)){} - return this.pos > start -}; -StringStream.prototype.eatSpace = function () { - var this$1 = this; - - var start = this.pos; - while (/[\s\u00a0]/.test(this.string.charAt(this.pos))) { ++this$1.pos; } - return this.pos > start -}; -StringStream.prototype.skipToEnd = function () {this.pos = this.string.length;}; -StringStream.prototype.skipTo = function (ch) { - var found = this.string.indexOf(ch, this.pos); - if (found > -1) {this.pos = found; return true} -}; -StringStream.prototype.backUp = function (n) {this.pos -= n;}; -StringStream.prototype.column = function () { - if (this.lastColumnPos < this.start) { - this.lastColumnValue = countColumn(this.string, this.start, this.tabSize, this.lastColumnPos, this.lastColumnValue); - this.lastColumnPos = this.start; - } - return this.lastColumnValue - (this.lineStart ? countColumn(this.string, this.lineStart, this.tabSize) : 0) -}; -StringStream.prototype.indentation = function () { - return countColumn(this.string, null, this.tabSize) - - (this.lineStart ? countColumn(this.string, this.lineStart, this.tabSize) : 0) -}; -StringStream.prototype.match = function (pattern, consume, caseInsensitive) { - if (typeof pattern == "string") { - var cased = function (str) { return caseInsensitive ? 
str.toLowerCase() : str; }; - var substr = this.string.substr(this.pos, pattern.length); - if (cased(substr) == cased(pattern)) { - if (consume !== false) { this.pos += pattern.length; } - return true - } - } else { - var match = this.string.slice(this.pos).match(pattern); - if (match && match.index > 0) { return null } - if (match && consume !== false) { this.pos += match[0].length; } - return match - } -}; -StringStream.prototype.current = function (){return this.string.slice(this.start, this.pos)}; -StringStream.prototype.hideFirstChars = function (n, inner) { - this.lineStart += n; - try { return inner() } - finally { this.lineStart -= n; } -}; -StringStream.prototype.lookAhead = function (n) { - var oracle = this.lineOracle; - return oracle && oracle.lookAhead(n) -}; -StringStream.prototype.baseToken = function () { - var oracle = this.lineOracle; - return oracle && oracle.baseToken(this.pos) -}; - -var SavedContext = function(state, lookAhead) { - this.state = state; - this.lookAhead = lookAhead; -}; - -var Context = function(doc, state, line, lookAhead) { - this.state = state; - this.doc = doc; - this.line = line; - this.maxLookAhead = lookAhead || 0; - this.baseTokens = null; - this.baseTokenPos = 1; -}; - -Context.prototype.lookAhead = function (n) { - var line = this.doc.getLine(this.line + n); - if (line != null && n > this.maxLookAhead) { this.maxLookAhead = n; } - return line -}; - -Context.prototype.baseToken = function (n) { - var this$1 = this; - - if (!this.baseTokens) { return null } - while (this.baseTokens[this.baseTokenPos] <= n) - { this$1.baseTokenPos += 2; } - var type = this.baseTokens[this.baseTokenPos + 1]; - return {type: type && type.replace(/( |^)overlay .*/, ""), - size: this.baseTokens[this.baseTokenPos] - n} -}; - -Context.prototype.nextLine = function () { - this.line++; - if (this.maxLookAhead > 0) { this.maxLookAhead--; } -}; - -Context.fromSaved = function (doc, saved, line) { - if (saved instanceof SavedContext) - { return new Context(doc, copyState(doc.mode, saved.state), line, saved.lookAhead) } - else - { return new Context(doc, copyState(doc.mode, saved), line) } -}; - -Context.prototype.save = function (copy) { - var state = copy !== false ? copyState(this.doc.mode, this.state) : this.state; - return this.maxLookAhead > 0 ? new SavedContext(state, this.maxLookAhead) : state -}; - - -// Compute a style array (an array starting with a mode generation -// -- for invalidation -- followed by pairs of end positions and -// style strings), which is used to highlight the tokens on the -// line. -function highlightLine(cm, line, context, forceToEnd) { - // A styles array always starts with a number identifying the - // mode/overlays that it is based on (for easy invalidation). - var st = [cm.state.modeGen], lineClasses = {}; - // Compute the base array of styles - runMode(cm, line.text, cm.doc.mode, context, function (end, style) { return st.push(end, style); }, - lineClasses, forceToEnd); - var state = context.state; - - // Run overlays, adjust style array. 
- var loop = function ( o ) { - context.baseTokens = st; - var overlay = cm.state.overlays[o], i = 1, at = 0; - context.state = true; - runMode(cm, line.text, overlay.mode, context, function (end, style) { - var start = i; - // Ensure there's a token end at the current position, and that i points at it - while (at < end) { - var i_end = st[i]; - if (i_end > end) - { st.splice(i, 1, end, st[i+1], i_end); } - i += 2; - at = Math.min(end, i_end); - } - if (!style) { return } - if (overlay.opaque) { - st.splice(start, i - start, end, "overlay " + style); - i = start + 2; - } else { - for (; start < i; start += 2) { - var cur = st[start+1]; - st[start+1] = (cur ? cur + " " : "") + "overlay " + style; - } - } - }, lineClasses); - context.state = state; - context.baseTokens = null; - context.baseTokenPos = 1; - }; - - for (var o = 0; o < cm.state.overlays.length; ++o) loop( o ); - - return {styles: st, classes: lineClasses.bgClass || lineClasses.textClass ? lineClasses : null} -} - -function getLineStyles(cm, line, updateFrontier) { - if (!line.styles || line.styles[0] != cm.state.modeGen) { - var context = getContextBefore(cm, lineNo(line)); - var resetState = line.text.length > cm.options.maxHighlightLength && copyState(cm.doc.mode, context.state); - var result = highlightLine(cm, line, context); - if (resetState) { context.state = resetState; } - line.stateAfter = context.save(!resetState); - line.styles = result.styles; - if (result.classes) { line.styleClasses = result.classes; } - else if (line.styleClasses) { line.styleClasses = null; } - if (updateFrontier === cm.doc.highlightFrontier) - { cm.doc.modeFrontier = Math.max(cm.doc.modeFrontier, ++cm.doc.highlightFrontier); } - } - return line.styles -} - -function getContextBefore(cm, n, precise) { - var doc = cm.doc, display = cm.display; - if (!doc.mode.startState) { return new Context(doc, true, n) } - var start = findStartLine(cm, n, precise); - var saved = start > doc.first && getLine(doc, start - 1).stateAfter; - var context = saved ? Context.fromSaved(doc, saved, start) : new Context(doc, startState(doc.mode), start); - - doc.iter(start, n, function (line) { - processLine(cm, line.text, context); - var pos = context.line; - line.stateAfter = pos == n - 1 || pos % 5 == 0 || pos >= display.viewFrom && pos < display.viewTo ? context.save() : null; - context.nextLine(); - }); - if (precise) { doc.modeFrontier = context.line; } - return context -} - -// Lightweight form of highlight -- proceed over this line and -// update state, but don't save a style array. Used for lines that -// aren't currently visible. 
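
To make the visible/off-screen split concrete (token boundaries and style names invented for illustration):

    // Visible line: highlightLine produces a styles array of
    // [modeGen, end, style, end, style, ...] pairs; for "var x" under a
    // JavaScript-like mode it might look roughly like:
    //   [modeGen, 3, "keyword", 4, null, 5, "def"]
    // Off-screen line: processLine (below) just advances context.state
    // token by token and discards the styles.
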
-function processLine(cm, text, context, startAt) {
-  var mode = cm.doc.mode;
-  var stream = new StringStream(text, cm.options.tabSize, context);
-  stream.start = stream.pos = startAt || 0;
-  if (text == "") { callBlankLine(mode, context.state); }
-  while (!stream.eol()) {
-    readToken(mode, stream, context.state);
-    stream.start = stream.pos;
-  }
-}
-
-function callBlankLine(mode, state) {
-  if (mode.blankLine) { return mode.blankLine(state) }
-  if (!mode.innerMode) { return }
-  var inner = innerMode(mode, state);
-  if (inner.mode.blankLine) { return inner.mode.blankLine(inner.state) }
-}
-
-function readToken(mode, stream, state, inner) {
-  for (var i = 0; i < 10; i++) {
-    if (inner) { inner[0] = innerMode(mode, state).mode; }
-    var style = mode.token(stream, state);
-    if (stream.pos > stream.start) { return style }
-  }
-  throw new Error("Mode " + mode.name + " failed to advance stream.")
-}
-
-var Token = function(stream, type, state) {
-  this.start = stream.start; this.end = stream.pos;
-  this.string = stream.current();
-  this.type = type || null;
-  this.state = state;
-};
-
-// Utility for getTokenAt and getLineTokens
-function takeToken(cm, pos, precise, asArray) {
-  var doc = cm.doc, mode = doc.mode, style;
-  pos = clipPos(doc, pos);
-  var line = getLine(doc, pos.line), context = getContextBefore(cm, pos.line, precise);
-  var stream = new StringStream(line.text, cm.options.tabSize, context), tokens;
-  if (asArray) { tokens = []; }
-  while ((asArray || stream.pos < pos.ch) && !stream.eol()) {
-    stream.start = stream.pos;
-    style = readToken(mode, stream, context.state);
-    if (asArray) { tokens.push(new Token(stream, style, copyState(doc.mode, context.state))); }
-  }
-  return asArray ? tokens : new Token(stream, style, context.state)
-}
-
-function extractLineClasses(type, output) {
-  if (type) { for (;;) {
-    var lineClass = type.match(/(?:^|\s+)line-(background-)?(\S+)/);
-    if (!lineClass) { break }
-    type = type.slice(0, lineClass.index) + type.slice(lineClass.index + lineClass[0].length);
-    var prop = lineClass[1] ? "bgClass" : "textClass";
-    if (output[prop] == null)
-      { output[prop] = lineClass[2]; }
-    else if (!(new RegExp("(?:^|\\s)" + lineClass[2] + "(?:$|\\s)")).test(output[prop]))
-      { output[prop] += " " + lineClass[2]; }
-  } }
-  return type
-}
-
-// Run the given mode's parser over a line, calling f for each token.
-function runMode(cm, text, mode, context, f, lineClasses, forceToEnd) {
-  var flattenSpans = mode.flattenSpans;
-  if (flattenSpans == null) { flattenSpans = cm.options.flattenSpans; }
-  var curStart = 0, curStyle = null;
-  var stream = new StringStream(text, cm.options.tabSize, context), style;
-  var inner = cm.options.addModeClass && [null];
-  if (text == "") { extractLineClasses(callBlankLine(mode, context.state), lineClasses); }
-  while (!stream.eol()) {
-    if (stream.pos > cm.options.maxHighlightLength) {
-      flattenSpans = false;
-      if (forceToEnd) { processLine(cm, text, context, stream.pos); }
-      stream.pos = text.length;
-      style = null;
-    } else {
-      style = extractLineClasses(readToken(mode, stream, context.state, inner), lineClasses);
-    }
-    if (inner) {
-      var mName = inner[0].name;
-      if (mName) { style = "m-" + (style ? 
mName + " " + style : mName); } - } - if (!flattenSpans || curStyle != style) { - while (curStart < stream.start) { - curStart = Math.min(stream.start, curStart + 5000); - f(curStart, curStyle); - } - curStyle = style; - } - stream.start = stream.pos; - } - while (curStart < stream.pos) { - // Webkit seems to refuse to render text nodes longer than 57444 - // characters, and returns inaccurate measurements in nodes - // starting around 5000 chars. - var pos = Math.min(stream.pos, curStart + 5000); - f(pos, curStyle); - curStart = pos; - } -} - -// Finds the line to start with when starting a parse. Tries to -// find a line with a stateAfter, so that it can start with a -// valid state. If that fails, it returns the line with the -// smallest indentation, which tends to need the least context to -// parse correctly. -function findStartLine(cm, n, precise) { - var minindent, minline, doc = cm.doc; - var lim = precise ? -1 : n - (cm.doc.mode.innerMode ? 1000 : 100); - for (var search = n; search > lim; --search) { - if (search <= doc.first) { return doc.first } - var line = getLine(doc, search - 1), after = line.stateAfter; - if (after && (!precise || search + (after instanceof SavedContext ? after.lookAhead : 0) <= doc.modeFrontier)) - { return search } - var indented = countColumn(line.text, null, cm.options.tabSize); - if (minline == null || minindent > indented) { - minline = search - 1; - minindent = indented; - } - } - return minline -} - -function retreatFrontier(doc, n) { - doc.modeFrontier = Math.min(doc.modeFrontier, n); - if (doc.highlightFrontier < n - 10) { return } - var start = doc.first; - for (var line = n - 1; line > start; line--) { - var saved = getLine(doc, line).stateAfter; - // change is on 3 - // state on line 1 looked ahead 2 -- so saw 3 - // test 1 + 2 < 3 should cover this - if (saved && (!(saved instanceof SavedContext) || line + saved.lookAhead < n)) { - start = line + 1; - break - } - } - doc.highlightFrontier = Math.min(doc.highlightFrontier, start); -} - -// LINE DATA STRUCTURE - -// Line objects. These hold state related to a line, including -// highlighting info (the styles array). -var Line = function(text, markedSpans, estimateHeight) { - this.text = text; - attachMarkedSpans(this, markedSpans); - this.height = estimateHeight ? estimateHeight(this) : 1; -}; - -Line.prototype.lineNo = function () { return lineNo(this) }; -eventMixin(Line); - -// Change the content (text, markers) of a line. Automatically -// invalidates cached information and tries to re-estimate the -// line's height. -function updateLine(line, text, markedSpans, estimateHeight) { - line.text = text; - if (line.stateAfter) { line.stateAfter = null; } - if (line.styles) { line.styles = null; } - if (line.order != null) { line.order = null; } - detachMarkedSpans(line); - attachMarkedSpans(line, markedSpans); - var estHeight = estimateHeight ? estimateHeight(line) : 1; - if (estHeight != line.height) { updateLineHeight(line, estHeight); } -} - -// Detach a line from the document tree and its markers. -function cleanUpLine(line) { - line.parent = null; - detachMarkedSpans(line); -} - -// Convert a style as returned by a mode (either null, or a string -// containing one or more styles) to a CSS style. This is cached, -// and also looks for line-wide styles. -var styleToClassCache = {}; -var styleToClassCacheWithMode = {}; -function interpretTokenStyle(style, options) { - if (!style || /^\s*$/.test(style)) { return null } - var cache = options.addModeClass ? 
styleToClassCacheWithMode : styleToClassCache; - return cache[style] || - (cache[style] = style.replace(/\S+/g, "cm-$&")) -} - -// Render the DOM representation of the text of a line. Also builds -// up a 'line map', which points at the DOM nodes that represent -// specific stretches of text, and is used by the measuring code. -// The returned object contains the DOM node, this map, and -// information about line-wide styles that were set by the mode. -function buildLineContent(cm, lineView) { - // The padding-right forces the element to have a 'border', which - // is needed on Webkit to be able to get line-level bounding - // rectangles for it (in measureChar). - var content = eltP("span", null, null, webkit ? "padding-right: .1px" : null); - var builder = {pre: eltP("pre", [content], "CodeMirror-line"), content: content, - col: 0, pos: 0, cm: cm, - trailingSpace: false, - splitSpaces: (ie || webkit) && cm.getOption("lineWrapping")}; - lineView.measure = {}; - - // Iterate over the logical lines that make up this visual line. - for (var i = 0; i <= (lineView.rest ? lineView.rest.length : 0); i++) { - var line = i ? lineView.rest[i - 1] : lineView.line, order = (void 0); - builder.pos = 0; - builder.addToken = buildToken; - // Optionally wire in some hacks into the token-rendering - // algorithm, to deal with browser quirks. - if (hasBadBidiRects(cm.display.measure) && (order = getOrder(line, cm.doc.direction))) - { builder.addToken = buildTokenBadBidi(builder.addToken, order); } - builder.map = []; - var allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line); - insertLineContent(line, builder, getLineStyles(cm, line, allowFrontierUpdate)); - if (line.styleClasses) { - if (line.styleClasses.bgClass) - { builder.bgClass = joinClasses(line.styleClasses.bgClass, builder.bgClass || ""); } - if (line.styleClasses.textClass) - { builder.textClass = joinClasses(line.styleClasses.textClass, builder.textClass || ""); } - } - - // Ensure at least a single node is present, for measuring. - if (builder.map.length == 0) - { builder.map.push(0, 0, builder.content.appendChild(zeroWidthElement(cm.display.measure))); } - - // Store the map and a cache object for the current logical line - if (i == 0) { - lineView.measure.map = builder.map; - lineView.measure.cache = {}; - } else { - (lineView.measure.maps || (lineView.measure.maps = [])).push(builder.map) - ;(lineView.measure.caches || (lineView.measure.caches = [])).push({}); - } - } - - // See issue #2901 - if (webkit) { - var last = builder.content.lastChild; - if (/\bcm-tab\b/.test(last.className) || (last.querySelector && last.querySelector(".cm-tab"))) - { builder.content.className = "cm-tab-wrap-hack"; } - } - - signal(cm, "renderLine", cm, lineView.line, builder.pre); - if (builder.pre.className) - { builder.textClass = joinClasses(builder.pre.className, builder.textClass || ""); } - - return builder -} - -function defaultSpecialCharPlaceholder(ch) { - var token = elt("span", "\u2022", "cm-invalidchar"); - token.title = "\\u" + ch.charCodeAt(0).toString(16); - token.setAttribute("aria-label", token.title); - return token -} - -// Build up the DOM representation for a single token, and add it to -// the line map. Takes care to render special characters separately. -function buildToken(builder, text, style, startStyle, endStyle, title, css) { - if (!text) { return } - var displayText = builder.splitSpaces ? 
splitSpaces(text, builder.trailingSpace) : text; - var special = builder.cm.state.specialChars, mustWrap = false; - var content; - if (!special.test(text)) { - builder.col += text.length; - content = document.createTextNode(displayText); - builder.map.push(builder.pos, builder.pos + text.length, content); - if (ie && ie_version < 9) { mustWrap = true; } - builder.pos += text.length; - } else { - content = document.createDocumentFragment(); - var pos = 0; - while (true) { - special.lastIndex = pos; - var m = special.exec(text); - var skipped = m ? m.index - pos : text.length - pos; - if (skipped) { - var txt = document.createTextNode(displayText.slice(pos, pos + skipped)); - if (ie && ie_version < 9) { content.appendChild(elt("span", [txt])); } - else { content.appendChild(txt); } - builder.map.push(builder.pos, builder.pos + skipped, txt); - builder.col += skipped; - builder.pos += skipped; - } - if (!m) { break } - pos += skipped + 1; - var txt$1 = (void 0); - if (m[0] == "\t") { - var tabSize = builder.cm.options.tabSize, tabWidth = tabSize - builder.col % tabSize; - txt$1 = content.appendChild(elt("span", spaceStr(tabWidth), "cm-tab")); - txt$1.setAttribute("role", "presentation"); - txt$1.setAttribute("cm-text", "\t"); - builder.col += tabWidth; - } else if (m[0] == "\r" || m[0] == "\n") { - txt$1 = content.appendChild(elt("span", m[0] == "\r" ? "\u240d" : "\u2424", "cm-invalidchar")); - txt$1.setAttribute("cm-text", m[0]); - builder.col += 1; - } else { - txt$1 = builder.cm.options.specialCharPlaceholder(m[0]); - txt$1.setAttribute("cm-text", m[0]); - if (ie && ie_version < 9) { content.appendChild(elt("span", [txt$1])); } - else { content.appendChild(txt$1); } - builder.col += 1; - } - builder.map.push(builder.pos, builder.pos + 1, txt$1); - builder.pos++; - } - } - builder.trailingSpace = displayText.charCodeAt(text.length - 1) == 32; - if (style || startStyle || endStyle || mustWrap || css) { - var fullStyle = style || ""; - if (startStyle) { fullStyle += startStyle; } - if (endStyle) { fullStyle += endStyle; } - var token = elt("span", [content], fullStyle, css); - if (title) { token.title = title; } - return builder.content.appendChild(token) - } - builder.content.appendChild(content); -} - -function splitSpaces(text, trailingBefore) { - if (text.length > 1 && !/ /.test(text)) { return text } - var spaceBefore = trailingBefore, result = ""; - for (var i = 0; i < text.length; i++) { - var ch = text.charAt(i); - if (ch == " " && spaceBefore && (i == text.length - 1 || text.charCodeAt(i + 1) == 32)) - { ch = "\u00a0"; } - result += ch; - spaceBefore = ch == " "; - } - return result -} - -// Work around nonsense dimensions being reported for stretches of -// right-to-left text. -function buildTokenBadBidi(inner, order) { - return function (builder, text, style, startStyle, endStyle, title, css) { - style = style ? 
style + " cm-force-border" : "cm-force-border"; - var start = builder.pos, end = start + text.length; - for (;;) { - // Find the part that overlaps with the start of this text - var part = (void 0); - for (var i = 0; i < order.length; i++) { - part = order[i]; - if (part.to > start && part.from <= start) { break } - } - if (part.to >= end) { return inner(builder, text, style, startStyle, endStyle, title, css) } - inner(builder, text.slice(0, part.to - start), style, startStyle, null, title, css); - startStyle = null; - text = text.slice(part.to - start); - start = part.to; - } - } -} - -function buildCollapsedSpan(builder, size, marker, ignoreWidget) { - var widget = !ignoreWidget && marker.widgetNode; - if (widget) { builder.map.push(builder.pos, builder.pos + size, widget); } - if (!ignoreWidget && builder.cm.display.input.needsContentAttribute) { - if (!widget) - { widget = builder.content.appendChild(document.createElement("span")); } - widget.setAttribute("cm-marker", marker.id); - } - if (widget) { - builder.cm.display.input.setUneditable(widget); - builder.content.appendChild(widget); - } - builder.pos += size; - builder.trailingSpace = false; -} - -// Outputs a number of spans to make up a line, taking highlighting -// and marked text into account. -function insertLineContent(line, builder, styles) { - var spans = line.markedSpans, allText = line.text, at = 0; - if (!spans) { - for (var i$1 = 1; i$1 < styles.length; i$1+=2) - { builder.addToken(builder, allText.slice(at, at = styles[i$1]), interpretTokenStyle(styles[i$1+1], builder.cm.options)); } - return - } - - var len = allText.length, pos = 0, i = 1, text = "", style, css; - var nextChange = 0, spanStyle, spanEndStyle, spanStartStyle, title, collapsed; - for (;;) { - if (nextChange == pos) { // Update current marker set - spanStyle = spanEndStyle = spanStartStyle = title = css = ""; - collapsed = null; nextChange = Infinity; - var foundBookmarks = [], endStyles = (void 0); - for (var j = 0; j < spans.length; ++j) { - var sp = spans[j], m = sp.marker; - if (m.type == "bookmark" && sp.from == pos && m.widgetNode) { - foundBookmarks.push(m); - } else if (sp.from <= pos && (sp.to == null || sp.to > pos || m.collapsed && sp.to == pos && sp.from == pos)) { - if (sp.to != null && sp.to != pos && nextChange > sp.to) { - nextChange = sp.to; - spanEndStyle = ""; - } - if (m.className) { spanStyle += " " + m.className; } - if (m.css) { css = (css ? css + ";" : "") + m.css; } - if (m.startStyle && sp.from == pos) { spanStartStyle += " " + m.startStyle; } - if (m.endStyle && sp.to == nextChange) { (endStyles || (endStyles = [])).push(m.endStyle, sp.to); } - if (m.title && !title) { title = m.title; } - if (m.collapsed && (!collapsed || compareCollapsedMarkers(collapsed.marker, m) < 0)) - { collapsed = sp; } - } else if (sp.from > pos && nextChange > sp.from) { - nextChange = sp.from; - } - } - if (endStyles) { for (var j$1 = 0; j$1 < endStyles.length; j$1 += 2) - { if (endStyles[j$1 + 1] == nextChange) { spanEndStyle += " " + endStyles[j$1]; } } } - - if (!collapsed || collapsed.from == pos) { for (var j$2 = 0; j$2 < foundBookmarks.length; ++j$2) - { buildCollapsedSpan(builder, 0, foundBookmarks[j$2]); } } - if (collapsed && (collapsed.from || 0) == pos) { - buildCollapsedSpan(builder, (collapsed.to == null ? 
len + 1 : collapsed.to) - pos, - collapsed.marker, collapsed.from == null); - if (collapsed.to == null) { return } - if (collapsed.to == pos) { collapsed = false; } - } - } - if (pos >= len) { break } - - var upto = Math.min(len, nextChange); - while (true) { - if (text) { - var end = pos + text.length; - if (!collapsed) { - var tokenText = end > upto ? text.slice(0, upto - pos) : text; - builder.addToken(builder, tokenText, style ? style + spanStyle : spanStyle, - spanStartStyle, pos + tokenText.length == nextChange ? spanEndStyle : "", title, css); - } - if (end >= upto) {text = text.slice(upto - pos); pos = upto; break} - pos = end; - spanStartStyle = ""; - } - text = allText.slice(at, at = styles[i++]); - style = interpretTokenStyle(styles[i++], builder.cm.options); - } - } -} - - -// These objects are used to represent the visible (currently drawn) -// part of the document. A LineView may correspond to multiple -// logical lines, if those are connected by collapsed ranges. -function LineView(doc, line, lineN) { - // The starting line - this.line = line; - // Continuing lines, if any - this.rest = visualLineContinued(line); - // Number of logical lines in this visual line - this.size = this.rest ? lineNo(lst(this.rest)) - lineN + 1 : 1; - this.node = this.text = null; - this.hidden = lineIsHidden(doc, line); -} - -// Create a range of LineView objects for the given lines. -function buildViewArray(cm, from, to) { - var array = [], nextPos; - for (var pos = from; pos < to; pos = nextPos) { - var view = new LineView(cm.doc, getLine(cm.doc, pos), pos); - nextPos = pos + view.size; - array.push(view); - } - return array -} - -var operationGroup = null; - -function pushOperation(op) { - if (operationGroup) { - operationGroup.ops.push(op); - } else { - op.ownsGroup = operationGroup = { - ops: [op], - delayedCallbacks: [] - }; - } -} - -function fireCallbacksForOps(group) { - // Calls delayed callbacks and cursorActivity handlers until no - // new ones appear - var callbacks = group.delayedCallbacks, i = 0; - do { - for (; i < callbacks.length; i++) - { callbacks[i].call(null); } - for (var j = 0; j < group.ops.length; j++) { - var op = group.ops[j]; - if (op.cursorActivityHandlers) - { while (op.cursorActivityCalled < op.cursorActivityHandlers.length) - { op.cursorActivityHandlers[op.cursorActivityCalled++].call(null, op.cm); } } - } - } while (i < callbacks.length) -} - -function finishOperation(op, endCb) { - var group = op.ownsGroup; - if (!group) { return } - - try { fireCallbacksForOps(group); } - finally { - operationGroup = null; - endCb(group); - } -} - -var orphanDelayedCallbacks = null; - -// Often, we want to signal events at a point where we are in the -// middle of some work, but don't want the handler to start calling -// other methods on the editor, which might be in an inconsistent -// state or simply not expect any other events to happen. -// signalLater looks whether there are any handlers, and schedules -// them to be executed when the last operation ends, or, if no -// operation is active, when a timeout fires. 
-function signalLater(emitter, type /*, values...*/) { - var arr = getHandlers(emitter, type); - if (!arr.length) { return } - var args = Array.prototype.slice.call(arguments, 2), list; - if (operationGroup) { - list = operationGroup.delayedCallbacks; - } else if (orphanDelayedCallbacks) { - list = orphanDelayedCallbacks; - } else { - list = orphanDelayedCallbacks = []; - setTimeout(fireOrphanDelayed, 0); - } - var loop = function ( i ) { - list.push(function () { return arr[i].apply(null, args); }); - }; - - for (var i = 0; i < arr.length; ++i) - loop( i ); -} - -function fireOrphanDelayed() { - var delayed = orphanDelayedCallbacks; - orphanDelayedCallbacks = null; - for (var i = 0; i < delayed.length; ++i) { delayed[i](); } -} - -// When an aspect of a line changes, a string is added to -// lineView.changes. This updates the relevant part of the line's -// DOM structure. -function updateLineForChanges(cm, lineView, lineN, dims) { - for (var j = 0; j < lineView.changes.length; j++) { - var type = lineView.changes[j]; - if (type == "text") { updateLineText(cm, lineView); } - else if (type == "gutter") { updateLineGutter(cm, lineView, lineN, dims); } - else if (type == "class") { updateLineClasses(cm, lineView); } - else if (type == "widget") { updateLineWidgets(cm, lineView, dims); } - } - lineView.changes = null; -} - -// Lines with gutter elements, widgets or a background class need to -// be wrapped, and have the extra elements added to the wrapper div -function ensureLineWrapped(lineView) { - if (lineView.node == lineView.text) { - lineView.node = elt("div", null, null, "position: relative"); - if (lineView.text.parentNode) - { lineView.text.parentNode.replaceChild(lineView.node, lineView.text); } - lineView.node.appendChild(lineView.text); - if (ie && ie_version < 8) { lineView.node.style.zIndex = 2; } - } - return lineView.node -} - -function updateLineBackground(cm, lineView) { - var cls = lineView.bgClass ? lineView.bgClass + " " + (lineView.line.bgClass || "") : lineView.line.bgClass; - if (cls) { cls += " CodeMirror-linebackground"; } - if (lineView.background) { - if (cls) { lineView.background.className = cls; } - else { lineView.background.parentNode.removeChild(lineView.background); lineView.background = null; } - } else if (cls) { - var wrap = ensureLineWrapped(lineView); - lineView.background = wrap.insertBefore(elt("div", null, cls), wrap.firstChild); - cm.display.input.setUneditable(lineView.background); - } -} - -// Wrapper around buildLineContent which will reuse the structure -// in display.externalMeasured when possible. -function getLineContent(cm, lineView) { - var ext = cm.display.externalMeasured; - if (ext && ext.line == lineView.line) { - cm.display.externalMeasured = null; - lineView.measure = ext.measure; - return ext.built - } - return buildLineContent(cm, lineView) -} - -// Redraw the line's text. Interacts with the background and text -// classes because the mode may output tokens that influence these -// classes. 
-function updateLineText(cm, lineView) { - var cls = lineView.text.className; - var built = getLineContent(cm, lineView); - if (lineView.text == lineView.node) { lineView.node = built.pre; } - lineView.text.parentNode.replaceChild(built.pre, lineView.text); - lineView.text = built.pre; - if (built.bgClass != lineView.bgClass || built.textClass != lineView.textClass) { - lineView.bgClass = built.bgClass; - lineView.textClass = built.textClass; - updateLineClasses(cm, lineView); - } else if (cls) { - lineView.text.className = cls; - } -} - -function updateLineClasses(cm, lineView) { - updateLineBackground(cm, lineView); - if (lineView.line.wrapClass) - { ensureLineWrapped(lineView).className = lineView.line.wrapClass; } - else if (lineView.node != lineView.text) - { lineView.node.className = ""; } - var textClass = lineView.textClass ? lineView.textClass + " " + (lineView.line.textClass || "") : lineView.line.textClass; - lineView.text.className = textClass || ""; -} - -function updateLineGutter(cm, lineView, lineN, dims) { - if (lineView.gutter) { - lineView.node.removeChild(lineView.gutter); - lineView.gutter = null; - } - if (lineView.gutterBackground) { - lineView.node.removeChild(lineView.gutterBackground); - lineView.gutterBackground = null; - } - if (lineView.line.gutterClass) { - var wrap = ensureLineWrapped(lineView); - lineView.gutterBackground = elt("div", null, "CodeMirror-gutter-background " + lineView.line.gutterClass, - ("left: " + (cm.options.fixedGutter ? dims.fixedPos : -dims.gutterTotalWidth) + "px; width: " + (dims.gutterTotalWidth) + "px")); - cm.display.input.setUneditable(lineView.gutterBackground); - wrap.insertBefore(lineView.gutterBackground, lineView.text); - } - var markers = lineView.line.gutterMarkers; - if (cm.options.lineNumbers || markers) { - var wrap$1 = ensureLineWrapped(lineView); - var gutterWrap = lineView.gutter = elt("div", null, "CodeMirror-gutter-wrapper", ("left: " + (cm.options.fixedGutter ? 
dims.fixedPos : -dims.gutterTotalWidth) + "px")); - cm.display.input.setUneditable(gutterWrap); - wrap$1.insertBefore(gutterWrap, lineView.text); - if (lineView.line.gutterClass) - { gutterWrap.className += " " + lineView.line.gutterClass; } - if (cm.options.lineNumbers && (!markers || !markers["CodeMirror-linenumbers"])) - { lineView.lineNumber = gutterWrap.appendChild( - elt("div", lineNumberFor(cm.options, lineN), - "CodeMirror-linenumber CodeMirror-gutter-elt", - ("left: " + (dims.gutterLeft["CodeMirror-linenumbers"]) + "px; width: " + (cm.display.lineNumInnerWidth) + "px"))); } - if (markers) { for (var k = 0; k < cm.options.gutters.length; ++k) { - var id = cm.options.gutters[k], found = markers.hasOwnProperty(id) && markers[id]; - if (found) - { gutterWrap.appendChild(elt("div", [found], "CodeMirror-gutter-elt", - ("left: " + (dims.gutterLeft[id]) + "px; width: " + (dims.gutterWidth[id]) + "px"))); } - } } - } -} - -function updateLineWidgets(cm, lineView, dims) { - if (lineView.alignable) { lineView.alignable = null; } - for (var node = lineView.node.firstChild, next = (void 0); node; node = next) { - next = node.nextSibling; - if (node.className == "CodeMirror-linewidget") - { lineView.node.removeChild(node); } - } - insertLineWidgets(cm, lineView, dims); -} - -// Build a line's DOM representation from scratch -function buildLineElement(cm, lineView, lineN, dims) { - var built = getLineContent(cm, lineView); - lineView.text = lineView.node = built.pre; - if (built.bgClass) { lineView.bgClass = built.bgClass; } - if (built.textClass) { lineView.textClass = built.textClass; } - - updateLineClasses(cm, lineView); - updateLineGutter(cm, lineView, lineN, dims); - insertLineWidgets(cm, lineView, dims); - return lineView.node -} - -// A lineView may contain multiple logical lines (when merged by -// collapsed spans). The widgets for all of them need to be drawn. 
-function insertLineWidgets(cm, lineView, dims) { - insertLineWidgetsFor(cm, lineView.line, lineView, dims, true); - if (lineView.rest) { for (var i = 0; i < lineView.rest.length; i++) - { insertLineWidgetsFor(cm, lineView.rest[i], lineView, dims, false); } } -} - -function insertLineWidgetsFor(cm, line, lineView, dims, allowAbove) { - if (!line.widgets) { return } - var wrap = ensureLineWrapped(lineView); - for (var i = 0, ws = line.widgets; i < ws.length; ++i) { - var widget = ws[i], node = elt("div", [widget.node], "CodeMirror-linewidget"); - if (!widget.handleMouseEvents) { node.setAttribute("cm-ignore-events", "true"); } - positionLineWidget(widget, node, lineView, dims); - cm.display.input.setUneditable(node); - if (allowAbove && widget.above) - { wrap.insertBefore(node, lineView.gutter || lineView.text); } - else - { wrap.appendChild(node); } - signalLater(widget, "redraw"); - } -} - -function positionLineWidget(widget, node, lineView, dims) { - if (widget.noHScroll) { - (lineView.alignable || (lineView.alignable = [])).push(node); - var width = dims.wrapperWidth; - node.style.left = dims.fixedPos + "px"; - if (!widget.coverGutter) { - width -= dims.gutterTotalWidth; - node.style.paddingLeft = dims.gutterTotalWidth + "px"; - } - node.style.width = width + "px"; - } - if (widget.coverGutter) { - node.style.zIndex = 5; - node.style.position = "relative"; - if (!widget.noHScroll) { node.style.marginLeft = -dims.gutterTotalWidth + "px"; } - } -} - -function widgetHeight(widget) { - if (widget.height != null) { return widget.height } - var cm = widget.doc.cm; - if (!cm) { return 0 } - if (!contains(document.body, widget.node)) { - var parentStyle = "position: relative;"; - if (widget.coverGutter) - { parentStyle += "margin-left: -" + cm.display.gutters.offsetWidth + "px;"; } - if (widget.noHScroll) - { parentStyle += "width: " + cm.display.wrapper.clientWidth + "px;"; } - removeChildrenAndAdd(cm.display.measure, elt("div", [widget.node], null, parentStyle)); - } - return widget.height = widget.node.parentNode.offsetHeight -} - -// Return true when the given mouse event happened in a widget -function eventInWidget(display, e) { - for (var n = e_target(e); n != display.wrapper; n = n.parentNode) { - if (!n || (n.nodeType == 1 && n.getAttribute("cm-ignore-events") == "true") || - (n.parentNode == display.sizer && n != display.mover)) - { return true } - } -} - -// POSITION MEASUREMENT - -function paddingTop(display) {return display.lineSpace.offsetTop} -function paddingVert(display) {return display.mover.offsetHeight - display.lineSpace.offsetHeight} -function paddingH(display) { - if (display.cachedPaddingH) { return display.cachedPaddingH } - var e = removeChildrenAndAdd(display.measure, elt("pre", "x")); - var style = window.getComputedStyle ? window.getComputedStyle(e) : e.currentStyle; - var data = {left: parseInt(style.paddingLeft), right: parseInt(style.paddingRight)}; - if (!isNaN(data.left) && !isNaN(data.right)) { display.cachedPaddingH = data; } - return data -} - -function scrollGap(cm) { return scrollerGap - cm.display.nativeBarWidth } -function displayWidth(cm) { - return cm.display.scroller.clientWidth - scrollGap(cm) - cm.display.barWidth -} -function displayHeight(cm) { - return cm.display.scroller.clientHeight - scrollGap(cm) - cm.display.barHeight -} - -// Ensure the lineView.wrapping.heights array is populated. This is -// an array of bottom offsets for the lines that make up a drawn -// line. When lineWrapping is on, there might be more than one -// height. 
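
// widgetHeight above measures nodes that may not be in the document, where
// offsetHeight would read as 0. A sketch of that trick, assuming measureDiv
// is some hidden element already attached to the document (as
// cm.display.measure is); the helper name is hypothetical.
function measureDetachedHeight(node, measureDiv) {
  if (document.body.contains(node)) { return node.offsetHeight }
  var holder = document.createElement("div");
  holder.style.position = "relative"; // normal layout context for the node
  holder.appendChild(node);
  while (measureDiv.firstChild) { measureDiv.removeChild(measureDiv.firstChild); }
  measureDiv.appendChild(holder);
  return holder.offsetHeight // valid now that the node participates in layout
}
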
-function ensureLineHeights(cm, lineView, rect) { - var wrapping = cm.options.lineWrapping; - var curWidth = wrapping && displayWidth(cm); - if (!lineView.measure.heights || wrapping && lineView.measure.width != curWidth) { - var heights = lineView.measure.heights = []; - if (wrapping) { - lineView.measure.width = curWidth; - var rects = lineView.text.firstChild.getClientRects(); - for (var i = 0; i < rects.length - 1; i++) { - var cur = rects[i], next = rects[i + 1]; - if (Math.abs(cur.bottom - next.bottom) > 2) - { heights.push((cur.bottom + next.top) / 2 - rect.top); } - } - } - heights.push(rect.bottom - rect.top); - } -} - -// Find a line map (mapping character offsets to text nodes) and a -// measurement cache for the given line number. (A line view might -// contain multiple lines when collapsed ranges are present.) -function mapFromLineView(lineView, line, lineN) { - if (lineView.line == line) - { return {map: lineView.measure.map, cache: lineView.measure.cache} } - for (var i = 0; i < lineView.rest.length; i++) - { if (lineView.rest[i] == line) - { return {map: lineView.measure.maps[i], cache: lineView.measure.caches[i]} } } - for (var i$1 = 0; i$1 < lineView.rest.length; i$1++) - { if (lineNo(lineView.rest[i$1]) > lineN) - { return {map: lineView.measure.maps[i$1], cache: lineView.measure.caches[i$1], before: true} } } -} - -// Render a line into the hidden node display.externalMeasured. Used -// when measurement is needed for a line that's not in the viewport. -function updateExternalMeasurement(cm, line) { - line = visualLine(line); - var lineN = lineNo(line); - var view = cm.display.externalMeasured = new LineView(cm.doc, line, lineN); - view.lineN = lineN; - var built = view.built = buildLineContent(cm, view); - view.text = built.pre; - removeChildrenAndAdd(cm.display.lineMeasure, built.pre); - return view -} - -// Get a {top, bottom, left, right} box (in line-local coordinates) -// for a given character. -function measureChar(cm, line, ch, bias) { - return measureCharPrepared(cm, prepareMeasureForLine(cm, line), ch, bias) -} - -// Find a line view that corresponds to the given line number. -function findViewForLine(cm, lineN) { - if (lineN >= cm.display.viewFrom && lineN < cm.display.viewTo) - { return cm.display.view[findViewIndex(cm, lineN)] } - var ext = cm.display.externalMeasured; - if (ext && lineN >= ext.lineN && lineN < ext.lineN + ext.size) - { return ext } -} - -// Measurement can be split in two steps, the set-up work that -// applies to the whole line, and the measurement of the actual -// character. Functions like coordsChar, that need to do a lot of -// measurements in a row, can thus ensure that the set-up work is -// only done once. -function prepareMeasureForLine(cm, line) { - var lineN = lineNo(line); - var view = findViewForLine(cm, lineN); - if (view && !view.text) { - view = null; - } else if (view && view.changes) { - updateLineForChanges(cm, view, lineN, getDimensions(cm)); - cm.curOp.forceUpdate = true; - } - if (!view) - { view = updateExternalMeasurement(cm, line); } - - var info = mapFromLineView(view, line, lineN); - return { - line: line, view: view, rect: null, - map: info.map, cache: info.cache, before: info.before, - hasHeights: false - } -} - -// Given a prepared measurement object, measures the position of an -// actual character (or fetches it from the cache). 
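
// The prepare/measure split above pays the per-line set-up cost once and
// answers many per-character queries from a cache. A toy sketch of the same
// shape, using monospace column arithmetic instead of DOM measurement; all
// names here are illustrative.
function prepareToyMeasure(text, charWidth) {
  return {text: text, charWidth: charWidth, cache: {}}
}
function toyMeasureChar(prepared, ch) {
  if (prepared.cache.hasOwnProperty(ch)) { return prepared.cache[ch] }
  var box = {left: ch * prepared.charWidth, right: (ch + 1) * prepared.charWidth};
  prepared.cache[ch] = box;
  return box
}
// Callers doing many measurements on one line reuse a single prepared
// object, just as coordsChar does with prepareMeasureForLine:
//   var m = prepareToyMeasure("hello world", 8);
//   toyMeasureChar(m, 6) // -> {left: 48, right: 56}
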
-function measureCharPrepared(cm, prepared, ch, bias, varHeight) { - if (prepared.before) { ch = -1; } - var key = ch + (bias || ""), found; - if (prepared.cache.hasOwnProperty(key)) { - found = prepared.cache[key]; - } else { - if (!prepared.rect) - { prepared.rect = prepared.view.text.getBoundingClientRect(); } - if (!prepared.hasHeights) { - ensureLineHeights(cm, prepared.view, prepared.rect); - prepared.hasHeights = true; - } - found = measureCharInner(cm, prepared, ch, bias); - if (!found.bogus) { prepared.cache[key] = found; } - } - return {left: found.left, right: found.right, - top: varHeight ? found.rtop : found.top, - bottom: varHeight ? found.rbottom : found.bottom} -} - -var nullRect = {left: 0, right: 0, top: 0, bottom: 0}; - -function nodeAndOffsetInLineMap(map$$1, ch, bias) { - var node, start, end, collapse, mStart, mEnd; - // First, search the line map for the text node corresponding to, - // or closest to, the target character. - for (var i = 0; i < map$$1.length; i += 3) { - mStart = map$$1[i]; - mEnd = map$$1[i + 1]; - if (ch < mStart) { - start = 0; end = 1; - collapse = "left"; - } else if (ch < mEnd) { - start = ch - mStart; - end = start + 1; - } else if (i == map$$1.length - 3 || ch == mEnd && map$$1[i + 3] > ch) { - end = mEnd - mStart; - start = end - 1; - if (ch >= mEnd) { collapse = "right"; } - } - if (start != null) { - node = map$$1[i + 2]; - if (mStart == mEnd && bias == (node.insertLeft ? "left" : "right")) - { collapse = bias; } - if (bias == "left" && start == 0) - { while (i && map$$1[i - 2] == map$$1[i - 3] && map$$1[i - 1].insertLeft) { - node = map$$1[(i -= 3) + 2]; - collapse = "left"; - } } - if (bias == "right" && start == mEnd - mStart) - { while (i < map$$1.length - 3 && map$$1[i + 3] == map$$1[i + 4] && !map$$1[i + 5].insertLeft) { - node = map$$1[(i += 3) + 2]; - collapse = "right"; - } } - break - } - } - return {node: node, start: start, end: end, collapse: collapse, coverStart: mStart, coverEnd: mEnd} -} - -function getUsefulRect(rects, bias) { - var rect = nullRect; - if (bias == "left") { for (var i = 0; i < rects.length; i++) { - if ((rect = rects[i]).left != rect.right) { break } - } } else { for (var i$1 = rects.length - 1; i$1 >= 0; i$1--) { - if ((rect = rects[i$1]).left != rect.right) { break } - } } - return rect -} - -function measureCharInner(cm, prepared, ch, bias) { - var place = nodeAndOffsetInLineMap(prepared.map, ch, bias); - var node = place.node, start = place.start, end = place.end, collapse = place.collapse; - - var rect; - if (node.nodeType == 3) { // If it is a text node, use a range to retrieve the coordinates. - for (var i$1 = 0; i$1 < 4; i$1++) { // Retry a maximum of 4 times when nonsense rectangles are returned - while (start && isExtendingChar(prepared.line.text.charAt(place.coverStart + start))) { --start; } - while (place.coverStart + end < place.coverEnd && isExtendingChar(prepared.line.text.charAt(place.coverStart + end))) { ++end; } - if (ie && ie_version < 9 && start == 0 && end == place.coverEnd - place.coverStart) - { rect = node.parentNode.getBoundingClientRect(); } - else - { rect = getUsefulRect(range(node, start, end).getClientRects(), bias); } - if (rect.left || rect.right || start == 0) { break } - end = start; - start = start - 1; - collapse = "right"; - } - if (ie && ie_version < 11) { rect = maybeUpdateRectForZooming(cm.display.measure, rect); } - } else { // If it is a widget, simply get the box for the whole widget. 
- if (start > 0) { collapse = bias = "right"; } - var rects; - if (cm.options.lineWrapping && (rects = node.getClientRects()).length > 1) - { rect = rects[bias == "right" ? rects.length - 1 : 0]; } - else - { rect = node.getBoundingClientRect(); } - } - if (ie && ie_version < 9 && !start && (!rect || !rect.left && !rect.right)) { - var rSpan = node.parentNode.getClientRects()[0]; - if (rSpan) - { rect = {left: rSpan.left, right: rSpan.left + charWidth(cm.display), top: rSpan.top, bottom: rSpan.bottom}; } - else - { rect = nullRect; } - } - - var rtop = rect.top - prepared.rect.top, rbot = rect.bottom - prepared.rect.top; - var mid = (rtop + rbot) / 2; - var heights = prepared.view.measure.heights; - var i = 0; - for (; i < heights.length - 1; i++) - { if (mid < heights[i]) { break } } - var top = i ? heights[i - 1] : 0, bot = heights[i]; - var result = {left: (collapse == "right" ? rect.right : rect.left) - prepared.rect.left, - right: (collapse == "left" ? rect.left : rect.right) - prepared.rect.left, - top: top, bottom: bot}; - if (!rect.left && !rect.right) { result.bogus = true; } - if (!cm.options.singleCursorHeightPerLine) { result.rtop = rtop; result.rbottom = rbot; } - - return result -} - -// Work around problem with bounding client rects on ranges being -// returned incorrectly when zoomed on IE10 and below. -function maybeUpdateRectForZooming(measure, rect) { - if (!window.screen || screen.logicalXDPI == null || - screen.logicalXDPI == screen.deviceXDPI || !hasBadZoomedRects(measure)) - { return rect } - var scaleX = screen.logicalXDPI / screen.deviceXDPI; - var scaleY = screen.logicalYDPI / screen.deviceYDPI; - return {left: rect.left * scaleX, right: rect.right * scaleX, - top: rect.top * scaleY, bottom: rect.bottom * scaleY} -} - -function clearLineMeasurementCacheFor(lineView) { - if (lineView.measure) { - lineView.measure.cache = {}; - lineView.measure.heights = null; - if (lineView.rest) { for (var i = 0; i < lineView.rest.length; i++) - { lineView.measure.caches[i] = {}; } } - } -} - -function clearLineMeasurementCache(cm) { - cm.display.externalMeasure = null; - removeChildren(cm.display.lineMeasure); - for (var i = 0; i < cm.display.view.length; i++) - { clearLineMeasurementCacheFor(cm.display.view[i]); } -} - -function clearCaches(cm) { - clearLineMeasurementCache(cm); - cm.display.cachedCharWidth = cm.display.cachedTextHeight = cm.display.cachedPaddingH = null; - if (!cm.options.lineWrapping) { cm.display.maxLineChanged = true; } - cm.display.lineNumChars = null; -} - -function pageScrollX() { - // Work around https://bugs.chromium.org/p/chromium/issues/detail?id=489206 - // which causes page_Offset and bounding client rects to use - // different reference viewports and invalidate our calculations. 
- if (chrome && android) { return -(document.body.getBoundingClientRect().left - parseInt(getComputedStyle(document.body).marginLeft)) } - return window.pageXOffset || (document.documentElement || document.body).scrollLeft -} -function pageScrollY() { - if (chrome && android) { return -(document.body.getBoundingClientRect().top - parseInt(getComputedStyle(document.body).marginTop)) } - return window.pageYOffset || (document.documentElement || document.body).scrollTop -} - -function widgetTopHeight(lineObj) { - var height = 0; - if (lineObj.widgets) { for (var i = 0; i < lineObj.widgets.length; ++i) { if (lineObj.widgets[i].above) - { height += widgetHeight(lineObj.widgets[i]); } } } - return height -} - -// Converts a {top, bottom, left, right} box from line-local -// coordinates into another coordinate system. Context may be one of -// "line", "div" (display.lineDiv), "local"./null (editor), "window", -// or "page". -function intoCoordSystem(cm, lineObj, rect, context, includeWidgets) { - if (!includeWidgets) { - var height = widgetTopHeight(lineObj); - rect.top += height; rect.bottom += height; - } - if (context == "line") { return rect } - if (!context) { context = "local"; } - var yOff = heightAtLine(lineObj); - if (context == "local") { yOff += paddingTop(cm.display); } - else { yOff -= cm.display.viewOffset; } - if (context == "page" || context == "window") { - var lOff = cm.display.lineSpace.getBoundingClientRect(); - yOff += lOff.top + (context == "window" ? 0 : pageScrollY()); - var xOff = lOff.left + (context == "window" ? 0 : pageScrollX()); - rect.left += xOff; rect.right += xOff; - } - rect.top += yOff; rect.bottom += yOff; - return rect -} - -// Coverts a box from "div" coords to another coordinate system. -// Context may be "window", "page", "div", or "local"./null. -function fromCoordSystem(cm, coords, context) { - if (context == "div") { return coords } - var left = coords.left, top = coords.top; - // First move into "page" coordinate system - if (context == "page") { - left -= pageScrollX(); - top -= pageScrollY(); - } else if (context == "local" || !context) { - var localBox = cm.display.sizer.getBoundingClientRect(); - left += localBox.left; - top += localBox.top; - } - - var lineSpaceBox = cm.display.lineSpace.getBoundingClientRect(); - return {left: left - lineSpaceBox.left, top: top - lineSpaceBox.top} -} - -function charCoords(cm, pos, context, lineObj, bias) { - if (!lineObj) { lineObj = getLine(cm.doc, pos.line); } - return intoCoordSystem(cm, lineObj, measureChar(cm, lineObj, pos.ch, bias), context) -} - -// Returns a box for a given cursor position, which may have an -// 'other' property containing the position of the secondary cursor -// on a bidi boundary. -// A cursor Pos(line, char, "before") is on the same visual line as `char - 1` -// and after `char - 1` in writing order of `char - 1` -// A cursor Pos(line, char, "after") is on the same visual line as `char` -// and before `char` in writing order of `char` -// Examples (upper-case letters are RTL, lower-case are LTR): -// Pos(0, 1, ...) -// before after -// ab a|b a|b -// aB a|B aB| -// Ab |Ab A|b -// AB B|A B|A -// Every position after the last character on a line is considered to stick -// to the last character on the line. 
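
// A sketch of the offset chaining that intoCoordSystem above performs: a
// line-local box becomes a page box by adding the line's vertical offset
// within the document, the editor's position on the page, and the page
// scroll. Plain arithmetic, illustrative names only.
function lineLocalToPage(box, lineTopInDoc, editorLeft, editorTop, scrollX, scrollY) {
  var xOff = editorLeft + scrollX, yOff = lineTopInDoc + editorTop + scrollY;
  return {left: box.left + xOff, right: box.right + xOff,
          top: box.top + yOff, bottom: box.bottom + yOff}
}
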
-function cursorCoords(cm, pos, context, lineObj, preparedMeasure, varHeight) { - lineObj = lineObj || getLine(cm.doc, pos.line); - if (!preparedMeasure) { preparedMeasure = prepareMeasureForLine(cm, lineObj); } - function get(ch, right) { - var m = measureCharPrepared(cm, preparedMeasure, ch, right ? "right" : "left", varHeight); - if (right) { m.left = m.right; } else { m.right = m.left; } - return intoCoordSystem(cm, lineObj, m, context) - } - var order = getOrder(lineObj, cm.doc.direction), ch = pos.ch, sticky = pos.sticky; - if (ch >= lineObj.text.length) { - ch = lineObj.text.length; - sticky = "before"; - } else if (ch <= 0) { - ch = 0; - sticky = "after"; - } - if (!order) { return get(sticky == "before" ? ch - 1 : ch, sticky == "before") } - - function getBidi(ch, partPos, invert) { - var part = order[partPos], right = part.level == 1; - return get(invert ? ch - 1 : ch, right != invert) - } - var partPos = getBidiPartAt(order, ch, sticky); - var other = bidiOther; - var val = getBidi(ch, partPos, sticky == "before"); - if (other != null) { val.other = getBidi(ch, other, sticky != "before"); } - return val -} - -// Used to cheaply estimate the coordinates for a position. Used for -// intermediate scroll updates. -function estimateCoords(cm, pos) { - var left = 0; - pos = clipPos(cm.doc, pos); - if (!cm.options.lineWrapping) { left = charWidth(cm.display) * pos.ch; } - var lineObj = getLine(cm.doc, pos.line); - var top = heightAtLine(lineObj) + paddingTop(cm.display); - return {left: left, right: left, top: top, bottom: top + lineObj.height} -} - -// Positions returned by coordsChar contain some extra information. -// xRel is the relative x position of the input coordinates compared -// to the found position (so xRel > 0 means the coordinates are to -// the right of the character position, for example). When outside -// is true, that means the coordinates lie outside the line's -// vertical range. -function PosWithInfo(line, ch, sticky, outside, xRel) { - var pos = Pos(line, ch, sticky); - pos.xRel = xRel; - if (outside) { pos.outside = true; } - return pos -} - -// Compute the character position closest to the given coordinates. -// Input must be lineSpace-local ("div" coordinate system). 
-function coordsChar(cm, x, y) { - var doc = cm.doc; - y += cm.display.viewOffset; - if (y < 0) { return PosWithInfo(doc.first, 0, null, true, -1) } - var lineN = lineAtHeight(doc, y), last = doc.first + doc.size - 1; - if (lineN > last) - { return PosWithInfo(doc.first + doc.size - 1, getLine(doc, last).text.length, null, true, 1) } - if (x < 0) { x = 0; } - - var lineObj = getLine(doc, lineN); - for (;;) { - var found = coordsCharInner(cm, lineObj, lineN, x, y); - var merged = collapsedSpanAtEnd(lineObj); - var mergedPos = merged && merged.find(0, true); - if (merged && (found.ch > mergedPos.from.ch || found.ch == mergedPos.from.ch && found.xRel > 0)) - { lineN = lineNo(lineObj = mergedPos.to.line); } - else - { return found } - } -} - -function wrappedLineExtent(cm, lineObj, preparedMeasure, y) { - y -= widgetTopHeight(lineObj); - var end = lineObj.text.length; - var begin = findFirst(function (ch) { return measureCharPrepared(cm, preparedMeasure, ch - 1).bottom <= y; }, end, 0); - end = findFirst(function (ch) { return measureCharPrepared(cm, preparedMeasure, ch).top > y; }, begin, end); - return {begin: begin, end: end} -} - -function wrappedLineExtentChar(cm, lineObj, preparedMeasure, target) { - if (!preparedMeasure) { preparedMeasure = prepareMeasureForLine(cm, lineObj); } - var targetTop = intoCoordSystem(cm, lineObj, measureCharPrepared(cm, preparedMeasure, target), "line").top; - return wrappedLineExtent(cm, lineObj, preparedMeasure, targetTop) -} - -// Returns true if the given side of a box is after the given -// coordinates, in top-to-bottom, left-to-right order. -function boxIsAfter(box, x, y, left) { - return box.bottom <= y ? false : box.top > y ? true : (left ? box.left : box.right) > x -} - -function coordsCharInner(cm, lineObj, lineNo$$1, x, y) { - // Move y into line-local coordinate space - y -= heightAtLine(lineObj); - var preparedMeasure = prepareMeasureForLine(cm, lineObj); - // When directly calling `measureCharPrepared`, we have to adjust - // for the widgets at this line. - var widgetHeight$$1 = widgetTopHeight(lineObj); - var begin = 0, end = lineObj.text.length, ltr = true; - - var order = getOrder(lineObj, cm.doc.direction); - // If the line isn't plain left-to-right text, first figure out - // which bidi section the coordinates fall into. - if (order) { - var part = (cm.options.lineWrapping ? coordsBidiPartWrapped : coordsBidiPart) - (cm, lineObj, lineNo$$1, preparedMeasure, order, x, y); - ltr = part.level != 1; - // The awkward -1 offsets are needed because findFirst (called - // on these below) will treat its first bound as inclusive, - // second as exclusive, but we want to actually address the - // characters in the part's range - begin = ltr ? part.from : part.to - 1; - end = ltr ? part.to : part.from - 1; - } - - // A binary search to find the first character whose bounding box - // starts after the coordinates. If we run across any whose box wrap - // the coordinates, store that. 
- var chAround = null, boxAround = null; - var ch = findFirst(function (ch) { - var box = measureCharPrepared(cm, preparedMeasure, ch); - box.top += widgetHeight$$1; box.bottom += widgetHeight$$1; - if (!boxIsAfter(box, x, y, false)) { return false } - if (box.top <= y && box.left <= x) { - chAround = ch; - boxAround = box; - } - return true - }, begin, end); - - var baseX, sticky, outside = false; - // If a box around the coordinates was found, use that - if (boxAround) { - // Distinguish coordinates nearer to the left or right side of the box - var atLeft = x - boxAround.left < boxAround.right - x, atStart = atLeft == ltr; - ch = chAround + (atStart ? 0 : 1); - sticky = atStart ? "after" : "before"; - baseX = atLeft ? boxAround.left : boxAround.right; - } else { - // (Adjust for extended bound, if necessary.) - if (!ltr && (ch == end || ch == begin)) { ch++; } - // To determine which side to associate with, get the box to the - // left of the character and compare it's vertical position to the - // coordinates - sticky = ch == 0 ? "after" : ch == lineObj.text.length ? "before" : - (measureCharPrepared(cm, preparedMeasure, ch - (ltr ? 1 : 0)).bottom + widgetHeight$$1 <= y) == ltr ? - "after" : "before"; - // Now get accurate coordinates for this place, in order to get a - // base X position - var coords = cursorCoords(cm, Pos(lineNo$$1, ch, sticky), "line", lineObj, preparedMeasure); - baseX = coords.left; - outside = y < coords.top || y >= coords.bottom; - } - - ch = skipExtendingChars(lineObj.text, ch, 1); - return PosWithInfo(lineNo$$1, ch, sticky, outside, x - baseX) -} - -function coordsBidiPart(cm, lineObj, lineNo$$1, preparedMeasure, order, x, y) { - // Bidi parts are sorted left-to-right, and in a non-line-wrapping - // situation, we can take this ordering to correspond to the visual - // ordering. This finds the first part whose end is after the given - // coordinates. - var index = findFirst(function (i) { - var part = order[i], ltr = part.level != 1; - return boxIsAfter(cursorCoords(cm, Pos(lineNo$$1, ltr ? part.to : part.from, ltr ? "before" : "after"), - "line", lineObj, preparedMeasure), x, y, true) - }, 0, order.length - 1); - var part = order[index]; - // If this isn't the first part, the part's start is also after - // the coordinates, and the coordinates aren't on the same line as - // that start, move one part back. - if (index > 0) { - var ltr = part.level != 1; - var start = cursorCoords(cm, Pos(lineNo$$1, ltr ? part.from : part.to, ltr ? "after" : "before"), - "line", lineObj, preparedMeasure); - if (boxIsAfter(start, x, y, true) && start.top > y) - { part = order[index - 1]; } - } - return part -} - -function coordsBidiPartWrapped(cm, lineObj, _lineNo, preparedMeasure, order, x, y) { - // In a wrapped line, rtl text on wrapping boundaries can do things - // that don't correspond to the ordering in our `order` array at - // all, so a binary search doesn't work, and we want to return a - // part that only spans one line so that the binary search in - // coordsCharInner is safe. As such, we first find the extent of the - // wrapped line, and then do a flat search in which we discard any - // spans that aren't on the line. 
- var ref = wrappedLineExtent(cm, lineObj, preparedMeasure, y); - var begin = ref.begin; - var end = ref.end; - if (/\s/.test(lineObj.text.charAt(end - 1))) { end--; } - var part = null, closestDist = null; - for (var i = 0; i < order.length; i++) { - var p = order[i]; - if (p.from >= end || p.to <= begin) { continue } - var ltr = p.level != 1; - var endX = measureCharPrepared(cm, preparedMeasure, ltr ? Math.min(end, p.to) - 1 : Math.max(begin, p.from)).right; - // Weigh against spans ending before this, so that they are only - // picked if nothing ends after - var dist = endX < x ? x - endX + 1e9 : endX - x; - if (!part || closestDist > dist) { - part = p; - closestDist = dist; - } - } - if (!part) { part = order[order.length - 1]; } - // Clip the part to the wrapped line. - if (part.from < begin) { part = {from: begin, to: part.to, level: part.level}; } - if (part.to > end) { part = {from: part.from, to: end, level: part.level}; } - return part -} - -var measureText; -// Compute the default text height. -function textHeight(display) { - if (display.cachedTextHeight != null) { return display.cachedTextHeight } - if (measureText == null) { - measureText = elt("pre"); - // Measure a bunch of lines, for browsers that compute - // fractional heights. - for (var i = 0; i < 49; ++i) { - measureText.appendChild(document.createTextNode("x")); - measureText.appendChild(elt("br")); - } - measureText.appendChild(document.createTextNode("x")); - } - removeChildrenAndAdd(display.measure, measureText); - var height = measureText.offsetHeight / 50; - if (height > 3) { display.cachedTextHeight = height; } - removeChildren(display.measure); - return height || 1 -} - -// Compute the default character width. -function charWidth(display) { - if (display.cachedCharWidth != null) { return display.cachedCharWidth } - var anchor = elt("span", "xxxxxxxxxx"); - var pre = elt("pre", [anchor]); - removeChildrenAndAdd(display.measure, pre); - var rect = anchor.getBoundingClientRect(), width = (rect.right - rect.left) / 10; - if (width > 2) { display.cachedCharWidth = width; } - return width || 10 -} - -// Do a bulk-read of the DOM positions and sizes needed to draw the -// view, so that we don't interleave reading and writing to the DOM. -function getDimensions(cm) { - var d = cm.display, left = {}, width = {}; - var gutterLeft = d.gutters.clientLeft; - for (var n = d.gutters.firstChild, i = 0; n; n = n.nextSibling, ++i) { - left[cm.options.gutters[i]] = n.offsetLeft + n.clientLeft + gutterLeft; - width[cm.options.gutters[i]] = n.clientWidth; - } - return {fixedPos: compensateForHScroll(d), - gutterTotalWidth: d.gutters.offsetWidth, - gutterLeft: left, - gutterWidth: width, - wrapperWidth: d.wrapper.clientWidth} -} - -// Computes display.scroller.scrollLeft + display.gutters.offsetWidth, -// but using getBoundingClientRect to get a sub-pixel-accurate -// result. -function compensateForHScroll(display) { - return display.scroller.getBoundingClientRect().left - display.sizer.getBoundingClientRect().left -} - -// Returns a function that estimates the height of a line, to use as -// first approximation until the line becomes visible (and is thus -// properly measurable). 
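
// textHeight above renders 50 lines and divides, because a single line is
// too coarse when the browser computes fractional line heights. A sketch of
// that averaging trick; container is assumed to be a hidden, attached
// element, and the helper name is hypothetical.
function sampleLineHeight(container, n) {
  n = n || 50;
  var pre = document.createElement("pre");
  for (var i = 0; i < n - 1; i++) {
    pre.appendChild(document.createTextNode("x"));
    pre.appendChild(document.createElement("br"));
  }
  pre.appendChild(document.createTextNode("x"));
  container.appendChild(pre);
  var height = pre.offsetHeight / n; // average out sub-pixel rounding
  container.removeChild(pre);
  return height || 1 // never 0: callers divide by this
}
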
-function estimateHeight(cm) { - var th = textHeight(cm.display), wrapping = cm.options.lineWrapping; - var perLine = wrapping && Math.max(5, cm.display.scroller.clientWidth / charWidth(cm.display) - 3); - return function (line) { - if (lineIsHidden(cm.doc, line)) { return 0 } - - var widgetsHeight = 0; - if (line.widgets) { for (var i = 0; i < line.widgets.length; i++) { - if (line.widgets[i].height) { widgetsHeight += line.widgets[i].height; } - } } - - if (wrapping) - { return widgetsHeight + (Math.ceil(line.text.length / perLine) || 1) * th } - else - { return widgetsHeight + th } - } -} - -function estimateLineHeights(cm) { - var doc = cm.doc, est = estimateHeight(cm); - doc.iter(function (line) { - var estHeight = est(line); - if (estHeight != line.height) { updateLineHeight(line, estHeight); } - }); -} - -// Given a mouse event, find the corresponding position. If liberal -// is false, it checks whether a gutter or scrollbar was clicked, -// and returns null if it was. forRect is used by rectangular -// selections, and tries to estimate a character position even for -// coordinates beyond the right of the text. -function posFromMouse(cm, e, liberal, forRect) { - var display = cm.display; - if (!liberal && e_target(e).getAttribute("cm-not-content") == "true") { return null } - - var x, y, space = display.lineSpace.getBoundingClientRect(); - // Fails unpredictably on IE[67] when mouse is dragged around quickly. - try { x = e.clientX - space.left; y = e.clientY - space.top; } - catch (e) { return null } - var coords = coordsChar(cm, x, y), line; - if (forRect && coords.xRel == 1 && (line = getLine(cm.doc, coords.line).text).length == coords.ch) { - var colDiff = countColumn(line, line.length, cm.options.tabSize) - line.length; - coords = Pos(coords.line, Math.max(0, Math.round((x - paddingH(cm.display).left) / charWidth(cm.display)) - colDiff)); - } - return coords -} - -// Find the view element corresponding to a given line. Return null -// when the line isn't visible. 
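
// The estimator above guesses a wrapped line's height from its length and
// the viewport's width in characters, as a stand-in until the line is
// actually rendered. The arithmetic in isolation (illustrative names):
function estimateWrappedHeight(textLength, charsPerLine, lineHeight) {
  var rows = Math.ceil(textLength / charsPerLine) || 1; // at least one row
  return rows * lineHeight
}
// e.g. a 200-char line at 80 chars per row and 16px rows:
//   estimateWrappedHeight(200, 80, 16) // -> 48
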
-function findViewIndex(cm, n) { - if (n >= cm.display.viewTo) { return null } - n -= cm.display.viewFrom; - if (n < 0) { return null } - var view = cm.display.view; - for (var i = 0; i < view.length; i++) { - n -= view[i].size; - if (n < 0) { return i } - } -} - -function updateSelection(cm) { - cm.display.input.showSelection(cm.display.input.prepareSelection()); -} - -function prepareSelection(cm, primary) { - if ( primary === void 0 ) primary = true; - - var doc = cm.doc, result = {}; - var curFragment = result.cursors = document.createDocumentFragment(); - var selFragment = result.selection = document.createDocumentFragment(); - - for (var i = 0; i < doc.sel.ranges.length; i++) { - if (!primary && i == doc.sel.primIndex) { continue } - var range$$1 = doc.sel.ranges[i]; - if (range$$1.from().line >= cm.display.viewTo || range$$1.to().line < cm.display.viewFrom) { continue } - var collapsed = range$$1.empty(); - if (collapsed || cm.options.showCursorWhenSelecting) - { drawSelectionCursor(cm, range$$1.head, curFragment); } - if (!collapsed) - { drawSelectionRange(cm, range$$1, selFragment); } - } - return result -} - -// Draws a cursor for the given range -function drawSelectionCursor(cm, head, output) { - var pos = cursorCoords(cm, head, "div", null, null, !cm.options.singleCursorHeightPerLine); - - var cursor = output.appendChild(elt("div", "\u00a0", "CodeMirror-cursor")); - cursor.style.left = pos.left + "px"; - cursor.style.top = pos.top + "px"; - cursor.style.height = Math.max(0, pos.bottom - pos.top) * cm.options.cursorHeight + "px"; - - if (pos.other) { - // Secondary cursor, shown when on a 'jump' in bi-directional text - var otherCursor = output.appendChild(elt("div", "\u00a0", "CodeMirror-cursor CodeMirror-secondarycursor")); - otherCursor.style.display = ""; - otherCursor.style.left = pos.other.left + "px"; - otherCursor.style.top = pos.other.top + "px"; - otherCursor.style.height = (pos.other.bottom - pos.other.top) * .85 + "px"; - } -} - -function cmpCoords(a, b) { return a.top - b.top || a.left - b.left } - -// Draws the given range as a highlighted selection -function drawSelectionRange(cm, range$$1, output) { - var display = cm.display, doc = cm.doc; - var fragment = document.createDocumentFragment(); - var padding = paddingH(cm.display), leftSide = padding.left; - var rightSide = Math.max(display.sizerWidth, displayWidth(cm) - display.sizer.offsetLeft) - padding.right; - var docLTR = doc.direction == "ltr"; - - function add(left, top, width, bottom) { - if (top < 0) { top = 0; } - top = Math.round(top); - bottom = Math.round(bottom); - fragment.appendChild(elt("div", null, "CodeMirror-selected", ("position: absolute; left: " + left + "px;\n top: " + top + "px; width: " + (width == null ? rightSide - left : width) + "px;\n height: " + (bottom - top) + "px"))); - } - - function drawForLine(line, fromArg, toArg) { - var lineObj = getLine(doc, line); - var lineLen = lineObj.text.length; - var start, end; - function coords(ch, bias) { - return charCoords(cm, Pos(line, ch), "div", lineObj, bias) - } - - function wrapX(pos, dir, side) { - var extent = wrappedLineExtentChar(cm, lineObj, null, pos); - var prop = (dir == "ltr") == (side == "after") ? "left" : "right"; - var ch = side == "after" ? extent.begin : extent.end - (/\s/.test(lineObj.text.charAt(extent.end - 1)) ? 2 : 1); - return coords(ch, prop)[prop] - } - - var order = getOrder(lineObj, doc.direction); - iterateBidiSections(order, fromArg || 0, toArg == null ? 
lineLen : toArg, function (from, to, dir, i) { - var ltr = dir == "ltr"; - var fromPos = coords(from, ltr ? "left" : "right"); - var toPos = coords(to - 1, ltr ? "right" : "left"); - - var openStart = fromArg == null && from == 0, openEnd = toArg == null && to == lineLen; - var first = i == 0, last = !order || i == order.length - 1; - if (toPos.top - fromPos.top <= 3) { // Single line - var openLeft = (docLTR ? openStart : openEnd) && first; - var openRight = (docLTR ? openEnd : openStart) && last; - var left = openLeft ? leftSide : (ltr ? fromPos : toPos).left; - var right = openRight ? rightSide : (ltr ? toPos : fromPos).right; - add(left, fromPos.top, right - left, fromPos.bottom); - } else { // Multiple lines - var topLeft, topRight, botLeft, botRight; - if (ltr) { - topLeft = docLTR && openStart && first ? leftSide : fromPos.left; - topRight = docLTR ? rightSide : wrapX(from, dir, "before"); - botLeft = docLTR ? leftSide : wrapX(to, dir, "after"); - botRight = docLTR && openEnd && last ? rightSide : toPos.right; - } else { - topLeft = !docLTR ? leftSide : wrapX(from, dir, "before"); - topRight = !docLTR && openStart && first ? rightSide : fromPos.right; - botLeft = !docLTR && openEnd && last ? leftSide : toPos.left; - botRight = !docLTR ? rightSide : wrapX(to, dir, "after"); - } - add(topLeft, fromPos.top, topRight - topLeft, fromPos.bottom); - if (fromPos.bottom < toPos.top) { add(leftSide, fromPos.bottom, null, toPos.top); } - add(botLeft, toPos.top, botRight - botLeft, toPos.bottom); - } - - if (!start || cmpCoords(fromPos, start) < 0) { start = fromPos; } - if (cmpCoords(toPos, start) < 0) { start = toPos; } - if (!end || cmpCoords(fromPos, end) < 0) { end = fromPos; } - if (cmpCoords(toPos, end) < 0) { end = toPos; } - }); - return {start: start, end: end} - } - - var sFrom = range$$1.from(), sTo = range$$1.to(); - if (sFrom.line == sTo.line) { - drawForLine(sFrom.line, sFrom.ch, sTo.ch); - } else { - var fromLine = getLine(doc, sFrom.line), toLine = getLine(doc, sTo.line); - var singleVLine = visualLine(fromLine) == visualLine(toLine); - var leftEnd = drawForLine(sFrom.line, sFrom.ch, singleVLine ? fromLine.text.length + 1 : null).end; - var rightStart = drawForLine(sTo.line, singleVLine ? 0 : null, sTo.ch).start; - if (singleVLine) { - if (leftEnd.top < rightStart.top - 2) { - add(leftEnd.right, leftEnd.top, null, leftEnd.bottom); - add(leftSide, rightStart.top, rightStart.left, rightStart.bottom); - } else { - add(leftEnd.right, leftEnd.top, rightStart.left - leftEnd.right, leftEnd.bottom); - } - } - if (leftEnd.bottom < rightStart.top) - { add(leftSide, leftEnd.bottom, null, rightStart.top); } - } - - output.appendChild(fragment); -} - -// Cursor-blinking -function restartBlink(cm) { - if (!cm.state.focused) { return } - var display = cm.display; - clearInterval(display.blinker); - var on = true; - display.cursorDiv.style.visibility = ""; - if (cm.options.cursorBlinkRate > 0) - { display.blinker = setInterval(function () { return display.cursorDiv.style.visibility = (on = !on) ? 
"" : "hidden"; }, - cm.options.cursorBlinkRate); } - else if (cm.options.cursorBlinkRate < 0) - { display.cursorDiv.style.visibility = "hidden"; } -} - -function ensureFocus(cm) { - if (!cm.state.focused) { cm.display.input.focus(); onFocus(cm); } -} - -function delayBlurEvent(cm) { - cm.state.delayingBlurEvent = true; - setTimeout(function () { if (cm.state.delayingBlurEvent) { - cm.state.delayingBlurEvent = false; - onBlur(cm); - } }, 100); -} - -function onFocus(cm, e) { - if (cm.state.delayingBlurEvent) { cm.state.delayingBlurEvent = false; } - - if (cm.options.readOnly == "nocursor") { return } - if (!cm.state.focused) { - signal(cm, "focus", cm, e); - cm.state.focused = true; - addClass(cm.display.wrapper, "CodeMirror-focused"); - // This test prevents this from firing when a context - // menu is closed (since the input reset would kill the - // select-all detection hack) - if (!cm.curOp && cm.display.selForContextMenu != cm.doc.sel) { - cm.display.input.reset(); - if (webkit) { setTimeout(function () { return cm.display.input.reset(true); }, 20); } // Issue #1730 - } - cm.display.input.receivedFocus(); - } - restartBlink(cm); -} -function onBlur(cm, e) { - if (cm.state.delayingBlurEvent) { return } - - if (cm.state.focused) { - signal(cm, "blur", cm, e); - cm.state.focused = false; - rmClass(cm.display.wrapper, "CodeMirror-focused"); - } - clearInterval(cm.display.blinker); - setTimeout(function () { if (!cm.state.focused) { cm.display.shift = false; } }, 150); -} - -// Read the actual heights of the rendered lines, and update their -// stored heights to match. -function updateHeightsInViewport(cm) { - var display = cm.display; - var prevBottom = display.lineDiv.offsetTop; - for (var i = 0; i < display.view.length; i++) { - var cur = display.view[i], height = (void 0); - if (cur.hidden) { continue } - if (ie && ie_version < 8) { - var bot = cur.node.offsetTop + cur.node.offsetHeight; - height = bot - prevBottom; - prevBottom = bot; - } else { - var box = cur.node.getBoundingClientRect(); - height = box.bottom - box.top; - } - var diff = cur.line.height - height; - if (height < 2) { height = textHeight(display); } - if (diff > .005 || diff < -.005) { - updateLineHeight(cur.line, height); - updateWidgetHeight(cur.line); - if (cur.rest) { for (var j = 0; j < cur.rest.length; j++) - { updateWidgetHeight(cur.rest[j]); } } - } - } -} - -// Read and store the height of line widgets associated with the -// given line. -function updateWidgetHeight(line) { - if (line.widgets) { for (var i = 0; i < line.widgets.length; ++i) - { line.widgets[i].height = line.widgets[i].node.parentNode.offsetHeight; } } -} - -// Compute the lines that are visible in a given viewport (defaults -// the the current scroll position). viewport may contain top, -// height, and ensure (see op.scrollToPos) properties. -function visibleLines(display, doc, viewport) { - var top = viewport && viewport.top != null ? Math.max(0, viewport.top) : display.scroller.scrollTop; - top = Math.floor(top - paddingTop(display)); - var bottom = viewport && viewport.bottom != null ? viewport.bottom : top + display.wrapper.clientHeight; - - var from = lineAtHeight(doc, top), to = lineAtHeight(doc, bottom); - // Ensure is a {from: {line, ch}, to: {line, ch}} object, and - // forces those lines into the viewport (if possible). 
- if (viewport && viewport.ensure) { - var ensureFrom = viewport.ensure.from.line, ensureTo = viewport.ensure.to.line; - if (ensureFrom < from) { - from = ensureFrom; - to = lineAtHeight(doc, heightAtLine(getLine(doc, ensureFrom)) + display.wrapper.clientHeight); - } else if (Math.min(ensureTo, doc.lastLine()) >= to) { - from = lineAtHeight(doc, heightAtLine(getLine(doc, ensureTo)) - display.wrapper.clientHeight); - to = ensureTo; - } - } - return {from: from, to: Math.max(to, from + 1)} -} - -// Re-align line numbers and gutter marks to compensate for -// horizontal scrolling. -function alignHorizontally(cm) { - var display = cm.display, view = display.view; - if (!display.alignWidgets && (!display.gutters.firstChild || !cm.options.fixedGutter)) { return } - var comp = compensateForHScroll(display) - display.scroller.scrollLeft + cm.doc.scrollLeft; - var gutterW = display.gutters.offsetWidth, left = comp + "px"; - for (var i = 0; i < view.length; i++) { if (!view[i].hidden) { - if (cm.options.fixedGutter) { - if (view[i].gutter) - { view[i].gutter.style.left = left; } - if (view[i].gutterBackground) - { view[i].gutterBackground.style.left = left; } - } - var align = view[i].alignable; - if (align) { for (var j = 0; j < align.length; j++) - { align[j].style.left = left; } } - } } - if (cm.options.fixedGutter) - { display.gutters.style.left = (comp + gutterW) + "px"; } -} - -// Used to ensure that the line number gutter is still the right -// size for the current document size. Returns true when an update -// is needed. -function maybeUpdateLineNumberWidth(cm) { - if (!cm.options.lineNumbers) { return false } - var doc = cm.doc, last = lineNumberFor(cm.options, doc.first + doc.size - 1), display = cm.display; - if (last.length != display.lineNumChars) { - var test = display.measure.appendChild(elt("div", [elt("div", last)], - "CodeMirror-linenumber CodeMirror-gutter-elt")); - var innerW = test.firstChild.offsetWidth, padding = test.offsetWidth - innerW; - display.lineGutter.style.width = ""; - display.lineNumInnerWidth = Math.max(innerW, display.lineGutter.offsetWidth - padding) + 1; - display.lineNumWidth = display.lineNumInnerWidth + padding; - display.lineNumChars = display.lineNumInnerWidth ? last.length : -1; - display.lineGutter.style.width = display.lineNumWidth + "px"; - updateGutterSpace(cm); - return true - } - return false -} - -// SCROLLING THINGS INTO VIEW - -// If an editor sits on the top or bottom of the window, partially -// scrolled out of view, this ensures that the cursor is visible. 
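
// A sketch of the viewport computation that visibleLines above performs:
// find the lines covering [top, bottom) given per-line heights. CodeMirror
// answers this with a B-tree (lineAtHeight); a flat-array version shows the
// idea. Illustrative names only.
function visibleRange(lineHeights, top, bottom) {
  var from = 0, y = 0;
  while (from < lineHeights.length && y + lineHeights[from] <= top)
    { y += lineHeights[from++]; }
  var to = from;
  while (to < lineHeights.length && y < bottom)
    { y += lineHeights[to++]; }
  return {from: from, to: Math.max(to, from + 1)} // never an empty range
}
// visibleRange([16, 16, 16, 16], 20, 50) // -> {from: 1, to: 4}
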
-function maybeScrollWindow(cm, rect) { - if (signalDOMEvent(cm, "scrollCursorIntoView")) { return } - - var display = cm.display, box = display.sizer.getBoundingClientRect(), doScroll = null; - if (rect.top + box.top < 0) { doScroll = true; } - else if (rect.bottom + box.top > (window.innerHeight || document.documentElement.clientHeight)) { doScroll = false; } - if (doScroll != null && !phantom) { - var scrollNode = elt("div", "\u200b", null, ("position: absolute;\n top: " + (rect.top - display.viewOffset - paddingTop(cm.display)) + "px;\n height: " + (rect.bottom - rect.top + scrollGap(cm) + display.barHeight) + "px;\n left: " + (rect.left) + "px; width: " + (Math.max(2, rect.right - rect.left)) + "px;")); - cm.display.lineSpace.appendChild(scrollNode); - scrollNode.scrollIntoView(doScroll); - cm.display.lineSpace.removeChild(scrollNode); - } -} - -// Scroll a given position into view (immediately), verifying that -// it actually became visible (as line heights are accurately -// measured, the position of something may 'drift' during drawing). -function scrollPosIntoView(cm, pos, end, margin) { - if (margin == null) { margin = 0; } - var rect; - if (!cm.options.lineWrapping && pos == end) { - // Set pos and end to the cursor positions around the character pos sticks to - // If pos.sticky == "before", that is around pos.ch - 1, otherwise around pos.ch - // If pos == Pos(_, 0, "before"), pos and end are unchanged - pos = pos.ch ? Pos(pos.line, pos.sticky == "before" ? pos.ch - 1 : pos.ch, "after") : pos; - end = pos.sticky == "before" ? Pos(pos.line, pos.ch + 1, "before") : pos; - } - for (var limit = 0; limit < 5; limit++) { - var changed = false; - var coords = cursorCoords(cm, pos); - var endCoords = !end || end == pos ? coords : cursorCoords(cm, end); - rect = {left: Math.min(coords.left, endCoords.left), - top: Math.min(coords.top, endCoords.top) - margin, - right: Math.max(coords.left, endCoords.left), - bottom: Math.max(coords.bottom, endCoords.bottom) + margin}; - var scrollPos = calculateScrollPos(cm, rect); - var startTop = cm.doc.scrollTop, startLeft = cm.doc.scrollLeft; - if (scrollPos.scrollTop != null) { - updateScrollTop(cm, scrollPos.scrollTop); - if (Math.abs(cm.doc.scrollTop - startTop) > 1) { changed = true; } - } - if (scrollPos.scrollLeft != null) { - setScrollLeft(cm, scrollPos.scrollLeft); - if (Math.abs(cm.doc.scrollLeft - startLeft) > 1) { changed = true; } - } - if (!changed) { break } - } - return rect -} - -// Scroll a given set of coordinates into view (immediately). -function scrollIntoView(cm, rect) { - var scrollPos = calculateScrollPos(cm, rect); - if (scrollPos.scrollTop != null) { updateScrollTop(cm, scrollPos.scrollTop); } - if (scrollPos.scrollLeft != null) { setScrollLeft(cm, scrollPos.scrollLeft); } -} - -// Calculate a new scroll position needed to scroll the given -// rectangle into view. Returns an object with scrollTop and -// scrollLeft properties. When these are undefined, the -// vertical/horizontal position does not need to be adjusted. -function calculateScrollPos(cm, rect) { - var display = cm.display, snapMargin = textHeight(cm.display); - if (rect.top < 0) { rect.top = 0; } - var screentop = cm.curOp && cm.curOp.scrollTop != null ? 
cm.curOp.scrollTop : display.scroller.scrollTop; - var screen = displayHeight(cm), result = {}; - if (rect.bottom - rect.top > screen) { rect.bottom = rect.top + screen; } - var docBottom = cm.doc.height + paddingVert(display); - var atTop = rect.top < snapMargin, atBottom = rect.bottom > docBottom - snapMargin; - if (rect.top < screentop) { - result.scrollTop = atTop ? 0 : rect.top; - } else if (rect.bottom > screentop + screen) { - var newTop = Math.min(rect.top, (atBottom ? docBottom : rect.bottom) - screen); - if (newTop != screentop) { result.scrollTop = newTop; } - } - - var screenleft = cm.curOp && cm.curOp.scrollLeft != null ? cm.curOp.scrollLeft : display.scroller.scrollLeft; - var screenw = displayWidth(cm) - (cm.options.fixedGutter ? display.gutters.offsetWidth : 0); - var tooWide = rect.right - rect.left > screenw; - if (tooWide) { rect.right = rect.left + screenw; } - if (rect.left < 10) - { result.scrollLeft = 0; } - else if (rect.left < screenleft) - { result.scrollLeft = Math.max(0, rect.left - (tooWide ? 0 : 10)); } - else if (rect.right > screenw + screenleft - 3) - { result.scrollLeft = rect.right + (tooWide ? 0 : 10) - screenw; } - return result -} - -// Store a relative adjustment to the scroll position in the current -// operation (to be applied when the operation finishes). -function addToScrollTop(cm, top) { - if (top == null) { return } - resolveScrollToPos(cm); - cm.curOp.scrollTop = (cm.curOp.scrollTop == null ? cm.doc.scrollTop : cm.curOp.scrollTop) + top; -} - -// Make sure that at the end of the operation the current cursor is -// shown. -function ensureCursorVisible(cm) { - resolveScrollToPos(cm); - var cur = cm.getCursor(); - cm.curOp.scrollToPos = {from: cur, to: cur, margin: cm.options.cursorScrollMargin}; -} - -function scrollToCoords(cm, x, y) { - if (x != null || y != null) { resolveScrollToPos(cm); } - if (x != null) { cm.curOp.scrollLeft = x; } - if (y != null) { cm.curOp.scrollTop = y; } -} - -function scrollToRange(cm, range$$1) { - resolveScrollToPos(cm); - cm.curOp.scrollToPos = range$$1; -} - -// When an operation has its scrollToPos property set, and another -// scroll action is applied before the end of the operation, this -// 'simulates' scrolling that position into view in a cheap way, so -// that the effect of intermediate scroll commands is not ignored. -function resolveScrollToPos(cm) { - var range$$1 = cm.curOp.scrollToPos; - if (range$$1) { - cm.curOp.scrollToPos = null; - var from = estimateCoords(cm, range$$1.from), to = estimateCoords(cm, range$$1.to); - scrollToCoordsRange(cm, from, to, range$$1.margin); - } -} - -function scrollToCoordsRange(cm, from, to, margin) { - var sPos = calculateScrollPos(cm, { - left: Math.min(from.left, to.left), - top: Math.min(from.top, to.top) - margin, - right: Math.max(from.right, to.right), - bottom: Math.max(from.bottom, to.bottom) + margin - }); - scrollToCoords(cm, sPos.scrollLeft, sPos.scrollTop); -} - -// Sync the scrollable area and scrollbars, ensure the viewport -// covers the visible area. 
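
// The core of the calculateScrollPos arithmetic above, in isolation: return
// a new scrollTop only when the rectangle is not already fully visible.
// This sketch omits the snap-to-edge margins and curOp bookkeeping.
function scrollTopFor(rect, screenTop, screenHeight) {
  if (rect.top < screenTop) { return rect.top } // scroll up to the rect
  if (rect.bottom > screenTop + screenHeight)
    { return Math.min(rect.top, rect.bottom - screenHeight) } // scroll down
  return null // already visible; leave the vertical position alone
}
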
-function updateScrollTop(cm, val) { - if (Math.abs(cm.doc.scrollTop - val) < 2) { return } - if (!gecko) { updateDisplaySimple(cm, {top: val}); } - setScrollTop(cm, val, true); - if (gecko) { updateDisplaySimple(cm); } - startWorker(cm, 100); -} - -function setScrollTop(cm, val, forceScroll) { - val = Math.min(cm.display.scroller.scrollHeight - cm.display.scroller.clientHeight, val); - if (cm.display.scroller.scrollTop == val && !forceScroll) { return } - cm.doc.scrollTop = val; - cm.display.scrollbars.setScrollTop(val); - if (cm.display.scroller.scrollTop != val) { cm.display.scroller.scrollTop = val; } -} - -// Sync scroller and scrollbar, ensure the gutter elements are -// aligned. -function setScrollLeft(cm, val, isScroller, forceScroll) { - val = Math.min(val, cm.display.scroller.scrollWidth - cm.display.scroller.clientWidth); - if ((isScroller ? val == cm.doc.scrollLeft : Math.abs(cm.doc.scrollLeft - val) < 2) && !forceScroll) { return } - cm.doc.scrollLeft = val; - alignHorizontally(cm); - if (cm.display.scroller.scrollLeft != val) { cm.display.scroller.scrollLeft = val; } - cm.display.scrollbars.setScrollLeft(val); -} - -// SCROLLBARS - -// Prepare DOM reads needed to update the scrollbars. Done in one -// shot to minimize update/measure roundtrips. -function measureForScrollbars(cm) { - var d = cm.display, gutterW = d.gutters.offsetWidth; - var docH = Math.round(cm.doc.height + paddingVert(cm.display)); - return { - clientHeight: d.scroller.clientHeight, - viewHeight: d.wrapper.clientHeight, - scrollWidth: d.scroller.scrollWidth, clientWidth: d.scroller.clientWidth, - viewWidth: d.wrapper.clientWidth, - barLeft: cm.options.fixedGutter ? gutterW : 0, - docHeight: docH, - scrollHeight: docH + scrollGap(cm) + d.barHeight, - nativeBarWidth: d.nativeBarWidth, - gutterWidth: gutterW - } -} - -var NativeScrollbars = function(place, scroll, cm) { - this.cm = cm; - var vert = this.vert = elt("div", [elt("div", null, null, "min-width: 1px")], "CodeMirror-vscrollbar"); - var horiz = this.horiz = elt("div", [elt("div", null, null, "height: 100%; min-height: 1px")], "CodeMirror-hscrollbar"); - place(vert); place(horiz); - - on(vert, "scroll", function () { - if (vert.clientHeight) { scroll(vert.scrollTop, "vertical"); } - }); - on(horiz, "scroll", function () { - if (horiz.clientWidth) { scroll(horiz.scrollLeft, "horizontal"); } - }); - - this.checkedZeroWidth = false; - // Need to set a minimum width to see the scrollbar on IE7 (but must not set it on IE8). - if (ie && ie_version < 8) { this.horiz.style.minHeight = this.vert.style.minWidth = "18px"; } -}; - -NativeScrollbars.prototype.update = function (measure) { - var needsH = measure.scrollWidth > measure.clientWidth + 1; - var needsV = measure.scrollHeight > measure.clientHeight + 1; - var sWidth = measure.nativeBarWidth; - - if (needsV) { - this.vert.style.display = "block"; - this.vert.style.bottom = needsH ? sWidth + "px" : "0"; - var totalHeight = measure.viewHeight - (needsH ? sWidth : 0); - // A bug in IE8 can cause this value to be negative, so guard it. - this.vert.firstChild.style.height = - Math.max(0, measure.scrollHeight - measure.clientHeight + totalHeight) + "px"; - } else { - this.vert.style.display = ""; - this.vert.firstChild.style.height = "0"; - } - - if (needsH) { - this.horiz.style.display = "block"; - this.horiz.style.right = needsV ? sWidth + "px" : "0"; - this.horiz.style.left = measure.barLeft + "px"; - var totalWidth = measure.viewWidth - measure.barLeft - (needsV ? 
sWidth : 0); - this.horiz.firstChild.style.width = - Math.max(0, measure.scrollWidth - measure.clientWidth + totalWidth) + "px"; - } else { - this.horiz.style.display = ""; - this.horiz.firstChild.style.width = "0"; - } - - if (!this.checkedZeroWidth && measure.clientHeight > 0) { - if (sWidth == 0) { this.zeroWidthHack(); } - this.checkedZeroWidth = true; - } - - return {right: needsV ? sWidth : 0, bottom: needsH ? sWidth : 0} -}; - -NativeScrollbars.prototype.setScrollLeft = function (pos) { - if (this.horiz.scrollLeft != pos) { this.horiz.scrollLeft = pos; } - if (this.disableHoriz) { this.enableZeroWidthBar(this.horiz, this.disableHoriz, "horiz"); } -}; - -NativeScrollbars.prototype.setScrollTop = function (pos) { - if (this.vert.scrollTop != pos) { this.vert.scrollTop = pos; } - if (this.disableVert) { this.enableZeroWidthBar(this.vert, this.disableVert, "vert"); } -}; - -NativeScrollbars.prototype.zeroWidthHack = function () { - var w = mac && !mac_geMountainLion ? "12px" : "18px"; - this.horiz.style.height = this.vert.style.width = w; - this.horiz.style.pointerEvents = this.vert.style.pointerEvents = "none"; - this.disableHoriz = new Delayed; - this.disableVert = new Delayed; -}; - -NativeScrollbars.prototype.enableZeroWidthBar = function (bar, delay, type) { - bar.style.pointerEvents = "auto"; - function maybeDisable() { - // To find out whether the scrollbar is still visible, we - // check whether the element under the pixel in the bottom - // right corner of the scrollbar box is the scrollbar box - // itself (when the bar is still visible) or its filler child - // (when the bar is hidden). If it is still visible, we keep - // it enabled, if it's hidden, we disable pointer events. - var box = bar.getBoundingClientRect(); - var elt$$1 = type == "vert" ? document.elementFromPoint(box.right - 1, (box.top + box.bottom) / 2) - : document.elementFromPoint((box.right + box.left) / 2, box.bottom - 1); - if (elt$$1 != bar) { bar.style.pointerEvents = "none"; } - else { delay.set(1000, maybeDisable); } - } - delay.set(1000, maybeDisable); -}; - -NativeScrollbars.prototype.clear = function () { - var parent = this.horiz.parentNode; - parent.removeChild(this.horiz); - parent.removeChild(this.vert); -}; - -var NullScrollbars = function () {}; - -NullScrollbars.prototype.update = function () { return {bottom: 0, right: 0} }; -NullScrollbars.prototype.setScrollLeft = function () {}; -NullScrollbars.prototype.setScrollTop = function () {}; -NullScrollbars.prototype.clear = function () {}; - -function updateScrollbars(cm, measure) { - if (!measure) { measure = measureForScrollbars(cm); } - var startWidth = cm.display.barWidth, startHeight = cm.display.barHeight; - updateScrollbarsInner(cm, measure); - for (var i = 0; i < 4 && startWidth != cm.display.barWidth || startHeight != cm.display.barHeight; i++) { - if (startWidth != cm.display.barWidth && cm.options.lineWrapping) - { updateHeightsInViewport(cm); } - updateScrollbarsInner(cm, measureForScrollbars(cm)); - startWidth = cm.display.barWidth; startHeight = cm.display.barHeight; - } -} - -// Re-synchronize the fake scrollbars with the actual size of the -// content. 
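
// The retry loop in updateScrollbars above is a fixed-point iteration:
// showing one scrollbar consumes space, which can make the other bar
// necessary, which changes the measurements again. A generic sketch of that
// read/write-until-stable pattern (hypothetical helper):
function settleLayout(measure, apply, maxRounds) {
  var current = measure();                    // read
  for (var i = 0; i < (maxRounds || 4); i++) {
    apply(current);                           // write: show/hide, resize
    var next = measure();                     // read again
    if (next.width == current.width && next.height == current.height) { break }
    current = next;                           // geometry moved: iterate
  }
}
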
-function updateScrollbarsInner(cm, measure) { - var d = cm.display; - var sizes = d.scrollbars.update(measure); - - d.sizer.style.paddingRight = (d.barWidth = sizes.right) + "px"; - d.sizer.style.paddingBottom = (d.barHeight = sizes.bottom) + "px"; - d.heightForcer.style.borderBottom = sizes.bottom + "px solid transparent"; - - if (sizes.right && sizes.bottom) { - d.scrollbarFiller.style.display = "block"; - d.scrollbarFiller.style.height = sizes.bottom + "px"; - d.scrollbarFiller.style.width = sizes.right + "px"; - } else { d.scrollbarFiller.style.display = ""; } - if (sizes.bottom && cm.options.coverGutterNextToScrollbar && cm.options.fixedGutter) { - d.gutterFiller.style.display = "block"; - d.gutterFiller.style.height = sizes.bottom + "px"; - d.gutterFiller.style.width = measure.gutterWidth + "px"; - } else { d.gutterFiller.style.display = ""; } -} - -var scrollbarModel = {"native": NativeScrollbars, "null": NullScrollbars}; - -function initScrollbars(cm) { - if (cm.display.scrollbars) { - cm.display.scrollbars.clear(); - if (cm.display.scrollbars.addClass) - { rmClass(cm.display.wrapper, cm.display.scrollbars.addClass); } - } - - cm.display.scrollbars = new scrollbarModel[cm.options.scrollbarStyle](function (node) { - cm.display.wrapper.insertBefore(node, cm.display.scrollbarFiller); - // Prevent clicks in the scrollbars from killing focus - on(node, "mousedown", function () { - if (cm.state.focused) { setTimeout(function () { return cm.display.input.focus(); }, 0); } - }); - node.setAttribute("cm-not-content", "true"); - }, function (pos, axis) { - if (axis == "horizontal") { setScrollLeft(cm, pos); } - else { updateScrollTop(cm, pos); } - }, cm); - if (cm.display.scrollbars.addClass) - { addClass(cm.display.wrapper, cm.display.scrollbars.addClass); } -} - -// Operations are used to wrap a series of changes to the editor -// state in such a way that each change won't have to update the -// cursor and display (which would be awkward, slow, and -// error-prone). Instead, display updates are batched and then all -// combined and executed at once. - -var nextOpId = 0; -// Start a new operation. 
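// A minimal standalone sketch of the batching idea behind the operation
// machinery below (hypothetical names, not CodeMirror's actual API): queue
// DOM reads and writes separately, then flush each phase together so the
// browser has to compute layout at most once per flush.
function exampleBatchedFlush(reads, writes) {
  var measurements = reads.map(function (read) { return read(); });  // read phase: no DOM writes yet
  writes.forEach(function (write, i) { write(measurements[i]); });   // write phase: no reads, so no relayout thrash
}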
-function startOperation(cm) { - cm.curOp = { - cm: cm, - viewChanged: false, // Flag that indicates that lines might need to be redrawn - startHeight: cm.doc.height, // Used to detect need to update scrollbar - forceUpdate: false, // Used to force a redraw - updateInput: null, // Whether to reset the input textarea - typing: false, // Whether this reset should be careful to leave existing text (for compositing) - changeObjs: null, // Accumulated changes, for firing change events - cursorActivityHandlers: null, // Set of handlers to fire cursorActivity on - cursorActivityCalled: 0, // Tracks which cursorActivity handlers have been called already - selectionChanged: false, // Whether the selection needs to be redrawn - updateMaxLine: false, // Set when the widest line needs to be determined anew - scrollLeft: null, scrollTop: null, // Intermediate scroll position, not pushed to DOM yet - scrollToPos: null, // Used to scroll to a specific position - focus: false, - id: ++nextOpId // Unique ID - }; - pushOperation(cm.curOp); -} - -// Finish an operation, updating the display and signalling delayed events -function endOperation(cm) { - var op = cm.curOp; - finishOperation(op, function (group) { - for (var i = 0; i < group.ops.length; i++) - { group.ops[i].cm.curOp = null; } - endOperations(group); - }); -} - -// The DOM updates done when an operation finishes are batched so -// that the minimum number of relayouts are required. -function endOperations(group) { - var ops = group.ops; - for (var i = 0; i < ops.length; i++) // Read DOM - { endOperation_R1(ops[i]); } - for (var i$1 = 0; i$1 < ops.length; i$1++) // Write DOM (maybe) - { endOperation_W1(ops[i$1]); } - for (var i$2 = 0; i$2 < ops.length; i$2++) // Read DOM - { endOperation_R2(ops[i$2]); } - for (var i$3 = 0; i$3 < ops.length; i$3++) // Write DOM (maybe) - { endOperation_W2(ops[i$3]); } - for (var i$4 = 0; i$4 < ops.length; i$4++) // Read DOM - { endOperation_finish(ops[i$4]); } -} - -function endOperation_R1(op) { - var cm = op.cm, display = cm.display; - maybeClipScrollbars(cm); - if (op.updateMaxLine) { findMaxLine(cm); } - - op.mustUpdate = op.viewChanged || op.forceUpdate || op.scrollTop != null || - op.scrollToPos && (op.scrollToPos.from.line < display.viewFrom || - op.scrollToPos.to.line >= display.viewTo) || - display.maxLineChanged && cm.options.lineWrapping; - op.update = op.mustUpdate && - new DisplayUpdate(cm, op.mustUpdate && {top: op.scrollTop, ensure: op.scrollToPos}, op.forceUpdate); -} - -function endOperation_W1(op) { - op.updatedDisplay = op.mustUpdate && updateDisplayIfNeeded(op.cm, op.update); -} - -function endOperation_R2(op) { - var cm = op.cm, display = cm.display; - if (op.updatedDisplay) { updateHeightsInViewport(cm); } - - op.barMeasure = measureForScrollbars(cm); - - // If the max line changed since it was last measured, measure it, - // and ensure the document's width matches it. 
- // updateDisplay_W2 will use these properties to do the actual resizing
- if (display.maxLineChanged && !cm.options.lineWrapping) {
- op.adjustWidthTo = measureChar(cm, display.maxLine, display.maxLine.text.length).left + 3;
- cm.display.sizerWidth = op.adjustWidthTo;
- op.barMeasure.scrollWidth =
- Math.max(display.scroller.clientWidth, display.sizer.offsetLeft + op.adjustWidthTo + scrollGap(cm) + cm.display.barWidth);
- op.maxScrollLeft = Math.max(0, display.sizer.offsetLeft + op.adjustWidthTo - displayWidth(cm));
- }
-
- if (op.updatedDisplay || op.selectionChanged)
- { op.preparedSelection = display.input.prepareSelection(); }
-}
-
-function endOperation_W2(op) {
- var cm = op.cm;
-
- if (op.adjustWidthTo != null) {
- cm.display.sizer.style.minWidth = op.adjustWidthTo + "px";
- if (op.maxScrollLeft < cm.doc.scrollLeft)
- { setScrollLeft(cm, Math.min(cm.display.scroller.scrollLeft, op.maxScrollLeft), true); }
- cm.display.maxLineChanged = false;
- }
-
- var takeFocus = op.focus && op.focus == activeElt();
- if (op.preparedSelection)
- { cm.display.input.showSelection(op.preparedSelection, takeFocus); }
- if (op.updatedDisplay || op.startHeight != cm.doc.height)
- { updateScrollbars(cm, op.barMeasure); }
- if (op.updatedDisplay)
- { setDocumentHeight(cm, op.barMeasure); }
-
- if (op.selectionChanged) { restartBlink(cm); }
-
- if (cm.state.focused && op.updateInput)
- { cm.display.input.reset(op.typing); }
- if (takeFocus) { ensureFocus(op.cm); }
-}
-
-function endOperation_finish(op) {
- var cm = op.cm, display = cm.display, doc = cm.doc;
-
- if (op.updatedDisplay) { postUpdateDisplay(cm, op.update); }
-
- // Abort mouse wheel delta measurement, when scrolling explicitly
- if (display.wheelStartX != null && (op.scrollTop != null || op.scrollLeft != null || op.scrollToPos))
- { display.wheelStartX = display.wheelStartY = null; }
-
- // Propagate the scroll position to the actual DOM scroller
- if (op.scrollTop != null) { setScrollTop(cm, op.scrollTop, op.forceScroll); }
-
- if (op.scrollLeft != null) { setScrollLeft(cm, op.scrollLeft, true, true); }
- // If we need to scroll a specific position into view, do so.
- if (op.scrollToPos) {
- var rect = scrollPosIntoView(cm, clipPos(doc, op.scrollToPos.from),
- clipPos(doc, op.scrollToPos.to), op.scrollToPos.margin);
- maybeScrollWindow(cm, rect);
- }
-
- // Fire events for markers that are hidden/unhidden by editing or
- // undoing
- var hidden = op.maybeHiddenMarkers, unhidden = op.maybeUnhiddenMarkers;
- if (hidden) { for (var i = 0; i < hidden.length; ++i)
- { if (!hidden[i].lines.length) { signal(hidden[i], "hide"); } } }
- if (unhidden) { for (var i$1 = 0; i$1 < unhidden.length; ++i$1)
- { if (unhidden[i$1].lines.length) { signal(unhidden[i$1], "unhide"); } } }
-
- if (display.wrapper.offsetHeight)
- { doc.scrollTop = cm.display.scroller.scrollTop; }
-
- // Fire change events, and delayed event handlers
- if (op.changeObjs)
- { signal(cm, "changes", cm, op.changeObjs); }
- if (op.update)
- { op.update.finish(); }
-}
-
-// Run the given function in an operation
-function runInOp(cm, f) {
- if (cm.curOp) { return f() }
- startOperation(cm);
- try { return f() }
- finally { endOperation(cm); }
-}
-// Wraps a function in an operation. Returns the wrapped function.
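// A usage sketch for runInOp above and the operation wrapper below
// (hypothetical helper; assumes `cm` is a live editor instance): both edits
// share a single startOperation/endOperation cycle, so the display and
// scrollbars are updated once rather than twice.
function exampleBatchedEdits(cm) {
  runInOp(cm, function () {
    cm.replaceRange("first", Pos(0, 0));
    cm.replaceRange("second", Pos(1, 0));
  });
}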
-function operation(cm, f) { - return function() { - if (cm.curOp) { return f.apply(cm, arguments) } - startOperation(cm); - try { return f.apply(cm, arguments) } - finally { endOperation(cm); } - } -} -// Used to add methods to editor and doc instances, wrapping them in -// operations. -function methodOp(f) { - return function() { - if (this.curOp) { return f.apply(this, arguments) } - startOperation(this); - try { return f.apply(this, arguments) } - finally { endOperation(this); } - } -} -function docMethodOp(f) { - return function() { - var cm = this.cm; - if (!cm || cm.curOp) { return f.apply(this, arguments) } - startOperation(cm); - try { return f.apply(this, arguments) } - finally { endOperation(cm); } - } -} - -// Updates the display.view data structure for a given change to the -// document. From and to are in pre-change coordinates. Lendiff is -// the amount of lines added or subtracted by the change. This is -// used for changes that span multiple lines, or change the way -// lines are divided into visual lines. regLineChange (below) -// registers single-line changes. -function regChange(cm, from, to, lendiff) { - if (from == null) { from = cm.doc.first; } - if (to == null) { to = cm.doc.first + cm.doc.size; } - if (!lendiff) { lendiff = 0; } - - var display = cm.display; - if (lendiff && to < display.viewTo && - (display.updateLineNumbers == null || display.updateLineNumbers > from)) - { display.updateLineNumbers = from; } - - cm.curOp.viewChanged = true; - - if (from >= display.viewTo) { // Change after - if (sawCollapsedSpans && visualLineNo(cm.doc, from) < display.viewTo) - { resetView(cm); } - } else if (to <= display.viewFrom) { // Change before - if (sawCollapsedSpans && visualLineEndNo(cm.doc, to + lendiff) > display.viewFrom) { - resetView(cm); - } else { - display.viewFrom += lendiff; - display.viewTo += lendiff; - } - } else if (from <= display.viewFrom && to >= display.viewTo) { // Full overlap - resetView(cm); - } else if (from <= display.viewFrom) { // Top overlap - var cut = viewCuttingPoint(cm, to, to + lendiff, 1); - if (cut) { - display.view = display.view.slice(cut.index); - display.viewFrom = cut.lineN; - display.viewTo += lendiff; - } else { - resetView(cm); - } - } else if (to >= display.viewTo) { // Bottom overlap - var cut$1 = viewCuttingPoint(cm, from, from, -1); - if (cut$1) { - display.view = display.view.slice(0, cut$1.index); - display.viewTo = cut$1.lineN; - } else { - resetView(cm); - } - } else { // Gap in the middle - var cutTop = viewCuttingPoint(cm, from, from, -1); - var cutBot = viewCuttingPoint(cm, to, to + lendiff, 1); - if (cutTop && cutBot) { - display.view = display.view.slice(0, cutTop.index) - .concat(buildViewArray(cm, cutTop.lineN, cutBot.lineN)) - .concat(display.view.slice(cutBot.index)); - display.viewTo += lendiff; - } else { - resetView(cm); - } - } - - var ext = display.externalMeasured; - if (ext) { - if (to < ext.lineN) - { ext.lineN += lendiff; } - else if (from < ext.lineN + ext.size) - { display.externalMeasured = null; } - } -} - -// Register a change to a single line. 
Type must be one of "text", -// "gutter", "class", "widget" -function regLineChange(cm, line, type) { - cm.curOp.viewChanged = true; - var display = cm.display, ext = cm.display.externalMeasured; - if (ext && line >= ext.lineN && line < ext.lineN + ext.size) - { display.externalMeasured = null; } - - if (line < display.viewFrom || line >= display.viewTo) { return } - var lineView = display.view[findViewIndex(cm, line)]; - if (lineView.node == null) { return } - var arr = lineView.changes || (lineView.changes = []); - if (indexOf(arr, type) == -1) { arr.push(type); } -} - -// Clear the view. -function resetView(cm) { - cm.display.viewFrom = cm.display.viewTo = cm.doc.first; - cm.display.view = []; - cm.display.viewOffset = 0; -} - -function viewCuttingPoint(cm, oldN, newN, dir) { - var index = findViewIndex(cm, oldN), diff, view = cm.display.view; - if (!sawCollapsedSpans || newN == cm.doc.first + cm.doc.size) - { return {index: index, lineN: newN} } - var n = cm.display.viewFrom; - for (var i = 0; i < index; i++) - { n += view[i].size; } - if (n != oldN) { - if (dir > 0) { - if (index == view.length - 1) { return null } - diff = (n + view[index].size) - oldN; - index++; - } else { - diff = n - oldN; - } - oldN += diff; newN += diff; - } - while (visualLineNo(cm.doc, newN) != newN) { - if (index == (dir < 0 ? 0 : view.length - 1)) { return null } - newN += dir * view[index - (dir < 0 ? 1 : 0)].size; - index += dir; - } - return {index: index, lineN: newN} -} - -// Force the view to cover a given range, adding empty view element -// or clipping off existing ones as needed. -function adjustView(cm, from, to) { - var display = cm.display, view = display.view; - if (view.length == 0 || from >= display.viewTo || to <= display.viewFrom) { - display.view = buildViewArray(cm, from, to); - display.viewFrom = from; - } else { - if (display.viewFrom > from) - { display.view = buildViewArray(cm, from, display.viewFrom).concat(display.view); } - else if (display.viewFrom < from) - { display.view = display.view.slice(findViewIndex(cm, from)); } - display.viewFrom = from; - if (display.viewTo < to) - { display.view = display.view.concat(buildViewArray(cm, display.viewTo, to)); } - else if (display.viewTo > to) - { display.view = display.view.slice(0, findViewIndex(cm, to)); } - } - display.viewTo = to; -} - -// Count the number of lines in the view whose DOM representation is -// out of date (or nonexistent). -function countDirtyView(cm) { - var view = cm.display.view, dirty = 0; - for (var i = 0; i < view.length; i++) { - var lineView = view[i]; - if (!lineView.hidden && (!lineView.node || lineView.changes)) { ++dirty; } - } - return dirty -} - -// HIGHLIGHT WORKER - -function startWorker(cm, time) { - if (cm.doc.highlightFrontier < cm.display.viewTo) - { cm.state.highlight.set(time, bind(highlightWorker, cm)); } -} - -function highlightWorker(cm) { - var doc = cm.doc; - if (doc.highlightFrontier >= cm.display.viewTo) { return } - var end = +new Date + cm.options.workTime; - var context = getContextBefore(cm, doc.highlightFrontier); - var changedLines = []; - - doc.iter(context.line, Math.min(doc.first + doc.size, cm.display.viewTo + 500), function (line) { - if (context.line >= cm.display.viewFrom) { // Visible - var oldStyles = line.styles; - var resetState = line.text.length > cm.options.maxHighlightLength ? 
copyState(doc.mode, context.state) : null; - var highlighted = highlightLine(cm, line, context, true); - if (resetState) { context.state = resetState; } - line.styles = highlighted.styles; - var oldCls = line.styleClasses, newCls = highlighted.classes; - if (newCls) { line.styleClasses = newCls; } - else if (oldCls) { line.styleClasses = null; } - var ischange = !oldStyles || oldStyles.length != line.styles.length || - oldCls != newCls && (!oldCls || !newCls || oldCls.bgClass != newCls.bgClass || oldCls.textClass != newCls.textClass); - for (var i = 0; !ischange && i < oldStyles.length; ++i) { ischange = oldStyles[i] != line.styles[i]; } - if (ischange) { changedLines.push(context.line); } - line.stateAfter = context.save(); - context.nextLine(); - } else { - if (line.text.length <= cm.options.maxHighlightLength) - { processLine(cm, line.text, context); } - line.stateAfter = context.line % 5 == 0 ? context.save() : null; - context.nextLine(); - } - if (+new Date > end) { - startWorker(cm, cm.options.workDelay); - return true - } - }); - doc.highlightFrontier = context.line; - doc.modeFrontier = Math.max(doc.modeFrontier, context.line); - if (changedLines.length) { runInOp(cm, function () { - for (var i = 0; i < changedLines.length; i++) - { regLineChange(cm, changedLines[i], "text"); } - }); } -} - -// DISPLAY DRAWING - -var DisplayUpdate = function(cm, viewport, force) { - var display = cm.display; - - this.viewport = viewport; - // Store some values that we'll need later (but don't want to force a relayout for) - this.visible = visibleLines(display, cm.doc, viewport); - this.editorIsHidden = !display.wrapper.offsetWidth; - this.wrapperHeight = display.wrapper.clientHeight; - this.wrapperWidth = display.wrapper.clientWidth; - this.oldDisplayWidth = displayWidth(cm); - this.force = force; - this.dims = getDimensions(cm); - this.events = []; -}; - -DisplayUpdate.prototype.signal = function (emitter, type) { - if (hasHandler(emitter, type)) - { this.events.push(arguments); } -}; -DisplayUpdate.prototype.finish = function () { - var this$1 = this; - - for (var i = 0; i < this.events.length; i++) - { signal.apply(null, this$1.events[i]); } -}; - -function maybeClipScrollbars(cm) { - var display = cm.display; - if (!display.scrollbarsClipped && display.scroller.offsetWidth) { - display.nativeBarWidth = display.scroller.offsetWidth - display.scroller.clientWidth; - display.heightForcer.style.height = scrollGap(cm) + "px"; - display.sizer.style.marginBottom = -display.nativeBarWidth + "px"; - display.sizer.style.borderRightWidth = scrollGap(cm) + "px"; - display.scrollbarsClipped = true; - } -} - -function selectionSnapshot(cm) { - if (cm.hasFocus()) { return null } - var active = activeElt(); - if (!active || !contains(cm.display.lineDiv, active)) { return null } - var result = {activeElt: active}; - if (window.getSelection) { - var sel = window.getSelection(); - if (sel.anchorNode && sel.extend && contains(cm.display.lineDiv, sel.anchorNode)) { - result.anchorNode = sel.anchorNode; - result.anchorOffset = sel.anchorOffset; - result.focusNode = sel.focusNode; - result.focusOffset = sel.focusOffset; - } - } - return result -} - -function restoreSelection(snapshot) { - if (!snapshot || !snapshot.activeElt || snapshot.activeElt == activeElt()) { return } - snapshot.activeElt.focus(); - if (snapshot.anchorNode && contains(document.body, snapshot.anchorNode) && contains(document.body, snapshot.focusNode)) { - var sel = window.getSelection(), range$$1 = document.createRange(); - 
range$$1.setEnd(snapshot.anchorNode, snapshot.anchorOffset); - range$$1.collapse(false); - sel.removeAllRanges(); - sel.addRange(range$$1); - sel.extend(snapshot.focusNode, snapshot.focusOffset); - } -} - -// Does the actual updating of the line display. Bails out -// (returning false) when there is nothing to be done and forced is -// false. -function updateDisplayIfNeeded(cm, update) { - var display = cm.display, doc = cm.doc; - - if (update.editorIsHidden) { - resetView(cm); - return false - } - - // Bail out if the visible area is already rendered and nothing changed. - if (!update.force && - update.visible.from >= display.viewFrom && update.visible.to <= display.viewTo && - (display.updateLineNumbers == null || display.updateLineNumbers >= display.viewTo) && - display.renderedView == display.view && countDirtyView(cm) == 0) - { return false } - - if (maybeUpdateLineNumberWidth(cm)) { - resetView(cm); - update.dims = getDimensions(cm); - } - - // Compute a suitable new viewport (from & to) - var end = doc.first + doc.size; - var from = Math.max(update.visible.from - cm.options.viewportMargin, doc.first); - var to = Math.min(end, update.visible.to + cm.options.viewportMargin); - if (display.viewFrom < from && from - display.viewFrom < 20) { from = Math.max(doc.first, display.viewFrom); } - if (display.viewTo > to && display.viewTo - to < 20) { to = Math.min(end, display.viewTo); } - if (sawCollapsedSpans) { - from = visualLineNo(cm.doc, from); - to = visualLineEndNo(cm.doc, to); - } - - var different = from != display.viewFrom || to != display.viewTo || - display.lastWrapHeight != update.wrapperHeight || display.lastWrapWidth != update.wrapperWidth; - adjustView(cm, from, to); - - display.viewOffset = heightAtLine(getLine(cm.doc, display.viewFrom)); - // Position the mover div to align with the current scroll position - cm.display.mover.style.top = display.viewOffset + "px"; - - var toUpdate = countDirtyView(cm); - if (!different && toUpdate == 0 && !update.force && display.renderedView == display.view && - (display.updateLineNumbers == null || display.updateLineNumbers >= display.viewTo)) - { return false } - - // For big changes, we hide the enclosing element during the - // update, since that speeds up the operations on most browsers. - var selSnapshot = selectionSnapshot(cm); - if (toUpdate > 4) { display.lineDiv.style.display = "none"; } - patchDisplay(cm, display.updateLineNumbers, update.dims); - if (toUpdate > 4) { display.lineDiv.style.display = ""; } - display.renderedView = display.view; - // There might have been a widget with a focused element that got - // hidden or updated, if so re-focus it. - restoreSelection(selSnapshot); - - // Prevent selection and cursors from interfering with the scroll - // width and height. - removeChildren(display.cursorDiv); - removeChildren(display.selectionDiv); - display.gutters.style.height = display.sizer.style.minHeight = 0; - - if (different) { - display.lastWrapHeight = update.wrapperHeight; - display.lastWrapWidth = update.wrapperWidth; - startWorker(cm, 400); - } - - display.updateLineNumbers = null; - - return true -} - -function postUpdateDisplay(cm, update) { - var viewport = update.viewport; - - for (var first = true;; first = false) { - if (!first || !cm.options.lineWrapping || update.oldDisplayWidth == displayWidth(cm)) { - // Clip forced viewport to actual scrollable area. 
- if (viewport && viewport.top != null) - { viewport = {top: Math.min(cm.doc.height + paddingVert(cm.display) - displayHeight(cm), viewport.top)}; } - // Updated line heights might result in the drawn area not - // actually covering the viewport. Keep looping until it does. - update.visible = visibleLines(cm.display, cm.doc, viewport); - if (update.visible.from >= cm.display.viewFrom && update.visible.to <= cm.display.viewTo) - { break } - } - if (!updateDisplayIfNeeded(cm, update)) { break } - updateHeightsInViewport(cm); - var barMeasure = measureForScrollbars(cm); - updateSelection(cm); - updateScrollbars(cm, barMeasure); - setDocumentHeight(cm, barMeasure); - update.force = false; - } - - update.signal(cm, "update", cm); - if (cm.display.viewFrom != cm.display.reportedViewFrom || cm.display.viewTo != cm.display.reportedViewTo) { - update.signal(cm, "viewportChange", cm, cm.display.viewFrom, cm.display.viewTo); - cm.display.reportedViewFrom = cm.display.viewFrom; cm.display.reportedViewTo = cm.display.viewTo; - } -} - -function updateDisplaySimple(cm, viewport) { - var update = new DisplayUpdate(cm, viewport); - if (updateDisplayIfNeeded(cm, update)) { - updateHeightsInViewport(cm); - postUpdateDisplay(cm, update); - var barMeasure = measureForScrollbars(cm); - updateSelection(cm); - updateScrollbars(cm, barMeasure); - setDocumentHeight(cm, barMeasure); - update.finish(); - } -} - -// Sync the actual display DOM structure with display.view, removing -// nodes for lines that are no longer in view, and creating the ones -// that are not there yet, and updating the ones that are out of -// date. -function patchDisplay(cm, updateNumbersFrom, dims) { - var display = cm.display, lineNumbers = cm.options.lineNumbers; - var container = display.lineDiv, cur = container.firstChild; - - function rm(node) { - var next = node.nextSibling; - // Works around a throw-scroll bug in OS X Webkit - if (webkit && mac && cm.display.currentWheelTarget == node) - { node.style.display = "none"; } - else - { node.parentNode.removeChild(node); } - return next - } - - var view = display.view, lineN = display.viewFrom; - // Loop over the elements in the view, syncing cur (the DOM nodes - // in display.lineDiv) with the view as we go. 
- for (var i = 0; i < view.length; i++) { - var lineView = view[i]; - if (lineView.hidden) { - } else if (!lineView.node || lineView.node.parentNode != container) { // Not drawn yet - var node = buildLineElement(cm, lineView, lineN, dims); - container.insertBefore(node, cur); - } else { // Already drawn - while (cur != lineView.node) { cur = rm(cur); } - var updateNumber = lineNumbers && updateNumbersFrom != null && - updateNumbersFrom <= lineN && lineView.lineNumber; - if (lineView.changes) { - if (indexOf(lineView.changes, "gutter") > -1) { updateNumber = false; } - updateLineForChanges(cm, lineView, lineN, dims); - } - if (updateNumber) { - removeChildren(lineView.lineNumber); - lineView.lineNumber.appendChild(document.createTextNode(lineNumberFor(cm.options, lineN))); - } - cur = lineView.node.nextSibling; - } - lineN += lineView.size; - } - while (cur) { cur = rm(cur); } -} - -function updateGutterSpace(cm) { - var width = cm.display.gutters.offsetWidth; - cm.display.sizer.style.marginLeft = width + "px"; -} - -function setDocumentHeight(cm, measure) { - cm.display.sizer.style.minHeight = measure.docHeight + "px"; - cm.display.heightForcer.style.top = measure.docHeight + "px"; - cm.display.gutters.style.height = (measure.docHeight + cm.display.barHeight + scrollGap(cm)) + "px"; -} - -// Rebuild the gutter elements, ensure the margin to the left of the -// code matches their width. -function updateGutters(cm) { - var gutters = cm.display.gutters, specs = cm.options.gutters; - removeChildren(gutters); - var i = 0; - for (; i < specs.length; ++i) { - var gutterClass = specs[i]; - var gElt = gutters.appendChild(elt("div", null, "CodeMirror-gutter " + gutterClass)); - if (gutterClass == "CodeMirror-linenumbers") { - cm.display.lineGutter = gElt; - gElt.style.width = (cm.display.lineNumWidth || 1) + "px"; - } - } - gutters.style.display = i ? "" : "none"; - updateGutterSpace(cm); -} - -// Make sure the gutters options contains the element -// "CodeMirror-linenumbers" when the lineNumbers option is true. -function setGuttersForLineNumbers(options) { - var found = indexOf(options.gutters, "CodeMirror-linenumbers"); - if (found == -1 && options.lineNumbers) { - options.gutters = options.gutters.concat(["CodeMirror-linenumbers"]); - } else if (found > -1 && !options.lineNumbers) { - options.gutters = options.gutters.slice(0); - options.gutters.splice(found, 1); - } -} - -// Since the delta values reported on mouse wheel events are -// unstandardized between browsers and even browser versions, and -// generally horribly unpredictable, this code starts by measuring -// the scroll effect that the first few mouse wheel events have, -// and, from that, detects the way it can convert deltas to pixel -// offsets afterwards. -// -// The reason we want to know the amount a wheel event will scroll -// is that it gives us a chance to update the display before the -// actual scrolling happens, reducing flickering. - -var wheelSamples = 0; -var wheelPixelsPerUnit = null; -// Fill in a browser-detected starting value on browsers where we -// know one. These don't have to be accurate -- the result of them -// being wrong would just be a slight flicker on the first wheel -// scroll (if it is large enough). 
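// The browser-specific values seeded below are refined by averaging in
// observed samples (see onScrollWheel further down). A worked sketch of the
// update rule with hypothetical numbers: from an estimate of 15 px/unit
// built on 1 sample, observing a new sample of 9 gives
// (15 * 1 + 9) / (1 + 1) = 12 px/unit for the next wheel event.
function exampleRefineEstimate(estimate, sampleCount, sample) {
  return (estimate * sampleCount + sample) / (sampleCount + 1);
}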
-if (ie) { wheelPixelsPerUnit = -.53; } -else if (gecko) { wheelPixelsPerUnit = 15; } -else if (chrome) { wheelPixelsPerUnit = -.7; } -else if (safari) { wheelPixelsPerUnit = -1/3; } - -function wheelEventDelta(e) { - var dx = e.wheelDeltaX, dy = e.wheelDeltaY; - if (dx == null && e.detail && e.axis == e.HORIZONTAL_AXIS) { dx = e.detail; } - if (dy == null && e.detail && e.axis == e.VERTICAL_AXIS) { dy = e.detail; } - else if (dy == null) { dy = e.wheelDelta; } - return {x: dx, y: dy} -} -function wheelEventPixels(e) { - var delta = wheelEventDelta(e); - delta.x *= wheelPixelsPerUnit; - delta.y *= wheelPixelsPerUnit; - return delta -} - -function onScrollWheel(cm, e) { - var delta = wheelEventDelta(e), dx = delta.x, dy = delta.y; - - var display = cm.display, scroll = display.scroller; - // Quit if there's nothing to scroll here - var canScrollX = scroll.scrollWidth > scroll.clientWidth; - var canScrollY = scroll.scrollHeight > scroll.clientHeight; - if (!(dx && canScrollX || dy && canScrollY)) { return } - - // Webkit browsers on OS X abort momentum scrolls when the target - // of the scroll event is removed from the scrollable element. - // This hack (see related code in patchDisplay) makes sure the - // element is kept around. - if (dy && mac && webkit) { - outer: for (var cur = e.target, view = display.view; cur != scroll; cur = cur.parentNode) { - for (var i = 0; i < view.length; i++) { - if (view[i].node == cur) { - cm.display.currentWheelTarget = cur; - break outer - } - } - } - } - - // On some browsers, horizontal scrolling will cause redraws to - // happen before the gutter has been realigned, causing it to - // wriggle around in a most unseemly way. When we have an - // estimated pixels/delta value, we just handle horizontal - // scrolling entirely here. It'll be slightly off from native, but - // better than glitching out. - if (dx && !gecko && !presto && wheelPixelsPerUnit != null) { - if (dy && canScrollY) - { updateScrollTop(cm, Math.max(0, scroll.scrollTop + dy * wheelPixelsPerUnit)); } - setScrollLeft(cm, Math.max(0, scroll.scrollLeft + dx * wheelPixelsPerUnit)); - // Only prevent default scrolling if vertical scrolling is - // actually possible. Otherwise, it causes vertical scroll - // jitter on OSX trackpads when deltaX is small and deltaY - // is large (issue #3579) - if (!dy || (dy && canScrollY)) - { e_preventDefault(e); } - display.wheelStartX = null; // Abort measurement, if in progress - return - } - - // 'Project' the visible viewport to cover the area that is being - // scrolled into view (if we know enough to estimate it). 
- if (dy && wheelPixelsPerUnit != null) { - var pixels = dy * wheelPixelsPerUnit; - var top = cm.doc.scrollTop, bot = top + display.wrapper.clientHeight; - if (pixels < 0) { top = Math.max(0, top + pixels - 50); } - else { bot = Math.min(cm.doc.height, bot + pixels + 50); } - updateDisplaySimple(cm, {top: top, bottom: bot}); - } - - if (wheelSamples < 20) { - if (display.wheelStartX == null) { - display.wheelStartX = scroll.scrollLeft; display.wheelStartY = scroll.scrollTop; - display.wheelDX = dx; display.wheelDY = dy; - setTimeout(function () { - if (display.wheelStartX == null) { return } - var movedX = scroll.scrollLeft - display.wheelStartX; - var movedY = scroll.scrollTop - display.wheelStartY; - var sample = (movedY && display.wheelDY && movedY / display.wheelDY) || - (movedX && display.wheelDX && movedX / display.wheelDX); - display.wheelStartX = display.wheelStartY = null; - if (!sample) { return } - wheelPixelsPerUnit = (wheelPixelsPerUnit * wheelSamples + sample) / (wheelSamples + 1); - ++wheelSamples; - }, 200); - } else { - display.wheelDX += dx; display.wheelDY += dy; - } - } -} - -// Selection objects are immutable. A new one is created every time -// the selection changes. A selection is one or more non-overlapping -// (and non-touching) ranges, sorted, and an integer that indicates -// which one is the primary selection (the one that's scrolled into -// view, that getCursor returns, etc). -var Selection = function(ranges, primIndex) { - this.ranges = ranges; - this.primIndex = primIndex; -}; - -Selection.prototype.primary = function () { return this.ranges[this.primIndex] }; - -Selection.prototype.equals = function (other) { - var this$1 = this; - - if (other == this) { return true } - if (other.primIndex != this.primIndex || other.ranges.length != this.ranges.length) { return false } - for (var i = 0; i < this.ranges.length; i++) { - var here = this$1.ranges[i], there = other.ranges[i]; - if (!equalCursorPos(here.anchor, there.anchor) || !equalCursorPos(here.head, there.head)) { return false } - } - return true -}; - -Selection.prototype.deepCopy = function () { - var this$1 = this; - - var out = []; - for (var i = 0; i < this.ranges.length; i++) - { out[i] = new Range(copyPos(this$1.ranges[i].anchor), copyPos(this$1.ranges[i].head)); } - return new Selection(out, this.primIndex) -}; - -Selection.prototype.somethingSelected = function () { - var this$1 = this; - - for (var i = 0; i < this.ranges.length; i++) - { if (!this$1.ranges[i].empty()) { return true } } - return false -}; - -Selection.prototype.contains = function (pos, end) { - var this$1 = this; - - if (!end) { end = pos; } - for (var i = 0; i < this.ranges.length; i++) { - var range = this$1.ranges[i]; - if (cmp(end, range.from()) >= 0 && cmp(pos, range.to()) <= 0) - { return i } - } - return -1 -}; - -var Range = function(anchor, head) { - this.anchor = anchor; this.head = head; -}; - -Range.prototype.from = function () { return minPos(this.anchor, this.head) }; -Range.prototype.to = function () { return maxPos(this.anchor, this.head) }; -Range.prototype.empty = function () { return this.head.line == this.anchor.line && this.head.ch == this.anchor.ch }; - -// Take an unsorted, potentially overlapping set of ranges, and -// build a selection out of it. 'Consumes' ranges array (modifying -// it). 
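// An illustrative case for normalizeSelection below (hypothetical
// positions): two overlapping ranges collapse into one, and primIndex is
// remapped so the primary selection survives the sort-and-merge.
function exampleNormalize() {
  var merged = normalizeSelection([new Range(Pos(0, 5), Pos(0, 9)),
                                   new Range(Pos(0, 1), Pos(0, 6))], 0);
  return merged.ranges.length; // 1 -- merged into a single Pos(0,1)..Pos(0,9) range
}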
-function normalizeSelection(ranges, primIndex) {
- var prim = ranges[primIndex];
- ranges.sort(function (a, b) { return cmp(a.from(), b.from()); });
- primIndex = indexOf(ranges, prim);
- for (var i = 1; i < ranges.length; i++) {
- var cur = ranges[i], prev = ranges[i - 1];
- if (cmp(prev.to(), cur.from()) >= 0) {
- var from = minPos(prev.from(), cur.from()), to = maxPos(prev.to(), cur.to());
- var inv = prev.empty() ? cur.from() == cur.head : prev.from() == prev.head;
- if (i <= primIndex) { --primIndex; }
- ranges.splice(--i, 2, new Range(inv ? to : from, inv ? from : to));
- }
- }
- return new Selection(ranges, primIndex)
-}
-
-function simpleSelection(anchor, head) {
- return new Selection([new Range(anchor, head || anchor)], 0)
-}
-
-// Compute the position of the end of a change (its 'to' property
-// refers to the pre-change end).
-function changeEnd(change) {
- if (!change.text) { return change.to }
- return Pos(change.from.line + change.text.length - 1,
- lst(change.text).length + (change.text.length == 1 ? change.from.ch : 0))
-}
-
-// Adjust a position to refer to the post-change position of the
-// same text, or the end of the change if the change covers it.
-function adjustForChange(pos, change) {
- if (cmp(pos, change.from) < 0) { return pos }
- if (cmp(pos, change.to) <= 0) { return changeEnd(change) }
-
- var line = pos.line + change.text.length - (change.to.line - change.from.line) - 1, ch = pos.ch;
- if (pos.line == change.to.line) { ch += changeEnd(change).ch - change.to.ch; }
- return Pos(line, ch)
-}
-
-function computeSelAfterChange(doc, change) {
- var out = [];
- for (var i = 0; i < doc.sel.ranges.length; i++) {
- var range = doc.sel.ranges[i];
- out.push(new Range(adjustForChange(range.anchor, change),
- adjustForChange(range.head, change)));
- }
- return normalizeSelection(out, doc.sel.primIndex)
-}
-
-function offsetPos(pos, old, nw) {
- if (pos.line == old.line)
- { return Pos(nw.line, pos.ch - old.ch + nw.ch) }
- else
- { return Pos(nw.line + (pos.line - old.line), pos.ch) }
-}
-
-// Used by replaceSelections to allow moving the selection to the
-// start or around the replaced text. Hint may be "start" or "around".
-function computeReplacedSel(doc, changes, hint) {
- var out = [];
- var oldPrev = Pos(doc.first, 0), newPrev = oldPrev;
- for (var i = 0; i < changes.length; i++) {
- var change = changes[i];
- var from = offsetPos(change.from, oldPrev, newPrev);
- var to = offsetPos(changeEnd(change), oldPrev, newPrev);
- oldPrev = change.to;
- newPrev = to;
- if (hint == "around") {
- var range = doc.sel.ranges[i], inv = cmp(range.head, range.anchor) < 0;
- out[i] = new Range(inv ? to : from, inv ? from : to);
- } else {
- out[i] = new Range(from, from);
- }
- }
- return new Selection(out, doc.sel.primIndex)
-}
-
-// Used to get the editor into a consistent state again when options change.
-
-function loadMode(cm) {
- cm.doc.mode = getMode(cm.options, cm.doc.modeOption);
- resetModeState(cm);
-}
-
-function resetModeState(cm) {
- cm.doc.iter(function (line) {
- if (line.stateAfter) { line.stateAfter = null; }
- if (line.styles) { line.styles = null; }
- });
- cm.doc.modeFrontier = cm.doc.highlightFrontier = cm.doc.first;
- startWorker(cm, 100);
- cm.state.modeGen++;
- if (cm.curOp) { regChange(cm); }
-}
-
-// DOCUMENT DATA STRUCTURE
-
-// By default, updates that start and end at the beginning of a line
-// are treated specially, in order to make the association of line
-// widgets and marker elements with the text behave more intuitively.
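// For instance (hypothetical change object): deleting line 3 outright is
// expressed as a change from Pos(3,0) to Pos(4,0) whose replacement text is
// [""], which isWholeLineUpdate below recognizes, keeping widgets and
// markers attached to the line objects they belong to.
function exampleWholeLineDelete(doc) {
  return isWholeLineUpdate(doc, {from: Pos(3, 0), to: Pos(4, 0), text: [""]});
}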
-function isWholeLineUpdate(doc, change) { - return change.from.ch == 0 && change.to.ch == 0 && lst(change.text) == "" && - (!doc.cm || doc.cm.options.wholeLineUpdateBefore) -} - -// Perform a change on the document data structure. -function updateDoc(doc, change, markedSpans, estimateHeight$$1) { - function spansFor(n) {return markedSpans ? markedSpans[n] : null} - function update(line, text, spans) { - updateLine(line, text, spans, estimateHeight$$1); - signalLater(line, "change", line, change); - } - function linesFor(start, end) { - var result = []; - for (var i = start; i < end; ++i) - { result.push(new Line(text[i], spansFor(i), estimateHeight$$1)); } - return result - } - - var from = change.from, to = change.to, text = change.text; - var firstLine = getLine(doc, from.line), lastLine = getLine(doc, to.line); - var lastText = lst(text), lastSpans = spansFor(text.length - 1), nlines = to.line - from.line; - - // Adjust the line structure - if (change.full) { - doc.insert(0, linesFor(0, text.length)); - doc.remove(text.length, doc.size - text.length); - } else if (isWholeLineUpdate(doc, change)) { - // This is a whole-line replace. Treated specially to make - // sure line objects move the way they are supposed to. - var added = linesFor(0, text.length - 1); - update(lastLine, lastLine.text, lastSpans); - if (nlines) { doc.remove(from.line, nlines); } - if (added.length) { doc.insert(from.line, added); } - } else if (firstLine == lastLine) { - if (text.length == 1) { - update(firstLine, firstLine.text.slice(0, from.ch) + lastText + firstLine.text.slice(to.ch), lastSpans); - } else { - var added$1 = linesFor(1, text.length - 1); - added$1.push(new Line(lastText + firstLine.text.slice(to.ch), lastSpans, estimateHeight$$1)); - update(firstLine, firstLine.text.slice(0, from.ch) + text[0], spansFor(0)); - doc.insert(from.line + 1, added$1); - } - } else if (text.length == 1) { - update(firstLine, firstLine.text.slice(0, from.ch) + text[0] + lastLine.text.slice(to.ch), spansFor(0)); - doc.remove(from.line + 1, nlines); - } else { - update(firstLine, firstLine.text.slice(0, from.ch) + text[0], spansFor(0)); - update(lastLine, lastText + lastLine.text.slice(to.ch), lastSpans); - var added$2 = linesFor(1, text.length - 1); - if (nlines > 1) { doc.remove(from.line + 1, nlines - 1); } - doc.insert(from.line + 1, added$2); - } - - signalLater(doc, "change", doc, change); -} - -// Call f for all linked documents. -function linkedDocs(doc, f, sharedHistOnly) { - function propagate(doc, skip, sharedHist) { - if (doc.linked) { for (var i = 0; i < doc.linked.length; ++i) { - var rel = doc.linked[i]; - if (rel.doc == skip) { continue } - var shared = sharedHist && rel.sharedHist; - if (sharedHistOnly && !shared) { continue } - f(rel.doc, shared); - propagate(rel.doc, doc, shared); - } } - } - propagate(doc, null, true); -} - -// Attach a document to an editor. -function attachDoc(cm, doc) { - if (doc.cm) { throw new Error("This document is already in use.") } - cm.doc = doc; - doc.cm = cm; - estimateLineHeights(cm); - loadMode(cm); - setDirectionClass(cm); - if (!cm.options.lineWrapping) { findMaxLine(cm); } - cm.options.mode = doc.modeOption; - regChange(cm); -} - -function setDirectionClass(cm) { - (cm.doc.direction == "rtl" ? addClass : rmClass)(cm.display.lineDiv, "CodeMirror-rtl"); -} - -function directionChanged(cm) { - runInOp(cm, function () { - setDirectionClass(cm); - regChange(cm); - }); -} - -function History(startGen) { - // Arrays of change events and selections. 
Doing something adds an - // event to done and clears undo. Undoing moves events from done - // to undone, redoing moves them in the other direction. - this.done = []; this.undone = []; - this.undoDepth = Infinity; - // Used to track when changes can be merged into a single undo - // event - this.lastModTime = this.lastSelTime = 0; - this.lastOp = this.lastSelOp = null; - this.lastOrigin = this.lastSelOrigin = null; - // Used by the isClean() method - this.generation = this.maxGeneration = startGen || 1; -} - -// Create a history change event from an updateDoc-style change -// object. -function historyChangeFromChange(doc, change) { - var histChange = {from: copyPos(change.from), to: changeEnd(change), text: getBetween(doc, change.from, change.to)}; - attachLocalSpans(doc, histChange, change.from.line, change.to.line + 1); - linkedDocs(doc, function (doc) { return attachLocalSpans(doc, histChange, change.from.line, change.to.line + 1); }, true); - return histChange -} - -// Pop all selection events off the end of a history array. Stop at -// a change event. -function clearSelectionEvents(array) { - while (array.length) { - var last = lst(array); - if (last.ranges) { array.pop(); } - else { break } - } -} - -// Find the top change event in the history. Pop off selection -// events that are in the way. -function lastChangeEvent(hist, force) { - if (force) { - clearSelectionEvents(hist.done); - return lst(hist.done) - } else if (hist.done.length && !lst(hist.done).ranges) { - return lst(hist.done) - } else if (hist.done.length > 1 && !hist.done[hist.done.length - 2].ranges) { - hist.done.pop(); - return lst(hist.done) - } -} - -// Register a change in the history. Merges changes that are within -// a single operation, or are close together with an origin that -// allows merging (starting with "+") into a single event. -function addChangeToHistory(doc, change, selAfter, opId) { - var hist = doc.history; - hist.undone.length = 0; - var time = +new Date, cur; - var last; - - if ((hist.lastOp == opId || - hist.lastOrigin == change.origin && change.origin && - ((change.origin.charAt(0) == "+" && doc.cm && hist.lastModTime > time - doc.cm.options.historyEventDelay) || - change.origin.charAt(0) == "*")) && - (cur = lastChangeEvent(hist, hist.lastOp == opId))) { - // Merge this change into the last event - last = lst(cur.changes); - if (cmp(change.from, change.to) == 0 && cmp(change.from, last.to) == 0) { - // Optimized case for simple insertion -- don't want to add - // new changesets for every character typed - last.to = changeEnd(change); - } else { - // Add new sub-event - cur.changes.push(historyChangeFromChange(doc, change)); - } - } else { - // Can not be merged, start a new event. 
- var before = lst(hist.done); - if (!before || !before.ranges) - { pushSelectionToHistory(doc.sel, hist.done); } - cur = {changes: [historyChangeFromChange(doc, change)], - generation: hist.generation}; - hist.done.push(cur); - while (hist.done.length > hist.undoDepth) { - hist.done.shift(); - if (!hist.done[0].ranges) { hist.done.shift(); } - } - } - hist.done.push(selAfter); - hist.generation = ++hist.maxGeneration; - hist.lastModTime = hist.lastSelTime = time; - hist.lastOp = hist.lastSelOp = opId; - hist.lastOrigin = hist.lastSelOrigin = change.origin; - - if (!last) { signal(doc, "historyAdded"); } -} - -function selectionEventCanBeMerged(doc, origin, prev, sel) { - var ch = origin.charAt(0); - return ch == "*" || - ch == "+" && - prev.ranges.length == sel.ranges.length && - prev.somethingSelected() == sel.somethingSelected() && - new Date - doc.history.lastSelTime <= (doc.cm ? doc.cm.options.historyEventDelay : 500) -} - -// Called whenever the selection changes, sets the new selection as -// the pending selection in the history, and pushes the old pending -// selection into the 'done' array when it was significantly -// different (in number of selected ranges, emptiness, or time). -function addSelectionToHistory(doc, sel, opId, options) { - var hist = doc.history, origin = options && options.origin; - - // A new event is started when the previous origin does not match - // the current, or the origins don't allow matching. Origins - // starting with * are always merged, those starting with + are - // merged when similar and close together in time. - if (opId == hist.lastSelOp || - (origin && hist.lastSelOrigin == origin && - (hist.lastModTime == hist.lastSelTime && hist.lastOrigin == origin || - selectionEventCanBeMerged(doc, origin, lst(hist.done), sel)))) - { hist.done[hist.done.length - 1] = sel; } - else - { pushSelectionToHistory(sel, hist.done); } - - hist.lastSelTime = +new Date; - hist.lastSelOrigin = origin; - hist.lastSelOp = opId; - if (options && options.clearRedo !== false) - { clearSelectionEvents(hist.undone); } -} - -function pushSelectionToHistory(sel, dest) { - var top = lst(dest); - if (!(top && top.ranges && top.equals(sel))) - { dest.push(sel); } -} - -// Used to store marked span information in the history. -function attachLocalSpans(doc, change, from, to) { - var existing = change["spans_" + doc.id], n = 0; - doc.iter(Math.max(doc.first, from), Math.min(doc.first + doc.size, to), function (line) { - if (line.markedSpans) - { (existing || (existing = change["spans_" + doc.id] = {}))[n] = line.markedSpans; } - ++n; - }); -} - -// When un/re-doing restores text containing marked spans, those -// that have been explicitly cleared should not be restored. -function removeClearedSpans(spans) { - if (!spans) { return null } - var out; - for (var i = 0; i < spans.length; ++i) { - if (spans[i].marker.explicitlyCleared) { if (!out) { out = spans.slice(0, i); } } - else if (out) { out.push(spans[i]); } - } - return !out ? spans : out.length ? out : null -} - -// Retrieve and filter the old marked spans stored in a change event. -function getOldSpans(doc, change) { - var found = change["spans_" + doc.id]; - if (!found) { return null } - var nw = []; - for (var i = 0; i < change.text.length; ++i) - { nw.push(removeClearedSpans(found[i])); } - return nw -} - -// Used for un/re-doing changes from the history. 
Combines the -// result of computing the existing spans with the set of spans that -// existed in the history (so that deleting around a span and then -// undoing brings back the span). -function mergeOldSpans(doc, change) { - var old = getOldSpans(doc, change); - var stretched = stretchSpansOverChange(doc, change); - if (!old) { return stretched } - if (!stretched) { return old } - - for (var i = 0; i < old.length; ++i) { - var oldCur = old[i], stretchCur = stretched[i]; - if (oldCur && stretchCur) { - spans: for (var j = 0; j < stretchCur.length; ++j) { - var span = stretchCur[j]; - for (var k = 0; k < oldCur.length; ++k) - { if (oldCur[k].marker == span.marker) { continue spans } } - oldCur.push(span); - } - } else if (stretchCur) { - old[i] = stretchCur; - } - } - return old -} - -// Used both to provide a JSON-safe object in .getHistory, and, when -// detaching a document, to split the history in two -function copyHistoryArray(events, newGroup, instantiateSel) { - var copy = []; - for (var i = 0; i < events.length; ++i) { - var event = events[i]; - if (event.ranges) { - copy.push(instantiateSel ? Selection.prototype.deepCopy.call(event) : event); - continue - } - var changes = event.changes, newChanges = []; - copy.push({changes: newChanges}); - for (var j = 0; j < changes.length; ++j) { - var change = changes[j], m = (void 0); - newChanges.push({from: change.from, to: change.to, text: change.text}); - if (newGroup) { for (var prop in change) { if (m = prop.match(/^spans_(\d+)$/)) { - if (indexOf(newGroup, Number(m[1])) > -1) { - lst(newChanges)[prop] = change[prop]; - delete change[prop]; - } - } } } - } - } - return copy -} - -// The 'scroll' parameter given to many of these indicated whether -// the new cursor position should be scrolled into view after -// modifying the selection. - -// If shift is held or the extend flag is set, extends a range to -// include a given position (and optionally a second position). -// Otherwise, simply returns the range between the given positions. -// Used for cursor motion and such. -function extendRange(range, head, other, extend) { - if (extend) { - var anchor = range.anchor; - if (other) { - var posBefore = cmp(head, anchor) < 0; - if (posBefore != (cmp(other, anchor) < 0)) { - anchor = head; - head = other; - } else if (posBefore != (cmp(head, other) < 0)) { - head = other; - } - } - return new Range(anchor, head) - } else { - return new Range(other || head, head) - } -} - -// Extend the primary selection range, discard the rest. -function extendSelection(doc, head, other, options, extend) { - if (extend == null) { extend = doc.cm && (doc.cm.display.shift || doc.extend); } - setSelection(doc, new Selection([extendRange(doc.sel.primary(), head, other, extend)], 0), options); -} - -// Extend all selections (pos is an array of selections with length -// equal the number of selections) -function extendSelections(doc, heads, options) { - var out = []; - var extend = doc.cm && (doc.cm.display.shift || doc.extend); - for (var i = 0; i < doc.sel.ranges.length; i++) - { out[i] = extendRange(doc.sel.ranges[i], heads[i], null, extend); } - var newSel = normalizeSelection(out, doc.sel.primIndex); - setSelection(doc, newSel, options); -} - -// Updates a single range in the selection. -function replaceOneSelection(doc, i, range, options) { - var ranges = doc.sel.ranges.slice(0); - ranges[i] = range; - setSelection(doc, normalizeSelection(ranges, doc.sel.primIndex), options); -} - -// Reset the selection to a single range. 
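// A sketch of extendRange above (hypothetical positions): with extend set,
// the existing anchor is kept and only the head moves, which is the
// shift-click behaviour; without extend, a fresh collapsed range is
// returned at the new position.
function exampleExtend(range) {
  var extended = extendRange(range, Pos(5, 0), null, true);  // keeps range.anchor as anchor
  var replaced = extendRange(range, Pos(5, 0), null, false); // collapsed Range at Pos(5,0)
  return [extended, replaced];
}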
-function setSimpleSelection(doc, anchor, head, options) {
- setSelection(doc, simpleSelection(anchor, head), options);
-}
-
-// Give beforeSelectionChange handlers a chance to influence a
-// selection update.
-function filterSelectionChange(doc, sel, options) {
- var obj = {
- ranges: sel.ranges,
- update: function(ranges) {
- var this$1 = this;
-
- this.ranges = [];
- for (var i = 0; i < ranges.length; i++)
- { this$1.ranges[i] = new Range(clipPos(doc, ranges[i].anchor),
- clipPos(doc, ranges[i].head)); }
- },
- origin: options && options.origin
- };
- signal(doc, "beforeSelectionChange", doc, obj);
- if (doc.cm) { signal(doc.cm, "beforeSelectionChange", doc.cm, obj); }
- if (obj.ranges != sel.ranges) { return normalizeSelection(obj.ranges, obj.ranges.length - 1) }
- else { return sel }
-}
-
-function setSelectionReplaceHistory(doc, sel, options) {
- var done = doc.history.done, last = lst(done);
- if (last && last.ranges) {
- done[done.length - 1] = sel;
- setSelectionNoUndo(doc, sel, options);
- } else {
- setSelection(doc, sel, options);
- }
-}
-
-// Set a new selection.
-function setSelection(doc, sel, options) {
- setSelectionNoUndo(doc, sel, options);
- addSelectionToHistory(doc, doc.sel, doc.cm ? doc.cm.curOp.id : NaN, options);
-}
-
-function setSelectionNoUndo(doc, sel, options) {
- if (hasHandler(doc, "beforeSelectionChange") || doc.cm && hasHandler(doc.cm, "beforeSelectionChange"))
- { sel = filterSelectionChange(doc, sel, options); }
-
- var bias = options && options.bias ||
- (cmp(sel.primary().head, doc.sel.primary().head) < 0 ? -1 : 1);
- setSelectionInner(doc, skipAtomicInSelection(doc, sel, bias, true));
-
- if (!(options && options.scroll === false) && doc.cm)
- { ensureCursorVisible(doc.cm); }
-}
-
-function setSelectionInner(doc, sel) {
- if (sel.equals(doc.sel)) { return }
-
- doc.sel = sel;
-
- if (doc.cm) {
- doc.cm.curOp.updateInput = doc.cm.curOp.selectionChanged = true;
- signalCursorActivity(doc.cm);
- }
- signalLater(doc, "cursorActivity", doc);
-}
-
-// Verify that the selection does not partially select any atomic
-// marked ranges.
-function reCheckSelection(doc) {
- setSelectionInner(doc, skipAtomicInSelection(doc, doc.sel, null, false));
-}
-
-// Return a selection that does not partially select any atomic
-// ranges.
-function skipAtomicInSelection(doc, sel, bias, mayClear) {
- var out;
- for (var i = 0; i < sel.ranges.length; i++) {
- var range = sel.ranges[i];
- var old = sel.ranges.length == doc.sel.ranges.length && doc.sel.ranges[i];
- var newAnchor = skipAtomic(doc, range.anchor, old && old.anchor, bias, mayClear);
- var newHead = skipAtomic(doc, range.head, old && old.head, bias, mayClear);
- if (out || newAnchor != range.anchor || newHead != range.head) {
- if (!out) { out = sel.ranges.slice(0, i); }
- out[i] = new Range(newAnchor, newHead);
- }
- }
- return out ? normalizeSelection(out, sel.primIndex) : sel
-}
-
-function skipAtomicInner(doc, pos, oldPos, dir, mayClear) {
- var line = getLine(doc, pos.line);
- if (line.markedSpans) { for (var i = 0; i < line.markedSpans.length; ++i) {
- var sp = line.markedSpans[i], m = sp.marker;
- if ((sp.from == null || (m.inclusiveLeft ? sp.from <= pos.ch : sp.from < pos.ch)) &&
- (sp.to == null || (m.inclusiveRight ? sp.to >= pos.ch : sp.to > pos.ch))) {
- if (mayClear) {
- signal(m, "beforeCursorEnter");
- if (m.explicitlyCleared) {
- if (!line.markedSpans) { break }
- else {--i; continue}
- }
- }
- if (!m.atomic) { continue }
-
- if (oldPos) {
- var near = m.find(dir < 0 ?
1 : -1), diff = (void 0); - if (dir < 0 ? m.inclusiveRight : m.inclusiveLeft) - { near = movePos(doc, near, -dir, near && near.line == pos.line ? line : null); } - if (near && near.line == pos.line && (diff = cmp(near, oldPos)) && (dir < 0 ? diff < 0 : diff > 0)) - { return skipAtomicInner(doc, near, pos, dir, mayClear) } - } - - var far = m.find(dir < 0 ? -1 : 1); - if (dir < 0 ? m.inclusiveLeft : m.inclusiveRight) - { far = movePos(doc, far, dir, far.line == pos.line ? line : null); } - return far ? skipAtomicInner(doc, far, pos, dir, mayClear) : null - } - } } - return pos -} - -// Ensure a given position is not inside an atomic range. -function skipAtomic(doc, pos, oldPos, bias, mayClear) { - var dir = bias || 1; - var found = skipAtomicInner(doc, pos, oldPos, dir, mayClear) || - (!mayClear && skipAtomicInner(doc, pos, oldPos, dir, true)) || - skipAtomicInner(doc, pos, oldPos, -dir, mayClear) || - (!mayClear && skipAtomicInner(doc, pos, oldPos, -dir, true)); - if (!found) { - doc.cantEdit = true; - return Pos(doc.first, 0) - } - return found -} - -function movePos(doc, pos, dir, line) { - if (dir < 0 && pos.ch == 0) { - if (pos.line > doc.first) { return clipPos(doc, Pos(pos.line - 1)) } - else { return null } - } else if (dir > 0 && pos.ch == (line || getLine(doc, pos.line)).text.length) { - if (pos.line < doc.first + doc.size - 1) { return Pos(pos.line + 1, 0) } - else { return null } - } else { - return new Pos(pos.line, pos.ch + dir) - } -} - -function selectAll(cm) { - cm.setSelection(Pos(cm.firstLine(), 0), Pos(cm.lastLine()), sel_dontScroll); -} - -// UPDATING - -// Allow "beforeChange" event handlers to influence a change -function filterChange(doc, change, update) { - var obj = { - canceled: false, - from: change.from, - to: change.to, - text: change.text, - origin: change.origin, - cancel: function () { return obj.canceled = true; } - }; - if (update) { obj.update = function (from, to, text, origin) { - if (from) { obj.from = clipPos(doc, from); } - if (to) { obj.to = clipPos(doc, to); } - if (text) { obj.text = text; } - if (origin !== undefined) { obj.origin = origin; } - }; } - signal(doc, "beforeChange", doc, obj); - if (doc.cm) { signal(doc.cm, "beforeChange", doc.cm, obj); } - - if (obj.canceled) { return null } - return {from: obj.from, to: obj.to, text: obj.text, origin: obj.origin} -} - -// Apply a change to a document, and add it to the document's -// history, and propagating it to all linked documents. -function makeChange(doc, change, ignoreReadOnly) { - if (doc.cm) { - if (!doc.cm.curOp) { return operation(doc.cm, makeChange)(doc, change, ignoreReadOnly) } - if (doc.cm.state.suppressEdits) { return } - } - - if (hasHandler(doc, "beforeChange") || doc.cm && hasHandler(doc.cm, "beforeChange")) { - change = filterChange(doc, change, true); - if (!change) { return } - } - - // Possibly split or suppress the update based on the presence - // of read-only spans in its range. - var split = sawReadOnlySpans && !ignoreReadOnly && removeReadOnlyRanges(doc, change.from, change.to); - if (split) { - for (var i = split.length - 1; i >= 0; --i) - { makeChangeInner(doc, {from: split[i].from, to: split[i].to, text: i ? [""] : change.text, origin: change.origin}); } - } else { - makeChangeInner(doc, change); - } -} - -function makeChangeInner(doc, change) { - if (change.text.length == 1 && change.text[0] == "" && cmp(change.from, change.to) == 0) { return } - var selAfter = computeSelAfterChange(doc, change); - addChangeToHistory(doc, change, selAfter, doc.cm ? 
doc.cm.curOp.id : NaN); - - makeChangeSingleDoc(doc, change, selAfter, stretchSpansOverChange(doc, change)); - var rebased = []; - - linkedDocs(doc, function (doc, sharedHist) { - if (!sharedHist && indexOf(rebased, doc.history) == -1) { - rebaseHist(doc.history, change); - rebased.push(doc.history); - } - makeChangeSingleDoc(doc, change, null, stretchSpansOverChange(doc, change)); - }); -} - -// Revert a change stored in a document's history. -function makeChangeFromHistory(doc, type, allowSelectionOnly) { - if (doc.cm && doc.cm.state.suppressEdits && !allowSelectionOnly) { return } - - var hist = doc.history, event, selAfter = doc.sel; - var source = type == "undo" ? hist.done : hist.undone, dest = type == "undo" ? hist.undone : hist.done; - - // Verify that there is a useable event (so that ctrl-z won't - // needlessly clear selection events) - var i = 0; - for (; i < source.length; i++) { - event = source[i]; - if (allowSelectionOnly ? event.ranges && !event.equals(doc.sel) : !event.ranges) - { break } - } - if (i == source.length) { return } - hist.lastOrigin = hist.lastSelOrigin = null; - - for (;;) { - event = source.pop(); - if (event.ranges) { - pushSelectionToHistory(event, dest); - if (allowSelectionOnly && !event.equals(doc.sel)) { - setSelection(doc, event, {clearRedo: false}); - return - } - selAfter = event; - } - else { break } - } - - // Build up a reverse change object to add to the opposite history - // stack (redo when undoing, and vice versa). - var antiChanges = []; - pushSelectionToHistory(selAfter, dest); - dest.push({changes: antiChanges, generation: hist.generation}); - hist.generation = event.generation || ++hist.maxGeneration; - - var filter = hasHandler(doc, "beforeChange") || doc.cm && hasHandler(doc.cm, "beforeChange"); - - var loop = function ( i ) { - var change = event.changes[i]; - change.origin = type; - if (filter && !filterChange(doc, change, false)) { - source.length = 0; - return {} - } - - antiChanges.push(historyChangeFromChange(doc, change)); - - var after = i ? computeSelAfterChange(doc, change) : lst(source); - makeChangeSingleDoc(doc, change, after, mergeOldSpans(doc, change)); - if (!i && doc.cm) { doc.cm.scrollIntoView({from: change.from, to: changeEnd(change)}); } - var rebased = []; - - // Propagate to the linked documents - linkedDocs(doc, function (doc, sharedHist) { - if (!sharedHist && indexOf(rebased, doc.history) == -1) { - rebaseHist(doc.history, change); - rebased.push(doc.history); - } - makeChangeSingleDoc(doc, change, null, mergeOldSpans(doc, change)); - }); - }; - - for (var i$1 = event.changes.length - 1; i$1 >= 0; --i$1) { - var returned = loop( i$1 ); - - if ( returned ) return returned.v; - } -} - -// Sub-views need their line numbers shifted when text is added -// above or below them in the parent document. -function shiftDoc(doc, distance) { - if (distance == 0) { return } - doc.first += distance; - doc.sel = new Selection(map(doc.sel.ranges, function (range) { return new Range( - Pos(range.anchor.line + distance, range.anchor.ch), - Pos(range.head.line + distance, range.head.ch) - ); }), doc.sel.primIndex); - if (doc.cm) { - regChange(doc.cm, doc.first, doc.first - distance, distance); - for (var d = doc.cm.display, l = d.viewFrom; l < d.viewTo; l++) - { regLineChange(doc.cm, l, "gutter"); } - } -} - -// More lower-level change function, handling only a single document -// (not linked ones). 
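// The public entry points into the history machinery above are undo(),
// redo(), and friends; a quick sketch (assumes an editor instance `cm`,
// counts are illustrative):
//
//   cm.replaceRange("hello", CodeMirror.Pos(0, 0))
//   cm.historySize()   // => {undo: 1, redo: 0}
//   cm.undo()
//   cm.historySize()   // => {undo: 0, redo: 1}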
-function makeChangeSingleDoc(doc, change, selAfter, spans) { - if (doc.cm && !doc.cm.curOp) - { return operation(doc.cm, makeChangeSingleDoc)(doc, change, selAfter, spans) } - - if (change.to.line < doc.first) { - shiftDoc(doc, change.text.length - 1 - (change.to.line - change.from.line)); - return - } - if (change.from.line > doc.lastLine()) { return } - - // Clip the change to the size of this doc - if (change.from.line < doc.first) { - var shift = change.text.length - 1 - (doc.first - change.from.line); - shiftDoc(doc, shift); - change = {from: Pos(doc.first, 0), to: Pos(change.to.line + shift, change.to.ch), - text: [lst(change.text)], origin: change.origin}; - } - var last = doc.lastLine(); - if (change.to.line > last) { - change = {from: change.from, to: Pos(last, getLine(doc, last).text.length), - text: [change.text[0]], origin: change.origin}; - } - - change.removed = getBetween(doc, change.from, change.to); - - if (!selAfter) { selAfter = computeSelAfterChange(doc, change); } - if (doc.cm) { makeChangeSingleDocInEditor(doc.cm, change, spans); } - else { updateDoc(doc, change, spans); } - setSelectionNoUndo(doc, selAfter, sel_dontScroll); -} - -// Handle the interaction of a change to a document with the editor -// that this document is part of. -function makeChangeSingleDocInEditor(cm, change, spans) { - var doc = cm.doc, display = cm.display, from = change.from, to = change.to; - - var recomputeMaxLength = false, checkWidthStart = from.line; - if (!cm.options.lineWrapping) { - checkWidthStart = lineNo(visualLine(getLine(doc, from.line))); - doc.iter(checkWidthStart, to.line + 1, function (line) { - if (line == display.maxLine) { - recomputeMaxLength = true; - return true - } - }); - } - - if (doc.sel.contains(change.from, change.to) > -1) - { signalCursorActivity(cm); } - - updateDoc(doc, change, spans, estimateHeight(cm)); - - if (!cm.options.lineWrapping) { - doc.iter(checkWidthStart, from.line + change.text.length, function (line) { - var len = lineLength(line); - if (len > display.maxLineLength) { - display.maxLine = line; - display.maxLineLength = len; - display.maxLineChanged = true; - recomputeMaxLength = false; - } - }); - if (recomputeMaxLength) { cm.curOp.updateMaxLine = true; } - } - - retreatFrontier(doc, from.line); - startWorker(cm, 400); - - var lendiff = change.text.length - (to.line - from.line) - 1; - // Remember that these lines changed, for updating the display - if (change.full) - { regChange(cm); } - else if (from.line == to.line && change.text.length == 1 && !isWholeLineUpdate(cm.doc, change)) - { regLineChange(cm, from.line, "text"); } - else - { regChange(cm, from.line, to.line + 1, lendiff); } - - var changesHandler = hasHandler(cm, "changes"), changeHandler = hasHandler(cm, "change"); - if (changeHandler || changesHandler) { - var obj = { - from: from, to: to, - text: change.text, - removed: change.removed, - origin: change.origin - }; - if (changeHandler) { signalLater(cm, "change", cm, obj); } - if (changesHandler) { (cm.curOp.changeObjs || (cm.curOp.changeObjs = [])).push(obj); } - } - cm.display.selForContextMenu = null; -} - -function replaceRange(doc, code, from, to, origin) { - if (!to) { to = from; } - if (cmp(to, from) < 0) { var assign; - (assign = [to, from], from = assign[0], to = assign[1], assign); } - if (typeof code == "string") { code = doc.splitLines(code); } - makeChange(doc, {from: from, to: to, text: code, origin: origin}); -} - -// Rebasing/resetting history to deal with externally-sourced changes - -function 
rebaseHistSelSingle(pos, from, to, diff) {
-  if (to < pos.line) {
-    pos.line += diff;
-  } else if (from < pos.line) {
-    pos.line = from;
-    pos.ch = 0;
-  }
-}
-
-// Tries to rebase an array of history events given a change in the
-// document. If the change touches the same lines as the event, the
-// event, and everything 'behind' it, is discarded. If the change is
-// before the event, the event's positions are updated. Uses a
-// copy-on-write scheme for the positions, to avoid having to
-// reallocate them all on every rebase, but also avoid problems with
-// shared position objects being unsafely updated.
-function rebaseHistArray(array, from, to, diff) {
-  for (var i = 0; i < array.length; ++i) {
-    var sub = array[i], ok = true;
-    if (sub.ranges) {
-      if (!sub.copied) { sub = array[i] = sub.deepCopy(); sub.copied = true; }
-      for (var j = 0; j < sub.ranges.length; j++) {
-        rebaseHistSelSingle(sub.ranges[j].anchor, from, to, diff);
-        rebaseHistSelSingle(sub.ranges[j].head, from, to, diff);
-      }
-      continue
-    }
-    for (var j$1 = 0; j$1 < sub.changes.length; ++j$1) {
-      var cur = sub.changes[j$1];
-      if (to < cur.from.line) {
-        cur.from = Pos(cur.from.line + diff, cur.from.ch);
-        cur.to = Pos(cur.to.line + diff, cur.to.ch);
-      } else if (from <= cur.to.line) {
-        ok = false;
-        break
-      }
-    }
-    if (!ok) {
-      array.splice(0, i + 1);
-      i = 0;
-    }
-  }
-}
-
-function rebaseHist(hist, change) {
-  var from = change.from.line, to = change.to.line, diff = change.text.length - (to - from) - 1;
-  rebaseHistArray(hist.done, from, to, diff);
-  rebaseHistArray(hist.undone, from, to, diff);
-}
-
-// Utility for applying a change to a line by handle or number,
-// returning the number and optionally registering the line as
-// changed.
-function changeLine(doc, handle, changeType, op) {
-  var no = handle, line = handle;
-  if (typeof handle == "number") { line = getLine(doc, clipLine(doc, handle)); }
-  else { no = lineNo(handle); }
-  if (no == null) { return null }
-  if (op(line, no) && doc.cm) { regLineChange(doc.cm, no, changeType); }
-  return line
-}
-
-// The document is represented as a BTree consisting of leaves, with
-// chunks of lines in them, and branches, with up to ten leaves or
-// other branch nodes below them. The top node is always a branch
-// node, and is the document object itself (meaning it has
-// additional methods and properties).
-//
-// All nodes have parent links. The tree is used both to go from
-// line numbers to line objects, and to go from objects to numbers.
-// It also indexes by height, and is used to convert between height
-// and line object, and to find the total height of the document.
-//
-// See also http://marijnhaverbeke.nl/blog/codemirror-line-tree.html
-
-function LeafChunk(lines) {
-  var this$1 = this;
-
-  this.lines = lines;
-  this.parent = null;
-  var height = 0;
-  for (var i = 0; i < lines.length; ++i) {
-    lines[i].parent = this$1;
-    height += lines[i].height;
-  }
-  this.height = height;
-}
-
-LeafChunk.prototype = {
-  chunkSize: function chunkSize() { return this.lines.length },
-
-  // Remove the n lines at offset 'at'.
-  removeInner: function removeInner(at, n) {
-    var this$1 = this;
-
-    for (var i = at, e = at + n; i < e; ++i) {
-      var line = this$1.lines[i];
-      this$1.height -= line.height;
-      cleanUpLine(line);
-      signalLater(line, "delete");
-    }
-    this.lines.splice(at, n);
-  },
-
-  // Helper used to collapse a small branch into a single leaf.
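// Client code never touches the chunks directly; lookups go through the
// public accessors, which walk this tree in O(log n). A sketch (assumes
// a document instance `doc` with at least 45 lines):
//
//   var handle = doc.getLineHandle(41)   // line number -> line object
//   doc.getLineNumber(handle)            // line object -> 41
//   doc.eachLine(40, 45, function (line) {
//     // visits the five Line objects for lines 40..44
//   })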
- collapse: function collapse(lines) { - lines.push.apply(lines, this.lines); - }, - - // Insert the given array of lines at offset 'at', count them as - // having the given height. - insertInner: function insertInner(at, lines, height) { - var this$1 = this; - - this.height += height; - this.lines = this.lines.slice(0, at).concat(lines).concat(this.lines.slice(at)); - for (var i = 0; i < lines.length; ++i) { lines[i].parent = this$1; } - }, - - // Used to iterate over a part of the tree. - iterN: function iterN(at, n, op) { - var this$1 = this; - - for (var e = at + n; at < e; ++at) - { if (op(this$1.lines[at])) { return true } } - } -}; - -function BranchChunk(children) { - var this$1 = this; - - this.children = children; - var size = 0, height = 0; - for (var i = 0; i < children.length; ++i) { - var ch = children[i]; - size += ch.chunkSize(); height += ch.height; - ch.parent = this$1; - } - this.size = size; - this.height = height; - this.parent = null; -} - -BranchChunk.prototype = { - chunkSize: function chunkSize() { return this.size }, - - removeInner: function removeInner(at, n) { - var this$1 = this; - - this.size -= n; - for (var i = 0; i < this.children.length; ++i) { - var child = this$1.children[i], sz = child.chunkSize(); - if (at < sz) { - var rm = Math.min(n, sz - at), oldHeight = child.height; - child.removeInner(at, rm); - this$1.height -= oldHeight - child.height; - if (sz == rm) { this$1.children.splice(i--, 1); child.parent = null; } - if ((n -= rm) == 0) { break } - at = 0; - } else { at -= sz; } - } - // If the result is smaller than 25 lines, ensure that it is a - // single leaf node. - if (this.size - n < 25 && - (this.children.length > 1 || !(this.children[0] instanceof LeafChunk))) { - var lines = []; - this.collapse(lines); - this.children = [new LeafChunk(lines)]; - this.children[0].parent = this; - } - }, - - collapse: function collapse(lines) { - var this$1 = this; - - for (var i = 0; i < this.children.length; ++i) { this$1.children[i].collapse(lines); } - }, - - insertInner: function insertInner(at, lines, height) { - var this$1 = this; - - this.size += lines.length; - this.height += height; - for (var i = 0; i < this.children.length; ++i) { - var child = this$1.children[i], sz = child.chunkSize(); - if (at <= sz) { - child.insertInner(at, lines, height); - if (child.lines && child.lines.length > 50) { - // To avoid memory thrashing when child.lines is huge (e.g. first view of a large file), it's never spliced. - // Instead, small slices are taken. They're taken in order because sequential memory accesses are fastest. - var remaining = child.lines.length % 25 + 25; - for (var pos = remaining; pos < child.lines.length;) { - var leaf = new LeafChunk(child.lines.slice(pos, pos += 25)); - child.height -= leaf.height; - this$1.children.splice(++i, 0, leaf); - leaf.parent = this$1; - } - child.lines = child.lines.slice(0, remaining); - this$1.maybeSpill(); - } - break - } - at -= sz; - } - }, - - // When a node has grown, check whether it should be split. 
- maybeSpill: function maybeSpill() { - if (this.children.length <= 10) { return } - var me = this; - do { - var spilled = me.children.splice(me.children.length - 5, 5); - var sibling = new BranchChunk(spilled); - if (!me.parent) { // Become the parent node - var copy = new BranchChunk(me.children); - copy.parent = me; - me.children = [copy, sibling]; - me = copy; - } else { - me.size -= sibling.size; - me.height -= sibling.height; - var myIndex = indexOf(me.parent.children, me); - me.parent.children.splice(myIndex + 1, 0, sibling); - } - sibling.parent = me.parent; - } while (me.children.length > 10) - me.parent.maybeSpill(); - }, - - iterN: function iterN(at, n, op) { - var this$1 = this; - - for (var i = 0; i < this.children.length; ++i) { - var child = this$1.children[i], sz = child.chunkSize(); - if (at < sz) { - var used = Math.min(n, sz - at); - if (child.iterN(at, used, op)) { return true } - if ((n -= used) == 0) { break } - at = 0; - } else { at -= sz; } - } - } -}; - -// Line widgets are block elements displayed above or below a line. - -var LineWidget = function(doc, node, options) { - var this$1 = this; - - if (options) { for (var opt in options) { if (options.hasOwnProperty(opt)) - { this$1[opt] = options[opt]; } } } - this.doc = doc; - this.node = node; -}; - -LineWidget.prototype.clear = function () { - var this$1 = this; - - var cm = this.doc.cm, ws = this.line.widgets, line = this.line, no = lineNo(line); - if (no == null || !ws) { return } - for (var i = 0; i < ws.length; ++i) { if (ws[i] == this$1) { ws.splice(i--, 1); } } - if (!ws.length) { line.widgets = null; } - var height = widgetHeight(this); - updateLineHeight(line, Math.max(0, line.height - height)); - if (cm) { - runInOp(cm, function () { - adjustScrollWhenAboveVisible(cm, line, -height); - regLineChange(cm, no, "widget"); - }); - signalLater(cm, "lineWidgetCleared", cm, this, no); - } -}; - -LineWidget.prototype.changed = function () { - var this$1 = this; - - var oldH = this.height, cm = this.doc.cm, line = this.line; - this.height = null; - var diff = widgetHeight(this) - oldH; - if (!diff) { return } - updateLineHeight(line, line.height + diff); - if (cm) { - runInOp(cm, function () { - cm.curOp.forceUpdate = true; - adjustScrollWhenAboveVisible(cm, line, diff); - signalLater(cm, "lineWidgetChanged", cm, this$1, lineNo(line)); - }); - } -}; -eventMixin(LineWidget); - -function adjustScrollWhenAboveVisible(cm, line, diff) { - if (heightAtLine(line) < ((cm.curOp && cm.curOp.scrollTop) || cm.doc.scrollTop)) - { addToScrollTop(cm, diff); } -} - -function addLineWidget(doc, handle, node, options) { - var widget = new LineWidget(doc, node, options); - var cm = doc.cm; - if (cm && widget.noHScroll) { cm.display.alignWidgets = true; } - changeLine(doc, handle, "widget", function (line) { - var widgets = line.widgets || (line.widgets = []); - if (widget.insertAt == null) { widgets.push(widget); } - else { widgets.splice(Math.min(widgets.length - 1, Math.max(0, widget.insertAt)), 0, widget); } - widget.line = line; - if (cm && !lineIsHidden(doc, line)) { - var aboveVisible = heightAtLine(line) < doc.scrollTop; - updateLineHeight(line, line.height + widgetHeight(widget)); - if (aboveVisible) { addToScrollTop(cm, widget.height); } - cm.curOp.forceUpdate = true; - } - return true - }); - signalLater(cm, "lineWidgetAdded", cm, widget, typeof handle == "number" ? handle : lineNo(handle)); - return widget -} - -// TEXTMARKERS - -// Created with markText and setBookmark methods. 
A TextMarker is a
-// handle that can be used to clear or find a marked position in the
-// document. Line objects hold arrays (markedSpans) containing
-// {from, to, marker} objects pointing to such marker objects, and
-// indicating that such a marker is present on that line. Multiple
-// lines may point to the same marker when it spans across lines.
-// The spans will have null for their from/to properties when the
-// marker continues beyond the start/end of the line. Markers have
-// links back to the lines they currently touch.
-
-// Collapsed markers have unique ids, in order to be able to order
-// them, which is needed for uniquely determining an outer marker
-// when they overlap (they may nest, but not partially overlap).
-var nextMarkerId = 0;
-
-var TextMarker = function(doc, type) {
-  this.lines = [];
-  this.type = type;
-  this.doc = doc;
-  this.id = ++nextMarkerId;
-};
-
-// Clear the marker.
-TextMarker.prototype.clear = function () {
-  var this$1 = this;
-
-  if (this.explicitlyCleared) { return }
-  var cm = this.doc.cm, withOp = cm && !cm.curOp;
-  if (withOp) { startOperation(cm); }
-  if (hasHandler(this, "clear")) {
-    var found = this.find();
-    if (found) { signalLater(this, "clear", found.from, found.to); }
-  }
-  var min = null, max = null;
-  for (var i = 0; i < this.lines.length; ++i) {
-    var line = this$1.lines[i];
-    var span = getMarkedSpanFor(line.markedSpans, this$1);
-    if (cm && !this$1.collapsed) { regLineChange(cm, lineNo(line), "text"); }
-    else if (cm) {
-      if (span.to != null) { max = lineNo(line); }
-      if (span.from != null) { min = lineNo(line); }
-    }
-    line.markedSpans = removeMarkedSpan(line.markedSpans, span);
-    if (span.from == null && this$1.collapsed && !lineIsHidden(this$1.doc, line) && cm)
-      { updateLineHeight(line, textHeight(cm.display)); }
-  }
-  if (cm && this.collapsed && !cm.options.lineWrapping) { for (var i$1 = 0; i$1 < this.lines.length; ++i$1) {
-    var visual = visualLine(this$1.lines[i$1]), len = lineLength(visual);
-    if (len > cm.display.maxLineLength) {
-      cm.display.maxLine = visual;
-      cm.display.maxLineLength = len;
-      cm.display.maxLineChanged = true;
-    }
-  } }
-
-  if (min != null && cm && this.collapsed) { regChange(cm, min, max + 1); }
-  this.lines.length = 0;
-  this.explicitlyCleared = true;
-  if (this.atomic && this.doc.cantEdit) {
-    this.doc.cantEdit = false;
-    if (cm) { reCheckSelection(cm.doc); }
-  }
-  if (cm) { signalLater(cm, "markerCleared", cm, this, min, max); }
-  if (withOp) { endOperation(cm); }
-  if (this.parent) { this.parent.clear(); }
-};
-
-// Find the position of the marker in the document. Returns a {from,
-// to} object by default. Side can be passed to get a specific side
-// -- 0 (both), -1 (left), or 1 (right). When lineObj is true, the
-// Pos objects returned contain a line object, rather than a line
-// number (used to prevent looking up the same line twice).
-TextMarker.prototype.find = function (side, lineObj) {
-  var this$1 = this;
-
-  if (side == null && this.type == "bookmark") { side = 1; }
-  var from, to;
-  for (var i = 0; i < this.lines.length; ++i) {
-    var line = this$1.lines[i];
-    var span = getMarkedSpanFor(line.markedSpans, this$1);
-    if (span.from != null) {
-      from = Pos(lineObj ? line : lineNo(line), span.from);
-      if (side == -1) { return from }
-    }
-    if (span.to != null) {
-      to = Pos(lineObj ? line : lineNo(line), span.to);
-      if (side == 1) { return to }
-    }
-  }
-  return from && {from: from, to: to}
-};
-
-// Signals that the marker's widget changed, and surrounding layout
-// should be recomputed.
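// Usage sketch for the marker methods above (assumes a document `doc`
// with at least two lines; the option values are illustrative):
//
//   var marker = doc.markText(CodeMirror.Pos(0, 0), CodeMirror.Pos(1, 3),
//                             {className: "my-mark", atomic: true})
//   marker.find()   // => {from: Pos(0, 0), to: Pos(1, 3)}, tracking edits
//   marker.clear()  // removes the mark and fires its "clear" handlers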
-TextMarker.prototype.changed = function () {
-  var this$1 = this;
-
-  var pos = this.find(-1, true), widget = this, cm = this.doc.cm;
-  if (!pos || !cm) { return }
-  runInOp(cm, function () {
-    var line = pos.line, lineN = lineNo(pos.line);
-    var view = findViewForLine(cm, lineN);
-    if (view) {
-      clearLineMeasurementCacheFor(view);
-      cm.curOp.selectionChanged = cm.curOp.forceUpdate = true;
-    }
-    cm.curOp.updateMaxLine = true;
-    if (!lineIsHidden(widget.doc, line) && widget.height != null) {
-      var oldHeight = widget.height;
-      widget.height = null;
-      var dHeight = widgetHeight(widget) - oldHeight;
-      if (dHeight)
-        { updateLineHeight(line, line.height + dHeight); }
-    }
-    signalLater(cm, "markerChanged", cm, this$1);
-  });
-};
-
-TextMarker.prototype.attachLine = function (line) {
-  if (!this.lines.length && this.doc.cm) {
-    var op = this.doc.cm.curOp;
-    if (!op.maybeHiddenMarkers || indexOf(op.maybeHiddenMarkers, this) == -1)
-      { (op.maybeUnhiddenMarkers || (op.maybeUnhiddenMarkers = [])).push(this); }
-  }
-  this.lines.push(line);
-};
-
-TextMarker.prototype.detachLine = function (line) {
-  this.lines.splice(indexOf(this.lines, line), 1);
-  if (!this.lines.length && this.doc.cm) {
-    var op = this.doc.cm.curOp;(op.maybeHiddenMarkers || (op.maybeHiddenMarkers = [])).push(this);
-  }
-};
-eventMixin(TextMarker);
-
-// Create a marker, wire it up to the right lines, and return it.
-function markText(doc, from, to, options, type) {
-  // Shared markers (across linked documents) are handled separately
-  // (markTextShared will call out to this again, once per
-  // document).
-  if (options && options.shared) { return markTextShared(doc, from, to, options, type) }
-  // Ensure we are in an operation.
-  if (doc.cm && !doc.cm.curOp) { return operation(doc.cm, markText)(doc, from, to, options, type) }
-
-  var marker = new TextMarker(doc, type), diff = cmp(from, to);
-  if (options) { copyObj(options, marker, false); }
-  // Don't connect empty markers unless clearWhenEmpty is false
-  if (diff > 0 || diff == 0 && marker.clearWhenEmpty !== false)
-    { return marker }
-  if (marker.replacedWith) {
-    // Showing up as a widget implies collapsed (widget replaces text)
-    marker.collapsed = true;
-    marker.widgetNode = eltP("span", [marker.replacedWith], "CodeMirror-widget");
-    if (!options.handleMouseEvents) { marker.widgetNode.setAttribute("cm-ignore-events", "true"); }
-    if (options.insertLeft) { marker.widgetNode.insertLeft = true; }
-  }
-  if (marker.collapsed) {
-    if (conflictingCollapsedRange(doc, from.line, from, to, marker) ||
-        from.line != to.line && conflictingCollapsedRange(doc, to.line, from, to, marker))
-      { throw new Error("Inserting collapsed marker partially overlapping an existing one") }
-    seeCollapsedSpans();
-  }
-
-  if (marker.addToHistory)
-    { addChangeToHistory(doc, {from: from, to: to, origin: "markText"}, doc.sel, NaN); }
-
-  var curLine = from.line, cm = doc.cm, updateMaxLine;
-  doc.iter(curLine, to.line + 1, function (line) {
-    if (cm && marker.collapsed && !cm.options.lineWrapping && visualLine(line) == cm.display.maxLine)
-      { updateMaxLine = true; }
-    if (marker.collapsed && curLine != from.line) { updateLineHeight(line, 0); }
-    addMarkedSpan(line, new MarkedSpan(marker,
-                                       curLine == from.line ? from.ch : null,
-                                       curLine == to.line ? 
to.ch : null)); - ++curLine; - }); - // lineIsHidden depends on the presence of the spans, so needs a second pass - if (marker.collapsed) { doc.iter(from.line, to.line + 1, function (line) { - if (lineIsHidden(doc, line)) { updateLineHeight(line, 0); } - }); } - - if (marker.clearOnEnter) { on(marker, "beforeCursorEnter", function () { return marker.clear(); }); } - - if (marker.readOnly) { - seeReadOnlySpans(); - if (doc.history.done.length || doc.history.undone.length) - { doc.clearHistory(); } - } - if (marker.collapsed) { - marker.id = ++nextMarkerId; - marker.atomic = true; - } - if (cm) { - // Sync editor state - if (updateMaxLine) { cm.curOp.updateMaxLine = true; } - if (marker.collapsed) - { regChange(cm, from.line, to.line + 1); } - else if (marker.className || marker.title || marker.startStyle || marker.endStyle || marker.css) - { for (var i = from.line; i <= to.line; i++) { regLineChange(cm, i, "text"); } } - if (marker.atomic) { reCheckSelection(cm.doc); } - signalLater(cm, "markerAdded", cm, marker); - } - return marker -} - -// SHARED TEXTMARKERS - -// A shared marker spans multiple linked documents. It is -// implemented as a meta-marker-object controlling multiple normal -// markers. -var SharedTextMarker = function(markers, primary) { - var this$1 = this; - - this.markers = markers; - this.primary = primary; - for (var i = 0; i < markers.length; ++i) - { markers[i].parent = this$1; } -}; - -SharedTextMarker.prototype.clear = function () { - var this$1 = this; - - if (this.explicitlyCleared) { return } - this.explicitlyCleared = true; - for (var i = 0; i < this.markers.length; ++i) - { this$1.markers[i].clear(); } - signalLater(this, "clear"); -}; - -SharedTextMarker.prototype.find = function (side, lineObj) { - return this.primary.find(side, lineObj) -}; -eventMixin(SharedTextMarker); - -function markTextShared(doc, from, to, options, type) { - options = copyObj(options); - options.shared = false; - var markers = [markText(doc, from, to, options, type)], primary = markers[0]; - var widget = options.widgetNode; - linkedDocs(doc, function (doc) { - if (widget) { options.widgetNode = widget.cloneNode(true); } - markers.push(markText(doc, clipPos(doc, from), clipPos(doc, to), options, type)); - for (var i = 0; i < doc.linked.length; ++i) - { if (doc.linked[i].isParent) { return } } - primary = lst(markers); - }); - return new SharedTextMarker(markers, primary) -} - -function findSharedMarkers(doc) { - return doc.findMarks(Pos(doc.first, 0), doc.clipPos(Pos(doc.lastLine())), function (m) { return m.parent; }) -} - -function copySharedMarkers(doc, markers) { - for (var i = 0; i < markers.length; i++) { - var marker = markers[i], pos = marker.find(); - var mFrom = doc.clipPos(pos.from), mTo = doc.clipPos(pos.to); - if (cmp(mFrom, mTo)) { - var subMark = markText(doc, mFrom, mTo, marker.primary, marker.primary.type); - marker.markers.push(subMark); - subMark.parent = marker; - } - } -} - -function detachSharedMarkers(markers) { - var loop = function ( i ) { - var marker = markers[i], linked = [marker.primary.doc]; - linkedDocs(marker.primary.doc, function (d) { return linked.push(d); }); - for (var j = 0; j < marker.markers.length; j++) { - var subMarker = marker.markers[j]; - if (indexOf(linked, subMarker.doc) == -1) { - subMarker.parent = null; - marker.markers.splice(j--, 1); - } - } - }; - - for (var i = 0; i < markers.length; i++) loop( i ); -} - -var nextDocId = 0; -var Doc = function(text, mode, firstLine, lineSep, direction) { - if (!(this instanceof Doc)) { return new 
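// Shared markers only arise through linked documents; a sketch tying the
// two together (assumes an editor instance `cm`):
//
//   var mirror = cm.getDoc().linkedDoc({sharedHist: true})
//   cm.getDoc().markText(CodeMirror.Pos(0, 0), CodeMirror.Pos(0, 5),
//                        {className: "shared-mark", shared: true})
//   // markTextShared gave each document its own marker, tied to one
//   // SharedTextMarker; clearing the shared handle clears them all.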
Doc(text, mode, firstLine, lineSep, direction) } - if (firstLine == null) { firstLine = 0; } - - BranchChunk.call(this, [new LeafChunk([new Line("", null)])]); - this.first = firstLine; - this.scrollTop = this.scrollLeft = 0; - this.cantEdit = false; - this.cleanGeneration = 1; - this.modeFrontier = this.highlightFrontier = firstLine; - var start = Pos(firstLine, 0); - this.sel = simpleSelection(start); - this.history = new History(null); - this.id = ++nextDocId; - this.modeOption = mode; - this.lineSep = lineSep; - this.direction = (direction == "rtl") ? "rtl" : "ltr"; - this.extend = false; - - if (typeof text == "string") { text = this.splitLines(text); } - updateDoc(this, {from: start, to: start, text: text}); - setSelection(this, simpleSelection(start), sel_dontScroll); -}; - -Doc.prototype = createObj(BranchChunk.prototype, { - constructor: Doc, - // Iterate over the document. Supports two forms -- with only one - // argument, it calls that for each line in the document. With - // three, it iterates over the range given by the first two (with - // the second being non-inclusive). - iter: function(from, to, op) { - if (op) { this.iterN(from - this.first, to - from, op); } - else { this.iterN(this.first, this.first + this.size, from); } - }, - - // Non-public interface for adding and removing lines. - insert: function(at, lines) { - var height = 0; - for (var i = 0; i < lines.length; ++i) { height += lines[i].height; } - this.insertInner(at - this.first, lines, height); - }, - remove: function(at, n) { this.removeInner(at - this.first, n); }, - - // From here, the methods are part of the public interface. Most - // are also available from CodeMirror (editor) instances. - - getValue: function(lineSep) { - var lines = getLines(this, this.first, this.first + this.size); - if (lineSep === false) { return lines } - return lines.join(lineSep || this.lineSeparator()) - }, - setValue: docMethodOp(function(code) { - var top = Pos(this.first, 0), last = this.first + this.size - 1; - makeChange(this, {from: top, to: Pos(last, getLine(this, last).text.length), - text: this.splitLines(code), origin: "setValue", full: true}, true); - if (this.cm) { scrollToCoords(this.cm, 0, 0); } - setSelection(this, simpleSelection(top), sel_dontScroll); - }), - replaceRange: function(code, from, to, origin) { - from = clipPos(this, from); - to = to ? 
clipPos(this, to) : from; - replaceRange(this, code, from, to, origin); - }, - getRange: function(from, to, lineSep) { - var lines = getBetween(this, clipPos(this, from), clipPos(this, to)); - if (lineSep === false) { return lines } - return lines.join(lineSep || this.lineSeparator()) - }, - - getLine: function(line) {var l = this.getLineHandle(line); return l && l.text}, - - getLineHandle: function(line) {if (isLine(this, line)) { return getLine(this, line) }}, - getLineNumber: function(line) {return lineNo(line)}, - - getLineHandleVisualStart: function(line) { - if (typeof line == "number") { line = getLine(this, line); } - return visualLine(line) - }, - - lineCount: function() {return this.size}, - firstLine: function() {return this.first}, - lastLine: function() {return this.first + this.size - 1}, - - clipPos: function(pos) {return clipPos(this, pos)}, - - getCursor: function(start) { - var range$$1 = this.sel.primary(), pos; - if (start == null || start == "head") { pos = range$$1.head; } - else if (start == "anchor") { pos = range$$1.anchor; } - else if (start == "end" || start == "to" || start === false) { pos = range$$1.to(); } - else { pos = range$$1.from(); } - return pos - }, - listSelections: function() { return this.sel.ranges }, - somethingSelected: function() {return this.sel.somethingSelected()}, - - setCursor: docMethodOp(function(line, ch, options) { - setSimpleSelection(this, clipPos(this, typeof line == "number" ? Pos(line, ch || 0) : line), null, options); - }), - setSelection: docMethodOp(function(anchor, head, options) { - setSimpleSelection(this, clipPos(this, anchor), clipPos(this, head || anchor), options); - }), - extendSelection: docMethodOp(function(head, other, options) { - extendSelection(this, clipPos(this, head), other && clipPos(this, other), options); - }), - extendSelections: docMethodOp(function(heads, options) { - extendSelections(this, clipPosArray(this, heads), options); - }), - extendSelectionsBy: docMethodOp(function(f, options) { - var heads = map(this.sel.ranges, f); - extendSelections(this, clipPosArray(this, heads), options); - }), - setSelections: docMethodOp(function(ranges, primary, options) { - var this$1 = this; - - if (!ranges.length) { return } - var out = []; - for (var i = 0; i < ranges.length; i++) - { out[i] = new Range(clipPos(this$1, ranges[i].anchor), - clipPos(this$1, ranges[i].head)); } - if (primary == null) { primary = Math.min(ranges.length - 1, this.sel.primIndex); } - setSelection(this, normalizeSelection(out, primary), options); - }), - addSelection: docMethodOp(function(anchor, head, options) { - var ranges = this.sel.ranges.slice(0); - ranges.push(new Range(clipPos(this, anchor), clipPos(this, head || anchor))); - setSelection(this, normalizeSelection(ranges, ranges.length - 1), options); - }), - - getSelection: function(lineSep) { - var this$1 = this; - - var ranges = this.sel.ranges, lines; - for (var i = 0; i < ranges.length; i++) { - var sel = getBetween(this$1, ranges[i].from(), ranges[i].to()); - lines = lines ? 
lines.concat(sel) : sel; - } - if (lineSep === false) { return lines } - else { return lines.join(lineSep || this.lineSeparator()) } - }, - getSelections: function(lineSep) { - var this$1 = this; - - var parts = [], ranges = this.sel.ranges; - for (var i = 0; i < ranges.length; i++) { - var sel = getBetween(this$1, ranges[i].from(), ranges[i].to()); - if (lineSep !== false) { sel = sel.join(lineSep || this$1.lineSeparator()); } - parts[i] = sel; - } - return parts - }, - replaceSelection: function(code, collapse, origin) { - var dup = []; - for (var i = 0; i < this.sel.ranges.length; i++) - { dup[i] = code; } - this.replaceSelections(dup, collapse, origin || "+input"); - }, - replaceSelections: docMethodOp(function(code, collapse, origin) { - var this$1 = this; - - var changes = [], sel = this.sel; - for (var i = 0; i < sel.ranges.length; i++) { - var range$$1 = sel.ranges[i]; - changes[i] = {from: range$$1.from(), to: range$$1.to(), text: this$1.splitLines(code[i]), origin: origin}; - } - var newSel = collapse && collapse != "end" && computeReplacedSel(this, changes, collapse); - for (var i$1 = changes.length - 1; i$1 >= 0; i$1--) - { makeChange(this$1, changes[i$1]); } - if (newSel) { setSelectionReplaceHistory(this, newSel); } - else if (this.cm) { ensureCursorVisible(this.cm); } - }), - undo: docMethodOp(function() {makeChangeFromHistory(this, "undo");}), - redo: docMethodOp(function() {makeChangeFromHistory(this, "redo");}), - undoSelection: docMethodOp(function() {makeChangeFromHistory(this, "undo", true);}), - redoSelection: docMethodOp(function() {makeChangeFromHistory(this, "redo", true);}), - - setExtending: function(val) {this.extend = val;}, - getExtending: function() {return this.extend}, - - historySize: function() { - var hist = this.history, done = 0, undone = 0; - for (var i = 0; i < hist.done.length; i++) { if (!hist.done[i].ranges) { ++done; } } - for (var i$1 = 0; i$1 < hist.undone.length; i$1++) { if (!hist.undone[i$1].ranges) { ++undone; } } - return {undo: done, redo: undone} - }, - clearHistory: function() {this.history = new History(this.history.maxGeneration);}, - - markClean: function() { - this.cleanGeneration = this.changeGeneration(true); - }, - changeGeneration: function(forceSplit) { - if (forceSplit) - { this.history.lastOp = this.history.lastSelOp = this.history.lastOrigin = null; } - return this.history.generation - }, - isClean: function (gen) { - return this.history.generation == (gen || this.cleanGeneration) - }, - - getHistory: function() { - return {done: copyHistoryArray(this.history.done), - undone: copyHistoryArray(this.history.undone)} - }, - setHistory: function(histData) { - var hist = this.history = new History(this.history.maxGeneration); - hist.done = copyHistoryArray(histData.done.slice(0), null, true); - hist.undone = copyHistoryArray(histData.undone.slice(0), null, true); - }, - - setGutterMarker: docMethodOp(function(line, gutterID, value) { - return changeLine(this, line, "gutter", function (line) { - var markers = line.gutterMarkers || (line.gutterMarkers = {}); - markers[gutterID] = value; - if (!value && isEmpty(markers)) { line.gutterMarkers = null; } - return true - }) - }), - - clearGutter: docMethodOp(function(gutterID) { - var this$1 = this; - - this.iter(function (line) { - if (line.gutterMarkers && line.gutterMarkers[gutterID]) { - changeLine(this$1, line, "gutter", function () { - line.gutterMarkers[gutterID] = null; - if (isEmpty(line.gutterMarkers)) { line.gutterMarkers = null; } - return true - }); - } - }); - }), - - 
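// Sketch of driving the multiple-selection methods above (assumes an
// editor instance `cm` with at least two lines; strings illustrative):
//
//   cm.setSelections([
//     {anchor: CodeMirror.Pos(0, 0), head: CodeMirror.Pos(0, 0)},
//     {anchor: CodeMirror.Pos(1, 0), head: CodeMirror.Pos(1, 0)}
//   ])
//   cm.replaceSelections(["foo", "bar"], "around")
//   // Each range got its own text; "around" keeps the insertions selected.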
lineInfo: function(line) { - var n; - if (typeof line == "number") { - if (!isLine(this, line)) { return null } - n = line; - line = getLine(this, line); - if (!line) { return null } - } else { - n = lineNo(line); - if (n == null) { return null } - } - return {line: n, handle: line, text: line.text, gutterMarkers: line.gutterMarkers, - textClass: line.textClass, bgClass: line.bgClass, wrapClass: line.wrapClass, - widgets: line.widgets} - }, - - addLineClass: docMethodOp(function(handle, where, cls) { - return changeLine(this, handle, where == "gutter" ? "gutter" : "class", function (line) { - var prop = where == "text" ? "textClass" - : where == "background" ? "bgClass" - : where == "gutter" ? "gutterClass" : "wrapClass"; - if (!line[prop]) { line[prop] = cls; } - else if (classTest(cls).test(line[prop])) { return false } - else { line[prop] += " " + cls; } - return true - }) - }), - removeLineClass: docMethodOp(function(handle, where, cls) { - return changeLine(this, handle, where == "gutter" ? "gutter" : "class", function (line) { - var prop = where == "text" ? "textClass" - : where == "background" ? "bgClass" - : where == "gutter" ? "gutterClass" : "wrapClass"; - var cur = line[prop]; - if (!cur) { return false } - else if (cls == null) { line[prop] = null; } - else { - var found = cur.match(classTest(cls)); - if (!found) { return false } - var end = found.index + found[0].length; - line[prop] = cur.slice(0, found.index) + (!found.index || end == cur.length ? "" : " ") + cur.slice(end) || null; - } - return true - }) - }), - - addLineWidget: docMethodOp(function(handle, node, options) { - return addLineWidget(this, handle, node, options) - }), - removeLineWidget: function(widget) { widget.clear(); }, - - markText: function(from, to, options) { - return markText(this, clipPos(this, from), clipPos(this, to), options, options && options.type || "range") - }, - setBookmark: function(pos, options) { - var realOpts = {replacedWith: options && (options.nodeType == null ? 
options.widget : options), - insertLeft: options && options.insertLeft, - clearWhenEmpty: false, shared: options && options.shared, - handleMouseEvents: options && options.handleMouseEvents}; - pos = clipPos(this, pos); - return markText(this, pos, pos, realOpts, "bookmark") - }, - findMarksAt: function(pos) { - pos = clipPos(this, pos); - var markers = [], spans = getLine(this, pos.line).markedSpans; - if (spans) { for (var i = 0; i < spans.length; ++i) { - var span = spans[i]; - if ((span.from == null || span.from <= pos.ch) && - (span.to == null || span.to >= pos.ch)) - { markers.push(span.marker.parent || span.marker); } - } } - return markers - }, - findMarks: function(from, to, filter) { - from = clipPos(this, from); to = clipPos(this, to); - var found = [], lineNo$$1 = from.line; - this.iter(from.line, to.line + 1, function (line) { - var spans = line.markedSpans; - if (spans) { for (var i = 0; i < spans.length; i++) { - var span = spans[i]; - if (!(span.to != null && lineNo$$1 == from.line && from.ch >= span.to || - span.from == null && lineNo$$1 != from.line || - span.from != null && lineNo$$1 == to.line && span.from >= to.ch) && - (!filter || filter(span.marker))) - { found.push(span.marker.parent || span.marker); } - } } - ++lineNo$$1; - }); - return found - }, - getAllMarks: function() { - var markers = []; - this.iter(function (line) { - var sps = line.markedSpans; - if (sps) { for (var i = 0; i < sps.length; ++i) - { if (sps[i].from != null) { markers.push(sps[i].marker); } } } - }); - return markers - }, - - posFromIndex: function(off) { - var ch, lineNo$$1 = this.first, sepSize = this.lineSeparator().length; - this.iter(function (line) { - var sz = line.text.length + sepSize; - if (sz > off) { ch = off; return true } - off -= sz; - ++lineNo$$1; - }); - return clipPos(this, Pos(lineNo$$1, ch)) - }, - indexFromPos: function (coords) { - coords = clipPos(this, coords); - var index = coords.ch; - if (coords.line < this.first || coords.ch < 0) { return 0 } - var sepSize = this.lineSeparator().length; - this.iter(this.first, coords.line, function (line) { // iter aborts when callback returns a truthy value - index += line.text.length + sepSize; - }); - return index - }, - - copy: function(copyHistory) { - var doc = new Doc(getLines(this, this.first, this.first + this.size), - this.modeOption, this.first, this.lineSep, this.direction); - doc.scrollTop = this.scrollTop; doc.scrollLeft = this.scrollLeft; - doc.sel = this.sel; - doc.extend = false; - if (copyHistory) { - doc.history.undoDepth = this.history.undoDepth; - doc.setHistory(this.getHistory()); - } - return doc - }, - - linkedDoc: function(options) { - if (!options) { options = {}; } - var from = this.first, to = this.first + this.size; - if (options.from != null && options.from > from) { from = options.from; } - if (options.to != null && options.to < to) { to = options.to; } - var copy = new Doc(getLines(this, from, to), options.mode || this.modeOption, from, this.lineSep, this.direction); - if (options.sharedHist) { copy.history = this.history - ; }(this.linked || (this.linked = [])).push({doc: copy, sharedHist: options.sharedHist}); - copy.linked = [{doc: this, isParent: true, sharedHist: options.sharedHist}]; - copySharedMarkers(copy, findSharedMarkers(this)); - return copy - }, - unlinkDoc: function(other) { - var this$1 = this; - - if (other instanceof CodeMirror$1) { other = other.doc; } - if (this.linked) { for (var i = 0; i < this.linked.length; ++i) { - var link = this$1.linked[i]; - if (link.doc != other) { 
continue } - this$1.linked.splice(i, 1); - other.unlinkDoc(this$1); - detachSharedMarkers(findSharedMarkers(this$1)); - break - } } - // If the histories were shared, split them again - if (other.history == this.history) { - var splitIds = [other.id]; - linkedDocs(other, function (doc) { return splitIds.push(doc.id); }, true); - other.history = new History(null); - other.history.done = copyHistoryArray(this.history.done, splitIds); - other.history.undone = copyHistoryArray(this.history.undone, splitIds); - } - }, - iterLinkedDocs: function(f) {linkedDocs(this, f);}, - - getMode: function() {return this.mode}, - getEditor: function() {return this.cm}, - - splitLines: function(str) { - if (this.lineSep) { return str.split(this.lineSep) } - return splitLinesAuto(str) - }, - lineSeparator: function() { return this.lineSep || "\n" }, - - setDirection: docMethodOp(function (dir) { - if (dir != "rtl") { dir = "ltr"; } - if (dir == this.direction) { return } - this.direction = dir; - this.iter(function (line) { return line.order = null; }); - if (this.cm) { directionChanged(this.cm); } - }) -}); - -// Public alias. -Doc.prototype.eachLine = Doc.prototype.iter; - -// Kludge to work around strange IE behavior where it'll sometimes -// re-fire a series of drag-related events right after the drop (#1551) -var lastDrop = 0; - -function onDrop(e) { - var cm = this; - clearDragCursor(cm); - if (signalDOMEvent(cm, e) || eventInWidget(cm.display, e)) - { return } - e_preventDefault(e); - if (ie) { lastDrop = +new Date; } - var pos = posFromMouse(cm, e, true), files = e.dataTransfer.files; - if (!pos || cm.isReadOnly()) { return } - // Might be a file drop, in which case we simply extract the text - // and insert it. - if (files && files.length && window.FileReader && window.File) { - var n = files.length, text = Array(n), read = 0; - var loadFile = function (file, i) { - if (cm.options.allowDropFileTypes && - indexOf(cm.options.allowDropFileTypes, file.type) == -1) - { return } - - var reader = new FileReader; - reader.onload = operation(cm, function () { - var content = reader.result; - if (/[\x00-\x08\x0e-\x1f]{2}/.test(content)) { content = ""; } - text[i] = content; - if (++read == n) { - pos = clipPos(cm.doc, pos); - var change = {from: pos, to: pos, - text: cm.doc.splitLines(text.join(cm.doc.lineSeparator())), - origin: "paste"}; - makeChange(cm.doc, change); - setSelectionReplaceHistory(cm.doc, simpleSelection(pos, changeEnd(change))); - } - }); - reader.readAsText(file); - }; - for (var i = 0; i < n; ++i) { loadFile(files[i], i); } - } else { // Normal drop - // Don't do a replace if the drop happened inside of the selected text. 
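// The file-reading branch above is gated by the allowDropFileTypes
// option; a configuration sketch (assumes CodeMirror is being created
// against document.body):
//
//   var cm = CodeMirror(document.body, {
//     allowDropFileTypes: ["text/plain"]  // ignore dropped non-text files
//   })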
-    if (cm.state.draggingText && cm.doc.sel.contains(pos) > -1) {
-      cm.state.draggingText(e);
-      // Ensure the editor is re-focused
-      setTimeout(function () { return cm.display.input.focus(); }, 20);
-      return
-    }
-    try {
-      var text$1 = e.dataTransfer.getData("Text");
-      if (text$1) {
-        var selected;
-        if (cm.state.draggingText && !cm.state.draggingText.copy)
-          { selected = cm.listSelections(); }
-        setSelectionNoUndo(cm.doc, simpleSelection(pos, pos));
-        if (selected) { for (var i$1 = 0; i$1 < selected.length; ++i$1)
-          { replaceRange(cm.doc, "", selected[i$1].anchor, selected[i$1].head, "drag"); } }
-        cm.replaceSelection(text$1, "around", "paste");
-        cm.display.input.focus();
-      }
-    }
-    catch(e){}
-  }
-}
-
-function onDragStart(cm, e) {
-  if (ie && (!cm.state.draggingText || +new Date - lastDrop < 100)) { e_stop(e); return }
-  if (signalDOMEvent(cm, e) || eventInWidget(cm.display, e)) { return }
-
-  e.dataTransfer.setData("Text", cm.getSelection());
-  e.dataTransfer.effectAllowed = "copyMove";
-
-  // Use a dummy image instead of the default browser image.
-  // Recent Safari (~6.0.2) has a tendency to segfault when this happens, so we don't do it there.
-  if (e.dataTransfer.setDragImage && !safari) {
-    var img = elt("img", null, null, "position: fixed; left: 0; top: 0;");
-    img.src = "data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==";
-    if (presto) {
-      img.width = img.height = 1;
-      cm.display.wrapper.appendChild(img);
-      // Force a relayout, or Opera won't use our image for some obscure reason
-      img._top = img.offsetTop;
-    }
-    e.dataTransfer.setDragImage(img, 0, 0);
-    if (presto) { img.parentNode.removeChild(img); }
-  }
-}
-
-function onDragOver(cm, e) {
-  var pos = posFromMouse(cm, e);
-  if (!pos) { return }
-  var frag = document.createDocumentFragment();
-  drawSelectionCursor(cm, pos, frag);
-  if (!cm.display.dragCursor) {
-    cm.display.dragCursor = elt("div", null, "CodeMirror-cursors CodeMirror-dragcursors");
-    cm.display.lineSpace.insertBefore(cm.display.dragCursor, cm.display.cursorDiv);
-  }
-  removeChildrenAndAdd(cm.display.dragCursor, frag);
-}
-
-function clearDragCursor(cm) {
-  if (cm.display.dragCursor) {
-    cm.display.lineSpace.removeChild(cm.display.dragCursor);
-    cm.display.dragCursor = null;
-  }
-}
-
-// These must be handled carefully, because naively registering a
-// handler for each editor will cause the editors to never be
-// garbage collected.
-
-function forEachCodeMirror(f) {
-  if (!document.getElementsByClassName) { return }
-  var byClass = document.getElementsByClassName("CodeMirror");
-  for (var i = 0; i < byClass.length; i++) {
-    var cm = byClass[i].CodeMirror;
-    if (cm) { f(cm); }
-  }
-}
-
-var globalsRegistered = false;
-function ensureGlobalHandlers() {
-  if (globalsRegistered) { return }
-  registerGlobalHandlers();
-  globalsRegistered = true;
-}
-function registerGlobalHandlers() {
-  // When the window resizes, we need to refresh active editors.
-  var resizeTimer;
-  on(window, "resize", function () {
-    if (resizeTimer == null) { resizeTimer = setTimeout(function () {
-      resizeTimer = null;
-      forEachCodeMirror(onResize);
-    }, 100); }
-  });
-  // When the window loses focus, we want to show the editor as blurred
-  on(window, "blur", function () { return forEachCodeMirror(onBlur); });
-}
-// Called when the window resizes
-function onResize(cm) {
-  var d = cm.display;
-  if (d.lastWrapHeight == d.wrapper.clientHeight && d.lastWrapWidth == d.wrapper.clientWidth)
-    { return }
-  // Might be a text scaling operation, clear size caches.
- d.cachedCharWidth = d.cachedTextHeight = d.cachedPaddingH = null; - d.scrollbarsClipped = false; - cm.setSize(); -} - -var keyNames = { - 3: "Enter", 8: "Backspace", 9: "Tab", 13: "Enter", 16: "Shift", 17: "Ctrl", 18: "Alt", - 19: "Pause", 20: "CapsLock", 27: "Esc", 32: "Space", 33: "PageUp", 34: "PageDown", 35: "End", - 36: "Home", 37: "Left", 38: "Up", 39: "Right", 40: "Down", 44: "PrintScrn", 45: "Insert", - 46: "Delete", 59: ";", 61: "=", 91: "Mod", 92: "Mod", 93: "Mod", - 106: "*", 107: "=", 109: "-", 110: ".", 111: "/", 127: "Delete", - 173: "-", 186: ";", 187: "=", 188: ",", 189: "-", 190: ".", 191: "/", 192: "`", 219: "[", 220: "\\", - 221: "]", 222: "'", 63232: "Up", 63233: "Down", 63234: "Left", 63235: "Right", 63272: "Delete", - 63273: "Home", 63275: "End", 63276: "PageUp", 63277: "PageDown", 63302: "Insert" -}; - -// Number keys -for (var i = 0; i < 10; i++) { keyNames[i + 48] = keyNames[i + 96] = String(i); } -// Alphabetic keys -for (var i$1 = 65; i$1 <= 90; i$1++) { keyNames[i$1] = String.fromCharCode(i$1); } -// Function keys -for (var i$2 = 1; i$2 <= 12; i$2++) { keyNames[i$2 + 111] = keyNames[i$2 + 63235] = "F" + i$2; } - -var keyMap = {}; - -keyMap.basic = { - "Left": "goCharLeft", "Right": "goCharRight", "Up": "goLineUp", "Down": "goLineDown", - "End": "goLineEnd", "Home": "goLineStartSmart", "PageUp": "goPageUp", "PageDown": "goPageDown", - "Delete": "delCharAfter", "Backspace": "delCharBefore", "Shift-Backspace": "delCharBefore", - "Tab": "defaultTab", "Shift-Tab": "indentAuto", - "Enter": "newlineAndIndent", "Insert": "toggleOverwrite", - "Esc": "singleSelection" -}; -// Note that the save and find-related commands aren't defined by -// default. User code or addons can define them. Unknown commands -// are simply ignored. -keyMap.pcDefault = { - "Ctrl-A": "selectAll", "Ctrl-D": "deleteLine", "Ctrl-Z": "undo", "Shift-Ctrl-Z": "redo", "Ctrl-Y": "redo", - "Ctrl-Home": "goDocStart", "Ctrl-End": "goDocEnd", "Ctrl-Up": "goLineUp", "Ctrl-Down": "goLineDown", - "Ctrl-Left": "goGroupLeft", "Ctrl-Right": "goGroupRight", "Alt-Left": "goLineStart", "Alt-Right": "goLineEnd", - "Ctrl-Backspace": "delGroupBefore", "Ctrl-Delete": "delGroupAfter", "Ctrl-S": "save", "Ctrl-F": "find", - "Ctrl-G": "findNext", "Shift-Ctrl-G": "findPrev", "Shift-Ctrl-F": "replace", "Shift-Ctrl-R": "replaceAll", - "Ctrl-[": "indentLess", "Ctrl-]": "indentMore", - "Ctrl-U": "undoSelection", "Shift-Ctrl-U": "redoSelection", "Alt-U": "redoSelection", - fallthrough: "basic" -}; -// Very basic readline/emacs-style bindings, which are standard on Mac. 
-keyMap.emacsy = {
-  "Ctrl-F": "goCharRight", "Ctrl-B": "goCharLeft", "Ctrl-P": "goLineUp", "Ctrl-N": "goLineDown",
-  "Alt-F": "goWordRight", "Alt-B": "goWordLeft", "Ctrl-A": "goLineStart", "Ctrl-E": "goLineEnd",
-  "Ctrl-V": "goPageDown", "Shift-Ctrl-V": "goPageUp", "Ctrl-D": "delCharAfter", "Ctrl-H": "delCharBefore",
-  "Alt-D": "delWordAfter", "Alt-Backspace": "delWordBefore", "Ctrl-K": "killLine", "Ctrl-T": "transposeChars",
-  "Ctrl-O": "openLine"
-};
-keyMap.macDefault = {
-  "Cmd-A": "selectAll", "Cmd-D": "deleteLine", "Cmd-Z": "undo", "Shift-Cmd-Z": "redo", "Cmd-Y": "redo",
-  "Cmd-Home": "goDocStart", "Cmd-Up": "goDocStart", "Cmd-End": "goDocEnd", "Cmd-Down": "goDocEnd", "Alt-Left": "goGroupLeft",
-  "Alt-Right": "goGroupRight", "Cmd-Left": "goLineLeft", "Cmd-Right": "goLineRight", "Alt-Backspace": "delGroupBefore",
-  "Ctrl-Alt-Backspace": "delGroupAfter", "Alt-Delete": "delGroupAfter", "Cmd-S": "save", "Cmd-F": "find",
-  "Cmd-G": "findNext", "Shift-Cmd-G": "findPrev", "Cmd-Alt-F": "replace", "Shift-Cmd-Alt-F": "replaceAll",
-  "Cmd-[": "indentLess", "Cmd-]": "indentMore", "Cmd-Backspace": "delWrappedLineLeft", "Cmd-Delete": "delWrappedLineRight",
-  "Cmd-U": "undoSelection", "Shift-Cmd-U": "redoSelection", "Ctrl-Up": "goDocStart", "Ctrl-Down": "goDocEnd",
-  fallthrough: ["basic", "emacsy"]
-};
-keyMap["default"] = mac ? keyMap.macDefault : keyMap.pcDefault;
-
-// KEYMAP DISPATCH
-
-function normalizeKeyName(name) {
-  var parts = name.split(/-(?!$)/);
-  name = parts[parts.length - 1];
-  var alt, ctrl, shift, cmd;
-  for (var i = 0; i < parts.length - 1; i++) {
-    var mod = parts[i];
-    if (/^(cmd|meta|m)$/i.test(mod)) { cmd = true; }
-    else if (/^a(lt)?$/i.test(mod)) { alt = true; }
-    else if (/^(c|ctrl|control)$/i.test(mod)) { ctrl = true; }
-    else if (/^s(hift)?$/i.test(mod)) { shift = true; }
-    else { throw new Error("Unrecognized modifier name: " + mod) }
-  }
-  if (alt) { name = "Alt-" + name; }
-  if (ctrl) { name = "Ctrl-" + name; }
-  if (cmd) { name = "Cmd-" + name; }
-  if (shift) { name = "Shift-" + name; }
-  return name
-}
-
-// This is a kludge to keep keymaps mostly working as raw objects
-// (backwards compatibility) while at the same time supporting features
-// like normalization and multi-stroke key bindings. It compiles a
-// new normalized keymap, and then updates the old object to reflect
-// this.
-function normalizeKeyMap(keymap) {
-  var copy = {};
-  for (var keyname in keymap) { if (keymap.hasOwnProperty(keyname)) {
-    var value = keymap[keyname];
-    if (/^(name|fallthrough|(de|at)tach)$/.test(keyname)) { continue }
-    if (value == "...") { delete keymap[keyname]; continue }
-
-    var keys = map(keyname.split(" "), normalizeKeyName);
-    for (var i = 0; i < keys.length; i++) {
-      var val = (void 0), name = (void 0);
-      if (i == keys.length - 1) {
-        name = keys.join(" ");
-        val = value;
-      } else {
-        name = keys.slice(0, i + 1).join(" ");
-        val = "...";
-      }
-      var prev = copy[name];
-      if (!prev) { copy[name] = val; }
-      else if (prev != val) { throw new Error("Inconsistent bindings for " + name) }
-    }
-    delete keymap[keyname];
-  } }
-  for (var prop in copy) { keymap[prop] = copy[prop]; }
-  return keymap
-}
-
-function lookupKey(key, map$$1, handle, context) {
-  map$$1 = getKeyMap(map$$1);
-  var found = map$$1.call ? 
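// User keymaps opt into this normalization explicitly; a sketch (assumes
// an editor instance `cm`, and that a "save" command has been defined):
//
//   cm.addKeyMap(CodeMirror.normalizeKeyMap({
//     "Ctrl-X Ctrl-S": "save"   // multi-stroke binding, emacs style
//   }))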
map$$1.call(key, context) : map$$1[key]; - if (found === false) { return "nothing" } - if (found === "...") { return "multi" } - if (found != null && handle(found)) { return "handled" } - - if (map$$1.fallthrough) { - if (Object.prototype.toString.call(map$$1.fallthrough) != "[object Array]") - { return lookupKey(key, map$$1.fallthrough, handle, context) } - for (var i = 0; i < map$$1.fallthrough.length; i++) { - var result = lookupKey(key, map$$1.fallthrough[i], handle, context); - if (result) { return result } - } - } -} - -// Modifier key presses don't count as 'real' key presses for the -// purpose of keymap fallthrough. -function isModifierKey(value) { - var name = typeof value == "string" ? value : keyNames[value.keyCode]; - return name == "Ctrl" || name == "Alt" || name == "Shift" || name == "Mod" -} - -function addModifierNames(name, event, noShift) { - var base = name; - if (event.altKey && base != "Alt") { name = "Alt-" + name; } - if ((flipCtrlCmd ? event.metaKey : event.ctrlKey) && base != "Ctrl") { name = "Ctrl-" + name; } - if ((flipCtrlCmd ? event.ctrlKey : event.metaKey) && base != "Cmd") { name = "Cmd-" + name; } - if (!noShift && event.shiftKey && base != "Shift") { name = "Shift-" + name; } - return name -} - -// Look up the name of a key as indicated by an event object. -function keyName(event, noShift) { - if (presto && event.keyCode == 34 && event["char"]) { return false } - var name = keyNames[event.keyCode]; - if (name == null || event.altGraphKey) { return false } - return addModifierNames(name, event, noShift) -} - -function getKeyMap(val) { - return typeof val == "string" ? keyMap[val] : val -} - -// Helper for deleting text near the selection(s), used to implement -// backspace, delete, and similar functionality. -function deleteNearSelection(cm, compute) { - var ranges = cm.doc.sel.ranges, kill = []; - // Build up a set of ranges to kill first, merging overlapping - // ranges. - for (var i = 0; i < ranges.length; i++) { - var toKill = compute(ranges[i]); - while (kill.length && cmp(toKill.from, lst(kill).to) <= 0) { - var replaced = kill.pop(); - if (cmp(replaced.from, toKill.from) < 0) { - toKill.from = replaced.from; - break - } - } - kill.push(toKill); - } - // Next, remove those actual ranges. - runInOp(cm, function () { - for (var i = kill.length - 1; i >= 0; i--) - { replaceRange(cm.doc, "", kill[i].from, kill[i].to, "+delete"); } - ensureCursorVisible(cm); - }); -} - -function moveCharLogically(line, ch, dir) { - var target = skipExtendingChars(line.text, ch + dir, dir); - return target < 0 || target > line.text.length ? null : target -} - -function moveLogically(line, start, dir) { - var ch = moveCharLogically(line, start.ch, dir); - return ch == null ? null : new Pos(start.line, ch, dir < 0 ? "after" : "before") -} - -function endOfLine(visually, cm, lineObj, lineNo, dir) { - if (visually) { - var order = getOrder(lineObj, cm.doc.direction); - if (order) { - var part = dir < 0 ? lst(order) : order[0]; - var moveInStorageOrder = (dir < 0) == (part.level == 1); - var sticky = moveInStorageOrder ? "after" : "before"; - var ch; - // With a wrapped rtl chunk (possibly spanning multiple bidi parts), - // it could be that the last bidi part is not on the last visual line, - // since visual lines contain content order-consecutive chunks. - // Thus, in rtl, we are looking for the first (content-order) character - // in the rtl chunk that is on the last line (that is, the same line - // as the last (content-order) character). 
- if (part.level > 0 || cm.doc.direction == "rtl") { - var prep = prepareMeasureForLine(cm, lineObj); - ch = dir < 0 ? lineObj.text.length - 1 : 0; - var targetTop = measureCharPrepared(cm, prep, ch).top; - ch = findFirst(function (ch) { return measureCharPrepared(cm, prep, ch).top == targetTop; }, (dir < 0) == (part.level == 1) ? part.from : part.to - 1, ch); - if (sticky == "before") { ch = moveCharLogically(lineObj, ch, 1); } - } else { ch = dir < 0 ? part.to : part.from; } - return new Pos(lineNo, ch, sticky) - } - } - return new Pos(lineNo, dir < 0 ? lineObj.text.length : 0, dir < 0 ? "before" : "after") -} - -function moveVisually(cm, line, start, dir) { - var bidi = getOrder(line, cm.doc.direction); - if (!bidi) { return moveLogically(line, start, dir) } - if (start.ch >= line.text.length) { - start.ch = line.text.length; - start.sticky = "before"; - } else if (start.ch <= 0) { - start.ch = 0; - start.sticky = "after"; - } - var partPos = getBidiPartAt(bidi, start.ch, start.sticky), part = bidi[partPos]; - if (cm.doc.direction == "ltr" && part.level % 2 == 0 && (dir > 0 ? part.to > start.ch : part.from < start.ch)) { - // Case 1: We move within an ltr part in an ltr editor. Even with wrapped lines, - // nothing interesting happens. - return moveLogically(line, start, dir) - } - - var mv = function (pos, dir) { return moveCharLogically(line, pos instanceof Pos ? pos.ch : pos, dir); }; - var prep; - var getWrappedLineExtent = function (ch) { - if (!cm.options.lineWrapping) { return {begin: 0, end: line.text.length} } - prep = prep || prepareMeasureForLine(cm, line); - return wrappedLineExtentChar(cm, line, prep, ch) - }; - var wrappedLineExtent = getWrappedLineExtent(start.sticky == "before" ? mv(start, -1) : start.ch); - - if (cm.doc.direction == "rtl" || part.level == 1) { - var moveInStorageOrder = (part.level == 1) == (dir < 0); - var ch = mv(start, moveInStorageOrder ? 1 : -1); - if (ch != null && (!moveInStorageOrder ? ch >= part.from && ch >= wrappedLineExtent.begin : ch <= part.to && ch <= wrappedLineExtent.end)) { - // Case 2: We move within an rtl part or in an rtl editor on the same visual line - var sticky = moveInStorageOrder ? "before" : "after"; - return new Pos(start.line, ch, sticky) - } - } - - // Case 3: Could not move within this bidi part in this visual line, so leave - // the current bidi part - - var searchInVisualLine = function (partPos, dir, wrappedLineExtent) { - var getRes = function (ch, moveInStorageOrder) { return moveInStorageOrder - ? new Pos(start.line, mv(ch, 1), "before") - : new Pos(start.line, ch, "after"); }; - - for (; partPos >= 0 && partPos < bidi.length; partPos += dir) { - var part = bidi[partPos]; - var moveInStorageOrder = (dir > 0) == (part.level != 1); - var ch = moveInStorageOrder ? wrappedLineExtent.begin : mv(wrappedLineExtent.end, -1); - if (part.from <= ch && ch < part.to) { return getRes(ch, moveInStorageOrder) } - ch = moveInStorageOrder ? part.from : mv(part.to, -1); - if (wrappedLineExtent.begin <= ch && ch < wrappedLineExtent.end) { return getRes(ch, moveInStorageOrder) } - } - }; - - // Case 3a: Look for other bidi parts on the same visual line - var res = searchInVisualLine(partPos + dir, dir, wrappedLineExtent); - if (res) { return res } - - // Case 3b: Look for other bidi parts on the next visual line - var nextCh = dir > 0 ? wrappedLineExtent.end : mv(wrappedLineExtent.begin, -1); - if (nextCh != null && !(dir > 0 && nextCh == line.text.length)) { - res = searchInVisualLine(dir > 0 ? 
0 : bidi.length - 1, dir, getWrappedLineExtent(nextCh)); - if (res) { return res } - } - - // Case 4: Nowhere to move - return null -} - -// Commands are parameter-less actions that can be performed on an -// editor, mostly used for keybindings. -var commands = { - selectAll: selectAll, - singleSelection: function (cm) { return cm.setSelection(cm.getCursor("anchor"), cm.getCursor("head"), sel_dontScroll); }, - killLine: function (cm) { return deleteNearSelection(cm, function (range) { - if (range.empty()) { - var len = getLine(cm.doc, range.head.line).text.length; - if (range.head.ch == len && range.head.line < cm.lastLine()) - { return {from: range.head, to: Pos(range.head.line + 1, 0)} } - else - { return {from: range.head, to: Pos(range.head.line, len)} } - } else { - return {from: range.from(), to: range.to()} - } - }); }, - deleteLine: function (cm) { return deleteNearSelection(cm, function (range) { return ({ - from: Pos(range.from().line, 0), - to: clipPos(cm.doc, Pos(range.to().line + 1, 0)) - }); }); }, - delLineLeft: function (cm) { return deleteNearSelection(cm, function (range) { return ({ - from: Pos(range.from().line, 0), to: range.from() - }); }); }, - delWrappedLineLeft: function (cm) { return deleteNearSelection(cm, function (range) { - var top = cm.charCoords(range.head, "div").top + 5; - var leftPos = cm.coordsChar({left: 0, top: top}, "div"); - return {from: leftPos, to: range.from()} - }); }, - delWrappedLineRight: function (cm) { return deleteNearSelection(cm, function (range) { - var top = cm.charCoords(range.head, "div").top + 5; - var rightPos = cm.coordsChar({left: cm.display.lineDiv.offsetWidth + 100, top: top}, "div"); - return {from: range.from(), to: rightPos } - }); }, - undo: function (cm) { return cm.undo(); }, - redo: function (cm) { return cm.redo(); }, - undoSelection: function (cm) { return cm.undoSelection(); }, - redoSelection: function (cm) { return cm.redoSelection(); }, - goDocStart: function (cm) { return cm.extendSelection(Pos(cm.firstLine(), 0)); }, - goDocEnd: function (cm) { return cm.extendSelection(Pos(cm.lastLine())); }, - goLineStart: function (cm) { return cm.extendSelectionsBy(function (range) { return lineStart(cm, range.head.line); }, - {origin: "+move", bias: 1} - ); }, - goLineStartSmart: function (cm) { return cm.extendSelectionsBy(function (range) { return lineStartSmart(cm, range.head); }, - {origin: "+move", bias: 1} - ); }, - goLineEnd: function (cm) { return cm.extendSelectionsBy(function (range) { return lineEnd(cm, range.head.line); }, - {origin: "+move", bias: -1} - ); }, - goLineRight: function (cm) { return cm.extendSelectionsBy(function (range) { - var top = cm.cursorCoords(range.head, "div").top + 5; - return cm.coordsChar({left: cm.display.lineDiv.offsetWidth + 100, top: top}, "div") - }, sel_move); }, - goLineLeft: function (cm) { return cm.extendSelectionsBy(function (range) { - var top = cm.cursorCoords(range.head, "div").top + 5; - return cm.coordsChar({left: 0, top: top}, "div") - }, sel_move); }, - goLineLeftSmart: function (cm) { return cm.extendSelectionsBy(function (range) { - var top = cm.cursorCoords(range.head, "div").top + 5; - var pos = cm.coordsChar({left: 0, top: top}, "div"); - if (pos.ch < cm.getLine(pos.line).search(/\S/)) { return lineStartSmart(cm, range.head) } - return pos - }, sel_move); }, - goLineUp: function (cm) { return cm.moveV(-1, "line"); }, - goLineDown: function (cm) { return cm.moveV(1, "line"); }, - goPageUp: function (cm) { return cm.moveV(-1, "page"); }, - goPageDown: function (cm) 
{ return cm.moveV(1, "page"); }, - goCharLeft: function (cm) { return cm.moveH(-1, "char"); }, - goCharRight: function (cm) { return cm.moveH(1, "char"); }, - goColumnLeft: function (cm) { return cm.moveH(-1, "column"); }, - goColumnRight: function (cm) { return cm.moveH(1, "column"); }, - goWordLeft: function (cm) { return cm.moveH(-1, "word"); }, - goGroupRight: function (cm) { return cm.moveH(1, "group"); }, - goGroupLeft: function (cm) { return cm.moveH(-1, "group"); }, - goWordRight: function (cm) { return cm.moveH(1, "word"); }, - delCharBefore: function (cm) { return cm.deleteH(-1, "char"); }, - delCharAfter: function (cm) { return cm.deleteH(1, "char"); }, - delWordBefore: function (cm) { return cm.deleteH(-1, "word"); }, - delWordAfter: function (cm) { return cm.deleteH(1, "word"); }, - delGroupBefore: function (cm) { return cm.deleteH(-1, "group"); }, - delGroupAfter: function (cm) { return cm.deleteH(1, "group"); }, - indentAuto: function (cm) { return cm.indentSelection("smart"); }, - indentMore: function (cm) { return cm.indentSelection("add"); }, - indentLess: function (cm) { return cm.indentSelection("subtract"); }, - insertTab: function (cm) { return cm.replaceSelection("\t"); }, - insertSoftTab: function (cm) { - var spaces = [], ranges = cm.listSelections(), tabSize = cm.options.tabSize; - for (var i = 0; i < ranges.length; i++) { - var pos = ranges[i].from(); - var col = countColumn(cm.getLine(pos.line), pos.ch, tabSize); - spaces.push(spaceStr(tabSize - col % tabSize)); - } - cm.replaceSelections(spaces); - }, - defaultTab: function (cm) { - if (cm.somethingSelected()) { cm.indentSelection("add"); } - else { cm.execCommand("insertTab"); } - }, - // Swap the two chars left and right of each selection's head. - // Move cursor behind the two swapped characters afterwards. - // - // Doesn't consider line feeds a character. - // Doesn't scan more than one line above to find a character. - // Doesn't do anything on an empty line. - // Doesn't do anything with non-empty selections. 
- transposeChars: function (cm) { return runInOp(cm, function () { - var ranges = cm.listSelections(), newSel = []; - for (var i = 0; i < ranges.length; i++) { - if (!ranges[i].empty()) { continue } - var cur = ranges[i].head, line = getLine(cm.doc, cur.line).text; - if (line) { - if (cur.ch == line.length) { cur = new Pos(cur.line, cur.ch - 1); } - if (cur.ch > 0) { - cur = new Pos(cur.line, cur.ch + 1); - cm.replaceRange(line.charAt(cur.ch - 1) + line.charAt(cur.ch - 2), - Pos(cur.line, cur.ch - 2), cur, "+transpose"); - } else if (cur.line > cm.doc.first) { - var prev = getLine(cm.doc, cur.line - 1).text; - if (prev) { - cur = new Pos(cur.line, 1); - cm.replaceRange(line.charAt(0) + cm.doc.lineSeparator() + - prev.charAt(prev.length - 1), - Pos(cur.line - 1, prev.length - 1), cur, "+transpose"); - } - } - } - newSel.push(new Range(cur, cur)); - } - cm.setSelections(newSel); - }); }, - newlineAndIndent: function (cm) { return runInOp(cm, function () { - var sels = cm.listSelections(); - for (var i = sels.length - 1; i >= 0; i--) - { cm.replaceRange(cm.doc.lineSeparator(), sels[i].anchor, sels[i].head, "+input"); } - sels = cm.listSelections(); - for (var i$1 = 0; i$1 < sels.length; i$1++) - { cm.indentLine(sels[i$1].from().line, null, true); } - ensureCursorVisible(cm); - }); }, - openLine: function (cm) { return cm.replaceSelection("\n", "start"); }, - toggleOverwrite: function (cm) { return cm.toggleOverwrite(); } -}; - - -function lineStart(cm, lineN) { - var line = getLine(cm.doc, lineN); - var visual = visualLine(line); - if (visual != line) { lineN = lineNo(visual); } - return endOfLine(true, cm, visual, lineN, 1) -} -function lineEnd(cm, lineN) { - var line = getLine(cm.doc, lineN); - var visual = visualLineEnd(line); - if (visual != line) { lineN = lineNo(visual); } - return endOfLine(true, cm, line, lineN, -1) -} -function lineStartSmart(cm, pos) { - var start = lineStart(cm, pos.line); - var line = getLine(cm.doc, start.line); - var order = getOrder(line, cm.doc.direction); - if (!order || order[0].level == 0) { - var firstNonWS = Math.max(0, line.text.search(/\S/)); - var inWS = pos.line == start.line && pos.ch <= firstNonWS && pos.ch; - return Pos(start.line, inWS ? 0 : firstNonWS, start.sticky) - } - return start -} - -// Run a handler that was bound to a key. -function doHandleBinding(cm, bound, dropShift) { - if (typeof bound == "string") { - bound = commands[bound]; - if (!bound) { return false } - } - // Ensure previous input has been read, so that the handler sees a - // consistent view of the document - cm.display.input.ensurePolled(); - var prevShift = cm.display.shift, done = false; - try { - if (cm.isReadOnly()) { cm.state.suppressEdits = true; } - if (dropShift) { cm.display.shift = false; } - done = bound(cm) != Pass; - } finally { - cm.display.shift = prevShift; - cm.state.suppressEdits = false; - } - return done -} - -function lookupKeyForEditor(cm, name, handle) { - for (var i = 0; i < cm.state.keyMaps.length; i++) { - var result = lookupKey(name, cm.state.keyMaps[i], handle, cm); - if (result) { return result } - } - return (cm.options.extraKeys && lookupKey(name, cm.options.extraKeys, handle, cm)) - || lookupKey(name, cm.options.keyMap, handle, cm) -} - -// Note that, despite the name, this function is also used to check -// for bound mouse clicks. 
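
In practice the commands table above is extended from user code: commands are added by assigning to CodeMirror.commands, and keys are bound by command name or by handler. A usage sketch against the CodeMirror 5 public API; duplicateLine is a made-up example command, not part of the stock table:

// A command is just a named, parameter-less editor action.
CodeMirror.commands.duplicateLine = function (cm) {
  var cur = cm.getCursor();
  cm.replaceRange(cm.getLine(cur.line) + "\n", CodeMirror.Pos(cur.line, 0));
};

var editor = CodeMirror(document.body, {
  extraKeys: {
    "Ctrl-Alt-D": "duplicateLine",     // bind by command name
    "Ctrl-X Ctrl-S": function (cm) {   // multi-stroke sequence, direct handler
      console.log("save requested");
    }
  }
});
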
- -var stopSeq = new Delayed; -function dispatchKey(cm, name, e, handle) { - var seq = cm.state.keySeq; - if (seq) { - if (isModifierKey(name)) { return "handled" } - stopSeq.set(50, function () { - if (cm.state.keySeq == seq) { - cm.state.keySeq = null; - cm.display.input.reset(); - } - }); - name = seq + " " + name; - } - var result = lookupKeyForEditor(cm, name, handle); - - if (result == "multi") - { cm.state.keySeq = name; } - if (result == "handled") - { signalLater(cm, "keyHandled", cm, name, e); } - - if (result == "handled" || result == "multi") { - e_preventDefault(e); - restartBlink(cm); - } - - if (seq && !result && /\'$/.test(name)) { - e_preventDefault(e); - return true - } - return !!result -} - -// Handle a key from the keydown event. -function handleKeyBinding(cm, e) { - var name = keyName(e, true); - if (!name) { return false } - - if (e.shiftKey && !cm.state.keySeq) { - // First try to resolve full name (including 'Shift-'). Failing - // that, see if there is a cursor-motion command (starting with - // 'go') bound to the keyname without 'Shift-'. - return dispatchKey(cm, "Shift-" + name, e, function (b) { return doHandleBinding(cm, b, true); }) - || dispatchKey(cm, name, e, function (b) { - if (typeof b == "string" ? /^go[A-Z]/.test(b) : b.motion) - { return doHandleBinding(cm, b) } - }) - } else { - return dispatchKey(cm, name, e, function (b) { return doHandleBinding(cm, b); }) - } -} - -// Handle a key from the keypress event -function handleCharBinding(cm, e, ch) { - return dispatchKey(cm, "'" + ch + "'", e, function (b) { return doHandleBinding(cm, b, true); }) -} - -var lastStoppedKey = null; -function onKeyDown(e) { - var cm = this; - cm.curOp.focus = activeElt(); - if (signalDOMEvent(cm, e)) { return } - // IE does strange things with escape. - if (ie && ie_version < 11 && e.keyCode == 27) { e.returnValue = false; } - var code = e.keyCode; - cm.display.shift = code == 16 || e.shiftKey; - var handled = handleKeyBinding(cm, e); - if (presto) { - lastStoppedKey = handled ? code : null; - // Opera has no cut event... we try to at least catch the key combo - if (!handled && code == 88 && !hasCopyEvent && (mac ? e.metaKey : e.ctrlKey)) - { cm.replaceSelection("", null, "cut"); } - } - - // Turn mouse into crosshair when Alt is held on Mac. - if (code == 18 && !/\bCodeMirror-crosshair\b/.test(cm.display.lineDiv.className)) - { showCrossHair(cm); } -} - -function showCrossHair(cm) { - var lineDiv = cm.display.lineDiv; - addClass(lineDiv, "CodeMirror-crosshair"); - - function up(e) { - if (e.keyCode == 18 || !e.altKey) { - rmClass(lineDiv, "CodeMirror-crosshair"); - off(document, "keyup", up); - off(document, "mouseover", up); - } - } - on(document, "keyup", up); - on(document, "mouseover", up); -} - -function onKeyUp(e) { - if (e.keyCode == 16) { this.doc.sel.shift = false; } - signalDOMEvent(this, e); -} - -function onKeyPress(e) { - var cm = this; - if (eventInWidget(cm.display, e) || signalDOMEvent(cm, e) || e.ctrlKey && !e.altKey || mac && e.metaKey) { return } - var keyCode = e.keyCode, charCode = e.charCode; - if (presto && keyCode == lastStoppedKey) {lastStoppedKey = null; e_preventDefault(e); return} - if ((presto && (!e.which || e.which < 10)) && handleKeyBinding(cm, e)) { return } - var ch = String.fromCharCode(charCode == null ? 
keyCode : charCode); - // Some browsers fire keypress events for backspace - if (ch == "\x08") { return } - if (handleCharBinding(cm, e, ch)) { return } - cm.display.input.onKeyPress(e); -} - -var DOUBLECLICK_DELAY = 400; - -var PastClick = function(time, pos, button) { - this.time = time; - this.pos = pos; - this.button = button; -}; - -PastClick.prototype.compare = function (time, pos, button) { - return this.time + DOUBLECLICK_DELAY > time && - cmp(pos, this.pos) == 0 && button == this.button -}; - -var lastClick; -var lastDoubleClick; -function clickRepeat(pos, button) { - var now = +new Date; - if (lastDoubleClick && lastDoubleClick.compare(now, pos, button)) { - lastClick = lastDoubleClick = null; - return "triple" - } else if (lastClick && lastClick.compare(now, pos, button)) { - lastDoubleClick = new PastClick(now, pos, button); - lastClick = null; - return "double" - } else { - lastClick = new PastClick(now, pos, button); - lastDoubleClick = null; - return "single" - } -} - -// A mouse down can be a single click, double click, triple click, -// start of selection drag, start of text drag, new cursor -// (ctrl-click), rectangle drag (alt-drag), or xwin -// middle-click-paste. Or it might be a click on something we should -// not interfere with, such as a scrollbar or widget. -function onMouseDown(e) { - var cm = this, display = cm.display; - if (signalDOMEvent(cm, e) || display.activeTouch && display.input.supportsTouch()) { return } - display.input.ensurePolled(); - display.shift = e.shiftKey; - - if (eventInWidget(display, e)) { - if (!webkit) { - // Briefly turn off draggability, to allow widgets to do - // normal dragging things. - display.scroller.draggable = false; - setTimeout(function () { return display.scroller.draggable = true; }, 100); - } - return - } - if (clickInGutter(cm, e)) { return } - var pos = posFromMouse(cm, e), button = e_button(e), repeat = pos ? clickRepeat(pos, button) : "single"; - window.focus(); - - // #3261: make sure, that we're not starting a second selection - if (button == 1 && cm.state.selectingText) - { cm.state.selectingText(e); } - - if (pos && handleMappedButton(cm, button, pos, repeat, e)) { return } - - if (button == 1) { - if (pos) { leftButtonDown(cm, pos, repeat, e); } - else if (e_target(e) == display.scroller) { e_preventDefault(e); } - } else if (button == 2) { - if (pos) { extendSelection(cm.doc, pos); } - setTimeout(function () { return display.input.focus(); }, 20); - } else if (button == 3) { - if (captureRightClick) { onContextMenu(cm, e); } - else { delayBlurEvent(cm); } - } -} - -function handleMappedButton(cm, button, pos, repeat, event) { - var name = "Click"; - if (repeat == "double") { name = "Double" + name; } - else if (repeat == "triple") { name = "Triple" + name; } - name = (button == 1 ? "Left" : button == 2 ? "Middle" : "Right") + name; - - return dispatchKey(cm, addModifierNames(name, event), event, function (bound) { - if (typeof bound == "string") { bound = commands[bound]; } - if (!bound) { return false } - var done = false; - try { - if (cm.isReadOnly()) { cm.state.suppressEdits = true; } - done = bound(cm, pos) != Pass; - } finally { - cm.state.suppressEdits = false; - } - return done - }) -} - -function configureMouse(cm, repeat, event) { - var option = cm.getOption("configureMouse"); - var value = option ? option(cm, repeat, event) : {}; - if (value.unit == null) { - var rect = chromeOS ? event.shiftKey && event.metaKey : event.altKey; - value.unit = rect ? "rectangle" : repeat == "single" ? 
"char" : repeat == "double" ? "word" : "line"; - } - if (value.extend == null || cm.doc.extend) { value.extend = cm.doc.extend || event.shiftKey; } - if (value.addNew == null) { value.addNew = mac ? event.metaKey : event.ctrlKey; } - if (value.moveOnDrag == null) { value.moveOnDrag = !(mac ? event.altKey : event.ctrlKey); } - return value -} - -function leftButtonDown(cm, pos, repeat, event) { - if (ie) { setTimeout(bind(ensureFocus, cm), 0); } - else { cm.curOp.focus = activeElt(); } - - var behavior = configureMouse(cm, repeat, event); - - var sel = cm.doc.sel, contained; - if (cm.options.dragDrop && dragAndDrop && !cm.isReadOnly() && - repeat == "single" && (contained = sel.contains(pos)) > -1 && - (cmp((contained = sel.ranges[contained]).from(), pos) < 0 || pos.xRel > 0) && - (cmp(contained.to(), pos) > 0 || pos.xRel < 0)) - { leftButtonStartDrag(cm, event, pos, behavior); } - else - { leftButtonSelect(cm, event, pos, behavior); } -} - -// Start a text drag. When it ends, see if any dragging actually -// happen, and treat as a click if it didn't. -function leftButtonStartDrag(cm, event, pos, behavior) { - var display = cm.display, moved = false; - var dragEnd = operation(cm, function (e) { - if (webkit) { display.scroller.draggable = false; } - cm.state.draggingText = false; - off(document, "mouseup", dragEnd); - off(document, "mousemove", mouseMove); - off(display.scroller, "dragstart", dragStart); - off(display.scroller, "drop", dragEnd); - if (!moved) { - e_preventDefault(e); - if (!behavior.addNew) - { extendSelection(cm.doc, pos, null, null, behavior.extend); } - // Work around unexplainable focus problem in IE9 (#2127) and Chrome (#3081) - if (webkit || ie && ie_version == 9) - { setTimeout(function () {document.body.focus(); display.input.focus();}, 20); } - else - { display.input.focus(); } - } - }); - var mouseMove = function(e2) { - moved = moved || Math.abs(event.clientX - e2.clientX) + Math.abs(event.clientY - e2.clientY) >= 10; - }; - var dragStart = function () { return moved = true; }; - // Let the drag handler handle this. - if (webkit) { display.scroller.draggable = true; } - cm.state.draggingText = dragEnd; - dragEnd.copy = !behavior.moveOnDrag; - // IE's approach to draggable - if (display.scroller.dragDrop) { display.scroller.dragDrop(); } - on(document, "mouseup", dragEnd); - on(document, "mousemove", mouseMove); - on(display.scroller, "dragstart", dragStart); - on(display.scroller, "drop", dragEnd); - - delayBlurEvent(cm); - setTimeout(function () { return display.input.focus(); }, 20); -} - -function rangeForUnit(cm, pos, unit) { - if (unit == "char") { return new Range(pos, pos) } - if (unit == "word") { return cm.findWordAt(pos) } - if (unit == "line") { return new Range(Pos(pos.line, 0), clipPos(cm.doc, Pos(pos.line + 1, 0))) } - var result = unit(cm, pos); - return new Range(result.from, result.to) -} - -// Normal selection, as opposed to text dragging. 
-function leftButtonSelect(cm, event, start, behavior) { - var display = cm.display, doc = cm.doc; - e_preventDefault(event); - - var ourRange, ourIndex, startSel = doc.sel, ranges = startSel.ranges; - if (behavior.addNew && !behavior.extend) { - ourIndex = doc.sel.contains(start); - if (ourIndex > -1) - { ourRange = ranges[ourIndex]; } - else - { ourRange = new Range(start, start); } - } else { - ourRange = doc.sel.primary(); - ourIndex = doc.sel.primIndex; - } - - if (behavior.unit == "rectangle") { - if (!behavior.addNew) { ourRange = new Range(start, start); } - start = posFromMouse(cm, event, true, true); - ourIndex = -1; - } else { - var range$$1 = rangeForUnit(cm, start, behavior.unit); - if (behavior.extend) - { ourRange = extendRange(ourRange, range$$1.anchor, range$$1.head, behavior.extend); } - else - { ourRange = range$$1; } - } - - if (!behavior.addNew) { - ourIndex = 0; - setSelection(doc, new Selection([ourRange], 0), sel_mouse); - startSel = doc.sel; - } else if (ourIndex == -1) { - ourIndex = ranges.length; - setSelection(doc, normalizeSelection(ranges.concat([ourRange]), ourIndex), - {scroll: false, origin: "*mouse"}); - } else if (ranges.length > 1 && ranges[ourIndex].empty() && behavior.unit == "char" && !behavior.extend) { - setSelection(doc, normalizeSelection(ranges.slice(0, ourIndex).concat(ranges.slice(ourIndex + 1)), 0), - {scroll: false, origin: "*mouse"}); - startSel = doc.sel; - } else { - replaceOneSelection(doc, ourIndex, ourRange, sel_mouse); - } - - var lastPos = start; - function extendTo(pos) { - if (cmp(lastPos, pos) == 0) { return } - lastPos = pos; - - if (behavior.unit == "rectangle") { - var ranges = [], tabSize = cm.options.tabSize; - var startCol = countColumn(getLine(doc, start.line).text, start.ch, tabSize); - var posCol = countColumn(getLine(doc, pos.line).text, pos.ch, tabSize); - var left = Math.min(startCol, posCol), right = Math.max(startCol, posCol); - for (var line = Math.min(start.line, pos.line), end = Math.min(cm.lastLine(), Math.max(start.line, pos.line)); - line <= end; line++) { - var text = getLine(doc, line).text, leftPos = findColumn(text, left, tabSize); - if (left == right) - { ranges.push(new Range(Pos(line, leftPos), Pos(line, leftPos))); } - else if (text.length > leftPos) - { ranges.push(new Range(Pos(line, leftPos), Pos(line, findColumn(text, right, tabSize)))); } - } - if (!ranges.length) { ranges.push(new Range(start, start)); } - setSelection(doc, normalizeSelection(startSel.ranges.slice(0, ourIndex).concat(ranges), ourIndex), - {origin: "*mouse", scroll: false}); - cm.scrollIntoView(pos); - } else { - var oldRange = ourRange; - var range$$1 = rangeForUnit(cm, pos, behavior.unit); - var anchor = oldRange.anchor, head; - if (cmp(range$$1.anchor, anchor) > 0) { - head = range$$1.head; - anchor = minPos(oldRange.from(), range$$1.anchor); - } else { - head = range$$1.anchor; - anchor = maxPos(oldRange.to(), range$$1.head); - } - var ranges$1 = startSel.ranges.slice(0); - ranges$1[ourIndex] = bidiSimplify(cm, new Range(clipPos(doc, anchor), head)); - setSelection(doc, normalizeSelection(ranges$1, ourIndex), sel_mouse); - } - } - - var editorSize = display.wrapper.getBoundingClientRect(); - // Used to ensure timeout re-tries don't fire when another extend - // happened in the meantime (clearTimeout isn't reliable -- at - // least on Chrome, the timeouts still happen even when cleared, - // if the clear happens after their scheduled firing time). 
- var counter = 0; - - function extend(e) { - var curCount = ++counter; - var cur = posFromMouse(cm, e, true, behavior.unit == "rectangle"); - if (!cur) { return } - if (cmp(cur, lastPos) != 0) { - cm.curOp.focus = activeElt(); - extendTo(cur); - var visible = visibleLines(display, doc); - if (cur.line >= visible.to || cur.line < visible.from) - { setTimeout(operation(cm, function () {if (counter == curCount) { extend(e); }}), 150); } - } else { - var outside = e.clientY < editorSize.top ? -20 : e.clientY > editorSize.bottom ? 20 : 0; - if (outside) { setTimeout(operation(cm, function () { - if (counter != curCount) { return } - display.scroller.scrollTop += outside; - extend(e); - }), 50); } - } - } - - function done(e) { - cm.state.selectingText = false; - counter = Infinity; - e_preventDefault(e); - display.input.focus(); - off(document, "mousemove", move); - off(document, "mouseup", up); - doc.history.lastSelOrigin = null; - } - - var move = operation(cm, function (e) { - if (!e_button(e)) { done(e); } - else { extend(e); } - }); - var up = operation(cm, done); - cm.state.selectingText = up; - on(document, "mousemove", move); - on(document, "mouseup", up); -} - -// Used when mouse-selecting to adjust the anchor to the proper side -// of a bidi jump depending on the visual position of the head. -function bidiSimplify(cm, range$$1) { - var anchor = range$$1.anchor; - var head = range$$1.head; - var anchorLine = getLine(cm.doc, anchor.line); - if (cmp(anchor, head) == 0 && anchor.sticky == head.sticky) { return range$$1 } - var order = getOrder(anchorLine); - if (!order) { return range$$1 } - var index = getBidiPartAt(order, anchor.ch, anchor.sticky), part = order[index]; - if (part.from != anchor.ch && part.to != anchor.ch) { return range$$1 } - var boundary = index + ((part.from == anchor.ch) == (part.level != 1) ? 0 : 1); - if (boundary == 0 || boundary == order.length) { return range$$1 } - - // Compute the relative visual position of the head compared to the - // anchor (<0 is to the left, >0 to the right) - var leftSide; - if (head.line != anchor.line) { - leftSide = (head.line - anchor.line) * (cm.doc.direction == "ltr" ? 1 : -1) > 0; - } else { - var headIndex = getBidiPartAt(order, head.ch, head.sticky); - var dir = headIndex - index || (head.ch - anchor.ch) * (part.level == 1 ? -1 : 1); - if (headIndex == boundary - 1 || headIndex == boundary) - { leftSide = dir < 0; } - else - { leftSide = dir > 0; } - } - - var usePart = order[boundary + (leftSide ? -1 : 0)]; - var from = leftSide == (usePart.level == 1); - var ch = from ? usePart.from : usePart.to, sticky = from ? "after" : "before"; - return anchor.ch == ch && anchor.sticky == sticky ? range$$1 : new Range(new Pos(anchor.line, ch, sticky), head) -} - - -// Determines whether an event happened in the gutter, and fires the -// handlers for the corresponding event. 
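
gutterEvent ultimately fires a "gutterClick" signal carrying the line number and the clicked gutter's CSS class, which is enough to build the classic breakpoint pattern. A usage sketch; the "breakpoints" gutter and the makeMarker helper are our own, only "gutterClick" and setGutterMarker are CodeMirror API:

var editor = CodeMirror(document.body, {
  lineNumbers: true,
  gutters: ["CodeMirror-linenumbers", "breakpoints"]
});
editor.on("gutterClick", function (cm, line, gutter, event) {
  if (gutter == "breakpoints")
    cm.setGutterMarker(line, "breakpoints", makeMarker());
});
function makeMarker() {
  var marker = document.createElement("div");
  marker.textContent = "●";
  return marker;
}
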
-function gutterEvent(cm, e, type, prevent) { - var mX, mY; - if (e.touches) { - mX = e.touches[0].clientX; - mY = e.touches[0].clientY; - } else { - try { mX = e.clientX; mY = e.clientY; } - catch(e) { return false } - } - if (mX >= Math.floor(cm.display.gutters.getBoundingClientRect().right)) { return false } - if (prevent) { e_preventDefault(e); } - - var display = cm.display; - var lineBox = display.lineDiv.getBoundingClientRect(); - - if (mY > lineBox.bottom || !hasHandler(cm, type)) { return e_defaultPrevented(e) } - mY -= lineBox.top - display.viewOffset; - - for (var i = 0; i < cm.options.gutters.length; ++i) { - var g = display.gutters.childNodes[i]; - if (g && g.getBoundingClientRect().right >= mX) { - var line = lineAtHeight(cm.doc, mY); - var gutter = cm.options.gutters[i]; - signal(cm, type, cm, line, gutter, e); - return e_defaultPrevented(e) - } - } -} - -function clickInGutter(cm, e) { - return gutterEvent(cm, e, "gutterClick", true) -} - -// CONTEXT MENU HANDLING - -// To make the context menu work, we need to briefly unhide the -// textarea (making it as unobtrusive as possible) to let the -// right-click take effect on it. -function onContextMenu(cm, e) { - if (eventInWidget(cm.display, e) || contextMenuInGutter(cm, e)) { return } - if (signalDOMEvent(cm, e, "contextmenu")) { return } - cm.display.input.onContextMenu(e); -} - -function contextMenuInGutter(cm, e) { - if (!hasHandler(cm, "gutterContextMenu")) { return false } - return gutterEvent(cm, e, "gutterContextMenu", false) -} - -function themeChanged(cm) { - cm.display.wrapper.className = cm.display.wrapper.className.replace(/\s*cm-s-\S+/g, "") + - cm.options.theme.replace(/(^|\s)\s*/g, " cm-s-"); - clearCaches(cm); -} - -var Init = {toString: function(){return "CodeMirror.Init"}}; - -var defaults = {}; -var optionHandlers = {}; - -function defineOptions(CodeMirror) { - var optionHandlers = CodeMirror.optionHandlers; - - function option(name, deflt, handle, notOnInit) { - CodeMirror.defaults[name] = deflt; - if (handle) { optionHandlers[name] = - notOnInit ? function (cm, val, old) {if (old != Init) { handle(cm, val, old); }} : handle; } - } - - CodeMirror.defineOption = option; - - // Passed to option handlers when there is no old value. - CodeMirror.Init = Init; - - // These two are, on init, called from the constructor because they - // have to be initialized before the editor can start at all. - option("value", "", function (cm, val) { return cm.setValue(val); }, true); - option("mode", null, function (cm, val) { - cm.doc.modeOption = val; - loadMode(cm); - }, true); - - option("indentUnit", 2, loadMode, true); - option("indentWithTabs", false); - option("smartIndent", true); - option("tabSize", 4, function (cm) { - resetModeState(cm); - clearCaches(cm); - regChange(cm); - }, true); - option("lineSeparator", null, function (cm, val) { - cm.doc.lineSep = val; - if (!val) { return } - var newBreaks = [], lineNo = cm.doc.first; - cm.doc.iter(function (line) { - for (var pos = 0;;) { - var found = line.text.indexOf(val, pos); - if (found == -1) { break } - pos = found + val.length; - newBreaks.push(Pos(lineNo, found)); - } - lineNo++; - }); - for (var i = newBreaks.length - 1; i >= 0; i--) - { replaceRange(cm.doc, val, newBreaks[i], Pos(newBreaks[i].line, newBreaks[i].ch + val.length)); } - }); - option("specialChars", /[\u0000-\u001f\u007f-\u009f\u00ad\u061c\u200b-\u200f\u2028\u2029\ufeff]/g, function (cm, val, old) { - cm.state.specialChars = new RegExp(val.source + (val.test("\t") ? 
"" : "|\t"), "g"); - if (old != Init) { cm.refresh(); } - }); - option("specialCharPlaceholder", defaultSpecialCharPlaceholder, function (cm) { return cm.refresh(); }, true); - option("electricChars", true); - option("inputStyle", mobile ? "contenteditable" : "textarea", function () { - throw new Error("inputStyle can not (yet) be changed in a running editor") // FIXME - }, true); - option("spellcheck", false, function (cm, val) { return cm.getInputField().spellcheck = val; }, true); - option("rtlMoveVisually", !windows); - option("wholeLineUpdateBefore", true); - - option("theme", "default", function (cm) { - themeChanged(cm); - guttersChanged(cm); - }, true); - option("keyMap", "default", function (cm, val, old) { - var next = getKeyMap(val); - var prev = old != Init && getKeyMap(old); - if (prev && prev.detach) { prev.detach(cm, next); } - if (next.attach) { next.attach(cm, prev || null); } - }); - option("extraKeys", null); - option("configureMouse", null); - - option("lineWrapping", false, wrappingChanged, true); - option("gutters", [], function (cm) { - setGuttersForLineNumbers(cm.options); - guttersChanged(cm); - }, true); - option("fixedGutter", true, function (cm, val) { - cm.display.gutters.style.left = val ? compensateForHScroll(cm.display) + "px" : "0"; - cm.refresh(); - }, true); - option("coverGutterNextToScrollbar", false, function (cm) { return updateScrollbars(cm); }, true); - option("scrollbarStyle", "native", function (cm) { - initScrollbars(cm); - updateScrollbars(cm); - cm.display.scrollbars.setScrollTop(cm.doc.scrollTop); - cm.display.scrollbars.setScrollLeft(cm.doc.scrollLeft); - }, true); - option("lineNumbers", false, function (cm) { - setGuttersForLineNumbers(cm.options); - guttersChanged(cm); - }, true); - option("firstLineNumber", 1, guttersChanged, true); - option("lineNumberFormatter", function (integer) { return integer; }, guttersChanged, true); - option("showCursorWhenSelecting", false, updateSelection, true); - - option("resetSelectionOnContextMenu", true); - option("lineWiseCopyCut", true); - option("pasteLinesPerSelection", true); - - option("readOnly", false, function (cm, val) { - if (val == "nocursor") { - onBlur(cm); - cm.display.input.blur(); - } - cm.display.input.readOnlyChanged(val); - }); - option("disableInput", false, function (cm, val) {if (!val) { cm.display.input.reset(); }}, true); - option("dragDrop", true, dragDropChanged); - option("allowDropFileTypes", null); - - option("cursorBlinkRate", 530); - option("cursorScrollMargin", 0); - option("cursorHeight", 1, updateSelection, true); - option("singleCursorHeightPerLine", true, updateSelection, true); - option("workTime", 100); - option("workDelay", 100); - option("flattenSpans", true, resetModeState, true); - option("addModeClass", false, resetModeState, true); - option("pollInterval", 100); - option("undoDepth", 200, function (cm, val) { return cm.doc.history.undoDepth = val; }); - option("historyEventDelay", 1250); - option("viewportMargin", 10, function (cm) { return cm.refresh(); }, true); - option("maxHighlightLength", 10000, resetModeState, true); - option("moveInputWithCursor", true, function (cm, val) { - if (!val) { cm.display.input.resetPosition(); } - }); - - option("tabindex", null, function (cm, val) { return cm.display.input.getField().tabIndex = val || ""; }); - option("autofocus", null); - option("direction", "ltr", function (cm, val) { return cm.doc.setDirection(val); }, true); -} - -function guttersChanged(cm) { - updateGutters(cm); - regChange(cm); - 
alignHorizontally(cm); -} - -function dragDropChanged(cm, value, old) { - var wasOn = old && old != Init; - if (!value != !wasOn) { - var funcs = cm.display.dragFunctions; - var toggle = value ? on : off; - toggle(cm.display.scroller, "dragstart", funcs.start); - toggle(cm.display.scroller, "dragenter", funcs.enter); - toggle(cm.display.scroller, "dragover", funcs.over); - toggle(cm.display.scroller, "dragleave", funcs.leave); - toggle(cm.display.scroller, "drop", funcs.drop); - } -} - -function wrappingChanged(cm) { - if (cm.options.lineWrapping) { - addClass(cm.display.wrapper, "CodeMirror-wrap"); - cm.display.sizer.style.minWidth = ""; - cm.display.sizerWidth = null; - } else { - rmClass(cm.display.wrapper, "CodeMirror-wrap"); - findMaxLine(cm); - } - estimateLineHeights(cm); - regChange(cm); - clearCaches(cm); - setTimeout(function () { return updateScrollbars(cm); }, 100); -} - -// A CodeMirror instance represents an editor. This is the object -// that user code is usually dealing with. - -function CodeMirror$1(place, options) { - var this$1 = this; - - if (!(this instanceof CodeMirror$1)) { return new CodeMirror$1(place, options) } - - this.options = options = options ? copyObj(options) : {}; - // Determine effective options based on given values and defaults. - copyObj(defaults, options, false); - setGuttersForLineNumbers(options); - - var doc = options.value; - if (typeof doc == "string") { doc = new Doc(doc, options.mode, null, options.lineSeparator, options.direction); } - this.doc = doc; - - var input = new CodeMirror$1.inputStyles[options.inputStyle](this); - var display = this.display = new Display(place, doc, input); - display.wrapper.CodeMirror = this; - updateGutters(this); - themeChanged(this); - if (options.lineWrapping) - { this.display.wrapper.className += " CodeMirror-wrap"; } - initScrollbars(this); - - this.state = { - keyMaps: [], // stores maps added by addKeyMap - overlays: [], // highlighting overlays, as added by addOverlay - modeGen: 0, // bumped when mode/overlay changes, used to invalidate highlighting info - overwrite: false, - delayingBlurEvent: false, - focused: false, - suppressEdits: false, // used to disable editing during key handlers when in readOnly mode - pasteIncoming: false, cutIncoming: false, // help recognize paste/cut edits in input.poll - selectingText: false, - draggingText: false, - highlight: new Delayed(), // stores highlight worker timeout - keySeq: null, // Unfinished key sequence - specialChars: null - }; - - if (options.autofocus && !mobile) { display.input.focus(); } - - // Override magic textarea content restore that IE sometimes does - // on our hidden textarea on reload - if (ie && ie_version < 11) { setTimeout(function () { return this$1.display.input.reset(true); }, 20); } - - registerEventHandlers(this); - ensureGlobalHandlers(); - - startOperation(this); - this.curOp.forceUpdate = true; - attachDoc(this, doc); - - if ((options.autofocus && !mobile) || this.hasFocus()) - { setTimeout(bind(onFocus, this), 20); } - else - { onBlur(this); } - - for (var opt in optionHandlers) { if (optionHandlers.hasOwnProperty(opt)) - { optionHandlers[opt](this$1, options[opt], Init); } } - maybeUpdateLineNumberWidth(this); - if (options.finishInit) { options.finishInit(this); } - for (var i = 0; i < initHooks.length; ++i) { initHooks[i](this$1); } - endOperation(this); - // Suppress optimizelegibility in Webkit, since it breaks text - // measuring on line wrapping boundaries. 
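
The constructor above is rarely invoked with new; user code typically goes through the callable form or CodeMirror.fromTextArea, both of which funnel into it. A usage sketch; the element selectors and option values are illustrative:

var editor = CodeMirror(document.getElementById("editor-holder"), {
  value: "function hello() {}\n",
  mode: "javascript",
  lineNumbers: true
});
var fromTa = CodeMirror.fromTextArea(document.querySelector("textarea.code"), {
  lineWrapping: true
});
fromTa.save(); // copies the editor content back into the textarea
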
- if (webkit && options.lineWrapping && - getComputedStyle(display.lineDiv).textRendering == "optimizelegibility") - { display.lineDiv.style.textRendering = "auto"; } -} - -// The default configuration options. -CodeMirror$1.defaults = defaults; -// Functions to run when options are changed. -CodeMirror$1.optionHandlers = optionHandlers; - -// Attach the necessary event handlers when initializing the editor -function registerEventHandlers(cm) { - var d = cm.display; - on(d.scroller, "mousedown", operation(cm, onMouseDown)); - // Older IE's will not fire a second mousedown for a double click - if (ie && ie_version < 11) - { on(d.scroller, "dblclick", operation(cm, function (e) { - if (signalDOMEvent(cm, e)) { return } - var pos = posFromMouse(cm, e); - if (!pos || clickInGutter(cm, e) || eventInWidget(cm.display, e)) { return } - e_preventDefault(e); - var word = cm.findWordAt(pos); - extendSelection(cm.doc, word.anchor, word.head); - })); } - else - { on(d.scroller, "dblclick", function (e) { return signalDOMEvent(cm, e) || e_preventDefault(e); }); } - // Some browsers fire contextmenu *after* opening the menu, at - // which point we can't mess with it anymore. Context menu is - // handled in onMouseDown for these browsers. - if (!captureRightClick) { on(d.scroller, "contextmenu", function (e) { return onContextMenu(cm, e); }); } - - // Used to suppress mouse event handling when a touch happens - var touchFinished, prevTouch = {end: 0}; - function finishTouch() { - if (d.activeTouch) { - touchFinished = setTimeout(function () { return d.activeTouch = null; }, 1000); - prevTouch = d.activeTouch; - prevTouch.end = +new Date; - } - } - function isMouseLikeTouchEvent(e) { - if (e.touches.length != 1) { return false } - var touch = e.touches[0]; - return touch.radiusX <= 1 && touch.radiusY <= 1 - } - function farAway(touch, other) { - if (other.left == null) { return true } - var dx = other.left - touch.left, dy = other.top - touch.top; - return dx * dx + dy * dy > 20 * 20 - } - on(d.scroller, "touchstart", function (e) { - if (!signalDOMEvent(cm, e) && !isMouseLikeTouchEvent(e) && !clickInGutter(cm, e)) { - d.input.ensurePolled(); - clearTimeout(touchFinished); - var now = +new Date; - d.activeTouch = {start: now, moved: false, - prev: now - prevTouch.end <= 300 ? prevTouch : null}; - if (e.touches.length == 1) { - d.activeTouch.left = e.touches[0].pageX; - d.activeTouch.top = e.touches[0].pageY; - } - } - }); - on(d.scroller, "touchmove", function () { - if (d.activeTouch) { d.activeTouch.moved = true; } - }); - on(d.scroller, "touchend", function (e) { - var touch = d.activeTouch; - if (touch && !eventInWidget(d, e) && touch.left != null && - !touch.moved && new Date - touch.start < 300) { - var pos = cm.coordsChar(d.activeTouch, "page"), range; - if (!touch.prev || farAway(touch, touch.prev)) // Single tap - { range = new Range(pos, pos); } - else if (!touch.prev.prev || farAway(touch, touch.prev.prev)) // Double tap - { range = cm.findWordAt(pos); } - else // Triple tap - { range = new Range(Pos(pos.line, 0), clipPos(cm.doc, Pos(pos.line + 1, 0))); } - cm.setSelection(range.anchor, range.head); - cm.focus(); - e_preventDefault(e); - } - finishTouch(); - }); - on(d.scroller, "touchcancel", finishTouch); - - // Sync scrolling between fake scrollbars and real scrollable - // area, ensure viewport is updated when scrolling. 
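
registerEventHandlers re-fires most raw DOM events on the editor instance through signalDOMEvent, which also lets a handler veto CodeMirror's own handling by setting codemirrorIgnore on the event. A usage sketch, assuming an existing editor instance:

editor.on("keydown", function (cm, e) {
  if (e.keyCode == 27) {         // take Escape before keymap dispatch sees it
    e.codemirrorIgnore = true;   // checked by signalDOMEvent
    e.preventDefault();
  }
});
editor.on("dblclick", function (cm, e) {
  console.log("double click at",
    cm.coordsChar({left: e.clientX, top: e.clientY}, "window"));
});
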
- on(d.scroller, "scroll", function () { - if (d.scroller.clientHeight) { - updateScrollTop(cm, d.scroller.scrollTop); - setScrollLeft(cm, d.scroller.scrollLeft, true); - signal(cm, "scroll", cm); - } - }); - - // Listen to wheel events in order to try and update the viewport on time. - on(d.scroller, "mousewheel", function (e) { return onScrollWheel(cm, e); }); - on(d.scroller, "DOMMouseScroll", function (e) { return onScrollWheel(cm, e); }); - - // Prevent wrapper from ever scrolling - on(d.wrapper, "scroll", function () { return d.wrapper.scrollTop = d.wrapper.scrollLeft = 0; }); - - d.dragFunctions = { - enter: function (e) {if (!signalDOMEvent(cm, e)) { e_stop(e); }}, - over: function (e) {if (!signalDOMEvent(cm, e)) { onDragOver(cm, e); e_stop(e); }}, - start: function (e) { return onDragStart(cm, e); }, - drop: operation(cm, onDrop), - leave: function (e) {if (!signalDOMEvent(cm, e)) { clearDragCursor(cm); }} - }; - - var inp = d.input.getField(); - on(inp, "keyup", function (e) { return onKeyUp.call(cm, e); }); - on(inp, "keydown", operation(cm, onKeyDown)); - on(inp, "keypress", operation(cm, onKeyPress)); - on(inp, "focus", function (e) { return onFocus(cm, e); }); - on(inp, "blur", function (e) { return onBlur(cm, e); }); -} - -var initHooks = []; -CodeMirror$1.defineInitHook = function (f) { return initHooks.push(f); }; - -// Indent the given line. The how parameter can be "smart", -// "add"/null, "subtract", or "prev". When aggressive is false -// (typically set to true for forced single-line indents), empty -// lines are not indented, and places where the mode returns Pass -// are left alone. -function indentLine(cm, n, how, aggressive) { - var doc = cm.doc, state; - if (how == null) { how = "add"; } - if (how == "smart") { - // Fall back to "prev" when the mode doesn't have an indentation - // method. - if (!doc.mode.indent) { how = "prev"; } - else { state = getContextBefore(cm, n).state; } - } - - var tabSize = cm.options.tabSize; - var line = getLine(doc, n), curSpace = countColumn(line.text, null, tabSize); - if (line.stateAfter) { line.stateAfter = null; } - var curSpaceString = line.text.match(/^\s*/)[0], indentation; - if (!aggressive && !/\S/.test(line.text)) { - indentation = 0; - how = "not"; - } else if (how == "smart") { - indentation = doc.mode.indent(state, line.text.slice(curSpaceString.length), line.text); - if (indentation == Pass || indentation > 150) { - if (!aggressive) { return } - how = "prev"; - } - } - if (how == "prev") { - if (n > doc.first) { indentation = countColumn(getLine(doc, n-1).text, null, tabSize); } - else { indentation = 0; } - } else if (how == "add") { - indentation = curSpace + cm.options.indentUnit; - } else if (how == "subtract") { - indentation = curSpace - cm.options.indentUnit; - } else if (typeof how == "number") { - indentation = curSpace + how; - } - indentation = Math.max(0, indentation); - - var indentString = "", pos = 0; - if (cm.options.indentWithTabs) - { for (var i = Math.floor(indentation / tabSize); i; --i) {pos += tabSize; indentString += "\t";} } - if (pos < indentation) { indentString += spaceStr(indentation - pos); } - - if (indentString != curSpaceString) { - replaceRange(doc, indentString, Pos(n, 0), Pos(n, curSpaceString.length), "+input"); - line.stateAfter = null; - return true - } else { - // Ensure that, if the cursor was in the whitespace at the start - // of the line, it is moved to the end of that space. 
- for (var i$1 = 0; i$1 < doc.sel.ranges.length; i$1++) { - var range = doc.sel.ranges[i$1]; - if (range.head.line == n && range.head.ch < curSpaceString.length) { - var pos$1 = Pos(n, curSpaceString.length); - replaceOneSelection(doc, i$1, new Range(pos$1, pos$1)); - break - } - } - } -} - -// This will be set to a {lineWise: bool, text: [string]} object, so -// that, when pasting, we know what kind of selections the copied -// text was made out of. -var lastCopied = null; - -function setLastCopied(newLastCopied) { - lastCopied = newLastCopied; -} - -function applyTextInput(cm, inserted, deleted, sel, origin) { - var doc = cm.doc; - cm.display.shift = false; - if (!sel) { sel = doc.sel; } - - var paste = cm.state.pasteIncoming || origin == "paste"; - var textLines = splitLinesAuto(inserted), multiPaste = null; - // When pasing N lines into N selections, insert one line per selection - if (paste && sel.ranges.length > 1) { - if (lastCopied && lastCopied.text.join("\n") == inserted) { - if (sel.ranges.length % lastCopied.text.length == 0) { - multiPaste = []; - for (var i = 0; i < lastCopied.text.length; i++) - { multiPaste.push(doc.splitLines(lastCopied.text[i])); } - } - } else if (textLines.length == sel.ranges.length && cm.options.pasteLinesPerSelection) { - multiPaste = map(textLines, function (l) { return [l]; }); - } - } - - var updateInput; - // Normal behavior is to insert the new text into every selection - for (var i$1 = sel.ranges.length - 1; i$1 >= 0; i$1--) { - var range$$1 = sel.ranges[i$1]; - var from = range$$1.from(), to = range$$1.to(); - if (range$$1.empty()) { - if (deleted && deleted > 0) // Handle deletion - { from = Pos(from.line, from.ch - deleted); } - else if (cm.state.overwrite && !paste) // Handle overwrite - { to = Pos(to.line, Math.min(getLine(doc, to.line).text.length, to.ch + lst(textLines).length)); } - else if (lastCopied && lastCopied.lineWise && lastCopied.text.join("\n") == inserted) - { from = to = Pos(from.line, 0); } - } - updateInput = cm.curOp.updateInput; - var changeEvent = {from: from, to: to, text: multiPaste ? multiPaste[i$1 % multiPaste.length] : textLines, - origin: origin || (paste ? "paste" : cm.state.cutIncoming ? 
"cut" : "+input")}; - makeChange(cm.doc, changeEvent); - signalLater(cm, "inputRead", cm, changeEvent); - } - if (inserted && !paste) - { triggerElectric(cm, inserted); } - - ensureCursorVisible(cm); - cm.curOp.updateInput = updateInput; - cm.curOp.typing = true; - cm.state.pasteIncoming = cm.state.cutIncoming = false; -} - -function handlePaste(e, cm) { - var pasted = e.clipboardData && e.clipboardData.getData("Text"); - if (pasted) { - e.preventDefault(); - if (!cm.isReadOnly() && !cm.options.disableInput) - { runInOp(cm, function () { return applyTextInput(cm, pasted, 0, null, "paste"); }); } - return true - } -} - -function triggerElectric(cm, inserted) { - // When an 'electric' character is inserted, immediately trigger a reindent - if (!cm.options.electricChars || !cm.options.smartIndent) { return } - var sel = cm.doc.sel; - - for (var i = sel.ranges.length - 1; i >= 0; i--) { - var range$$1 = sel.ranges[i]; - if (range$$1.head.ch > 100 || (i && sel.ranges[i - 1].head.line == range$$1.head.line)) { continue } - var mode = cm.getModeAt(range$$1.head); - var indented = false; - if (mode.electricChars) { - for (var j = 0; j < mode.electricChars.length; j++) - { if (inserted.indexOf(mode.electricChars.charAt(j)) > -1) { - indented = indentLine(cm, range$$1.head.line, "smart"); - break - } } - } else if (mode.electricInput) { - if (mode.electricInput.test(getLine(cm.doc, range$$1.head.line).text.slice(0, range$$1.head.ch))) - { indented = indentLine(cm, range$$1.head.line, "smart"); } - } - if (indented) { signalLater(cm, "electricInput", cm, range$$1.head.line); } - } -} - -function copyableRanges(cm) { - var text = [], ranges = []; - for (var i = 0; i < cm.doc.sel.ranges.length; i++) { - var line = cm.doc.sel.ranges[i].head.line; - var lineRange = {anchor: Pos(line, 0), head: Pos(line + 1, 0)}; - ranges.push(lineRange); - text.push(cm.getRange(lineRange.anchor, lineRange.head)); - } - return {text: text, ranges: ranges} -} - -function disableBrowserMagic(field, spellcheck) { - field.setAttribute("autocorrect", "off"); - field.setAttribute("autocapitalize", "off"); - field.setAttribute("spellcheck", !!spellcheck); -} - -function hiddenTextarea() { - var te = elt("textarea", null, null, "position: absolute; bottom: -1em; padding: 0; width: 1px; height: 1em; outline: none"); - var div = elt("div", [te], null, "overflow: hidden; position: relative; width: 3px; height: 0px;"); - // The textarea is kept positioned near the cursor to prevent the - // fact that it'll be scrolled into view on input from scrolling - // our fake cursor out of view. On webkit, when wrap=off, paste is - // very slow. So make the area wide instead. - if (webkit) { te.style.width = "1000px"; } - else { te.setAttribute("wrap", "off"); } - // If border: 0; -- iOS fails to open keyboard (issue #1287) - if (ios) { te.style.border = "1px solid black"; } - disableBrowserMagic(te); - return div -} - -// The publicly visible API. Note that methodOp(f) means -// 'wrap f in an operation, performed on its `this` parameter'. - -// This is not the complete set of editor methods. Most of the -// methods defined on the Doc type are also injected into -// CodeMirror.prototype, for backwards compatibility and -// convenience. 
- -var addEditorMethods = function(CodeMirror) { - var optionHandlers = CodeMirror.optionHandlers; - - var helpers = CodeMirror.helpers = {}; - - CodeMirror.prototype = { - constructor: CodeMirror, - focus: function(){window.focus(); this.display.input.focus();}, - - setOption: function(option, value) { - var options = this.options, old = options[option]; - if (options[option] == value && option != "mode") { return } - options[option] = value; - if (optionHandlers.hasOwnProperty(option)) - { operation(this, optionHandlers[option])(this, value, old); } - signal(this, "optionChange", this, option); - }, - - getOption: function(option) {return this.options[option]}, - getDoc: function() {return this.doc}, - - addKeyMap: function(map$$1, bottom) { - this.state.keyMaps[bottom ? "push" : "unshift"](getKeyMap(map$$1)); - }, - removeKeyMap: function(map$$1) { - var maps = this.state.keyMaps; - for (var i = 0; i < maps.length; ++i) - { if (maps[i] == map$$1 || maps[i].name == map$$1) { - maps.splice(i, 1); - return true - } } - }, - - addOverlay: methodOp(function(spec, options) { - var mode = spec.token ? spec : CodeMirror.getMode(this.options, spec); - if (mode.startState) { throw new Error("Overlays may not be stateful.") } - insertSorted(this.state.overlays, - {mode: mode, modeSpec: spec, opaque: options && options.opaque, - priority: (options && options.priority) || 0}, - function (overlay) { return overlay.priority; }); - this.state.modeGen++; - regChange(this); - }), - removeOverlay: methodOp(function(spec) { - var this$1 = this; - - var overlays = this.state.overlays; - for (var i = 0; i < overlays.length; ++i) { - var cur = overlays[i].modeSpec; - if (cur == spec || typeof spec == "string" && cur.name == spec) { - overlays.splice(i, 1); - this$1.state.modeGen++; - regChange(this$1); - return - } - } - }), - - indentLine: methodOp(function(n, dir, aggressive) { - if (typeof dir != "string" && typeof dir != "number") { - if (dir == null) { dir = this.options.smartIndent ? "smart" : "prev"; } - else { dir = dir ? "add" : "subtract"; } - } - if (isLine(this.doc, n)) { indentLine(this, n, dir, aggressive); } - }), - indentSelection: methodOp(function(how) { - var this$1 = this; - - var ranges = this.doc.sel.ranges, end = -1; - for (var i = 0; i < ranges.length; i++) { - var range$$1 = ranges[i]; - if (!range$$1.empty()) { - var from = range$$1.from(), to = range$$1.to(); - var start = Math.max(end, from.line); - end = Math.min(this$1.lastLine(), to.line - (to.ch ? 0 : 1)) + 1; - for (var j = start; j < end; ++j) - { indentLine(this$1, j, how); } - var newRanges = this$1.doc.sel.ranges; - if (from.ch == 0 && ranges.length == newRanges.length && newRanges[i].from().ch > 0) - { replaceOneSelection(this$1.doc, i, new Range(from, newRanges[i].to()), sel_dontScroll); } - } else if (range$$1.head.line > end) { - indentLine(this$1, range$$1.head.line, how, true); - end = range$$1.head.line; - if (i == this$1.doc.sel.primIndex) { ensureCursorVisible(this$1); } - } - } - }), - - // Fetch the parser token for a given character. Useful for hacks - // that want to inspect the mode state (say, for completion). 
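
A usage sketch for the instance methods defined in this block (setOption, addKeyMap, addOverlay, indentSelection), again assuming an existing editor instance; the TODO-highlighting overlay is illustrative:

editor.setOption("lineWrapping", true);
editor.addKeyMap({name: "demo", "Ctrl-Space": function (cm) { console.log("popup here"); }});
// A stateless overlay mode: token() must always advance the stream,
// and the returned string becomes the token's style class.
editor.addOverlay({
  token: function (stream) {
    if (stream.match("TODO")) return "error";
    stream.next();
    return null;
  }
});
editor.indentSelection("add");
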
- getTokenAt: function(pos, precise) { - return takeToken(this, pos, precise) - }, - - getLineTokens: function(line, precise) { - return takeToken(this, Pos(line), precise, true) - }, - - getTokenTypeAt: function(pos) { - pos = clipPos(this.doc, pos); - var styles = getLineStyles(this, getLine(this.doc, pos.line)); - var before = 0, after = (styles.length - 1) / 2, ch = pos.ch; - var type; - if (ch == 0) { type = styles[2]; } - else { for (;;) { - var mid = (before + after) >> 1; - if ((mid ? styles[mid * 2 - 1] : 0) >= ch) { after = mid; } - else if (styles[mid * 2 + 1] < ch) { before = mid + 1; } - else { type = styles[mid * 2 + 2]; break } - } } - var cut = type ? type.indexOf("overlay ") : -1; - return cut < 0 ? type : cut == 0 ? null : type.slice(0, cut - 1) - }, - - getModeAt: function(pos) { - var mode = this.doc.mode; - if (!mode.innerMode) { return mode } - return CodeMirror.innerMode(mode, this.getTokenAt(pos).state).mode - }, - - getHelper: function(pos, type) { - return this.getHelpers(pos, type)[0] - }, - - getHelpers: function(pos, type) { - var this$1 = this; - - var found = []; - if (!helpers.hasOwnProperty(type)) { return found } - var help = helpers[type], mode = this.getModeAt(pos); - if (typeof mode[type] == "string") { - if (help[mode[type]]) { found.push(help[mode[type]]); } - } else if (mode[type]) { - for (var i = 0; i < mode[type].length; i++) { - var val = help[mode[type][i]]; - if (val) { found.push(val); } - } - } else if (mode.helperType && help[mode.helperType]) { - found.push(help[mode.helperType]); - } else if (help[mode.name]) { - found.push(help[mode.name]); - } - for (var i$1 = 0; i$1 < help._global.length; i$1++) { - var cur = help._global[i$1]; - if (cur.pred(mode, this$1) && indexOf(found, cur.val) == -1) - { found.push(cur.val); } - } - return found - }, - - getStateAfter: function(line, precise) { - var doc = this.doc; - line = clipLine(doc, line == null ? doc.first + doc.size - 1: line); - return getContextBefore(this, line + 1, precise).state - }, - - cursorCoords: function(start, mode) { - var pos, range$$1 = this.doc.sel.primary(); - if (start == null) { pos = range$$1.head; } - else if (typeof start == "object") { pos = clipPos(this.doc, start); } - else { pos = start ? range$$1.from() : range$$1.to(); } - return cursorCoords(this, pos, mode || "page") - }, - - charCoords: function(pos, mode) { - return charCoords(this, clipPos(this.doc, pos), mode || "page") - }, - - coordsChar: function(coords, mode) { - coords = fromCoordSystem(this, coords, mode || "page"); - return coordsChar(this, coords.left, coords.top) - }, - - lineAtHeight: function(height, mode) { - height = fromCoordSystem(this, {top: height, left: 0}, mode || "page").top; - return lineAtHeight(this.doc, height + this.display.viewOffset) - }, - heightAtLine: function(line, mode, includeWidgets) { - var end = false, lineObj; - if (typeof line == "number") { - var last = this.doc.first + this.doc.size - 1; - if (line < this.doc.first) { line = this.doc.first; } - else if (line > last) { line = last; end = true; } - lineObj = getLine(this.doc, line); - } else { - lineObj = line; - } - return intoCoordSystem(this, lineObj, {top: 0, left: 0}, mode || "page", includeWidgets || end).top + - (end ? 
this.doc.height - heightAtLine(lineObj) : 0) - }, - - defaultTextHeight: function() { return textHeight(this.display) }, - defaultCharWidth: function() { return charWidth(this.display) }, - - getViewport: function() { return {from: this.display.viewFrom, to: this.display.viewTo}}, - - addWidget: function(pos, node, scroll, vert, horiz) { - var display = this.display; - pos = cursorCoords(this, clipPos(this.doc, pos)); - var top = pos.bottom, left = pos.left; - node.style.position = "absolute"; - node.setAttribute("cm-ignore-events", "true"); - this.display.input.setUneditable(node); - display.sizer.appendChild(node); - if (vert == "over") { - top = pos.top; - } else if (vert == "above" || vert == "near") { - var vspace = Math.max(display.wrapper.clientHeight, this.doc.height), - hspace = Math.max(display.sizer.clientWidth, display.lineSpace.clientWidth); - // Default to positioning above (if specified and possible); otherwise default to positioning below - if ((vert == 'above' || pos.bottom + node.offsetHeight > vspace) && pos.top > node.offsetHeight) - { top = pos.top - node.offsetHeight; } - else if (pos.bottom + node.offsetHeight <= vspace) - { top = pos.bottom; } - if (left + node.offsetWidth > hspace) - { left = hspace - node.offsetWidth; } - } - node.style.top = top + "px"; - node.style.left = node.style.right = ""; - if (horiz == "right") { - left = display.sizer.clientWidth - node.offsetWidth; - node.style.right = "0px"; - } else { - if (horiz == "left") { left = 0; } - else if (horiz == "middle") { left = (display.sizer.clientWidth - node.offsetWidth) / 2; } - node.style.left = left + "px"; - } - if (scroll) - { scrollIntoView(this, {left: left, top: top, right: left + node.offsetWidth, bottom: top + node.offsetHeight}); } - }, - - triggerOnKeyDown: methodOp(onKeyDown), - triggerOnKeyPress: methodOp(onKeyPress), - triggerOnKeyUp: onKeyUp, - triggerOnMouseDown: methodOp(onMouseDown), - - execCommand: function(cmd) { - if (commands.hasOwnProperty(cmd)) - { return commands[cmd].call(null, this) } - }, - - triggerElectric: methodOp(function(text) { triggerElectric(this, text); }), - - findPosH: function(from, amount, unit, visually) { - var this$1 = this; - - var dir = 1; - if (amount < 0) { dir = -1; amount = -amount; } - var cur = clipPos(this.doc, from); - for (var i = 0; i < amount; ++i) { - cur = findPosH(this$1.doc, cur, dir, unit, visually); - if (cur.hitSide) { break } - } - return cur - }, - - moveH: methodOp(function(dir, unit) { - var this$1 = this; - - this.extendSelectionsBy(function (range$$1) { - if (this$1.display.shift || this$1.doc.extend || range$$1.empty()) - { return findPosH(this$1.doc, range$$1.head, dir, unit, this$1.options.rtlMoveVisually) } - else - { return dir < 0 ? range$$1.from() : range$$1.to() } - }, sel_move); - }), - - deleteH: methodOp(function(dir, unit) { - var sel = this.doc.sel, doc = this.doc; - if (sel.somethingSelected()) - { doc.replaceSelection("", null, "+delete"); } - else - { deleteNearSelection(this, function (range$$1) { - var other = findPosH(doc, range$$1.head, dir, unit, false); - return dir < 0 ? 
{from: other, to: range$$1.head} : {from: range$$1.head, to: other} - }); } - }), - - findPosV: function(from, amount, unit, goalColumn) { - var this$1 = this; - - var dir = 1, x = goalColumn; - if (amount < 0) { dir = -1; amount = -amount; } - var cur = clipPos(this.doc, from); - for (var i = 0; i < amount; ++i) { - var coords = cursorCoords(this$1, cur, "div"); - if (x == null) { x = coords.left; } - else { coords.left = x; } - cur = findPosV(this$1, coords, dir, unit); - if (cur.hitSide) { break } - } - return cur - }, - - moveV: methodOp(function(dir, unit) { - var this$1 = this; - - var doc = this.doc, goals = []; - var collapse = !this.display.shift && !doc.extend && doc.sel.somethingSelected(); - doc.extendSelectionsBy(function (range$$1) { - if (collapse) - { return dir < 0 ? range$$1.from() : range$$1.to() } - var headPos = cursorCoords(this$1, range$$1.head, "div"); - if (range$$1.goalColumn != null) { headPos.left = range$$1.goalColumn; } - goals.push(headPos.left); - var pos = findPosV(this$1, headPos, dir, unit); - if (unit == "page" && range$$1 == doc.sel.primary()) - { addToScrollTop(this$1, charCoords(this$1, pos, "div").top - headPos.top); } - return pos - }, sel_move); - if (goals.length) { for (var i = 0; i < doc.sel.ranges.length; i++) - { doc.sel.ranges[i].goalColumn = goals[i]; } } - }), - - // Find the word at the given position (as returned by coordsChar). - findWordAt: function(pos) { - var doc = this.doc, line = getLine(doc, pos.line).text; - var start = pos.ch, end = pos.ch; - if (line) { - var helper = this.getHelper(pos, "wordChars"); - if ((pos.sticky == "before" || end == line.length) && start) { --start; } else { ++end; } - var startChar = line.charAt(start); - var check = isWordChar(startChar, helper) - ? function (ch) { return isWordChar(ch, helper); } - : /\s/.test(startChar) ? 
function (ch) { return /\s/.test(ch); } - : function (ch) { return (!/\s/.test(ch) && !isWordChar(ch)); }; - while (start > 0 && check(line.charAt(start - 1))) { --start; } - while (end < line.length && check(line.charAt(end))) { ++end; } - } - return new Range(Pos(pos.line, start), Pos(pos.line, end)) - }, - - toggleOverwrite: function(value) { - if (value != null && value == this.state.overwrite) { return } - if (this.state.overwrite = !this.state.overwrite) - { addClass(this.display.cursorDiv, "CodeMirror-overwrite"); } - else - { rmClass(this.display.cursorDiv, "CodeMirror-overwrite"); } - - signal(this, "overwriteToggle", this, this.state.overwrite); - }, - hasFocus: function() { return this.display.input.getField() == activeElt() }, - isReadOnly: function() { return !!(this.options.readOnly || this.doc.cantEdit) }, - - scrollTo: methodOp(function (x, y) { scrollToCoords(this, x, y); }), - getScrollInfo: function() { - var scroller = this.display.scroller; - return {left: scroller.scrollLeft, top: scroller.scrollTop, - height: scroller.scrollHeight - scrollGap(this) - this.display.barHeight, - width: scroller.scrollWidth - scrollGap(this) - this.display.barWidth, - clientHeight: displayHeight(this), clientWidth: displayWidth(this)} - }, - - scrollIntoView: methodOp(function(range$$1, margin) { - if (range$$1 == null) { - range$$1 = {from: this.doc.sel.primary().head, to: null}; - if (margin == null) { margin = this.options.cursorScrollMargin; } - } else if (typeof range$$1 == "number") { - range$$1 = {from: Pos(range$$1, 0), to: null}; - } else if (range$$1.from == null) { - range$$1 = {from: range$$1, to: null}; - } - if (!range$$1.to) { range$$1.to = range$$1.from; } - range$$1.margin = margin || 0; - - if (range$$1.from.line != null) { - scrollToRange(this, range$$1); - } else { - scrollToCoordsRange(this, range$$1.from, range$$1.to, range$$1.margin); - } - }), - - setSize: methodOp(function(width, height) { - var this$1 = this; - - var interpret = function (val) { return typeof val == "number" || /^\d+$/.test(String(val)) ? 
val + "px" : val; }; - if (width != null) { this.display.wrapper.style.width = interpret(width); } - if (height != null) { this.display.wrapper.style.height = interpret(height); } - if (this.options.lineWrapping) { clearLineMeasurementCache(this); } - var lineNo$$1 = this.display.viewFrom; - this.doc.iter(lineNo$$1, this.display.viewTo, function (line) { - if (line.widgets) { for (var i = 0; i < line.widgets.length; i++) - { if (line.widgets[i].noHScroll) { regLineChange(this$1, lineNo$$1, "widget"); break } } } - ++lineNo$$1; - }); - this.curOp.forceUpdate = true; - signal(this, "refresh", this); - }), - - operation: function(f){return runInOp(this, f)}, - startOperation: function(){return startOperation(this)}, - endOperation: function(){return endOperation(this)}, - - refresh: methodOp(function() { - var oldHeight = this.display.cachedTextHeight; - regChange(this); - this.curOp.forceUpdate = true; - clearCaches(this); - scrollToCoords(this, this.doc.scrollLeft, this.doc.scrollTop); - updateGutterSpace(this); - if (oldHeight == null || Math.abs(oldHeight - textHeight(this.display)) > .5) - { estimateLineHeights(this); } - signal(this, "refresh", this); - }), - - swapDoc: methodOp(function(doc) { - var old = this.doc; - old.cm = null; - attachDoc(this, doc); - clearCaches(this); - this.display.input.reset(); - scrollToCoords(this, doc.scrollLeft, doc.scrollTop); - this.curOp.forceScroll = true; - signalLater(this, "swapDoc", this, old); - return old - }), - - getInputField: function(){return this.display.input.getField()}, - getWrapperElement: function(){return this.display.wrapper}, - getScrollerElement: function(){return this.display.scroller}, - getGutterElement: function(){return this.display.gutters} - }; - eventMixin(CodeMirror); - - CodeMirror.registerHelper = function(type, name, value) { - if (!helpers.hasOwnProperty(type)) { helpers[type] = CodeMirror[type] = {_global: []}; } - helpers[type][name] = value; - }; - CodeMirror.registerGlobalHelper = function(type, name, predicate, value) { - CodeMirror.registerHelper(type, name, value); - helpers[type]._global.push({pred: predicate, val: value}); - }; -}; - -// Used for horizontal relative motion. Dir is -1 or 1 (left or -// right), unit can be "char", "column" (like char, but doesn't -// cross line boundaries), "word" (across next word), or "group" (to -// the start of next group of word or non-word-non-whitespace -// chars). The visually param controls whether, in right-to-left -// text, direction 1 means to move towards the next index in the -// string, or towards the character to the right of the current -// position. The resulting position will have a hitSide=true -// property if it reached the end of the document. 
-function findPosH(doc, pos, dir, unit, visually) { - var oldPos = pos; - var origDir = dir; - var lineObj = getLine(doc, pos.line); - function findNextLine() { - var l = pos.line + dir; - if (l < doc.first || l >= doc.first + doc.size) { return false } - pos = new Pos(l, pos.ch, pos.sticky); - return lineObj = getLine(doc, l) - } - function moveOnce(boundToLine) { - var next; - if (visually) { - next = moveVisually(doc.cm, lineObj, pos, dir); - } else { - next = moveLogically(lineObj, pos, dir); - } - if (next == null) { - if (!boundToLine && findNextLine()) - { pos = endOfLine(visually, doc.cm, lineObj, pos.line, dir); } - else - { return false } - } else { - pos = next; - } - return true - } - - if (unit == "char") { - moveOnce(); - } else if (unit == "column") { - moveOnce(true); - } else if (unit == "word" || unit == "group") { - var sawType = null, group = unit == "group"; - var helper = doc.cm && doc.cm.getHelper(pos, "wordChars"); - for (var first = true;; first = false) { - if (dir < 0 && !moveOnce(!first)) { break } - var cur = lineObj.text.charAt(pos.ch) || "\n"; - var type = isWordChar(cur, helper) ? "w" - : group && cur == "\n" ? "n" - : !group || /\s/.test(cur) ? null - : "p"; - if (group && !first && !type) { type = "s"; } - if (sawType && sawType != type) { - if (dir < 0) {dir = 1; moveOnce(); pos.sticky = "after";} - break - } - - if (type) { sawType = type; } - if (dir > 0 && !moveOnce(!first)) { break } - } - } - var result = skipAtomic(doc, pos, oldPos, origDir, true); - if (equalCursorPos(oldPos, result)) { result.hitSide = true; } - return result -} - -// For relative vertical movement. Dir may be -1 or 1. Unit can be -// "page" or "line". The resulting position will have a hitSide=true -// property if it reached the end of the document. -function findPosV(cm, pos, dir, unit) { - var doc = cm.doc, x = pos.left, y; - if (unit == "page") { - var pageSize = Math.min(cm.display.wrapper.clientHeight, window.innerHeight || document.documentElement.clientHeight); - var moveAmount = Math.max(pageSize - .5 * textHeight(cm.display), 3); - y = (dir > 0 ? pos.bottom : pos.top) + dir * moveAmount; - - } else if (unit == "line") { - y = dir > 0 ? pos.bottom + 3 : pos.top - 3; - } - var target; - for (;;) { - target = coordsChar(cm, x, y); - if (!target.outside) { break } - if (dir < 0 ? 
y <= 0 : y >= doc.height) { target.hitSide = true; break } - y += dir * 5; - } - return target -} - -// CONTENTEDITABLE INPUT STYLE - -var ContentEditableInput = function(cm) { - this.cm = cm; - this.lastAnchorNode = this.lastAnchorOffset = this.lastFocusNode = this.lastFocusOffset = null; - this.polling = new Delayed(); - this.composing = null; - this.gracePeriod = false; - this.readDOMTimeout = null; -}; - -ContentEditableInput.prototype.init = function (display) { - var this$1 = this; - - var input = this, cm = input.cm; - var div = input.div = display.lineDiv; - disableBrowserMagic(div, cm.options.spellcheck); - - on(div, "paste", function (e) { - if (signalDOMEvent(cm, e) || handlePaste(e, cm)) { return } - // IE doesn't fire input events, so we schedule a read for the pasted content in this way - if (ie_version <= 11) { setTimeout(operation(cm, function () { return this$1.updateFromDOM(); }), 20); } - }); - - on(div, "compositionstart", function (e) { - this$1.composing = {data: e.data, done: false}; - }); - on(div, "compositionupdate", function (e) { - if (!this$1.composing) { this$1.composing = {data: e.data, done: false}; } - }); - on(div, "compositionend", function (e) { - if (this$1.composing) { - if (e.data != this$1.composing.data) { this$1.readFromDOMSoon(); } - this$1.composing.done = true; - } - }); - - on(div, "touchstart", function () { return input.forceCompositionEnd(); }); - - on(div, "input", function () { - if (!this$1.composing) { this$1.readFromDOMSoon(); } - }); - - function onCopyCut(e) { - if (signalDOMEvent(cm, e)) { return } - if (cm.somethingSelected()) { - setLastCopied({lineWise: false, text: cm.getSelections()}); - if (e.type == "cut") { cm.replaceSelection("", null, "cut"); } - } else if (!cm.options.lineWiseCopyCut) { - return - } else { - var ranges = copyableRanges(cm); - setLastCopied({lineWise: true, text: ranges.text}); - if (e.type == "cut") { - cm.operation(function () { - cm.setSelections(ranges.ranges, 0, sel_dontScroll); - cm.replaceSelection("", null, "cut"); - }); - } - } - if (e.clipboardData) { - e.clipboardData.clearData(); - var content = lastCopied.text.join("\n"); - // iOS exposes the clipboard API, but seems to discard content inserted into it - e.clipboardData.setData("Text", content); - if (e.clipboardData.getData("Text") == content) { - e.preventDefault(); - return - } - } - // Old-fashioned briefly-focus-a-textarea hack - var kludge = hiddenTextarea(), te = kludge.firstChild; - cm.display.lineSpace.insertBefore(kludge, cm.display.lineSpace.firstChild); - te.value = lastCopied.text.join("\n"); - var hadFocus = document.activeElement; - selectInput(te); - setTimeout(function () { - cm.display.lineSpace.removeChild(kludge); - hadFocus.focus(); - if (hadFocus == div) { input.showPrimarySelection(); } - }, 50); - } - on(div, "copy", onCopyCut); - on(div, "cut", onCopyCut); -}; - -ContentEditableInput.prototype.prepareSelection = function () { - var result = prepareSelection(this.cm, false); - result.focus = this.cm.state.focused; - return result -}; - -ContentEditableInput.prototype.showSelection = function (info, takeFocus) { - if (!info || !this.cm.display.view.length) { return } - if (info.focus || takeFocus) { this.showPrimarySelection(); } - this.showMultipleSelections(info); -}; - -ContentEditableInput.prototype.showPrimarySelection = function () { - var sel = window.getSelection(), cm = this.cm, prim = cm.doc.sel.primary(); - var from = prim.from(), to = prim.to(); - - if (cm.display.viewTo == cm.display.viewFrom || from.line >= 
cm.display.viewTo || to.line < cm.display.viewFrom) { - sel.removeAllRanges(); - return - } - - var curAnchor = domToPos(cm, sel.anchorNode, sel.anchorOffset); - var curFocus = domToPos(cm, sel.focusNode, sel.focusOffset); - if (curAnchor && !curAnchor.bad && curFocus && !curFocus.bad && - cmp(minPos(curAnchor, curFocus), from) == 0 && - cmp(maxPos(curAnchor, curFocus), to) == 0) - { return } - - var view = cm.display.view; - var start = (from.line >= cm.display.viewFrom && posToDOM(cm, from)) || - {node: view[0].measure.map[2], offset: 0}; - var end = to.line < cm.display.viewTo && posToDOM(cm, to); - if (!end) { - var measure = view[view.length - 1].measure; - var map$$1 = measure.maps ? measure.maps[measure.maps.length - 1] : measure.map; - end = {node: map$$1[map$$1.length - 1], offset: map$$1[map$$1.length - 2] - map$$1[map$$1.length - 3]}; - } - - if (!start || !end) { - sel.removeAllRanges(); - return - } - - var old = sel.rangeCount && sel.getRangeAt(0), rng; - try { rng = range(start.node, start.offset, end.offset, end.node); } - catch(e) {} // Our model of the DOM might be outdated, in which case the range we try to set can be impossible - if (rng) { - if (!gecko && cm.state.focused) { - sel.collapse(start.node, start.offset); - if (!rng.collapsed) { - sel.removeAllRanges(); - sel.addRange(rng); - } - } else { - sel.removeAllRanges(); - sel.addRange(rng); - } - if (old && sel.anchorNode == null) { sel.addRange(old); } - else if (gecko) { this.startGracePeriod(); } - } - this.rememberSelection(); -}; - -ContentEditableInput.prototype.startGracePeriod = function () { - var this$1 = this; - - clearTimeout(this.gracePeriod); - this.gracePeriod = setTimeout(function () { - this$1.gracePeriod = false; - if (this$1.selectionChanged()) - { this$1.cm.operation(function () { return this$1.cm.curOp.selectionChanged = true; }); } - }, 20); -}; - -ContentEditableInput.prototype.showMultipleSelections = function (info) { - removeChildrenAndAdd(this.cm.display.cursorDiv, info.cursors); - removeChildrenAndAdd(this.cm.display.selectionDiv, info.selection); -}; - -ContentEditableInput.prototype.rememberSelection = function () { - var sel = window.getSelection(); - this.lastAnchorNode = sel.anchorNode; this.lastAnchorOffset = sel.anchorOffset; - this.lastFocusNode = sel.focusNode; this.lastFocusOffset = sel.focusOffset; -}; - -ContentEditableInput.prototype.selectionInEditor = function () { - var sel = window.getSelection(); - if (!sel.rangeCount) { return false } - var node = sel.getRangeAt(0).commonAncestorContainer; - return contains(this.div, node) -}; - -ContentEditableInput.prototype.focus = function () { - if (this.cm.options.readOnly != "nocursor") { - if (!this.selectionInEditor()) - { this.showSelection(this.prepareSelection(), true); } - this.div.focus(); - } -}; -ContentEditableInput.prototype.blur = function () { this.div.blur(); }; -ContentEditableInput.prototype.getField = function () { return this.div }; - -ContentEditableInput.prototype.supportsTouch = function () { return true }; - -ContentEditableInput.prototype.receivedFocus = function () { - var input = this; - if (this.selectionInEditor()) - { this.pollSelection(); } - else - { runInOp(this.cm, function () { return input.cm.curOp.selectionChanged = true; }); } - - function poll() { - if (input.cm.state.focused) { - input.pollSelection(); - input.polling.set(input.cm.options.pollInterval, poll); - } - } - this.polling.set(this.cm.options.pollInterval, poll); -}; - -ContentEditableInput.prototype.selectionChanged = function () { 
- var sel = window.getSelection(); - return sel.anchorNode != this.lastAnchorNode || sel.anchorOffset != this.lastAnchorOffset || - sel.focusNode != this.lastFocusNode || sel.focusOffset != this.lastFocusOffset -}; - -ContentEditableInput.prototype.pollSelection = function () { - if (this.readDOMTimeout != null || this.gracePeriod || !this.selectionChanged()) { return } - var sel = window.getSelection(), cm = this.cm; - // On Android Chrome (version 56, at least), backspacing into an - // uneditable block element will put the cursor in that element, - // and then, because it's not editable, hide the virtual keyboard. - // Because Android doesn't allow us to actually detect backspace - // presses in a sane way, this code checks for when that happens - // and simulates a backspace press in this case. - if (android && chrome && this.cm.options.gutters.length && isInGutter(sel.anchorNode)) { - this.cm.triggerOnKeyDown({type: "keydown", keyCode: 8, preventDefault: Math.abs}); - this.blur(); - this.focus(); - return - } - if (this.composing) { return } - this.rememberSelection(); - var anchor = domToPos(cm, sel.anchorNode, sel.anchorOffset); - var head = domToPos(cm, sel.focusNode, sel.focusOffset); - if (anchor && head) { runInOp(cm, function () { - setSelection(cm.doc, simpleSelection(anchor, head), sel_dontScroll); - if (anchor.bad || head.bad) { cm.curOp.selectionChanged = true; } - }); } -}; - -ContentEditableInput.prototype.pollContent = function () { - if (this.readDOMTimeout != null) { - clearTimeout(this.readDOMTimeout); - this.readDOMTimeout = null; - } - - var cm = this.cm, display = cm.display, sel = cm.doc.sel.primary(); - var from = sel.from(), to = sel.to(); - if (from.ch == 0 && from.line > cm.firstLine()) - { from = Pos(from.line - 1, getLine(cm.doc, from.line - 1).length); } - if (to.ch == getLine(cm.doc, to.line).text.length && to.line < cm.lastLine()) - { to = Pos(to.line + 1, 0); } - if (from.line < display.viewFrom || to.line > display.viewTo - 1) { return false } - - var fromIndex, fromLine, fromNode; - if (from.line == display.viewFrom || (fromIndex = findViewIndex(cm, from.line)) == 0) { - fromLine = lineNo(display.view[0].line); - fromNode = display.view[0].node; - } else { - fromLine = lineNo(display.view[fromIndex].line); - fromNode = display.view[fromIndex - 1].node.nextSibling; - } - var toIndex = findViewIndex(cm, to.line); - var toLine, toNode; - if (toIndex == display.view.length - 1) { - toLine = display.viewTo - 1; - toNode = display.lineDiv.lastChild; - } else { - toLine = lineNo(display.view[toIndex + 1].line) - 1; - toNode = display.view[toIndex + 1].node.previousSibling; - } - - if (!fromNode) { return false } - var newText = cm.doc.splitLines(domTextBetween(cm, fromNode, toNode, fromLine, toLine)); - var oldText = getBetween(cm.doc, Pos(fromLine, 0), Pos(toLine, getLine(cm.doc, toLine).text.length)); - while (newText.length > 1 && oldText.length > 1) { - if (lst(newText) == lst(oldText)) { newText.pop(); oldText.pop(); toLine--; } - else if (newText[0] == oldText[0]) { newText.shift(); oldText.shift(); fromLine++; } - else { break } - } - - var cutFront = 0, cutEnd = 0; - var newTop = newText[0], oldTop = oldText[0], maxCutFront = Math.min(newTop.length, oldTop.length); - while (cutFront < maxCutFront && newTop.charCodeAt(cutFront) == oldTop.charCodeAt(cutFront)) - { ++cutFront; } - var newBot = lst(newText), oldBot = lst(oldText); - var maxCutEnd = Math.min(newBot.length - (newText.length == 1 ? cutFront : 0), - oldBot.length - (oldText.length == 1 ? 
cutFront : 0)); - while (cutEnd < maxCutEnd && - newBot.charCodeAt(newBot.length - cutEnd - 1) == oldBot.charCodeAt(oldBot.length - cutEnd - 1)) - { ++cutEnd; } - // Try to move start of change to start of selection if ambiguous - if (newText.length == 1 && oldText.length == 1 && fromLine == from.line) { - while (cutFront && cutFront > from.ch && - newBot.charCodeAt(newBot.length - cutEnd - 1) == oldBot.charCodeAt(oldBot.length - cutEnd - 1)) { - cutFront--; - cutEnd++; - } - } - - newText[newText.length - 1] = newBot.slice(0, newBot.length - cutEnd).replace(/^\u200b+/, ""); - newText[0] = newText[0].slice(cutFront).replace(/\u200b+$/, ""); - - var chFrom = Pos(fromLine, cutFront); - var chTo = Pos(toLine, oldText.length ? lst(oldText).length - cutEnd : 0); - if (newText.length > 1 || newText[0] || cmp(chFrom, chTo)) { - replaceRange(cm.doc, newText, chFrom, chTo, "+input"); - return true - } -}; - -ContentEditableInput.prototype.ensurePolled = function () { - this.forceCompositionEnd(); -}; -ContentEditableInput.prototype.reset = function () { - this.forceCompositionEnd(); -}; -ContentEditableInput.prototype.forceCompositionEnd = function () { - if (!this.composing) { return } - clearTimeout(this.readDOMTimeout); - this.composing = null; - this.updateFromDOM(); - this.div.blur(); - this.div.focus(); -}; -ContentEditableInput.prototype.readFromDOMSoon = function () { - var this$1 = this; - - if (this.readDOMTimeout != null) { return } - this.readDOMTimeout = setTimeout(function () { - this$1.readDOMTimeout = null; - if (this$1.composing) { - if (this$1.composing.done) { this$1.composing = null; } - else { return } - } - this$1.updateFromDOM(); - }, 80); -}; - -ContentEditableInput.prototype.updateFromDOM = function () { - var this$1 = this; - - if (this.cm.isReadOnly() || !this.pollContent()) - { runInOp(this.cm, function () { return regChange(this$1.cm); }); } -}; - -ContentEditableInput.prototype.setUneditable = function (node) { - node.contentEditable = "false"; -}; - -ContentEditableInput.prototype.onKeyPress = function (e) { - if (e.charCode == 0) { return } - e.preventDefault(); - if (!this.cm.isReadOnly()) - { operation(this.cm, applyTextInput)(this.cm, String.fromCharCode(e.charCode == null ? e.keyCode : e.charCode), 0); } -}; - -ContentEditableInput.prototype.readOnlyChanged = function (val) { - this.div.contentEditable = String(val != "nocursor"); -}; - -ContentEditableInput.prototype.onContextMenu = function () {}; -ContentEditableInput.prototype.resetPosition = function () {}; - -ContentEditableInput.prototype.needsContentAttribute = true; - -function posToDOM(cm, pos) { - var view = findViewForLine(cm, pos.line); - if (!view || view.hidden) { return null } - var line = getLine(cm.doc, pos.line); - var info = mapFromLineView(view, line, pos.line); - - var order = getOrder(line, cm.doc.direction), side = "left"; - if (order) { - var partPos = getBidiPartAt(order, pos.ch); - side = partPos % 2 ? "right" : "left"; - } - var result = nodeAndOffsetInLineMap(info.map, pos.ch, side); - result.offset = result.collapse == "right" ? 
result.end : result.start; - return result -} - -function isInGutter(node) { - for (var scan = node; scan; scan = scan.parentNode) - { if (/CodeMirror-gutter-wrapper/.test(scan.className)) { return true } } - return false -} - -function badPos(pos, bad) { if (bad) { pos.bad = true; } return pos } - -function domTextBetween(cm, from, to, fromLine, toLine) { - var text = "", closing = false, lineSep = cm.doc.lineSeparator(); - function recognizeMarker(id) { return function (marker) { return marker.id == id; } } - function close() { - if (closing) { - text += lineSep; - closing = false; - } - } - function addText(str) { - if (str) { - close(); - text += str; - } - } - function walk(node) { - if (node.nodeType == 1) { - var cmText = node.getAttribute("cm-text"); - if (cmText != null) { - addText(cmText || node.textContent.replace(/\u200b/g, "")); - return - } - var markerID = node.getAttribute("cm-marker"), range$$1; - if (markerID) { - var found = cm.findMarks(Pos(fromLine, 0), Pos(toLine + 1, 0), recognizeMarker(+markerID)); - if (found.length && (range$$1 = found[0].find(0))) - { addText(getBetween(cm.doc, range$$1.from, range$$1.to).join(lineSep)); } - return - } - if (node.getAttribute("contenteditable") == "false") { return } - var isBlock = /^(pre|div|p)$/i.test(node.nodeName); - if (isBlock) { close(); } - for (var i = 0; i < node.childNodes.length; i++) - { walk(node.childNodes[i]); } - if (isBlock) { closing = true; } - } else if (node.nodeType == 3) { - addText(node.nodeValue); - } - } - for (;;) { - walk(from); - if (from == to) { break } - from = from.nextSibling; - } - return text -} - -function domToPos(cm, node, offset) { - var lineNode; - if (node == cm.display.lineDiv) { - lineNode = cm.display.lineDiv.childNodes[offset]; - if (!lineNode) { return badPos(cm.clipPos(Pos(cm.display.viewTo - 1)), true) } - node = null; offset = 0; - } else { - for (lineNode = node;; lineNode = lineNode.parentNode) { - if (!lineNode || lineNode == cm.display.lineDiv) { return null } - if (lineNode.parentNode && lineNode.parentNode == cm.display.lineDiv) { break } - } - } - for (var i = 0; i < cm.display.view.length; i++) { - var lineView = cm.display.view[i]; - if (lineView.node == lineNode) - { return locateNodeInLineView(lineView, node, offset) } - } -} - -function locateNodeInLineView(lineView, node, offset) { - var wrapper = lineView.text.firstChild, bad = false; - if (!node || !contains(wrapper, node)) { return badPos(Pos(lineNo(lineView.line), 0), true) } - if (node == wrapper) { - bad = true; - node = wrapper.childNodes[offset]; - offset = 0; - if (!node) { - var line = lineView.rest ? lst(lineView.rest) : lineView.line; - return badPos(Pos(lineNo(line), line.text.length), bad) - } - } - - var textNode = node.nodeType == 3 ? node : null, topNode = node; - if (!textNode && node.childNodes.length == 1 && node.firstChild.nodeType == 3) { - textNode = node.firstChild; - if (offset) { offset = textNode.nodeValue.length; } - } - while (topNode.parentNode != wrapper) { topNode = topNode.parentNode; } - var measure = lineView.measure, maps = measure.maps; - - function find(textNode, topNode, offset) { - for (var i = -1; i < (maps ? maps.length : 0); i++) { - var map$$1 = i < 0 ? measure.map : maps[i]; - for (var j = 0; j < map$$1.length; j += 3) { - var curNode = map$$1[j + 2]; - if (curNode == textNode || curNode == topNode) { - var line = lineNo(i < 0 ? lineView.line : lineView.rest[i]); - var ch = map$$1[j] + offset; - if (offset < 0 || curNode != textNode) { ch = map$$1[j + (offset ? 
1 : 0)]; } - return Pos(line, ch) - } - } - } - } - var found = find(textNode, topNode, offset); - if (found) { return badPos(found, bad) } - - // FIXME this is all really shaky. might handle the few cases it needs to handle, but likely to cause problems - for (var after = topNode.nextSibling, dist = textNode ? textNode.nodeValue.length - offset : 0; after; after = after.nextSibling) { - found = find(after, after.firstChild, 0); - if (found) - { return badPos(Pos(found.line, found.ch - dist), bad) } - else - { dist += after.textContent.length; } - } - for (var before = topNode.previousSibling, dist$1 = offset; before; before = before.previousSibling) { - found = find(before, before.firstChild, -1); - if (found) - { return badPos(Pos(found.line, found.ch + dist$1), bad) } - else - { dist$1 += before.textContent.length; } - } -} - -// TEXTAREA INPUT STYLE - -var TextareaInput = function(cm) { - this.cm = cm; - // See input.poll and input.reset - this.prevInput = ""; - - // Flag that indicates whether we expect input to appear real soon - // now (after some event like 'keypress' or 'input') and are - // polling intensively. - this.pollingFast = false; - // Self-resetting timeout for the poller - this.polling = new Delayed(); - // Used to work around IE issue with selection being forgotten when focus moves away from textarea - this.hasSelection = false; - this.composing = null; -}; - -TextareaInput.prototype.init = function (display) { - var this$1 = this; - - var input = this, cm = this.cm; - - // Wraps and hides input textarea - var div = this.wrapper = hiddenTextarea(); - // The semihidden textarea that is focused when the editor is - // focused, and receives input. - var te = this.textarea = div.firstChild; - display.wrapper.insertBefore(div, display.wrapper.firstChild); - - // Needed to hide big blue blinking cursor on Mobile Safari (doesn't seem to work in iOS 8 anymore) - if (ios) { te.style.width = "0px"; } - - on(te, "input", function () { - if (ie && ie_version >= 9 && this$1.hasSelection) { this$1.hasSelection = null; } - input.poll(); - }); - - on(te, "paste", function (e) { - if (signalDOMEvent(cm, e) || handlePaste(e, cm)) { return } - - cm.state.pasteIncoming = true; - input.fastPoll(); - }); - - function prepareCopyCut(e) { - if (signalDOMEvent(cm, e)) { return } - if (cm.somethingSelected()) { - setLastCopied({lineWise: false, text: cm.getSelections()}); - } else if (!cm.options.lineWiseCopyCut) { - return - } else { - var ranges = copyableRanges(cm); - setLastCopied({lineWise: true, text: ranges.text}); - if (e.type == "cut") { - cm.setSelections(ranges.ranges, null, sel_dontScroll); - } else { - input.prevInput = ""; - te.value = ranges.text.join("\n"); - selectInput(te); - } - } - if (e.type == "cut") { cm.state.cutIncoming = true; } - } - on(te, "cut", prepareCopyCut); - on(te, "copy", prepareCopyCut); - - on(display.scroller, "paste", function (e) { - if (eventInWidget(display, e) || signalDOMEvent(cm, e)) { return } - cm.state.pasteIncoming = true; - input.focus(); - }); - - // Prevent normal selection in the editor (we handle our own) - on(display.lineSpace, "selectstart", function (e) { - if (!eventInWidget(display, e)) { e_preventDefault(e); } - }); - - on(te, "compositionstart", function () { - var start = cm.getCursor("from"); - if (input.composing) { input.composing.range.clear(); } - input.composing = { - start: start, - range: cm.markText(start, cm.getCursor("to"), {className: "CodeMirror-composing"}) - }; - }); - on(te, "compositionend", function () { - if 
(input.composing) { - input.poll(); - input.composing.range.clear(); - input.composing = null; - } - }); -}; - -TextareaInput.prototype.prepareSelection = function () { - // Redraw the selection and/or cursor - var cm = this.cm, display = cm.display, doc = cm.doc; - var result = prepareSelection(cm); - - // Move the hidden textarea near the cursor to prevent scrolling artifacts - if (cm.options.moveInputWithCursor) { - var headPos = cursorCoords(cm, doc.sel.primary().head, "div"); - var wrapOff = display.wrapper.getBoundingClientRect(), lineOff = display.lineDiv.getBoundingClientRect(); - result.teTop = Math.max(0, Math.min(display.wrapper.clientHeight - 10, - headPos.top + lineOff.top - wrapOff.top)); - result.teLeft = Math.max(0, Math.min(display.wrapper.clientWidth - 10, - headPos.left + lineOff.left - wrapOff.left)); - } - - return result -}; - -TextareaInput.prototype.showSelection = function (drawn) { - var cm = this.cm, display = cm.display; - removeChildrenAndAdd(display.cursorDiv, drawn.cursors); - removeChildrenAndAdd(display.selectionDiv, drawn.selection); - if (drawn.teTop != null) { - this.wrapper.style.top = drawn.teTop + "px"; - this.wrapper.style.left = drawn.teLeft + "px"; - } -}; - -// Reset the input to correspond to the selection (or to be empty, -// when not typing and nothing is selected) -TextareaInput.prototype.reset = function (typing) { - if (this.contextMenuPending || this.composing) { return } - var cm = this.cm; - if (cm.somethingSelected()) { - this.prevInput = ""; - var content = cm.getSelection(); - this.textarea.value = content; - if (cm.state.focused) { selectInput(this.textarea); } - if (ie && ie_version >= 9) { this.hasSelection = content; } - } else if (!typing) { - this.prevInput = this.textarea.value = ""; - if (ie && ie_version >= 9) { this.hasSelection = null; } - } -}; - -TextareaInput.prototype.getField = function () { return this.textarea }; - -TextareaInput.prototype.supportsTouch = function () { return false }; - -TextareaInput.prototype.focus = function () { - if (this.cm.options.readOnly != "nocursor" && (!mobile || activeElt() != this.textarea)) { - try { this.textarea.focus(); } - catch (e) {} // IE8 will throw if the textarea is display: none or not in DOM - } -}; - -TextareaInput.prototype.blur = function () { this.textarea.blur(); }; - -TextareaInput.prototype.resetPosition = function () { - this.wrapper.style.top = this.wrapper.style.left = 0; -}; - -TextareaInput.prototype.receivedFocus = function () { this.slowPoll(); }; - -// Poll for input changes, using the normal rate of polling. This -// runs as long as the editor is focused. -TextareaInput.prototype.slowPoll = function () { - var this$1 = this; - - if (this.pollingFast) { return } - this.polling.set(this.cm.options.pollInterval, function () { - this$1.poll(); - if (this$1.cm.state.focused) { this$1.slowPoll(); } - }); -}; - -// When an event has just come in that is likely to add or change -// something in the input textarea, we poll faster, to ensure that -// the change appears on the screen quickly. -TextareaInput.prototype.fastPoll = function () { - var missed = false, input = this; - input.pollingFast = true; - function p() { - var changed = input.poll(); - if (!changed && !missed) {missed = true; input.polling.set(60, p);} - else {input.pollingFast = false; input.slowPoll();} - } - input.polling.set(20, p); -}; - -// Read input from the textarea, and update the document to match. 
-// When something is selected, it is present in the textarea, and -// selected (unless it is huge, in which case a placeholder is -// used). When nothing is selected, the cursor sits after previously -// seen text (can be empty), which is stored in prevInput (we must -// not reset the textarea when typing, because that breaks IME). -TextareaInput.prototype.poll = function () { - var this$1 = this; - - var cm = this.cm, input = this.textarea, prevInput = this.prevInput; - // Since this is called a *lot*, try to bail out as cheaply as - // possible when it is clear that nothing happened. hasSelection - // will be the case when there is a lot of text in the textarea, - // in which case reading its value would be expensive. - if (this.contextMenuPending || !cm.state.focused || - (hasSelection(input) && !prevInput && !this.composing) || - cm.isReadOnly() || cm.options.disableInput || cm.state.keySeq) - { return false } - - var text = input.value; - // If nothing changed, bail. - if (text == prevInput && !cm.somethingSelected()) { return false } - // Work around nonsensical selection resetting in IE9/10, and - // inexplicable appearance of private area unicode characters on - // some key combos in Mac (#2689). - if (ie && ie_version >= 9 && this.hasSelection === text || - mac && /[\uf700-\uf7ff]/.test(text)) { - cm.display.input.reset(); - return false - } - - if (cm.doc.sel == cm.display.selForContextMenu) { - var first = text.charCodeAt(0); - if (first == 0x200b && !prevInput) { prevInput = "\u200b"; } - if (first == 0x21da) { this.reset(); return this.cm.execCommand("undo") } - } - // Find the part of the input that is actually new - var same = 0, l = Math.min(prevInput.length, text.length); - while (same < l && prevInput.charCodeAt(same) == text.charCodeAt(same)) { ++same; } - - runInOp(cm, function () { - applyTextInput(cm, text.slice(same), prevInput.length - same, - null, this$1.composing ? "*compose" : null); - - // Don't leave long text in the textarea, since it makes further polling slow - if (text.length > 1000 || text.indexOf("\n") > -1) { input.value = this$1.prevInput = ""; } - else { this$1.prevInput = text; } - - if (this$1.composing) { - this$1.composing.range.clear(); - this$1.composing.range = cm.markText(this$1.composing.start, cm.getCursor("to"), - {className: "CodeMirror-composing"}); - } - }); - return true -}; - -TextareaInput.prototype.ensurePolled = function () { - if (this.pollingFast && this.poll()) { this.pollingFast = false; } -}; - -TextareaInput.prototype.onKeyPress = function () { - if (ie && ie_version >= 9) { this.hasSelection = null; } - this.fastPoll(); -}; - -TextareaInput.prototype.onContextMenu = function (e) { - var input = this, cm = input.cm, display = cm.display, te = input.textarea; - var pos = posFromMouse(cm, e), scrollPos = display.scroller.scrollTop; - if (!pos || presto) { return } // Opera is difficult. - - // Reset the current text selection only if the click is done outside of the selection - // and 'resetSelectionOnContextMenu' option is true. 
- var reset = cm.options.resetSelectionOnContextMenu; - if (reset && cm.doc.sel.contains(pos) == -1) - { operation(cm, setSelection)(cm.doc, simpleSelection(pos), sel_dontScroll); } - - var oldCSS = te.style.cssText, oldWrapperCSS = input.wrapper.style.cssText; - input.wrapper.style.cssText = "position: absolute"; - var wrapperBox = input.wrapper.getBoundingClientRect(); - te.style.cssText = "position: absolute; width: 30px; height: 30px;\n top: " + (e.clientY - wrapperBox.top - 5) + "px; left: " + (e.clientX - wrapperBox.left - 5) + "px;\n z-index: 1000; background: " + (ie ? "rgba(255, 255, 255, .05)" : "transparent") + ";\n outline: none; border-width: 0; outline: none; overflow: hidden; opacity: .05; filter: alpha(opacity=5);"; - var oldScrollY; - if (webkit) { oldScrollY = window.scrollY; } // Work around Chrome issue (#2712) - display.input.focus(); - if (webkit) { window.scrollTo(null, oldScrollY); } - display.input.reset(); - // Adds "Select all" to context menu in FF - if (!cm.somethingSelected()) { te.value = input.prevInput = " "; } - input.contextMenuPending = true; - display.selForContextMenu = cm.doc.sel; - clearTimeout(display.detectingSelectAll); - - // Select-all will be greyed out if there's nothing to select, so - // this adds a zero-width space so that we can later check whether - // it got selected. - function prepareSelectAllHack() { - if (te.selectionStart != null) { - var selected = cm.somethingSelected(); - var extval = "\u200b" + (selected ? te.value : ""); - te.value = "\u21da"; // Used to catch context-menu undo - te.value = extval; - input.prevInput = selected ? "" : "\u200b"; - te.selectionStart = 1; te.selectionEnd = extval.length; - // Re-set this, in case some other handler touched the - // selection in the meantime. - display.selForContextMenu = cm.doc.sel; - } - } - function rehide() { - input.contextMenuPending = false; - input.wrapper.style.cssText = oldWrapperCSS; - te.style.cssText = oldCSS; - if (ie && ie_version < 9) { display.scrollbars.setScrollTop(display.scroller.scrollTop = scrollPos); } - - // Try to detect the user choosing select-all - if (te.selectionStart != null) { - if (!ie || (ie && ie_version < 9)) { prepareSelectAllHack(); } - var i = 0, poll = function () { - if (display.selForContextMenu == cm.doc.sel && te.selectionStart == 0 && - te.selectionEnd > 0 && input.prevInput == "\u200b") { - operation(cm, selectAll)(cm); - } else if (i++ < 10) { - display.detectingSelectAll = setTimeout(poll, 500); - } else { - display.selForContextMenu = null; - display.input.reset(); - } - }; - display.detectingSelectAll = setTimeout(poll, 200); - } - } - - if (ie && ie_version >= 9) { prepareSelectAllHack(); } - if (captureRightClick) { - e_stop(e); - var mouseup = function () { - off(window, "mouseup", mouseup); - setTimeout(rehide, 20); - }; - on(window, "mouseup", mouseup); - } else { - setTimeout(rehide, 50); - } -}; - -TextareaInput.prototype.readOnlyChanged = function (val) { - if (!val) { this.reset(); } - this.textarea.disabled = val == "nocursor"; -}; - -TextareaInput.prototype.setUneditable = function () {}; - -TextareaInput.prototype.needsContentAttribute = false; - -function fromTextArea(textarea, options) { - options = options ? 
copyObj(options) : {}; - options.value = textarea.value; - if (!options.tabindex && textarea.tabIndex) - { options.tabindex = textarea.tabIndex; } - if (!options.placeholder && textarea.placeholder) - { options.placeholder = textarea.placeholder; } - // Set autofocus to true if this textarea is focused, or if it has - // autofocus and no other element is focused. - if (options.autofocus == null) { - var hasFocus = activeElt(); - options.autofocus = hasFocus == textarea || - textarea.getAttribute("autofocus") != null && hasFocus == document.body; - } - - function save() {textarea.value = cm.getValue();} - - var realSubmit; - if (textarea.form) { - on(textarea.form, "submit", save); - // Deplorable hack to make the submit method do the right thing. - if (!options.leaveSubmitMethodAlone) { - var form = textarea.form; - realSubmit = form.submit; - try { - var wrappedSubmit = form.submit = function () { - save(); - form.submit = realSubmit; - form.submit(); - form.submit = wrappedSubmit; - }; - } catch(e) {} - } - } - - options.finishInit = function (cm) { - cm.save = save; - cm.getTextArea = function () { return textarea; }; - cm.toTextArea = function () { - cm.toTextArea = isNaN; // Prevent this from being ran twice - save(); - textarea.parentNode.removeChild(cm.getWrapperElement()); - textarea.style.display = ""; - if (textarea.form) { - off(textarea.form, "submit", save); - if (typeof textarea.form.submit == "function") - { textarea.form.submit = realSubmit; } - } - }; - }; - - textarea.style.display = "none"; - var cm = CodeMirror$1(function (node) { return textarea.parentNode.insertBefore(node, textarea.nextSibling); }, - options); - return cm -} - -function addLegacyProps(CodeMirror) { - CodeMirror.off = off; - CodeMirror.on = on; - CodeMirror.wheelEventPixels = wheelEventPixels; - CodeMirror.Doc = Doc; - CodeMirror.splitLines = splitLinesAuto; - CodeMirror.countColumn = countColumn; - CodeMirror.findColumn = findColumn; - CodeMirror.isWordChar = isWordCharBasic; - CodeMirror.Pass = Pass; - CodeMirror.signal = signal; - CodeMirror.Line = Line; - CodeMirror.changeEnd = changeEnd; - CodeMirror.scrollbarModel = scrollbarModel; - CodeMirror.Pos = Pos; - CodeMirror.cmpPos = cmp; - CodeMirror.modes = modes; - CodeMirror.mimeModes = mimeModes; - CodeMirror.resolveMode = resolveMode; - CodeMirror.getMode = getMode; - CodeMirror.modeExtensions = modeExtensions; - CodeMirror.extendMode = extendMode; - CodeMirror.copyState = copyState; - CodeMirror.startState = startState; - CodeMirror.innerMode = innerMode; - CodeMirror.commands = commands; - CodeMirror.keyMap = keyMap; - CodeMirror.keyName = keyName; - CodeMirror.isModifierKey = isModifierKey; - CodeMirror.lookupKey = lookupKey; - CodeMirror.normalizeKeyMap = normalizeKeyMap; - CodeMirror.StringStream = StringStream; - CodeMirror.SharedTextMarker = SharedTextMarker; - CodeMirror.TextMarker = TextMarker; - CodeMirror.LineWidget = LineWidget; - CodeMirror.e_preventDefault = e_preventDefault; - CodeMirror.e_stopPropagation = e_stopPropagation; - CodeMirror.e_stop = e_stop; - CodeMirror.addClass = addClass; - CodeMirror.contains = contains; - CodeMirror.rmClass = rmClass; - CodeMirror.keyNames = keyNames; -} - -// EDITOR CONSTRUCTOR - -defineOptions(CodeMirror$1); - -addEditorMethods(CodeMirror$1); - -// Set up methods on CodeMirror's prototype to redirect to the editor's document. 
-var dontDelegate = "iter insert remove copy getEditor constructor".split(" "); -for (var prop in Doc.prototype) { if (Doc.prototype.hasOwnProperty(prop) && indexOf(dontDelegate, prop) < 0) - { CodeMirror$1.prototype[prop] = (function(method) { - return function() {return method.apply(this.doc, arguments)} - })(Doc.prototype[prop]); } } - -eventMixin(Doc); - -// INPUT HANDLING - -CodeMirror$1.inputStyles = {"textarea": TextareaInput, "contenteditable": ContentEditableInput}; - -// MODE DEFINITION AND QUERYING - -// Extra arguments are stored as the mode's dependencies, which is -// used by (legacy) mechanisms like loadmode.js to automatically -// load a mode. (Preferred mechanism is the require/define calls.) -CodeMirror$1.defineMode = function(name/*, mode, …*/) { - if (!CodeMirror$1.defaults.mode && name != "null") { CodeMirror$1.defaults.mode = name; } - defineMode.apply(this, arguments); -}; - -CodeMirror$1.defineMIME = defineMIME; - -// Minimal default mode. -CodeMirror$1.defineMode("null", function () { return ({token: function (stream) { return stream.skipToEnd(); }}); }); -CodeMirror$1.defineMIME("text/plain", "null"); - -// EXTENSIONS - -CodeMirror$1.defineExtension = function (name, func) { - CodeMirror$1.prototype[name] = func; -}; -CodeMirror$1.defineDocExtension = function (name, func) { - Doc.prototype[name] = func; -}; - -CodeMirror$1.fromTextArea = fromTextArea; - -addLegacyProps(CodeMirror$1); - -CodeMirror$1.version = "5.31.0"; - -return CodeMirror$1; - -}))); diff --git a/datasette/static/codemirror-5.57.0-sql.min.js b/datasette/static/codemirror-5.57.0-sql.min.js new file mode 100644 index 00000000..13f667c6 --- /dev/null +++ b/datasette/static/codemirror-5.57.0-sql.min.js @@ -0,0 +1,5 @@ +/* + CodeMirror, copyright (c) by Marijn Haverbeke and others + Distributed under an MIT license: https://codemirror.net/LICENSE +*/ +(function(mod){if(typeof exports=="object"&&typeof module=="object")mod(require("../../lib/codemirror"));else if(typeof define=="function"&&define.amd)define(["../../lib/codemirror"],mod);else mod(CodeMirror)})(function(CodeMirror){"use strict";CodeMirror.defineMode("sql",function(config,parserConfig){var client=parserConfig.client||{},atoms=parserConfig.atoms||{false:true,true:true,null:true},builtin=parserConfig.builtin||set(defaultBuiltin),keywords=parserConfig.keywords||set(sqlKeywords),operatorChars=parserConfig.operatorChars||/^[*+\-%<>!=&|~^\/]/,support=parserConfig.support||{},hooks=parserConfig.hooks||{},dateSQL=parserConfig.dateSQL||{date:true,time:true,timestamp:true},backslashStringEscapes=parserConfig.backslashStringEscapes!==false,brackets=parserConfig.brackets||/^[\{}\(\)\[\]]/,punctuation=parserConfig.punctuation||/^[;.,:]/;function tokenBase(stream,state){var ch=stream.next();if(hooks[ch]){var result=hooks[ch](stream,state);if(result!==false)return result}if(support.hexNumber&&(ch=="0"&&stream.match(/^[xX][0-9a-fA-F]+/)||(ch=="x"||ch=="X")&&stream.match(/^'[0-9a-fA-F]+'/))){return"number"}else if(support.binaryNumber&&((ch=="b"||ch=="B")&&stream.match(/^'[01]+'/)||ch=="0"&&stream.match(/^b[01]+/))){return"number"}else if(ch.charCodeAt(0)>47&&ch.charCodeAt(0)<58){stream.match(/^[0-9]*(\.[0-9]+)?([eE][-+]?[0-9]+)?/);support.decimallessFloat&&stream.match(/^\.(?!\.)/);return"number"}else if(ch=="?"&&(stream.eatSpace()||stream.eol()||stream.eat(";"))){return"variable-3"}else if(ch=="'"||ch=='"'&&support.doubleQuote){state.tokenize=tokenLiteral(ch);return state.tokenize(stream,state)}else 
if((support.nCharCast&&(ch=="n"||ch=="N")||support.charsetCast&&ch=="_"&&stream.match(/[a-z][a-z0-9]*/i))&&(stream.peek()=="'"||stream.peek()=='"')){return"keyword"}else if(support.escapeConstant&&(ch=="e"||ch=="E")&&(stream.peek()=="'"||stream.peek()=='"'&&support.doubleQuote)){state.tokenize=function(stream,state){return(state.tokenize=tokenLiteral(stream.next(),true))(stream,state)};return"keyword"}else if(support.commentSlashSlash&&ch=="/"&&stream.eat("/")){stream.skipToEnd();return"comment"}else if(support.commentHash&&ch=="#"||ch=="-"&&stream.eat("-")&&(!support.commentSpaceRequired||stream.eat(" "))){stream.skipToEnd();return"comment"}else if(ch=="/"&&stream.eat("*")){state.tokenize=tokenComment(1);return state.tokenize(stream,state)}else if(ch=="."){if(support.zerolessFloat&&stream.match(/^(?:\d+(?:e[+-]?\d+)?)/i))return"number";if(stream.match(/^\.+/))return null;if(support.ODBCdotTable&&stream.match(/^[\w\d_$#]+/))return"variable-2"}else if(operatorChars.test(ch)){stream.eatWhile(operatorChars);return"operator"}else if(brackets.test(ch)){return"bracket"}else if(punctuation.test(ch)){stream.eatWhile(punctuation);return"punctuation"}else if(ch=="{"&&(stream.match(/^( )*(d|D|t|T|ts|TS)( )*'[^']*'( )*}/)||stream.match(/^( )*(d|D|t|T|ts|TS)( )*"[^"]*"( )*}/))){return"number"}else{stream.eatWhile(/^[_\w\d]/);var word=stream.current().toLowerCase();if(dateSQL.hasOwnProperty(word)&&(stream.match(/^( )+'[^']*'/)||stream.match(/^( )+"[^"]*"/)))return"number";if(atoms.hasOwnProperty(word))return"atom";if(builtin.hasOwnProperty(word))return"builtin";if(keywords.hasOwnProperty(word))return"keyword";if(client.hasOwnProperty(word))return"string-2";return null}}function tokenLiteral(quote,backslashEscapes){return function(stream,state){var escaped=false,ch;while((ch=stream.next())!=null){if(ch==quote&&!escaped){state.tokenize=tokenBase;break}escaped=(backslashStringEscapes||backslashEscapes)&&!escaped&&ch=="\\"}return"string"}}function tokenComment(depth){return function(stream,state){var m=stream.match(/^.*?(\/\*|\*\/)/);if(!m)stream.skipToEnd();else if(m[1]=="/*")state.tokenize=tokenComment(depth+1);else if(depth>1)state.tokenize=tokenComment(depth-1);else state.tokenize=tokenBase;return"comment"}}function pushContext(stream,state,type){state.context={prev:state.context,indent:stream.indentation(),col:stream.column(),type:type}}function popContext(state){state.indent=state.context.indent;state.context=state.context.prev}return{startState:function(){return{tokenize:tokenBase,context:null}},token:function(stream,state){if(stream.sol()){if(state.context&&state.context.align==null)state.context.align=false}if(state.tokenize==tokenBase&&stream.eatSpace())return null;var style=state.tokenize(stream,state);if(style=="comment")return style;if(state.context&&state.context.align==null)state.context.align=true;var tok=stream.current();if(tok=="(")pushContext(stream,state,")");else if(tok=="[")pushContext(stream,state,"]");else if(state.context&&state.context.type==tok)popContext(state);return style},indent:function(state,textAfter){var cx=state.context;if(!cx)return CodeMirror.Pass;var closing=textAfter.charAt(0)==cx.type;if(cx.align)return cx.col+(closing?0:1);else return cx.indent+(closing?0:config.indentUnit)},blockCommentStart:"/*",blockCommentEnd:"*/",lineComment:support.commentSlashSlash?"//":support.commentHash?"#":"--",closeBrackets:"()[]{}''\"\"``"}});function hookIdentifier(stream){var 
ch;while((ch=stream.next())!=null){if(ch=="`"&&!stream.eat("`"))return"variable-2"}stream.backUp(stream.current().length-1);return stream.eatWhile(/\w/)?"variable-2":null}function hookIdentifierDoublequote(stream){var ch;while((ch=stream.next())!=null){if(ch=='"'&&!stream.eat('"'))return"variable-2"}stream.backUp(stream.current().length-1);return stream.eatWhile(/\w/)?"variable-2":null}function hookVar(stream){if(stream.eat("@")){stream.match(/^session\./);stream.match(/^local\./);stream.match(/^global\./)}if(stream.eat("'")){stream.match(/^.*'/);return"variable-2"}else if(stream.eat('"')){stream.match(/^.*"/);return"variable-2"}else if(stream.eat("`")){stream.match(/^.*`/);return"variable-2"}else if(stream.match(/^[0-9a-zA-Z$\.\_]+/)){return"variable-2"}return null}function hookClient(stream){if(stream.eat("N")){return"atom"}return stream.match(/^[a-zA-Z.#!?]/)?"variable-2":null}var sqlKeywords="alter and as asc between by count create delete desc distinct drop from group having in insert into is join like not on or order select set table union update values where limit ";function set(str){var obj={},words=str.split(" ");for(var i=0;i<words.length;i++)obj[words[i]]=true;return obj}CodeMirror.defineMIME("text/x-mssql",{name:"sql",operatorChars:/^[*+\-%<>!=^\&|\/]/,brackets:/^[\{}\(\)]/,punctuation:/^[;.,:/]/,backslashStringEscapes:false,dateSQL:set("date datetimeoffset datetime2 smalldatetime datetime time"),hooks:{"@":hookVar}});CodeMirror.defineMIME("text/x-mysql",{name:"sql",client:set("charset clear connect edit ego exit go help nopager notee nowarning pager print prompt quit rehash source status system tee"),keywords:set(sqlKeywords+"accessible action add after algorithm all analyze asensitive at authors auto_increment autocommit avg avg_row_length before binary binlog both btree cache call cascade cascaded case catalog_name chain change changed character check checkpoint checksum class_origin client_statistics close coalesce code collate collation collations column columns comment commit committed completion concurrent condition connection consistent constraint contains continue contributors convert cross current current_date current_time current_timestamp current_user cursor data database databases day_hour day_microsecond day_minute day_second deallocate dec declare default delay_key_write delayed delimiter des_key_file describe deterministic dev_pop dev_samp deviance diagnostics directory disable discard distinctrow div dual dumpfile each elseif enable enclosed end ends engine engines enum errors escape escaped even event events every execute exists exit explain extended fast fetch field fields first flush for force foreign found_rows full fulltext function general get global grant grants group group_concat handler hash help high_priority hosts hour_microsecond hour_minute hour_second if ignore ignore_server_ids import index index_statistics infile inner innodb inout insensitive insert_method install interval invoker isolation iterate key keys kill language last leading leave left level limit linear lines list load local localtime localtimestamp lock logs low_priority master master_heartbeat_period master_ssl_verify_server_cert masters match max max_rows maxvalue message_text middleint migrate min min_rows minute_microsecond minute_second mod mode modifies modify mutex mysql_errno natural next no no_write_to_binlog offline offset one online open optimize option optionally out outer outfile pack_keys parser partition partitions password phase plugin plugins prepare preserve prev primary privileges procedure processlist profile profiles purge query quick range read read_write reads real rebuild 
recover references regexp relaylog release remove rename reorganize repair repeatable replace require resignal restrict resume return returns revoke right rlike rollback rollup row row_format rtree savepoint schedule schema schema_name schemas second_microsecond security sensitive separator serializable server session share show signal slave slow smallint snapshot soname spatial specific sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_no_cache sql_small_result sqlexception sqlstate sqlwarning ssl start starting starts status std stddev stddev_pop stddev_samp storage straight_join subclass_origin sum suspend table_name table_statistics tables tablespace temporary terminated to trailing transaction trigger triggers truncate uncommitted undo uninstall unique unlock upgrade usage use use_frm user user_resources user_statistics using utc_date utc_time utc_timestamp value variables varying view views warnings when while with work write xa xor year_month zerofill begin do then else loop repeat"),builtin:set("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text bigint int int1 int2 int3 int4 int8 integer float float4 float8 double char varbinary varchar varcharacter precision date datetime year unsigned signed numeric"),atoms:set("false true null unknown"),operatorChars:/^[*+\-%<>!=&|^]/,dateSQL:set("date time timestamp"),support:set("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber doubleQuote nCharCast charsetCast commentHash commentSpaceRequired"),hooks:{"@":hookVar,"`":hookIdentifier,"\\":hookClient}});CodeMirror.defineMIME("text/x-mariadb",{name:"sql",client:set("charset clear connect edit ego exit go help nopager notee nowarning pager print prompt quit rehash source status system tee"),keywords:set(sqlKeywords+"accessible action add after algorithm all always analyze asensitive at authors auto_increment autocommit avg avg_row_length before binary binlog both btree cache call cascade cascaded case catalog_name chain change changed character check checkpoint checksum class_origin client_statistics close coalesce code collate collation collations column columns comment commit committed completion concurrent condition connection consistent constraint contains continue contributors convert cross current current_date current_time current_timestamp current_user cursor data database databases day_hour day_microsecond day_minute day_second deallocate dec declare default delay_key_write delayed delimiter des_key_file describe deterministic dev_pop dev_samp deviance diagnostics directory disable discard distinctrow div dual dumpfile each elseif enable enclosed end ends engine engines enum errors escape escaped even event events every execute exists exit explain extended fast fetch field fields first flush for force foreign found_rows full fulltext function general generated get global grant grants group groupby_concat handler hard hash help high_priority hosts hour_microsecond hour_minute hour_second if ignore ignore_server_ids import index index_statistics infile inner innodb inout insensitive insert_method install interval invoker isolation iterate key keys kill language last leading leave left level limit linear lines list load local localtime localtimestamp lock logs low_priority master master_heartbeat_period master_ssl_verify_server_cert masters match max max_rows maxvalue message_text middleint migrate min min_rows minute_microsecond minute_second mod mode 
modifies modify mutex mysql_errno natural next no no_write_to_binlog offline offset one online open optimize option optionally out outer outfile pack_keys parser partition partitions password persistent phase plugin plugins prepare preserve prev primary privileges procedure processlist profile profiles purge query quick range read read_write reads real rebuild recover references regexp relaylog release remove rename reorganize repair repeatable replace require resignal restrict resume return returns revoke right rlike rollback rollup row row_format rtree savepoint schedule schema schema_name schemas second_microsecond security sensitive separator serializable server session share show shutdown signal slave slow smallint snapshot soft soname spatial specific sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_no_cache sql_small_result sqlexception sqlstate sqlwarning ssl start starting starts status std stddev stddev_pop stddev_samp storage straight_join subclass_origin sum suspend table_name table_statistics tables tablespace temporary terminated to trailing transaction trigger triggers truncate uncommitted undo uninstall unique unlock upgrade usage use use_frm user user_resources user_statistics using utc_date utc_time utc_timestamp value variables varying view views virtual warnings when while with work write xa xor year_month zerofill begin do then else loop repeat"),builtin:set("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text bigint int int1 int2 int3 int4 int8 integer float float4 float8 double char varbinary varchar varcharacter precision date datetime year unsigned signed numeric"),atoms:set("false true null unknown"),operatorChars:/^[*+\-%<>!=&|^]/,dateSQL:set("date time timestamp"),support:set("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber doubleQuote nCharCast charsetCast commentHash commentSpaceRequired"),hooks:{"@":hookVar,"`":hookIdentifier,"\\":hookClient}});CodeMirror.defineMIME("text/x-sqlite",{name:"sql",client:set("auth backup bail binary changes check clone databases dbinfo dump echo eqp exit explain fullschema headers help import imposter indexes iotrace limit lint load log mode nullvalue once open output print prompt quit read restore save scanstats schema separator session shell show stats system tables testcase timeout timer trace vfsinfo vfslist vfsname width"),keywords:set(sqlKeywords+"abort action add after all analyze attach autoincrement before begin cascade case cast check collate column commit conflict constraint cross current_date current_time current_timestamp database default deferrable deferred detach each else end escape except exclusive exists explain fail for foreign full glob if ignore immediate index indexed initially inner instead intersect isnull key left limit match natural no notnull null of offset outer plan pragma primary query raise recursive references regexp reindex release rename replace restrict right rollback row savepoint temp temporary then to transaction trigger unique using vacuum view virtual when with without"),builtin:set("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text clob bigint int int2 int8 integer float double char varchar date datetime year unsigned signed numeric real"),atoms:set("null current_date current_time current_timestamp"),operatorChars:/^[*+\-%<>!=&|/~]/,dateSQL:set("date 
time timestamp datetime"),support:set("decimallessFloat zerolessFloat"),identifierQuote:'"',hooks:{"@":hookVar,":":hookVar,"?":hookVar,$:hookVar,'"':hookIdentifierDoublequote,"`":hookIdentifier}});CodeMirror.defineMIME("text/x-cassandra",{name:"sql",client:{},keywords:set("add all allow alter and any apply as asc authorize batch begin by clustering columnfamily compact consistency count create custom delete desc distinct drop each_quorum exists filtering from grant if in index insert into key keyspace keyspaces level limit local_one local_quorum modify nan norecursive nosuperuser not of on one order password permission permissions primary quorum rename revoke schema select set storage superuser table three to token truncate ttl two type unlogged update use user users using values where with writetime"),builtin:set("ascii bigint blob boolean counter decimal double float frozen inet int list map static text timestamp timeuuid tuple uuid varchar varint"),atoms:set("false true infinity NaN"),operatorChars:/^[<>=]/,dateSQL:{},support:set("commentSlashSlash decimallessFloat"),hooks:{}});CodeMirror.defineMIME("text/x-plsql",{name:"sql",client:set("appinfo arraysize autocommit autoprint autorecovery autotrace blockterminator break btitle cmdsep colsep compatibility compute concat copycommit copytypecheck define describe echo editfile embedded escape exec execute feedback flagger flush heading headsep instance linesize lno loboffset logsource long longchunksize markup native newpage numformat numwidth pagesize pause pno recsep recsepchar release repfooter repheader serveroutput shiftinout show showmode size spool sqlblanklines sqlcase sqlcode sqlcontinue sqlnumber sqlpluscompatibility sqlprefix sqlprompt sqlterminator suffix tab term termout time timing trimout trimspool ttitle underline verify version wrap"),keywords:set("abort accept access add all alter and any array arraylen as asc assert assign at attributes audit authorization avg base_table begin between binary_integer body boolean by case cast char char_base check close cluster clusters colauth column comment commit compress connect connected constant constraint crash create current currval cursor data_base database date dba deallocate debugoff debugon decimal declare default definition delay delete desc digits dispose distinct do drop else elseif elsif enable end entry escape exception exception_init exchange exclusive exists exit external fast fetch file for force form from function generic goto grant group having identified if immediate in increment index indexes indicator initial initrans insert interface intersect into is key level library like limited local lock log logging long loop master maxextents maxtrans member minextents minus mislabel mode modify multiset new next no noaudit nocompress nologging noparallel not nowait number_base object of off offline on online only open option or order out package parallel partition pctfree pctincrease pctused pls_integer positive positiven pragma primary prior private privileges procedure public raise range raw read rebuild record ref references refresh release rename replace resource restrict return returning returns reverse revoke rollback row rowid rowlabel rownum rows run savepoint schema segment select separate session set share snapshot some space split sql start statement storage subtype successful synonym tabauth table tables tablespace task terminate then to trigger truncate type union unique unlimited unrecoverable unusable update use using validate value values variable view views 
when whenever where while with work"),builtin:set("abs acos add_months ascii asin atan atan2 average bfile bfilename bigserial bit blob ceil character chartorowid chr clob concat convert cos cosh count dec decode deref dual dump dup_val_on_index empty error exp false float floor found glb greatest hextoraw initcap instr instrb int integer isopen last_day least length lengthb ln lower lpad ltrim lub make_ref max min mlslabel mod months_between natural naturaln nchar nclob new_time next_day nextval nls_charset_decl_len nls_charset_id nls_charset_name nls_initcap nls_lower nls_sort nls_upper nlssort no_data_found notfound null number numeric nvarchar2 nvl others power rawtohex real reftohex round rowcount rowidtochar rowtype rpad rtrim serial sign signtype sin sinh smallint soundex sqlcode sqlerrm sqrt stddev string substr substrb sum sysdate tan tanh to_char text to_date to_label to_multi_byte to_number to_single_byte translate true trunc uid unlogged upper user userenv varchar varchar2 variance varying vsize xml"),operatorChars:/^[*\/+\-%<>!=~]/,dateSQL:set("date time timestamp"),support:set("doubleQuote nCharCast zerolessFloat binaryNumber hexNumber")});CodeMirror.defineMIME("text/x-hive",{name:"sql",keywords:set("select alter $elem$ $key$ $value$ add after all analyze and archive as asc before between binary both bucket buckets by cascade case cast change cluster clustered clusterstatus collection column columns comment compute concatenate continue create cross cursor data database databases dbproperties deferred delete delimited desc describe directory disable distinct distribute drop else enable end escaped exclusive exists explain export extended external fetch fields fileformat first format formatted from full function functions grant group having hold_ddltime idxproperties if import in index indexes inpath inputdriver inputformat insert intersect into is items join keys lateral left like limit lines load local location lock locks mapjoin materialized minus msck no_drop nocompress not of offline on option or order out outer outputdriver outputformat overwrite partition partitioned partitions percent plus preserve procedure purge range rcfile read readonly reads rebuild recordreader recordwriter recover reduce regexp rename repair replace restrict revoke right rlike row schema schemas semi sequencefile serde serdeproperties set shared show show_database sort sorted ssl statistics stored streamtable table tables tablesample tblproperties temporary terminated textfile then tmp to touch transform trigger unarchive undo union uniquejoin unlock update use using utc utc_tmestamp view when where while with admin authorization char compact compactions conf cube current current_date current_timestamp day decimal defined dependency directories elem_type exchange file following for grouping hour ignore inner interval jar less logical macro minute month more none noscan over owner partialscan preceding pretty principals protection reload rewrite role roles rollup rows second server sets skewed transactions truncate unbounded unset uri user values window year"),builtin:set("bool boolean long timestamp tinyint smallint bigint int float double date datetime unsigned string array struct map uniontype key_type utctimestamp value_type varchar"),atoms:set("false true null unknown"),operatorChars:/^[*+\-%<>!=]/,dateSQL:set("date timestamp"),support:set("ODBCdotTable doubleQuote binaryNumber hexNumber")});CodeMirror.defineMIME("text/x-pgsql",{name:"sql",client:set("source"),keywords:set(sqlKeywords+"a 
abort abs absent absolute access according action ada add admin after aggregate alias all allocate also alter always analyse analyze and any are array array_agg array_max_cardinality as asc asensitive assert assertion assignment asymmetric at atomic attach attribute attributes authorization avg backward base64 before begin begin_frame begin_partition bernoulli between bigint binary bit bit_length blob blocked bom boolean both breadth by c cache call called cardinality cascade cascaded case cast catalog catalog_name ceil ceiling chain char char_length character character_length character_set_catalog character_set_name character_set_schema characteristics characters check checkpoint class class_origin clob close cluster coalesce cobol collate collation collation_catalog collation_name collation_schema collect column column_name columns command_function command_function_code comment comments commit committed concurrently condition condition_number configuration conflict connect connection connection_name constant constraint constraint_catalog constraint_name constraint_schema constraints constructor contains content continue control conversion convert copy corr corresponding cost count covar_pop covar_samp create cross csv cube cume_dist current current_catalog current_date current_default_transform_group current_path current_role current_row current_schema current_time current_timestamp current_transform_group_for_type current_user cursor cursor_name cycle data database datalink datatype date datetime_interval_code datetime_interval_precision day db deallocate debug dec decimal declare default defaults deferrable deferred defined definer degree delete delimiter delimiters dense_rank depends depth deref derived desc describe descriptor detach detail deterministic diagnostics dictionary disable discard disconnect dispatch distinct dlnewcopy dlpreviouscopy dlurlcomplete dlurlcompleteonly dlurlcompletewrite dlurlpath dlurlpathonly dlurlpathwrite dlurlscheme dlurlserver dlvalue do document domain double drop dump dynamic dynamic_function dynamic_function_code each element else elseif elsif empty enable encoding encrypted end end_frame end_partition endexec enforced enum equals errcode error escape event every except exception exclude excluding exclusive exec execute exists exit exp explain expression extension external extract false family fetch file filter final first first_value flag float floor following for force foreach foreign fortran forward found frame_row free freeze from fs full function functions fusion g general generated get global go goto grant granted greatest group grouping groups handler having header hex hierarchy hint hold hour id identity if ignore ilike immediate immediately immutable implementation implicit import in include including increment indent index indexes indicator info inherit inherits initially inline inner inout input insensitive insert instance instantiable instead int integer integrity intersect intersection interval into invoker is isnull isolation join k key key_member key_type label lag language large last last_value lateral lead leading leakproof least left length level library like like_regex limit link listen ln load local localtime localtimestamp location locator lock locked log logged loop lower m map mapping match matched materialized max max_cardinality maxvalue member merge message message_length message_octet_length message_text method min minute minvalue mod mode modifies module month more move multiset mumps name names namespace national natural 
nchar nclob nesting new next nfc nfd nfkc nfkd nil no none normalize normalized not nothing notice notify notnull nowait nth_value ntile null nullable nullif nulls number numeric object occurrences_regex octet_length octets of off offset oids old on only open operator option options or order ordering ordinality others out outer output over overlaps overlay overriding owned owner p pad parallel parameter parameter_mode parameter_name parameter_ordinal_position parameter_specific_catalog parameter_specific_name parameter_specific_schema parser partial partition pascal passing passthrough password path percent percent_rank percentile_cont percentile_disc perform period permission pg_context pg_datatype_name pg_exception_context pg_exception_detail pg_exception_hint placing plans pli policy portion position position_regex power precedes preceding precision prepare prepared preserve primary print_strict_params prior privileges procedural procedure procedures program public publication query quote raise range rank read reads real reassign recheck recovery recursive ref references referencing refresh regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy regr_syy reindex relative release rename repeatable replace replica requiring reset respect restart restore restrict result result_oid return returned_cardinality returned_length returned_octet_length returned_sqlstate returning returns reverse revoke right role rollback rollup routine routine_catalog routine_name routine_schema routines row row_count row_number rows rowtype rule savepoint scale schema schema_name schemas scope scope_catalog scope_name scope_schema scroll search second section security select selective self sensitive sequence sequences serializable server server_name session session_user set setof sets share show similar simple size skip slice smallint snapshot some source space specific specific_name specifictype sql sqlcode sqlerror sqlexception sqlstate sqlwarning sqrt stable stacked standalone start state statement static statistics stddev_pop stddev_samp stdin stdout storage strict strip structure style subclass_origin submultiset subscription substring substring_regex succeeds sum symmetric sysid system system_time system_user t table table_name tables tablesample tablespace temp template temporary text then ties time timestamp timezone_hour timezone_minute to token top_level_count trailing transaction transaction_active transactions_committed transactions_rolled_back transform transforms translate translate_regex translation treat trigger trigger_catalog trigger_name trigger_schema trim trim_array true truncate trusted type types uescape unbounded uncommitted under unencrypted union unique unknown unlink unlisten unlogged unnamed unnest until untyped update upper uri usage use_column use_variable user user_defined_type_catalog user_defined_type_code user_defined_type_name user_defined_type_schema using vacuum valid validate validator value value_of values var_pop var_samp varbinary varchar variable_conflict variadic varying verbose version versioning view views volatile warning when whenever where while whitespace width_bucket window with within without work wrapper write xml xmlagg xmlattributes xmlbinary xmlcast xmlcomment xmlconcat xmldeclaration xmldocument xmlelement xmlexists xmlforest xmliterate xmlnamespaces xmlparse xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltext xmlvalidate year yes zone"),builtin:set("bigint int8 bigserial serial8 bit varying varbit boolean bool box 
bytea character char varchar cidr circle date double precision float8 inet integer int int4 interval json jsonb line lseg macaddr macaddr8 money numeric decimal path pg_lsn point polygon real float4 smallint int2 smallserial serial2 serial serial4 text time without zone with timetz timestamp timestamptz tsquery tsvector txid_snapshot uuid xml"),atoms:set("false true null unknown"),operatorChars:/^[*\/+\-%<>!=&|^\/#@?~]/,backslashStringEscapes:false,dateSQL:set("date time timestamp"),support:set("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber nCharCast charsetCast escapeConstant")});CodeMirror.defineMIME("text/x-gql",{name:"sql",keywords:set("ancestor and asc by contains desc descendant distinct from group has in is limit offset on order select superset where"),atoms:set("false true"),builtin:set("blob datetime first key __key__ string integer double boolean null"),operatorChars:/^[*+\-%<>!=]/});CodeMirror.defineMIME("text/x-gpsql",{name:"sql",client:set("source"),keywords:set("abort absolute access action active add admin after aggregate all also alter always analyse analyze and any array as asc assertion assignment asymmetric at authorization backward before begin between bigint binary bit boolean both by cache called cascade cascaded case cast chain char character characteristics check checkpoint class close cluster coalesce codegen collate column comment commit committed concurrency concurrently configuration connection constraint constraints contains content continue conversion copy cost cpu_rate_limit create createdb createexttable createrole createuser cross csv cube current current_catalog current_date current_role current_schema current_time current_timestamp current_user cursor cycle data database day deallocate dec decimal declare decode default defaults deferrable deferred definer delete delimiter delimiters deny desc dictionary disable discard distinct distributed do document domain double drop dxl each else enable encoding encrypted end enum errors escape every except exchange exclude excluding exclusive execute exists explain extension external extract false family fetch fields filespace fill filter first float following for force foreign format forward freeze from full function global grant granted greatest group group_id grouping handler hash having header hold host hour identity if ignore ilike immediate immutable implicit in including inclusive increment index indexes inherit inherits initially inline inner inout input insensitive insert instead int integer intersect interval into invoker is isnull isolation join key language large last leading least left level like limit list listen load local localtime localtimestamp location lock log login mapping master match maxvalue median merge minute minvalue missing mode modifies modify month move name names national natural nchar new newline next no nocreatedb nocreateexttable nocreaterole nocreateuser noinherit nologin none noovercommit nosuperuser not nothing notify notnull nowait null nullif nulls numeric object of off offset oids old on only operator option options or order ordered others out outer over overcommit overlaps overlay owned owner parser partial partition partitions passing password percent percentile_cont percentile_disc placing plans position preceding precision prepare prepared preserve primary prior privileges procedural procedure protocol queue quote randomly range read readable reads real reassign recheck recursive ref references reindex reject relative release rename repeatable replace 
replica reset resource restart restrict returning returns revoke right role rollback rollup rootpartition row rows rule savepoint scatter schema scroll search second security segment select sequence serializable session session_user set setof sets share show similar simple smallint some split sql stable standalone start statement statistics stdin stdout storage strict strip subpartition subpartitions substring superuser symmetric sysid system table tablespace temp template temporary text then threshold ties time timestamp to trailing transaction treat trigger trim true truncate trusted type unbounded uncommitted unencrypted union unique unknown unlisten until update user using vacuum valid validation validator value values varchar variadic varying verbose version view volatile web when where whitespace window with within without work writable write xml xmlattributes xmlconcat xmlelement xmlexists xmlforest xmlparse xmlpi xmlroot xmlserialize year yes zone"),builtin:set("bigint int8 bigserial serial8 bit varying varbit boolean bool box bytea character char varchar cidr circle date double precision float float8 inet integer int int4 interval json jsonb line lseg macaddr macaddr8 money numeric decimal path pg_lsn point polygon real float4 smallint int2 smallserial serial2 serial serial4 text time without zone with timetz timestamp timestamptz tsquery tsvector txid_snapshot uuid xml"),atoms:set("false true null unknown"),operatorChars:/^[*+\-%<>!=&|^\/#@?~]/,dateSQL:set("date time timestamp"),support:set("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber nCharCast charsetCast")});CodeMirror.defineMIME("text/x-sparksql",{name:"sql",keywords:set("add after all alter analyze and anti archive array as asc at between bucket buckets by cache cascade case cast change clear cluster clustered codegen collection column columns comment commit compact compactions compute concatenate cost create cross cube current current_date current_timestamp database databases datata dbproperties defined delete delimited deny desc describe dfs directories distinct distribute drop else end escaped except exchange exists explain export extended external false fields fileformat first following for format formatted from full function functions global grant group grouping having if ignore import in index indexes inner inpath inputformat insert intersect interval into is items join keys last lateral lazy left like limit lines list load local location lock locks logical macro map minus msck natural no not null nulls of on optimize option options or order out outer outputformat over overwrite partition partitioned partitions percent preceding principals purge range recordreader recordwriter recover reduce refresh regexp rename repair replace reset restrict revoke right rlike role roles rollback rollup row rows schema schemas select semi separated serde serdeproperties set sets show skewed sort sorted start statistics stored stratify struct table tables tablesample tblproperties temp temporary terminated then to touch transaction transactions transform true truncate unarchive unbounded uncache union unlock unset use using values view when where window with"),builtin:set("tinyint smallint int bigint boolean float double string binary timestamp decimal array map struct uniontype delimited serde sequencefile textfile rcfile inputformat outputformat"),atoms:set("false true null"),operatorChars:/^[*\/+\-%<>!=~&|^]/,dateSQL:set("date time timestamp"),support:set("ODBCdotTable doubleQuote 
zerolessFloat")});CodeMirror.defineMIME("text/x-esper",{name:"sql",client:set("source"),keywords:set("alter and as asc between by count create delete desc distinct drop from group having in insert into is join like not on or order select set table union update values where limit after all and as at asc avedev avg between by case cast coalesce count create current_timestamp day days delete define desc distinct else end escape events every exists false first from full group having hour hours in inner insert instanceof into irstream is istream join last lastweekday left limit like max match_recognize matches median measures metadatasql min minute minutes msec millisecond milliseconds not null offset on or order outer output partition pattern prev prior regexp retain-union retain-intersection right rstream sec second seconds select set some snapshot sql stddev sum then true unidirectional until update variable weekday when where window"),builtin:{},atoms:set("false true null"),operatorChars:/^[*+\-%<>!=&|^\/#@?~]/,dateSQL:set("time"),support:set("decimallessFloat zerolessFloat binaryNumber hexNumber")})});
\ No newline at end of file
diff --git a/datasette/static/codemirror-5.57.0.min.css b/datasette/static/codemirror-5.57.0.min.css
new file mode 100644
index 00000000..0adf786f
--- /dev/null
+++ b/datasette/static/codemirror-5.57.0.min.css
@@ -0,0 +1 @@
+.CodeMirror{font-family:monospace;height:300px;color:#000;direction:ltr}.CodeMirror-lines{padding:4px 0}.CodeMirror pre.CodeMirror-line,.CodeMirror pre.CodeMirror-line-like{padding:0 4px}.CodeMirror-gutter-filler,.CodeMirror-scrollbar-filler{background-color:#fff}.CodeMirror-gutters{border-right:1px solid #ddd;background-color:#f7f7f7;white-space:nowrap}.CodeMirror-linenumber{padding:0 3px 0 5px;min-width:20px;text-align:right;color:#999;white-space:nowrap}.CodeMirror-guttermarker{color:#000}.CodeMirror-guttermarker-subtle{color:#999}.CodeMirror-cursor{border-left:1px solid #000;border-right:none;width:0}.CodeMirror div.CodeMirror-secondarycursor{border-left:1px solid silver}.cm-fat-cursor .CodeMirror-cursor{width:auto;border:0!important;background:#7e7}.cm-fat-cursor div.CodeMirror-cursors{z-index:1}.cm-fat-cursor-mark{background-color:rgba(20,255,20,.5);-webkit-animation:blink 1.06s steps(1) infinite;-moz-animation:blink 1.06s steps(1) infinite;animation:blink 1.06s steps(1) infinite}.cm-animate-fat-cursor{width:auto;border:0;-webkit-animation:blink 1.06s steps(1) infinite;-moz-animation:blink 1.06s steps(1) infinite;animation:blink 1.06s steps(1) infinite;background-color:#7e7}@-moz-keyframes blink{50%{background-color:transparent}}@-webkit-keyframes blink{50%{background-color:transparent}}@keyframes blink{50%{background-color:transparent}}.cm-tab{display:inline-block;text-decoration:inherit}.CodeMirror-rulers{position:absolute;left:0;right:0;top:-50px;bottom:0;overflow:hidden}.CodeMirror-ruler{border-left:1px solid #ccc;top:0;bottom:0;position:absolute}.cm-s-default .cm-header{color:#00f}.cm-s-default .cm-quote{color:#090}.cm-negative{color:#d44}.cm-positive{color:#292}.cm-header,.cm-strong{font-weight:700}.cm-em{font-style:italic}.cm-link{text-decoration:underline}.cm-strikethrough{text-decoration:line-through}.cm-s-default .cm-keyword{color:#708}.cm-s-default .cm-atom{color:#219}.cm-s-default .cm-number{color:#164}.cm-s-default .cm-def{color:#00f}.cm-s-default .cm-variable-2{color:#05a}.cm-s-default .cm-type,.cm-s-default .cm-variable-3{color:#085}.cm-s-default .cm-comment{color:#a50}.cm-s-default .cm-string{color:#a11}.cm-s-default 
.cm-string-2{color:#f50}.cm-s-default .cm-meta{color:#555}.cm-s-default .cm-qualifier{color:#555}.cm-s-default .cm-builtin{color:#30a}.cm-s-default .cm-bracket{color:#997}.cm-s-default .cm-tag{color:#170}.cm-s-default .cm-attribute{color:#00c}.cm-s-default .cm-hr{color:#999}.cm-s-default .cm-link{color:#00c}.cm-s-default .cm-error{color:red}.cm-invalidchar{color:red}.CodeMirror-composing{border-bottom:2px solid}div.CodeMirror span.CodeMirror-matchingbracket{color:#0b0}div.CodeMirror span.CodeMirror-nonmatchingbracket{color:#a22}.CodeMirror-matchingtag{background:rgba(255,150,0,.3)}.CodeMirror-activeline-background{background:#e8f2ff}.CodeMirror{position:relative;overflow:hidden;background:#fff}.CodeMirror-scroll{overflow:scroll!important;margin-bottom:-50px;margin-right:-50px;padding-bottom:50px;height:100%;outline:0;position:relative}.CodeMirror-sizer{position:relative;border-right:50px solid transparent}.CodeMirror-gutter-filler,.CodeMirror-hscrollbar,.CodeMirror-scrollbar-filler,.CodeMirror-vscrollbar{position:absolute;z-index:6;display:none}.CodeMirror-vscrollbar{right:0;top:0;overflow-x:hidden;overflow-y:scroll}.CodeMirror-hscrollbar{bottom:0;left:0;overflow-y:hidden;overflow-x:scroll}.CodeMirror-scrollbar-filler{right:0;bottom:0}.CodeMirror-gutter-filler{left:0;bottom:0}.CodeMirror-gutters{position:absolute;left:0;top:0;min-height:100%;z-index:3}.CodeMirror-gutter{white-space:normal;height:100%;display:inline-block;vertical-align:top;margin-bottom:-50px}.CodeMirror-gutter-wrapper{position:absolute;z-index:4;background:0 0!important;border:none!important}.CodeMirror-gutter-background{position:absolute;top:0;bottom:0;z-index:4}.CodeMirror-gutter-elt{position:absolute;cursor:default;z-index:4}.CodeMirror-gutter-wrapper ::selection{background-color:transparent}.CodeMirror-gutter-wrapper ::-moz-selection{background-color:transparent}.CodeMirror-lines{cursor:text;min-height:1px}.CodeMirror pre.CodeMirror-line,.CodeMirror pre.CodeMirror-line-like{-moz-border-radius:0;-webkit-border-radius:0;border-radius:0;border-width:0;background:0 0;font-family:inherit;font-size:inherit;margin:0;white-space:pre;word-wrap:normal;line-height:inherit;color:inherit;z-index:2;position:relative;overflow:visible;-webkit-tap-highlight-color:transparent;-webkit-font-variant-ligatures:contextual;font-variant-ligatures:contextual}.CodeMirror-wrap pre.CodeMirror-line,.CodeMirror-wrap pre.CodeMirror-line-like{word-wrap:break-word;white-space:pre-wrap;word-break:normal}.CodeMirror-linebackground{position:absolute;left:0;right:0;top:0;bottom:0;z-index:0}.CodeMirror-linewidget{position:relative;z-index:2;padding:.1px}.CodeMirror-rtl pre{direction:rtl}.CodeMirror-code{outline:0}.CodeMirror-gutter,.CodeMirror-gutters,.CodeMirror-linenumber,.CodeMirror-scroll,.CodeMirror-sizer{-moz-box-sizing:content-box;box-sizing:content-box}.CodeMirror-measure{position:absolute;width:100%;height:0;overflow:hidden;visibility:hidden}.CodeMirror-cursor{position:absolute;pointer-events:none}.CodeMirror-measure pre{position:static}div.CodeMirror-cursors{visibility:hidden;position:relative;z-index:3}div.CodeMirror-dragcursors{visibility:visible}.CodeMirror-focused div.CodeMirror-cursors{visibility:visible}.CodeMirror-selected{background:#d9d9d9}.CodeMirror-focused 
.CodeMirror-selected{background:#d7d4f0}.CodeMirror-crosshair{cursor:crosshair}.CodeMirror-line::selection,.CodeMirror-line>span::selection,.CodeMirror-line>span>span::selection{background:#d7d4f0}.CodeMirror-line::-moz-selection,.CodeMirror-line>span::-moz-selection,.CodeMirror-line>span>span::-moz-selection{background:#d7d4f0}.cm-searching{background-color:#ffa;background-color:rgba(255,255,0,.4)}.cm-force-border{padding-right:.1px}@media print{.CodeMirror div.CodeMirror-cursors{visibility:hidden}}.cm-tab-wrap-hack:after{content:''}span.CodeMirror-selectedtext{background:0 0}
\ No newline at end of file
diff --git a/datasette/static/codemirror-5.57.0.min.js b/datasette/static/codemirror-5.57.0.min.js
new file mode 100644
index 00000000..a8ef1854
--- /dev/null
+++ b/datasette/static/codemirror-5.57.0.min.js
@@ -0,0 +1,11 @@
+/*
+ CodeMirror, copyright (c) by Marijn Haverbeke and others
+ Distributed under an MIT license: https://codemirror.net/LICENSE
+
+ This is CodeMirror (https://codemirror.net), a code editor
+ implemented in JavaScript on top of the browser's DOM.
+
+ You can find some technical background for some of the code below
+ at http://marijnhaverbeke.nl/blog/#cm-internals .
+*/
+(function(global,factory){typeof exports==="object"&&typeof module!=="undefined"?module.exports=factory():typeof define==="function"&&define.amd?define(factory):(global=global||self,global.CodeMirror=factory())})(this,function(){"use strict";var userAgent=navigator.userAgent;var platform=navigator.platform;var gecko=/gecko\/\d/i.test(userAgent);var ie_upto10=/MSIE \d/.test(userAgent);var ie_11up=/Trident\/(?:[7-9]|\d{2,})\..*rv:(\d+)/.exec(userAgent);var edge=/Edge\/(\d+)/.exec(userAgent);var ie=ie_upto10||ie_11up||edge;var ie_version=ie&&(ie_upto10?document.documentMode||6:+(edge||ie_11up)[1]);var webkit=!edge&&/WebKit\//.test(userAgent);var qtwebkit=webkit&&/Qt\/\d+\.\d+/.test(userAgent);var chrome=!edge&&/Chrome\//.test(userAgent);var presto=/Opera\//.test(userAgent);var safari=/Apple Computer/.test(navigator.vendor);var mac_geMountainLion=/Mac OS X 1\d\D([8-9]|\d\d)\D/.test(userAgent);var phantom=/PhantomJS/.test(userAgent);var ios=!edge&&/AppleWebKit/.test(userAgent)&&/Mobile\/\w+/.test(userAgent);var android=/Android/.test(userAgent);var mobile=ios||android||/webOS|BlackBerry|Opera Mini|Opera Mobi|IEMobile/i.test(userAgent);var mac=ios||/Mac/.test(platform);var chromeOS=/\bCrOS\b/.test(userAgent);var windows=/win/i.test(platform);var presto_version=presto&&userAgent.match(/Version\/(\d*\.\d*)/);if(presto_version){presto_version=Number(presto_version[1])}if(presto_version&&presto_version>=15){presto=false;webkit=true}var flipCtrlCmd=mac&&(qtwebkit||presto&&(presto_version==null||presto_version<12.11));var captureRightClick=gecko||ie&&ie_version>=9;function classTest(cls){return new RegExp("(^|\\s)"+cls+"(?:$|\\s)\\s*")}var rmClass=function(node,cls){var current=node.className;var match=classTest(cls).exec(current);if(match){var after=current.slice(match.index+match[0].length);node.className=current.slice(0,match.index)+(after?match[1]+after:"")}};function removeChildren(e){for(var count=e.childNodes.length;count>0;--count){e.removeChild(e.firstChild)}return e}function removeChildrenAndAdd(parent,e){return removeChildren(parent).appendChild(e)}function elt(tag,content,className,style){var e=document.createElement(tag);if(className){e.className=className}if(style){e.style.cssText=style}if(typeof content=="string"){e.appendChild(document.createTextNode(content))}else if(content){for(var 
i=0;i=end){return n+(end-i)}n+=nextTab-i;n+=tabSize-n%tabSize;i=nextTab+1}}var Delayed=function(){this.id=null;this.f=null;this.time=0;this.handler=bind(this.onTimeout,this)};Delayed.prototype.onTimeout=function(self){self.id=0;if(self.time<=+new Date){self.f()}else{setTimeout(self.handler,self.time-+new Date)}};Delayed.prototype.set=function(ms,f){this.f=f;var time=+new Date+ms;if(!this.id||time=goal){return pos+Math.min(skipped,goal-col)}col+=nextTab-pos;col+=tabSize-col%tabSize;pos=nextTab+1;if(col>=goal){return pos}}}var spaceStrs=[""];function spaceStr(n){while(spaceStrs.length<=n){spaceStrs.push(lst(spaceStrs)+" ")}return spaceStrs[n]}function lst(arr){return arr[arr.length-1]}function map(array,f){var out=[];for(var i=0;i"€"&&(ch.toUpperCase()!=ch.toLowerCase()||nonASCIISingleCaseWordChar.test(ch))}function isWordChar(ch,helper){if(!helper){return isWordCharBasic(ch)}if(helper.source.indexOf("\\w")>-1&&isWordCharBasic(ch)){return true}return helper.test(ch)}function isEmpty(obj){for(var n in obj){if(obj.hasOwnProperty(n)&&obj[n]){return false}}return true}var extendingChars=/[\u0300-\u036f\u0483-\u0489\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u065e\u0670\u06d6-\u06dc\u06de-\u06e4\u06e7\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0900-\u0902\u093c\u0941-\u0948\u094d\u0951-\u0955\u0962\u0963\u0981\u09bc\u09be\u09c1-\u09c4\u09cd\u09d7\u09e2\u09e3\u0a01\u0a02\u0a3c\u0a41\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a70\u0a71\u0a75\u0a81\u0a82\u0abc\u0ac1-\u0ac5\u0ac7\u0ac8\u0acd\u0ae2\u0ae3\u0b01\u0b3c\u0b3e\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b57\u0b62\u0b63\u0b82\u0bbe\u0bc0\u0bcd\u0bd7\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0cbc\u0cbf\u0cc2\u0cc6\u0ccc\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0d3e\u0d41-\u0d44\u0d4d\u0d57\u0d62\u0d63\u0dca\u0dcf\u0dd2-\u0dd4\u0dd6\u0ddf\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0f18\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86\u0f87\u0f90-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039\u103a\u103d\u103e\u1058\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085\u1086\u108d\u109d\u135f\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u18a9\u1920-\u1922\u1927\u1928\u1932\u1939-\u193b\u1a17\u1a18\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80\u1b81\u1ba2-\u1ba5\u1ba8\u1ba9\u1c2c-\u1c33\u1c36\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1dc0-\u1de6\u1dfd-\u1dff\u200c\u200d\u20d0-\u20f0\u2cef-\u2cf1\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua66f-\ua672\ua67c\ua67d\ua6f0\ua6f1\ua802\ua806\ua80b\ua825\ua826\ua8c4\ua8e0-\ua8f1\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\uaa29-\uaa2e\uaa31\uaa32\uaa35\uaa36\uaa43\uaa4c\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uabe5\uabe8\uabed\udc00-\udfff\ufb1e\ufe00-\ufe0f\ufe20-\ufe26\uff9e\uff9f]/;function isExtendingChar(ch){return ch.charCodeAt(0)>=768&&extendingChars.test(ch)}function skipExtendingChars(str,pos,dir){while((dir<0?pos>0:posto?-1:1;for(;;){if(from==to){return from}var midF=(from+to)/2,mid=dir<0?Math.ceil(midF):Math.floor(midF);if(mid==from){return pred(mid)?from:to}if(pred(mid)){to=mid}else{from=mid+dir}}}function iterateBidiSections(order,from,to,f){if(!order){return f(from,to,"ltr",0)}var found=false;for(var 
i=0;ifrom||from==to&&part.to==from){f(Math.max(part.from,from),Math.min(part.to,to),part.level==1?"rtl":"ltr",i);found=true}}if(!found){f(from,to,"ltr")}}var bidiOther=null;function getBidiPartAt(order,ch,sticky){var found;bidiOther=null;for(var i=0;ich){return i}if(cur.to==ch){if(cur.from!=cur.to&&sticky=="before"){found=i}else{bidiOther=i}}if(cur.from==ch){if(cur.from!=cur.to&&sticky!="before"){found=i}else{bidiOther=i}}}return found!=null?found:bidiOther}var bidiOrdering=function(){var lowTypes="bbbbbbbbbtstwsbbbbbbbbbbbbbbssstwNN%%%NNNNNN,N,N1111111111NNNNNNNLLLLLLLLLLLLLLLLLLLLLLLLLLNNNNNNLLLLLLLLLLLLLLLLLLLLLLLLLLNNNNbbbbbbsbbbbbbbbbbbbbbbbbbbbbbbbbb,N%%%%NNNNLNNNNN%%11NLNNN1LNNNNNLLLLLLLLLLLLLLLLLLLLLLLNLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLN";var arabicTypes="nnnnnnNNr%%r,rNNmmmmmmmmmmmrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrmmmmmmmmmmmmmmmmmmmmmnnnnnnnnnn%nnrrrmrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrmmmmmmmnNmmmmmmrrmmNmmmmrr1111111111";function charType(code){if(code<=247){return lowTypes.charAt(code)}else if(1424<=code&&code<=1524){return"R"}else if(1536<=code&&code<=1785){return arabicTypes.charAt(code-1536)}else if(1774<=code&&code<=2220){return"r"}else if(8192<=code&&code<=8203){return"w"}else if(code==8204){return"b"}else{return"L"}}var bidiRE=/[\u0590-\u05f4\u0600-\u06ff\u0700-\u08ac]/;var isNeutral=/[stwN]/,isStrong=/[LRr]/,countsAsLeft=/[Lb1n]/,countsAsNum=/[1n]/;function BidiSpan(level,from,to){this.level=level;this.from=from;this.to=to}return function(str,direction){var outerType=direction=="ltr"?"L":"R";if(str.length==0||direction=="ltr"&&!bidiRE.test(str)){return false}var len=str.length,types=[];for(var i=0;i-1){map[type]=arr.slice(0,index).concat(arr.slice(index+1))}}}}function signal(emitter,type){var handlers=getHandlers(emitter,type);if(!handlers.length){return}var args=Array.prototype.slice.call(arguments,2);for(var i=0;i0}function eventMixin(ctor){ctor.prototype.on=function(type,f){on(this,type,f)};ctor.prototype.off=function(type,f){off(this,type,f)}}function e_preventDefault(e){if(e.preventDefault){e.preventDefault()}else{e.returnValue=false}}function e_stopPropagation(e){if(e.stopPropagation){e.stopPropagation()}else{e.cancelBubble=true}}function e_defaultPrevented(e){return e.defaultPrevented!=null?e.defaultPrevented:e.returnValue==false}function e_stop(e){e_preventDefault(e);e_stopPropagation(e)}function e_target(e){return e.target||e.srcElement}function e_button(e){var b=e.which;if(b==null){if(e.button&1){b=1}else if(e.button&2){b=3}else if(e.button&4){b=2}}if(mac&&e.ctrlKey&&b==1){b=3}return b}var dragAndDrop=function(){if(ie&&ie_version<9){return false}var div=elt("div");return"draggable"in div||"dragDrop"in div}();var zwspSupported;function zeroWidthElement(measure){if(zwspSupported==null){var test=elt("span","​");removeChildrenAndAdd(measure,elt("span",[test,document.createTextNode("x")]));if(measure.firstChild.offsetHeight!=0){zwspSupported=test.offsetWidth<=1&&test.offsetHeight>2&&!(ie&&ie_version<8)}}var node=zwspSupported?elt("span","​"):elt("span"," ",null,"display: inline-block; width: 1px; margin-right: -1px");node.setAttribute("cm-text","");return node}var badBidiRects;function hasBadBidiRects(measure){if(badBidiRects!=null){return badBidiRects}var txt=removeChildrenAndAdd(measure,document.createTextNode("AخA"));var r0=range(txt,0,1).getBoundingClientRect();var r1=range(txt,1,2).getBoundingClientRect();removeChildren(measure);if(!r0||r0.left==r0.right){return false}return 
badBidiRects=r1.right-r0.right<3}var splitLinesAuto="\n\nb".split(/\n/).length!=3?function(string){var pos=0,result=[],l=string.length;while(pos<=l){var nl=string.indexOf("\n",pos);if(nl==-1){nl=string.length}var line=string.slice(pos,string.charAt(nl-1)=="\r"?nl-1:nl);var rt=line.indexOf("\r");if(rt!=-1){result.push(line.slice(0,rt));pos+=rt+1}else{result.push(line);pos=nl+1}}return result}:function(string){return string.split(/\r\n?|\n/)};var hasSelection=window.getSelection?function(te){try{return te.selectionStart!=te.selectionEnd}catch(e){return false}}:function(te){var range;try{range=te.ownerDocument.selection.createRange()}catch(e){}if(!range||range.parentElement()!=te){return false}return range.compareEndPoints("StartToEnd",range)!=0};var hasCopyEvent=function(){var e=elt("div");if("oncopy"in e){return true}e.setAttribute("oncopy","return;");return typeof e.oncopy=="function"}();var badZoomedRects=null;function hasBadZoomedRects(measure){if(badZoomedRects!=null){return badZoomedRects}var node=removeChildrenAndAdd(measure,elt("span","x"));var normal=node.getBoundingClientRect();var fromRange=range(node,0,1).getBoundingClientRect();return badZoomedRects=Math.abs(normal.left-fromRange.left)>1}var modes={},mimeModes={};function defineMode(name,mode){if(arguments.length>2){mode.dependencies=Array.prototype.slice.call(arguments,2)}modes[name]=mode}function defineMIME(mime,spec){mimeModes[mime]=spec}function resolveMode(spec){if(typeof spec=="string"&&mimeModes.hasOwnProperty(spec)){spec=mimeModes[spec]}else if(spec&&typeof spec.name=="string"&&mimeModes.hasOwnProperty(spec.name)){var found=mimeModes[spec.name];if(typeof found=="string"){found={name:found}}spec=createObj(found,spec);spec.name=found.name}else if(typeof spec=="string"&&/^[\w\-]+\/[\w\-]+\+xml$/.test(spec)){return resolveMode("application/xml")}else if(typeof spec=="string"&&/^[\w\-]+\/[\w\-]+\+json$/.test(spec)){return resolveMode("application/json")}if(typeof spec=="string"){return{name:spec}}else{return spec||{name:"null"}}}function getMode(options,spec){spec=resolveMode(spec);var mfactory=modes[spec.name];if(!mfactory){return getMode(options,"text/plain")}var modeObj=mfactory(options,spec);if(modeExtensions.hasOwnProperty(spec.name)){var exts=modeExtensions[spec.name];for(var prop in exts){if(!exts.hasOwnProperty(prop)){continue}if(modeObj.hasOwnProperty(prop)){modeObj["_"+prop]=modeObj[prop]}modeObj[prop]=exts[prop]}}modeObj.name=spec.name;if(spec.helperType){modeObj.helperType=spec.helperType}if(spec.modeProps){for(var prop$1 in spec.modeProps){modeObj[prop$1]=spec.modeProps[prop$1]}}return modeObj}var modeExtensions={};function extendMode(mode,properties){var exts=modeExtensions.hasOwnProperty(mode)?modeExtensions[mode]:modeExtensions[mode]={};copyObj(properties,exts)}function copyState(mode,state){if(state===true){return state}if(mode.copyState){return mode.copyState(state)}var nstate={};for(var n in state){var val=state[n];if(val instanceof Array){val=val.concat([])}nstate[n]=val}return nstate}function innerMode(mode,state){var info;while(mode.innerMode){info=mode.innerMode(state);if(!info||info.mode==mode){break}state=info.state;mode=info.mode}return info||{mode:mode,state:state}}function startState(mode,a1,a2){return mode.startState?mode.startState(a1,a2):true}var StringStream=function(string,tabSize,lineOracle){this.pos=this.start=0;this.string=string;this.tabSize=tabSize||8;this.lastColumnPos=this.lastColumnValue=0;this.lineStart=0;this.lineOracle=lineOracle};StringStream.prototype.eol=function(){return 
this.pos>=this.string.length};StringStream.prototype.sol=function(){return this.pos==this.lineStart};StringStream.prototype.peek=function(){return this.string.charAt(this.pos)||undefined};StringStream.prototype.next=function(){if(this.posstart};StringStream.prototype.eatSpace=function(){var start=this.pos;while(/[\s\u00a0]/.test(this.string.charAt(this.pos))){++this.pos}return this.pos>start};StringStream.prototype.skipToEnd=function(){this.pos=this.string.length};StringStream.prototype.skipTo=function(ch){var found=this.string.indexOf(ch,this.pos);if(found>-1){this.pos=found;return true}};StringStream.prototype.backUp=function(n){this.pos-=n};StringStream.prototype.column=function(){if(this.lastColumnPos0){return null}if(match&&consume!==false){this.pos+=match[0].length}return match}};StringStream.prototype.current=function(){return this.string.slice(this.start,this.pos)};StringStream.prototype.hideFirstChars=function(n,inner){this.lineStart+=n;try{return inner()}finally{this.lineStart-=n}};StringStream.prototype.lookAhead=function(n){var oracle=this.lineOracle;return oracle&&oracle.lookAhead(n)};StringStream.prototype.baseToken=function(){var oracle=this.lineOracle;return oracle&&oracle.baseToken(this.pos)};function getLine(doc,n){n-=doc.first;if(n<0||n>=doc.size){throw new Error("There is no line "+(n+doc.first)+" in the document.")}var chunk=doc;while(!chunk.lines){for(var i=0;;++i){var child=chunk.children[i],sz=child.chunkSize();if(n=doc.first&&llast){return Pos(last,getLine(doc,last).text.length)}return clipToLen(pos,getLine(doc,pos.line).text.length)}function clipToLen(pos,linelen){var ch=pos.ch;if(ch==null||ch>linelen){return Pos(pos.line,linelen)}else if(ch<0){return Pos(pos.line,0)}else{return pos}}function clipPosArray(doc,array){var out=[];for(var i=0;ithis.maxLookAhead){this.maxLookAhead=n}return line};Context.prototype.baseToken=function(n){if(!this.baseTokens){return null}while(this.baseTokens[this.baseTokenPos]<=n){this.baseTokenPos+=2}var type=this.baseTokens[this.baseTokenPos+1];return{type:type&&type.replace(/( |^)overlay .*/,""),size:this.baseTokens[this.baseTokenPos]-n}};Context.prototype.nextLine=function(){this.line++;if(this.maxLookAhead>0){this.maxLookAhead--}};Context.fromSaved=function(doc,saved,line){if(saved instanceof SavedContext){return new Context(doc,copyState(doc.mode,saved.state),line,saved.lookAhead)}else{return new Context(doc,copyState(doc.mode,saved),line)}};Context.prototype.save=function(copy){var state=copy!==false?copyState(this.doc.mode,this.state):this.state;return this.maxLookAhead>0?new SavedContext(state,this.maxLookAhead):state};function highlightLine(cm,line,context,forceToEnd){var st=[cm.state.modeGen],lineClasses={};runMode(cm,line.text,cm.doc.mode,context,function(end,style){return st.push(end,style)},lineClasses,forceToEnd);var state=context.state;var loop=function(o){context.baseTokens=st;var overlay=cm.state.overlays[o],i=1,at=0;context.state=true;runMode(cm,line.text,overlay.mode,context,function(end,style){var start=i;while(atend){st.splice(i,1,end,st[i+1],i_end)}i+=2;at=Math.min(end,i_end)}if(!style){return}if(overlay.opaque){st.splice(start,i-start,end,"overlay "+style);i=start+2}else{for(;startcm.options.maxHighlightLength&©State(cm.doc.mode,context.state);var result=highlightLine(cm,line,context);if(resetState){context.state=resetState}line.stateAfter=context.save(!resetState);line.styles=result.styles;if(result.classes){line.styleClasses=result.classes}else 
if(line.styleClasses){line.styleClasses=null}if(updateFrontier===cm.doc.highlightFrontier){cm.doc.modeFrontier=Math.max(cm.doc.modeFrontier,++cm.doc.highlightFrontier)}}return line.styles}function getContextBefore(cm,n,precise){var doc=cm.doc,display=cm.display;if(!doc.mode.startState){return new Context(doc,true,n)}var start=findStartLine(cm,n,precise);var saved=start>doc.first&&getLine(doc,start-1).stateAfter;var context=saved?Context.fromSaved(doc,saved,start):new Context(doc,startState(doc.mode),start);doc.iter(start,n,function(line){processLine(cm,line.text,context);var pos=context.line;line.stateAfter=pos==n-1||pos%5==0||pos>=display.viewFrom&&posstream.start){return style}}throw new Error("Mode "+mode.name+" failed to advance stream.")}var Token=function(stream,type,state){this.start=stream.start;this.end=stream.pos;this.string=stream.current();this.type=type||null;this.state=state};function takeToken(cm,pos,precise,asArray){var doc=cm.doc,mode=doc.mode,style;pos=clipPos(doc,pos);var line=getLine(doc,pos.line),context=getContextBefore(cm,pos.line,precise);var stream=new StringStream(line.text,cm.options.tabSize,context),tokens;if(asArray){tokens=[]}while((asArray||stream.poscm.options.maxHighlightLength){flattenSpans=false;if(forceToEnd){processLine(cm,text,context,stream.pos)}stream.pos=text.length;style=null}else{style=extractLineClasses(readToken(mode,stream,context.state,inner),lineClasses)}if(inner){var mName=inner[0].name;if(mName){style="m-"+(style?mName+" "+style:mName)}}if(!flattenSpans||curStyle!=style){while(curStartlim;--search){if(search<=doc.first){return doc.first}var line=getLine(doc,search-1),after=line.stateAfter;if(after&&(!precise||search+(after instanceof SavedContext?after.lookAhead:0)<=doc.modeFrontier)){return search}var indented=countColumn(line.text,null,cm.options.tabSize);if(minline==null||minindent>indented){minline=search-1;minindent=indented}}return minline}function retreatFrontier(doc,n){doc.modeFrontier=Math.min(doc.modeFrontier,n);if(doc.highlightFrontierstart;line--){var saved=getLine(doc,line).stateAfter;if(saved&&(!(saved instanceof SavedContext)||line+saved.lookAhead=startCh:span.to>startCh);(nw||(nw=[])).push(new MarkedSpan(marker,span.from,endsAfter?null:span.to))}}}return nw}function markedSpansAfter(old,endCh,isInsert){var nw;if(old){for(var i=0;i=endCh:span.to>endCh);if(endsAfter||span.from==endCh&&marker.type=="bookmark"&&(!isInsert||span.marker.insertLeft)){var startsBefore=span.from==null||(marker.inclusiveLeft?span.from<=endCh:span.from0&&first){for(var i$2=0;i$20){continue}var newParts=[j,1],dfrom=cmp(p.from,m.from),dto=cmp(p.to,m.to);if(dfrom<0||!mk.inclusiveLeft&&!dfrom){newParts.push({from:p.from,to:m.from})}if(dto>0||!mk.inclusiveRight&&!dto){newParts.push({from:m.to,to:p.to})}parts.splice.apply(parts,newParts);j+=newParts.length-3}}return parts}function detachMarkedSpans(line){var spans=line.markedSpans;if(!spans){return}for(var i=0;ich)&&(!found||compareCollapsedMarkers(found,sp.marker)<0)){found=sp.marker}}}return found}function conflictingCollapsedRange(doc,lineNo,from,to,marker){var line=getLine(doc,lineNo);var sps=sawCollapsedSpans&&line.markedSpans;if(sps){for(var i=0;i=0&&toCmp<=0||fromCmp<=0&&toCmp>=0){continue}if(fromCmp<=0&&(sp.marker.inclusiveRight&&marker.inclusiveLeft?cmp(found.to,from)>=0:cmp(found.to,from)>0)||fromCmp>=0&&(sp.marker.inclusiveRight&&marker.inclusiveLeft?cmp(found.from,to)<=0:cmp(found.from,to)<0)){return true}}}}function visualLine(line){var 
[Extraction damage: a long run of vendored minified CodeMirror JavaScript, plus the inline SVG icon constants from the datasette/static/table.js hunk of this patch, was garbled beyond recovery here and has been omitted.]
diff --git a/datasette/templates/_table.html b/datasette/templates/_table.html
--- a/datasette/templates/_table.html
+++ b/datasette/templates/_table.html
@@ ... @@
         {% for column in display_columns %}
-            <th class="col-{{ column.name|to_css_class }}" scope="col" data-column="{{ column.name|to_css_class }}" data-column-type="{{ column.type }}" data-column-not-null="{{ column.notnull }}" data-is-pk="{% if column.is_pk %}1{% else %}0{% endif %}">
+            <th class="col-{{ column.name|to_css_class }}" scope="col" data-column="{{ column.name|to_css_class }}" data-column-type="{{ column.type }}" data-column-not-null="{{ column.notnull }}" data-is-pk="{% if column.is_pk %}1{% else %}0{% endif %}" data-column-description="{{ column.description or '' }}">
             {% if not column.sortable %}
                 {{ column.name }}
             {% else %}
diff --git a/datasette/templates/table.html b/datasette/templates/table.html
index 211352b5..466e8a47 100644
--- a/datasette/templates/table.html
+++ b/datasette/templates/table.html
@@ -51,6 +51,14 @@
 {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}
 
+{% if metadata.columns %}
+<dl class="column-descriptions">
+    {% for column_name, column_description in metadata.columns.items() %}
+        <dt>{{ column_name }}</dt><dd>{{ column_description }}</dd>
+    {% endfor %}
+</dl>
+{% endif %}
+
 {% if filtered_table_rows_count or human_description_en %}
     <h3>{% if filtered_table_rows_count or filtered_table_rows_count == 0 %}{{ "{:,}".format(filtered_table_rows_count) }} row{% if filtered_table_rows_count == 1 %}{% else %}s{% endif %}{% endif %}
         {% if human_description_en %}{{ human_description_en }}{% endif %}
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 456d8069..486a6131 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -125,6 +125,7 @@ class RowTableShared(DataView):
         """Returns columns, rows for specified table - including fancy foreign key treatment"""
         db = self.ds.databases[database]
         table_metadata = self.ds.table_metadata(database, table)
+        column_descriptions = table_metadata.get("columns") or {}
         column_details = {col.name: col for col in await db.table_column_details(table)}
         sortable_columns = await self.sortable_columns_for_table(database, table, True)
         pks = await db.primary_keys(table)
@@ -147,6 +148,7 @@ class RowTableShared(DataView):
                     "is_pk": r[0] in pks_for_display,
                     "type": type_,
                     "notnull": notnull,
+                    "description": column_descriptions.get(r[0]),
                 }
             )
diff --git a/docs/metadata.rst b/docs/metadata.rst
index dad5adca..35b8aede 100644
--- a/docs/metadata.rst
+++ b/docs/metadata.rst
@@ -78,6 +78,34 @@ The three visible metadata fields you can apply to everything, specific database
 
 For each of these you can provide just the ``*_url`` field and Datasette will treat that as the default link label text and display the URL directly on the page.
 
+.. _metadata_column_descriptions:
+
+Column descriptions
+-------------------
+
+You can include descriptions for your columns by adding a ``"columns": {"name-of-column": "description-of-column"}`` block to your table metadata:
+
+.. code-block:: json
+
+    {
+        "databases": {
+            "database1": {
+                "tables": {
+                    "example_table": {
+                        "columns": {
+                            "column1": "Description of column 1",
+                            "column2": "Description of column 2"
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+These will be displayed at the top of the table page, and will also show in the cog menu for each column.
+
+You can see an example of how these look at `latest.datasette.io/fixtures/roadside_attractions <https://latest.datasette.io/fixtures/roadside_attractions>`__.
+
 Specifying units for a column
 -----------------------------
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 880e4347..4a420e4b 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -336,6 +336,12 @@ METADATA = {
                 "fts_table": "searchable_fts",
                 "fts_pk": "pk",
             },
+            "roadside_attractions": {
+                "columns": {
+                    "name": "The name of the attraction",
+                    "address": "The street address for the attraction",
+                }
+            },
             "attraction_characteristic": {"sort_desc": "pk"},
             "facet_cities": {"sort": "name"},
             "paginated_view": {"size": 25},
diff --git a/tests/test_html.py b/tests/test_html.py
index b1b6c1f3..f12f89cd 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -1777,3 +1777,21 @@ def test_trace_correctly_escaped(app_client):
     response = app_client.get("/fixtures?sql=select+'<h1>Hello'&_trace=1")
     assert "select '<h1>Hello" not in response.text
     assert "select '&lt;h1&gt;Hello" in response.text
+
+
+def test_column_metadata(app_client):
+    response = app_client.get("/fixtures/roadside_attractions")
+    soup = Soup(response.body, "html.parser")
+    dl = soup.find("dl")
+    assert [(dt.text, dt.nextSibling.text) for dt in dl.findAll("dt")] == [
+        ("name", "The name of the attraction"),
+        ("address", "The street address for the attraction"),
+    ]
+    assert (
+        soup.select("th[data-column=name]")[0]["data-column-description"]
+        == "The name of the attraction"
+    )
+    assert (
+        soup.select("th[data-column=address]")[0]["data-column-description"]
+        == "The street address for the attraction"
+    )

From 77f46297a88ac7e49dad2139410b01ee56d5f99c Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 12 Aug 2021 18:01:57 -0700
Subject: [PATCH 0964/2113] Rename --help-config to --help-settings, closes #1431

---
 datasette/cli.py              | 12 ++++++------
 docs/datasette-serve-help.txt |  2 +-
 tests/test_cli.py             | 10 +++++++++-
 3 files changed, 16 insertions(+), 8 deletions(-)

diff --git a/datasette/cli.py b/datasette/cli.py
index e53f3d8e..d4e23c70 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -51,7 +51,7 @@ class Config(click.ParamType):
             name, value = config.split(":", 1)
             if name not in DEFAULT_SETTINGS:
                 self.fail(
-                    f"{name} is not a valid option (--help-config to see all)",
+                    f"{name} is not a valid option (--help-settings to see all)",
                     param,
                     ctx,
                 )
@@ -84,7 +84,7 @@ class Setting(CompositeParamType):
         name, value = config
         if name not in DEFAULT_SETTINGS:
             self.fail(
-                f"{name} is not a valid option (--help-config to see all)",
+                f"{name} is not a valid option (--help-settings to see all)",
                 param,
                 ctx,
             )
@@ -408,7 +408,7 @@ def uninstall(packages, yes):
     help="Run an HTTP GET request against this path, print results and exit",
 )
 @click.option("--version-note", help="Additional note to show on /-/versions")
-@click.option("--help-config", is_flag=True, help="Show available config options")
+@click.option("--help-settings", is_flag=True, help="Show available settings")
 @click.option("--pdb", is_flag=True, help="Launch debugger on any errors")
 @click.option(
     "-o",
@@ -456,7 +456,7 @@ def serve(
     root,
     get,
     version_note,
-    help_config,
+    help_settings,
     pdb,
     open_browser,
     create,
@@ -466,9 +466,9 @@ def serve(
     return_instance=False,
 ):
     """Serve up specified SQLite database files with a web UI"""
-    if help_config:
+    if help_settings:
         formatter = formatting.HelpFormatter()
-        with formatter.section("Config options"):
+        with formatter.section("Settings"):
             formatter.write_dl(
                 [
                     (option.name, f"{option.help} (default={option.default})")
diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt
index ec3f41a0..2911977a 100644
--- a/docs/datasette-serve-help.txt
+++ b/docs/datasette-serve-help.txt
@@ -32,7 +32,7 @@ Options:
   --get TEXT                   Run an HTTP GET request against this path,
                                print results and exit
   --version-note TEXT          Additional note to show on /-/versions
-  --help-config                Show available config options
+  --help-settings              Show available settings
   --pdb                        Launch debugger on any errors
   -o, --open                   Open Datasette in your web browser
   --create                     Create database files if they do not exist
diff --git a/tests/test_cli.py b/tests/test_cli.py
index e31a305e..763fe2e7 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -5,6 +5,7 @@ from .fixtures import (
     EXPECTED_PLUGINS,
 )
 import asyncio
+from datasette.app import SETTINGS
 from datasette.plugins import DEFAULT_PLUGINS
 from datasette.cli import cli, serve
 from datasette.version import
__version__ @@ -147,7 +148,7 @@ def test_metadata_yaml(): root=False, version_note=None, get=None, - help_config=False, + help_settings=False, pdb=False, crossdb=False, open_browser=False, @@ -291,3 +292,10 @@ def test_weird_database_names(ensure_eventloop, tmpdir, filename): cli, [db_path, "--get", "/{}".format(urllib.parse.quote(filename_no_stem))] ) assert result2.exit_code == 0, result2.output + + +def test_help_settings(): + runner = CliRunner() + result = runner.invoke(cli, ["--help-settings"]) + for setting in SETTINGS: + assert setting.name in result.output From ca4f83dc7b1d573b92a8921fca96d3ed490614c3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 12 Aug 2021 18:10:36 -0700 Subject: [PATCH 0965/2113] Rename config= to settings=, refs #1432 --- datasette/app.py | 8 ++++---- datasette/cli.py | 8 ++++---- datasette/templates/table.html | 2 +- datasette/views/base.py | 2 +- datasette/views/database.py | 2 +- tests/fixtures.py | 20 ++++++++++---------- tests/test_api.py | 8 ++++---- tests/test_custom_pages.py | 2 +- tests/test_facets.py | 2 +- tests/test_html.py | 14 ++++++++------ 10 files changed, 35 insertions(+), 33 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f2f75884..8cbaaf9f 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -200,7 +200,7 @@ class Datasette: plugins_dir=None, static_mounts=None, memory=False, - config=None, + settings=None, secret=None, version_note=None, config_dir=None, @@ -279,7 +279,7 @@ class Datasette: raise StartupError("config.json should be renamed to settings.json") if config_dir and (config_dir / "settings.json").exists() and not config: config = json.loads((config_dir / "settings.json").read_text()) - self._settings = dict(DEFAULT_SETTINGS, **(config or {})) + self._settings = dict(DEFAULT_SETTINGS, **(settings or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note self.executor = futures.ThreadPoolExecutor( @@ -419,8 +419,8 @@ class Datasette: def setting(self, key): return self._settings.get(key, None) - def config_dict(self): - # Returns a fully resolved config dictionary, useful for templates + def settings_dict(self): + # Returns a fully resolved settings dictionary, useful for templates return {option.name: self.setting(option.name) for option in SETTINGS} def _metadata_recursive_update(self, orig, updated): diff --git a/datasette/cli.py b/datasette/cli.py index d4e23c70..ea6da748 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -495,14 +495,14 @@ def serve( if metadata: metadata_data = parse_metadata(metadata.read()) - combined_config = {} + combined_settings = {} if config: click.echo( "--config name:value will be deprecated in Datasette 1.0, use --setting name value instead", err=True, ) - combined_config.update(config) - combined_config.update(settings) + combined_settings.update(config) + combined_settings.update(settings) kwargs = dict( immutables=immutable, @@ -514,7 +514,7 @@ def serve( template_dir=template_dir, plugins_dir=plugins_dir, static_mounts=static, - config=combined_config, + settings=combined_settings, memory=memory, secret=secret, version_note=version_note, diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 466e8a47..a28945ad 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -201,7 +201,7 @@ CSV options: {% if expandable_columns %}{% endif %} - {% if next_url and config.allow_csv_stream %}{% endif %} + {% if next_url and settings.allow_csv_stream %}{% 
endif %} {% for key, value in url_csv_hidden_args %} diff --git a/datasette/views/base.py b/datasette/views/base.py index 1cea1386..3333781c 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -614,7 +614,7 @@ class DataView(BaseView): ] + [("_size", "max")], "datasette_version": __version__, - "config": self.ds.config_dict(), + "settings": self.ds.settings_dict(), }, } if "metadata" not in context: diff --git a/datasette/views/database.py b/datasette/views/database.py index 7c36034c..e3070ce6 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -456,7 +456,7 @@ class QueryView(DataView): "canned_query": canned_query, "edit_sql_url": edit_sql_url, "metadata": metadata, - "config": self.ds.config_dict(), + "settings": self.ds.settings_dict(), "request": request, "show_hide_link": show_hide_link, "show_hide_text": show_hide_text, diff --git a/tests/fixtures.py b/tests/fixtures.py index 4a420e4b..dc22c609 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -99,7 +99,7 @@ def make_app_client( max_returned_rows=None, cors=False, memory=False, - config=None, + settings=None, filename="fixtures.db", is_immutable=False, extra_databases=None, @@ -129,7 +129,7 @@ def make_app_client( # Insert at start to help test /-/databases ordering: files.insert(0, extra_filepath) os.chdir(os.path.dirname(filepath)) - config = config or {} + settings = settings or {} for key, value in { "default_page_size": 50, "max_returned_rows": max_returned_rows or 100, @@ -138,8 +138,8 @@ def make_app_client( # errors when running the full test suite: "num_sql_threads": 1, }.items(): - if key not in config: - config[key] = value + if key not in settings: + settings[key] = value ds = Datasette( files, immutables=immutables, @@ -147,7 +147,7 @@ def make_app_client( cors=cors, metadata=metadata or METADATA, plugins_dir=PLUGINS_DIR, - config=config, + settings=settings, inspect_data=inspect_data, static_mounts=static_mounts, template_dir=template_dir, @@ -171,7 +171,7 @@ def app_client_no_files(): @pytest.fixture(scope="session") def app_client_base_url_prefix(): - with make_app_client(config={"base_url": "/prefix/"}) as client: + with make_app_client(settings={"base_url": "/prefix/"}) as client: yield client @@ -210,13 +210,13 @@ def app_client_two_attached_databases_one_immutable(): @pytest.fixture(scope="session") def app_client_with_hash(): - with make_app_client(config={"hash_urls": True}, is_immutable=True) as client: + with make_app_client(settings={"hash_urls": True}, is_immutable=True) as client: yield client @pytest.fixture(scope="session") def app_client_with_trace(): - with make_app_client(config={"trace_debug": True}, is_immutable=True) as client: + with make_app_client(settings={"trace_debug": True}, is_immutable=True) as client: yield client @@ -234,13 +234,13 @@ def app_client_returned_rows_matches_page_size(): @pytest.fixture(scope="session") def app_client_larger_cache_size(): - with make_app_client(config={"cache_size_kb": 2500}) as client: + with make_app_client(settings={"cache_size_kb": 2500}) as client: yield client @pytest.fixture(scope="session") def app_client_csv_max_mb_one(): - with make_app_client(config={"max_csv_mb": 1}) as client: + with make_app_client(settings={"max_csv_mb": 1}) as client: yield client diff --git a/tests/test_api.py b/tests/test_api.py index 83cca521..1e93c62e 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1711,14 +1711,14 @@ def test_suggested_facets(app_client): def test_allow_facet_off(): - with 
make_app_client(config={"allow_facet": False}) as client: + with make_app_client(settings={"allow_facet": False}) as client: assert 400 == client.get("/fixtures/facetable.json?_facet=planet_int").status # Should not suggest any facets either: assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"] def test_suggest_facets_off(): - with make_app_client(config={"suggest_facets": False}) as client: + with make_app_client(settings={"suggest_facets": False}) as client: # Now suggested_facets should be [] assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"] @@ -1883,7 +1883,7 @@ def test_config_cache_size(app_client_larger_cache_size): def test_config_force_https_urls(): - with make_app_client(config={"force_https_urls": True}) as client: + with make_app_client(settings={"force_https_urls": True}) as client: response = client.get("/fixtures/facetable.json?_size=3&_facet=state") assert response.json["next_url"].startswith("https://") assert response.json["facet_results"]["state"]["results"][0][ @@ -1921,7 +1921,7 @@ def test_custom_query_with_unicode_characters(app_client): @pytest.mark.parametrize("trace_debug", (True, False)) def test_trace(trace_debug): - with make_app_client(config={"trace_debug": trace_debug}) as client: + with make_app_client(settings={"trace_debug": trace_debug}) as client: response = client.get("/fixtures/simple_primary_key.json?_trace=1") assert response.status == 200 diff --git a/tests/test_custom_pages.py b/tests/test_custom_pages.py index 5a71f56d..76c67397 100644 --- a/tests/test_custom_pages.py +++ b/tests/test_custom_pages.py @@ -14,7 +14,7 @@ def custom_pages_client(): @pytest.fixture(scope="session") def custom_pages_client_with_base_url(): with make_app_client( - template_dir=TEST_TEMPLATE_DIRS, config={"base_url": "/prefix/"} + template_dir=TEST_TEMPLATE_DIRS, settings={"base_url": "/prefix/"} ) as client: yield client diff --git a/tests/test_facets.py b/tests/test_facets.py index 18fb8c3b..22927512 100644 --- a/tests/test_facets.py +++ b/tests/test_facets.py @@ -351,7 +351,7 @@ async def test_json_array_with_blanks_and_nulls(): @pytest.mark.asyncio async def test_facet_size(): - ds = Datasette([], memory=True, config={"max_returned_rows": 50}) + ds = Datasette([], memory=True, settings={"max_returned_rows": 50}) db = ds.add_database(Database(ds, memory_name="test_facet_size")) await db.execute_write( "create table neighbourhoods(city text, neighbourhood text)", block=True diff --git a/tests/test_html.py b/tests/test_html.py index f12f89cd..90fcdae7 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -214,7 +214,7 @@ def test_definition_sql(path, expected_definition_sql, app_client): def test_table_cell_truncation(): - with make_app_client(config={"truncate_cells_html": 5}) as client: + with make_app_client(settings={"truncate_cells_html": 5}) as client: response = client.get("/fixtures/facetable") assert response.status == 200 table = Soup(response.body, "html.parser").find("table") @@ -239,7 +239,7 @@ def test_table_cell_truncation(): def test_row_page_does_not_truncate(): - with make_app_client(config={"truncate_cells_html": 5}) as client: + with make_app_client(settings={"truncate_cells_html": 5}) as client: response = client.get("/fixtures/facetable/1") assert response.status == 200 table = Soup(response.body, "html.parser").find("table") @@ -1072,7 +1072,9 @@ def test_database_download_disallowed_for_memory(): def test_allow_download_off(): - with make_app_client(is_immutable=True, 
config={"allow_download": False}) as client: + with make_app_client( + is_immutable=True, settings={"allow_download": False} + ) as client: response = client.get("/fixtures") soup = Soup(response.body, "html.parser") assert not len(soup.findAll("a", {"href": re.compile(r"\.db$")})) @@ -1486,7 +1488,7 @@ def test_query_error(app_client): def test_config_template_debug_on(): - with make_app_client(config={"template_debug": True}) as client: + with make_app_client(settings={"template_debug": True}) as client: response = client.get("/fixtures/facetable?_context=1") assert response.status == 200 assert response.text.startswith("
{")
@@ -1500,7 +1502,7 @@ def test_config_template_debug_off(app_client):
 
 def test_debug_context_includes_extra_template_vars():
     # https://github.com/simonw/datasette/issues/693
-    with make_app_client(config={"template_debug": True}) as client:
+    with make_app_client(settings={"template_debug": True}) as client:
         response = client.get("/fixtures/facetable?_context=1")
         # scope_path is added by PLUGIN1
         assert "scope_path" in response.text
@@ -1744,7 +1746,7 @@ def test_facet_more_links(
     expected_ellipses_url,
 ):
     with make_app_client(
-        config={"max_returned_rows": max_returned_rows, "default_facet_size": 2}
+        settings={"max_returned_rows": max_returned_rows, "default_facet_size": 2}
     ) as client:
         response = client.get(path)
         soup = Soup(response.body, "html.parser")
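
The rename above also reaches the ``Datasette()`` constructor itself. A minimal sketch of the new calling convention, assuming the same setting names used by the test fixtures in this patch (illustrative, not part of the patch):

.. code-block:: python

    from datasette.app import Datasette

    # settings= replaces the old config= keyword argument
    ds = Datasette(
        ["fixtures.db"],
        settings={"default_page_size": 50, "max_returned_rows": 100},
    )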

From bbc4756f9e8180c7a40c57f8a35e39dee7be7807 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Thu, 12 Aug 2021 20:54:25 -0700
Subject: [PATCH 0966/2113] Settings fix, refs #1433

---
 datasette/app.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/datasette/app.py b/datasette/app.py
index 8cbaaf9f..adc543ef 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -277,7 +277,7 @@ class Datasette:
         self.static_mounts = static_mounts or []
         if config_dir and (config_dir / "config.json").exists():
             raise StartupError("config.json should be renamed to settings.json")
-        if config_dir and (config_dir / "settings.json").exists() and not config:
+        if config_dir and (config_dir / "settings.json").exists() and not settings:
             config = json.loads((config_dir / "settings.json").read_text())
         self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
         self.renderers = {}  # File extension -> (renderer, can_render) functions

From 2883098770fc66e50183b2b231edbde20848d4d6 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Thu, 12 Aug 2021 22:10:07 -0700
Subject: [PATCH 0967/2113] Fixed config_dir mode, refs #1432

---
 datasette/app.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/datasette/app.py b/datasette/app.py
index adc543ef..06db740e 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -278,7 +278,7 @@ class Datasette:
         if config_dir and (config_dir / "config.json").exists():
             raise StartupError("config.json should be renamed to settings.json")
         if config_dir and (config_dir / "settings.json").exists() and not settings:
-            config = json.loads((config_dir / "settings.json").read_text())
+            settings = json.loads((config_dir / "settings.json").read_text())
         self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
         self.renderers = {}  # File extension -> (renderer, can_render) functions
         self.version_note = version_note
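
Together with the previous fix, configuration directory mode now actually loads ``settings.json``. An illustrative sketch, assuming a ``my-project/settings.json`` file containing ``{"default_page_size": 10}``:

.. code-block:: python

    from pathlib import Path
    from datasette.app import Datasette

    # settings.json from the config_dir is merged over DEFAULT_SETTINGS
    ds = Datasette([], config_dir=Path("my-project"))
    assert ds.setting("default_page_size") == 10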

From adb5b70de5cec3c3dd37184defe606a082c232cf Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Mon, 16 Aug 2021 11:56:32 -0700
Subject: [PATCH 0968/2113] Show count of facet values if ?_facet_size=max,
 closes #1423

---
 datasette/static/app.css       |  5 +++++
 datasette/templates/table.html |  4 +++-
 datasette/views/table.py       |  1 +
 tests/test_html.py             | 22 +++++++++++++++++++++-
 4 files changed, 30 insertions(+), 2 deletions(-)

diff --git a/datasette/static/app.css b/datasette/static/app.css
index bf068fdf..af3e14d5 100644
--- a/datasette/static/app.css
+++ b/datasette/static/app.css
@@ -633,6 +633,11 @@ form button[type=button] {
     width: 250px;
     margin-right: 15px;
 }
+.facet-info-total {
+    font-size: 0.8em;
+    color: #666;
+    padding-right: 0.25em;
+}
 .facet-info li,
 .facet-info ul {
     margin: 0;
diff --git a/datasette/templates/table.html b/datasette/templates/table.html
index a28945ad..6ba301b5 100644
--- a/datasette/templates/table.html
+++ b/datasette/templates/table.html
@@ -156,7 +156,9 @@
        {% for facet_info in sorted_facet_results %}
            <div class="facet-info facet-{{ facet_info.name|to_css_class }}">
                <p class="facet-info-name">
-                    <strong>{{ facet_info.name }}{% if facet_info.type != "column" %} ({{ facet_info.type }}){% endif %}</strong>
+                    <strong>{{ facet_info.name }}{% if facet_info.type != "column" %} ({{ facet_info.type }}){% endif %}
+                        {% if show_facet_counts %}<span class="facet-info-total">{% if facet_info.truncated %}&gt;{% endif %}{{ facet_info.results|length }}</span>{% endif %}
+                    </strong>
                    {% if facet_info.hideable %}

                    {% endif %}
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 486a6131..83f7c7cb 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -928,6 +928,7 @@ class TableView(RowTableShared):
                     key=lambda f: (len(f["results"]), f["name"]),
                     reverse=True,
                 ),
+                "show_facet_counts": special_args.get("_facet_size") == "max",
                 "extra_wheres_for_ui": extra_wheres_for_ui,
                 "form_hidden_args": form_hidden_args,
                 "is_sortable": any(c["sortable"] for c in display_columns),
diff --git a/tests/test_html.py b/tests/test_html.py
index 90fcdae7..e73ccd2f 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -479,7 +479,7 @@ def test_facet_display(app_client):
     for div in divs:
         actual.append(
             {
-                "name": div.find("strong").text,
+                "name": div.find("strong").text.split()[0],
                 "items": [
                     {
                         "name": a.text,
@@ -1797,3 +1797,23 @@ def test_column_metadata(app_client):
         soup.select("th[data-column=address]")[0]["data-column-description"]
         == "The street address for the attraction"
     )
+
+
+@pytest.mark.parametrize("use_facet_size_max", (True, False))
+def test_facet_total_shown_if_facet_max_size(use_facet_size_max):
+    # https://github.com/simonw/datasette/issues/1423
+    with make_app_client(settings={"max_returned_rows": 100}) as client:
+        path = "/fixtures/sortable?_facet=content&_facet=pk1"
+        if use_facet_size_max:
+            path += "&_facet_size=max"
+        response = client.get(path)
+        assert response.status == 200
+        fragments = (
+            '<span class="facet-info-total">&gt;100</span>',
+            '<span class="facet-info-total">8</span>',
+        )
+        for fragment in fragments:
+            if use_facet_size_max:
+                assert fragment in response.text
+            else:
+                assert fragment not in response.text

From d84e574e59c51ddcd6cf60a6f9b3d45182daf824 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 19 Aug 2021 14:09:38 -0700
Subject: [PATCH 0969/2113] Ability to deploy demos of branches

* Ability to deploy additional branch demos, closes #1442
* Only run tests before deploy on main branch
* Documentation for continuous deployment
---
 .github/workflows/deploy-latest.yml |  8 +++++++-
 docs/contributing.rst               | 11 +++++++++++
 2 files changed, 18 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml
index 849adb40..1a07503a 100644
--- a/.github/workflows/deploy-latest.yml
+++ b/.github/workflows/deploy-latest.yml
@@ -29,6 +29,7 @@ jobs:
           python -m pip install -e .[docs]
           python -m pip install sphinx-to-sqlite==0.1a1
       - name: Run tests
+        if: ${{ github.ref == 'refs/heads/main' }}
        run: |
          pytest -n auto -m "not serial"
          pytest -m "serial"
@@ -50,6 +51,8 @@
        run: |-
          gcloud config set run/region us-central1
          gcloud config set project datasette-222320
+         export SUFFIX="-${GITHUB_REF#refs/heads/}"
+         export SUFFIX=${SUFFIX#-main}
          datasette publish cloudrun fixtures.db extra_database.db \
            -m fixtures.json \
            --plugins-dir=plugins \
            --branch=$GITHUB_SHA \
            --version-note=$GITHUB_SHA \
            --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \
            --install=pysqlite3-binary \
-           --service=datasette-latest
+           --service "datasette-latest$SUFFIX"
+     - name: Deploy to docs as well (only for main)
+       if: ${{ github.ref == 'refs/heads/main' }}
+       run: |-
          # Deploy docs.db to a different service
          datasette publish cloudrun docs.db \
--branch=$GITHUB_SHA \ diff --git a/docs/contributing.rst b/docs/contributing.rst index 8a638e0b..07f2a0e4 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -202,6 +202,17 @@ For added productivity, you can use use `sphinx-autobuild `__ is re-deployed automatically to Google Cloud Run for every push to ``main`` that passes the test suite. This is implemented by the GitHub Actions workflow at `.github/workflows/deploy-latest.yml `__. + +Specific branches can also be set to automatically deploy by adding them to the ``on: push: branches`` block at the top of the workflow YAML file. Branches configured in this way will be deployed to a new Cloud Run service whether or not their tests pass. + +The Cloud Run URL for a branch demo can be found in the GitHub Actions logs. + .. _contributing_release: Release process From 4eb3ae40fb223a66ae574fb84fac99e96183b08d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 19 Aug 2021 14:17:44 -0700 Subject: [PATCH 0970/2113] Don't bother building docs if not on main Refs ##1442 --- .github/workflows/deploy-latest.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 1a07503a..1ae96e89 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -36,6 +36,7 @@ jobs: - name: Build fixtures.db run: python tests/fixtures.py fixtures.db fixtures.json plugins --extra-db-filename extra_database.db - name: Build docs.db + if: ${{ github.ref == 'refs/heads/main' }} run: |- cd docs sphinx-build -b xml . _build From 7e15422aacfa9e9735cb9f9beaa32250edbf4905 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 19 Aug 2021 14:23:43 -0700 Subject: [PATCH 0971/2113] Documentation for datasette.databases property, closes #1443 --- docs/internals.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/internals.rst b/docs/internals.rst index 058a8969..d5db7ffa 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -196,6 +196,17 @@ Datasette class This object is an instance of the ``Datasette`` class, passed to many plugin hooks as an argument called ``datasette``. +.. _datasette_databases: + +.databases +---------- + +Property exposing an ordered dictionary of databases currently connected to Datasette. + +The dictionary keys are the name of the database that is used in the URL - e.g. ``/fixtures`` would have a key of ``"fixtures"``. The values are :ref:`internals_database` instances. + +All databases are listed, irrespective of user permissions. This means that the ``_internal`` database will always be listed here. + .. _datasette_plugin_config: .plugin_config(plugin_name, database=None, table=None) From 92a99d969c01633dba14cceebeda65daaedaec17 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 24 Aug 2021 11:13:42 -0700 Subject: [PATCH 0972/2113] Added not-footer wrapper div, refs #1446 --- datasette/templates/base.html | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/datasette/templates/base.html b/datasette/templates/base.html index e61edc4f..c9aa7e31 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -13,6 +13,7 @@ {% block extra_head %}{% endblock %} +

{% block footer %}{% include "_footer.html" %}{% endblock %}
{% include "_close_open_menus.html" %} From 93c3a7ffbfb3378f743ebce87d033cf1ce7689e0 Mon Sep 17 00:00:00 2001 From: Tim Sherratt Date: Wed, 25 Aug 2021 11:28:58 +1000 Subject: [PATCH 0973/2113] Remove underscore from search mode parameter name (#1447) The text refers to the parameter as `searchmode` but the `metadata.json` example uses `search_mode`. The latter doesn't actually seem to work. --- docs/full_text_search.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/full_text_search.rst b/docs/full_text_search.rst index f549296f..90b2e8c1 100644 --- a/docs/full_text_search.rst +++ b/docs/full_text_search.rst @@ -70,7 +70,7 @@ Here is an example which enables full-text search (with SQLite advanced search o "display_ads": { "fts_table": "ads_fts", "fts_pk": "id", - "search_mode": "raw" + "searchmode": "raw" } } } From 5161422b7fa249c6b7d6dc47ec6f483d3fdbd170 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Aug 2021 18:29:26 -0700 Subject: [PATCH 0974/2113] Update trustme requirement from <0.9,>=0.7 to >=0.7,<0.10 (#1433) Updates the requirements on [trustme](https://github.com/python-trio/trustme) to permit the latest version. - [Release notes](https://github.com/python-trio/trustme/releases) - [Commits](https://github.com/python-trio/trustme/compare/v0.7.0...v0.9.0) --- updated-dependencies: - dependency-name: trustme dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 65e99848..a3866515 100644 --- a/setup.py +++ b/setup.py @@ -73,7 +73,7 @@ setup( "beautifulsoup4>=4.8.1,<4.10.0", "black==21.6b0", "pytest-timeout>=1.4.2,<1.5", - "trustme>=0.7,<0.9", + "trustme>=0.7,<0.10", ], "rich": ["rich"], }, From a1a33bb5822214be1cebd98cd858b2058d91a4aa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Aug 2021 18:29:55 -0700 Subject: [PATCH 0975/2113] Bump black from 21.6b0 to 21.7b0 (#1400) Bumps [black](https://github.com/psf/black) from 21.6b0 to 21.7b0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/commits) --- updated-dependencies: - dependency-name: black dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index a3866515..84f32087 100644 --- a/setup.py +++ b/setup.py @@ -71,7 +71,7 @@ setup( "pytest-xdist>=2.2.1,<2.4", "pytest-asyncio>=0.10,<0.16", "beautifulsoup4>=4.8.1,<4.10.0", - "black==21.6b0", + "black==21.7b0", "pytest-timeout>=1.4.2,<1.5", "trustme>=0.7,<0.10", ], From 3655bb49a464bcc8004e491cc4d4de292f1acd62 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 27 Aug 2021 17:48:54 -0700 Subject: [PATCH 0976/2113] Better default help text, closes #1450 --- datasette/cli.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/datasette/cli.py b/datasette/cli.py index ea6da748..65da5613 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -123,7 +123,11 @@ def sqlite_extensions(fn): @click.version_option(version=__version__) def cli(): """ - Datasette! 
+    Datasette is an open source multi-tool for exploring and publishing data
+
+    \b
+    About Datasette: https://datasette.io/
+    Full documentation: https://docs.datasette.io/
    """


From 30c18576d603366dc3bd83ba50de1b7e70844430 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 27 Aug 2021 18:39:42 -0700
Subject: [PATCH 0977/2113] register_commands() plugin hook, closes #1449

---
 datasette/cli.py       |  3 +++
 datasette/hookspecs.py |  5 +++++
 docs/plugin_hooks.rst  | 45 +++++++++++++++++++++++++++++++++++++++++
 tests/test_plugins.py  | 57 +++++++++++++++++++++++++++++++++++++++++-
 4 files changed, 109 insertions(+), 1 deletion(-)

diff --git a/datasette/cli.py b/datasette/cli.py
index 65da5613..22e2338a 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -595,6 +595,9 @@ def serve(
         uvicorn.run(ds.app(), **uvicorn_kwargs)


+pm.hook.register_commands(cli=cli)
+
+
 async def check_databases(ds):
     # Run check_connection against every connected database
     # to confirm they are all usable
diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py
index 56c79d23..1d4e3b27 100644
--- a/datasette/hookspecs.py
+++ b/datasette/hookspecs.py
@@ -79,6 +79,11 @@ def register_routes(datasette):
     """Register URL routes: return a list of (regex, view_function) pairs"""


+@hookspec
+def register_commands(cli):
+    """Register additional CLI commands, e.g. 'datasette mycommand ...'"""
+
+
 @hookspec
 def actor_from_request(datasette, request):
     """Return an actor dictionary based on the incoming request"""
diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst
index 5cdb1623..a6fe1071 100644
--- a/docs/plugin_hooks.rst
+++ b/docs/plugin_hooks.rst
@@ -587,6 +587,51 @@ See :ref:`writing_plugins_designing_urls` for tips on designing the URL routes u

 Examples: `datasette-auth-github `__, `datasette-psutil `__

+.. _plugin_register_commands:
+
+register_commands(cli)
+----------------------
+
+``cli`` - the root Datasette `Click command group `__
+    Use this to register additional CLI commands
+
+Register additional CLI commands that can be run using ``datasette yourcommand ...``. This provides a mechanism by which plugins can add new CLI commands to Datasette.
+
+This example registers a new ``datasette verify file1.db file2.db`` command that checks if the provided file paths are valid SQLite databases:
+
+.. code-block:: python
+
+    from datasette import hookimpl
+    import click
+    import sqlite3
+
+    @hookimpl
+    def register_commands(cli):
+        @cli.command()
+        @click.argument("files", type=click.Path(exists=True), nargs=-1)
+        def verify(files):
+            "Verify that files can be opened by Datasette"
+            for file in files:
+                conn = sqlite3.connect(str(file))
+                try:
+                    conn.execute("select * from sqlite_master")
+                except sqlite3.DatabaseError:
+                    raise click.ClickException("Invalid database: {}".format(file))
+
+The new command can then be executed like so::
+
+    datasette verify fixtures.db
+
+Help text (from the docstring for the function plus any defined Click arguments or options) will become available using::
+
+    datasette verify --help
+
+Plugins can register multiple commands by making multiple calls to the ``@cli.command()`` decorator. Consult the `Click documentation `__ for full details on how to build a CLI command, including how to define arguments and options.
+
+Note that ``register_commands()`` plugins cannot be used with the :ref:`--plugins-dir mechanism ` - they need to be installed into the same virtual environment as Datasette using ``pip install``. 
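Because the hook is invoked at import time via ``pm.hook.register_commands(cli=cli)``, a plugin registered after import only takes effect once the ``cli`` module is reloaded, which is exactly what the test below does. A minimal end-to-end sketch (the ``hello`` command and plugin name are hypothetical):

.. code-block:: python

    import importlib

    from click.testing import CliRunner
    from datasette import cli, hookimpl
    from datasette.plugins import pm


    class HelloPlugin:
        __name__ = "HelloPlugin"

        @hookimpl
        def register_commands(self, cli):
            @cli.command()
            def hello():
                "Say hello from a plugin-registered command"
                print("hello")


    pm.register(HelloPlugin(), name="hello-plugin")
    importlib.reload(cli)  # re-runs pm.hook.register_commands(cli=cli)

    result = CliRunner().invoke(cli.cli, ["hello"])
    assert result.output.strip() == "hello"

    # Clean up so other code sees the default command set again
    pm.unregister(name="hello-plugin")
    importlib.reload(cli)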
Provided it has a ``setup.py`` file (see :ref:`writing_plugins_packaging`) you can run ``pip install`` directly against the directory in which you are developing your plugin like so:: + + pip install -e path/to/my/datasette-plugin + .. _plugin_register_facet_classes: register_facet_classes() diff --git a/tests/test_plugins.py b/tests/test_plugins.py index ec8ff0c5..a024c39b 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -6,13 +6,15 @@ from .fixtures import ( TEMP_PLUGIN_SECRET_FILE, TestClient as _TestClient, ) # noqa +from click.testing import CliRunner from datasette.app import Datasette -from datasette import cli +from datasette import cli, hookimpl from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm from datasette.utils.sqlite import sqlite3 from datasette.utils import CustomRow from jinja2.environment import Template import base64 +import importlib import json import os import pathlib @@ -902,3 +904,56 @@ def test_hook_get_metadata(app_client): assert "Hello from local metadata" == meta["databases"]["from-local"]["title"] assert "Hello from the plugin hook" == meta["databases"]["from-hook"]["title"] pm.hook.get_metadata = og_pm_hook_get_metadata + + +def _extract_commands(output): + lines = output.split("Commands:\n", 1)[1].split("\n") + return {line.split()[0].replace("*", "") for line in lines if line.strip()} + + +def test_hook_register_commands(): + # Without the plugin should have seven commands + runner = CliRunner() + result = runner.invoke(cli.cli, "--help") + commands = _extract_commands(result.output) + assert commands == { + "serve", + "inspect", + "install", + "package", + "plugins", + "publish", + "uninstall", + } + + # Now install a plugin + class VerifyPlugin: + __name__ = "VerifyPlugin" + + @hookimpl + def register_commands(self, cli): + @cli.command() + def verify(): + pass + + @cli.command() + def unverify(): + pass + + pm.register(VerifyPlugin(), name="verify") + importlib.reload(cli) + result2 = runner.invoke(cli.cli, "--help") + commands2 = _extract_commands(result2.output) + assert commands2 == { + "serve", + "inspect", + "install", + "package", + "plugins", + "publish", + "uninstall", + "verify", + "unverify", + } + pm.unregister(name="verify") + importlib.reload(cli) From d3ea36713194e3d92ed4c066337400146c921d0e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 27 Aug 2021 18:55:54 -0700 Subject: [PATCH 0978/2113] Release 0.59a2 Refs #942, #1421, #1423, #1431, #1443, #1446, #1449 --- datasette/version.py | 2 +- docs/changelog.rst | 13 +++++++++++++ docs/plugin_hooks.rst | 2 +- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index f5fbfb3f..87b18fab 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.59a1" +__version__ = "0.59a2" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 1406a7ca..737a151b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,19 @@ Changelog ========= +.. _v0_59a2: + +0.59a2 (2021-08-27) +------------------- + +- Columns can now have associated metadata descriptions in ``metadata.json``, see :ref:`metadata_column_descriptions`. (:issue:`942`) +- New :ref:`register_commands() ` plugin hook allows plugins to register additional Datasette CLI commands, e.g. ``datasette mycommand file.db``. (:issue:`1449`) +- Adding ``?_facet_size=max`` to a table page now shows the number of unique values in each facet. 
(:issue:`1423`) +- Code that figures out which named parameters a SQL query takes in order to display form fields for them is no longer confused by strings that contain colon characters. (:issue:`1421`) +- Renamed ``--help-config`` option to ``--help-settings``. (:issue:`1431`) +- ``datasette.databases`` property is now a documented API. (:issue:`1443`) +- Datasette base template now wraps everything other than the ``
<footer>`` in a ``<div class="not-footer">`` element. (:issue:`1446`)
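To see the ``?_facet_size=max`` behavior from this release in action, a minimal sketch using the internal ``datasette.client`` API (this assumes a local ``fixtures.db`` file with a ``facetable`` table, as in the test suite):

.. code-block:: python

    from datasette.app import Datasette


    async def facet_value_counts():
        ds = Datasette(["fixtures.db"])  # assumes this database file exists
        # _facet_size=max requests every distinct value for the facet;
        # the page now also reports the number of unique values per facet
        response = await ds.client.get(
            "/fixtures/facetable.json?_facet=state&_facet_size=max"
        )
        return response.json()["facet_results"]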

+        assert ">Table With Space In Name 🔒</a>
" in response.text + # Queries + assert ">from_async_hook 🔒" in response.text + assert ">query_two" in response.text + # Views + assert ">paginated_view 🔒" in response.text + assert ">simple_view" in response.text + finally: + cascade_app_client.ds._metadata_local = previous_metadata From 602c0888ce633000cfae42be00de474ef681bda7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 23 Oct 2022 20:07:09 -0700 Subject: [PATCH 1332/2113] Release 0.63a1 Refs #1646, #1819, #1825, #1829, #1831, #1832, #1834, #1844, #1848 --- datasette/version.py | 2 +- docs/changelog.rst | 16 +++++++++++++++- docs/internals.rst | 2 +- docs/performance.rst | 2 ++ 4 files changed, 19 insertions(+), 3 deletions(-) diff --git a/datasette/version.py b/datasette/version.py index e5ad585f..eb36da45 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.63a0" +__version__ = "0.63a1" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index f5cf03e8..dd4c20b7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,20 @@ Changelog ========= +.. _v0_63a1: + +0.63a1 (2022-10-23) +------------------- + +- SQL query is now re-displayed when terminated with a time limit error. (:issue:`1819`) +- New documentation on :ref:`deploying_openrc` - thanks, Adam Simpson. (`#1825 `__) +- The :ref:`inspect data ` mechanism is now used to speed up server startup - thanks, Forest Gregg. (:issue:`1834`) +- In :ref:`config_dir` databases with filenames ending in ``.sqlite`` or ``.sqlite3`` are now automatically added to the Datasette instance. (:issue:`1646`) +- Breadcrumb navigation display now respects the current user's permissions. (:issue:`1831`) +- Screenshots in the documentation are now maintained using `shot-scraper `__, as described in `Automating screenshots for the Datasette documentation using shot-scraper `__. (:issue:`1844`) +- The :ref:`datasette.check_visibility() ` method now accepts an optional ``permissions=`` list, allowing it to take multiple permissions into account at once when deciding if something should be shown as public or private. This has been used to correctly display padlock icons in more places in the Datasette interface. (:issue:`1829`) + + .. _v0_63a0: 0.63a0 (2022-09-26) @@ -91,7 +105,7 @@ Datasette also now requires Python 3.7 or higher. - Python 3.6 is no longer supported. (:issue:`1577`) - Tests now run against Python 3.11-dev. (:issue:`1621`) - New :ref:`datasette.ensure_permissions(actor, permissions) ` internal method for checking multiple permissions at once. (:issue:`1675`) -- New :ref:`datasette.check_visibility(actor, action, resource=None) ` internal method for checking if a user can see a resource that would otherwise be invisible to unauthenticated users. (:issue:`1678`) +- New :ref:`datasette.check_visibility(actor, action, resource=None) ` internal method for checking if a user can see a resource that would otherwise be invisible to unauthenticated users. (:issue:`1678`) - Table and row HTML pages now include a ```` element and return a ``Link: URL; rel="alternate"; type="application/json+datasette"`` HTTP header pointing to the JSON version of those pages. (:issue:`1533`) - ``Access-Control-Expose-Headers: Link`` is now added to the CORS headers, allowing remote JavaScript to access that header. - Canned queries are now shown at the top of the database page, directly below the SQL editor. Previously they were shown at the bottom, below the list of tables. 
(:issue:`1612`) diff --git a/docs/internals.rst b/docs/internals.rst index 92f4efee..c3892a7c 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -364,7 +364,7 @@ This is useful when you need to check multiple permissions at once. For example, ], ) -.. _datasette_check_visibilty: +.. _datasette_check_visibility: await .check_visibility(actor, action=None, resource=None, permissions=None) ---------------------------------------------------------------------------- diff --git a/docs/performance.rst b/docs/performance.rst index 89bbf5ae..4427757c 100644 --- a/docs/performance.rst +++ b/docs/performance.rst @@ -24,6 +24,8 @@ To open a file in immutable mode pass it to the datasette command using the ``-i When you open a file in immutable mode like this Datasette will also calculate and cache the row counts for each table in that database when it first starts up, further improving performance. +.. _performance_inspect: + Using "datasette inspect" ------------------------- From a0dd5fa02fb1e6d5477b962a2062f1a4be3354a5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 23 Oct 2022 20:14:49 -0700 Subject: [PATCH 1333/2113] Fixed typo in release notes --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index dd4c20b7..2255dcce 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -31,7 +31,7 @@ Changelog - ``Database(is_mutable=)`` now defaults to ``True``. (:issue:`1808`) - Non-JavaScript textarea now increases height to fit the SQL query. (:issue:`1786`) - More detailed command descriptions on the :ref:`CLI reference ` page. (:issue:`1787`) -- Datasette no longer enforces upper bounds on its depenedencies. (:issue:`1800`) +- Datasette no longer enforces upper bounds on its dependencies. (:issue:`1800`) - Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. (`#1794 `__) - The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. 
(:issue:`1816`) From 83adf55b2da83fd9a227f7e4c8506d72def72294 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 23 Oct 2022 20:28:15 -0700 Subject: [PATCH 1334/2113] Deploy one-dot-zero branch preview --- .github/workflows/deploy-latest.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 2b94a7f1..43a843ed 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -3,7 +3,8 @@ name: Deploy latest.datasette.io on: push: branches: - - main + - main + - 1.0-dev permissions: contents: read @@ -68,6 +69,8 @@ jobs: gcloud config set project datasette-222320 export SUFFIX="-${GITHUB_REF#refs/heads/}" export SUFFIX=${SUFFIX#-main} + # Replace 1.0 with one-dot-zero in SUFFIX + export SUFFIX=${SUFFIX//1.0/one-dot-zero} datasette publish cloudrun fixtures.db fixtures2.db extra_database.db \ -m fixtures.json \ --plugins-dir=plugins \ From e135da8efe8fccecf9a137a941cc1f1db0db583a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 07:13:43 -0700 Subject: [PATCH 1335/2113] Python 3.11 in CI --- .github/workflows/publish.yml | 16 ++++++++-------- .github/workflows/test.yml | 8 ++++---- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 9ef09d2e..fa608055 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -12,14 +12,14 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip @@ -37,12 +37,12 @@ jobs: runs-on: ubuntu-latest needs: [test] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: '3.10' - - uses: actions/cache@v2 + python-version: '3.11' + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e38d5ee9..886f649a 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -10,14 +10,14 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11-dev"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip From 02ae1a002918eb91f794e912c32742559da34cf5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 11:59:03 -0700 Subject: [PATCH 1336/2113] Upgrade Docker images to Python 3.11, closes #1853 --- Dockerfile | 2 +- datasette/utils/__init__.py | 2 +- demos/apache-proxy/Dockerfile | 2 +- docs/publish.rst | 2 +- tests/test_package.py | 2 +- tests/test_publish_cloudrun.py | 4 ++-- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index 
ee7ed957..9a8f06cf 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.6-slim-bullseye as build +FROM python:3.11.0-slim-bullseye as build # Version of Datasette to install, e.g. 0.55 # docker build . -t datasette --build-arg VERSION=0.55 diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 2bdea673..803ba96d 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -390,7 +390,7 @@ def make_dockerfile( "SQLITE_EXTENSIONS" ] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so" return """ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app {apt_get_extras} diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile index 70b33bec..9a8448da 100644 --- a/demos/apache-proxy/Dockerfile +++ b/demos/apache-proxy/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye RUN apt-get update && \ apt-get install -y apache2 supervisor && \ diff --git a/docs/publish.rst b/docs/publish.rst index d817ed31..4ba94792 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -146,7 +146,7 @@ Here's example output for the package command:: $ datasette package parlgov.db --extra-options="--setting sql_time_limit_ms 2500" Sending build context to Docker daemon 4.459MB - Step 1/7 : FROM python:3.10.6-slim-bullseye + Step 1/7 : FROM python:3.11.0-slim-bullseye ---> 79e1dc9af1c1 Step 2/7 : COPY . /app ---> Using cache diff --git a/tests/test_package.py b/tests/test_package.py index ac15e61e..f05f3ece 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -12,7 +12,7 @@ class CaptureDockerfile: EXPECTED_DOCKERFILE = """ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index e64534d2..158a090e 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -242,7 +242,7 @@ def test_publish_cloudrun_plugin_secrets( ) expected = textwrap.dedent( r""" - FROM python:3.10.6-slim-bullseye + FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app @@ -309,7 +309,7 @@ def test_publish_cloudrun_apt_get_install( ) expected = textwrap.dedent( r""" - FROM python:3.10.6-slim-bullseye + FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app From 9676b2deb07cff20247ba91dad3e84a4ab0b00d1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 11:59:03 -0700 Subject: [PATCH 1337/2113] Upgrade Docker images to Python 3.11, closes #1853 --- Dockerfile | 2 +- datasette/utils/__init__.py | 2 +- demos/apache-proxy/Dockerfile | 2 +- docs/publish.rst | 2 +- tests/test_package.py | 2 +- tests/test_publish_cloudrun.py | 4 ++-- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index ee7ed957..9a8f06cf 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.6-slim-bullseye as build +FROM python:3.11.0-slim-bullseye as build # Version of Datasette to install, e.g. 0.55 # docker build . -t datasette --build-arg VERSION=0.55 diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 2bdea673..803ba96d 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -390,7 +390,7 @@ def make_dockerfile( "SQLITE_EXTENSIONS" ] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so" return """ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye COPY . 
/app WORKDIR /app {apt_get_extras} diff --git a/demos/apache-proxy/Dockerfile b/demos/apache-proxy/Dockerfile index 70b33bec..9a8448da 100644 --- a/demos/apache-proxy/Dockerfile +++ b/demos/apache-proxy/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye RUN apt-get update && \ apt-get install -y apache2 supervisor && \ diff --git a/docs/publish.rst b/docs/publish.rst index d817ed31..4ba94792 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -146,7 +146,7 @@ Here's example output for the package command:: $ datasette package parlgov.db --extra-options="--setting sql_time_limit_ms 2500" Sending build context to Docker daemon 4.459MB - Step 1/7 : FROM python:3.10.6-slim-bullseye + Step 1/7 : FROM python:3.11.0-slim-bullseye ---> 79e1dc9af1c1 Step 2/7 : COPY . /app ---> Using cache diff --git a/tests/test_package.py b/tests/test_package.py index ac15e61e..f05f3ece 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -12,7 +12,7 @@ class CaptureDockerfile: EXPECTED_DOCKERFILE = """ -FROM python:3.10.6-slim-bullseye +FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index e64534d2..158a090e 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -242,7 +242,7 @@ def test_publish_cloudrun_plugin_secrets( ) expected = textwrap.dedent( r""" - FROM python:3.10.6-slim-bullseye + FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app @@ -309,7 +309,7 @@ def test_publish_cloudrun_apt_get_install( ) expected = textwrap.dedent( r""" - FROM python:3.10.6-slim-bullseye + FROM python:3.11.0-slim-bullseye COPY . /app WORKDIR /app From 613ad05c095f92653221db267ef53d54d00cdfbb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 12:16:48 -0700 Subject: [PATCH 1338/2113] Don't need pysqlite3-binary any more, refs #1853 --- .github/workflows/deploy-latest.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 2b94a7f1..e423b8fa 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -13,12 +13,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out datasette - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: "3.10" - - uses: actions/cache@v2 + python-version: "3.11" + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip @@ -74,7 +74,6 @@ jobs: --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \ - --install=pysqlite3-binary \ --service "datasette-latest$SUFFIX" - name: Deploy to docs as well (only for main) if: ${{ github.ref == 'refs/heads/main' }} From c7dd76c26257ded5bcdfd0570e12412531b8b88f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 12:42:21 -0700 Subject: [PATCH 1339/2113] Poll until servers start, refs #1854 --- tests/conftest.py | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 215853b3..f4638a14 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,4 @@ +import httpx import os import pathlib import pytest @@ -110,8 +111,13 @@ def ds_localhost_http_server(): # Avoid FileNotFoundError: [Errno 2] No such file or directory: 
cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + # Loop until port 8041 serves traffic + while True: + try: + httpx.get("http://localhost:8041/") + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc @@ -146,8 +152,12 @@ def ds_localhost_https_server(tmp_path_factory): stderr=subprocess.STDOUT, cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + while True: + try: + httpx.get("https://localhost:8042/", verify=client_cert) + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc, client_cert @@ -168,8 +178,15 @@ def ds_unix_domain_socket_server(tmp_path_factory): stderr=subprocess.STDOUT, cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + # Poll until available + transport = httpx.HTTPTransport(uds=uds) + client = httpx.Client(transport=transport) + while True: + try: + client.get("http://localhost/_memory.json") + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc, uds From 6d085af28c63c28ecda388fc0552c91f756be0c6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 07:13:43 -0700 Subject: [PATCH 1340/2113] Python 3.11 in CI --- .github/workflows/publish.yml | 16 ++++++++-------- .github/workflows/test.yml | 8 ++++---- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 9ef09d2e..fa608055 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -12,14 +12,14 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip @@ -37,12 +37,12 @@ jobs: runs-on: ubuntu-latest needs: [test] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: '3.10' - - uses: actions/cache@v2 + python-version: '3.11' + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e38d5ee9..886f649a 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -10,14 +10,14 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11-dev"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip From 05b479224fa57af3ab2d03769edd5081dad62a19 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 12:16:48 -0700 Subject: [PATCH 1341/2113] 
Don't need pysqlite3-binary any more, refs #1853 --- .github/workflows/deploy-latest.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 43a843ed..5598dc12 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -14,12 +14,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out datasette - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: "3.10" - - uses: actions/cache@v2 + python-version: "3.11" + - uses: actions/cache@v3 name: Configure pip caching with: path: ~/.cache/pip @@ -77,7 +77,6 @@ jobs: --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \ - --install=pysqlite3-binary \ --service "datasette-latest$SUFFIX" - name: Deploy to docs as well (only for main) if: ${{ github.ref == 'refs/heads/main' }} From f9ae92b37796f7f559d57b1ee9718aa4d43547e8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 12:42:21 -0700 Subject: [PATCH 1342/2113] Poll until servers start, refs #1854 --- tests/conftest.py | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 215853b3..f4638a14 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,4 @@ +import httpx import os import pathlib import pytest @@ -110,8 +111,13 @@ def ds_localhost_http_server(): # Avoid FileNotFoundError: [Errno 2] No such file or directory: cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + # Loop until port 8041 serves traffic + while True: + try: + httpx.get("http://localhost:8041/") + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc @@ -146,8 +152,12 @@ def ds_localhost_https_server(tmp_path_factory): stderr=subprocess.STDOUT, cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + while True: + try: + httpx.get("https://localhost:8042/", verify=client_cert) + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc, client_cert @@ -168,8 +178,15 @@ def ds_unix_domain_socket_server(tmp_path_factory): stderr=subprocess.STDOUT, cwd=tempfile.gettempdir(), ) - # Give the server time to start - time.sleep(1.5) + # Poll until available + transport = httpx.HTTPTransport(uds=uds) + client = httpx.Client(transport=transport) + while True: + try: + client.get("http://localhost/_memory.json") + break + except httpx.ConnectError: + time.sleep(0.1) # Check it started successfully assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8") yield ds_proc, uds From 42f8b402e6aa56af4bbe921e346af8df42acd50f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 17:07:58 -0700 Subject: [PATCH 1343/2113] Initial prototype of create API token page, refs #1852 --- datasette/app.py | 5 ++ datasette/templates/create_token.html | 83 +++++++++++++++++++++++++++ datasette/views/special.py | 54 +++++++++++++++++ 3 files changed, 142 insertions(+) create mode 100644 datasette/templates/create_token.html diff --git a/datasette/app.py b/datasette/app.py index 9df16558..cab9d142 100644 --- a/datasette/app.py +++ 
b/datasette/app.py
@@ -33,6 +33,7 @@ from .views.special import (
     JsonDataView,
     PatternPortfolioView,
     AuthTokenView,
+    CreateTokenView,
     LogoutView,
     AllowDebugView,
     PermissionsDebugView,
@@ -1212,6 +1213,10 @@ class Datasette:
             AuthTokenView.as_view(self),
             r"/-/auth-token$",
         )
+        add_route(
+            CreateTokenView.as_view(self),
+            r"/-/create-token$",
+        )
         add_route(
             LogoutView.as_view(self),
             r"/-/logout$",
diff --git a/datasette/templates/create_token.html b/datasette/templates/create_token.html
new file mode 100644
index 00000000..a94881ed
--- /dev/null
+++ b/datasette/templates/create_token.html
@@ -0,0 +1,83 @@
+{% extends "base.html" %}
+
+{% block title %}Create an API token{% endblock %}
+
+{% block content %}
+
+<h1>Create an API token</h1>
+
+<p>This token will allow API access with the same abilities as your current user.</p>
+
+{% if errors %}
+    {% for error in errors %}
+        <p class="message-error">{{ error }}</p>
+    {% endfor %}
+{% endif %}
+
+<form action="{{ urls.path('-/create-token') }}" method="post">
+    <div>
+        <select name="expire_type">
+            <option value="">Token never expires</option>
+            <option value="minutes">Expires after N minutes</option>
+            <option value="hours">Expires after N hours</option>
+            <option value="days">Expires after N days</option>
+        </select>
+        <input type="text" name="expire_duration">
+        <input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
+        <input type="submit" value="Create token">
+    </div>
+</form>
+
+{% if token %}
+    <div>
+        <h2>Your API token</h2>
+        <input type="text" value="{{ token }}">
+        <details>
+            <summary>Token details</summary>
+            <pre>{{ token_bits|tojson }}</pre>
+        </details>
+    </div>
+ {% endif %} + + + +{% endblock %} diff --git a/datasette/views/special.py b/datasette/views/special.py index dd834528..f2e69412 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -3,6 +3,7 @@ from datasette.utils.asgi import Response, Forbidden from datasette.utils import actor_matches_allow, add_cors_headers from .base import BaseView import secrets +import time class JsonDataView(BaseView): @@ -163,3 +164,56 @@ class MessagesDebugView(BaseView): else: datasette.add_message(request, message, getattr(datasette, message_type)) return Response.redirect(self.ds.urls.instance()) + + +class CreateTokenView(BaseView): + name = "create_token" + has_json_alternate = False + + async def get(self, request): + if not request.actor: + raise Forbidden("You must be logged in to create a token") + return await self.render( + ["create_token.html"], + request, + {"actor": request.actor}, + ) + + async def post(self, request): + if not request.actor: + raise Forbidden("You must be logged in to create a token") + post = await request.post_vars() + expires = None + errors = [] + if post.get("expire_type"): + duration = post.get("expire_duration") + if not duration or not duration.isdigit() or not int(duration) > 0: + errors.append("Invalid expire duration") + else: + unit = post["expire_type"] + if unit == "minutes": + expires = int(duration) * 60 + elif unit == "hours": + expires = int(duration) * 60 * 60 + elif unit == "days": + expires = int(duration) * 60 * 60 * 24 + else: + errors.append("Invalid expire duration unit") + token_bits = None + token = None + if not errors: + token_bits = { + "a": request.actor, + "e": (int(time.time()) + expires) if expires else None, + } + token = self.ds.sign(token_bits, "token") + return await self.render( + ["create_token.html"], + request, + { + "actor": request.actor, + "errors": errors, + "token": token, + "token_bits": token_bits, + }, + ) From 68ccb7578b5d3bf68b86fb2f5cf8753098dfe075 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 18:40:07 -0700 Subject: [PATCH 1344/2113] dstoke_ prefix for tokens Refs https://github.com/simonw/datasette/issues/1852#issuecomment-1291290451 --- datasette/views/special.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/views/special.py b/datasette/views/special.py index f2e69412..d3f202f4 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -206,7 +206,7 @@ class CreateTokenView(BaseView): "a": request.actor, "e": (int(time.time()) + expires) if expires else None, } - token = self.ds.sign(token_bits, "token") + token = "dstok_{}".format(self.ds.sign(token_bits, "token")) return await self.render( ["create_token.html"], request, From 7ab091e8ef8d3af1e23b5a81ffad2bd8c96cc47c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 19:04:05 -0700 Subject: [PATCH 1345/2113] Tests and docs for /-/create-token, refs #1852 --- datasette/views/special.py | 14 +++++--- docs/authentication.rst | 15 +++++++++ tests/test_auth.py | 68 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 93 insertions(+), 4 deletions(-) diff --git a/datasette/views/special.py b/datasette/views/special.py index d3f202f4..7f70eb1f 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -170,9 +170,16 @@ class CreateTokenView(BaseView): name = "create_token" has_json_alternate = False - async def get(self, request): + def check_permission(self, request): if not request.actor: raise Forbidden("You must be logged in to create a token") + 
if not request.actor.get("id"): + raise Forbidden( + "You must be logged in as an actor with an ID to create a token" + ) + + async def get(self, request): + self.check_permission(request) return await self.render( ["create_token.html"], request, @@ -180,8 +187,7 @@ class CreateTokenView(BaseView): ) async def post(self, request): - if not request.actor: - raise Forbidden("You must be logged in to create a token") + self.check_permission(request) post = await request.post_vars() expires = None errors = [] @@ -203,7 +209,7 @@ class CreateTokenView(BaseView): token = None if not errors: token_bits = { - "a": request.actor, + "a": request.actor["id"], "e": (int(time.time()) + expires) if expires else None, } token = "dstok_{}".format(self.ds.sign(token_bits, "token")) diff --git a/docs/authentication.rst b/docs/authentication.rst index 685dab15..fc903fbb 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -333,6 +333,21 @@ To limit this ability for just one specific database, use this: } } +.. _CreateTokenView: + +API Tokens +========== + +Datasette includes a default mechanism for generating API tokens that can be used to authenticate requests. + +Authenticated users can create new API tokens using a form on the ``/-/create-token`` page. + +Created tokens can then be passed in the ``Authorization: Bearer token_here`` header of HTTP requests to Datasette. + +A token created by a user will include that user's ``"id"`` in the token payload, so any permissions granted to that user based on their ID will be made available to the token as well. + +Coming soon: a mechanism for creating tokens that can only perform a subset of the actions available to the user who created them. + .. _permissions_plugins: Checking permissions in plugins diff --git a/tests/test_auth.py b/tests/test_auth.py index 4ef35a76..3aaab50d 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -110,3 +110,71 @@ def test_no_logout_button_in_navigation_if_no_ds_actor_cookie(app_client, path): response = app_client.get(path + "?_bot=1") assert "bot" in response.text assert '
<form action="/-/logout" method="post">' not in response.text
+
+
+@pytest.mark.parametrize(
+    "post_data,errors,expected_duration",
+    (
+        ({"expire_type": ""}, [], None),
+        ({"expire_type": "x"}, ["Invalid expire duration"], None),
+        ({"expire_type": "minutes"}, ["Invalid expire duration"], None),
+        (
+            {"expire_type": "minutes", "expire_duration": "x"},
+            ["Invalid expire duration"],
+            None,
+        ),
+        (
+            {"expire_type": "minutes", "expire_duration": "-1"},
+            ["Invalid expire duration"],
+            None,
+        ),
+        (
+            {"expire_type": "minutes", "expire_duration": "0"},
+            ["Invalid expire duration"],
+            None,
+        ),
+        (
+            {"expire_type": "minutes", "expire_duration": "10"},
+            [],
+            600,
+        ),
+        (
+            {"expire_type": "hours", "expire_duration": "10"},
+            [],
+            10 * 60 * 60,
+        ),
+        (
+            {"expire_type": "days", "expire_duration": "3"},
+            [],
+            60 * 60 * 24 * 3,
+        ),
+    ),
+)
+def test_auth_create_token(app_client, post_data, errors, expected_duration):
+    assert app_client.get("/-/create-token").status == 403
+    ds_actor = app_client.actor_cookie({"id": "test"})
+    response = app_client.get("/-/create-token", cookies={"ds_actor": ds_actor})
+    assert response.status == 200
+    assert ">Create an API token<" in response.text
+    # Now try actually creating one
+    response2 = app_client.post(
+        "/-/create-token",
+        post_data,
+        csrftoken_from=True,
+        cookies={"ds_actor": ds_actor},
+    )
+    assert response2.status == 200
+    if errors:
+        for error in errors:
+            assert '<p class="message-error">{}</p>
'.format(error) in response2.text + else: + # Extract token from page + token = response2.text.split('value="dstok_')[1].split('"')[0] + details = app_client.ds.unsign(token, "token") + assert details.keys() == {"a", "e"} + assert details["a"] == "test" + if expected_duration is None: + assert details["e"] is None + else: + about_right = int(time.time()) + expected_duration + assert about_right - 2 < details["e"] < about_right + 2 From b29e487bc3fde6418bf45bda7cfed2e081ff03fb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 19:18:41 -0700 Subject: [PATCH 1346/2113] actor_from_request for dstok_ tokens, refs #1852 --- datasette/default_permissions.py | 25 +++++++++++++++++++++++++ datasette/utils/testing.py | 2 ++ tests/test_auth.py | 32 ++++++++++++++++++++++++++++++++ 3 files changed, 59 insertions(+) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index b58d8d1b..4d836ddc 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -1,5 +1,7 @@ from datasette import hookimpl from datasette.utils import actor_matches_allow +import itsdangerous +import time @hookimpl(tryfirst=True) @@ -45,3 +47,26 @@ def permission_allowed(datasette, actor, action, resource): return actor_matches_allow(actor, database_allow_sql) return inner + + +@hookimpl +def actor_from_request(datasette, request): + prefix = "dstok_" + authorization = request.headers.get("authorization") + if not authorization: + return None + if not authorization.startswith("Bearer "): + return None + token = authorization[len("Bearer ") :] + if not token.startswith(prefix): + return None + token = token[len(prefix) :] + try: + decoded = datasette.unsign(token, namespace="token") + except itsdangerous.BadSignature: + return None + expires_at = decoded.get("e") + if expires_at is not None: + if expires_at < time.time(): + return None + return {"id": decoded["a"], "dstok": True} diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index b28fc575..4f76a799 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -62,6 +62,7 @@ class TestClient: method="GET", cookies=None, if_none_match=None, + headers=None, ): return await self._request( path=path, @@ -70,6 +71,7 @@ class TestClient: method=method, cookies=cookies, if_none_match=if_none_match, + headers=headers, ) @async_to_sync diff --git a/tests/test_auth.py b/tests/test_auth.py index 3aaab50d..be21d6a5 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -178,3 +178,35 @@ def test_auth_create_token(app_client, post_data, errors, expected_duration): else: about_right = int(time.time()) + expected_duration assert about_right - 2 < details["e"] < about_right + 2 + + +@pytest.mark.parametrize( + "scenario,should_work", + ( + ("no_token", False), + ("invalid_token", False), + ("expired_token", False), + ("valid_unlimited_token", True), + ("valid_expiring_token", True), + ), +) +def test_auth_with_dstok_token(app_client, scenario, should_work): + token = None + if scenario == "valid_unlimited_token": + token = app_client.ds.sign({"a": "test"}, "token") + elif scenario == "valid_expiring_token": + token = app_client.ds.sign({"a": "test", "e": int(time.time()) + 1000}, "token") + elif scenario == "expired_token": + token = app_client.ds.sign({"a": "test", "e": int(time.time()) - 1000}, "token") + elif scenario == "invalid_token": + token = "invalid" + if token: + token = "dstok_{}".format(token) + headers = {} + if token: + headers["Authorization"] = "Bearer 
{}".format(token) + response = app_client.get("/-/actor.json", headers=headers) + if should_work: + assert response.json == {"actor": {"id": "test", "dstok": True}} + else: + assert response.json == {"actor": None} From 0f013ff497df62e1dd2075777b9817555646010e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 19:43:55 -0700 Subject: [PATCH 1347/2113] Mechanism to prevent tokens creating tokens, closes #1857 --- datasette/default_permissions.py | 2 +- datasette/views/special.py | 4 ++++ docs/authentication.rst | 2 ++ tests/test_auth.py | 11 ++++++++++- 4 files changed, 17 insertions(+), 2 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 4d836ddc..d908af7a 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -69,4 +69,4 @@ def actor_from_request(datasette, request): if expires_at is not None: if expires_at < time.time(): return None - return {"id": decoded["a"], "dstok": True} + return {"id": decoded["a"], "token": "dstok"} diff --git a/datasette/views/special.py b/datasette/views/special.py index 7f70eb1f..91130353 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -177,6 +177,10 @@ class CreateTokenView(BaseView): raise Forbidden( "You must be logged in as an actor with an ID to create a token" ) + if request.actor.get("token"): + raise Forbidden( + "Token authentication cannot be used to create additional tokens" + ) async def get(self, request): self.check_permission(request) diff --git a/docs/authentication.rst b/docs/authentication.rst index fc903fbb..cbecd296 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -348,6 +348,8 @@ A token created by a user will include that user's ``"id"`` in the token payload Coming soon: a mechanism for creating tokens that can only perform a subset of the actions available to the user who created them. +This page cannot be accessed by actors with a ``"token": "some-value"`` property. This is to prevent API tokens from being used to automatically create more tokens. Datasette plugins that implement their own form of API token authentication should follow this convention. + .. 
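To make the ``dstok_`` format concrete, here is a minimal sketch of the sign/unsign round trip that ``/-/create-token`` and ``actor_from_request()`` perform (using a throwaway in-memory instance; the actor ID is illustrative):

.. code-block:: python

    import time

    from datasette.app import Datasette

    ds = Datasette([], memory=True)  # throwaway instance with an auto-generated secret

    # /-/create-token signs a payload of actor ID ("a") plus optional expiry ("e")
    bits = {"a": "root", "e": int(time.time()) + 3600}
    token = "dstok_{}".format(ds.sign(bits, "token"))

    # actor_from_request() reverses this: strip the prefix, unsign, check expiry
    decoded = ds.unsign(token[len("dstok_") :], namespace="token")
    assert decoded["a"] == "root"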
_permissions_plugins: Checking permissions in plugins diff --git a/tests/test_auth.py b/tests/test_auth.py index be21d6a5..397d51d7 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -180,6 +180,15 @@ def test_auth_create_token(app_client, post_data, errors, expected_duration): assert about_right - 2 < details["e"] < about_right + 2 +def test_auth_create_token_not_allowed_for_tokens(app_client): + ds_tok = app_client.ds.sign({"a": "test", "token": "dstok"}, "token") + response = app_client.get( + "/-/create-token", + headers={"Authorization": "Bearer dstok_{}".format(ds_tok)}, + ) + assert response.status == 403 + + @pytest.mark.parametrize( "scenario,should_work", ( @@ -207,6 +216,6 @@ def test_auth_with_dstok_token(app_client, scenario, should_work): headers["Authorization"] = "Bearer {}".format(token) response = app_client.get("/-/actor.json", headers=headers) if should_work: - assert response.json == {"actor": {"id": "test", "dstok": True}} + assert response.json == {"actor": {"id": "test", "token": "dstok"}} else: assert response.json == {"actor": None} From c23fa850e7f21977e367e3467656055216978e8a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 19:55:47 -0700 Subject: [PATCH 1348/2113] allow_signed_tokens setting, closes #1856 --- datasette/app.py | 5 +++++ datasette/default_permissions.py | 2 ++ datasette/views/special.py | 2 ++ docs/authentication.rst | 2 ++ docs/cli-reference.rst | 2 ++ docs/plugins.rst | 1 + docs/settings.rst | 13 +++++++++++++ tests/test_auth.py | 26 +++++++++++++++++++++----- 8 files changed, 48 insertions(+), 5 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index cab9d142..c868f8d3 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -124,6 +124,11 @@ SETTINGS = ( True, "Allow users to download the original SQLite database files", ), + Setting( + "allow_signed_tokens", + True, + "Allow users to create and use signed API tokens", + ), Setting("suggest_facets", True, "Calculate and display suggested facets"), Setting( "default_cache_ttl", diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index d908af7a..49ca8851 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -52,6 +52,8 @@ def permission_allowed(datasette, actor, action, resource): @hookimpl def actor_from_request(datasette, request): prefix = "dstok_" + if not datasette.setting("allow_signed_tokens"): + return None authorization = request.headers.get("authorization") if not authorization: return None diff --git a/datasette/views/special.py b/datasette/views/special.py index 91130353..89015958 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -171,6 +171,8 @@ class CreateTokenView(BaseView): has_json_alternate = False def check_permission(self, request): + if not self.ds.setting("allow_signed_tokens"): + raise Forbidden("Signed tokens are not enabled for this Datasette instance") if not request.actor: raise Forbidden("You must be logged in to create a token") if not request.actor.get("id"): diff --git a/docs/authentication.rst b/docs/authentication.rst index cbecd296..50304ec5 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -350,6 +350,8 @@ Coming soon: a mechanism for creating tokens that can only perform a subset of t This page cannot be accessed by actors with a ``"token": "some-value"`` property. This is to prevent API tokens from being used to automatically create more tokens. 
Datasette plugins that implement their own form of API token authentication should follow this convention. +You can disable this feature using the :ref:`allow_signed_tokens ` setting. + .. _permissions_plugins: Checking permissions in plugins diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 4a8465cb..fd5e2404 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -226,6 +226,8 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam ?_facet= parameter (default=True) allow_download Allow users to download the original SQLite database files (default=True) + allow_signed_tokens Allow users to create and use signed API tokens + (default=True) suggest_facets Calculate and display suggested facets (default=True) default_cache_ttl Default HTTP cache TTL (used in Cache-Control: diff --git a/docs/plugins.rst b/docs/plugins.rst index 29078054..9efef32f 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -151,6 +151,7 @@ If you run ``datasette plugins --all`` it will include default plugins that ship "templates": false, "version": null, "hooks": [ + "actor_from_request", "permission_allowed" ] }, diff --git a/docs/settings.rst b/docs/settings.rst index a6d50543..be640b21 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -169,6 +169,19 @@ Should users be able to download the original SQLite database using a link on th datasette mydatabase.db --setting allow_download off +.. _setting_allow_signed_tokens: + +allow_signed_tokens +~~~~~~~~~~~~~~~~~~~ + +Should users be able to create signed API tokens to access Datasette? + +This is turned on by default. Use the following to turn it off:: + + datasette mydatabase.db --setting allow_signed_tokens off + +Turning this setting off will disable the ``/-/create-token`` page, :ref:`described here `. It will also cause any incoming ``Authorization: Bearer dstok_...`` API tokens to be ignored. + .. 
_setting_default_cache_ttl: default_cache_ttl diff --git a/tests/test_auth.py b/tests/test_auth.py index 397d51d7..a79dafd8 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -189,9 +189,20 @@ def test_auth_create_token_not_allowed_for_tokens(app_client): assert response.status == 403 +def test_auth_create_token_not_allowed_if_allow_signed_tokens_off(app_client): + app_client.ds._settings["allow_signed_tokens"] = False + try: + ds_actor = app_client.actor_cookie({"id": "test"}) + response = app_client.get("/-/create-token", cookies={"ds_actor": ds_actor}) + assert response.status == 403 + finally: + app_client.ds._settings["allow_signed_tokens"] = True + + @pytest.mark.parametrize( "scenario,should_work", ( + ("allow_signed_tokens_off", False), ("no_token", False), ("invalid_token", False), ("expired_token", False), @@ -201,7 +212,7 @@ def test_auth_create_token_not_allowed_for_tokens(app_client): ) def test_auth_with_dstok_token(app_client, scenario, should_work): token = None - if scenario == "valid_unlimited_token": + if scenario in ("valid_unlimited_token", "allow_signed_tokens_off"): token = app_client.ds.sign({"a": "test"}, "token") elif scenario == "valid_expiring_token": token = app_client.ds.sign({"a": "test", "e": int(time.time()) + 1000}, "token") @@ -211,11 +222,16 @@ def test_auth_with_dstok_token(app_client, scenario, should_work): token = "invalid" if token: token = "dstok_{}".format(token) + if scenario == "allow_signed_tokens_off": + app_client.ds._settings["allow_signed_tokens"] = False headers = {} if token: headers["Authorization"] = "Bearer {}".format(token) response = app_client.get("/-/actor.json", headers=headers) - if should_work: - assert response.json == {"actor": {"id": "test", "token": "dstok"}} - else: - assert response.json == {"actor": None} + try: + if should_work: + assert response.json == {"actor": {"id": "test", "token": "dstok"}} + else: + assert response.json == {"actor": None} + finally: + app_client.ds._settings["allow_signed_tokens"] = True From c36a74ece1e475291af326d493d8db9ff3afdd30 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 21:04:39 -0700 Subject: [PATCH 1349/2113] Try shutting down executor in tests to free up thread local SQLite connections, refs #1843 --- tests/fixtures.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/fixtures.py b/tests/fixtures.py index 13a3dffa..d1afd2f3 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -166,6 +166,7 @@ def make_app_client( # Close the connection to avoid "too many open files" errors conn.close() os.remove(filepath) + ds.executor.shutdown() @pytest.fixture(scope="session") From c556fad65d8a45ce85027678796a12ac9107d9ed Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 21:25:47 -0700 Subject: [PATCH 1350/2113] Try to address too many files error again, refs #1843 --- tests/fixtures.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index d1afd2f3..92a10da6 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -131,10 +131,14 @@ def make_app_client( for sql, params in TABLE_PARAMETERIZED_SQL: with conn: conn.execute(sql, params) + # Close the connection to avoid "too many open files" errors + conn.close() if extra_databases is not None: for extra_filename, extra_sql in extra_databases.items(): extra_filepath = os.path.join(tmpdir, extra_filename) - sqlite3.connect(extra_filepath).executescript(extra_sql) + c2 = sqlite3.connect(extra_filepath) + 
c2.executescript(extra_sql) + c2.close() # Insert at start to help test /-/databases ordering: files.insert(0, extra_filepath) os.chdir(os.path.dirname(filepath)) @@ -163,10 +167,7 @@ def make_app_client( crossdb=crossdb, ) yield TestClient(ds) - # Close the connection to avoid "too many open files" errors - conn.close() os.remove(filepath) - ds.executor.shutdown() @pytest.fixture(scope="session") From c7956eed7777c62653b4d508570c5d77cfead7d9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 25 Oct 2022 21:26:12 -0700 Subject: [PATCH 1351/2113] datasette create-token command, refs #1859 --- datasette/default_permissions.py | 38 ++++++++++++++++++++++++++++ docs/authentication.rst | 23 +++++++++++++++++ docs/cli-reference.rst | 43 ++++++++++++++++++++++++++------ docs/plugins.rst | 3 ++- tests/test_api.py | 1 + tests/test_auth.py | 28 +++++++++++++++++++++ tests/test_plugins.py | 2 ++ 7 files changed, 130 insertions(+), 8 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 49ca8851..12499c16 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -1,6 +1,8 @@ from datasette import hookimpl from datasette.utils import actor_matches_allow +import click import itsdangerous +import json import time @@ -72,3 +74,39 @@ def actor_from_request(datasette, request): if expires_at < time.time(): return None return {"id": decoded["a"], "token": "dstok"} + + +@hookimpl +def register_commands(cli): + from datasette.app import Datasette + + @cli.command() + @click.argument("id") + @click.option( + "--secret", + help="Secret used for signing the API tokens", + envvar="DATASETTE_SECRET", + required=True, + ) + @click.option( + "-e", + "--expires-after", + help="Token should expire after this many seconds", + type=int, + ) + @click.option( + "--debug", + help="Show decoded token", + is_flag=True, + ) + def create_token(id, secret, expires_after, debug): + "Create a signed API token for the specified actor ID" + ds = Datasette(secret=secret) + bits = {"a": id, "token": "dstok"} + if expires_after: + bits["e"] = int(time.time()) + expires_after + token = ds.sign(bits, namespace="token") + click.echo("dstok_{}".format(token)) + if debug: + click.echo("\nDecoded:\n") + click.echo(json.dumps(ds.unsign(token, namespace="token"), indent=2)) diff --git a/docs/authentication.rst b/docs/authentication.rst index 50304ec5..0835e17c 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -352,6 +352,29 @@ This page cannot be accessed by actors with a ``"token": "some-value"`` property You can disable this feature using the :ref:`allow_signed_tokens ` setting. +.. _authentication_cli_create_token: + +datasette create-token +---------------------- + +You can also create tokens on the command line using the ``datasette create-token`` command. + +This command takes one required argument - the ID of the actor to be associated with the created token. + +You can specify an ``--expires-after`` option in seconds. If omitted, the token will never expire. + +The command will sign the token using the ``DATASETTE_SECRET`` environment variable, if available. You can also pass the secret using the ``--secret`` option. + +This means you can run the command locally to create tokens for use with a deployed Datasette instance, provided you know that instance's secret. 
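Once minted (the examples that follow show the command itself), the token is presented in the ``Authorization`` header like any other ``dstok_`` token. A sketch using ``httpx``, with the URL and token as placeholders:

.. code-block:: python

    import httpx

    token = "dstok_..."  # placeholder: paste the output of datasette create-token

    response = httpx.get(
        "http://127.0.0.1:8001/-/actor.json",  # assumes a locally running instance
        headers={"Authorization": "Bearer {}".format(token)},
    )
    print(response.json())  # e.g. {"actor": {"id": "root", "token": "dstok"}}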
+
+To create a token for the ``root`` actor that will expire in one hour::
+
+    datasette create-token root --expires-after 3600
+
+To create a token that never expires using a specific secret::
+
+    datasette create-token root --secret my-secret-goes-here
+
 .. _permissions_plugins:

 Checking permissions in plugins
diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst
index fd5e2404..b40c6b2c 100644
--- a/docs/cli-reference.rst
+++ b/docs/cli-reference.rst
@@ -47,13 +47,14 @@ Running ``datasette --help`` shows a list of all of the available commands.
       --help     Show this message and exit.

     Commands:
-      serve*     Serve up specified SQLite database files with a web UI
-      inspect    Generate JSON summary of provided database files
-      install    Install plugins and packages from PyPI into the same...
-      package    Package SQLite files into a Datasette Docker container
-      plugins    List currently installed plugins
-      publish    Publish specified SQLite database files to the internet along...
-      uninstall  Uninstall plugins and Python packages from the Datasette...
+      serve*        Serve up specified SQLite database files with a web UI
+      create-token  Create a signed API token for the specified actor ID
+      inspect       Generate JSON summary of provided database files
+      install       Install plugins and packages from PyPI into the same...
+      package       Package SQLite files into a Datasette Docker container
+      plugins       List currently installed plugins
+      publish       Publish specified SQLite database files to the internet...
+      uninstall     Uninstall plugins and Python packages from the Datasette...

 .. [[[end]]]
@@ -591,3 +592,31 @@ This performance optimization is used automatically by some of the ``datasette p

 .. [[[end]]]
+
+
+.. _cli_help_create_token___help:
+
+datasette create-token
+======================
+
+Create a signed API token, see :ref:`authentication_cli_create_token`.
+
+.. [[[cog
+    help(["create-token", "--help"])
+.. ]]]
+
+::
+
+    Usage: datasette create-token [OPTIONS] ID
+
+      Create a signed API token for the specified actor ID
+
+    Options:
+      --secret TEXT                Secret used for signing the API tokens
+                                   [required]
+      -e, --expires-after INTEGER  Token should expire after this many seconds
+      --debug                      Show decoded token
+      --help                       Show this message and exit.
+
+
+.. 
[[[end]]] diff --git a/docs/plugins.rst b/docs/plugins.rst index 9efef32f..3ae42293 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -152,7 +152,8 @@ If you run ``datasette plugins --all`` it will include default plugins that ship "version": null, "hooks": [ "actor_from_request", - "permission_allowed" + "permission_allowed", + "register_commands" ] }, { diff --git a/tests/test_api.py b/tests/test_api.py index ad74d16e..f7cbe950 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -806,6 +806,7 @@ def test_settings_json(app_client): "max_returned_rows": 100, "sql_time_limit_ms": 200, "allow_download": True, + "allow_signed_tokens": True, "allow_facet": True, "suggest_facets": True, "default_cache_ttl": 5, diff --git a/tests/test_auth.py b/tests/test_auth.py index a79dafd8..f2d82107 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,5 +1,7 @@ from .fixtures import app_client +from click.testing import CliRunner from datasette.utils import baseconv +from datasette.cli import cli import pytest import time @@ -235,3 +237,29 @@ def test_auth_with_dstok_token(app_client, scenario, should_work): assert response.json == {"actor": None} finally: app_client.ds._settings["allow_signed_tokens"] = True + + +@pytest.mark.parametrize("expires", (None, 1000, -1000)) +def test_cli_create_token(app_client, expires): + secret = app_client.ds._secret + runner = CliRunner(mix_stderr=False) + args = ["create-token", "--secret", secret, "test"] + if expires: + args += ["--expires-after", str(expires)] + result = runner.invoke(cli, args) + assert result.exit_code == 0 + token = result.output.strip() + assert token.startswith("dstok_") + details = app_client.ds.unsign(token[len("dstok_") :], "token") + expected_keys = {"a", "token"} + if expires: + expected_keys.add("e") + assert details.keys() == expected_keys + assert details["a"] == "test" + response = app_client.get( + "/-/actor.json", headers={"Authorization": "Bearer {}".format(token)} + ) + if expires is None or expires > 0: + assert response.json == {"actor": {"id": "test", "token": "dstok"}} + else: + assert response.json == {"actor": None} diff --git a/tests/test_plugins.py b/tests/test_plugins.py index e0a7bc76..de3fde8e 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -971,6 +971,7 @@ def test_hook_register_commands(): "plugins", "publish", "uninstall", + "create-token", } # Now install a plugin @@ -1001,6 +1002,7 @@ def test_hook_register_commands(): "uninstall", "verify", "unverify", + "create-token", } pm.unregister(name="verify") importlib.reload(cli) From df7bf0b2fc262f0b025b3cdd283ff8ce60653175 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:13:31 -0700 Subject: [PATCH 1352/2113] Fix bug with breadcrumbs and request=None, closes #1849 --- datasette/app.py | 9 ++++++--- tests/test_internals_datasette.py | 9 +++++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 9df16558..246269f3 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -633,15 +633,18 @@ class Datasette: async def _crumb_items(self, request, table=None, database=None): crumbs = [] + actor = None + if request: + actor = request.actor # Top-level link if await self.permission_allowed( - actor=request.actor, action="view-instance", default=True + actor=actor, action="view-instance", default=True ): crumbs.append({"href": self.urls.instance(), "label": "home"}) # Database link if database: if await self.permission_allowed( - actor=request.actor, + actor=actor, 
action="view-database", resource=database, default=True, @@ -656,7 +659,7 @@ class Datasette: if table: assert database, "table= requires database=" if await self.permission_allowed( - actor=request.actor, + actor=actor, action="view-table", resource=(database, table), default=True, diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index c82cafb3..1b4732af 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -125,3 +125,12 @@ async def test_datasette_ensure_permissions_check_visibility( visible, private = await ds.check_visibility(actor, permissions=permissions) assert visible == should_allow assert private == expected_private + + +@pytest.mark.asyncio +async def test_datasette_render_template_no_request(): + # https://github.com/simonw/datasette/issues/1849 + ds = Datasette([], memory=True) + await ds.invoke_startup() + rendered = await ds.render_template("error.html") + assert "Error " in rendered From 55a709c480a1e7401b4ff6208f37a2cf7c682183 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:34:33 -0700 Subject: [PATCH 1353/2113] Allow leading comments on SQL queries, refs #1860 --- datasette/utils/__init__.py | 27 +++++++++++++++++++++------ tests/test_utils.py | 7 +++++++ 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 803ba96d..977a66d6 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -205,13 +205,28 @@ class InvalidSql(Exception): pass +# Allow SQL to start with a /* */ or -- comment +comment_re = ( + # Start of string, then any amount of whitespace + r"^(\s*" + + + # Comment that starts with -- and ends at a newline + r"(?:\-\-.*?\n\s*)" + + + # Comment that starts with /* and ends with */ + r"|(?:/\*[\s\S]*?\*/)" + + + # Whitespace + r")*\s*" +) + allowed_sql_res = [ - re.compile(r"^select\b"), - re.compile(r"^explain\s+select\b"), - re.compile(r"^explain\s+query\s+plan\s+select\b"), - re.compile(r"^with\b"), - re.compile(r"^explain\s+with\b"), - re.compile(r"^explain\s+query\s+plan\s+with\b"), + re.compile(comment_re + r"select\b"), + re.compile(comment_re + r"explain\s+select\b"), + re.compile(comment_re + r"explain\s+query\s+plan\s+select\b"), + re.compile(comment_re + r"with\b"), + re.compile(comment_re + r"explain\s+with\b"), + re.compile(comment_re + r"explain\s+query\s+plan\s+with\b"), ] allowed_pragmas = ( "database_list", diff --git a/tests/test_utils.py b/tests/test_utils.py index d71a612d..e89f1e6b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -141,6 +141,7 @@ def test_custom_json_encoder(obj, expected): "update blah set some_column='# Hello there\n\n* This is a list\n* of items\n--\n[And a link](https://github.com/simonw/datasette-render-markdown).'\nas demo_markdown", "PRAGMA case_sensitive_like = true", "SELECT * FROM pragma_not_on_allow_list('idx52')", + "/* This comment is not valid. 
select 1", ], ) def test_validate_sql_select_bad(bad_sql): @@ -166,6 +167,12 @@ def test_validate_sql_select_bad(bad_sql): "explain query plan WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;", "SELECT * FROM pragma_index_info('idx52')", "select * from pragma_table_xinfo('table')", + # Various types of comment + "-- comment\nselect 1", + "-- one line\n -- two line\nselect 1", + " /* comment */\nselect 1", + " /* comment */select 1", + "/* comment */\n -- another\n /* one more */ select 1", ], ) def test_validate_sql_select_good(good_sql): From 55f860c304aea813cb7ed740cc5625560a0722a0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:13:31 -0700 Subject: [PATCH 1354/2113] Fix bug with breadcrumbs and request=None, closes #1849 --- datasette/app.py | 9 ++++++--- tests/test_internals_datasette.py | 9 +++++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index c868f8d3..596ff44d 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -639,15 +639,18 @@ class Datasette: async def _crumb_items(self, request, table=None, database=None): crumbs = [] + actor = None + if request: + actor = request.actor # Top-level link if await self.permission_allowed( - actor=request.actor, action="view-instance", default=True + actor=actor, action="view-instance", default=True ): crumbs.append({"href": self.urls.instance(), "label": "home"}) # Database link if database: if await self.permission_allowed( - actor=request.actor, + actor=actor, action="view-database", resource=database, default=True, @@ -662,7 +665,7 @@ class Datasette: if table: assert database, "table= requires database=" if await self.permission_allowed( - actor=request.actor, + actor=actor, action="view-table", resource=(database, table), default=True, diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index c82cafb3..1b4732af 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -125,3 +125,12 @@ async def test_datasette_ensure_permissions_check_visibility( visible, private = await ds.check_visibility(actor, permissions=permissions) assert visible == should_allow assert private == expected_private + + +@pytest.mark.asyncio +async def test_datasette_render_template_no_request(): + # https://github.com/simonw/datasette/issues/1849 + ds = Datasette([], memory=True) + await ds.invoke_startup() + rendered = await ds.render_template("error.html") + assert "Error " in rendered From af5d5d0243631562ad83f2c318bff31a077feb5d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:34:33 -0700 Subject: [PATCH 1355/2113] Allow leading comments on SQL queries, refs #1860 --- datasette/utils/__init__.py | 27 +++++++++++++++++++++------ tests/test_utils.py | 7 +++++++ 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 803ba96d..977a66d6 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -205,13 +205,28 @@ class InvalidSql(Exception): pass +# Allow SQL to start with a /* */ or -- comment +comment_re = ( + # Start of string, then any amount of whitespace + r"^(\s*" + + + # Comment that starts with -- and ends at a newline + r"(?:\-\-.*?\n\s*)" + + + # Comment that starts with /* and ends with */ + r"|(?:/\*[\s\S]*?\*/)" + + + # Whitespace + r")*\s*" +) + allowed_sql_res = [ - re.compile(r"^select\b"), - re.compile(r"^explain\s+select\b"), - 
re.compile(r"^explain\s+query\s+plan\s+select\b"), - re.compile(r"^with\b"), - re.compile(r"^explain\s+with\b"), - re.compile(r"^explain\s+query\s+plan\s+with\b"), + re.compile(comment_re + r"select\b"), + re.compile(comment_re + r"explain\s+select\b"), + re.compile(comment_re + r"explain\s+query\s+plan\s+select\b"), + re.compile(comment_re + r"with\b"), + re.compile(comment_re + r"explain\s+with\b"), + re.compile(comment_re + r"explain\s+query\s+plan\s+with\b"), ] allowed_pragmas = ( "database_list", diff --git a/tests/test_utils.py b/tests/test_utils.py index d71a612d..e89f1e6b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -141,6 +141,7 @@ def test_custom_json_encoder(obj, expected): "update blah set some_column='# Hello there\n\n* This is a list\n* of items\n--\n[And a link](https://github.com/simonw/datasette-render-markdown).'\nas demo_markdown", "PRAGMA case_sensitive_like = true", "SELECT * FROM pragma_not_on_allow_list('idx52')", + "/* This comment is not valid. select 1", ], ) def test_validate_sql_select_bad(bad_sql): @@ -166,6 +167,12 @@ def test_validate_sql_select_bad(bad_sql): "explain query plan WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;", "SELECT * FROM pragma_index_info('idx52')", "select * from pragma_table_xinfo('table')", + # Various types of comment + "-- comment\nselect 1", + "-- one line\n -- two line\nselect 1", + " /* comment */\nselect 1", + " /* comment */select 1", + "/* comment */\n -- another\n /* one more */ select 1", ], ) def test_validate_sql_select_good(good_sql): From 382a87158337540f991c6dc887080f7b37c7c26e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 14:13:31 -0700 Subject: [PATCH 1356/2113] max_signed_tokens_ttl setting, closes #1858 Also redesigned token format to include creation time and optional duration. 
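The key design change: instead of signing a fixed expiry timestamp (``"e"``), tokens now carry their creation time (``"t"``) and an optional duration in seconds (``"d"``), so the ``max_signed_tokens_ttl`` setting can be enforced at verification time, even against tokens minted before the setting was changed. A condensed sketch of the expiry rule implemented in the diff below:

.. code-block:: python

    import time

    def token_expired(decoded, max_signed_tokens_ttl=0):
        # decoded is an unsigned payload such as:
        # {"a": "root", "token": "dstok", "t": 1666900000, "d": 3600}
        duration = decoded.get("d")
        # The setting caps any requested duration, and also imposes one
        # on tokens that were created with no expiry at all:
        if max_signed_tokens_ttl and (
            duration is None or duration > max_signed_tokens_ttl
        ):
            duration = max_signed_tokens_ttl
        return bool(duration) and time.time() - decoded["t"] > duration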
--- datasette/app.py | 5 ++++ datasette/default_permissions.py | 33 +++++++++++++++++---- datasette/views/special.py | 20 ++++++++----- docs/settings.rst | 15 ++++++++++ tests/test_api.py | 1 + tests/test_auth.py | 50 ++++++++++++++++++++++++-------- 6 files changed, 99 insertions(+), 25 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 596ff44d..894d7f0f 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -129,6 +129,11 @@ SETTINGS = ( True, "Allow users to create and use signed API tokens", ), + Setting( + "max_signed_tokens_ttl", + 0, + "Maximum allowed expiry time for signed API tokens", + ), Setting("suggest_facets", True, "Calculate and display suggested facets"), Setting( "default_cache_ttl", diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 12499c16..c502dd70 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -56,6 +56,7 @@ def actor_from_request(datasette, request): prefix = "dstok_" if not datasette.setting("allow_signed_tokens"): return None + max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl") authorization = request.headers.get("authorization") if not authorization: return None @@ -69,11 +70,31 @@ def actor_from_request(datasette, request): decoded = datasette.unsign(token, namespace="token") except itsdangerous.BadSignature: return None - expires_at = decoded.get("e") - if expires_at is not None: - if expires_at < time.time(): + if "t" not in decoded: + # Missing timestamp + return None + created = decoded["t"] + if not isinstance(created, int): + # Invalid timestamp + return None + duration = decoded.get("d") + if duration is not None and not isinstance(duration, int): + # Invalid duration + return None + if (duration is None and max_signed_tokens_ttl) or ( + duration is not None + and max_signed_tokens_ttl + and duration > max_signed_tokens_ttl + ): + duration = max_signed_tokens_ttl + if duration: + if time.time() - created > duration: + # Expired return None - return {"id": decoded["a"], "token": "dstok"} + actor = {"id": decoded["a"], "token": "dstok"} + if duration: + actor["token_expires"] = created + duration + return actor @hookimpl @@ -102,9 +123,9 @@ def register_commands(cli): def create_token(id, secret, expires_after, debug): "Create a signed API token for the specified actor ID" ds = Datasette(secret=secret) - bits = {"a": id, "token": "dstok"} + bits = {"a": id, "token": "dstok", "t": int(time.time())} if expires_after: - bits["e"] = int(time.time()) + expires_after + bits["d"] = expires_after token = ds.sign(bits, namespace="token") click.echo("dstok_{}".format(token)) if debug: diff --git a/datasette/views/special.py b/datasette/views/special.py index 89015958..b754a2f0 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -195,20 +195,24 @@ class CreateTokenView(BaseView): async def post(self, request): self.check_permission(request) post = await request.post_vars() - expires = None errors = [] + duration = None if post.get("expire_type"): - duration = post.get("expire_duration") - if not duration or not duration.isdigit() or not int(duration) > 0: + duration_string = post.get("expire_duration") + if ( + not duration_string + or not duration_string.isdigit() + or not int(duration_string) > 0 + ): errors.append("Invalid expire duration") else: unit = post["expire_type"] if unit == "minutes": - expires = int(duration) * 60 + duration = int(duration_string) * 60 elif unit == "hours": - expires = int(duration) * 60 * 60 + duration 
= int(duration_string) * 60 * 60 elif unit == "days": - expires = int(duration) * 60 * 60 * 24 + duration = int(duration_string) * 60 * 60 * 24 else: errors.append("Invalid expire duration unit") token_bits = None @@ -216,8 +220,10 @@ class CreateTokenView(BaseView): if not errors: token_bits = { "a": request.actor["id"], - "e": (int(time.time()) + expires) if expires else None, + "t": int(time.time()), } + if duration: + token_bits["d"] = duration token = "dstok_{}".format(self.ds.sign(token_bits, "token")) return await self.render( ["create_token.html"], diff --git a/docs/settings.rst b/docs/settings.rst index be640b21..a990c78c 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -182,6 +182,21 @@ This is turned on by default. Use the following to turn it off:: Turning this setting off will disable the ``/-/create-token`` page, :ref:`described here `. It will also cause any incoming ``Authorization: Bearer dstok_...`` API tokens to be ignored. +.. _setting_max_signed_tokens_ttl: + +max_signed_tokens_ttl +~~~~~~~~~~~~~~~~~~~~~ + +Maximum allowed expiry time for signed API tokens created by users. + +Defaults to ``0`` which means no limit - tokens can be created that will never expire. + +Set this to a value in seconds to limit the maximum expiry time. For example, to set that limit to 24 hours you would use:: + + datasette mydatabase.db --setting max_signed_tokens_ttl 86400 + +This setting is enforced when incoming tokens are processed. + .. _setting_default_cache_ttl: default_cache_ttl diff --git a/tests/test_api.py b/tests/test_api.py index f7cbe950..fc171421 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -807,6 +807,7 @@ def test_settings_json(app_client): "sql_time_limit_ms": 200, "allow_download": True, "allow_signed_tokens": True, + "max_signed_tokens_ttl": 0, "allow_facet": True, "suggest_facets": True, "default_cache_ttl": 5, diff --git a/tests/test_auth.py b/tests/test_auth.py index f2d82107..fa1b2e46 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -173,13 +173,19 @@ def test_auth_create_token(app_client, post_data, errors, expected_duration): # Extract token from page token = response2.text.split('value="dstok_')[1].split('"')[0] details = app_client.ds.unsign(token, "token") - assert details.keys() == {"a", "e"} + assert details.keys() == {"a", "t", "d"} or details.keys() == {"a", "t"} assert details["a"] == "test" if expected_duration is None: - assert details["e"] is None + assert "d" not in details else: - about_right = int(time.time()) + expected_duration - assert about_right - 2 < details["e"] < about_right + 2 + assert details["d"] == expected_duration + # And test that token + response3 = app_client.get( + "/-/actor.json", + headers={"Authorization": "Bearer {}".format("dstok_{}".format(token))}, + ) + assert response3.status == 200 + assert response3.json["actor"]["id"] == "test" def test_auth_create_token_not_allowed_for_tokens(app_client): @@ -206,6 +212,7 @@ def test_auth_create_token_not_allowed_if_allow_signed_tokens_off(app_client): ( ("allow_signed_tokens_off", False), ("no_token", False), + ("no_timestamp", False), ("invalid_token", False), ("expired_token", False), ("valid_unlimited_token", True), @@ -214,12 +221,15 @@ def test_auth_create_token_not_allowed_if_allow_signed_tokens_off(app_client): ) def test_auth_with_dstok_token(app_client, scenario, should_work): token = None + _time = int(time.time()) if scenario in ("valid_unlimited_token", "allow_signed_tokens_off"): - token = app_client.ds.sign({"a": "test"}, "token") + token = 
app_client.ds.sign({"a": "test", "t": _time}, "token") elif scenario == "valid_expiring_token": - token = app_client.ds.sign({"a": "test", "e": int(time.time()) + 1000}, "token") + token = app_client.ds.sign({"a": "test", "t": _time - 50, "d": 1000}, "token") elif scenario == "expired_token": - token = app_client.ds.sign({"a": "test", "e": int(time.time()) - 1000}, "token") + token = app_client.ds.sign({"a": "test", "t": _time - 2000, "d": 1000}, "token") + elif scenario == "no_timestamp": + token = app_client.ds.sign({"a": "test"}, "token") elif scenario == "invalid_token": token = "invalid" if token: @@ -232,7 +242,16 @@ def test_auth_with_dstok_token(app_client, scenario, should_work): response = app_client.get("/-/actor.json", headers=headers) try: if should_work: - assert response.json == {"actor": {"id": "test", "token": "dstok"}} + assert response.json.keys() == {"actor"} + actor = response.json["actor"] + expected_keys = {"id", "token"} + if scenario != "valid_unlimited_token": + expected_keys.add("token_expires") + assert actor.keys() == expected_keys + assert actor["id"] == "test" + assert actor["token"] == "dstok" + if scenario != "valid_unlimited_token": + assert isinstance(actor["token_expires"], int) else: assert response.json == {"actor": None} finally: @@ -251,15 +270,22 @@ def test_cli_create_token(app_client, expires): token = result.output.strip() assert token.startswith("dstok_") details = app_client.ds.unsign(token[len("dstok_") :], "token") - expected_keys = {"a", "token"} + expected_keys = {"a", "token", "t"} if expires: - expected_keys.add("e") + expected_keys.add("d") assert details.keys() == expected_keys assert details["a"] == "test" response = app_client.get( "/-/actor.json", headers={"Authorization": "Bearer {}".format(token)} ) if expires is None or expires > 0: - assert response.json == {"actor": {"id": "test", "token": "dstok"}} + expected_actor = { + "id": "test", + "token": "dstok", + } + if expires and expires > 0: + expected_actor["token_expires"] = details["t"] + expires + assert response.json == {"actor": expected_actor} else: - assert response.json == {"actor": None} + expected_actor = None + assert response.json == {"actor": expected_actor} From 51c436fed29205721dcf17fa31d7e7090d34ebb8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 26 Oct 2022 20:57:02 -0700 Subject: [PATCH 1357/2113] First draft of insert row write API, refs #1851 --- datasette/default_permissions.py | 2 +- datasette/views/table.py | 76 +++++++++++++++++++++++++++----- docs/authentication.rst | 12 +++++ docs/cli-reference.rst | 2 + docs/json_api.rst | 38 ++++++++++++++++ 5 files changed, 119 insertions(+), 11 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index c502dd70..87684e2a 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -9,7 +9,7 @@ import time @hookimpl(tryfirst=True) def permission_allowed(datasette, actor, action, resource): async def inner(): - if action in ("permissions-debug", "debug-menu"): + if action in ("permissions-debug", "debug-menu", "insert-row"): if actor and actor.get("id") == "root": return True elif action == "view-instance": diff --git a/datasette/views/table.py b/datasette/views/table.py index f73b0957..74d1c532 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -28,7 +28,7 @@ from datasette.utils import ( urlsafe_components, value_as_boolean, ) -from datasette.utils.asgi import BadRequest, Forbidden, NotFound +from datasette.utils.asgi 
import BadRequest, Forbidden, NotFound, Response from datasette.filters import Filters from .base import DataView, DatasetteError, ureg from .database import QueryView @@ -103,15 +103,71 @@ class TableView(DataView): canned_query = await self.ds.get_canned_query( database_name, table_name, request.actor ) - assert canned_query, "You may only POST to a canned query" - return await QueryView(self.ds).data( - request, - canned_query["sql"], - metadata=canned_query, - editable=False, - canned_query=table_name, - named_parameters=canned_query.get("params"), - write=bool(canned_query.get("write")), + if canned_query: + return await QueryView(self.ds).data( + request, + canned_query["sql"], + metadata=canned_query, + editable=False, + canned_query=table_name, + named_parameters=canned_query.get("params"), + write=bool(canned_query.get("write")), + ) + else: + # Handle POST to a table + return await self.table_post(request, database_name, table_name) + + async def table_post(self, request, database_name, table_name): + # Table must exist (may handle table creation in the future) + db = self.ds.get_database(database_name) + if not await db.table_exists(table_name): + raise NotFound("Table not found: {}".format(table_name)) + # Must have insert-row permission + if not await self.ds.permission_allowed( + request.actor, "insert-row", resource=(database_name, table_name) + ): + raise Forbidden("Permission denied") + if request.headers.get("content-type") != "application/json": + # TODO: handle form-encoded data + raise BadRequest("Must send JSON data") + data = json.loads(await request.post_body()) + if "row" not in data: + raise BadRequest('Must send "row" data') + row = data["row"] + if not isinstance(row, dict): + raise BadRequest("row must be a dictionary") + # Verify all columns exist + columns = await db.table_columns(table_name) + pks = await db.primary_keys(table_name) + for key in row: + if key not in columns: + raise BadRequest("Column not found: {}".format(key)) + if key in pks: + raise BadRequest( + "Cannot insert into primary key column: {}".format(key) + ) + # Perform the insert + sql = "INSERT INTO [{table}] ({columns}) VALUES ({values})".format( + table=escape_sqlite(table_name), + columns=", ".join(escape_sqlite(c) for c in row), + values=", ".join("?" for c in row), + ) + cursor = await db.execute_write(sql, list(row.values())) + # Return the new row + rowid = cursor.lastrowid + new_row = ( + await db.execute( + "SELECT * FROM [{table}] WHERE rowid = ?".format( + table=escape_sqlite(table_name) + ), + [rowid], + ) + ).first() + return Response.json( + { + "row": dict(new_row), + }, + status=201, ) async def columns_to_select(self, table_columns, pks, request): diff --git a/docs/authentication.rst b/docs/authentication.rst index 0835e17c..233a50d2 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -547,6 +547,18 @@ Actor is allowed to view (and execute) a :ref:`canned query ` pa Default *allow*. +.. _permissions_insert_row: + +insert-row +---------- + +Actor is allowed to insert rows into a table. + +``resource`` - tuple: (string, string) + The name of the database, then the name of the table + +Default *deny*. + .. 
_permissions_execute_sql: execute-sql diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index b40c6b2c..56156568 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -229,6 +229,8 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam database files (default=True) allow_signed_tokens Allow users to create and use signed API tokens (default=True) + max_signed_tokens_ttl Maximum allowed expiry time for signed API tokens + (default=0) suggest_facets Calculate and display suggested facets (default=True) default_cache_ttl Default HTTP cache TTL (used in Cache-Control: diff --git a/docs/json_api.rst b/docs/json_api.rst index d3fdb1e4..b339a738 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -455,3 +455,41 @@ You can find this near the top of the source code of those pages, looking like t The JSON URL is also made available in a ``Link`` HTTP header for the page:: Link: https://latest.datasette.io/fixtures/sortable.json; rel="alternate"; type="application/json+datasette" + +.. _json_api_write: + +The JSON write API +------------------ + +Datasette provides a write API for JSON data. This is a POST-only API that requires an authenticated API token, see :ref:`CreateTokenView`. + +.. _json_api_write_insert_row: + +Inserting a single row +~~~~~~~~~~~~~~~~~~~~~~ + +This requires the :ref:`permissions_insert_row` permission. + +:: + + POST // + Content-Type: application/json + Authorization: Bearer dstok_ + { + "row": { + "column1": "value1", + "column2": "value2" + } + } + +If successful, this will return a ``201`` status code and the newly inserted row, for example: + +.. code-block:: json + + { + "row": { + "id": 1, + "column1": "value1", + "column2": "value2" + } + } From f6ca86987ba9d7d48eccf2cfe0bfc94942003844 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 06:56:11 -0700 Subject: [PATCH 1358/2113] Delete mirror-master-and-main.yml Closes #1865 --- .github/workflows/mirror-master-and-main.yml | 21 -------------------- 1 file changed, 21 deletions(-) delete mode 100644 .github/workflows/mirror-master-and-main.yml diff --git a/.github/workflows/mirror-master-and-main.yml b/.github/workflows/mirror-master-and-main.yml deleted file mode 100644 index 8418df40..00000000 --- a/.github/workflows/mirror-master-and-main.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Mirror "master" and "main" branches -on: - push: - branches: - - master - - main - -jobs: - mirror: - runs-on: ubuntu-latest - steps: - - name: Mirror to "master" - uses: zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 - with: - target-branch: master - force: false - - name: Mirror to "main" - uses: zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 - with: - target-branch: main - force: false From 5f6be3c48b661f74198b8fc85361d3ad6657880e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 11:47:41 -0700 Subject: [PATCH 1359/2113] Better comment handling in SQL regex, refs #1860 --- datasette/utils/__init__.py | 9 +++++---- tests/test_utils.py | 1 + 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 977a66d6..5acfb8b4 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -208,16 +208,16 @@ class InvalidSql(Exception): # Allow SQL to start with a /* */ or -- comment comment_re = ( # Start of string, then any amount of whitespace - r"^(\s*" + r"^\s*(" + # Comment that starts with -- and ends at a newline r"(?:\-\-.*?\n\s*)" + - 
# Comment that starts with /* and ends with */ - r"|(?:/\*[\s\S]*?\*/)" + # Comment that starts with /* and ends with */ - but does not have */ in it + r"|(?:\/\*((?!\*\/)[\s\S])*\*\/)" + # Whitespace - r")*\s*" + r"\s*)*\s*" ) allowed_sql_res = [ @@ -228,6 +228,7 @@ allowed_sql_res = [ re.compile(comment_re + r"explain\s+with\b"), re.compile(comment_re + r"explain\s+query\s+plan\s+with\b"), ] + allowed_pragmas = ( "database_list", "foreign_key_list", diff --git a/tests/test_utils.py b/tests/test_utils.py index e89f1e6b..c1589107 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -142,6 +142,7 @@ def test_custom_json_encoder(obj, expected): "PRAGMA case_sensitive_like = true", "SELECT * FROM pragma_not_on_allow_list('idx52')", "/* This comment is not valid. select 1", + "/**/\nupdate foo set bar = 1\n/* test */ select 1", ], ) def test_validate_sql_select_bad(bad_sql): From d2ca13b699d441a201c55cb72ff96919d3cd22bf Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 11:50:54 -0700 Subject: [PATCH 1360/2113] Add test for /* multi line */ comment, refs #1860 --- tests/test_utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_utils.py b/tests/test_utils.py index c1589107..8b64f865 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -174,6 +174,7 @@ def test_validate_sql_select_bad(bad_sql): " /* comment */\nselect 1", " /* comment */select 1", "/* comment */\n -- another\n /* one more */ select 1", + "/* This comment \n has multiple lines */\nselect 1", ], ) def test_validate_sql_select_good(good_sql): From 918f3561208ee58c44773d30e21bace7d7c7cf3b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 06:56:11 -0700 Subject: [PATCH 1361/2113] Delete mirror-master-and-main.yml Closes #1865 --- .github/workflows/mirror-master-and-main.yml | 21 -------------------- 1 file changed, 21 deletions(-) delete mode 100644 .github/workflows/mirror-master-and-main.yml diff --git a/.github/workflows/mirror-master-and-main.yml b/.github/workflows/mirror-master-and-main.yml deleted file mode 100644 index 8418df40..00000000 --- a/.github/workflows/mirror-master-and-main.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Mirror "master" and "main" branches -on: - push: - branches: - - master - - main - -jobs: - mirror: - runs-on: ubuntu-latest - steps: - - name: Mirror to "master" - uses: zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 - with: - target-branch: master - force: false - - name: Mirror to "main" - uses: zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 - with: - target-branch: main - force: false From b597bb6b3e7c4b449654bbfa5b01ceff3eb3cb33 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 11:47:41 -0700 Subject: [PATCH 1362/2113] Better comment handling in SQL regex, refs #1860 --- datasette/utils/__init__.py | 9 +++++---- tests/test_utils.py | 1 + 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 977a66d6..5acfb8b4 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -208,16 +208,16 @@ class InvalidSql(Exception): # Allow SQL to start with a /* */ or -- comment comment_re = ( # Start of string, then any amount of whitespace - r"^(\s*" + r"^\s*(" + # Comment that starts with -- and ends at a newline r"(?:\-\-.*?\n\s*)" + - # Comment that starts with /* and ends with */ - r"|(?:/\*[\s\S]*?\*/)" + # Comment that starts with /* and ends with */ - but does not have */ in it + 
r"|(?:\/\*((?!\*\/)[\s\S])*\*\/)" + # Whitespace - r")*\s*" + r"\s*)*\s*" ) allowed_sql_res = [ @@ -228,6 +228,7 @@ allowed_sql_res = [ re.compile(comment_re + r"explain\s+with\b"), re.compile(comment_re + r"explain\s+query\s+plan\s+with\b"), ] + allowed_pragmas = ( "database_list", "foreign_key_list", diff --git a/tests/test_utils.py b/tests/test_utils.py index e89f1e6b..c1589107 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -142,6 +142,7 @@ def test_custom_json_encoder(obj, expected): "PRAGMA case_sensitive_like = true", "SELECT * FROM pragma_not_on_allow_list('idx52')", "/* This comment is not valid. select 1", + "/**/\nupdate foo set bar = 1\n/* test */ select 1", ], ) def test_validate_sql_select_bad(bad_sql): From 6958e21b5c2012adf5655d2512cb4106490d10f2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 11:50:54 -0700 Subject: [PATCH 1363/2113] Add test for /* multi line */ comment, refs #1860 --- tests/test_utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_utils.py b/tests/test_utils.py index c1589107..8b64f865 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -174,6 +174,7 @@ def test_validate_sql_select_bad(bad_sql): " /* comment */\nselect 1", " /* comment */select 1", "/* comment */\n -- another\n /* one more */ select 1", + "/* This comment \n has multiple lines */\nselect 1", ], ) def test_validate_sql_select_good(good_sql): From a51608090b5ee37593078f71d18b33767ef3af79 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 12:06:18 -0700 Subject: [PATCH 1364/2113] Slight tweak to insert row API design, refs #1851 https://github.com/simonw/datasette/issues/1851#issuecomment-1292997608 --- datasette/views/table.py | 10 +++++----- docs/json_api.rst | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 74d1c532..056b7b04 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -131,11 +131,11 @@ class TableView(DataView): # TODO: handle form-encoded data raise BadRequest("Must send JSON data") data = json.loads(await request.post_body()) - if "row" not in data: - raise BadRequest('Must send "row" data') - row = data["row"] + if "insert" not in data: + raise BadRequest('Must send a "insert" key containing a dictionary') + row = data["insert"] if not isinstance(row, dict): - raise BadRequest("row must be a dictionary") + raise BadRequest("insert must be a dictionary") # Verify all columns exist columns = await db.table_columns(table_name) pks = await db.primary_keys(table_name) @@ -165,7 +165,7 @@ class TableView(DataView): ).first() return Response.json( { - "row": dict(new_row), + "inserted_row": dict(new_row), }, status=201, ) diff --git a/docs/json_api.rst b/docs/json_api.rst index b339a738..2ed8a354 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -476,7 +476,7 @@ This requires the :ref:`permissions_insert_row` permission. Content-Type: application/json Authorization: Bearer dstok_ { - "row": { + "insert": { "column1": "value1", "column2": "value2" } @@ -487,7 +487,7 @@ If successful, this will return a ``201`` status code and the newly inserted row .. 
code-block:: json { - "row": { + "inserted_row": { "id": 1, "column1": "value1", "column2": "value2" From a2a5dff709c6f1676ac30b5e734c2763002562cf Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 12:08:26 -0700 Subject: [PATCH 1365/2113] Missing tests for insert row API, refs #1851 --- tests/test_api_write.py | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 tests/test_api_write.py diff --git a/tests/test_api_write.py b/tests/test_api_write.py new file mode 100644 index 00000000..86c221d0 --- /dev/null +++ b/tests/test_api_write.py @@ -0,0 +1,38 @@ +from datasette.app import Datasette +from datasette.utils import sqlite3 +import pytest +import time + + +@pytest.fixture +def ds_write(tmp_path_factory): + db_directory = tmp_path_factory.mktemp("dbs") + db_path = str(db_directory / "data.db") + db = sqlite3.connect(str(db_path)) + db.execute("vacuum") + db.execute("create table docs (id integer primary key, title text, score float)") + ds = Datasette([db_path]) + yield ds + db.close() + + +@pytest.mark.asyncio +async def test_write_row(ds_write): + token = "dstok_{}".format( + ds_write.sign( + {"a": "root", "token": "dstok", "t": int(time.time())}, namespace="token" + ) + ) + response = await ds_write.client.post( + "/data/docs", + json={"insert": {"title": "Test", "score": 1.0}}, + headers={ + "Authorization": "Bearer {}".format(token), + "Content-Type": "application/json", + }, + ) + expected_row = {"id": 1, "title": "Test", "score": 1.0} + assert response.status_code == 201 + assert response.json()["inserted_row"] == expected_row + rows = (await ds_write.get_database("data").execute("select * from docs")).rows + assert dict(rows[0]) == expected_row From 6e788b49edf4f842c0817f006eb9d865778eea5e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 13:17:18 -0700 Subject: [PATCH 1366/2113] New URL design /db/table/-/insert, refs #1851 --- datasette/app.py | 6 +++- datasette/views/table.py | 69 +++++++++++++++++++++++++++++++++++++++- docs/json_api.rst | 18 ++++++----- tests/test_api_write.py | 6 ++-- 4 files changed, 86 insertions(+), 13 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 894d7f0f..8bc5fe36 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -39,7 +39,7 @@ from .views.special import ( PermissionsDebugView, MessagesDebugView, ) -from .views.table import TableView +from .views.table import TableView, TableInsertView from .views.row import RowView from .renderer import json_renderer from .url_builder import Urls @@ -1262,6 +1262,10 @@ class Datasette: RowView.as_view(self), r"/(?P[^\/\.]+)/(?P
<table>[^/]+?)/(?P<pk>[^/]+?)(\.(?P<format>\w+))?$",
         )
+        add_route(
+            TableInsertView.as_view(self),
+            r"/(?P<database>[^\/\.]+)/(?P<table>
[^\/\.]+)/-/insert$", + ) return [ # Compile any strings to regular expressions ((re.compile(pattern) if isinstance(pattern, str) else pattern), view) diff --git a/datasette/views/table.py b/datasette/views/table.py index 056b7b04..be3d4f93 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -30,7 +30,7 @@ from datasette.utils import ( ) from datasette.utils.asgi import BadRequest, Forbidden, NotFound, Response from datasette.filters import Filters -from .base import DataView, DatasetteError, ureg +from .base import BaseView, DataView, DatasetteError, ureg from .database import QueryView LINK_WITH_LABEL = ( @@ -1077,3 +1077,70 @@ async def display_columns_and_rows( } columns = [first_column] + columns return columns, cell_rows + + +class TableInsertView(BaseView): + name = "table-insert" + + def __init__(self, datasette): + self.ds = datasette + + async def post(self, request): + database_route = tilde_decode(request.url_vars["database"]) + try: + db = self.ds.get_database(route=database_route) + except KeyError: + raise NotFound("Database not found: {}".format(database_route)) + database_name = db.name + table_name = tilde_decode(request.url_vars["table"]) + # Table must exist (may handle table creation in the future) + db = self.ds.get_database(database_name) + if not await db.table_exists(table_name): + raise NotFound("Table not found: {}".format(table_name)) + # Must have insert-row permission + if not await self.ds.permission_allowed( + request.actor, "insert-row", resource=(database_name, table_name) + ): + raise Forbidden("Permission denied") + if request.headers.get("content-type") != "application/json": + # TODO: handle form-encoded data + raise BadRequest("Must send JSON data") + data = json.loads(await request.post_body()) + if "row" not in data: + raise BadRequest('Must send a "row" key containing a dictionary') + row = data["row"] + if not isinstance(row, dict): + raise BadRequest("row must be a dictionary") + # Verify all columns exist + columns = await db.table_columns(table_name) + pks = await db.primary_keys(table_name) + for key in row: + if key not in columns: + raise BadRequest("Column not found: {}".format(key)) + if key in pks: + raise BadRequest( + "Cannot insert into primary key column: {}".format(key) + ) + # Perform the insert + sql = "INSERT INTO [{table}] ({columns}) VALUES ({values})".format( + table=escape_sqlite(table_name), + columns=", ".join(escape_sqlite(c) for c in row), + values=", ".join("?" for c in row), + ) + cursor = await db.execute_write(sql, list(row.values())) + # Return the new row + rowid = cursor.lastrowid + new_row = ( + await db.execute( + "SELECT * FROM [{table}] WHERE rowid = ?".format( + table=escape_sqlite(table_name) + ), + [rowid], + ) + ).first() + return Response.json( + { + "inserted": [dict(new_row)], + }, + status=201, + ) diff --git a/docs/json_api.rst b/docs/json_api.rst index 2ed8a354..4a7961f2 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -463,7 +463,7 @@ The JSON write API Datasette provides a write API for JSON data. This is a POST-only API that requires an authenticated API token, see :ref:`CreateTokenView`. -.. _json_api_write_insert_row: +.. _TableInsertView: Inserting a single row ~~~~~~~~~~~~~~~~~~~~~~ @@ -472,11 +472,11 @@ This requires the :ref:`permissions_insert_row` permission. :: - POST //
+    POST /<database>/<table>
/-/insert Content-Type: application/json Authorization: Bearer dstok_ { - "insert": { + "row": { "column1": "value1", "column2": "value2" } @@ -487,9 +487,11 @@ If successful, this will return a ``201`` status code and the newly inserted row .. code-block:: json { - "inserted_row": { - "id": 1, - "column1": "value1", - "column2": "value2" - } + "inserted": [ + { + "id": 1, + "column1": "value1", + "column2": "value2" + } + ] } diff --git a/tests/test_api_write.py b/tests/test_api_write.py index 86c221d0..e8222e43 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -24,8 +24,8 @@ async def test_write_row(ds_write): ) ) response = await ds_write.client.post( - "/data/docs", - json={"insert": {"title": "Test", "score": 1.0}}, + "/data/docs/-/insert", + json={"row": {"title": "Test", "score": 1.0}}, headers={ "Authorization": "Bearer {}".format(token), "Content-Type": "application/json", @@ -33,6 +33,6 @@ async def test_write_row(ds_write): ) expected_row = {"id": 1, "title": "Test", "score": 1.0} assert response.status_code == 201 - assert response.json()["inserted_row"] == expected_row + assert response.json()["inserted"] == [expected_row] rows = (await ds_write.get_database("data").execute("select * from docs")).rows assert dict(rows[0]) == expected_row From b912d92b651c4f0b5137da924d135654511f0fe0 Mon Sep 17 00:00:00 2001 From: Forest Gregg Date: Thu, 27 Oct 2022 16:51:20 -0400 Subject: [PATCH 1367/2113] Make hash and size a lazy property (#1837) * use inspect data for hash and file size * make hash and cached_size lazy properties * move hash property near size --- datasette/database.py | 36 ++++++++++++++++++++++++------------ 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index d75bd70c..af1df0a8 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -39,7 +39,7 @@ class Database: self.memory_name = memory_name if memory_name is not None: self.is_memory = True - self.hash = None + self.cached_hash = None self.cached_size = None self._cached_table_counts = None self._write_thread = None @@ -47,14 +47,6 @@ class Database: # These are used when in non-threaded mode: self._read_connection = None self._write_connection = None - if not self.is_mutable and not self.is_memory: - if self.ds.inspect_data and self.ds.inspect_data.get(self.name): - self.hash = self.ds.inspect_data[self.name]["hash"] - self.cached_size = self.ds.inspect_data[self.name]["size"] - else: - p = Path(path) - self.hash = inspect_hash(p) - self.cached_size = p.stat().st_size @property def cached_table_counts(self): @@ -266,14 +258,34 @@ class Database: results = await self.execute_fn(sql_operation_in_thread) return results + @property + def hash(self): + if self.cached_hash is not None: + return self.cached_hash + elif self.is_mutable or self.is_memory: + return None + elif self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self.cached_hash = self.ds.inspect_data[self.name]["hash"] + return self.cached_hash + else: + p = Path(self.path) + self.cached_hash = inspect_hash(p) + return self.cached_hash + @property def size(self): - if self.is_memory: - return 0 if self.cached_size is not None: return self.cached_size - else: + elif self.is_memory: + return 0 + elif self.is_mutable: return Path(self.path).stat().st_size + elif self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self.cached_size = self.ds.inspect_data[self.name]["size"] + return self.cached_size + else: + self.cached_size = 
Path(self.path).stat().st_size + return self.cached_size async def table_counts(self, limit=10): if not self.is_mutable and self.cached_table_counts is not None: From 2c36e45447494cd7505440943367e29ec57c8e72 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 27 Oct 2022 13:51:45 -0700 Subject: [PATCH 1368/2113] Bump black from 22.8.0 to 22.10.0 (#1839) Bumps [black](https://github.com/psf/black) from 22.8.0 to 22.10.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/22.8.0...22.10.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index fe258adb..625557ae 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ setup( "pytest-xdist>=2.2.1", "pytest-asyncio>=0.17", "beautifulsoup4>=4.8.1", - "black==22.8.0", + "black==22.10.0", "blacken-docs==1.12.1", "pytest-timeout>=1.4.2", "trustme>=0.7", From e5e0459a0b60608cb5e9ff83f6b41f59e6cafdfd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 13:58:00 -0700 Subject: [PATCH 1369/2113] Release notes for 0.63, refs #1869 --- docs/changelog.rst | 44 +++++++++++++++++++++++++------------------- 1 file changed, 25 insertions(+), 19 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2255dcce..01957e4f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,36 +4,42 @@ Changelog ========= -.. _v0_63a1: +.. _v0_63: -0.63a1 (2022-10-23) -------------------- +0.63 (2022-10-27) +----------------- +Features +~~~~~~~~ + +- Now tested against Python 3.11. Docker containers used by ``datasette publish`` and ``datasette package`` both now use that version of Python. (:issue:`1853`) +- ``--load-extension`` option now supports entrypoints. Thanks, Alex Garcia. (`#1789 `__) +- Facet size can now be set per-table with the new ``facet_size`` table metadata option. (:issue:`1804`) +- The :ref:`setting_truncate_cells_html` setting now also affects long URLs in columns. (:issue:`1805`) +- The non-JavaScript SQL editor textarea now increases height to fit the SQL query. (:issue:`1786`) +- Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. (`#1794 `__) +- The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. (:issue:`1816`) +- SQL queries can now include leading SQL comments, using ``/* ... */`` or ``-- ...`` syntax. Thanks, Charles Nepote. (:issue:`1860`) - SQL query is now re-displayed when terminated with a time limit error. (:issue:`1819`) -- New documentation on :ref:`deploying_openrc` - thanks, Adam Simpson. (`#1825 `__) - The :ref:`inspect data ` mechanism is now used to speed up server startup - thanks, Forest Gregg. (:issue:`1834`) - In :ref:`config_dir` databases with filenames ending in ``.sqlite`` or ``.sqlite3`` are now automatically added to the Datasette instance. (:issue:`1646`) - Breadcrumb navigation display now respects the current user's permissions. (:issue:`1831`) -- Screenshots in the documentation are now maintained using `shot-scraper `__, as described in `Automating screenshots for the Datasette documentation using shot-scraper `__. 
(:issue:`1844`) -- The :ref:`datasette.check_visibility() ` method now accepts an optional ``permissions=`` list, allowing it to take multiple permissions into account at once when deciding if something should be shown as public or private. This has been used to correctly display padlock icons in more places in the Datasette interface. (:issue:`1829`) - -.. _v0_63a0: - -0.63a0 (2022-09-26) -------------------- +Plugin hooks and internals +~~~~~~~~~~~~~~~~~~~~~~~~~~ - The :ref:`plugin_hook_prepare_jinja2_environment` plugin hook now accepts an optional ``datasette`` argument. Hook implementations can also now return an ``async`` function which will be awaited automatically. (:issue:`1809`) -- ``--load-extension`` option now supports entrypoints. Thanks, Alex Garcia. (`#1789 `__) -- New tutorial: `Cleaning data with sqlite-utils and Datasette `__. -- Facet size can now be set per-table with the new ``facet_size`` table metadata option. (:issue:`1804`) -- ``truncate_cells_html`` setting now also affects long URLs in columns. (:issue:`1805`) - ``Database(is_mutable=)`` now defaults to ``True``. (:issue:`1808`) -- Non-JavaScript textarea now increases height to fit the SQL query. (:issue:`1786`) -- More detailed command descriptions on the :ref:`CLI reference ` page. (:issue:`1787`) +- The :ref:`datasette.check_visibility() ` method now accepts an optional ``permissions=`` list, allowing it to take multiple permissions into account at once when deciding if something should be shown as public or private. This has been used to correctly display padlock icons in more places in the Datasette interface. (:issue:`1829`) - Datasette no longer enforces upper bounds on its dependencies. (:issue:`1800`) -- Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. (`#1794 `__) -- The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. (:issue:`1816`) + +Documentation +~~~~~~~~~~~~~ + +- New tutorial: `Cleaning data with sqlite-utils and Datasette `__. +- Screenshots in the documentation are now maintained using `shot-scraper `__, as described in `Automating screenshots for the Datasette documentation using shot-scraper `__. (:issue:`1844`) +- More detailed command descriptions on the :ref:`CLI reference ` page. (:issue:`1787`) +- New documentation on :ref:`deploying_openrc` - thanks, Adam Simpson. (`#1825 `__) .. _v0_62: From bf00b0b59b6692bdec597ac9db4e0b497c5a47b4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 15:11:26 -0700 Subject: [PATCH 1370/2113] Release 0.63 Refs #1646, #1786, #1787, #1789, #1794, #1800, #1804, #1805, #1808, #1809, #1816, #1819, #1825, #1829, #1831, #1834, #1844, #1853, #1860 Closes #1869 --- datasette/version.py | 2 +- docs/changelog.rst | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index eb36da45..ac012640 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.63a1" +__version__ = "0.63" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 01957e4f..f573afb3 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -9,6 +9,8 @@ Changelog 0.63 (2022-10-27) ----------------- +See `Datasette 0.63: The annotated release notes `__ for more background on the changes in this release. 
+ Features ~~~~~~~~ From 2ea60e12d90b7cec03ebab728854d3ec4d553f54 Mon Sep 17 00:00:00 2001 From: Forest Gregg Date: Thu, 27 Oct 2022 16:51:20 -0400 Subject: [PATCH 1371/2113] Make hash and size a lazy property (#1837) * use inspect data for hash and file size * make hash and cached_size lazy properties * move hash property near size --- datasette/database.py | 36 ++++++++++++++++++++++++------------ 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index d75bd70c..af1df0a8 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -39,7 +39,7 @@ class Database: self.memory_name = memory_name if memory_name is not None: self.is_memory = True - self.hash = None + self.cached_hash = None self.cached_size = None self._cached_table_counts = None self._write_thread = None @@ -47,14 +47,6 @@ class Database: # These are used when in non-threaded mode: self._read_connection = None self._write_connection = None - if not self.is_mutable and not self.is_memory: - if self.ds.inspect_data and self.ds.inspect_data.get(self.name): - self.hash = self.ds.inspect_data[self.name]["hash"] - self.cached_size = self.ds.inspect_data[self.name]["size"] - else: - p = Path(path) - self.hash = inspect_hash(p) - self.cached_size = p.stat().st_size @property def cached_table_counts(self): @@ -266,14 +258,34 @@ class Database: results = await self.execute_fn(sql_operation_in_thread) return results + @property + def hash(self): + if self.cached_hash is not None: + return self.cached_hash + elif self.is_mutable or self.is_memory: + return None + elif self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self.cached_hash = self.ds.inspect_data[self.name]["hash"] + return self.cached_hash + else: + p = Path(self.path) + self.cached_hash = inspect_hash(p) + return self.cached_hash + @property def size(self): - if self.is_memory: - return 0 if self.cached_size is not None: return self.cached_size - else: + elif self.is_memory: + return 0 + elif self.is_mutable: return Path(self.path).stat().st_size + elif self.ds.inspect_data and self.ds.inspect_data.get(self.name): + self.cached_size = self.ds.inspect_data[self.name]["size"] + return self.cached_size + else: + self.cached_size = Path(self.path).stat().st_size + return self.cached_size async def table_counts(self, limit=10): if not self.is_mutable and self.cached_table_counts is not None: From 641bc4453b5ef1dff0b2fc7dfad0b692be7aa61c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 27 Oct 2022 13:51:45 -0700 Subject: [PATCH 1372/2113] Bump black from 22.8.0 to 22.10.0 (#1839) Bumps [black](https://github.com/psf/black) from 22.8.0 to 22.10.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/22.8.0...22.10.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index fe258adb..625557ae 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ setup( "pytest-xdist>=2.2.1", "pytest-asyncio>=0.17", "beautifulsoup4>=4.8.1", - "black==22.8.0", + "black==22.10.0", "blacken-docs==1.12.1", "pytest-timeout>=1.4.2", "trustme>=0.7", From 26af9b9c4a6c62ee15870caa1c7bc455165d3b11 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 13:58:00 -0700 Subject: [PATCH 1373/2113] Release notes for 0.63, refs #1869 --- docs/changelog.rst | 44 +++++++++++++++++++++++++------------------- 1 file changed, 25 insertions(+), 19 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2255dcce..01957e4f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,36 +4,42 @@ Changelog ========= -.. _v0_63a1: +.. _v0_63: -0.63a1 (2022-10-23) -------------------- +0.63 (2022-10-27) +----------------- +Features +~~~~~~~~ + +- Now tested against Python 3.11. Docker containers used by ``datasette publish`` and ``datasette package`` both now use that version of Python. (:issue:`1853`) +- ``--load-extension`` option now supports entrypoints. Thanks, Alex Garcia. (`#1789 `__) +- Facet size can now be set per-table with the new ``facet_size`` table metadata option. (:issue:`1804`) +- The :ref:`setting_truncate_cells_html` setting now also affects long URLs in columns. (:issue:`1805`) +- The non-JavaScript SQL editor textarea now increases height to fit the SQL query. (:issue:`1786`) +- Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. (`#1794 `__) +- The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. (:issue:`1816`) +- SQL queries can now include leading SQL comments, using ``/* ... */`` or ``-- ...`` syntax. Thanks, Charles Nepote. (:issue:`1860`) - SQL query is now re-displayed when terminated with a time limit error. (:issue:`1819`) -- New documentation on :ref:`deploying_openrc` - thanks, Adam Simpson. (`#1825 `__) - The :ref:`inspect data ` mechanism is now used to speed up server startup - thanks, Forest Gregg. (:issue:`1834`) - In :ref:`config_dir` databases with filenames ending in ``.sqlite`` or ``.sqlite3`` are now automatically added to the Datasette instance. (:issue:`1646`) - Breadcrumb navigation display now respects the current user's permissions. (:issue:`1831`) -- Screenshots in the documentation are now maintained using `shot-scraper `__, as described in `Automating screenshots for the Datasette documentation using shot-scraper `__. (:issue:`1844`) -- The :ref:`datasette.check_visibility() ` method now accepts an optional ``permissions=`` list, allowing it to take multiple permissions into account at once when deciding if something should be shown as public or private. This has been used to correctly display padlock icons in more places in the Datasette interface. (:issue:`1829`) - -.. _v0_63a0: - -0.63a0 (2022-09-26) -------------------- +Plugin hooks and internals +~~~~~~~~~~~~~~~~~~~~~~~~~~ - The :ref:`plugin_hook_prepare_jinja2_environment` plugin hook now accepts an optional ``datasette`` argument. Hook implementations can also now return an ``async`` function which will be awaited automatically. (:issue:`1809`) -- ``--load-extension`` option now supports entrypoints. Thanks, Alex Garcia. 
(`#1789 `__) -- New tutorial: `Cleaning data with sqlite-utils and Datasette `__. -- Facet size can now be set per-table with the new ``facet_size`` table metadata option. (:issue:`1804`) -- ``truncate_cells_html`` setting now also affects long URLs in columns. (:issue:`1805`) - ``Database(is_mutable=)`` now defaults to ``True``. (:issue:`1808`) -- Non-JavaScript textarea now increases height to fit the SQL query. (:issue:`1786`) -- More detailed command descriptions on the :ref:`CLI reference ` page. (:issue:`1787`) +- The :ref:`datasette.check_visibility() ` method now accepts an optional ``permissions=`` list, allowing it to take multiple permissions into account at once when deciding if something should be shown as public or private. This has been used to correctly display padlock icons in more places in the Datasette interface. (:issue:`1829`) - Datasette no longer enforces upper bounds on its dependencies. (:issue:`1800`) -- Facets are now displayed with better line-breaks in long values. Thanks, Daniel Rech. (`#1794 `__) -- The ``settings.json`` file used in :ref:`config_dir` is now validated on startup. (:issue:`1816`) + +Documentation +~~~~~~~~~~~~~ + +- New tutorial: `Cleaning data with sqlite-utils and Datasette `__. +- Screenshots in the documentation are now maintained using `shot-scraper `__, as described in `Automating screenshots for the Datasette documentation using shot-scraper `__. (:issue:`1844`) +- More detailed command descriptions on the :ref:`CLI reference ` page. (:issue:`1787`) +- New documentation on :ref:`deploying_openrc` - thanks, Adam Simpson. (`#1825 `__) .. _v0_62: From 61171f01549549e5fb25c72b13280d941d96dbf1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 15:11:26 -0700 Subject: [PATCH 1374/2113] Release 0.63 Refs #1646, #1786, #1787, #1789, #1794, #1800, #1804, #1805, #1808, #1809, #1816, #1819, #1825, #1829, #1831, #1834, #1844, #1853, #1860 Closes #1869 --- datasette/version.py | 2 +- docs/changelog.rst | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index eb36da45..ac012640 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.63a1" +__version__ = "0.63" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 01957e4f..f573afb3 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -9,6 +9,8 @@ Changelog 0.63 (2022-10-27) ----------------- +See `Datasette 0.63: The annotated release notes `__ for more background on the changes in this release. + Features ~~~~~~~~ From c9b5f5d598e7f85cd3e1ce020351a27da334408b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Oct 2022 17:58:36 -0700 Subject: [PATCH 1375/2113] Depend on sqlite-utils>=3.30 Decided to use the most recent version in case I decide later to use the flatten() utility function. 
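A minimal sketch of how that helper behaves (assuming the flatten() utility
that sqlite-utils 3.30 exposes as sqlite_utils.utils.flatten, which rewrites
nested dictionary keys into underscore-separated top-level keys):

    from sqlite_utils.utils import flatten

    # Nested dictionaries collapse into underscore-separated keys
    row = flatten({"author": {"name": "Simon", "id": 4}})
    assert row == {"author_name": "Simon", "author_id": 4}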
Refs #1850 --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 625557ae..99e2a4ad 100644 --- a/setup.py +++ b/setup.py @@ -57,6 +57,7 @@ setup( "PyYAML>=5.3", "mergedeep>=1.1.1", "itsdangerous>=1.1", + "sqlite-utils>=3.30", ], entry_points=""" [console_scripts] From c35859ae3df163406f1a1895ccf9803e933b2d8e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 29 Oct 2022 23:03:45 -0700 Subject: [PATCH 1376/2113] API for bulk inserts, closes #1866 --- datasette/app.py | 5 ++ datasette/views/table.py | 136 +++++++++++++++++++++---------- docs/cli-reference.rst | 2 + docs/json_api.rst | 48 ++++++++++- docs/settings.rst | 11 +++ tests/test_api.py | 1 + tests/test_api_write.py | 168 +++++++++++++++++++++++++++++++++++++-- 7 files changed, 320 insertions(+), 51 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 8bc5fe36..f80d3792 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -99,6 +99,11 @@ SETTINGS = ( 1000, "Maximum rows that can be returned from a table or custom query", ), + Setting( + "max_insert_rows", + 100, + "Maximum rows that can be inserted at a time using the bulk insert API", + ), Setting( "num_sql_threads", 3, diff --git a/datasette/views/table.py b/datasette/views/table.py index be3d4f93..fd203036 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -30,6 +30,7 @@ from datasette.utils import ( ) from datasette.utils.asgi import BadRequest, Forbidden, NotFound, Response from datasette.filters import Filters +import sqlite_utils from .base import BaseView, DataView, DatasetteError, ureg from .database import QueryView @@ -1085,62 +1086,109 @@ class TableInsertView(BaseView): def __init__(self, datasette): self.ds = datasette + async def _validate_data(self, request, db, table_name): + errors = [] + + def _errors(errors): + return None, errors, {} + + if request.headers.get("content-type") != "application/json": + # TODO: handle form-encoded data + return _errors(["Invalid content-type, must be application/json"]) + body = await request.post_body() + try: + data = json.loads(body) + except json.JSONDecodeError as e: + return _errors(["Invalid JSON: {}".format(e)]) + if not isinstance(data, dict): + return _errors(["JSON must be a dictionary"]) + keys = data.keys() + # keys must contain "row" or "rows" + if "row" not in keys and "rows" not in keys: + return _errors(['JSON must have one or other of "row" or "rows"']) + rows = [] + if "row" in keys: + if "rows" in keys: + return _errors(['Cannot use "row" and "rows" at the same time']) + row = data["row"] + if not isinstance(row, dict): + return _errors(['"row" must be a dictionary']) + rows = [row] + data["return_rows"] = True + else: + rows = data["rows"] + if not isinstance(rows, list): + return _errors(['"rows" must be a list']) + for row in rows: + if not isinstance(row, dict): + return _errors(['"rows" must be a list of dictionaries']) + # Does this exceed max_insert_rows? 
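+        # (The limit comes from the max_insert_rows setting, default 100, and
+        # over-limit requests are rejected with a 400 error rather than truncated.)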
+        max_insert_rows = self.ds.setting("max_insert_rows")
+        if len(rows) > max_insert_rows:
+            return _errors(
+                ["Too many rows, maximum allowed is {}".format(max_insert_rows)]
+            )
+        # Validate columns of each row
+        columns = await db.table_columns(table_name)
+        # TODO: There are cases where pks are OK, if not using auto-incrementing pk
+        pks = await db.primary_keys(table_name)
+        allowed_columns = set(columns) - set(pks)
+        for i, row in enumerate(rows):
+            invalid_columns = set(row.keys()) - allowed_columns
+            if invalid_columns:
+                errors.append(
+                    "Row {} has invalid columns: {}".format(
+                        i, ", ".join(sorted(invalid_columns))
+                    )
+                )
+        if errors:
+            return _errors(errors)
+        extra = {key: data[key] for key in data if key not in ("rows", "row")}
+        return rows, errors, extra
+
     async def post(self, request):
+        def _error(messages, status=400):
+            return Response.json({"ok": False, "errors": messages}, status=status)
+
         database_route = tilde_decode(request.url_vars["database"])
         try:
             db = self.ds.get_database(route=database_route)
         except KeyError:
-            raise NotFound("Database not found: {}".format(database_route))
+            return _error(["Database not found: {}".format(database_route)], 404)
         database_name = db.name
         table_name = tilde_decode(request.url_vars["table"])
+        # Table must exist (may handle table creation in the future)
         db = self.ds.get_database(database_name)
         if not await db.table_exists(table_name):
-            raise NotFound("Table not found: {}".format(table_name))
+            return _error(["Table not found: {}".format(table_name)], 404)
         # Must have insert-row permission
         if not await self.ds.permission_allowed(
             request.actor, "insert-row", resource=(database_name, table_name)
         ):
-            raise Forbidden("Permission denied")
-        if request.headers.get("content-type") != "application/json":
-            # TODO: handle form-encoded data
-            raise BadRequest("Must send JSON data")
-        data = json.loads(await request.post_body())
-        if "row" not in data:
-            raise BadRequest('Must send a "row" key containing a dictionary')
-        row = data["row"]
-        if not isinstance(row, dict):
-            raise BadRequest("row must be a dictionary")
-        # Verify all columns exist
-        columns = await db.table_columns(table_name)
-        pks = await db.primary_keys(table_name)
-        for key in row:
-            if key not in columns:
-                raise BadRequest("Column not found: {}".format(key))
-            if key in pks:
-                raise BadRequest(
-                    "Cannot insert into primary key column: {}".format(key)
+            return _error(["Permission denied"], 403)
+        rows, errors, extra = await self._validate_data(request, db, table_name)
+        if errors:
+            return _error(errors, 400)
+
+        should_return = bool(extra.get("return_rows", False))
+        # Insert rows
+        def insert_rows(conn):
+            table = sqlite_utils.Database(conn)[table_name]
+            if should_return:
+                rowids = []
+                for row in rows:
+                    rowids.append(table.insert(row).last_rowid)
+                return list(
+                    table.rows_where(
+                        "rowid in ({})".format(",".join("?" for _ in rowids)), rowids
                     )
                 )
-        # Perform the insert
-        sql = "INSERT INTO [{table}] ({columns}) VALUES ({values})".format(
-            table=escape_sqlite(table_name),
-            columns=", ".join(escape_sqlite(c) for c in row),
-            values=", ".join("?" for c in row),
-        )
-        cursor = await db.execute_write(sql, list(row.values()))
-        # Return the new row
-        rowid = cursor.lastrowid
-        new_row = (
-            await db.execute(
-                "SELECT * FROM [{table}] WHERE rowid = ?".format(
-                    table=escape_sqlite(table_name)
-                ),
-                [rowid],
-            )
-        ).first()
-        return Response.json(
-            {
-                "inserted": [dict(new_row)],
-            },
-            status=201,
-        )
+        else:
+            table.insert_all(rows)
+
+        rows = await db.execute_write_fn(insert_rows)
+        result = {"ok": True}
+        if should_return:
+            result["inserted"] = rows
+        return Response.json(result, status=201)
diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst
index 56156568..649a3dcd 100644
--- a/docs/cli-reference.rst
+++ b/docs/cli-reference.rst
@@ -213,6 +213,8 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam
                             (default=100)
   max_returned_rows         Maximum rows that can be returned from a table or custom
                             query (default=1000)
+  max_insert_rows           Maximum rows that can be inserted at a time using
+                            the bulk insert API (default=100)
   num_sql_threads           Number of threads in the thread pool for executing
                             SQLite queries (default=3)
   sql_time_limit_ms         Time limit for a SQL query in milliseconds
diff --git a/docs/json_api.rst b/docs/json_api.rst
index 4a7961f2..01558c23 100644
--- a/docs/json_api.rst
+++ b/docs/json_api.rst
@@ -465,11 +465,13 @@ Datasette provides a write API for JSON data. This is a POST-only API that requi
 
 .. _TableInsertView:
 
-Inserting a single row
-~~~~~~~~~~~~~~~~~~~~~~
+Inserting rows
+~~~~~~~~~~~~~~
 
 This requires the :ref:`permissions_insert_row` permission.
 
+A single row can be inserted using the ``"row"`` key:
+
 ::
 
     POST /<database>/<table>/-/insert
@@ -495,3 +497,45 @@ If successful, this will return a ``201`` status code and the newly inserted row
             }
         ]
     }
+
+To insert multiple rows at a time, use the same API method but send a list of dictionaries as the ``"rows"`` key:
+
+::
+
+    POST /<database>/<table>
/-/insert + Content-Type: application/json + Authorization: Bearer dstok_ + { + "rows": [ + { + "column1": "value1", + "column2": "value2" + }, + { + "column1": "value3", + "column2": "value4" + } + ] + } + +If successful, this will return a ``201`` status code and an empty ``{}`` response body. + +To return the newly inserted rows, add the ``"return_rows": true`` key to the request body: + +.. code-block:: json + + { + "rows": [ + { + "column1": "value1", + "column2": "value2" + }, + { + "column1": "value3", + "column2": "value4" + } + ], + "return_rows": true + } + +This will return the same ``"inserted"`` key as the single row example above. There is a small performance penalty for using this option. diff --git a/docs/settings.rst b/docs/settings.rst index a990c78c..b86b18bd 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -96,6 +96,17 @@ You can increase or decrease this limit like so:: datasette mydatabase.db --setting max_returned_rows 2000 +.. _setting_max_insert_rows: + +max_insert_rows +~~~~~~~~~~~~~~~ + +Maximum rows that can be inserted at a time using the bulk insert API, see :ref:`TableInsertView`. Defaults to 100. + +You can increase or decrease this limit like so:: + + datasette mydatabase.db --setting max_insert_rows 1000 + .. _setting_num_sql_threads: num_sql_threads diff --git a/tests/test_api.py b/tests/test_api.py index fc171421..ebd675b9 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -804,6 +804,7 @@ def test_settings_json(app_client): "facet_suggest_time_limit_ms": 50, "facet_time_limit_ms": 200, "max_returned_rows": 100, + "max_insert_rows": 100, "sql_time_limit_ms": 200, "allow_download": True, "allow_signed_tokens": True, diff --git a/tests/test_api_write.py b/tests/test_api_write.py index e8222e43..4a5a58aa 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -18,11 +18,7 @@ def ds_write(tmp_path_factory): @pytest.mark.asyncio async def test_write_row(ds_write): - token = "dstok_{}".format( - ds_write.sign( - {"a": "root", "token": "dstok", "t": int(time.time())}, namespace="token" - ) - ) + token = write_token(ds_write) response = await ds_write.client.post( "/data/docs/-/insert", json={"row": {"title": "Test", "score": 1.0}}, @@ -36,3 +32,165 @@ async def test_write_row(ds_write): assert response.json()["inserted"] == [expected_row] rows = (await ds_write.get_database("data").execute("select * from docs")).rows assert dict(rows[0]) == expected_row + + +@pytest.mark.asyncio +@pytest.mark.parametrize("return_rows", (True, False)) +async def test_write_rows(ds_write, return_rows): + token = write_token(ds_write) + data = {"rows": [{"title": "Test {}".format(i), "score": 1.0} for i in range(20)]} + if return_rows: + data["return_rows"] = True + response = await ds_write.client.post( + "/data/docs/-/insert", + json=data, + headers={ + "Authorization": "Bearer {}".format(token), + "Content-Type": "application/json", + }, + ) + assert response.status_code == 201 + actual_rows = [ + dict(r) + for r in ( + await ds_write.get_database("data").execute("select * from docs") + ).rows + ] + assert len(actual_rows) == 20 + assert actual_rows == [ + {"id": i + 1, "title": "Test {}".format(i), "score": 1.0} for i in range(20) + ] + assert response.json()["ok"] is True + if return_rows: + assert response.json()["inserted"] == actual_rows + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "path,input,special_case,expected_status,expected_errors", + ( + ( + "/data2/docs/-/insert", + {}, + None, + 404, + ["Database not found: data2"], + ), + 
( + "/data/docs2/-/insert", + {}, + None, + 404, + ["Table not found: docs2"], + ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"} for i in range(10)]}, + "bad_token", + 403, + ["Permission denied"], + ), + ( + "/data/docs/-/insert", + {}, + "invalid_json", + 400, + [ + "Invalid JSON: Expecting property name enclosed in double quotes: line 1 column 2 (char 1)" + ], + ), + ( + "/data/docs/-/insert", + {}, + "invalid_content_type", + 400, + ["Invalid content-type, must be application/json"], + ), + ( + "/data/docs/-/insert", + [], + None, + 400, + ["JSON must be a dictionary"], + ), + ( + "/data/docs/-/insert", + {"row": "blah"}, + None, + 400, + ['"row" must be a dictionary'], + ), + ( + "/data/docs/-/insert", + {"blah": "blah"}, + None, + 400, + ['JSON must have one or other of "row" or "rows"'], + ), + ( + "/data/docs/-/insert", + {"rows": "blah"}, + None, + 400, + ['"rows" must be a list'], + ), + ( + "/data/docs/-/insert", + {"rows": ["blah"]}, + None, + 400, + ['"rows" must be a list of dictionaries'], + ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"} for i in range(101)]}, + None, + 400, + ["Too many rows, maximum allowed is 100"], + ), + # Validate columns of each row + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test", "bad": 1, "worse": 2} for i in range(2)]}, + None, + 400, + [ + "Row 0 has invalid columns: bad, worse", + "Row 1 has invalid columns: bad, worse", + ], + ), + ), +) +async def test_write_row_errors( + ds_write, path, input, special_case, expected_status, expected_errors +): + token = write_token(ds_write) + if special_case == "bad_token": + token += "bad" + kwargs = dict( + json=input, + headers={ + "Authorization": "Bearer {}".format(token), + "Content-Type": "text/plain" + if special_case == "invalid_content_type" + else "application/json", + }, + ) + if special_case == "invalid_json": + del kwargs["json"] + kwargs["content"] = "{bad json" + response = await ds_write.client.post( + path, + **kwargs, + ) + assert response.status_code == expected_status + assert response.json()["ok"] is False + assert response.json()["errors"] == expected_errors + + +def write_token(ds): + return "dstok_{}".format( + ds.sign( + {"a": "root", "token": "dstok", "t": int(time.time())}, namespace="token" + ) + ) From f6bf2d8045cc239fe34357342bff1440561c8909 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 29 Oct 2022 23:20:11 -0700 Subject: [PATCH 1377/2113] Initial prototype of API explorer at /-/api, refs #1871 --- datasette/app.py | 5 ++ datasette/templates/api_explorer.html | 73 +++++++++++++++++++++++++++ datasette/views/special.py | 8 +++ tests/test_docs.py | 2 +- 4 files changed, 87 insertions(+), 1 deletion(-) create mode 100644 datasette/templates/api_explorer.html diff --git a/datasette/app.py b/datasette/app.py index f80d3792..c3d802a4 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -33,6 +33,7 @@ from .views.special import ( JsonDataView, PatternPortfolioView, AuthTokenView, + ApiExplorerView, CreateTokenView, LogoutView, AllowDebugView, @@ -1235,6 +1236,10 @@ class Datasette: CreateTokenView.as_view(self), r"/-/create-token$", ) + add_route( + ApiExplorerView.as_view(self), + r"/-/api$", + ) add_route( LogoutView.as_view(self), r"/-/logout$", diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html new file mode 100644 index 00000000..034bee60 --- /dev/null +++ b/datasette/templates/api_explorer.html @@ -0,0 +1,73 @@ +{% extends "base.html" %} + +{% block title %}API Explorer{% endblock %} + 
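+{# This page renders a form that POSTs a JSON body such as {"row": {"title": "x"}}
+   to a write API path like /database/table/-/insert, sending the supplied token
+   as a Bearer authorization header, and shows the JSON response in an alert. #}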
+{% block content %} + +

API Explorer

+ +

Use this tool to try out the Datasette write API.

+ +{% if errors %} + {% for error in errors %} +

{{ error }}

+ {% endfor %} +{% endif %} + + +
+ + +
+
+ + +
+
+ +
+

+ + + + +{% endblock %} diff --git a/datasette/views/special.py b/datasette/views/special.py index b754a2f0..9922a621 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -235,3 +235,11 @@ class CreateTokenView(BaseView): "token_bits": token_bits, }, ) + + +class ApiExplorerView(BaseView): + name = "api_explorer" + has_json_alternate = False + + async def get(self, request): + return await self.render(["api_explorer.html"], request) diff --git a/tests/test_docs.py b/tests/test_docs.py index cd5a6c13..e9b813fe 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -62,7 +62,7 @@ def documented_views(): if first_word.endswith("View"): view_labels.add(first_word) # We deliberately don't document these: - view_labels.update(("PatternPortfolioView", "AuthTokenView")) + view_labels.update(("PatternPortfolioView", "AuthTokenView", "ApiExplorerView")) return view_labels From 9eb9ffae3ddd4e8ff0b713bf6fd6a0afed3368d7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 30 Oct 2022 13:09:55 -0700 Subject: [PATCH 1378/2113] Drop API token requirement from API explorer, refs #1871 --- datasette/default_permissions.py | 9 +++++++++ datasette/templates/api_explorer.html | 13 ++++--------- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 87684e2a..151ba2b5 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -131,3 +131,12 @@ def register_commands(cli): if debug: click.echo("\nDecoded:\n") click.echo(json.dumps(ds.unsign(token, namespace="token"), indent=2)) + + +@hookimpl +def skip_csrf(scope): + # Skip CSRF check for requests with content-type: application/json + if scope["type"] == "http": + headers = scope.get("headers") or {} + if dict(headers).get(b"content-type") == b"application/json": + return True diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index 034bee60..01b182d8 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -15,16 +15,13 @@ {% endif %}
@@ -46,7 +43,6 @@ form.addEventListener("submit", (ev) => { var formData = new FormData(form); var json = formData.get('json'); var path = formData.get('path'); - var token = formData.get('token'); // Validate JSON try { var data = JSON.parse(json); @@ -60,7 +56,6 @@ form.addEventListener("submit", (ev) => { body: json, headers: { 'Content-Type': 'application/json', - 'Authorization': `Bearer ${token}` } }).then(r => r.json()).then(r => { alert(JSON.stringify(r, null, 2)); From fedbfcc36873366143195d8fe124e1859bf88346 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 30 Oct 2022 14:49:07 -0700 Subject: [PATCH 1379/2113] Neater display of output and errors in API explorer, refs #1871 --- datasette/templates/api_explorer.html | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index 01b182d8..38fdb7bc 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -26,6 +26,12 @@

+ + """.format( escape(ex.sql) ) diff --git a/tests/test_api.py b/tests/test_api.py index ad74d16e..4027a7a5 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -662,7 +662,11 @@ def test_sql_time_limit(app_client_shorter_time_limit): "

SQL query took too long. The time limit is controlled by the\n" 'sql_time_limit_ms\n' "configuration option.

\n" - "
select sleep(0.5)
" + '\n' + "" ), "status": 400, "title": "SQL Interrupted", diff --git a/tests/test_html.py b/tests/test_html.py index 4b394199..7cfe9d90 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -172,7 +172,7 @@ def test_sql_time_limit(app_client_shorter_time_limit): """ sql_time_limit_ms """.strip(), - "
select sleep(0.5)
", + '', ] for expected_html_fragment in expected_html_fragments: assert expected_html_fragment in response.text From 93a02281dad2f23da84210f6ae9c63777ad8af5e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Nov 2022 10:22:26 -0700 Subject: [PATCH 1384/2113] Show interrupted query in resizing textarea, closes #1876 --- datasette/views/base.py | 6 +++++- tests/test_api.py | 6 +++++- tests/test_html.py | 2 +- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index 67aa3a42..6b01fdd2 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -378,7 +378,11 @@ class DataView(BaseView):

SQL query took too long. The time limit is controlled by the sql_time_limit_ms configuration option.

-
{}
+ + """.format( escape(ex.sql) ) diff --git a/tests/test_api.py b/tests/test_api.py index ebd675b9..de0223e2 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -662,7 +662,11 @@ def test_sql_time_limit(app_client_shorter_time_limit): "

SQL query took too long. The time limit is controlled by the\n" 'sql_time_limit_ms\n' "configuration option.

\n" - "
select sleep(0.5)
" + '\n' + "" ), "status": 400, "title": "SQL Interrupted", diff --git a/tests/test_html.py b/tests/test_html.py index 4b394199..7cfe9d90 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -172,7 +172,7 @@ def test_sql_time_limit(app_client_shorter_time_limit): """ sql_time_limit_ms """.strip(), - "
select sleep(0.5)
", + '', ] for expected_html_fragment in expected_html_fragments: assert expected_html_fragment in response.text From 9bec7c38eb93cde5afb16df9bdd96aea2a5b0459 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Nov 2022 11:07:59 -0700 Subject: [PATCH 1385/2113] ignore and replace options for bulk inserts, refs #1873 Also removed the rule that you cannot include primary keys in the rows you insert. And added validation that catches invalid parameters in the incoming JSON. And renamed "inserted" to "rows" in the returned JSON for return_rows: true --- datasette/views/table.py | 41 ++++++++++++++------ docs/json_api.rst | 4 +- tests/test_api_write.py | 83 ++++++++++++++++++++++++++++++++++++++-- 3 files changed, 111 insertions(+), 17 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 1e3d566e..7692a4e3 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -1107,6 +1107,7 @@ class TableInsertView(BaseView): if not isinstance(data, dict): return _errors(["JSON must be a dictionary"]) keys = data.keys() + # keys must contain "row" or "rows" if "row" not in keys and "rows" not in keys: return _errors(['JSON must have one or other of "row" or "rows"']) @@ -1126,19 +1127,31 @@ class TableInsertView(BaseView): for row in rows: if not isinstance(row, dict): return _errors(['"rows" must be a list of dictionaries']) + # Does this exceed max_insert_rows? max_insert_rows = self.ds.setting("max_insert_rows") if len(rows) > max_insert_rows: return _errors( ["Too many rows, maximum allowed is {}".format(max_insert_rows)] ) + + # Validate other parameters + extras = { + key: value for key, value in data.items() if key not in ("row", "rows") + } + valid_extras = {"return_rows", "ignore", "replace"} + invalid_extras = extras.keys() - valid_extras + if invalid_extras: + return _errors( + ['Invalid parameter: "{}"'.format('", "'.join(sorted(invalid_extras)))] + ) + if extras.get("ignore") and extras.get("replace"): + return _errors(['Cannot use "ignore" and "replace" at the same time']) + # Validate columns of each row - columns = await db.table_columns(table_name) - # TODO: There are cases where pks are OK, if not using auto-incrementing pk - pks = await db.primary_keys(table_name) - allowed_columns = set(columns) - set(pks) + columns = set(await db.table_columns(table_name)) for i, row in enumerate(rows): - invalid_columns = set(row.keys()) - allowed_columns + invalid_columns = set(row.keys()) - columns if invalid_columns: errors.append( "Row {} has invalid columns: {}".format( @@ -1147,8 +1160,7 @@ class TableInsertView(BaseView): ) if errors: return _errors(errors) - extra = {key: data[key] for key in data if key not in ("rows", "row")} - return rows, errors, extra + return rows, errors, extras async def post(self, request): database_route = tilde_decode(request.url_vars["database"]) @@ -1168,18 +1180,23 @@ class TableInsertView(BaseView): request.actor, "insert-row", resource=(database_name, table_name) ): return _error(["Permission denied"], 403) - rows, errors, extra = await self._validate_data(request, db, table_name) + rows, errors, extras = await self._validate_data(request, db, table_name) if errors: return _error(errors, 400) - should_return = bool(extra.get("return_rows", False)) + ignore = extras.get("ignore") + replace = extras.get("replace") + + should_return = bool(extras.get("return_rows", False)) # Insert rows def insert_rows(conn): table = sqlite_utils.Database(conn)[table_name] if should_return: rowids = [] for row in rows: - 
rowids.append(table.insert(row).last_rowid) + rowids.append( + table.insert(row, ignore=ignore, replace=replace).last_rowid + ) return list( table.rows_where( "rowid in ({})".format(",".join("?" for _ in rowids)), @@ -1187,12 +1204,12 @@ class TableInsertView(BaseView): ) ) else: - table.insert_all(rows) + table.insert_all(rows, ignore=ignore, replace=replace) rows = await db.execute_write_fn(insert_rows) result = {"ok": True} if should_return: - result["inserted"] = rows + result["rows"] = rows return Response.json(result, status=201) diff --git a/docs/json_api.rst b/docs/json_api.rst index da4500ab..34c13211 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -489,7 +489,7 @@ If successful, this will return a ``201`` status code and the newly inserted row .. code-block:: json { - "inserted": [ + "rows": [ { "id": 1, "column1": "value1", @@ -538,7 +538,7 @@ To return the newly inserted rows, add the ``"return_rows": true`` key to the re "return_rows": true } -This will return the same ``"inserted"`` key as the single row example above. There is a small performance penalty for using this option. +This will return the same ``"rows"`` key as the single row example above. There is a small performance penalty for using this option. .. _RowDeleteView: diff --git a/tests/test_api_write.py b/tests/test_api_write.py index 1cfba104..d0b0f324 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -37,7 +37,7 @@ async def test_write_row(ds_write): ) expected_row = {"id": 1, "title": "Test", "score": 1.0} assert response.status_code == 201 - assert response.json()["inserted"] == [expected_row] + assert response.json()["rows"] == [expected_row] rows = (await ds_write.get_database("data").execute("select * from docs")).rows assert dict(rows[0]) == expected_row @@ -70,7 +70,7 @@ async def test_write_rows(ds_write, return_rows): ] assert response.json()["ok"] is True if return_rows: - assert response.json()["inserted"] == actual_rows + assert response.json()["rows"] == actual_rows @pytest.mark.asyncio @@ -156,6 +156,27 @@ async def test_write_rows(ds_write, return_rows): 400, ["Too many rows, maximum allowed is 100"], ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"}], "ignore": True, "replace": True}, + None, + 400, + ['Cannot use "ignore" and "replace" at the same time'], + ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"}], "invalid_param": True}, + None, + 400, + ['Invalid parameter: "invalid_param"'], + ), + ( + "/data/docs/-/insert", + {"rows": [{"title": "Test"}], "one": True, "two": True}, + None, + 400, + ['Invalid parameter: "one", "two"'], + ), # Validate columns of each row ( "/data/docs/-/insert", @@ -196,6 +217,62 @@ async def test_write_row_errors( assert response.json()["errors"] == expected_errors +@pytest.mark.asyncio +@pytest.mark.parametrize( + "ignore,replace,expected_rows", + ( + ( + True, + False, + [ + {"id": 1, "title": "Exists", "score": None}, + ], + ), + ( + False, + True, + [ + {"id": 1, "title": "One", "score": None}, + ], + ), + ), +) +@pytest.mark.parametrize("should_return", (True, False)) +async def test_insert_ignore_replace( + ds_write, ignore, replace, expected_rows, should_return +): + await ds_write.get_database("data").execute_write( + "insert into docs (id, title) values (1, 'Exists')" + ) + token = write_token(ds_write) + data = {"rows": [{"id": 1, "title": "One"}]} + if ignore: + data["ignore"] = True + if replace: + data["replace"] = True + if should_return: + data["return_rows"] = True + response = await 
ds_write.client.post( + "/data/docs/-/insert", + json=data, + headers={ + "Authorization": "Bearer {}".format(token), + "Content-Type": "application/json", + }, + ) + assert response.status_code == 201 + actual_rows = [ + dict(r) + for r in ( + await ds_write.get_database("data").execute("select * from docs") + ).rows + ] + assert actual_rows == expected_rows + assert response.json()["ok"] is True + if should_return: + assert response.json()["rows"] == expected_rows + + @pytest.mark.asyncio @pytest.mark.parametrize("scenario", ("no_token", "no_perm", "bad_table", "has_perm")) async def test_delete_row(ds_write, scenario): @@ -217,7 +294,7 @@ async def test_delete_row(ds_write, scenario): }, ) assert insert_response.status_code == 201 - pk = insert_response.json()["inserted"][0]["id"] + pk = insert_response.json()["rows"][0]["id"] path = "/data/{}/{}/-/delete".format( "docs" if scenario != "bad_table" else "bad_table", pk From 497290beaf32e6b779f9683ef15f1c5bc142a41a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Nov 2022 12:59:17 -0700 Subject: [PATCH 1386/2113] Handle database errors in /-/insert, refs #1866, #1873 Also improved API explorer to show HTTP status of response, refs #1871 --- datasette/templates/api_explorer.html | 14 +++++++++----- datasette/views/table.py | 5 ++++- tests/test_api_write.py | 11 +++++++++++ 3 files changed, 24 insertions(+), 6 deletions(-) diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index 38fdb7bc..93bacde3 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -27,7 +27,8 @@ @@ -64,12 +65,15 @@ form.addEventListener("submit", (ev) => { headers: { 'Content-Type': 'application/json', } - }).then(r => r.json()).then(r => { + }).then(r => { + document.getElementById('response-status').textContent = r.status; + return r.json(); + }).then(data => { var errorList = output.querySelector('.errors'); - if (r.errors) { + if (data.errors) { errorList.style.display = 'block'; errorList.innerHTML = ''; - r.errors.forEach(error => { + data.errors.forEach(error => { var li = document.createElement('li'); li.textContent = error; errorList.appendChild(li); @@ -77,7 +81,7 @@ form.addEventListener("submit", (ev) => { } else { errorList.style.display = 'none'; } - output.querySelector('pre').innerText = JSON.stringify(r, null, 2); + output.querySelector('pre').innerText = JSON.stringify(data, null, 2); output.style.display = 'block'; }).catch(err => { alert("Error: " + err); diff --git a/datasette/views/table.py b/datasette/views/table.py index 7692a4e3..61227206 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -1206,7 +1206,10 @@ class TableInsertView(BaseView): else: table.insert_all(rows, ignore=ignore, replace=replace) - rows = await db.execute_write_fn(insert_rows) + try: + rows = await db.execute_write_fn(insert_rows) + except Exception as e: + return _error([str(e)]) result = {"ok": True} if should_return: result["rows"] = rows diff --git a/tests/test_api_write.py b/tests/test_api_write.py index d0b0f324..0b567f48 100644 --- a/tests/test_api_write.py +++ b/tests/test_api_write.py @@ -156,6 +156,13 @@ async def test_write_rows(ds_write, return_rows): 400, ["Too many rows, maximum allowed is 100"], ), + ( + "/data/docs/-/insert", + {"rows": [{"id": 1, "title": "Test"}]}, + "duplicate_id", + 400, + ["UNIQUE constraint failed: docs.id"], + ), ( "/data/docs/-/insert", {"rows": [{"title": "Test"}], "ignore": True, "replace": True}, @@ -194,6 +201,10 @@ 
async def test_write_row_errors( ds_write, path, input, special_case, expected_status, expected_errors ): token = write_token(ds_write) + if special_case == "duplicate_id": + await ds_write.get_database("data").execute_write( + "insert into docs (id) values (1)" + ) if special_case == "bad_token": token += "bad" kwargs = dict( From 0b166befc0096fca30d71e19608a928d59c331a4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 1 Nov 2022 17:31:22 -0700 Subject: [PATCH 1387/2113] API explorer can now do GET, has JSON syntax highlighting Refs #1871 --- .../static/json-format-highlight-1.0.1.js | 43 +++++++++++ datasette/templates/api_explorer.html | 77 +++++++++++++++---- 2 files changed, 103 insertions(+), 17 deletions(-) create mode 100644 datasette/static/json-format-highlight-1.0.1.js diff --git a/datasette/static/json-format-highlight-1.0.1.js b/datasette/static/json-format-highlight-1.0.1.js new file mode 100644 index 00000000..e87c76e1 --- /dev/null +++ b/datasette/static/json-format-highlight-1.0.1.js @@ -0,0 +1,43 @@ +/* +https://github.com/luyilin/json-format-highlight +From https://unpkg.com/json-format-highlight@1.0.1/dist/json-format-highlight.js +MIT Licensed +*/ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : + typeof define === 'function' && define.amd ? define(factory) : + (global.jsonFormatHighlight = factory()); +}(this, (function () { 'use strict'; + +var defaultColors = { + keyColor: 'dimgray', + numberColor: 'lightskyblue', + stringColor: 'lightcoral', + trueColor: 'lightseagreen', + falseColor: '#f66578', + nullColor: 'cornflowerblue' +}; + +function index (json, colorOptions) { + if ( colorOptions === void 0 ) colorOptions = {}; + + if (!json) { return; } + if (typeof json !== 'string') { + json = JSON.stringify(json, null, 2); + } + var colors = Object.assign({}, defaultColors, colorOptions); + json = json.replace(/&/g, '&').replace(//g, '>'); + return json.replace(/("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+]?\d+)?)/g, function (match) { + var color = colors.numberColor; + if (/^"/.test(match)) { + color = /:$/.test(match) ? colors.keyColor : colors.stringColor; + } else { + color = /true/.test(match) ? colors.trueColor : /false/.test(match) ? colors.falseColor : /null/.test(match) ? colors.nullColor : color; + } + return ("" + match + ""); + }); +} + +return index; + +}))); diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index 93bacde3..de5337e3 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -2,6 +2,10 @@ {% block title %}API Explorer{% endblock %} +{% block extra_head %} + +{% endblock %} + {% block content %}

API Explorer

@@ -14,17 +18,30 @@ {% endfor %} {% endif %} -
-
- - -
-
- - -
-

- +
+ GET +
+
+ + + +
+ +
+
+ POST +
+
+ + +
+
+ + +
+

+ +
{% else %} - {% if not canned_write and not error %} + {% if not canned_query_write and not error %}

0 results

{% endif %} {% endif %} diff --git a/datasette/views/database.py b/datasette/views/database.py index 0770a380..658c35e6 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -1,4 +1,3 @@ -from asyncinject import Registry from dataclasses import dataclass, field from typing import Callable from urllib.parse import parse_qsl, urlencode @@ -33,7 +32,7 @@ from datasette.utils import ( from datasette.utils.asgi import AsgiFileDownload, NotFound, Response, Forbidden from datasette.plugins import pm -from .base import BaseView, DatasetteError, DataView, View, _error, stream_csv +from .base import BaseView, DatasetteError, View, _error, stream_csv class DatabaseView(View): @@ -57,7 +56,7 @@ class DatabaseView(View): sql = (request.args.get("sql") or "").strip() if sql: - return await query_view(request, datasette) + return await QueryView()(request, datasette) if format_ not in ("html", "json"): raise NotFound("Invalid format: {}".format(format_)) @@ -65,10 +64,6 @@ class DatabaseView(View): metadata = (datasette.metadata("databases") or {}).get(database, {}) datasette.update_with_inherited_metadata(metadata) - table_counts = await db.table_counts(5) - hidden_table_names = set(await db.hidden_table_names()) - all_foreign_keys = await db.get_all_foreign_keys() - sql_views = [] for view_name in await db.view_names(): view_visible, view_private = await datasette.check_visibility( @@ -196,8 +191,13 @@ class QueryContext: # urls: dict = field( # metadata={"help": "Object containing URL helpers like `database()`"} # ) - canned_write: bool = field( - metadata={"help": "Boolean indicating if this canned query allows writes"} + canned_query_write: bool = field( + metadata={ + "help": "Boolean indicating if this is a canned query that allows writes" + } + ) + metadata: dict = field( + metadata={"help": "Metadata about the database or the canned query"} ) db_is_immutable: bool = field( metadata={"help": "Boolean indicating if this database is immutable"} @@ -232,7 +232,6 @@ class QueryContext: show_hide_hidden: str = field( metadata={"help": "Hidden input field for the _show_sql parameter"} ) - metadata: dict = field(metadata={"help": "Metadata about the query/database"}) database_color: Callable = field( metadata={"help": "Function that returns a color for a given database name"} ) @@ -242,6 +241,12 @@ class QueryContext: alternate_url_json: str = field( metadata={"help": "URL for alternate JSON version of this page"} ) + # TODO: refactor this to somewhere else, probably ds.render_template() + select_templates: list = field( + metadata={ + "help": "List of templates that were considered for rendering this page" + } + ) async def get_tables(datasette, request, db): @@ -320,287 +325,105 @@ async def database_download(request, datasette): ) -async def query_view( - request, - datasette, - # canned_query=None, - # _size=None, - # named_parameters=None, - # write=False, -): - db = await datasette.resolve_database(request) - database = db.name - # Flattened because of ?sql=&name1=value1&name2=value2 feature - params = {key: request.args.get(key) for key in request.args} - sql = None - if "sql" in params: - sql = params.pop("sql") - if "_shape" in params: - params.pop("_shape") +class QueryView(View): + async def post(self, request, datasette): + from datasette.app import TableNotFound - # extras come from original request.args to avoid being flattened - extras = request.args.getlist("_extra") + db = await datasette.resolve_database(request) - # TODO: Behave differently for canned query 
here: - await datasette.ensure_permissions(request.actor, [("execute-sql", database)]) - - _, private = await datasette.check_visibility( - request.actor, - permissions=[ - ("view-database", database), - "view-instance", - ], - ) - - extra_args = {} - if params.get("_timelimit"): - extra_args["custom_time_limit"] = int(params["_timelimit"]) - - format_ = request.url_vars.get("format") or "html" - query_error = None - try: - validate_sql_select(sql) - results = await datasette.execute( - database, sql, params, truncate=True, **extra_args - ) - columns = results.columns - rows = results.rows - except QueryInterrupted as ex: - raise DatasetteError( - textwrap.dedent( - """ -

SQL query took too long. The time limit is controlled by the - sql_time_limit_ms - configuration option.

- - - """.format( - markupsafe.escape(ex.sql) - ) - ).strip(), - title="SQL Interrupted", - status=400, - message_is_html=True, - ) - except sqlite3.DatabaseError as ex: - query_error = str(ex) - results = None - rows = [] - columns = [] - except (sqlite3.OperationalError, InvalidSql) as ex: - raise DatasetteError(str(ex), title="Invalid SQL", status=400) - except sqlite3.OperationalError as ex: - raise DatasetteError(str(ex)) - except DatasetteError: - raise - - # Handle formats from plugins - if format_ == "csv": - - async def fetch_data_for_csv(request, _next=None): - results = await db.execute(sql, params, truncate=True) - data = {"rows": results.rows, "columns": results.columns} - return data, None, None - - return await stream_csv(datasette, fetch_data_for_csv, request, db.name) - elif format_ in datasette.renderers.keys(): - # Dispatch request to the correct output format renderer - # (CSV is not handled here due to streaming) - result = call_with_supported_arguments( - datasette.renderers[format_][0], - datasette=datasette, - columns=columns, - rows=rows, - sql=sql, - query_name=None, - database=database, - table=None, - request=request, - view_name="table", - truncated=results.truncated if results else False, - error=query_error, - # These will be deprecated in Datasette 1.0: - args=request.args, - data={"rows": rows, "columns": columns}, - ) - if asyncio.iscoroutine(result): - result = await result - if result is None: - raise NotFound("No data") - if isinstance(result, dict): - r = Response( - body=result.get("body"), - status=result.get("status_code") or 200, - content_type=result.get("content_type", "text/plain"), - headers=result.get("headers"), + # We must be a canned query + table_found = False + try: + await datasette.resolve_table(request) + table_found = True + except TableNotFound as table_not_found: + canned_query = await datasette.get_canned_query( + table_not_found.database_name, table_not_found.table, request.actor ) - elif isinstance(result, Response): - r = result - # if status_code is not None: - # # Over-ride the status code - # r.status = status_code - else: - assert False, f"{result} should be dict or Response" - elif format_ == "html": - headers = {} - templates = [f"query-{to_css_class(database)}.html", "query.html"] - template = datasette.jinja_env.select_template(templates) - alternate_url_json = datasette.absolute_url( - request, - datasette.urls.path(path_with_format(request=request, format="json")), - ) - data = {} - headers.update( - { - "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( - alternate_url_json - ) - } - ) - metadata = (datasette.metadata("databases") or {}).get(database, {}) - datasette.update_with_inherited_metadata(metadata) + if canned_query is None: + raise + if table_found: + # That should not have happened + raise DatasetteError("Unexpected table found on POST", status=404) - renderers = {} - for key, (_, can_render) in datasette.renderers.items(): - it_can_render = call_with_supported_arguments( - can_render, - datasette=datasette, - columns=data.get("columns") or [], - rows=data.get("rows") or [], - sql=data.get("query", {}).get("sql", None), - query_name=data.get("query_name"), - database=database, - table=data.get("table"), - request=request, - view_name="database", + # If database is immutable, return an error + if not db.is_mutable: + raise Forbidden("Database is immutable") + + # Process the POST + body = await request.post_body() + body = body.decode("utf-8").strip() + if body.startswith("{") and 
body.endswith("}"): + params = json.loads(body) + # But we want key=value strings + for key, value in params.items(): + params[key] = str(value) + else: + params = dict(parse_qsl(body, keep_blank_values=True)) + # Should we return JSON? + should_return_json = ( + request.headers.get("accept") == "application/json" + or request.args.get("_json") + or params.get("_json") + ) + params_for_query = MagicParameters(params, request, datasette) + ok = None + redirect_url = None + try: + cursor = await db.execute_write(canned_query["sql"], params_for_query) + message = canned_query.get( + "on_success_message" + ) or "Query executed, {} row{} affected".format( + cursor.rowcount, "" if cursor.rowcount == 1 else "s" + ) + message_type = datasette.INFO + redirect_url = canned_query.get("on_success_redirect") + ok = True + except Exception as ex: + message = canned_query.get("on_error_message") or str(ex) + message_type = datasette.ERROR + redirect_url = canned_query.get("on_error_redirect") + ok = False + if should_return_json: + return Response.json( + { + "ok": ok, + "message": message, + "redirect": redirect_url, + } ) - it_can_render = await await_me_maybe(it_can_render) - if it_can_render: - renderers[key] = datasette.urls.path( - path_with_format(request=request, format=key) - ) - - allow_execute_sql = await datasette.permission_allowed( - request.actor, "execute-sql", database - ) - - show_hide_hidden = "" - if metadata.get("hide_sql"): - if bool(params.get("_show_sql")): - show_hide_link = path_with_removed_args(request, {"_show_sql"}) - show_hide_text = "hide" - show_hide_hidden = '' - else: - show_hide_link = path_with_added_args(request, {"_show_sql": 1}) - show_hide_text = "show" else: - if bool(params.get("_hide_sql")): - show_hide_link = path_with_removed_args(request, {"_hide_sql"}) - show_hide_text = "show" - show_hide_hidden = '' - else: - show_hide_link = path_with_added_args(request, {"_hide_sql": 1}) - show_hide_text = "hide" - hide_sql = show_hide_text == "show" + datasette.add_message(request, message, message_type) + return Response.redirect(redirect_url or request.path) - # Extract any :named parameters - named_parameters = await derive_named_parameters( - datasette.get_database(database), sql - ) - named_parameter_values = { - named_parameter: params.get(named_parameter) or "" - for named_parameter in named_parameters - if not named_parameter.startswith("_") - } + async def get(self, request, datasette): + from datasette.app import TableNotFound - # Set to blank string if missing from params - for named_parameter in named_parameters: - if named_parameter not in params and not named_parameter.startswith("_"): - params[named_parameter] = "" - - r = Response.html( - await datasette.render_template( - template, - QueryContext( - database=database, - query={ - "sql": sql, - "params": params, - }, - canned_query=None, - private=private, - canned_write=False, - db_is_immutable=not db.is_mutable, - error=query_error, - hide_sql=hide_sql, - show_hide_link=datasette.urls.path(show_hide_link), - show_hide_text=show_hide_text, - editable=True, # TODO - allow_execute_sql=allow_execute_sql, - tables=await get_tables(datasette, request, db), - named_parameter_values=named_parameter_values, - edit_sql_url="todo", - display_rows=await display_rows( - datasette, database, request, rows, columns - ), - table_columns=await _table_columns(datasette, database) - if allow_execute_sql - else {}, - columns=columns, - renderers=renderers, - url_csv=datasette.urls.path( - path_with_format( - 
request=request, format="csv", extra_qs={"_size": "max"} - ) - ), - show_hide_hidden=markupsafe.Markup(show_hide_hidden), - metadata=metadata, - database_color=lambda _: "#ff0000", - alternate_url_json=alternate_url_json, - ), - request=request, - view_name="database", - ), - headers=headers, - ) - else: - assert False, "Invalid format: {}".format(format_) - if datasette.cors: - add_cors_headers(r.headers) - return r - - -class QueryView(DataView): - async def data( - self, - request, - sql, - editable=True, - canned_query=None, - metadata=None, - _size=None, - named_parameters=None, - write=False, - default_labels=None, - ): - db = await self.ds.resolve_database(request) + db = await datasette.resolve_database(request) database = db.name - params = {key: request.args.get(key) for key in request.args} - if "sql" in params: - params.pop("sql") - if "_shape" in params: - params.pop("_shape") + + # Are we a canned query? + canned_query = None + canned_query_write = False + if "table" in request.url_vars: + try: + await datasette.resolve_table(request) + except TableNotFound as table_not_found: + # Was this actually a canned query? + canned_query = await datasette.get_canned_query( + table_not_found.database_name, table_not_found.table, request.actor + ) + if canned_query is None: + raise + canned_query_write = bool(canned_query.get("write")) private = False if canned_query: # Respect canned query permissions - visible, private = await self.ds.check_visibility( + visible, private = await datasette.check_visibility( request.actor, permissions=[ - ("view-query", (database, canned_query)), + ("view-query", (database, canned_query["name"])), ("view-database", database), "view-instance", ], @@ -609,18 +432,32 @@ class QueryView(DataView): raise Forbidden("You do not have permission to view this query") else: - await self.ds.ensure_permissions(request.actor, [("execute-sql", database)]) + await datasette.ensure_permissions( + request.actor, [("execute-sql", database)] + ) + + # Flattened because of ?sql=&name1=value1&name2=value2 feature + params = {key: request.args.get(key) for key in request.args} + sql = None + + if canned_query: + sql = canned_query["sql"] + elif "sql" in params: + sql = params.pop("sql") # Extract any :named parameters - named_parameters = named_parameters or await derive_named_parameters( - self.ds.get_database(database), sql - ) + named_parameters = [] + if canned_query and canned_query.get("params"): + named_parameters = canned_query["params"] + if not named_parameters: + named_parameters = await derive_named_parameters( + datasette.get_database(database), sql + ) named_parameter_values = { named_parameter: params.get(named_parameter) or "" for named_parameter in named_parameters if not named_parameter.startswith("_") } - # Set to blank string if missing from params for named_parameter in named_parameters: if named_parameter not in params and not named_parameter.startswith("_"): @@ -629,212 +466,159 @@ class QueryView(DataView): extra_args = {} if params.get("_timelimit"): extra_args["custom_time_limit"] = int(params["_timelimit"]) - if _size: - extra_args["page_size"] = _size - templates = [f"query-{to_css_class(database)}.html", "query.html"] - if canned_query: - templates.insert( - 0, - f"query-{to_css_class(database)}-{to_css_class(canned_query)}.html", - ) + format_ = request.url_vars.get("format") or "html" query_error = None + results = None + rows = [] + columns = [] - # Execute query - as write or as read - if write: - if request.method == "POST": - # If database 
is immutable, return an error - if not db.is_mutable: - raise Forbidden("Database is immutable") - body = await request.post_body() - body = body.decode("utf-8").strip() - if body.startswith("{") and body.endswith("}"): - params = json.loads(body) - # But we want key=value strings - for key, value in params.items(): - params[key] = str(value) - else: - params = dict(parse_qsl(body, keep_blank_values=True)) - # Should we return JSON? - should_return_json = ( - request.headers.get("accept") == "application/json" - or request.args.get("_json") - or params.get("_json") - ) - if canned_query: - params_for_query = MagicParameters(params, request, self.ds) - else: - params_for_query = params - ok = None - try: - cursor = await self.ds.databases[database].execute_write( - sql, params_for_query - ) - message = metadata.get( - "on_success_message" - ) or "Query executed, {} row{} affected".format( - cursor.rowcount, "" if cursor.rowcount == 1 else "s" - ) - message_type = self.ds.INFO - redirect_url = metadata.get("on_success_redirect") - ok = True - except Exception as e: - message = metadata.get("on_error_message") or str(e) - message_type = self.ds.ERROR - redirect_url = metadata.get("on_error_redirect") - ok = False - if should_return_json: - return Response.json( - { - "ok": ok, - "message": message, - "redirect": redirect_url, - } - ) - else: - self.ds.add_message(request, message, message_type) - return self.redirect(request, redirect_url or request.path) - else: + params_for_query = params - async def extra_template(): - return { - "request": request, - "db_is_immutable": not db.is_mutable, - "path_with_added_args": path_with_added_args, - "path_with_removed_args": path_with_removed_args, - "named_parameter_values": named_parameter_values, - "canned_query": canned_query, - "success_message": request.args.get("_success") or "", - "canned_write": True, - } - - return ( - { - "database": database, - "rows": [], - "truncated": False, - "columns": [], - "query": {"sql": sql, "params": params}, - "private": private, - }, - extra_template, - templates, - ) - else: # Not a write - if canned_query: - params_for_query = MagicParameters(params, request, self.ds) - else: - params_for_query = params + if not canned_query_write: try: - results = await self.ds.execute( + if not canned_query: + # For regular queries we only allow SELECT, plus other rules + validate_sql_select(sql) + else: + # Canned queries can run magic parameters + params_for_query = MagicParameters(params, request, datasette) + results = await datasette.execute( database, sql, params_for_query, truncate=True, **extra_args ) - columns = [r[0] for r in results.description] - except sqlite3.DatabaseError as e: - query_error = e + columns = results.columns + rows = results.rows + except QueryInterrupted as ex: + raise DatasetteError( + textwrap.dedent( + """ +

SQL query took too long. The time limit is controlled by the + sql_time_limit_ms + configuration option.

+ + + """.format( + markupsafe.escape(ex.sql) + ) + ).strip(), + title="SQL Interrupted", + status=400, + message_is_html=True, + ) + except sqlite3.DatabaseError as ex: + query_error = str(ex) results = None + rows = [] columns = [] + except (sqlite3.OperationalError, InvalidSql) as ex: + raise DatasetteError(str(ex), title="Invalid SQL", status=400) + except sqlite3.OperationalError as ex: + raise DatasetteError(str(ex)) + except DatasetteError: + raise - allow_execute_sql = await self.ds.permission_allowed( - request.actor, "execute-sql", database - ) + # Handle formats from plugins + if format_ == "csv": - async def extra_template(): - display_rows = [] - truncate_cells = self.ds.setting("truncate_cells_html") - for row in results.rows if results else []: - display_row = [] - for column, value in zip(results.columns, row): - display_value = value - # Let the plugins have a go - # pylint: disable=no-member - plugin_display_value = None - for candidate in pm.hook.render_cell( - row=row, - value=value, - column=column, - table=None, - database=database, - datasette=self.ds, - request=request, - ): - candidate = await await_me_maybe(candidate) - if candidate is not None: - plugin_display_value = candidate - break - if plugin_display_value is not None: - display_value = plugin_display_value - else: - if value in ("", None): - display_value = markupsafe.Markup(" ") - elif is_url(str(display_value).strip()): - display_value = markupsafe.Markup( - '{truncated_url}'.format( - url=markupsafe.escape(value.strip()), - truncated_url=markupsafe.escape( - truncate_url(value.strip(), truncate_cells) - ), - ) - ) - elif isinstance(display_value, bytes): - blob_url = path_with_format( - request=request, - format="blob", - extra_qs={ - "_blob_column": column, - "_blob_hash": hashlib.sha256( - display_value - ).hexdigest(), - }, - ) - formatted = format_bytes(len(value)) - display_value = markupsafe.Markup( - '<Binary: {:,} byte{}>'.format( - blob_url, - ' title="{}"'.format(formatted) - if "bytes" not in formatted - else "", - len(value), - "" if len(value) == 1 else "s", - ) - ) - else: - display_value = str(value) - if truncate_cells and len(display_value) > truncate_cells: - display_value = ( - display_value[:truncate_cells] + "\u2026" - ) - display_row.append(display_value) - display_rows.append(display_row) + async def fetch_data_for_csv(request, _next=None): + results = await db.execute(sql, params, truncate=True) + data = {"rows": results.rows, "columns": results.columns} + return data, None, None - # Show 'Edit SQL' button only if: - # - User is allowed to execute SQL - # - SQL is an approved SELECT statement - # - No magic parameters, so no :_ in the SQL string - edit_sql_url = None - is_validated_sql = False - try: - validate_sql_select(sql) - is_validated_sql = True - except InvalidSql: - pass - if allow_execute_sql and is_validated_sql and ":_" not in sql: - edit_sql_url = ( - self.ds.urls.database(database) - + "?" 
- + urlencode( - { - **{ - "sql": sql, - }, - **named_parameter_values, - } - ) + return await stream_csv(datasette, fetch_data_for_csv, request, db.name) + elif format_ in datasette.renderers.keys(): + # Dispatch request to the correct output format renderer + # (CSV is not handled here due to streaming) + result = call_with_supported_arguments( + datasette.renderers[format_][0], + datasette=datasette, + columns=columns, + rows=rows, + sql=sql, + query_name=canned_query["name"] if canned_query else None, + database=database, + table=None, + request=request, + view_name="table", + truncated=results.truncated if results else False, + error=query_error, + # These will be deprecated in Datasette 1.0: + args=request.args, + data={"rows": rows, "columns": columns}, + ) + if asyncio.iscoroutine(result): + result = await result + if result is None: + raise NotFound("No data") + if isinstance(result, dict): + r = Response( + body=result.get("body"), + status=result.get("status_code") or 200, + content_type=result.get("content_type", "text/plain"), + headers=result.get("headers"), + ) + elif isinstance(result, Response): + r = result + # if status_code is not None: + # # Over-ride the status code + # r.status = status_code + else: + assert False, f"{result} should be dict or Response" + elif format_ == "html": + headers = {} + templates = [f"query-{to_css_class(database)}.html", "query.html"] + if canned_query: + templates.insert( + 0, + f"query-{to_css_class(database)}-{to_css_class(canned_query['name'])}.html", ) + template = datasette.jinja_env.select_template(templates) + alternate_url_json = datasette.absolute_url( + request, + datasette.urls.path(path_with_format(request=request, format="json")), + ) + data = {} + headers.update( + { + "Link": '{}; rel="alternate"; type="application/json+datasette"'.format( + alternate_url_json + ) + } + ) + metadata = (datasette.metadata("databases") or {}).get(database, {}) + datasette.update_with_inherited_metadata(metadata) + + renderers = {} + for key, (_, can_render) in datasette.renderers.items(): + it_can_render = call_with_supported_arguments( + can_render, + datasette=datasette, + columns=data.get("columns") or [], + rows=data.get("rows") or [], + sql=data.get("query", {}).get("sql", None), + query_name=data.get("query_name"), + database=database, + table=data.get("table"), + request=request, + view_name="database", + ) + it_can_render = await await_me_maybe(it_can_render) + if it_can_render: + renderers[key] = datasette.urls.path( + path_with_format(request=request, format=key) + ) + + allow_execute_sql = await datasette.permission_allowed( + request.actor, "execute-sql", database + ) + show_hide_hidden = "" - if metadata.get("hide_sql"): + if canned_query and canned_query.get("hide_sql"): if bool(params.get("_show_sql")): show_hide_link = path_with_removed_args(request, {"_show_sql"}) show_hide_text = "hide" @@ -855,42 +639,86 @@ class QueryView(DataView): show_hide_link = path_with_added_args(request, {"_hide_sql": 1}) show_hide_text = "hide" hide_sql = show_hide_text == "show" - return { - "display_rows": display_rows, - "custom_sql": True, - "named_parameter_values": named_parameter_values, - "editable": editable, - "canned_query": canned_query, - "edit_sql_url": edit_sql_url, - "metadata": metadata, - "settings": self.ds.settings_dict(), - "request": request, - "show_hide_link": self.ds.urls.path(show_hide_link), - "show_hide_text": show_hide_text, - "show_hide_hidden": markupsafe.Markup(show_hide_hidden), - "hide_sql": hide_sql, - 
"table_columns": await _table_columns(self.ds, database) - if allow_execute_sql - else {}, - } - return ( - { - "ok": not query_error, - "database": database, - "query_name": canned_query, - "rows": results.rows if results else [], - "truncated": results.truncated if results else False, - "columns": columns, - "query": {"sql": sql, "params": params}, - "error": str(query_error) if query_error else None, - "private": private, - "allow_execute_sql": allow_execute_sql, - }, - extra_template, - templates, - 400 if query_error else 200, - ) + # Show 'Edit SQL' button only if: + # - User is allowed to execute SQL + # - SQL is an approved SELECT statement + # - No magic parameters, so no :_ in the SQL string + edit_sql_url = None + is_validated_sql = False + try: + validate_sql_select(sql) + is_validated_sql = True + except InvalidSql: + pass + if allow_execute_sql and is_validated_sql and ":_" not in sql: + edit_sql_url = ( + datasette.urls.database(database) + + "?" + + urlencode( + { + **{ + "sql": sql, + }, + **named_parameter_values, + } + ) + ) + + r = Response.html( + await datasette.render_template( + template, + QueryContext( + database=database, + query={ + "sql": sql, + "params": params, + }, + canned_query=canned_query["name"] if canned_query else None, + private=private, + canned_query_write=canned_query_write, + db_is_immutable=not db.is_mutable, + error=query_error, + hide_sql=hide_sql, + show_hide_link=datasette.urls.path(show_hide_link), + show_hide_text=show_hide_text, + editable=not canned_query, + allow_execute_sql=allow_execute_sql, + tables=await get_tables(datasette, request, db), + named_parameter_values=named_parameter_values, + edit_sql_url=edit_sql_url, + display_rows=await display_rows( + datasette, database, request, rows, columns + ), + table_columns=await _table_columns(datasette, database) + if allow_execute_sql + else {}, + columns=columns, + renderers=renderers, + url_csv=datasette.urls.path( + path_with_format( + request=request, format="csv", extra_qs={"_size": "max"} + ) + ), + show_hide_hidden=markupsafe.Markup(show_hide_hidden), + metadata=canned_query or metadata, + database_color=lambda _: "#ff0000", + alternate_url_json=alternate_url_json, + select_templates=[ + f"{'*' if template_name == template.name else ''}{template_name}" + for template_name in templates + ], + ), + request=request, + view_name="database", + ), + headers=headers, + ) + else: + assert False, "Invalid format: {}".format(format_) + if datasette.cors: + add_cors_headers(r.headers) + return r class MagicParameters(dict): diff --git a/datasette/views/table.py b/datasette/views/table.py index 77acfd95..28264e92 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -9,7 +9,6 @@ import markupsafe from datasette.plugins import pm from datasette.database import QueryInterrupted from datasette import tracer -from datasette.renderer import json_renderer from datasette.utils import ( add_cors_headers, await_me_maybe, @@ -21,7 +20,6 @@ from datasette.utils import ( tilde_encode, escape_sqlite, filters_should_redirect, - format_bytes, is_url, path_from_row_pks, path_with_added_args, @@ -38,7 +36,7 @@ from datasette.utils import ( from datasette.utils.asgi import BadRequest, Forbidden, NotFound, Response from datasette.filters import Filters import sqlite_utils -from .base import BaseView, DataView, DatasetteError, ureg, _error, stream_csv +from .base import BaseView, DatasetteError, ureg, _error, stream_csv from .database import QueryView LINK_WITH_LABEL = ( @@ -698,57 +696,6 @@ 
async def table_view(datasette, request): return response -class CannedQueryView(DataView): - def __init__(self, datasette): - self.ds = datasette - - async def post(self, request): - from datasette.app import TableNotFound - - try: - await self.ds.resolve_table(request) - except TableNotFound as e: - # Was this actually a canned query? - canned_query = await self.ds.get_canned_query( - e.database_name, e.table, request.actor - ) - if canned_query: - # Handle POST to a canned query - return await QueryView(self.ds).data( - request, - canned_query["sql"], - metadata=canned_query, - editable=False, - canned_query=e.table, - named_parameters=canned_query.get("params"), - write=bool(canned_query.get("write")), - ) - - return Response.text("Method not allowed", status=405) - - async def data(self, request, **kwargs): - from datasette.app import TableNotFound - - try: - await self.ds.resolve_table(request) - except TableNotFound as not_found: - canned_query = await self.ds.get_canned_query( - not_found.database_name, not_found.table, request.actor - ) - if canned_query: - return await QueryView(self.ds).data( - request, - canned_query["sql"], - metadata=canned_query, - editable=False, - canned_query=not_found.table, - named_parameters=canned_query.get("params"), - write=bool(canned_query.get("write")), - ) - else: - raise - - async def table_view_traced(datasette, request): from datasette.app import TableNotFound @@ -761,10 +708,7 @@ async def table_view_traced(datasette, request): ) # If this is a canned query, not a table, then dispatch to QueryView instead if canned_query: - if request.method == "POST": - return await CannedQueryView(datasette).post(request) - else: - return await CannedQueryView(datasette).get(request) + return await QueryView()(request, datasette) else: raise diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index d6a88733..e9ad3239 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -95,12 +95,12 @@ def test_insert(canned_write_client): csrftoken_from=True, cookies={"foo": "bar"}, ) - assert 302 == response.status - assert "/data/add_name?success" == response.headers["Location"] messages = canned_write_client.ds.unsign( response.cookies["ds_messages"], "messages" ) - assert [["Query executed, 1 row affected", 1]] == messages + assert messages == [["Query executed, 1 row affected", 1]] + assert response.status == 302 + assert response.headers["Location"] == "/data/add_name?success" @pytest.mark.parametrize( @@ -382,11 +382,11 @@ def test_magic_parameters_cannot_be_used_in_arbitrary_queries(magic_parameters_c def test_canned_write_custom_template(canned_write_client): response = canned_write_client.get("/data/update_name") assert response.status == 200 + assert "!!!CUSTOM_UPDATE_NAME_TEMPLATE!!!" in response.text assert ( "" in response.text ) - assert "!!!CUSTOM_UPDATE_NAME_TEMPLATE!!!" in response.text # And test for link rel=alternate while we're here: assert ( '' From 8920d425f4d417cfd998b61016c5ff3530cd34e1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 10:20:58 -0700 Subject: [PATCH 1629/2113] 1.0a3 release notes, smaller changes section - refs #2135 --- docs/changelog.rst | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index ee48d075..b4416f94 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,25 @@ Changelog ========= +.. 
_v1_0_a3:
+
+1.0a3 (2023-08-09)
+------------------
+
+This alpha release previews the updated design for Datasette's default JSON API.
+
+Smaller changes
+~~~~~~~~~~~~~~~
+
+- Datasette documentation now shows YAML examples for :ref:`metadata` by default, with a tab interface for switching to JSON. (:issue:`1153`)
+- :ref:`plugin_register_output_renderer` plugins now have access to ``error`` and ``truncated`` arguments, allowing them to display error messages and take into account truncated results. (:issue:`2130`)
+- ``render_cell()`` plugin hook now also supports an optional ``request`` argument. (:issue:`2007`)
+- New ``Justfile`` to support development workflows for Datasette using `Just <https://github.com/casey/just>`__.
+- ``datasette.render_template()`` can now accept a ``datasette.views.Context`` subclass as an alternative to a dictionary. (:issue:`2127`)
+- ``datasette install -e path`` option for editable installations, useful while developing plugins. (:issue:`2106`)
+- When started with the ``--cors`` option Datasette now serves an ``Access-Control-Max-Age: 3600`` header, ensuring CORS OPTIONS requests are repeated no more than once an hour. (:issue:`2079`)
+- Fixed a bug where the ``_internal`` database could display ``None`` instead of ``null`` for in-memory databases. (:issue:`1970`)
+
 .. _v0_64_2:

 0.64.2 (2023-03-08)

From e34d09c6ec16ff5e7717e112afdad67f7c05a62a Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 9 Aug 2023 12:01:59 -0700
Subject: [PATCH 1630/2113] Don't include columns in query JSON, refs #2136

---
 datasette/renderer.py       |  8 +++++++-
 datasette/views/database.py |  2 +-
 tests/test_api.py           |  1 -
 tests/test_cli_serve_get.py | 11 ++++++-----
 4 files changed, 14 insertions(+), 8 deletions(-)

diff --git a/datasette/renderer.py b/datasette/renderer.py
index 0bd74e81..224031a7 100644
--- a/datasette/renderer.py
+++ b/datasette/renderer.py
@@ -27,7 +27,7 @@ def convert_specific_columns_to_json(rows, columns, json_cols):
     return new_rows


-def json_renderer(args, data, error, truncated=None):
+def json_renderer(request, args, data, error, truncated=None):
     """Render a response as JSON"""

     status_code = 200
@@ -106,6 +106,12 @@ def json_renderer(args, data, error, truncated=None):
             "status": 400,
             "title": None,
         }
+
+    # Don't include "columns" in output
+    # https://github.com/simonw/datasette/issues/2136
+    if isinstance(data, dict) and "columns" not in request.args.getlist("_extra"):
+        data.pop("columns", None)
+
     # Handle _nl option for _shape=array
     nl = args.get("_nl", "")
     if nl and shape == "array":
diff --git a/datasette/views/database.py b/datasette/views/database.py
index 658c35e6..cf76f3c2 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -548,7 +548,7 @@ class QueryView(View):
                 error=query_error,
                 # These will be deprecated in Datasette 1.0:
                 args=request.args,
-                data={"rows": rows, "columns": columns},
+                data={"ok": True, "rows": rows, "columns": columns},
             )
             if asyncio.iscoroutine(result):
                 result = await result
diff --git a/tests/test_api.py b/tests/test_api.py
index 28415a0b..f96f571e 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -649,7 +649,6 @@ async def test_custom_sql(ds_client):
         {"content": "RENDER_CELL_DEMO"},
         {"content": "RENDER_CELL_ASYNC"},
     ],
-    "columns": ["content"],
     "ok": True,
     "truncated": False,
 }
diff --git a/tests/test_cli_serve_get.py b/tests/test_cli_serve_get.py
index 2e0390bb..dc7fc1e2 100644
--- a/tests/test_cli_serve_get.py
+++ b/tests/test_cli_serve_get.py
@@ -34,11 +34,12 @@ def test_serve_with_get(tmp_path_factory):
"/_memory.json?sql=select+sqlite_version()", ], ) - assert 0 == result.exit_code, result.output - assert { - "truncated": False, - "columns": ["sqlite_version()"], - }.items() <= json.loads(result.output).items() + assert result.exit_code == 0, result.output + data = json.loads(result.output) + # Should have a single row with a single column + assert len(data["rows"]) == 1 + assert list(data["rows"][0].keys()) == ["sqlite_version()"] + assert set(data.keys()) == {"rows", "ok", "truncated"} # The plugin should have created hello.txt assert (plugins_dir / "hello.txt").read_text() == "hello" From 856ca68d94708c6e94673cb6bc28bf3e3ca17845 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 12:04:40 -0700 Subject: [PATCH 1631/2113] Update default JSON representation docs, refs #2135 --- docs/json_api.rst | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/docs/json_api.rst b/docs/json_api.rst index c273c2a8..16b997eb 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -9,10 +9,10 @@ through the Datasette user interface can also be accessed as JSON via the API. To access the API for a page, either click on the ``.json`` link on that page or edit the URL and add a ``.json`` extension to it. -.. _json_api_shapes: +.. _json_api_default: -Different shapes ----------------- +Default representation +---------------------- The default JSON representation of data from a SQLite table or custom query looks like this: @@ -21,7 +21,6 @@ looks like this: { "ok": true, - "next": null, "rows": [ { "id": 3, @@ -39,13 +38,22 @@ looks like this: "id": 1, "name": "San Francisco" } - ] + ], + "truncated": false } -The ``rows`` key is a list of objects, each one representing a row. ``next`` indicates if -there is another page, and ``ok`` is always ``true`` if an error did not occur. +``"ok"`` is always ``true`` if an error did not occur. -If ``next`` is present then the next page in the pagination set can be retrieved using ``?_next=VALUE``. +The ``"rows"`` key is a list of objects, each one representing a row. + +The ``"truncated"`` key lets you know if the query was truncated. This can happen if a SQL query returns more than 1,000 results (or the :ref:`setting_max_returned_rows` setting). + +For table pages, an additional key ``"next"`` may be present. This indicates that the next page in the pagination set can be retrieved using ``?_next=VALUE``. + +.. _json_api_shapes: + +Different shapes +---------------- The ``_shape`` parameter can be used to access alternative formats for the ``rows`` key which may be more convenient for your application. There are three From 90cb9ca58d910f49e8f117bbdd94df6f0855cf99 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 12:11:16 -0700 Subject: [PATCH 1632/2113] JSON changes in release notes, refs #2135 --- docs/changelog.rst | 35 ++++++++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b4416f94..4c70855b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -9,7 +9,40 @@ Changelog 1.0a3 (2023-08-09) ------------------ -This alpha release previews the updated design for Datasette's default JSON API. +This alpha release previews the updated design for Datasette's default JSON API. (:issue:`782`) + +The new :ref:`default JSON representation ` for both table pages (``/dbname/table.json``) and arbitrary SQL queries (``/dbname.json?sql=...``) is now shaped like this: + +.. 
code-block:: json + + { + "ok": true, + "rows": [ + { + "id": 3, + "name": "Detroit" + }, + { + "id": 2, + "name": "Los Angeles" + }, + { + "id": 4, + "name": "Memnonia" + }, + { + "id": 1, + "name": "San Francisco" + } + ], + "truncated": false + } + +Tables will include an additional ``"next"`` key for pagination, which can be passed to ``?_next=`` to fetch the next page of results. + +The various ``?_shape=`` options continue to work as before - see :ref:`json_api_shapes` for details. + +A new ``?_extra=`` mechanism is available for tables, but has not yet been stabilized or documented. Details on that are available in :issue:`262`. Smaller changes ~~~~~~~~~~~~~~~ From 19ab4552e212c9845a59461cc73e82d5ae8c278a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 12:13:11 -0700 Subject: [PATCH 1633/2113] Release 1.0a3 Closes #2135 Refs #262, #782, #1153, #1970, #2007, #2079, #2106, #2127, #2130 --- datasette/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 3b81ab21..61dee464 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "1.0a2" +__version__ = "1.0a3" __version_info__ = tuple(__version__.split(".")) From 4a42476bb7ce4c5ed941f944115dedd9bce34656 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 15:04:16 -0700 Subject: [PATCH 1634/2113] datasette plugins --requirements, closes #2133 --- datasette/cli.py | 12 ++++++++++-- docs/cli-reference.rst | 1 + docs/plugins.rst | 32 ++++++++++++++++++++++++++++---- tests/test_cli.py | 3 +++ 4 files changed, 42 insertions(+), 6 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 32266888..21fd25d6 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -223,15 +223,23 @@ pm.hook.publish_subcommand(publish=publish) @cli.command() @click.option("--all", help="Include built-in default plugins", is_flag=True) +@click.option( + "--requirements", help="Output requirements.txt of installed plugins", is_flag=True +) @click.option( "--plugins-dir", type=click.Path(exists=True, file_okay=False, dir_okay=True), help="Path to directory containing custom plugins", ) -def plugins(all, plugins_dir): +def plugins(all, requirements, plugins_dir): """List currently installed plugins""" app = Datasette([], plugins_dir=plugins_dir) - click.echo(json.dumps(app._plugins(all=all), indent=4)) + if requirements: + for plugin in app._plugins(): + if plugin["version"]: + click.echo("{}=={}".format(plugin["name"], plugin["version"])) + else: + click.echo(json.dumps(app._plugins(all=all), indent=4)) @cli.command() diff --git a/docs/cli-reference.rst b/docs/cli-reference.rst index 2177fc9e..7a96d311 100644 --- a/docs/cli-reference.rst +++ b/docs/cli-reference.rst @@ -282,6 +282,7 @@ Output JSON showing all currently installed plugins, their versions, whether the Options: --all Include built-in default plugins + --requirements Output requirements.txt of installed plugins --plugins-dir DIRECTORY Path to directory containing custom plugins --help Show this message and exit. diff --git a/docs/plugins.rst b/docs/plugins.rst index 979f94dd..19bfdd0c 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -90,7 +90,12 @@ You can see a list of installed plugins by navigating to the ``/-/plugins`` page You can also use the ``datasette plugins`` command:: - $ datasette plugins + datasette plugins + +Which outputs: + +.. 
code-block:: json + [ { "name": "datasette_json_html", @@ -107,7 +112,8 @@ You can also use the ``datasette plugins`` command:: cog.out("\n") result = CliRunner().invoke(cli.cli, ["plugins", "--all"]) # cog.out() with text containing newlines was unindenting for some reason - cog.outl("If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette::\n") + cog.outl("If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette:\n") + cog.outl(".. code-block:: json\n") plugins = [p for p in json.loads(result.output) if p["name"].startswith("datasette.")] indented = textwrap.indent(json.dumps(plugins, indent=4), " ") for line in indented.split("\n"): @@ -115,7 +121,9 @@ You can also use the ``datasette plugins`` command:: cog.out("\n\n") .. ]]] -If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette:: +If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette: + +.. code-block:: json [ { @@ -236,6 +244,22 @@ If you run ``datasette plugins --all`` it will include default plugins that ship You can add the ``--plugins-dir=`` option to include any plugins found in that directory. +Add ``--requirements`` to output a list of installed plugins that can then be installed in another Datasette instance using ``datasette install -r requirements.txt``:: + + datasette plugins --requirements + +The output will look something like this:: + + datasette-codespaces==0.1.1 + datasette-graphql==2.2 + datasette-json-html==1.0.1 + datasette-pretty-json==0.2.2 + datasette-x-forwarded-host==0.1 + +To write that to a ``requirements.txt`` file, run this:: + + datasette plugins --requirements > requirements.txt + .. _plugins_configuration: Plugin configuration @@ -390,7 +414,7 @@ Any values embedded in ``metadata.yaml`` will be visible to anyone who views the If you are publishing your data using the :ref:`datasette publish ` family of commands, you can use the ``--plugin-secret`` option to set these secrets at publish time. 
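At runtime a plugin reads those values back with ``datasette.plugin_config()``, which resolves the secret indirection before handing the configuration to the plugin. A minimal sketch - the plugin and key names here are illustrative, not taken from the patch:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def extra_template_vars(datasette):
        # plugin_config() returns the configuration dictionary for the named
        # plugin, with any secret values already resolved to plain strings
        config = datasette.plugin_config("datasette-auth-github") or {}
        return {"github_client_id": config.get("client_id")}

The secret values themselves are supplied at publish time using ``--plugin-secret``.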
For example, using Heroku you might run the following command:: - $ datasette publish heroku my_database.db \ + datasette publish heroku my_database.db \ --name my-heroku-app-demo \ --install=datasette-auth-github \ --plugin-secret datasette-auth-github client_id your_client_id \ diff --git a/tests/test_cli.py b/tests/test_cli.py index 75724f61..056e2821 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -108,6 +108,9 @@ def test_plugins_cli(app_client): assert set(names).issuperset({p["name"] for p in EXPECTED_PLUGINS}) # And the following too: assert set(names).issuperset(DEFAULT_PLUGINS) + # --requirements should be empty because there are no installed non-plugins-dir plugins + result3 = runner.invoke(cli, ["plugins", "--requirements"]) + assert result3.output == "" def test_metadata_yaml(): From a3593c901580ea50854c3e0774b0ba0126e8a76f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 17:32:07 -0700 Subject: [PATCH 1635/2113] on_success_message_sql, closes #2138 --- datasette/views/database.py | 29 ++++++++++++++++---- docs/sql_queries.rst | 21 ++++++++++---- tests/test_canned_queries.py | 53 +++++++++++++++++++++++++++++++----- 3 files changed, 85 insertions(+), 18 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index cf76f3c2..79b3f88d 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -360,6 +360,10 @@ class QueryView(View): params[key] = str(value) else: params = dict(parse_qsl(body, keep_blank_values=True)) + + # Don't ever send csrftoken as a SQL parameter + params.pop("csrftoken", None) + # Should we return JSON? should_return_json = ( request.headers.get("accept") == "application/json" @@ -371,12 +375,27 @@ class QueryView(View): redirect_url = None try: cursor = await db.execute_write(canned_query["sql"], params_for_query) - message = canned_query.get( - "on_success_message" - ) or "Query executed, {} row{} affected".format( - cursor.rowcount, "" if cursor.rowcount == 1 else "s" - ) + # success message can come from on_success_message or on_success_message_sql + message = None message_type = datasette.INFO + on_success_message_sql = canned_query.get("on_success_message_sql") + if on_success_message_sql: + try: + message_result = ( + await db.execute(on_success_message_sql, params_for_query) + ).first() + if message_result: + message = message_result[0] + except Exception as ex: + message = "Error running on_success_message_sql: {}".format(ex) + message_type = datasette.ERROR + if not message: + message = canned_query.get( + "on_success_message" + ) or "Query executed, {} row{} affected".format( + cursor.rowcount, "" if cursor.rowcount == 1 else "s" + ) + redirect_url = canned_query.get("on_success_redirect") ok = True except Exception as ex: diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index 3c2cb228..1ae07e1f 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -392,6 +392,7 @@ This configuration will create a page at ``/mydatabase/add_name`` displaying a f You can customize how Datasette represents success and errors using the following optional properties: - ``on_success_message`` - the message shown when a query is successful +- ``on_success_message_sql`` - alternative to ``on_success_message``: a SQL query that should be executed to generate the message - ``on_success_redirect`` - the path or URL the user is redirected to on success - ``on_error_message`` - the message shown when a query throws an error - ``on_error_redirect`` - the path or URL the user is 
redirected to on error
@@ -405,11 +406,12 @@ For example:

         "queries": {
             "add_name": {
                 "sql": "INSERT INTO names (name) VALUES (:name)",
+                "params": ["name"],
                 "write": True,
-                "on_success_message": "Name inserted",
+                "on_success_message_sql": "select 'Name inserted: ' || :name",
                 "on_success_redirect": "/mydatabase/names",
                 "on_error_message": "Name insert failed",
-                "on_error_redirect": "/mydatabase"
+                "on_error_redirect": "/mydatabase",
             }
         }
     }
@@ -426,8 +428,10 @@ For example:
     queries:
         add_name:
             sql: INSERT INTO names (name) VALUES (:name)
+            params:
+            - name
             write: true
-            on_success_message: Name inserted
+            on_success_message_sql: 'select ''Name inserted: '' || :name'
             on_success_redirect: /mydatabase/names
             on_error_message: Name insert failed
             on_error_redirect: /mydatabase
@@ -443,8 +447,11 @@ For example:
             "queries": {
                 "add_name": {
                     "sql": "INSERT INTO names (name) VALUES (:name)",
+                    "params": [
+                        "name"
+                    ],
                     "write": true,
-                    "on_success_message": "Name inserted",
+                    "on_success_message_sql": "select 'Name inserted: ' || :name",
                     "on_success_redirect": "/mydatabase/names",
                     "on_error_message": "Name insert failed",
                     "on_error_redirect": "/mydatabase"
                 }
             }
         }
     }

 .. [[[end]]]

-You can use ``"params"`` to explicitly list the named parameters that should be displayed as form fields - otherwise they will be automatically detected.
+You can use ``"params"`` to explicitly list the named parameters that should be displayed as form fields - otherwise they will be automatically detected. ``"params"`` is not necessary in the above example, since without it ``"name"`` would be automatically detected from the query.

 You can pre-populate form fields when the page first loads using a query string, e.g. ``/mydatabase/add_name?name=Prepopulated``. The user will have to submit the form to execute the query.

+If you specify a query in ``"on_success_message_sql"``, that query will be executed after the main query. The first column of the first row returned by that query will be displayed as a success message. Named parameters from the main query will be made available to the success message query as well.
+
 .. _canned_queries_magic_parameters:

 Magic parameters
@@ -589,7 +598,7 @@ The JSON response will look like this:
     "redirect": "/data/add_name"
   }

-The ``"message"`` and ``"redirect"`` values here will take into account ``on_success_message``, ``on_success_redirect``, ``on_error_message`` and ``on_error_redirect``, if they have been set.
+The ``"message"`` and ``"redirect"`` values here will take into account ``on_success_message``, ``on_success_message_sql``, ``on_success_redirect``, ``on_error_message`` and ``on_error_redirect``, if they have been set.
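A client script can drive a writable canned query through this JSON API and inspect the outcome. A minimal sketch using ``httpx``, assuming a local instance on the default port serving the ``add_name`` query configured above:

.. code-block:: python

    import httpx

    # _json=1 requests a JSON response instead of a redirect; sending an
    # "accept: application/json" header achieves the same thing
    response = httpx.post(
        "http://localhost:8001/mydatabase/add_name",
        data={"name": "Cleo", "_json": "1"},
    )
    result = response.json()
    if result["ok"]:
        print(result["message"])  # "Name inserted: Cleo"
    else:
        print("Query failed:", result["message"])

.. 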
_pagination: diff --git a/tests/test_canned_queries.py b/tests/test_canned_queries.py index e9ad3239..5256c24c 100644 --- a/tests/test_canned_queries.py +++ b/tests/test_canned_queries.py @@ -31,9 +31,15 @@ def canned_write_client(tmpdir): }, "add_name_specify_id": { "sql": "insert into names (rowid, name) values (:rowid, :name)", + "on_success_message_sql": "select 'Name added: ' || :name || ' with rowid ' || :rowid", "write": True, "on_error_redirect": "/data/add_name_specify_id?error", }, + "add_name_specify_id_with_error_in_on_success_message_sql": { + "sql": "insert into names (rowid, name) values (:rowid, :name)", + "on_success_message_sql": "select this is bad SQL", + "write": True, + }, "delete_name": { "sql": "delete from names where rowid = :rowid", "write": True, @@ -179,6 +185,34 @@ def test_insert_error(canned_write_client): ) +def test_on_success_message_sql(canned_write_client): + response = canned_write_client.post( + "/data/add_name_specify_id", + {"rowid": 5, "name": "Should be OK"}, + csrftoken_from=True, + ) + assert response.status == 302 + assert response.headers["Location"] == "/data/add_name_specify_id" + messages = canned_write_client.ds.unsign( + response.cookies["ds_messages"], "messages" + ) + assert messages == [["Name added: Should be OK with rowid 5", 1]] + + +def test_error_in_on_success_message_sql(canned_write_client): + response = canned_write_client.post( + "/data/add_name_specify_id_with_error_in_on_success_message_sql", + {"rowid": 1, "name": "Should fail"}, + csrftoken_from=True, + ) + messages = canned_write_client.ds.unsign( + response.cookies["ds_messages"], "messages" + ) + assert messages == [ + ["Error running on_success_message_sql: no such column: bad", 3] + ] + + def test_custom_params(canned_write_client): response = canned_write_client.get("/data/update_name?extra=foo") assert '' in response.text @@ -232,21 +266,22 @@ def test_canned_query_permissions_on_database_page(canned_write_client): query_names = { q["name"] for q in canned_write_client.get("/data.json").json["queries"] } - assert { + assert query_names == { + "add_name_specify_id_with_error_in_on_success_message_sql", + "from_hook", + "update_name", + "add_name_specify_id", + "from_async_hook", "canned_read", "add_name", - "add_name_specify_id", - "update_name", - "from_async_hook", - "from_hook", - } == query_names + } # With auth shows four response = canned_write_client.get( "/data.json", cookies={"ds_actor": canned_write_client.actor_cookie({"id": "root"})}, ) - assert 200 == response.status + assert response.status == 200 query_names_and_private = sorted( [ {"name": q["name"], "private": q["private"]} @@ -257,6 +292,10 @@ def test_canned_query_permissions_on_database_page(canned_write_client): assert query_names_and_private == [ {"name": "add_name", "private": False}, {"name": "add_name_specify_id", "private": False}, + { + "name": "add_name_specify_id_with_error_in_on_success_message_sql", + "private": False, + }, {"name": "canned_read", "private": False}, {"name": "delete_name", "private": True}, {"name": "from_async_hook", "private": False}, From 33251d04e78d575cca62bb59069bb43a7d924746 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Aug 2023 17:56:27 -0700 Subject: [PATCH 1636/2113] Canned query write counters demo, refs #2134 --- .github/workflows/deploy-latest.yml | 30 +++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index ed60376c..4746aa07 100644 --- 
a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml
--- a/.github/workflows/deploy-latest.yml
+++ b/.github/workflows/deploy-latest.yml
@@ -57,6 +57,36 @@ jobs:
         db.route = "alternative-route"
         ' > plugins/alternative_route.py
         cp fixtures.db fixtures2.db
+      - name: And the counters writable canned query demo
+        run: |
+          cat > plugins/counters.py <<EOF
+          # counters.py plugin: registers writable canned queries that
+          # increment named counters (source not preserved in this copy)
+          EOF

From: Simon Willison
Date: Thu, 10 Aug 2023 22:16:19 -0700
Subject: [PATCH 1637/2113] Fixed display of database color

Closes #2139, closes #2119
---
 datasette/database.py             |  7 +++++++
 datasette/templates/database.html |  2 +-
 datasette/templates/query.html    |  2 +-
 datasette/templates/row.html      |  2 +-
 datasette/templates/table.html    |  2 +-
 datasette/views/base.py           |  4 ----
 datasette/views/database.py       |  8 +++-----
 datasette/views/index.py          |  4 +---
 datasette/views/row.py            |  4 +++-
 datasette/views/table.py          |  2 +-
 tests/test_html.py                | 20 ++++++++++++++++++++
 11 files changed, 39 insertions(+), 18 deletions(-)

diff --git a/datasette/database.py b/datasette/database.py
index d8043c24..af39ac9e 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -1,6 +1,7 @@
 import asyncio
 from collections import namedtuple
 from pathlib import Path
+import hashlib
 import janus
 import queue
 import sys
@@ -62,6 +63,12 @@ class Database:
         }
         return self._cached_table_counts

+    @property
+    def color(self):
+        if self.hash:
+            return self.hash[:6]
+        return hashlib.md5(self.name.encode("utf8")).hexdigest()[:6]
+
     def suggest_name(self):
         if self.path:
             return Path(self.path).stem
diff --git a/datasette/templates/database.html b/datasette/templates/database.html
index 7acf0369..3d4dae07 100644
--- a/datasette/templates/database.html
+++ b/datasette/templates/database.html
@@ -10,7 +10,7 @@
 {% block body_class %}db db-{{ database|to_css_class }}{% endblock %}

 {% block content %}
-<div class="page-header" style="border-color: #{{ database_color }}">