From 40885ef24e32d91502b6b8bbad1c7376f50f2830 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 28 May 2020 07:41:22 -0700 Subject: [PATCH 0001/1871] Noted tool for converting release notes to Markdown --- docs/contributing.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 48930332..567c4f47 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -145,12 +145,12 @@ To release a new version, first create a commit that updates :ref:`the changelog For non-bugfix releases you may want to update the news section of ``README.md`` as part of the same commit. -Wait long enough for Travis to build and deploy the demo version of that commit (otherwise the tag deployment may fail to alias to it properly). Then run the following:: +To tag and push the releaes, run the following:: git tag 0.25.2 git push --tags Final steps once the release has deployed to https://pypi.org/project/datasette/ -* Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases +* Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases - you can convert the release notes to Markdown by copying and pasting the rendered HTML into this tool: https://euangoddard.github.io/clipboard2markdown/ * Manually kick off a build of the `stable` branch on Read The Docs: https://readthedocs.org/projects/datasette/builds/ From 7bb30c1f11f7246baf7bb6a229f6b93572c4cbe3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 28 May 2020 10:09:32 -0700 Subject: [PATCH 0002/1871] request.url now respects force_https_urls, closes #781 --- datasette/app.py | 7 +++++++ tests/plugins/my_plugin_2.py | 3 +++ tests/test_api.py | 4 ++++ 3 files changed, 14 insertions(+) diff --git a/datasette/app.py b/datasette/app.py index 40d39ac9..07190c16 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -786,6 +786,13 @@ class DatasetteRouter(AsgiRouter): base_url = self.ds.config("base_url") if base_url != "/" and path.startswith(base_url): path = "/" + path[len(base_url) :] + # Apply force_https_urls, if set + if ( + self.ds.config("force_https_urls") + and scope["type"] == "http" + and scope.get("scheme") != "https" + ): + scope = dict(scope, scheme="https") return await super().route_path(scope, receive, send, path) async def handle_404(self, scope, receive, send, exception=None): diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index fdc6956d..c9e7c78f 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -46,6 +46,9 @@ def render_cell(value, database): @hookimpl def extra_template_vars(template, database, table, view_name, request, datasette): + # This helps unit tests that want to run assertions against the request object: + datasette._last_request = request + async def query_database(sql): first_db = list(datasette.databases.keys())[0] return (await datasette.execute(first_db, sql)).rows[0][0] diff --git a/tests/test_api.py b/tests/test_api.py index eb80f8e7..d7e7c03f 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1676,6 +1676,10 @@ def test_config_force_https_urls(): "toggle_url" ].startswith("https://") assert response.json["suggested_facets"][0]["toggle_url"].startswith("https://") + # Also confirm that request.url and request.scheme are set correctly + response = client.get("/") + assert client.ds._last_request.url.startswith("https://") + assert client.ds._last_request.scheme == "https" def 
test_infinity_returned_as_null(app_client): From 21a8ffc82dcf5e8e5f484ce39ee9713f959e0ad5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 28 May 2020 10:49:58 -0700 Subject: [PATCH 0003/1871] Tip about referencing issues in release notes commit --- docs/contributing.rst | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 567c4f47..da4dc35a 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -137,12 +137,16 @@ We increment ``minor`` for new features. We increment ``patch`` for bugfix releass. -To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__:: +To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__:: # Update changelog - git commit -m "Release 0.25.2" -a + git commit -m "Release notes for 0.43 + + Refs #581, #770, #729, #706, #751, #706, #744, #771, #773" -a git push +Referencing the issues that are part of the release in the commit message ensures the name of the release shows up on those issue pages, e.g. `here `__. + For non-bugfix releases you may want to update the news section of ``README.md`` as part of the same commit. To tag and push the releaes, run the following:: From 3c1a60589e14849344acd8aa6da0a60b40fbfc60 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 28 May 2020 11:27:24 -0700 Subject: [PATCH 0004/1871] Consistent capitalization of SpatiaLite in the docs --- docs/changelog.rst | 2 +- docs/installation.rst | 2 +- docs/metadata.rst | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 8f375dd1..8b6272cb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -947,7 +947,7 @@ request all rows where that column is less than 50 meters or more than 20 feet f 404s for missing tables/databases closes `#184 `_ - long_description in markdown for the new PyPI -- Hide Spatialite system tables. [Russ Garrett] +- Hide SpatiaLite system tables. [Russ Garrett] - Allow ``explain select`` / ``explain query plan select`` `#201 `_ - Datasette inspect now finds primary_keys `#195 `_ - Ability to sort using form fields (for mobile portrait mode) `#199 `_ diff --git a/docs/installation.rst b/docs/installation.rst index cdf1467a..aacfed1d 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -149,7 +149,7 @@ To upgrade to the most recent release of Datasette, run the following:: docker pull datasetteproject/datasette -Loading Spatialite +Loading SpatiaLite ~~~~~~~~~~~~~~~~~~ The ``datasetteproject/datasette`` image includes a recent version of the diff --git a/docs/metadata.rst b/docs/metadata.rst index 88ad5854..18766bac 100644 --- a/docs/metadata.rst +++ b/docs/metadata.rst @@ -260,7 +260,7 @@ Hiding tables ------------- You can hide tables from the database listing view (in the same way that FTS and -Spatialite tables are automatically hidden) using ``"hidden": true``: +SpatiaLite tables are automatically hidden) using ``"hidden": true``: .. 
code-block:: json From 3e8932bf6443bd5168f22d559597aed619205995 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 29 May 2020 15:12:10 -0700 Subject: [PATCH 0005/1871] Upgrade to actions/cache@v2 --- .github/workflows/deploy-latest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 33490972..fd53f754 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -15,7 +15,7 @@ jobs: uses: actions/setup-python@v1 with: python-version: 3.8 - - uses: actions/cache@v1 + - uses: actions/cache@v2 name: Configure pip caching with: path: ~/.cache/pip From 7ccd55a1638d7d2762f2789f192e5bb81fb0d0c7 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 28 May 2020 11:54:57 -0700 Subject: [PATCH 0006/1871] Views do support sorting now, refs #508 --- docs/metadata.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/metadata.rst b/docs/metadata.rst index 18766bac..024af01e 100644 --- a/docs/metadata.rst +++ b/docs/metadata.rst @@ -210,7 +210,7 @@ This will restrict sorting of ``example_table`` to just the ``height`` and You can also disable sorting entirely by setting ``"sortable_columns": []`` -By default, database views in Datasette do not support sorting. You can use ``sortable_columns`` to enable specific sort orders for a view called ``name_of_view`` in the database ``my_database`` like so: +You can use ``sortable_columns`` to enable specific sort orders for a view called ``name_of_view`` in the database ``my_database`` like so: .. code-block:: json From 84616a2364df56f966f579eecc0716b9877f0d70 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 29 May 2020 15:51:30 -0700 Subject: [PATCH 0007/1871] request.args.getlist() returns [] if missing, refs #774 Also added some unit tests for request.args --- datasette/utils/__init__.py | 4 ++-- docs/internals.rst | 2 +- tests/plugins/register_output_renderer.py | 2 +- tests/test_utils.py | 10 ++++++++++ 4 files changed, 14 insertions(+), 4 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 2dab8e14..9b4f21ba 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -761,9 +761,9 @@ class RequestParameters(dict): except (KeyError, TypeError): return default - def getlist(self, name, default=None): + def getlist(self, name): "Return full list" - return super().get(name, default) + return super().get(name) or [] class ConnectionProblem(Exception): diff --git a/docs/internals.rst b/docs/internals.rst index 5bcb9da9..bbf10cae 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -276,4 +276,4 @@ Conider the querystring ``?foo=1&foo=2``. This will produce a ``request.args`` t Calling ``request.args.get("foo")`` will return the first value, ``"1"``. If that key is not present it will return ``None`` - or the second argument if you passed one, which will be used as the default. -Calling ``request.args.getlist("foo")`` will return the full list, ``["1", "2"]``. \ No newline at end of file +Calling ``request.args.getlist("foo")`` will return the full list, ``["1", "2"]``. If you call it on a missing key it will return ``[]``. 
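For illustration, here is a minimal sketch of the ``getlist()`` behaviour described above. It mirrors the ``test_request_args`` test added in this patch and uses the ``Request.fake()`` helper from the test suite; it is not itself part of the diff::

    from datasette.utils.asgi import Request

    # Build a fake request with a repeated querystring key and a single-valued key
    request = Request.fake("/foo?multi=1&multi=2&single=3")

    assert request.args.get("multi") == "1"              # .get() returns the first value
    assert request.args.getlist("multi") == ["1", "2"]   # .getlist() returns every value
    assert request.args.getlist("missing") == []         # a missing key now gives [] rather than None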
diff --git a/tests/plugins/register_output_renderer.py b/tests/plugins/register_output_renderer.py index a9f0f157..82b60d01 100644 --- a/tests/plugins/register_output_renderer.py +++ b/tests/plugins/register_output_renderer.py @@ -26,7 +26,7 @@ async def render_test_all_parameters( datasette, columns, rows, sql, query_name, database, table, request, view_name, data ): headers = {} - for custom_header in request.args.getlist("header") or []: + for custom_header in request.args.getlist("header"): key, value = custom_header.split(":") headers[key] = value result = await datasette.databases["fixtures"].execute("select 1 + 1") diff --git a/tests/test_utils.py b/tests/test_utils.py index 59b80a67..ffb66ca5 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -448,6 +448,16 @@ async def test_request_post_vars(): assert {"foo": "bar", "baz": "1"} == await request.post_vars() +def test_request_args(): + request = Request.fake("/foo?multi=1&multi=2&single=3") + assert "1" == request.args.get("multi") + assert "3" == request.args.get("single") + assert ["1", "2"] == request.args.getlist("multi") + assert [] == request.args.getlist("missing") + with pytest.raises(KeyError): + request.args["missing"] + + def test_call_with_supported_arguments(): def foo(a, b): return "{}+{}".format(a, b) From f272cbc65fbf56368413320e21c87dc842e0a083 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 29 May 2020 15:57:46 -0700 Subject: [PATCH 0008/1871] Use request.args.getlist instead of request.args[...], refs #774 --- datasette/views/table.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index d014db71..d1d92bb1 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -319,19 +319,19 @@ class TableView(RowTableShared): if not self.ds.config("allow_sql"): raise DatasetteError("_where= is not allowed", status=400) else: - where_clauses.extend(request.args["_where"]) + where_clauses.extend(request.args.getlist("_where")) extra_wheres_for_ui = [ { "text": text, "remove_url": path_with_removed_args(request, {"_where": text}), } - for text in request.args["_where"] + for text in request.args.getlist("_where") ] # Support for ?_through={table, column, value} extra_human_descriptions = [] if "_through" in request.args: - for through in request.args["_through"]: + for through in request.args.getlist("_through"): through_data = json.loads(through) through_table = through_data["table"] other_column = through_data["column"] @@ -559,7 +559,7 @@ class TableView(RowTableShared): ) if request.args.get("_timelimit"): - extra_args["custom_time_limit"] = int(request.args["_timelimit"]) + extra_args["custom_time_limit"] = int(request.args.get("_timelimit")) results = await db.execute(sql, params, truncate=True, **extra_args) @@ -633,7 +633,7 @@ class TableView(RowTableShared): all_labels = default_labels # Check for explicit _label= if "_label" in request.args: - columns_to_expand = request.args["_label"] + columns_to_expand = request.args.getlist("_label") if columns_to_expand is None and all_labels: # expand all columns with foreign keys columns_to_expand = [fk["column"] for fk, _ in expandable_columns] @@ -746,7 +746,7 @@ class TableView(RowTableShared): if arg in special_args: form_hidden_args.append((arg, special_args[arg])) if request.args.get("_where"): - for where_text in request.args["_where"]: + for where_text in request.args.getlist("_where"): form_hidden_args.append(("_where", where_text)) # if no sort specified 
AND table has a single primary key, From 81be31322a968d23cf57cee62b58df55433385e3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 29 May 2020 16:18:01 -0700 Subject: [PATCH 0009/1871] New implementation for RequestParams - no longer subclasses dict - request.args[key] now returns first item, not all items - removed request.raw_args entirely Closes #774 --- datasette/renderer.py | 2 +- datasette/utils/__init__.py | 30 +++++++++++++++++++++++++++--- datasette/utils/asgi.py | 5 ----- datasette/views/table.py | 6 +++--- docs/internals.rst | 12 ++++++++---- tests/test_utils.py | 10 ++++++++++ 6 files changed, 49 insertions(+), 16 deletions(-) diff --git a/datasette/renderer.py b/datasette/renderer.py index 349c2922..3f921fe7 100644 --- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -32,7 +32,7 @@ def json_renderer(args, data, view_name): # Handle the _json= parameter which may modify data["rows"] json_cols = [] if "_json" in args: - json_cols = args["_json"] + json_cols = args.getlist("_json") if json_cols and "rows" in data and "columns" in data: data["rows"] = convert_specific_columns_to_json( data["rows"], data["columns"], json_cols diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 9b4f21ba..bf965413 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -753,17 +753,41 @@ def escape_fts(query): ) -class RequestParameters(dict): +class RequestParameters: + def __init__(self, data): + # data is a dictionary of key => [list, of, values] + assert isinstance(data, dict), "data should be a dictionary of key => [list]" + for key in data: + assert isinstance( + data[key], list + ), "data should be a dictionary of key => [list]" + self._data = data + + def __contains__(self, key): + return key in self._data + + def __getitem__(self, key): + return self._data[key][0] + + def keys(self): + return self._data.keys() + + def __iter__(self): + yield from self._data.keys() + + def __len__(self): + return len(self._data) + def get(self, name, default=None): "Return first value in the list, if available" try: - return super().get(name)[0] + return self._data.get(name)[0] except (KeyError, TypeError): return default def getlist(self, name): "Return full list" - return super().get(name) or [] + return self._data.get(name) or [] class ConnectionProblem(Exception): diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 62a2a0c8..24398b77 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -63,11 +63,6 @@ class Request: def args(self): return RequestParameters(parse_qs(qs=self.query_string)) - @property - def raw_args(self): - # Deprecated, undocumented - may be removed in Datasette 1.0 - return {key: value[0] for key, value in self.args.items()} - async def post_vars(self): body = [] body = b"" diff --git a/datasette/views/table.py b/datasette/views/table.py index d1d92bb1..a629346f 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -277,11 +277,11 @@ class TableView(RowTableShared): # it can still be queried using ?_col__exact=blah special_args = {} other_args = [] - for key, value in args.items(): + for key in args: if key.startswith("_") and "__" not in key: - special_args[key] = value[0] + special_args[key] = args[key] else: - for v in value: + for v in args.getlist(key): other_args.append((key, v)) # Handle ?_filter_column and redirect, if present diff --git a/docs/internals.rst b/docs/internals.rst index bbf10cae..ea015dbc 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ 
-268,12 +268,16 @@ The object also has one awaitable method: The RequestParameters class --------------------------- -This class, returned by ``request.args``, is a subclass of a Python dictionary that provides methods for working with keys that map to lists of values. +This class, returned by ``request.args``, is a dictionary-like object. -Conider the querystring ``?foo=1&foo=2``. This will produce a ``request.args`` that looks like this:: +Consider the querystring ``?foo=1&foo=2``. This will produce a ``request.args`` that looks like this:: RequestParameters({"foo": ["1", "2"]}) -Calling ``request.args.get("foo")`` will return the first value, ``"1"``. If that key is not present it will return ``None`` - or the second argument if you passed one, which will be used as the default. +``request.args["foo"]`` returns the first value, ``"1"`` - or raises ``KeyError`` if that key is missing. -Calling ``request.args.getlist("foo")`` will return the full list, ``["1", "2"]``. If you call it on a missing key it will return ``[]``. +``request.args.get("foo")`` returns ``"1"`` - or ``None`` if the key is missing. A second argument can be used to specify a different default value. + +``request.args.getlist("foo")`` returns the full list, ``["1", "2"]``. If you call it on a missing key it will return ``[]``. + +You can use ``if key in request.args`` to check if a key is present. ``for key in request.args`` will iterate through the keys, or you can use ``request.args.keys()`` to get all of the keys. diff --git a/tests/test_utils.py b/tests/test_utils.py index ffb66ca5..9d6f45b0 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -452,8 +452,18 @@ def test_request_args(): request = Request.fake("/foo?multi=1&multi=2&single=3") assert "1" == request.args.get("multi") assert "3" == request.args.get("single") + assert "1" == request.args["multi"] + assert "3" == request.args["single"] assert ["1", "2"] == request.args.getlist("multi") assert [] == request.args.getlist("missing") + assert "multi" in request.args + assert "single" in request.args + assert "missing" not in request.args + expected = ["multi", "single"] + assert expected == list(request.args.keys()) + for i, key in enumerate(request.args): + assert expected[i] == key + assert 2 == len(request.args) with pytest.raises(KeyError): request.args["missing"] From 31fb006a9b05067a8eb2f774ad3a3b15b4565924 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 07:28:29 -0700 Subject: [PATCH 0010/1871] Added datasette.get_database() method Refs #576 --- datasette/app.py | 5 +++++ docs/internals.rst | 10 ++++++++++ docs/plugins.rst | 2 +- tests/test_database.py | 3 +++ tests/test_internals_datasette.py | 23 +++++++++++++++++++++++ 5 files changed, 42 insertions(+), 1 deletion(-) create mode 100644 tests/test_internals_datasette.py diff --git a/datasette/app.py b/datasette/app.py index 07190c16..30eb3dba 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -281,6 +281,11 @@ class Datasette: self.register_renderers() + def get_database(self, name=None): + if name is None: + return next(iter(self.databases.values())) + return self.databases[name] + def add_database(self, name, db): self.databases[name] = db diff --git a/docs/internals.rst b/docs/internals.rst index ea015dbc..886cb7e7 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -44,6 +44,16 @@ This method lets you read plugin configuration values that were set in ``metadat Renders a `Jinja template `__ using Datasette's preconfigured instance of Jinja and returns the 
resulting string. The template will have access to Datasette's default template functions and any functions that have been made available by other plugins. +.. _datasette_get_database: + +.get_database(name) +------------------- + +``name`` - string, optional + The name of the database - optional. + +Returns the specified database object. Raises a ``KeyError`` if the database does not exist. Call this method without an argument to return the first connected database. + .. _datasette_add_database: .add_database(name, db) diff --git a/docs/plugins.rst b/docs/plugins.rst index b27daf3f..f08f1217 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -811,7 +811,7 @@ Here is a more complex example: .. code-block:: python async def render_demo(datasette, columns, rows): - db = next(iter(datasette.databases.values())) + db = datasette.get_database() result = await db.execute("select sqlite_version()") first_row = " | ".join(columns) lines = [first_row] diff --git a/tests/test_database.py b/tests/test_database.py index 1f1a3a7e..bd7e7666 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -1,3 +1,6 @@ +""" +Tests for the datasette.database.Database class +""" from datasette.database import Results, MultipleValues from datasette.utils import sqlite3 from .fixtures import app_client diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py new file mode 100644 index 00000000..4993250d --- /dev/null +++ b/tests/test_internals_datasette.py @@ -0,0 +1,23 @@ +""" +Tests for the datasette.app.Datasette class +""" +from .fixtures import app_client +import pytest + + +@pytest.fixture +def datasette(app_client): + return app_client.ds + + +def test_get_database(datasette): + db = datasette.get_database("fixtures") + assert "fixtures" == db.name + with pytest.raises(KeyError): + datasette.get_database("missing") + + +def test_get_database_no_argument(datasette): + # Returns the first available database: + db = datasette.get_database() + assert "fixtures" == db.name From ca56c226a9f1b02e871d7d7b392619a805b7f1ed Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 07:33:02 -0700 Subject: [PATCH 0011/1871] Renamed test_database.py to test_internals_database.py Also added a db fixture to remove some boilerplate. 
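The new fixture is a one-liner built on ``get_database()`` - a rough sketch of the pattern (the complete change is in the diff below), assuming the existing ``app_client`` fixture from the test suite::

    import pytest

    @pytest.fixture
    def db(app_client):
        # Share one lookup of the "fixtures" database across the tests in this module
        return app_client.ds.get_database("fixtures")

    @pytest.mark.asyncio
    async def test_execute(db):
        # Tests now take `db` directly instead of repeating the boilerplate lookup
        results = await db.execute("select * from facetable")
        assert len(results) == 15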
--- ...database.py => test_internals_database.py} | 45 +++++++------------ 1 file changed, 17 insertions(+), 28 deletions(-) rename tests/{test_database.py => test_internals_database.py} (80%) diff --git a/tests/test_database.py b/tests/test_internals_database.py similarity index 80% rename from tests/test_database.py rename to tests/test_internals_database.py index bd7e7666..fde7ad2c 100644 --- a/tests/test_database.py +++ b/tests/test_internals_database.py @@ -9,17 +9,20 @@ import time import uuid +@pytest.fixture +def db(app_client): + return app_client.ds.get_database("fixtures") + + @pytest.mark.asyncio -async def test_execute(app_client): - db = app_client.ds.databases["fixtures"] +async def test_execute(db): results = await db.execute("select * from facetable") assert isinstance(results, Results) assert 15 == len(results) @pytest.mark.asyncio -async def test_results_first(app_client): - db = app_client.ds.databases["fixtures"] +async def test_results_first(db): assert None is (await db.execute("select * from facetable where pk > 100")).first() results = await db.execute("select * from facetable") row = results.first() @@ -35,8 +38,7 @@ async def test_results_first(app_client): ], ) @pytest.mark.asyncio -async def test_results_single_value(app_client, query, expected): - db = app_client.ds.databases["fixtures"] +async def test_results_single_value(db, query, expected): results = await db.execute(query) if expected: assert expected == results.single_value() @@ -46,9 +48,7 @@ async def test_results_single_value(app_client, query, expected): @pytest.mark.asyncio -async def test_execute_fn(app_client): - db = app_client.ds.databases["fixtures"] - +async def test_execute_fn(db): def get_1_plus_1(conn): return conn.execute("select 1 + 1").fetchall()[0][0] @@ -63,16 +63,14 @@ async def test_execute_fn(app_client): ), ) @pytest.mark.asyncio -async def test_table_exists(app_client, tables, exists): - db = app_client.ds.databases["fixtures"] +async def test_table_exists(db, tables, exists): for table in tables: actual = await db.table_exists(table) assert exists == actual @pytest.mark.asyncio -async def test_get_all_foreign_keys(app_client): - db = app_client.ds.databases["fixtures"] +async def test_get_all_foreign_keys(db): all_foreign_keys = await db.get_all_foreign_keys() assert { "incoming": [], @@ -102,8 +100,7 @@ async def test_get_all_foreign_keys(app_client): @pytest.mark.asyncio -async def test_table_names(app_client): - db = app_client.ds.databases["fixtures"] +async def test_table_names(db): table_names = await db.table_names() assert [ "simple_primary_key", @@ -139,8 +136,7 @@ async def test_table_names(app_client): @pytest.mark.asyncio -async def test_execute_write_block_true(app_client): - db = app_client.ds.databases["fixtures"] +async def test_execute_write_block_true(db): await db.execute_write( "update roadside_attractions set name = ? where pk = ?", ["Mystery!", 1], @@ -151,8 +147,7 @@ async def test_execute_write_block_true(app_client): @pytest.mark.asyncio -async def test_execute_write_block_false(app_client): - db = app_client.ds.databases["fixtures"] +async def test_execute_write_block_false(db): await db.execute_write( "update roadside_attractions set name = ? 
where pk = ?", ["Mystery!", 1], ) @@ -162,9 +157,7 @@ async def test_execute_write_block_false(app_client): @pytest.mark.asyncio -async def test_execute_write_fn_block_false(app_client): - db = app_client.ds.databases["fixtures"] - +async def test_execute_write_fn_block_false(db): def write_fn(conn): with conn: conn.execute("delete from roadside_attractions where pk = 1;") @@ -177,9 +170,7 @@ async def test_execute_write_fn_block_false(app_client): @pytest.mark.asyncio -async def test_execute_write_fn_block_true(app_client): - db = app_client.ds.databases["fixtures"] - +async def test_execute_write_fn_block_true(db): def write_fn(conn): with conn: conn.execute("delete from roadside_attractions where pk = 1;") @@ -191,9 +182,7 @@ async def test_execute_write_fn_block_true(app_client): @pytest.mark.asyncio -async def test_execute_write_fn_exception(app_client): - db = app_client.ds.databases["fixtures"] - +async def test_execute_write_fn_exception(db): def write_fn(conn): assert False From 012c76901af65442e90eac4b36db43455e3c922f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 07:38:46 -0700 Subject: [PATCH 0012/1871] _ prefix for many private methods of Datasette, refs #576 --- datasette/app.py | 28 ++++++++++++++-------------- datasette/database.py | 2 +- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 30eb3dba..4b9807b0 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -279,7 +279,7 @@ class Datasette: # pylint: disable=no-member pm.hook.prepare_jinja2_environment(env=self.jinja_env) - self.register_renderers() + self._register_renderers() def get_database(self, name=None): if name is None: @@ -392,7 +392,7 @@ class Datasette: } ) - def prepare_connection(self, conn, database): + def _prepare_connection(self, conn, database): conn.row_factory = sqlite3.Row conn.text_factory = lambda x: str(x, "utf-8", "replace") for name, num_args, func in self.sqlite_functions: @@ -468,12 +468,12 @@ class Datasette: url = "https://" + url[len("http://") :] return url - def register_custom_units(self): + def _register_custom_units(self): "Register any custom units defined in the metadata.json with Pint" for unit in self.metadata("custom_units") or []: ureg.define(unit) - def connected_databases(self): + def _connected_databases(self): return [ { "name": d.name, @@ -486,9 +486,9 @@ class Datasette: for d in sorted(self.databases.values(), key=lambda d: d.name) ] - def versions(self): + def _versions(self): conn = sqlite3.connect(":memory:") - self.prepare_connection(conn, ":memory:") + self._prepare_connection(conn, ":memory:") sqlite_version = conn.execute("select sqlite_version()").fetchone()[0] sqlite_extensions = {} for extension, testsql, hasversion in ( @@ -534,7 +534,7 @@ class Datasette: }, } - def plugins(self, show_all=False): + def _plugins(self, show_all=False): ps = list(get_plugins()) if not show_all: ps = [p for p in ps if p["name"] not in DEFAULT_PLUGINS] @@ -548,7 +548,7 @@ class Datasette: for p in ps ] - def threads(self): + def _threads(self): threads = list(threading.enumerate()) d = { "num_threads": len(threads), @@ -576,7 +576,7 @@ class Datasette: .get(table, {}) ) - def register_renderers(self): + def _register_renderers(self): """ Register output renderers which output data in custom formats. 
""" # Built-in renderers self.renderers["json"] = (json_renderer, lambda: True) @@ -724,11 +724,11 @@ class Datasette: r"/-/metadata(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "versions.json", self.versions), + JsonDataView.as_asgi(self, "versions.json", self._versions), r"/-/versions(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "plugins.json", self.plugins), + JsonDataView.as_asgi(self, "plugins.json", self._plugins), r"/-/plugins(?P(\.json)?)$", ) add_route( @@ -736,11 +736,11 @@ class Datasette: r"/-/config(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "threads.json", self.threads), + JsonDataView.as_asgi(self, "threads.json", self._threads), r"/-/threads(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "databases.json", self.connected_databases), + JsonDataView.as_asgi(self, "databases.json", self._connected_databases), r"/-/databases(?P(\.json)?)$", ) add_route( @@ -765,7 +765,7 @@ class Datasette: + renderer_regex + r")?$", ) - self.register_custom_units() + self._register_custom_units() async def setup_db(): # First time server starts up, calculate table counts for immutable databases diff --git a/datasette/database.py b/datasette/database.py index e6154caa..89bf47f4 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -104,7 +104,7 @@ class Database: conn = getattr(connections, self.name, None) if not conn: conn = self.connect() - self.ds.prepare_connection(conn, self.name) + self.ds._prepare_connection(conn, self.name) setattr(connections, self.name, conn) return fn(conn) From de1cde65a67cf9acb227b4df67230b47fdfc9a0e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 10:45:11 -0700 Subject: [PATCH 0013/1871] Moved request tests to test_internals_request.py --- tests/test_internals_request.py | 42 +++++++++++++++++++++++++++++++++ tests/test_utils.py | 40 ------------------------------- 2 files changed, 42 insertions(+), 40 deletions(-) create mode 100644 tests/test_internals_request.py diff --git a/tests/test_internals_request.py b/tests/test_internals_request.py new file mode 100644 index 00000000..5c9b254b --- /dev/null +++ b/tests/test_internals_request.py @@ -0,0 +1,42 @@ +from datasette.utils.asgi import Request +import pytest + + +@pytest.mark.asyncio +async def test_request_post_vars(): + scope = { + "http_version": "1.1", + "method": "POST", + "path": "/", + "raw_path": b"/", + "query_string": b"", + "scheme": "http", + "type": "http", + "headers": [[b"content-type", b"application/x-www-form-urlencoded"]], + } + + async def receive(): + return {"type": "http.request", "body": b"foo=bar&baz=1", "more_body": False} + + request = Request(scope, receive) + assert {"foo": "bar", "baz": "1"} == await request.post_vars() + + +def test_request_args(): + request = Request.fake("/foo?multi=1&multi=2&single=3") + assert "1" == request.args.get("multi") + assert "3" == request.args.get("single") + assert "1" == request.args["multi"] + assert "3" == request.args["single"] + assert ["1", "2"] == request.args.getlist("multi") + assert [] == request.args.getlist("missing") + assert "multi" in request.args + assert "single" in request.args + assert "missing" not in request.args + expected = ["multi", "single"] + assert expected == list(request.args.keys()) + for i, key in enumerate(request.args): + assert expected[i] == key + assert 2 == len(request.args) + with pytest.raises(KeyError): + request.args["missing"] diff --git a/tests/test_utils.py b/tests/test_utils.py index 9d6f45b0..01a10468 100644 --- 
a/tests/test_utils.py +++ b/tests/test_utils.py @@ -428,46 +428,6 @@ def test_check_connection_passes(): utils.check_connection(conn) -@pytest.mark.asyncio -async def test_request_post_vars(): - scope = { - "http_version": "1.1", - "method": "POST", - "path": "/", - "raw_path": b"/", - "query_string": b"", - "scheme": "http", - "type": "http", - "headers": [[b"content-type", b"application/x-www-form-urlencoded"]], - } - - async def receive(): - return {"type": "http.request", "body": b"foo=bar&baz=1", "more_body": False} - - request = Request(scope, receive) - assert {"foo": "bar", "baz": "1"} == await request.post_vars() - - -def test_request_args(): - request = Request.fake("/foo?multi=1&multi=2&single=3") - assert "1" == request.args.get("multi") - assert "3" == request.args.get("single") - assert "1" == request.args["multi"] - assert "3" == request.args["single"] - assert ["1", "2"] == request.args.getlist("multi") - assert [] == request.args.getlist("missing") - assert "multi" in request.args - assert "single" in request.args - assert "missing" not in request.args - expected = ["multi", "single"] - assert expected == list(request.args.keys()) - for i, key in enumerate(request.args): - assert expected[i] == key - assert 2 == len(request.args) - with pytest.raises(KeyError): - request.args["missing"] - - def test_call_with_supported_arguments(): def foo(a, b): return "{}+{}".format(a, b) From 5ae14c9f20e0dc59c588f0e93eedfefe0f0f3e8e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 10:54:22 -0700 Subject: [PATCH 0014/1871] Improved documentation for RequestParameters class --- docs/internals.rst | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/docs/internals.rst b/docs/internals.rst index 886cb7e7..ca725cc4 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -278,16 +278,27 @@ The object also has one awaitable method: The RequestParameters class --------------------------- -This class, returned by ``request.args``, is a dictionary-like object. +``request.args`` is a ``RequestParameters`` object - a dictionary-like object which provides access to querystring parameters that may have multiple values. -Consider the querystring ``?foo=1&foo=2``. This will produce a ``request.args`` that looks like this:: +Consider the querystring ``?foo=1&foo=2&bar=3`` - with two values for ``foo`` and one value for ``bar``. - RequestParameters({"foo": ["1", "2"]}) +``request.args[key]`` - string + Returns the first value for that key, or raises a ``KeyError`` if the key is missing. For the above example ``request.args["foo"]`` would return ``"1"``. -``request.args["foo"]`` returns the first value, ``"1"`` - or raises ``KeyError`` if that key is missing. +``request.args.get(key)`` - string or None + Returns the first value for that key, or ``None`` if the key is missing. Pass a second argument to specify a different default, e.g. ``q = request.args.get("q", "")``. -``request.args.get("foo")`` returns ``"1"`` - or ``None`` if the key is missing. A second argument can be used to specify a different default value. +``request.args.getlist(key)`` - list of strings + Returns the list of strings for that key. ``request.args.getlist("foo")`` would return ``["1", "2"]`` in the above example. ``request.args.getlist("bar")`` would return ``["3"]``. If the key is missing an empty list will be returned. -``request.args.getlist("foo")`` returns the full list, ``["1", "2"]``. If you call it on a missing key it will return ``[]``. 
+``request.args.keys()`` - list of strings + Returns the list of available keys - for the example this would be ``["foo", "bar"]``. -You can use ``if key in request.args`` to check if a key is present. ``for key in request.args`` will iterate through the keys, or you can use ``request.args.keys()`` to get all of the keys. +``key in request.args`` - True or False + You can use ``if key in request.args`` to check if a key is present. + +``for key in request.args`` - iterator + This lets you loop through every available key. + +``len(request.args)`` - integer + Returns the number of keys. From 3c5afaeb231c94a55309f1c0187ff6dedd5b5fb8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 11:06:13 -0700 Subject: [PATCH 0015/1871] Re-arranged internals documentation Request is more useful to most people than Database. --- docs/internals.rst | 136 ++++++++++++++++++++++----------------------- 1 file changed, 68 insertions(+), 68 deletions(-) diff --git a/docs/internals.rst b/docs/internals.rst index ca725cc4..4db710c0 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -3,7 +3,74 @@ Internals for plugins ===================== -Many :ref:`plugin_hooks` are passed objects that provide access to internal Datasette functionality. The interface to these objects should not be considered stable (at least until Datasette 1.0) with the exception of methods that are documented on this page. +Many :ref:`plugin_hooks` are passed objects that provide access to internal Datasette functionality. The interface to these objects should not be considered stable with the exception of methods that are documented here. + +.. _internals_request: + +Request object +~~~~~~~~~~~~~~ + +The request object is passed to various plugin hooks. It represents an incoming HTTP request. It has the following properties: + +``.scope`` - dictionary + The ASGI scope that was used to construct this request, described in the `ASGI HTTP connection scope `__ specification. + +``.method`` - string + The HTTP method for this request, usually ``GET`` or ``POST``. + +``.url`` - string + The full URL for this request, e.g. ``https://latest.datasette.io/fixtures``. + +``.scheme`` - string + The request scheme - usually ``https`` or ``http``. + +``.headers`` - dictionary (str -> str) + A dictionary of incoming HTTP request headers. + +``.host`` - string + The host header from the incoming request, e.g. ``latest.datasette.io`` or ``localhost``. + +``.path`` - string + The path of the request, e.g. ``/fixtures``. + +``.query_string`` - string + The querystring component of the request, without the ``?`` - e.g. ``name__contains=sam&age__gt=10``. + +``.args`` - RequestParameters + An object representing the parsed querystring parameters, see below. + +The object also has one awaitable method: + +``await request.post_vars()`` - dictionary + Returns a dictionary of form variables that were submitted in the request body via ``POST``. + +The RequestParameters class +--------------------------- + +``request.args`` is a ``RequestParameters`` object - a dictionary-like object which provides access to querystring parameters that may have multiple values. + +Consider the querystring ``?foo=1&foo=2&bar=3`` - with two values for ``foo`` and one value for ``bar``. + +``request.args[key]`` - string + Returns the first value for that key, or raises a ``KeyError`` if the key is missing. For the above example ``request.args["foo"]`` would return ``"1"``. 
+ +``request.args.get(key)`` - string or None + Returns the first value for that key, or ``None`` if the key is missing. Pass a second argument to specify a different default, e.g. ``q = request.args.get("q", "")``. + +``request.args.getlist(key)`` - list of strings + Returns the list of strings for that key. ``request.args.getlist("foo")`` would return ``["1", "2"]`` in the above example. ``request.args.getlist("bar")`` would return ``["3"]``. If the key is missing an empty list will be returned. + +``request.args.keys()`` - list of strings + Returns the list of available keys - for the example this would be ``["foo", "bar"]``. + +``key in request.args`` - True or False + You can use ``if key in request.args`` to check if a key is present. + +``for key in request.args`` - iterator + This lets you loop through every available key. + +``len(request.args)`` - integer + Returns the number of keys. .. _internals_datasette: @@ -235,70 +302,3 @@ Here's an example of ``block=True`` in action: num_rows_left = await database.execute_write_fn(my_action, block=True) except Exception as e: print("An error occurred:", e) - -.. _internals_request: - -Request object -~~~~~~~~~~~~~~ - -The request object is passed to various plugin hooks. It represents an incoming HTTP request. It has the following properties: - -``.scope`` - dictionary - The ASGI scope that was used to construct this request, described in the `ASGI HTTP connection scope `__ specification. - -``.method`` - string - The HTTP method for this request, usually ``GET`` or ``POST``. - -``.url`` - string - The full URL for this request, e.g. ``https://latest.datasette.io/fixtures``. - -``.scheme`` - string - The request scheme - usually ``https`` or ``http``. - -``.headers`` - dictionary (str -> str) - A dictionary of incoming HTTP request headers. - -``.host`` - string - The host header from the incoming request, e.g. ``latest.datasette.io`` or ``localhost``. - -``.path`` - string - The path of the request, e.g. ``/fixtures``. - -``.query_string`` - string - The querystring component of the request, without the ``?`` - e.g. ``name__contains=sam&age__gt=10``. - -``.args`` - RequestParameters - An object representing the parsed querystring parameters, see below. - -The object also has one awaitable method: - -``await request.post_vars()`` - dictionary - Returns a dictionary of form variables that were submitted in the request body via ``POST``. - -The RequestParameters class ---------------------------- - -``request.args`` is a ``RequestParameters`` object - a dictionary-like object which provides access to querystring parameters that may have multiple values. - -Consider the querystring ``?foo=1&foo=2&bar=3`` - with two values for ``foo`` and one value for ``bar``. - -``request.args[key]`` - string - Returns the first value for that key, or raises a ``KeyError`` if the key is missing. For the above example ``request.args["foo"]`` would return ``"1"``. - -``request.args.get(key)`` - string or None - Returns the first value for that key, or ``None`` if the key is missing. Pass a second argument to specify a different default, e.g. ``q = request.args.get("q", "")``. - -``request.args.getlist(key)`` - list of strings - Returns the list of strings for that key. ``request.args.getlist("foo")`` would return ``["1", "2"]`` in the above example. ``request.args.getlist("bar")`` would return ``["3"]``. If the key is missing an empty list will be returned. 
- -``request.args.keys()`` - list of strings - Returns the list of available keys - for the example this would be ``["foo", "bar"]``. - -``key in request.args`` - True or False - You can use ``if key in request.args`` to check if a key is present. - -``for key in request.args`` - iterator - This lets you loop through every available key. - -``len(request.args)`` - integer - Returns the number of keys. From 4d798ca0e3df246bd47f0600cc7b5118ba33ac16 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 11:17:20 -0700 Subject: [PATCH 0016/1871] Added test for db.mtime_ns --- datasette/database.py | 4 +++- tests/test_internals_database.py | 12 +++++++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 89bf47f4..ed119542 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -33,7 +33,7 @@ class Database: self.cached_table_counts = None self._write_thread = None self._write_queue = None - if not self.is_mutable: + if not self.is_mutable and not self.is_memory: p = Path(path) self.hash = inspect_hash(p) self.cached_size = p.stat().st_size @@ -197,6 +197,8 @@ class Database: @property def mtime_ns(self): + if self.is_memory: + return None return Path(self.path).stat().st_mtime_ns @property diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index fde7ad2c..5d5520dd 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -1,7 +1,7 @@ """ Tests for the datasette.database.Database class """ -from datasette.database import Results, MultipleValues +from datasette.database import Database, Results, MultipleValues from datasette.utils import sqlite3 from .fixtures import app_client import pytest @@ -188,3 +188,13 @@ async def test_execute_write_fn_exception(db): with pytest.raises(AssertionError): await db.execute_write_fn(write_fn, block=True) + + +@pytest.mark.asyncio +async def test_mtime_ns(db): + assert isinstance(db.mtime_ns, int) + + +def test_mtime_ns_is_none_for_memory(app_client): + memory_db = Database(app_client.ds, is_memory=True) + assert None is memory_db.mtime_ns From 124acf34a678f0af438dc31a2dceebf28612f249 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 11:39:46 -0700 Subject: [PATCH 0017/1871] Removed db.get_outbound_foreign_keys method It duplicated the functionality of db.foreign_keys_for_table. 
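For code that used the removed method, ``foreign_keys_for_table`` is a drop-in replacement. A sketch of typical usage (not part of this patch), where ``db`` is a ``datasette.database.Database`` instance::

    async def outbound_foreign_keys_summary(db, table):
        # Same list of dictionaries the removed method returned:
        # [{"column": ..., "other_table": ..., "other_column": ...}, ...]
        fks = await db.foreign_keys_for_table(table)
        return [
            "{column} -> {other_table}.{other_column}".format(**fk)
            for fk in fks
        ]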
--- datasette/database.py | 5 ----- datasette/utils/__init__.py | 2 +- datasette/views/table.py | 4 +--- 3 files changed, 2 insertions(+), 9 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index ed119542..ab3c82c9 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -319,11 +319,6 @@ class Database: async def get_all_foreign_keys(self): return await self.execute_fn(get_all_foreign_keys) - async def get_outbound_foreign_keys(self, table): - return await self.execute_fn( - lambda conn: get_outbound_foreign_keys(conn, table) - ) - async def get_table_definition(self, table, type_="table"): table_definition_rows = list( await self.execute( diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index bf965413..2eb31502 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -428,7 +428,7 @@ def get_outbound_foreign_keys(conn, table): if info is not None: id, seq, table_name, from_, to_, on_update, on_delete, match = info fks.append( - {"other_table": table_name, "column": from_, "other_column": to_} + {"column": from_, "other_table": table_name, "other_column": to_} ) return fks diff --git a/datasette/views/table.py b/datasette/views/table.py index a629346f..2e9515c3 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -336,9 +336,7 @@ class TableView(RowTableShared): through_table = through_data["table"] other_column = through_data["column"] value = through_data["value"] - outgoing_foreign_keys = await db.get_outbound_foreign_keys( - through_table - ) + outgoing_foreign_keys = await db.foreign_keys_for_table(through_table) try: fk_to_us = [ fk for fk in outgoing_foreign_keys if fk["other_table"] == table From c4fbe50676929b512940aab90de590a78ac5d7fc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 11:40:30 -0700 Subject: [PATCH 0018/1871] Documentation for Database introspection methods, closes #684 Refs #576 --- docs/internals.rst | 68 ++++++++++++++++++++++++++++++++++++++++++++++ docs/metadata.rst | 2 ++ 2 files changed, 70 insertions(+) diff --git a/docs/internals.rst b/docs/internals.rst index 4db710c0..e9ba9567 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -302,3 +302,71 @@ Here's an example of ``block=True`` in action: num_rows_left = await database.execute_write_fn(my_action, block=True) except Exception as e: print("An error occurred:", e) + +Database introspection +---------------------- + +The ``Database`` class also provides properties and methods for introspecting the database. + +``db.name`` - string + The name of the database - usually the filename without the ``.db`` prefix. + +``db.size`` - integer + The size of the database file in bytes. 0 for ``:memory:`` databases. + +``db.mtime_ns`` - integer or None + The last modification time of the database file in nanoseconds since the epoch. ``None`` for ``:memory:`` databases. + +``await db.table_exists(table)`` - boolean + Check if a table called ``table`` exists. + +``await db.table_names()`` - list of strings + List of names of tables in the database. + +``await db.view_names()`` - list of strings + List of names of views in tha database. + +``await db.table_columns(table)`` - list of strings + Names of columns in a specific table. + +``await db.primary_keys(table)`` - list of strings + Names of the columns that are part of the primary key for this table. + +``await db.fts_table(table)`` - string or None + The name of the FTS table associated with this table, if one exists. 
+ +``await db.label_column_for_table(table)`` - string or None + The label column that is associated with this table - either automatically detected or using the ``"label_column"`` key from :ref:`metadata`, see :ref:`label_columns`. + +``await db.foreign_keys_for_table(table)`` - list of dictionaries + Details of columns in this table which are foreign keys to other tables. A list of dictionaries where each dictionary is shaped like this: ``{"column": string, "other_table": string, "other_column": string}``. + +``await db.hidden_table_names()`` - list of strings + List of tables which Datasette "hides" by default - usually these are tables associated with SQLite's full-text search feature, the SpatiaLite extension or tables hidden using the :ref:`metadata_hiding_tables` feature. + +``await db.get_table_definition(table)`` - string + Returns the SQL definition for the table - the ``CREATE TABLE`` statement and any associated ``CREATE INDEX`` statements. + +``await db.get_view_definition(view)`` - string + Returns the SQL definition of the named view. + +``await db.get_all_foreign_keys()`` - dictionary + Dictionary representing both incoming and outgoing foreign keys for this table. It has two keys, ``"incoming"`` and ``"outgoing"``, each of which is a list of dictionaries with keys ``"column"``, ``"other_table"`` and ``"other_column"``. For example: + + .. code-block:: json + + { + "incoming": [], + "outgoing": [ + { + "other_table": "attraction_characteristic", + "column": "characteristic_id", + "other_column": "pk", + }, + { + "other_table": "roadside_attractions", + "column": "attraction_id", + "other_column": "pk", + } + ] + } diff --git a/docs/metadata.rst b/docs/metadata.rst index 024af01e..471a52e3 100644 --- a/docs/metadata.rst +++ b/docs/metadata.rst @@ -256,6 +256,8 @@ used for the link label with the ``label_column`` property: } } +.. _metadata_hiding_tables: + Hiding tables ------------- From 060a56735c1d3bde0a4c7674e82b5f45bef34dee Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 13:24:00 -0700 Subject: [PATCH 0019/1871] actor_from_request and permission_allowed hookspecs, refs #699 --- datasette/hookspecs.py | 10 ++++++++++ docs/plugins.rst | 37 +++++++++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+) diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index c2fc0126..65c1c859 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -58,3 +58,13 @@ def register_output_renderer(datasette): @hookspec def register_facet_classes(): "Register Facet subclasses" + + +@hookspec +def actor_from_request(datasette, request): + "Return an actor dictionary based on the incoming request" + + +@hookspec +def permission_allowed(actor, action, resource_type, resource_identifier): + "Check if actor is allowed to perfom this action - return True, False or None" diff --git a/docs/plugins.rst b/docs/plugins.rst index f08f1217..09e8f5e3 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -941,3 +941,40 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att return wrap_with_databases_header Examples: `datasette-auth-github `_, `datasette-search-all `_, `datasette-media `_ + +.. _plugin_actor_from_request: + +actor_from_request(datasette, request) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. 
+ +``request`` - object + The current HTTP :ref:`internals_request`. + +This is part of Datasette's authentication and permissions system. The function should attempt to authenticate an actor (either a user or an API actor of some sort) based on information in the request. + +If it cannot authenticate an actor, it should return ``None``. Otherwise it should return a dictionary representing that actor. + +.. _plugin_permission_allowed: + +permission_allowed(datasette, actor, action, resource_type, resource_identifier) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + +``actor`` - dictionary + The current actor, as decided by :ref:`plugin_actor_from_request`. + +``action`` - string + The action to be performed, e.g. ``"edit-table"``. + +``resource_type`` - string + The type of resource being acted on, e.g. ``"table"``. + +``resource`` - string + An identifier for the individual resource, e.g. the name of the table. + +Called to check that an actor has permission to perform an action on a resource. Can return ``True`` if the action is allowed, ``False`` if the action is not allowed or ``None`` if the plugin does not have an opinion one way or the other. From 461c82838d65dd9f61c5be725343a82c61b5c3f3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 15:06:33 -0700 Subject: [PATCH 0020/1871] Implemented actor_from_request with tests, refs #699 Also added datasette argument to permission_allowed hook --- datasette/app.py | 13 ++++++++++++- datasette/hookspecs.py | 2 +- docs/plugins.rst | 23 +++++++++++++++++++++++ tests/plugins/my_plugin.py | 8 ++++++++ tests/plugins/my_plugin_2.py | 12 ++++++++++++ tests/test_plugins.py | 24 ++++++++++++++++++++++++ 6 files changed, 80 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 4b9807b0..3f2876ec 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -798,7 +798,18 @@ class DatasetteRouter(AsgiRouter): and scope.get("scheme") != "https" ): scope = dict(scope, scheme="https") - return await super().route_path(scope, receive, send, path) + # Handle authentication + actor = None + for actor in pm.hook.actor_from_request( + datasette=self.ds, request=Request(scope, receive) + ): + if callable(actor): + actor = actor() + if asyncio.iscoroutine(actor): + actor = await actor + if actor: + break + return await super().route_path(dict(scope, actor=actor), receive, send, path) async def handle_404(self, scope, receive, send, exception=None): # If URL has a trailing slash, redirect to URL without it diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 65c1c859..71d06661 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -66,5 +66,5 @@ def actor_from_request(datasette, request): @hookspec -def permission_allowed(actor, action, resource_type, resource_identifier): +def permission_allowed(datasette, actor, action, resource_type, resource_identifier): "Check if actor is allowed to perfom this action - return True, False or None" diff --git a/docs/plugins.rst b/docs/plugins.rst index 09e8f5e3..fb2843f4 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -957,6 +957,29 @@ This is part of Datasette's authentication and permissions system. The function If it cannot authenticate an actor, it should return ``None``. 
Otherwise it should return a dictionary representing that actor. +Instead of returning a dictionary, this function can return an awaitable function which itself returns either ``None`` or a dictionary. This is useful for authentication functions that need to make a database query - for example: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def actor_from_request(datasette, request): + async def inner(): + token = request.args.get("_token") + if not token: + return None + # Look up ?_token=xxx in sessions table + result = await datasette.get_database().execute( + "select count(*) from sessions where token = ?", [token] + ) + if result.first()[0]: + return {"token": token} + else: + return None + + return inner + .. _plugin_permission_allowed: permission_allowed(datasette, actor, action, resource_type, resource_identifier) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 10d7e7e6..305cb3b7 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -126,3 +126,11 @@ class DummyFacet(Facet): facet_results = {} facets_timed_out = [] return facet_results, facets_timed_out + + +@hookimpl +def actor_from_request(datasette, request): + if request.args.get("_bot"): + return {"id": "bot"} + else: + return None diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index c9e7c78f..0a5cbba5 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -95,3 +95,15 @@ def asgi_wrapper(datasette): return add_x_databases_header return wrap_with_databases_header + + +@hookimpl +def actor_from_request(datasette, request): + async def inner(): + if request.args.get("_bot2"): + result = await datasette.get_database().execute("select 1 + 1") + return {"id": "bot2", "1+1": result.first()[0]} + else: + return None + + return inner diff --git a/tests/test_plugins.py b/tests/test_plugins.py index a34328a9..3ad26986 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -503,3 +503,27 @@ def test_register_facet_classes(app_client): "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet=pk3", }, ] == data["suggested_facets"] + + +def test_actor_from_request(app_client): + app_client.get("/") + # Should have no actor + assert None == app_client.ds._last_request.scope["actor"] + app_client.get("/?_bot=1") + # Should have bot actor + assert {"id": "bot"} == app_client.ds._last_request.scope["actor"] + + +def test_actor_from_request_async(app_client): + app_client.get("/") + # Should have no actor + assert None == app_client.ds._last_request.scope["actor"] + app_client.get("/?_bot2=1") + # Should have bot2 actor + assert {"id": "bot2", "1+1": 2} == app_client.ds._last_request.scope["actor"] + + +@pytest.mark.xfail +def test_permission_allowed(app_client): + # TODO + assert False From 9315bacf6f63e20781d21d170e55a55b2c54fcdd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 15:24:43 -0700 Subject: [PATCH 0021/1871] Implemented datasette.permission_allowed(), refs #699 --- datasette/app.py | 19 +++++++++++++++++++ docs/internals.rst | 19 +++++++++++++++++++ tests/plugins/my_plugin.py | 8 ++++++++ tests/plugins/my_plugin_2.py | 13 +++++++++++++ tests/test_plugins.py | 20 ++++++++++++++++---- 5 files changed, 75 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 3f2876ec..773dee31 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -406,6 +406,25 @@ class Datasette: # pylint: disable=no-member 
pm.hook.prepare_connection(conn=conn, database=database, datasette=self) + async def permission_allowed( + self, actor, action, resource_type=None, resource_identifier=None, default=False + ): + "Check permissions using the permissions_allowed plugin hook" + for check in pm.hook.permission_allowed( + datasette=self, + actor=actor, + action=action, + resource_type=resource_type, + resource_identifier=resource_identifier, + ): + if callable(check): + check = check() + if asyncio.iscoroutine(check): + check = await check + if check is not None: + return check + return default + async def execute( self, db_name, diff --git a/docs/internals.rst b/docs/internals.rst index e9ba9567..2ba70722 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -111,6 +111,25 @@ This method lets you read plugin configuration values that were set in ``metadat Renders a `Jinja template `__ using Datasette's preconfigured instance of Jinja and returns the resulting string. The template will have access to Datasette's default template functions and any functions that have been made available by other plugins. +await .permission_allowed(actor, action, resource_type=None, resource_identifier=None, default=False) +----------------------------------------------------------------------------------------------------- + +``actor`` - dictionary + The authenticated actor. This is usually ``request.scope.get("actor")``. + +``action`` - string + The name of the action that is being permission checked. + +``resource_type`` - string, optional + The type of resource being checked, e.g. ``"table"``. + +``resource_identifier`` - string, optional + The resource identifier, e.g. the name of the table. + +Check if the given actor has permission to perform the given action on the given resource. This uses plugins that implement the :ref:`plugin_permission_allowed` plugin hook to decide if the action is allowed or not. + +If none of the plugins express an opinion, the return value will be the ``default`` argument. This is deny, but you can pass ``default=True`` to default allow instead. + .. 
_datasette_get_database: .get_database(name) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 305cb3b7..46893710 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -134,3 +134,11 @@ def actor_from_request(datasette, request): return {"id": "bot"} else: return None + + +@hookimpl +def permission_allowed(actor, action): + if action == "this_is_allowed": + return True + elif action == "this_is_denied": + return False diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index 0a5cbba5..039112f4 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -107,3 +107,16 @@ def actor_from_request(datasette, request): return None return inner + + +@hookimpl +def permission_allowed(datasette, actor, action): + # Testing asyncio version of permission_allowed + async def inner(): + assert 2 == (await datasette.get_database().execute("select 1 + 1")).first()[0] + if action == "this_is_allowed_async": + return True + elif action == "this_is_denied_async": + return False + + return inner diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 3ad26986..e123b7a0 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -523,7 +523,19 @@ def test_actor_from_request_async(app_client): assert {"id": "bot2", "1+1": 2} == app_client.ds._last_request.scope["actor"] -@pytest.mark.xfail -def test_permission_allowed(app_client): - # TODO - assert False +@pytest.mark.asyncio +@pytest.mark.parametrize( + "action,expected", + [ + ("this_is_allowed", True), + ("this_is_denied", False), + ("this_is_allowed_async", True), + ("this_is_denied_async", False), + ("no_match", None), + ], +) +async def test_permission_allowed(app_client, action, expected): + actual = await app_client.ds.permission_allowed( + {"id": "actor"}, action, default=None + ) + assert expected == actual From 1fc6ceefb9eddd29844e7bfe3e06a83df6ce3dc4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 30 May 2020 18:51:00 -0700 Subject: [PATCH 0022/1871] Added /-/actor.json - refs #699 Also added JSON highlighting to introspection documentation. --- datasette/app.py | 7 ++++++ datasette/views/special.py | 8 +++++-- docs/introspection.rst | 44 ++++++++++++++++++++++++++++++++------ tests/test_plugins.py | 7 ++++++ 4 files changed, 57 insertions(+), 9 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 773dee31..37b4ed3d 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -586,6 +586,9 @@ class Datasette: ) return d + def _actor(self, request): + return {"actor": request.scope.get("actor", None)} + def table_metadata(self, database, table): "Fetch table-specific metadata." 
return ( @@ -762,6 +765,10 @@ class Datasette: JsonDataView.as_asgi(self, "databases.json", self._connected_databases), r"/-/databases(?P(\.json)?)$", ) + add_route( + JsonDataView.as_asgi(self, "actor.json", self._actor, needs_request=True), + r"/-/actor(?P(\.json)?)$", + ) add_route( PatternPortfolioView.as_asgi(self), r"/-/patterns$", ) diff --git a/datasette/views/special.py b/datasette/views/special.py index dfe5ea8c..840473a7 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -6,13 +6,17 @@ from .base import BaseView class JsonDataView(BaseView): name = "json_data" - def __init__(self, datasette, filename, data_callback): + def __init__(self, datasette, filename, data_callback, needs_request=False): self.ds = datasette self.filename = filename self.data_callback = data_callback + self.needs_request = needs_request async def get(self, request, as_format): - data = self.data_callback() + if self.needs_request: + data = self.data_callback(request) + else: + data = self.data_callback() if as_format: headers = {} if self.ds.cors: diff --git a/docs/introspection.rst b/docs/introspection.rst index 3cd4a40f..e5d08dbc 100644 --- a/docs/introspection.rst +++ b/docs/introspection.rst @@ -10,7 +10,9 @@ Each of these pages can be viewed in your browser. Add ``.json`` to the URL to g /-/metadata ----------- -Shows the contents of the ``metadata.json`` file that was passed to ``datasette serve``, if any. `Metadata example `_:: +Shows the contents of the ``metadata.json`` file that was passed to ``datasette serve``, if any. `Metadata example `_: + +.. code-block:: json { "license": "CC Attribution 4.0 License", @@ -18,7 +20,9 @@ Shows the contents of the ``metadata.json`` file that was passed to ``datasette "source": "fivethirtyeight/data on GitHub", "source_url": "https://github.com/fivethirtyeight/data", "title": "Five Thirty Eight", - "databases": {...} + "databases": { + + } } .. _JsonDataView_versions: @@ -26,7 +30,9 @@ Shows the contents of the ``metadata.json`` file that was passed to ``datasette /-/versions ----------- -Shows the version of Datasette, Python and SQLite. `Versions example `_:: +Shows the version of Datasette, Python and SQLite. `Versions example `_: + +.. code-block:: json { "datasette": { @@ -63,7 +69,9 @@ Shows the version of Datasette, Python and SQLite. `Versions example `_:: +Shows a list of currently installed plugins and their versions. `Plugins example `_: + +.. code-block:: json [ { @@ -79,7 +87,9 @@ Shows a list of currently installed plugins and their versions. `Plugins example /-/config --------- -Shows the :ref:`config` options for this instance of Datasette. `Config example `_:: +Shows the :ref:`config` options for this instance of Datasette. `Config example `_: + +.. code-block:: json { "default_facet_size": 30, @@ -95,7 +105,9 @@ Shows the :ref:`config` options for this instance of Datasette. `Config example /-/databases ------------ -Shows currently attached databases. `Databases example `_:: +Shows currently attached databases. `Databases example `_: + +.. code-block:: json [ { @@ -113,7 +125,9 @@ Shows currently attached databases. `Databases example `_:: +Shows details of threads and ``asyncio`` tasks. `Threads example `_: + +.. code-block:: json { "num_threads": 2, @@ -136,3 +150,19 @@ Shows details of threads and ``asyncio`` tasks. `Threads example wait_for=()]>>" ] } + +.. _JsonDataView_actor: + +/-/actor +-------- + +Shows the currently authenticated actor. Useful for debugging Datasette authentication plugins. + +.. 
code-block:: json + + { + "actor": { + "id": 1, + "username": "some-user" + } + } diff --git a/tests/test_plugins.py b/tests/test_plugins.py index e123b7a0..7a3fb49a 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -539,3 +539,10 @@ async def test_permission_allowed(app_client, action, expected): {"id": "actor"}, action, default=None ) assert expected == actual + + +def test_actor_json(app_client): + assert {"actor": None} == app_client.get("/-/actor.json").json + assert {"actor": {"id": "bot2", "1+1": 2}} == app_client.get( + "/-/actor.json/?_bot2=1" + ).json From fa27e44fe09f57dcb87157be97f15b6add7f14ad Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 31 May 2020 15:42:08 -0700 Subject: [PATCH 0023/1871] datasette.sign() and datasette.unsign() methods, refs #785 --- datasette/app.py | 9 +++++++++ datasette/cli.py | 7 +++++++ docs/datasette-serve-help.txt | 3 +++ docs/internals.rst | 28 ++++++++++++++++++++++++++++ setup.py | 1 + tests/test_cli.py | 1 + tests/test_internals_datasette.py | 12 ++++++++++++ 7 files changed, 61 insertions(+) diff --git a/datasette/app.py b/datasette/app.py index 37b4ed3d..5e3d3af5 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -14,6 +14,7 @@ from pathlib import Path import click from markupsafe import Markup +from itsdangerous import URLSafeSerializer import jinja2 from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader, escape from jinja2.environment import Template @@ -163,12 +164,14 @@ class Datasette: static_mounts=None, memory=False, config=None, + secret=None, version_note=None, config_dir=None, ): assert config_dir is None or isinstance( config_dir, Path ), "config_dir= should be a pathlib.Path" + self._secret = secret or os.urandom(32).hex() self.files = tuple(files) + tuple(immutables or []) if config_dir: self.files += tuple([str(p) for p in config_dir.glob("*.db")]) @@ -281,6 +284,12 @@ class Datasette: self._register_renderers() + def sign(self, value, namespace="default"): + return URLSafeSerializer(self._secret, namespace).dumps(value) + + def unsign(self, signed, namespace="default"): + return URLSafeSerializer(self._secret, namespace).loads(signed) + def get_database(self, name=None): if name is None: return next(iter(self.databases.values())) diff --git a/datasette/cli.py b/datasette/cli.py index c59fb6e0..dba3a612 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -299,6 +299,11 @@ def package( help="Set config option using configname:value datasette.readthedocs.io/en/latest/config.html", multiple=True, ) +@click.option( + "--secret", + help="Secret used for signing secure values, such as signed cookies", + envvar="DATASETTE_SECRET", +) @click.option("--version-note", help="Additional note to show on /-/versions") @click.option("--help-config", is_flag=True, help="Show available config options") def serve( @@ -317,6 +322,7 @@ def serve( static, memory, config, + secret, version_note, help_config, return_instance=False, @@ -362,6 +368,7 @@ def serve( static_mounts=static, config=dict(config), memory=memory, + secret=secret, version_note=version_note, ) diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index 5265c294..ab27714a 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -29,6 +29,9 @@ Options: --config CONFIG Set config option using configname:value datasette.readthedocs.io/en/latest/config.html + --secret TEXT Secret used for signing secure values, such as signed + cookies + --version-note TEXT Additional note to show on 
/-/versions --help-config Show available config options --help Show this message and exit. diff --git a/docs/internals.rst b/docs/internals.rst index 2ba70722..68a35312 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -183,6 +183,34 @@ Use ``is_memory`` if the connection is to an in-memory SQLite database. This removes a database that has been previously added. ``name=`` is the unique name of that database, also used in the URL for it. +.. _datasette_sign: + +.sign(value, namespace="default") +--------------------------------- + +``value`` - any serializable type + The value to be signed. + +``namespace`` - string, optional + An alternative namespace, see the `itsdangerous salt documentation `__. + +Utility method for signing values, such that you can safely pass data to and from an untrusted environment. This is a wrapper around the `itsdangerous `__ library. + +This method returns a signed string, which can be decoded and verified using :ref:`datasette_unsign`. + +.. _datasette_unsign: + +.unsign(value, namespace="default") +----------------------------------- + +``signed`` - any serializable type + The signed string that was created using :ref:`datasette_sign`. + +``namespace`` - string, optional + The alternative namespace, if one was used. + +Returns the original, decoded object that was passed to :ref:`datasette_sign`. If the signature is not valid this raises a ``itsdangerous.BadSignature`` exception. + .. _internals_database: Database class diff --git a/setup.py b/setup.py index d9c70de5..93628266 100644 --- a/setup.py +++ b/setup.py @@ -55,6 +55,7 @@ setup( "janus>=0.4,<0.6", "PyYAML~=5.3", "mergedeep>=1.1.1,<1.4.0", + "itsdangerous~=1.1", ], entry_points=""" [console_scripts] diff --git a/tests/test_cli.py b/tests/test_cli.py index ac5746c6..f52f17b4 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -75,6 +75,7 @@ def test_metadata_yaml(): static=[], memory=False, config=[], + secret=None, version_note=None, help_config=False, return_instance=True, diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py index 4993250d..0be0b932 100644 --- a/tests/test_internals_datasette.py +++ b/tests/test_internals_datasette.py @@ -1,6 +1,7 @@ """ Tests for the datasette.app.Datasette class """ +from itsdangerous import BadSignature from .fixtures import app_client import pytest @@ -21,3 +22,14 @@ def test_get_database_no_argument(datasette): # Returns the first available database: db = datasette.get_database() assert "fixtures" == db.name + + +@pytest.mark.parametrize("value", ["hello", 123, {"key": "value"}]) +@pytest.mark.parametrize("namespace", [None, "two"]) +def test_sign_unsign(datasette, value, namespace): + extra_args = [namespace] if namespace else [] + signed = datasette.sign(value, *extra_args) + assert value != signed + assert value == datasette.unsign(signed, *extra_args) + with pytest.raises(BadSignature): + datasette.unsign(signed[:-1] + ("!" if signed[-1] != "!" 
else ":")) From 7690d5ba40fda37ba4ba38ad56fe06c3aed071de Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 31 May 2020 17:18:06 -0700 Subject: [PATCH 0024/1871] Docs for --secret/DATASETTE_SECRET - closes #785 --- docs/config.rst | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/docs/config.rst b/docs/config.rst index d8c2f550..da93e40a 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -288,3 +288,30 @@ For example, if you are sending traffic from ``https://www.example.com/tools/dat You can do that like so:: datasette mydatabase.db --config base_url:/tools/datasette/ + +.. _config_secret: + +Configuring the secret +---------------------- + +Datasette uses a secret string to sign secure values such as cookies. + +If you do not provide a secret, Datasette will create one when it starts up. This secret will reset every time the Datasette server restarts though, so things like authentication cookies will not stay valid between restarts. + +You can pass a secret to Datasette in two ways: with the ``--secret`` command-line option or by setting a ``DATASETTE_SECRET`` environment variable. + +:: + + $ datasette mydb.db --secret=SECRET_VALUE_HERE + +Or:: + + $ export DATASETTE_SECRET=SECRET_VALUE_HERE + $ datasette mydb.db + +One way to generate a secure random secret is to use Python like this:: + + $ python3 -c 'import os; print(os.urandom(32).hex())' + cdb19e94283a20f9d42cca50c5a4871c0aa07392db308755d60a1a5b9bb0fa52 + +Plugin authors make use of this signing mechanism in their plugins using :ref:`datasette_sign` and :ref:`datasette_unsign`. From 9f3d4aba31baf1e2de1910a40bc9663ef53b94e9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 31 May 2020 18:03:17 -0700 Subject: [PATCH 0025/1871] --root option and /-/auth-token view, refs #784 --- datasette/app.py | 6 +++++- datasette/cli.py | 8 ++++++++ datasette/plugins.py | 1 + datasette/views/special.py | 32 +++++++++++++++++++++++++++++++- docs/datasette-serve-help.txt | 1 + tests/fixtures.py | 19 +++++++++++++++---- tests/test_auth.py | 25 +++++++++++++++++++++++++ tests/test_cli.py | 1 + tests/test_docs.py | 4 ++-- 9 files changed, 89 insertions(+), 8 deletions(-) create mode 100644 tests/test_auth.py diff --git a/datasette/app.py b/datasette/app.py index 5e3d3af5..6b39ce12 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -24,7 +24,7 @@ import uvicorn from .views.base import DatasetteError, ureg, AsgiRouter from .views.database import DatabaseDownload, DatabaseView from .views.index import IndexView -from .views.special import JsonDataView, PatternPortfolioView +from .views.special import JsonDataView, PatternPortfolioView, AuthTokenView from .views.table import RowView, TableView from .renderer import json_renderer from .database import Database, QueryInterrupted @@ -283,6 +283,7 @@ class Datasette: pm.hook.prepare_jinja2_environment(env=self.jinja_env) self._register_renderers() + self._root_token = os.urandom(32).hex() def sign(self, value, namespace="default"): return URLSafeSerializer(self._secret, namespace).dumps(value) @@ -778,6 +779,9 @@ class Datasette: JsonDataView.as_asgi(self, "actor.json", self._actor, needs_request=True), r"/-/actor(?P(\.json)?)$", ) + add_route( + AuthTokenView.as_asgi(self), r"/-/auth-token$", + ) add_route( PatternPortfolioView.as_asgi(self), r"/-/patterns$", ) diff --git a/datasette/cli.py b/datasette/cli.py index dba3a612..23f9e36b 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -304,6 +304,11 @@ def package( help="Secret used for signing secure 
values, such as signed cookies", envvar="DATASETTE_SECRET", ) +@click.option( + "--root", + help="Output URL that sets a cookie authenticating the root user", + is_flag=True, +) @click.option("--version-note", help="Additional note to show on /-/versions") @click.option("--help-config", is_flag=True, help="Show available config options") def serve( @@ -323,6 +328,7 @@ def serve( memory, config, secret, + root, version_note, help_config, return_instance=False, @@ -387,6 +393,8 @@ def serve( asyncio.get_event_loop().run_until_complete(check_databases(ds)) # Start the server + if root: + print("http://{}:{}/-/auth-token?token={}".format(host, port, ds._root_token)) uvicorn.run(ds.app(), host=host, port=port, log_level="info") diff --git a/datasette/plugins.py b/datasette/plugins.py index 6c9677d0..487fce4d 100644 --- a/datasette/plugins.py +++ b/datasette/plugins.py @@ -9,6 +9,7 @@ DEFAULT_PLUGINS = ( "datasette.publish.cloudrun", "datasette.facets", "datasette.sql_functions", + "datasette.actor_auth_cookie", ) pm = pluggy.PluginManager("datasette") diff --git a/datasette/views/special.py b/datasette/views/special.py index 840473a7..910193e8 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -1,6 +1,8 @@ import json from datasette.utils.asgi import Response from .base import BaseView +from http.cookies import SimpleCookie +import secrets class JsonDataView(BaseView): @@ -45,4 +47,32 @@ class PatternPortfolioView(BaseView): self.ds = datasette async def get(self, request): - return await self.render(["patterns.html"], request=request,) + return await self.render(["patterns.html"], request=request) + + +class AuthTokenView(BaseView): + name = "auth_token" + + def __init__(self, datasette): + self.ds = datasette + + async def get(self, request): + token = request.args.get("token") or "" + if not self.ds._root_token: + return Response("Root token has already been used", status=403) + if secrets.compare_digest(token, self.ds._root_token): + self.ds._root_token = None + cookie = SimpleCookie() + cookie["ds_actor"] = self.ds.sign({"id": "root"}, "actor") + cookie["ds_actor"]["path"] = "/" + response = Response( + body="", + status=302, + headers={ + "Location": "/", + "set-cookie": cookie.output(header="").lstrip(), + }, + ) + return response + else: + return Response("Invalid token", status=403) diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index ab27714a..183ecc14 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -32,6 +32,7 @@ Options: --secret TEXT Secret used for signing secure values, such as signed cookies + --root Output URL that sets a cookie authenticating the root user --version-note TEXT Additional note to show on /-/versions --help-config Show available config options --help Show this message and exit. 
diff --git a/tests/fixtures.py b/tests/fixtures.py index 9479abf6..b2cfd3d6 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -2,6 +2,7 @@ from datasette.app import Datasette from datasette.utils import sqlite3 from asgiref.testing import ApplicationCommunicator from asgiref.sync import async_to_sync +from http.cookies import SimpleCookie import itertools import json import os @@ -44,10 +45,14 @@ class TestClient: self.asgi_app = asgi_app @async_to_sync - async def get(self, path, allow_redirects=True, redirect_count=0, method="GET"): - return await self._get(path, allow_redirects, redirect_count, method) + async def get( + self, path, allow_redirects=True, redirect_count=0, method="GET", cookies=None + ): + return await self._get(path, allow_redirects, redirect_count, method, cookies) - async def _get(self, path, allow_redirects=True, redirect_count=0, method="GET"): + async def _get( + self, path, allow_redirects=True, redirect_count=0, method="GET", cookies=None + ): query_string = b"" if "?" in path: path, _, query_string = path.partition("?") @@ -56,6 +61,12 @@ class TestClient: raw_path = path.encode("latin-1") else: raw_path = quote(path, safe="/:,").encode("latin-1") + headers = [[b"host", b"localhost"]] + if cookies: + sc = SimpleCookie() + for key, value in cookies.items(): + sc[key] = value + headers.append([b"cookie", sc.output(header="").encode("utf-8")]) scope = { "type": "http", "http_version": "1.0", @@ -63,7 +74,7 @@ class TestClient: "path": unquote(path), "raw_path": raw_path, "query_string": query_string, - "headers": [[b"host", b"localhost"]], + "headers": headers, } instance = ApplicationCommunicator(self.asgi_app, scope) await instance.send_input({"type": "http.request"}) diff --git a/tests/test_auth.py b/tests/test_auth.py new file mode 100644 index 00000000..6b69ab93 --- /dev/null +++ b/tests/test_auth.py @@ -0,0 +1,25 @@ +from .fixtures import app_client + + +def test_auth_token(app_client): + "The /-/auth-token endpoint sets the correct cookie" + assert app_client.ds._root_token is not None + path = "/-/auth-token?token={}".format(app_client.ds._root_token) + response = app_client.get(path, allow_redirects=False,) + assert 302 == response.status + assert "/" == response.headers["Location"] + set_cookie = response.headers["set-cookie"] + assert set_cookie.endswith("; Path=/") + assert set_cookie.startswith("ds_actor=") + cookie_value = set_cookie.split("ds_actor=")[1].split("; Path=/")[0] + assert {"id": "root"} == app_client.ds.unsign(cookie_value, "actor") + # Check that a second with same token fails + assert app_client.ds._root_token is None + assert 403 == app_client.get(path, allow_redirects=False,).status + + +def test_actor_cookie(app_client): + "A valid actor cookie sets request.scope['actor']" + cookie = app_client.ds.sign({"id": "test"}, "actor") + response = app_client.get("/", cookies={"ds_actor": cookie}) + assert {"id": "test"} == app_client.ds._last_request.scope["actor"] diff --git a/tests/test_cli.py b/tests/test_cli.py index f52f17b4..529661ce 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -76,6 +76,7 @@ def test_metadata_yaml(): memory=False, config=[], secret=None, + root=False, version_note=None, help_config=False, return_instance=True, diff --git a/tests/test_docs.py b/tests/test_docs.py index 77c2a611..09c00ddf 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -65,8 +65,8 @@ def documented_views(): first_word = label.split("_")[0] if first_word.endswith("View"): view_labels.add(first_word) - # We deliberately 
don't document this one: - view_labels.add("PatternPortfolioView") + # We deliberately don't document these: + view_labels.update(("PatternPortfolioView", "AuthTokenView")) return view_labels From 57cf5139c552cb7feab9947daa949ca434cc0a66 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 31 May 2020 18:06:16 -0700 Subject: [PATCH 0026/1871] Default actor_from_request hook supporting ds_actor signed cookie Refs #784, refs #699 --- datasette/actor_auth_cookie.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 datasette/actor_auth_cookie.py diff --git a/datasette/actor_auth_cookie.py b/datasette/actor_auth_cookie.py new file mode 100644 index 00000000..41f33fe9 --- /dev/null +++ b/datasette/actor_auth_cookie.py @@ -0,0 +1,18 @@ +from datasette import hookimpl +from itsdangerous import BadSignature +from http.cookies import SimpleCookie + + +@hookimpl +def actor_from_request(datasette, request): + cookies = SimpleCookie() + cookies.load( + dict(request.scope.get("headers") or []).get(b"cookie", b"").decode("utf-8") + ) + if "ds_actor" not in cookies: + return None + ds_actor = cookies["ds_actor"].value + try: + return datasette.unsign(ds_actor, "actor") + except BadSignature: + return None From dfdbdf378aba9afb66666f66b78df2f2069d2595 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 31 May 2020 22:00:36 -0700 Subject: [PATCH 0027/1871] Added /-/permissions debug tool, closes #788 Also started the authentication.rst docs page, refs #786. Part of authentication work, refs #699. --- datasette/app.py | 32 +++++++++++-- datasette/default_permissions.py | 7 +++ datasette/plugins.py | 1 + datasette/templates/permissions_debug.html | 55 ++++++++++++++++++++++ datasette/views/special.py | 18 +++++++ docs/authentication.rst | 18 +++++++ docs/index.rst | 1 + tests/test_auth.py | 23 +++++++++ 8 files changed, 152 insertions(+), 3 deletions(-) create mode 100644 datasette/default_permissions.py create mode 100644 datasette/templates/permissions_debug.html create mode 100644 docs/authentication.rst diff --git a/datasette/app.py b/datasette/app.py index 6b39ce12..b8a5e23d 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1,5 +1,6 @@ import asyncio import collections +import datetime import hashlib import itertools import json @@ -24,7 +25,12 @@ import uvicorn from .views.base import DatasetteError, ureg, AsgiRouter from .views.database import DatabaseDownload, DatabaseView from .views.index import IndexView -from .views.special import JsonDataView, PatternPortfolioView, AuthTokenView +from .views.special import ( + JsonDataView, + PatternPortfolioView, + AuthTokenView, + PermissionsDebugView, +) from .views.table import RowView, TableView from .renderer import json_renderer from .database import Database, QueryInterrupted @@ -283,6 +289,7 @@ class Datasette: pm.hook.prepare_jinja2_environment(env=self.jinja_env) self._register_renderers() + self.permission_checks = collections.deque(maxlen=30) self._root_token = os.urandom(32).hex() def sign(self, value, namespace="default"): @@ -420,6 +427,7 @@ class Datasette: self, actor, action, resource_type=None, resource_identifier=None, default=False ): "Check permissions using the permissions_allowed plugin hook" + result = None for check in pm.hook.permission_allowed( datasette=self, actor=actor, @@ -432,8 +440,23 @@ class Datasette: if asyncio.iscoroutine(check): check = await check if check is not None: - return check - return default + result = check + used_default = False + if result is None: + result = default + 
used_default = True + self.permission_checks.append( + { + "when": datetime.datetime.utcnow().isoformat(), + "actor": actor, + "action": action, + "resource_type": resource_type, + "resource_identifier": resource_identifier, + "used_default": used_default, + "result": result, + } + ) + return result async def execute( self, @@ -782,6 +805,9 @@ class Datasette: add_route( AuthTokenView.as_asgi(self), r"/-/auth-token$", ) + add_route( + PermissionsDebugView.as_asgi(self), r"/-/permissions$", + ) add_route( PatternPortfolioView.as_asgi(self), r"/-/patterns$", ) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py new file mode 100644 index 00000000..0b0d17f9 --- /dev/null +++ b/datasette/default_permissions.py @@ -0,0 +1,7 @@ +from datasette import hookimpl + + +@hookimpl +def permission_allowed(actor, action, resource_type, resource_identifier): + if actor and actor.get("id") == "root" and action == "permissions-debug": + return True diff --git a/datasette/plugins.py b/datasette/plugins.py index 487fce4d..26d4fd63 100644 --- a/datasette/plugins.py +++ b/datasette/plugins.py @@ -10,6 +10,7 @@ DEFAULT_PLUGINS = ( "datasette.facets", "datasette.sql_functions", "datasette.actor_auth_cookie", + "datasette.default_permissions", ) pm = pluggy.PluginManager("datasette") diff --git a/datasette/templates/permissions_debug.html b/datasette/templates/permissions_debug.html new file mode 100644 index 00000000..fb098c5c --- /dev/null +++ b/datasette/templates/permissions_debug.html @@ -0,0 +1,55 @@ +{% extends "base.html" %} + +{% block title %}Debug permissions{% endblock %} + +{% block extra_head %} + +{% endblock %} + +{% block nav %} +

+    <p class="crumbs">
+        <a href="/">home</a>
+    </p>
+    {{ super() }}
+{% endblock %}
+
+{% block content %}
+
+<h1>Recent permissions checks</h1>
+
+{% for check in permission_checks %}
+    <div class="check">
+        <h2>
+            <span class="check-action">{{ check.action }}</span>
+            checked at
+            <span class="check-when">{{ check.when }}</span>
+            {% if check.result %}
+                <span class="check-result-true">&#x2713;</span>
+            {% else %}
+                <span class="check-result-false">&#x2717;</span>
+            {% endif %}
+            {% if check.used_default %}
+                <span class="check-used-default">(used default)</span>
+            {% endif %}
+        </h2>
+        <p>Actor: {{ check.actor|tojson }}</p>
+        {% if check.resource_type %}
+            <p>Resource: {{ check.resource_type }}: {{ check.resource_identifier }}</p>
+        {% endif %}
+    </div>
+{% endfor %} + +{% endblock %} diff --git a/datasette/views/special.py b/datasette/views/special.py index 910193e8..b75355fb 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -76,3 +76,21 @@ class AuthTokenView(BaseView): return response else: return Response("Invalid token", status=403) + + +class PermissionsDebugView(BaseView): + name = "permissions_debug" + + def __init__(self, datasette): + self.ds = datasette + + async def get(self, request): + if not await self.ds.permission_allowed( + request.scope.get("actor"), "permissions-debug" + ): + return Response("Permission denied", status=403) + return await self.render( + ["permissions_debug.html"], + request, + {"permission_checks": reversed(self.ds.permission_checks)}, + ) diff --git a/docs/authentication.rst b/docs/authentication.rst new file mode 100644 index 00000000..0a9a4c0d --- /dev/null +++ b/docs/authentication.rst @@ -0,0 +1,18 @@ +.. _authentication: + +================================ + Authentication and permissions +================================ + +Datasette's authentication system is currently under construction. Follow `issue 699 `__ to track the development of this feature. + +.. _PermissionsDebugView: + +Permissions Debug +================= + +The debug tool at ``/-/permissions`` is only available to the root user. + +It shows the thirty most recent permission checks that have been carried out by the Datasette instance. + +This is designed to help administrators and plugin authors understand exactly how permission checks are being carried out, in order to effectively configure Datasette's permission system. diff --git a/docs/index.rst b/docs/index.rst index 2390e263..03988c8e 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -40,6 +40,7 @@ Contents publish json_api sql_queries + authentication performance csv_export facets diff --git a/tests/test_auth.py b/tests/test_auth.py index 6b69ab93..ddf328af 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,4 +1,5 @@ from .fixtures import app_client +from bs4 import BeautifulSoup as Soup def test_auth_token(app_client): @@ -23,3 +24,25 @@ def test_actor_cookie(app_client): cookie = app_client.ds.sign({"id": "test"}, "actor") response = app_client.get("/", cookies={"ds_actor": cookie}) assert {"id": "test"} == app_client.ds._last_request.scope["actor"] + + +def test_permissions_debug(app_client): + assert 403 == app_client.get("/-/permissions").status + # With the cookie it should work + cookie = app_client.ds.sign({"id": "root"}, "actor") + response = app_client.get("/-/permissions", cookies={"ds_actor": cookie}) + # Should show one failure and one success + soup = Soup(response.body, "html.parser") + check_divs = soup.findAll("div", {"class": "check"}) + checks = [ + { + "action": div.select_one(".check-action").text, + "result": bool(div.select(".check-result-true")), + "used_default": bool(div.select(".check-used-default")), + } + for div in check_divs + ] + assert [ + {"action": "permissions-debug", "result": True, "used_default": False}, + {"action": "permissions-debug", "result": False, "used_default": True}, + ] == checks From b4cd8797b8592a8bf060a76eb7227f3f1ba61d32 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 2 Jun 2020 10:43:50 -0700 Subject: [PATCH 0028/1871] permission_checks is now _permission_checks --- datasette/app.py | 4 ++-- datasette/views/special.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index b8a5e23d..e3ad5fc7 100644 --- 
a/datasette/app.py +++ b/datasette/app.py @@ -289,7 +289,7 @@ class Datasette: pm.hook.prepare_jinja2_environment(env=self.jinja_env) self._register_renderers() - self.permission_checks = collections.deque(maxlen=30) + self._permission_checks = collections.deque(maxlen=30) self._root_token = os.urandom(32).hex() def sign(self, value, namespace="default"): @@ -445,7 +445,7 @@ class Datasette: if result is None: result = default used_default = True - self.permission_checks.append( + self._permission_checks.append( { "when": datetime.datetime.utcnow().isoformat(), "actor": actor, diff --git a/datasette/views/special.py b/datasette/views/special.py index b75355fb..811ed4cb 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -92,5 +92,5 @@ class PermissionsDebugView(BaseView): return await self.render( ["permissions_debug.html"], request, - {"permission_checks": reversed(self.ds.permission_checks)}, + {"permission_checks": reversed(self.ds._permission_checks)}, ) From 1d0bea157ac7074f23229af247565a78fa71c03f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 2 Jun 2020 14:06:53 -0700 Subject: [PATCH 0029/1871] New request.cookies property --- datasette/actor_auth_cookie.py | 9 ++------- datasette/utils/asgi.py | 7 +++++++ docs/internals.rst | 3 +++ 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/datasette/actor_auth_cookie.py b/datasette/actor_auth_cookie.py index 41f33fe9..f3a0f306 100644 --- a/datasette/actor_auth_cookie.py +++ b/datasette/actor_auth_cookie.py @@ -5,14 +5,9 @@ from http.cookies import SimpleCookie @hookimpl def actor_from_request(datasette, request): - cookies = SimpleCookie() - cookies.load( - dict(request.scope.get("headers") or []).get(b"cookie", b"").decode("utf-8") - ) - if "ds_actor" not in cookies: + if "ds_actor" not in request.cookies: return None - ds_actor = cookies["ds_actor"].value try: - return datasette.unsign(ds_actor, "actor") + return datasette.unsign(request.cookies["ds_actor"], "actor") except BadSignature: return None diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 24398b77..960532ca 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -4,6 +4,7 @@ from mimetypes import guess_type from urllib.parse import parse_qs, urlunparse, parse_qsl from pathlib import Path from html import escape +from http.cookies import SimpleCookie import re import aiofiles @@ -44,6 +45,12 @@ class Request: def host(self): return self.headers.get("host") or "localhost" + @property + def cookies(self): + cookies = SimpleCookie() + cookies.load(self.headers.get("cookie", "")) + return {key: value.value for key, value in cookies.items()} + @property def path(self): if self.scope.get("raw_path") is not None: diff --git a/docs/internals.rst b/docs/internals.rst index 68a35312..b3ad623f 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -27,6 +27,9 @@ The request object is passed to various plugin hooks. It represents an incoming ``.headers`` - dictionary (str -> str) A dictionary of incoming HTTP request headers. +``.cookies`` - dictionary (str -> str) + A dictionary of incoming cookies + ``.host`` - string The host header from the incoming request, e.g. ``latest.datasette.io`` or ``localhost``. 
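The pieces added in the commits above are designed to compose: ``actor_from_request`` implementations populate ``scope["actor"]``, ``permission_allowed`` implementations vote on individual actions, and ``datasette.permission_allowed()`` aggregates those votes with a configurable default. A minimal sketch of that flow - the ``my-action`` permission name and the ``my_view`` function are hypothetical, for illustration only:

.. code-block:: python

    from datasette import hookimpl
    from datasette.utils.asgi import Response


    @hookimpl
    def permission_allowed(actor, action):
        # Hypothetical policy: only the root actor may perform "my-action".
        # Returning None for other actions leaves the decision to other plugins.
        if action == "my-action":
            return actor is not None and actor.get("id") == "root"


    async def my_view(datasette, request):
        # actor_from_request hooks (for example the ds_actor cookie plugin)
        # have already placed the authenticated actor on the ASGI scope.
        actor = request.scope.get("actor")
        # permission_allowed() asks every plugin; with default=False the
        # action is denied if no plugin expresses an opinion.
        if not await datasette.permission_allowed(actor, "my-action", default=False):
            return Response("Permission denied", status=403)
        return Response("OK")
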
From 4fa7cf68536628344356d3ef8c92c25c249067a0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 2 Jun 2020 14:08:12 -0700 Subject: [PATCH 0030/1871] Flash messages mechanism, closes #790 --- datasette/app.py | 42 +++++++++++++++++++++++ datasette/static/app.css | 16 +++++++++ datasette/templates/base.html | 8 +++++ datasette/templates/messages_debug.html | 26 ++++++++++++++ datasette/utils/asgi.py | 4 +-- datasette/views/base.py | 16 +++++++++ datasette/views/special.py | 24 +++++++++++++ docs/internals.rst | 18 ++++++++++ docs/introspection.rst | 8 +++++ tests/fixtures.py | 6 ++++ tests/plugins/messages_output_renderer.py | 21 ++++++++++++ tests/test_api.py | 1 + tests/test_auth.py | 6 +--- tests/test_messages.py | 28 +++++++++++++++ 14 files changed, 217 insertions(+), 7 deletions(-) create mode 100644 datasette/templates/messages_debug.html create mode 100644 tests/plugins/messages_output_renderer.py create mode 100644 tests/test_messages.py diff --git a/datasette/app.py b/datasette/app.py index e3ad5fc7..41c73900 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -2,6 +2,7 @@ import asyncio import collections import datetime import hashlib +from http.cookies import SimpleCookie import itertools import json import os @@ -30,6 +31,7 @@ from .views.special import ( PatternPortfolioView, AuthTokenView, PermissionsDebugView, + MessagesDebugView, ) from .views.table import RowView, TableView from .renderer import json_renderer @@ -156,6 +158,11 @@ async def favicon(scope, receive, send): class Datasette: + # Message constants: + INFO = 1 + WARNING = 2 + ERROR = 3 + def __init__( self, files, @@ -423,6 +430,38 @@ class Datasette: # pylint: disable=no-member pm.hook.prepare_connection(conn=conn, database=database, datasette=self) + def add_message(self, request, message, type=INFO): + if not hasattr(request, "_messages"): + request._messages = [] + request._messages_should_clear = False + request._messages.append((message, type)) + + def _write_messages_to_response(self, request, response): + if getattr(request, "_messages", None): + # Set those messages + cookie = SimpleCookie() + cookie["ds_messages"] = self.sign(request._messages, "messages") + cookie["ds_messages"]["path"] = "/" + # TODO: Co-exist with existing set-cookie headers + assert "set-cookie" not in response.headers + response.headers["set-cookie"] = cookie.output(header="").lstrip() + elif getattr(request, "_messages_should_clear", False): + cookie = SimpleCookie() + cookie["ds_messages"] = "" + cookie["ds_messages"]["path"] = "/" + # TODO: Co-exist with existing set-cookie headers + assert "set-cookie" not in response.headers + response.headers["set-cookie"] = cookie.output(header="").lstrip() + + def _show_messages(self, request): + if getattr(request, "_messages", None): + request._messages_should_clear = True + messages = request._messages + request._messages = [] + return messages + else: + return [] + async def permission_allowed( self, actor, action, resource_type=None, resource_identifier=None, default=False ): @@ -808,6 +847,9 @@ class Datasette: add_route( PermissionsDebugView.as_asgi(self), r"/-/permissions$", ) + add_route( + MessagesDebugView.as_asgi(self), r"/-/messages$", + ) add_route( PatternPortfolioView.as_asgi(self), r"/-/patterns$", ) diff --git a/datasette/static/app.css b/datasette/static/app.css index 92f268ae..774a2235 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -351,3 +351,19 @@ p.zero-results { .type-float, .type-int { color: #666; } + +.message-info { + padding: 
1em; + border: 1px solid green; + background-color: #c7fbc7; +} +.message-warning { + padding: 1em; + border: 1px solid #ae7100; + background-color: #fbdda5; +} +.message-error { + padding: 1em; + border: 1px solid red; + background-color: pink; +} diff --git a/datasette/templates/base.html b/datasette/templates/base.html index d9fd945b..9b871d03 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -17,6 +17,14 @@
+{% block messages %} +{% if show_messages %} + {% for message, message_type in show_messages() %} +

{{ message }}

+ {% endfor %} +{% endif %} +{% endblock %} + {% block content %} {% endblock %}
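The messages block added to ``base.html`` above is the display half of the flash messages mechanism: ``datasette.add_message()`` queues a message, it travels in the signed ``ds_messages`` cookie, and the next rendered page shows it once and clears the cookie. A sketch of how a plugin view might queue a message and redirect - the view function and message text are illustrative, not part of this commit:

.. code-block:: python

    from datasette.utils.asgi import Response


    async def save_and_redirect(datasette, request):
        # The message is written to the signed ds_messages cookie when this
        # response is sent, then displayed and cleared by the next page load.
        datasette.add_message(request, "Settings saved", datasette.WARNING)
        return Response.redirect("/")
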
diff --git a/datasette/templates/messages_debug.html b/datasette/templates/messages_debug.html new file mode 100644 index 00000000..b2e1bc7c --- /dev/null +++ b/datasette/templates/messages_debug.html @@ -0,0 +1,26 @@ +{% extends "base.html" %} + +{% block title %}Debug messages{% endblock %} + +{% block content %} + +

Debug messages

+ +

Set a message:

+ +
+
+ +
+ +
+ +
+
+ +{% endblock %} diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 960532ca..5682da48 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -180,9 +180,9 @@ class AsgiLifespan: class AsgiView: - def dispatch_request(self, request, *args, **kwargs): + async def dispatch_request(self, request, *args, **kwargs): handler = getattr(self, request.method.lower(), None) - return handler(request, *args, **kwargs) + return await handler(request, *args, **kwargs) @classmethod def as_asgi(cls, *class_args, **class_kwargs): diff --git a/datasette/views/base.py b/datasette/views/base.py index 06b78d5f..2402406a 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -1,6 +1,7 @@ import asyncio import csv import itertools +from itsdangerous import BadSignature import json import re import time @@ -73,6 +74,20 @@ class BaseView(AsgiView): def database_color(self, database): return "ff0000" + async def dispatch_request(self, request, *args, **kwargs): + # Populate request_messages if ds_messages cookie is present + if self.ds: + try: + request._messages = self.ds.unsign( + request.cookies.get("ds_messages", ""), "messages" + ) + except BadSignature: + pass + response = await super().dispatch_request(request, *args, **kwargs) + if self.ds: + self.ds._write_messages_to_response(request, response) + return response + async def render(self, templates, request, context=None): context = context or {} template = self.ds.jinja_env.select_template(templates) @@ -81,6 +96,7 @@ class BaseView(AsgiView): **{ "database_url": self.database_url, "database_color": self.database_color, + "show_messages": lambda: self.ds._show_messages(request), "select_templates": [ "{}{}".format( "*" if template_name == template.name else "", template_name diff --git a/datasette/views/special.py b/datasette/views/special.py index 811ed4cb..37c04697 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -94,3 +94,27 @@ class PermissionsDebugView(BaseView): request, {"permission_checks": reversed(self.ds._permission_checks)}, ) + + +class MessagesDebugView(BaseView): + name = "messages_debug" + + def __init__(self, datasette): + self.ds = datasette + + async def get(self, request): + return await self.render(["messages_debug.html"], request) + + async def post(self, request): + post = await request.post_vars() + message = post.get("message", "") + message_type = post.get("message_type") or "INFO" + assert message_type in ("INFO", "WARNING", "ERROR", "all") + datasette = self.ds + if message_type == "all": + datasette.add_message(request, message, datasette.INFO) + datasette.add_message(request, message, datasette.WARNING) + datasette.add_message(request, message, datasette.ERROR) + else: + datasette.add_message(request, message, getattr(datasette, message_type)) + return Response.redirect("/") diff --git a/docs/internals.rst b/docs/internals.rst index b3ad623f..4d51d614 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -214,6 +214,24 @@ This method returns a signed string, which can be decoded and verified using :re Returns the original, decoded object that was passed to :ref:`datasette_sign`. If the signature is not valid this raises a ``itsdangerous.BadSignature`` exception. +.. 
_datasette_add_message: + +.add_message(request, message, message_type=datasette.INFO) +----------------------------------------------------------- + +``request`` - Request + The current Request object + +``message`` - string + The message string + +``message_type`` - constant, optional + The message type - ``datasette.INFO``, ``datasette.WARNING`` or ``datasette.ERROR`` + +Datasette's flash messaging mechanism allows you to add a message that will be displayed to the user on the next page that they visit. Messages are persisted in a ``ds_messages`` cookie. This method adds a message to that cookie. + +You can try out these messages (including the different visual styling of the three message types) using the ``/-/messages`` debugging tool. + .. _internals_database: Database class diff --git a/docs/introspection.rst b/docs/introspection.rst index e5d08dbc..084ee144 100644 --- a/docs/introspection.rst +++ b/docs/introspection.rst @@ -166,3 +166,11 @@ Shows the currently authenticated actor. Useful for debugging Datasette authenti "username": "some-user" } } + + +.. _MessagesDebugView: + +/-/messages +----------- + +The debug tool at ``/-/messages`` can be used to set flash messages to try out that feature. See :ref:`datasette_add_message` for details of this feature. diff --git a/tests/fixtures.py b/tests/fixtures.py index b2cfd3d6..daff0168 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -29,6 +29,12 @@ class TestResponse: self.headers = headers self.body = body + @property + def cookies(self): + cookie = SimpleCookie() + cookie.load(self.headers.get("set-cookie") or "") + return {key: value.value for key, value in cookie.items()} + @property def json(self): return json.loads(self.text) diff --git a/tests/plugins/messages_output_renderer.py b/tests/plugins/messages_output_renderer.py new file mode 100644 index 00000000..6b52f801 --- /dev/null +++ b/tests/plugins/messages_output_renderer.py @@ -0,0 +1,21 @@ +from datasette import hookimpl + + +def render_message_debug(datasette, request): + if request.args.get("add_msg"): + msg_type = request.args.get("type", "INFO") + datasette.add_message( + request, request.args["add_msg"], getattr(datasette, msg_type) + ) + return {"body": "Hello from message debug"} + + +@hookimpl +def register_output_renderer(datasette): + return [ + { + "extension": "message", + "render": render_message_debug, + "can_render": lambda: False, + } + ] diff --git a/tests/test_api.py b/tests/test_api.py index d7e7c03f..a5c6f6a2 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1262,6 +1262,7 @@ def test_plugins_json(app_client): expected = [ {"name": name, "static": False, "templates": False, "version": None} for name in ( + "messages_output_renderer.py", "my_plugin.py", "my_plugin_2.py", "register_output_renderer.py", diff --git a/tests/test_auth.py b/tests/test_auth.py index ddf328af..ac8d7abe 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -9,11 +9,7 @@ def test_auth_token(app_client): response = app_client.get(path, allow_redirects=False,) assert 302 == response.status assert "/" == response.headers["Location"] - set_cookie = response.headers["set-cookie"] - assert set_cookie.endswith("; Path=/") - assert set_cookie.startswith("ds_actor=") - cookie_value = set_cookie.split("ds_actor=")[1].split("; Path=/")[0] - assert {"id": "root"} == app_client.ds.unsign(cookie_value, "actor") + assert {"id": "root"} == app_client.ds.unsign(response.cookies["ds_actor"], "actor") # Check that a second with same token fails assert 
app_client.ds._root_token is None assert 403 == app_client.get(path, allow_redirects=False,).status diff --git a/tests/test_messages.py b/tests/test_messages.py new file mode 100644 index 00000000..d17e015c --- /dev/null +++ b/tests/test_messages.py @@ -0,0 +1,28 @@ +from .fixtures import app_client +import pytest + + +@pytest.mark.parametrize( + "qs,expected", + [ + ("add_msg=added-message", [["added-message", 1]]), + ("add_msg=added-warning&type=WARNING", [["added-warning", 2]]), + ("add_msg=added-error&type=ERROR", [["added-error", 3]]), + ], +) +def test_add_message_sets_cookie(app_client, qs, expected): + response = app_client.get("/fixtures.message?{}".format(qs)) + signed = response.cookies["ds_messages"] + decoded = app_client.ds.unsign(signed, "messages") + assert expected == decoded + + +def test_messages_are_displayed_and_cleared(app_client): + # First set the message cookie + set_msg_response = app_client.get("/fixtures.message?add_msg=xmessagex") + # Now access a page that displays messages + response = app_client.get("/", cookies=set_msg_response.cookies) + # Messages should be in that HTML + assert "xmessagex" in response.text + # Cookie should have been set that clears messages + assert "" == response.cookies["ds_messages"] From 5278c04682929f0b155102827f9150c7b2112215 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 2 Jun 2020 14:29:12 -0700 Subject: [PATCH 0031/1871] More consistent use of response.text/response.json in tests, closes #792 --- tests/test_api.py | 10 ++-------- tests/test_config_dir.py | 13 ++++++------- tests/test_csv.py | 2 +- tests/test_html.py | 4 +--- tests/test_plugins.py | 10 ++++------ 5 files changed, 14 insertions(+), 25 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index a5c6f6a2..7ed4cced 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1762,16 +1762,10 @@ def test_common_prefix_database_names(app_client_conflicting_database_names): # https://github.com/simonw/datasette/issues/597 assert ["fixtures", "foo", "foo-bar"] == [ d["name"] - for d in json.loads( - app_client_conflicting_database_names.get("/-/databases.json").body.decode( - "utf8" - ) - ) + for d in app_client_conflicting_database_names.get("/-/databases.json").json ] for db_name, path in (("foo", "/foo.json"), ("foo-bar", "/foo-bar.json")): - data = json.loads( - app_client_conflicting_database_names.get(path).body.decode("utf8") - ) + data = app_client_conflicting_database_names.get(path).json assert db_name == data["database"] diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index 50e67f80..490b1f1d 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -84,21 +84,20 @@ def config_dir_client(tmp_path_factory): def test_metadata(config_dir_client): response = config_dir_client.get("/-/metadata.json") assert 200 == response.status - assert METADATA == json.loads(response.text) + assert METADATA == response.json def test_config(config_dir_client): response = config_dir_client.get("/-/config.json") assert 200 == response.status - config = json.loads(response.text) - assert 60 == config["default_cache_ttl"] - assert not config["allow_sql"] + assert 60 == response.json["default_cache_ttl"] + assert not response.json["allow_sql"] def test_plugins(config_dir_client): response = config_dir_client.get("/-/plugins.json") assert 200 == response.status - assert "hooray.py" in {p["name"] for p in json.loads(response.text)} + assert "hooray.py" in {p["name"] for p in response.json} def 
test_templates_and_plugin(config_dir_client): @@ -123,7 +122,7 @@ def test_static_directory_browsing_not_allowed(config_dir_client): def test_databases(config_dir_client): response = config_dir_client.get("/-/databases.json") assert 200 == response.status - databases = json.loads(response.text) + databases = response.json assert 2 == len(databases) databases.sort(key=lambda d: d["name"]) assert "demo" == databases[0]["name"] @@ -141,4 +140,4 @@ def test_metadata_yaml(tmp_path_factory, filename): client.ds = ds response = client.get("/-/metadata.json") assert 200 == response.status - assert {"title": "Title from metadata"} == json.loads(response.text) + assert {"title": "Title from metadata"} == response.json diff --git a/tests/test_csv.py b/tests/test_csv.py index 1030c2bb..42022726 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -101,7 +101,7 @@ def test_csv_with_non_ascii_characters(app_client): ) assert response.status == 200 assert "text/plain; charset=utf-8" == response.headers["content-type"] - assert "text,number\r\n𝐜𝐢𝐭𝐢𝐞𝐬,1\r\nbob,2\r\n" == response.body.decode("utf8") + assert "text,number\r\n𝐜𝐢𝐭𝐢𝐞𝐬,1\r\nbob,2\r\n" == response.text def test_max_csv_mb(app_client_csv_max_mb_one): diff --git a/tests/test_html.py b/tests/test_html.py index e602bf0e..2d2a141a 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -606,9 +606,7 @@ def test_row_html_simple_primary_key(app_client): def test_table_not_exists(app_client): - assert "Table not found: blah" in app_client.get("/fixtures/blah").body.decode( - "utf8" - ) + assert "Table not found: blah" in app_client.get("/fixtures/blah").text def test_table_html_no_primary_key(app_client): diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 7a3fb49a..f69e7fa7 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -218,7 +218,7 @@ def test_plugin_config_file(app_client): ) def test_plugins_extra_body_script(app_client, path, expected_extra_body_script): r = re.compile(r"") - json_data = r.search(app_client.get(path).body.decode("utf8")).group(1) + json_data = r.search(app_client.get(path).text).group(1) actual_data = json.loads(json_data) assert expected_extra_body_script == actual_data @@ -331,7 +331,7 @@ def view_names_client(tmp_path_factory): def test_view_names(view_names_client, path, view_name): response = view_names_client.get(path) assert response.status == 200 - assert "view_name:{}".format(view_name) == response.body.decode("utf8") + assert "view_name:{}".format(view_name) == response.text def test_register_output_renderer_no_parameters(app_client): @@ -345,8 +345,7 @@ def test_register_output_renderer_all_parameters(app_client): assert 200 == response.status # Lots of 'at 0x103a4a690' in here - replace those so we can do # an easy comparison - body = response.body.decode("utf-8") - body = at_memory_re.sub(" at 0xXXX", body) + body = at_memory_re.sub(" at 0xXXX", response.text) assert { "1+1": 2, "datasette": "", @@ -468,7 +467,6 @@ def test_register_facet_classes(app_client): response = app_client.get( "/fixtures/compound_three_primary_keys.json?_dummy_facet=1" ) - data = json.loads(response.body) assert [ { "name": "pk1", @@ -502,7 +500,7 @@ def test_register_facet_classes(app_client): "name": "pk3", "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet=pk3", }, - ] == data["suggested_facets"] + ] == response.json["suggested_facets"] def test_actor_from_request(app_client): From a7137dfe069e5fceca56f78631baebd4a6a19967 Mon Sep 17 00:00:00 2001 From: 
Simon Willison Date: Tue, 2 Jun 2020 14:49:28 -0700 Subject: [PATCH 0032/1871] /-/plugins now shows details of hooks, closes #794 Also added /-/plugins?all=1 parameter to see default plugins. --- datasette/app.py | 9 ++++--- datasette/plugins.py | 1 + docs/introspection.rst | 5 +++- tests/test_api.py | 61 ++++++++++++++++++++++++++++++++++++------ 4 files changed, 64 insertions(+), 12 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 41c73900..22fb04c6 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -625,9 +625,9 @@ class Datasette: }, } - def _plugins(self, show_all=False): + def _plugins(self, request): ps = list(get_plugins()) - if not show_all: + if not request.args.get("all"): ps = [p for p in ps if p["name"] not in DEFAULT_PLUGINS] return [ { @@ -635,6 +635,7 @@ class Datasette: "static": p["static_path"] is not None, "templates": p["templates_path"] is not None, "version": p.get("version"), + "hooks": p["hooks"], } for p in ps ] @@ -822,7 +823,9 @@ class Datasette: r"/-/versions(?P(\.json)?)$", ) add_route( - JsonDataView.as_asgi(self, "plugins.json", self._plugins), + JsonDataView.as_asgi( + self, "plugins.json", self._plugins, needs_request=True + ), r"/-/plugins(?P(\.json)?)$", ) add_route( diff --git a/datasette/plugins.py b/datasette/plugins.py index 26d4fd63..b35b750f 100644 --- a/datasette/plugins.py +++ b/datasette/plugins.py @@ -49,6 +49,7 @@ def get_plugins(): "name": plugin.__name__, "static_path": static_path, "templates_path": templates_path, + "hooks": [h.name for h in pm.get_hookcallers(plugin)], } distinfo = plugin_to_distinfo.get(plugin) if distinfo: diff --git a/docs/introspection.rst b/docs/introspection.rst index 084ee144..08006529 100644 --- a/docs/introspection.rst +++ b/docs/introspection.rst @@ -78,10 +78,13 @@ Shows a list of currently installed plugins and their versions. `Plugins example "name": "datasette_cluster_map", "static": true, "templates": false, - "version": "0.4" + "version": "0.10", + "hooks": ["extra_css_urls", "extra_js_urls", "extra_body_script"] } ] +Add ``?all=1`` to include details of the default plugins baked into Datasette. + .. 
_JsonDataView_config: /-/config diff --git a/tests/test_api.py b/tests/test_api.py index 7ed4cced..4b752f31 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1260,14 +1260,59 @@ def test_threads_json(app_client): def test_plugins_json(app_client): response = app_client.get("/-/plugins.json") expected = [ - {"name": name, "static": False, "templates": False, "version": None} - for name in ( - "messages_output_renderer.py", - "my_plugin.py", - "my_plugin_2.py", - "register_output_renderer.py", - "view_name.py", - ) + { + "name": "messages_output_renderer.py", + "static": False, + "templates": False, + "version": None, + "hooks": ["register_output_renderer"], + }, + { + "name": "my_plugin.py", + "static": False, + "templates": False, + "version": None, + "hooks": [ + "actor_from_request", + "extra_body_script", + "extra_css_urls", + "extra_js_urls", + "extra_template_vars", + "permission_allowed", + "prepare_connection", + "prepare_jinja2_environment", + "register_facet_classes", + "render_cell", + ], + }, + { + "name": "my_plugin_2.py", + "static": False, + "templates": False, + "version": None, + "hooks": [ + "actor_from_request", + "asgi_wrapper", + "extra_js_urls", + "extra_template_vars", + "permission_allowed", + "render_cell", + ], + }, + { + "name": "register_output_renderer.py", + "static": False, + "templates": False, + "version": None, + "hooks": ["register_output_renderer"], + }, + { + "name": "view_name.py", + "static": False, + "templates": False, + "version": None, + "hooks": ["extra_template_vars"], + }, ] assert expected == sorted(response.json, key=lambda p: p["name"]) From 3c5e4f266dfa07bd0bbb530d17019207f787d806 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 2 Jun 2020 15:34:50 -0700 Subject: [PATCH 0033/1871] Added messages to pattern portfolio, refs #790 --- datasette/templates/patterns.html | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html index 9ea4ae42..73443ac2 100644 --- a/datasette/templates/patterns.html +++ b/datasette/templates/patterns.html @@ -20,6 +20,12 @@ attraction_characteristic

+

Messages

+
+

Example message

+

Example message

+

Example message

+

.bd for /

Datasette Fixtures

From 9690ce606823bbfceb0c50d59e03adf7bb1a8475 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 2 Jun 2020 17:05:33 -0700 Subject: [PATCH 0034/1871] More efficient modifiation of scope --- datasette/app.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 22fb04c6..f9bf91a8 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -901,13 +901,14 @@ class DatasetteRouter(AsgiRouter): base_url = self.ds.config("base_url") if base_url != "/" and path.startswith(base_url): path = "/" + path[len(base_url) :] + scope_modifications = {} # Apply force_https_urls, if set if ( self.ds.config("force_https_urls") and scope["type"] == "http" and scope.get("scheme") != "https" ): - scope = dict(scope, scheme="https") + scope_modifications["scheme"] = "https" # Handle authentication actor = None for actor in pm.hook.actor_from_request( @@ -919,7 +920,10 @@ class DatasetteRouter(AsgiRouter): actor = await actor if actor: break - return await super().route_path(dict(scope, actor=actor), receive, send, path) + scope_modifications["actor"] = actor + return await super().route_path( + dict(scope, **scope_modifications), receive, send, path + ) async def handle_404(self, scope, receive, send, exception=None): # If URL has a trailing slash, redirect to URL without it From 0934844c0b6d124163d0185fb6a41ba5a71433da Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 3 Jun 2020 06:48:39 -0700 Subject: [PATCH 0035/1871] request.post_vars() no longer discards empty values --- datasette/utils/asgi.py | 2 +- tests/test_internals_request.py | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 5682da48..c7810a50 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -80,7 +80,7 @@ class Request: body += message.get("body", b"") more_body = message.get("more_body", False) - return dict(parse_qsl(body.decode("utf-8"))) + return dict(parse_qsl(body.decode("utf-8"), keep_blank_values=True)) @classmethod def fake(cls, path_with_query_string, method="GET", scheme="http"): diff --git a/tests/test_internals_request.py b/tests/test_internals_request.py index 5c9b254b..433b23d5 100644 --- a/tests/test_internals_request.py +++ b/tests/test_internals_request.py @@ -16,10 +16,14 @@ async def test_request_post_vars(): } async def receive(): - return {"type": "http.request", "body": b"foo=bar&baz=1", "more_body": False} + return { + "type": "http.request", + "body": b"foo=bar&baz=1&empty=", + "more_body": False, + } request = Request(scope, receive) - assert {"foo": "bar", "baz": "1"} == await request.post_vars() + assert {"foo": "bar", "baz": "1", "empty": ""} == await request.post_vars() def test_request_args(): From aa82d0370463580f2cb10d9617f1bcbe45cc994a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 3 Jun 2020 08:16:50 -0700 Subject: [PATCH 0036/1871] Basic writable canned queries Refs #698. First working version of this feature. 
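A small editorial aside on the ``keep_blank_values=True`` change in the patch above - the behaviour difference comes straight from the standard library's ``parse_qsl``:

.. code-block:: python

    from urllib.parse import parse_qsl

    dict(parse_qsl("foo=bar&empty="))
    # {'foo': 'bar'} - the blank value is silently dropped

    dict(parse_qsl("foo=bar&empty=", keep_blank_values=True))
    # {'foo': 'bar', 'empty': ''} - the blank value is preserved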
* request.post_vars() no longer discards empty values --- datasette/templates/query.html | 7 ++- datasette/views/database.py | 60 +++++++++++++++++++++-- datasette/views/table.py | 18 +++++++ docs/sql_queries.rst | 65 +++++++++++++++++++++++-- tests/fixtures.py | 37 +++++++++++--- tests/test_canned_write.py | 88 ++++++++++++++++++++++++++++++++++ 6 files changed, 256 insertions(+), 19 deletions(-) create mode 100644 tests/test_canned_write.py diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 2c8c05a0..52896e96 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -27,11 +27,12 @@ {% endblock %} {% block content %} +

{{ metadata.title or database }}

{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} -
+

Custom SQL query{% if display_rows %} returning {% if truncated %}more than {% endif %}{{ "{:,}".format(display_rows|length) }} row{% if display_rows|length == 1 %}{% else %}s{% endif %}{% endif %} {% if hide_sql %}(show){% else %}(hide){% endif %}

{% if not hide_sql %} {% if editable and config.allow_sql %} @@ -74,7 +75,9 @@ {% else %} -

0 results

+ {% if not canned_write %} +

0 results

+ {% endif %} {% endif %} {% include "_codemirror_foot.html" %} diff --git a/datasette/views/database.py b/datasette/views/database.py index 15545fb8..558dd0f0 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -106,6 +106,8 @@ class QueryView(DataView): canned_query=None, metadata=None, _size=None, + named_parameters=None, + write=False, ): params = {key: request.args.get(key) for key in request.args} if "sql" in params: @@ -113,7 +115,7 @@ class QueryView(DataView): if "_shape" in params: params.pop("_shape") # Extract any :named parameters - named_parameters = self.re_named_parameter.findall(sql) + named_parameters = named_parameters or self.re_named_parameter.findall(sql) named_parameter_values = { named_parameter: params.get(named_parameter) or "" for named_parameter in named_parameters @@ -129,12 +131,60 @@ class QueryView(DataView): extra_args["custom_time_limit"] = int(params["_timelimit"]) if _size: extra_args["page_size"] = _size - results = await self.ds.execute( - database, sql, params, truncate=True, **extra_args - ) - columns = [r[0] for r in results.description] templates = ["query-{}.html".format(to_css_class(database)), "query.html"] + + # Execute query - as write or as read + if write: + if request.method == "POST": + params = await request.post_vars() + try: + cursor = await self.ds.databases[database].execute_write( + sql, params, block=True + ) + message = metadata.get( + "on_success_message" + ) or "Query executed, {} row{} affected".format( + cursor.rowcount, "" if cursor.rowcount == 1 else "s" + ) + message_type = self.ds.INFO + redirect_url = metadata.get("on_success_redirect") + except Exception as e: + message = metadata.get("on_error_message") or str(e) + message_type = self.ds.ERROR + redirect_url = metadata.get("on_error_redirect") + self.ds.add_message(request, message, message_type) + return self.redirect(request, redirect_url or request.path) + else: + + async def extra_template(): + return { + "request": request, + "path_with_added_args": path_with_added_args, + "path_with_removed_args": path_with_removed_args, + "named_parameter_values": named_parameter_values, + "canned_query": canned_query, + "success_message": request.args.get("_success") or "", + "canned_write": True, + } + + return ( + { + "database": database, + "rows": [], + "truncated": False, + "columns": [], + "query": {"sql": sql, "params": params}, + }, + extra_template, + templates, + ) + else: # Not a write + results = await self.ds.execute( + database, sql, params, truncate=True, **extra_args + ) + columns = [r[0] for r in results.description] + if canned_query: templates.insert( 0, diff --git a/datasette/views/table.py b/datasette/views/table.py index 2e9515c3..79bf8b08 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -221,6 +221,22 @@ class RowTableShared(DataView): class TableView(RowTableShared): name = "table" + async def post(self, request, db_name, table_and_format): + # Handle POST to a canned query + canned_query = self.ds.get_canned_query(db_name, table_and_format) + assert canned_query, "You may only POST to a canned query" + return await QueryView(self.ds).data( + request, + db_name, + None, + canned_query["sql"], + metadata=canned_query, + editable=False, + canned_query=table_and_format, + named_parameters=canned_query.get("params"), + write=bool(canned_query.get("write")), + ) + async def data( self, request, @@ -241,6 +257,8 @@ class TableView(RowTableShared): metadata=canned_query, editable=False, canned_query=table, + 
named_parameters=canned_query.get("params"), + write=bool(canned_query.get("write")), ) db = self.ds.databases[database] diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index c3efd930..dc239a84 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -161,11 +161,12 @@ You can set a default fragment hash that will be included in the link to the can { "databases": { - "fixtures": { - "queries": { - "neighborhood_search": { - "sql": "select neighborhood, facet_cities.name, state\nfrom facetable join facet_cities on facetable.city_id = facet_cities.id\nwhere neighborhood like '%' || :text || '%' order by neighborhood;", - "fragment": "fragment-goes-here" + "fixtures": { + "queries": { + "neighborhood_search": { + "sql": "select neighborhood, facet_cities.name, state\nfrom facetable join facet_cities on facetable.city_id = facet_cities.id\nwhere neighborhood like '%' || :text || '%' order by neighborhood;", + "fragment": "fragment-goes-here" + } } } } @@ -173,6 +174,60 @@ You can set a default fragment hash that will be included in the link to the can `See here `__ for a demo of this in action. +.. _canned_queries_writable: + +Writable canned queries +~~~~~~~~~~~~~~~~~~~~~~~ + +Canned queries by default are read-only. You can use the ``"write": true`` key to indicate that a canned query can write to the database. + +.. code-block:: json + + { + "databases": { + "mydatabase": { + "queries": { + "add_name": { + "sql": "INSERT INTO names (name) VALUES (:name)", + "write": true + } + } + } + } + } + +This configuration will create a page at ``/mydatabase/add_name`` displaying a form with a ``name`` field. Submitting that form will execute the configured ``INSERT`` query. + +You can customize how Datasette represents success and errors using the following optional properties: + +- ``on_success_message`` - the message shown when a query is successful +- ``on_success_redirect`` - the path or URL the user is redirected to on success +- ``on_error_message`` - the message shown when a query throws an error +- ``on_error_redirect`` - the path or URL the user is redirected to on error + +For example: + +.. code-block:: json + + { + "databases": { + "mydatabase": { + "queries": { + "add_name": { + "sql": "INSERT INTO names (name) VALUES (:name)", + "write": true, + "on_success_message": "Name inserted", + "on_success_redirect": "/mydatabase/names", + "on_error_message": "Name insert failed", + "on_error_redirect": "/mydatabase" + } + } + } + } + } + +You may wish to use this feature in conjunction with :ref:`authentication`. + .. 
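A practical note on the example above (an editorial sketch, not part of the patch): the table targeted by a writable canned query has to exist already - Datasette only executes the configured SQL. Assuming the ``mydatabase``/``names`` example, the table could be created like this before starting Datasette:

.. code-block:: python

    import sqlite3

    # Create the table that the "add_name" canned query inserts into
    conn = sqlite3.connect("mydatabase.db")
    conn.execute("CREATE TABLE IF NOT EXISTS names (name TEXT)")
    conn.commit()
    conn.close()

Starting ``datasette mydatabase.db`` then serves the form at ``/mydatabase/add_name``.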
_pagination: Pagination diff --git a/tests/fixtures.py b/tests/fixtures.py index daff0168..78a54c68 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -14,7 +14,7 @@ import string import tempfile import textwrap import time -from urllib.parse import unquote, quote +from urllib.parse import unquote, quote, urlencode # This temp file is used by one of the plugin config tests @@ -54,10 +54,26 @@ class TestClient: async def get( self, path, allow_redirects=True, redirect_count=0, method="GET", cookies=None ): - return await self._get(path, allow_redirects, redirect_count, method, cookies) + return await self._request( + path, allow_redirects, redirect_count, method, cookies + ) - async def _get( - self, path, allow_redirects=True, redirect_count=0, method="GET", cookies=None + @async_to_sync + async def post( + self, path, post_data=None, allow_redirects=True, redirect_count=0, cookies=None + ): + return await self._request( + path, allow_redirects, redirect_count, "POST", cookies, post_data + ) + + async def _request( + self, + path, + allow_redirects=True, + redirect_count=0, + method="GET", + cookies=None, + post_data=None, ): query_string = b"" if "?" in path: @@ -83,7 +99,13 @@ class TestClient: "headers": headers, } instance = ApplicationCommunicator(self.asgi_app, scope) - await instance.send_input({"type": "http.request"}) + + if post_data: + body = urlencode(post_data, doseq=True).encode("utf-8") + await instance.send_input({"type": "http.request", "body": body}) + else: + await instance.send_input({"type": "http.request"}) + # First message back should be response.start with headers and status messages = [] start = await instance.receive_output(2) @@ -110,7 +132,7 @@ class TestClient: redirect_count, self.max_redirects ) location = response.headers["Location"] - return await self._get( + return await self._request( location, allow_redirects=True, redirect_count=redirect_count + 1 ) return response @@ -128,6 +150,7 @@ def make_app_client( inspect_data=None, static_mounts=None, template_dir=None, + metadata=None, ): with tempfile.TemporaryDirectory() as tmpdir: filepath = os.path.join(tmpdir, filename) @@ -161,7 +184,7 @@ def make_app_client( immutables=immutables, memory=memory, cors=cors, - metadata=METADATA, + metadata=metadata or METADATA, plugins_dir=PLUGINS_DIR, config=config, inspect_data=inspect_data, diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py new file mode 100644 index 00000000..52c8aec2 --- /dev/null +++ b/tests/test_canned_write.py @@ -0,0 +1,88 @@ +import pytest +from .fixtures import make_app_client + + +@pytest.fixture +def canned_write_client(): + for client in make_app_client( + extra_databases={"data.db": "create table names (name text)"}, + metadata={ + "databases": { + "data": { + "queries": { + "add_name": { + "sql": "insert into names (name) values (:name)", + "write": True, + "on_success_redirect": "/data/add_name?success", + }, + "add_name_specify_id": { + "sql": "insert into names (rowid, name) values (:rowid, :name)", + "write": True, + "on_error_redirect": "/data/add_name_specify_id?error", + }, + "delete_name": { + "sql": "delete from names where rowid = :rowid", + "write": True, + "on_success_message": "Name deleted", + }, + "update_name": { + "sql": "update names set name = :name where rowid = :rowid", + "params": ["rowid", "name"], + "write": True, + }, + } + } + } + }, + ): + yield client + + +def test_insert(canned_write_client): + response = canned_write_client.post( + "/data/add_name", {"name": "Hello"}, 
allow_redirects=False + ) + assert 302 == response.status + assert "/data/add_name?success" == response.headers["Location"] + messages = canned_write_client.ds.unsign( + response.cookies["ds_messages"], "messages" + ) + assert [["Query executed, 1 row affected", 1]] == messages + + +def test_custom_success_message(canned_write_client): + response = canned_write_client.post( + "/data/delete_name", {"rowid": 1}, allow_redirects=False + ) + assert 302 == response.status + messages = canned_write_client.ds.unsign( + response.cookies["ds_messages"], "messages" + ) + assert [["Name deleted", 1]] == messages + + +def test_insert_error(canned_write_client): + canned_write_client.post("/data/add_name", {"name": "Hello"}) + response = canned_write_client.post( + "/data/add_name_specify_id", + {"rowid": 1, "name": "Should fail"}, + allow_redirects=False, + ) + assert 302 == response.status + assert "/data/add_name_specify_id?error" == response.headers["Location"] + messages = canned_write_client.ds.unsign( + response.cookies["ds_messages"], "messages" + ) + assert [["UNIQUE constraint failed: names.rowid", 3]] == messages + # How about with a custom error message? + canned_write_client.ds._metadata["databases"]["data"]["queries"][ + "add_name_specify_id" + ]["on_error_message"] = "ERROR" + response = canned_write_client.post( + "/data/add_name_specify_id", + {"rowid": 1, "name": "Should fail"}, + allow_redirects=False, + ) + assert [["ERROR", 3]] == canned_write_client.ds.unsign( + response.cookies["ds_messages"], "messages" + ) From 9cb44be42f012a68c8c3904a37008200cc7bb1f4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 3 Jun 2020 14:04:40 -0700 Subject: [PATCH 0037/1871] Docs and tests for "params", closes #797 --- docs/sql_queries.rst | 70 ++++++++++++++++++++++++++++++-------- tests/test_canned_write.py | 7 +++- 2 files changed, 61 insertions(+), 16 deletions(-) diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index dc239a84..aa1edc98 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -121,32 +121,68 @@ Here's an example of a canned query with a named parameter: .. code-block:: sql select neighborhood, facet_cities.name, state - from facetable join facet_cities on facetable.city_id = facet_cities.id - where neighborhood like '%' || :text || '%' order by neighborhood; + from facetable + join facet_cities on facetable.city_id = facet_cities.id + where neighborhood like '%' || :text || '%' + order by neighborhood; -In the canned query JSON it looks like this: +In the canned query metadata (here :ref:`metadata_yaml` as ``metadata.yaml``) it looks like this: + +.. code-block:: yaml + + databases: + fixtures: + queries: + neighborhood_search: + sql: |- + select neighborhood, facet_cities.name, state + from facetable + join facet_cities on facetable.city_id = facet_cities.id + where neighborhood like '%' || :text || '%' + order by neighborhood + title: Search neighborhoods + +Here's the equivalent using JSON (as ``metadata.json``): .. 
code-block:: json { "databases": { - "fixtures": { - "queries": { - "neighborhood_search": { - "sql": "select neighborhood, facet_cities.name, state\nfrom facetable join facet_cities on facetable.city_id = facet_cities.id\nwhere neighborhood like '%' || :text || '%' order by neighborhood;", - "title": "Search neighborhoods", - "description_html": "Demonstrating simple like search" - } - } - } + "fixtures": { + "queries": { + "neighborhood_search": { + "sql": "select neighborhood, facet_cities.name, state\nfrom facetable\n join facet_cities on facetable.city_id = facet_cities.id\nwhere neighborhood like '%' || :text || '%'\norder by neighborhood", + "title": "Search neighborhoods" + } + } + } } } +Note that we are using SQLite string concatenation here - the ``||`` operator - to add wildcard ``%`` characters to the string provided by the user. + You can try this canned query out here: https://latest.datasette.io/fixtures/neighborhood_search?text=town -Note that we are using SQLite string concatenation here - the ``||`` operator - -to add wildcard ``%`` characters to the string provided by the user. +In this example the ``:text`` named parameter is automatically extracted from the query using a regular expression. + +You can alternatively provide an explicit list of named parameters using the ``"params"`` key, like this: + +.. code-block:: yaml + + databases: + fixtures: + queries: + neighborhood_search: + params: + - text + sql: |- + select neighborhood, facet_cities.name, state + from facetable + join facet_cities on facetable.city_id = facet_cities.id + where neighborhood like '%' || :text || '%' + order by neighborhood + title: Search neighborhoods .. _canned_queries_default_fragment: @@ -181,6 +217,8 @@ Writable canned queries Canned queries by default are read-only. You can use the ``"write": true`` key to indicate that a canned query can write to the database. +You may wish to use this feature in conjunction with :ref:`authentication`. + .. code-block:: json { @@ -226,7 +264,9 @@ For example: } } -You may wish to use this feature in conjunction with :ref:`authentication`. +You can use ``"params"`` to explicitly list the named parameters that should be displayed as form fields - otherwise they will be automatically detected. + +You can pre-populate form fields when the page first loads using a querystring, e.g. ``/mydatabase/add_name?name=Prepopulated``. The user will have to submit the form to execute the query. .. 
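As a rough sketch of driving such a query programmatically (editorial illustration - the host, database and query name are the ones used above, and ``requests`` is just a convenient HTTP client). Note that once CSRF protection lands later in this series, the ``csrftoken`` hidden field rendered in the form may also need to be included in the POST body.

.. code-block:: python

    import requests

    base = "http://127.0.0.1:8001/mydatabase/add_name"

    # Pre-populated form: the querystring fills in the "name" field
    requests.get(base + "?name=Prepopulated")

    # Submitting the form executes the configured INSERT
    response = requests.post(
        base, data={"name": "Prepopulated"}, allow_redirects=False
    )
    print(response.status_code)              # expected: 302
    print(response.headers.get("Location"))  # on_success_redirect if set, else the query page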
_pagination: diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index 52c8aec2..692d726e 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -27,7 +27,7 @@ def canned_write_client(): }, "update_name": { "sql": "update names set name = :name where rowid = :rowid", - "params": ["rowid", "name"], + "params": ["rowid", "name", "extra"], "write": True, }, } @@ -86,3 +86,8 @@ def test_insert_error(canned_write_client): assert [["ERROR", 3]] == canned_write_client.ds.unsign( response.cookies["ds_messages"], "messages" ) + + +def test_custom_params(canned_write_client): + response = canned_write_client.get("/data/update_name?extra=foo") + assert '' in response.text From 8524866fdf0b43a68e1ee24c419c80b5cddaaeca Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 4 Jun 2020 16:58:19 -0700 Subject: [PATCH 0038/1871] Link to authentication docs --- docs/plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index fb2843f4..3777bba7 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -953,7 +953,7 @@ actor_from_request(datasette, request) ``request`` - object The current HTTP :ref:`internals_request`. -This is part of Datasette's authentication and permissions system. The function should attempt to authenticate an actor (either a user or an API actor of some sort) based on information in the request. +This is part of Datasette's :ref:`authentication and permissions system `. The function should attempt to authenticate an actor (either a user or an API actor of some sort) based on information in the request. If it cannot authenticate an actor, it should return ``None``. Otherwise it should return a dictionary representing that actor. From 2074efa5a49f72cf1c47c28894de6c0b1f0fb3b1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 4 Jun 2020 18:38:32 -0700 Subject: [PATCH 0039/1871] Another actor_from_request example --- docs/plugins.rst | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/docs/plugins.rst b/docs/plugins.rst index 3777bba7..8004e118 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -957,6 +957,27 @@ This is part of Datasette's :ref:`authentication and permissions system Date: Thu, 4 Jun 2020 20:10:40 -0700 Subject: [PATCH 0040/1871] More things you can do with plugins --- docs/plugins.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/plugins.rst b/docs/plugins.rst index 8004e118..ecc7cbf1 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -18,6 +18,8 @@ Things you can do with plugins include: * Make new custom SQL functions available for use within Datasette, for example `datasette-haversine `__ and `datasette-jellyfish `__. +* Define custom output formats with custom extensions, for example `datasette-atom `__ and + `datasette-ics `__. * Add template functions that can be called within your Jinja custom templates, for example `datasette-render-markdown `__. 
* Customize how database values are rendered in the Datasette interface, for example From 0da7f49b24e429e81317e370cb01de941f1b873e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jun 2020 10:52:50 -0700 Subject: [PATCH 0041/1871] Rename RequestParameters to MultiParams, refs #799 --- datasette/utils/__init__.py | 25 +++++++++++++++++-------- datasette/utils/asgi.py | 4 ++-- datasette/views/table.py | 4 ++-- docs/internals.rst | 10 ++++++---- tests/test_utils.py | 9 +++++++++ 5 files changed, 36 insertions(+), 16 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 2eb31502..083fba0c 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -753,15 +753,24 @@ def escape_fts(query): ) -class RequestParameters: +class MultiParams: def __init__(self, data): - # data is a dictionary of key => [list, of, values] - assert isinstance(data, dict), "data should be a dictionary of key => [list]" - for key in data: - assert isinstance( - data[key], list - ), "data should be a dictionary of key => [list]" - self._data = data + # data is a dictionary of key => [list, of, values] or a list of [["key", "value"]] pairs + if isinstance(data, dict): + for key in data: + assert isinstance( + data[key], list + ), "dictionary data should be a dictionary of key => [list]" + self._data = data + elif isinstance(data, list): + new_data = {} + for item in data: + assert ( + isinstance(item, list) and len(item) == 2 + ), "list data should be a list of [key, value] pairs" + key, value = item + new_data.setdefault(key, []).append(value) + self._data = new_data def __contains__(self, key): return key in self._data diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index c7810a50..ba131dc8 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -1,5 +1,5 @@ import json -from datasette.utils import RequestParameters +from datasette.utils import MultiParams from mimetypes import guess_type from urllib.parse import parse_qs, urlunparse, parse_qsl from pathlib import Path @@ -68,7 +68,7 @@ class Request: @property def args(self): - return RequestParameters(parse_qs(qs=self.query_string)) + return MultiParams(parse_qs(qs=self.query_string)) async def post_vars(self): body = [] diff --git a/datasette/views/table.py b/datasette/views/table.py index 79bf8b08..ec1b6c7c 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -8,7 +8,7 @@ from datasette.plugins import pm from datasette.database import QueryInterrupted from datasette.utils import ( CustomRow, - RequestParameters, + MultiParams, append_querystring, compound_keys_after_sql, escape_sqlite, @@ -286,7 +286,7 @@ class TableView(RowTableShared): order_by = "" # Ensure we don't drop anything with an empty value e.g. ?name__exact= - args = RequestParameters( + args = MultiParams( urllib.parse.parse_qs(request.query_string, keep_blank_values=True) ) diff --git a/docs/internals.rst b/docs/internals.rst index 4d51d614..4b4adc5e 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -39,7 +39,7 @@ The request object is passed to various plugin hooks. It represents an incoming ``.query_string`` - string The querystring component of the request, without the ``?`` - e.g. ``name__contains=sam&age__gt=10``. -``.args`` - RequestParameters +``.args`` - MultiParams An object representing the parsed querystring parameters, see below. 
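A quick illustrative sketch of the multi-value behaviour (editorial, mirroring the unit tests below):

.. code-block:: python

    from datasette.utils import MultiParams

    # Equivalent of the querystring ?foo=1&foo=2&bar=3
    args = MultiParams({"foo": ["1", "2"], "bar": ["3"]})

    assert args["foo"] == "1"                       # first value for the key
    assert list(args.getlist("foo")) == ["1", "2"]  # every value for the key
    assert "bar" in args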
The object also has one awaitable method: @@ -47,10 +47,12 @@ The object also has one awaitable method: ``await request.post_vars()`` - dictionary Returns a dictionary of form variables that were submitted in the request body via ``POST``. -The RequestParameters class ---------------------------- +.. _internals_multiparams: -``request.args`` is a ``RequestParameters`` object - a dictionary-like object which provides access to querystring parameters that may have multiple values. +The MultiParams class +--------------------- + +``request.args`` is a ``MultiParams`` object - a dictionary-like object which provides access to querystring parameters that may have multiple values. Consider the querystring ``?foo=1&foo=2&bar=3`` - with two values for ``foo`` and one value for ``bar``. diff --git a/tests/test_utils.py b/tests/test_utils.py index 01a10468..ffe14587 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -437,3 +437,12 @@ def test_call_with_supported_arguments(): with pytest.raises(TypeError): utils.call_with_supported_arguments(foo, a=1) + + +def test_multi_params_list(): + p1 = utils.MultiParams([["foo", "bar"], ["foo", "baz"]]) + assert "bar" == p1["foo"] + assert ["bar", "baz"] == p1.getlist("foo") + # Should raise an error if list isn't pairs + with pytest.raises(AssertionError): + utils.MultiParams([["foo", "bar"], ["foo", "baz", "bar"]]) From d96ac1d52cacf34bae09705eb8f9a0e3f81c426b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jun 2020 11:01:06 -0700 Subject: [PATCH 0042/1871] Allow tuples as well as lists in MultiParams, refs #799 --- datasette/utils/__init__.py | 6 +++--- tests/test_utils.py | 22 ++++++++++++++++------ 2 files changed, 19 insertions(+), 9 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 083fba0c..69e288e6 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -759,14 +759,14 @@ class MultiParams: if isinstance(data, dict): for key in data: assert isinstance( - data[key], list + data[key], (list, tuple) ), "dictionary data should be a dictionary of key => [list]" self._data = data - elif isinstance(data, list): + elif isinstance(data, list) or isinstance(data, tuple): new_data = {} for item in data: assert ( - isinstance(item, list) and len(item) == 2 + isinstance(item, (list, tuple)) and len(item) == 2 ), "list data should be a list of [key, value] pairs" key, value = item new_data.setdefault(key, []).append(value) diff --git a/tests/test_utils.py b/tests/test_utils.py index ffe14587..a7968e54 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -439,10 +439,20 @@ def test_call_with_supported_arguments(): utils.call_with_supported_arguments(foo, a=1) -def test_multi_params_list(): - p1 = utils.MultiParams([["foo", "bar"], ["foo", "baz"]]) +@pytest.mark.parametrize("data,should_raise", [ + ([["foo", "bar"], ["foo", "baz"]], False), + ([("foo", "bar"), ("foo", "baz")], False), + ((["foo", "bar"], ["foo", "baz"]), False), + ([["foo", "bar"], ["foo", "baz", "bax"]], True), + ({"foo": ["bar", "baz"]}, False), + ({"foo": ("bar", "baz")}, False), + ({"foo": "bar"}, True), +]) +def test_multi_params(data, should_raise): + if should_raise: + with pytest.raises(AssertionError): + utils.MultiParams(data) + return + p1 = utils.MultiParams(data) assert "bar" == p1["foo"] - assert ["bar", "baz"] == p1.getlist("foo") - # Should raise an error if list isn't pairs - with pytest.raises(AssertionError): - utils.MultiParams([["foo", "bar"], ["foo", "baz", "bar"]]) + assert ["bar", 
"baz"] == list(p1.getlist("foo")) From 84a9c4ff75460f91c049bd30bba3cee1fd89d9e2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jun 2020 12:05:57 -0700 Subject: [PATCH 0043/1871] CSRF protection (#798) Closes #793. * Rename RequestParameters to MultiParams, refs #799 * Allow tuples as well as lists in MultiParams, refs #799 * Use csrftokens when running tests, refs #799 * Use new csrftoken() function, refs https://github.com/simonw/asgi-csrf/issues/7 * Check for Vary: Cookie hedaer, refs https://github.com/simonw/asgi-csrf/issues/8 --- datasette/app.py | 10 ++++++- datasette/templates/messages_debug.html | 3 +- datasette/templates/query.html | 1 + datasette/utils/__init__.py | 3 ++ datasette/views/base.py | 1 + setup.py | 1 + tests/fixtures.py | 38 +++++++++++++++++++++---- tests/test_canned_write.py | 8 ++++-- tests/test_utils.py | 21 ++++++++------ 9 files changed, 67 insertions(+), 19 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f9bf91a8..54cf02f8 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1,4 +1,5 @@ import asyncio +import asgi_csrf import collections import datetime import hashlib @@ -884,7 +885,14 @@ class Datasette: await database.table_counts(limit=60 * 60 * 1000) asgi = AsgiLifespan( - AsgiTracer(DatasetteRouter(self, routes)), on_startup=setup_db + AsgiTracer( + asgi_csrf.asgi_csrf( + DatasetteRouter(self, routes), + signing_secret=self._secret, + cookie_name="ds_csrftoken", + ) + ), + on_startup=setup_db, ) for wrapper in pm.hook.asgi_wrapper(datasette=self): asgi = wrapper(asgi) diff --git a/datasette/templates/messages_debug.html b/datasette/templates/messages_debug.html index b2e1bc7c..e83d2a2f 100644 --- a/datasette/templates/messages_debug.html +++ b/datasette/templates/messages_debug.html @@ -8,7 +8,7 @@

Set a message:

- +
@@ -19,6 +19,7 @@
+
diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 52896e96..a7cb6647 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -52,6 +52,7 @@ {% endif %}

+ {% if canned_query %}{% endif %}

diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 69e288e6..059db184 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -772,6 +772,9 @@ class MultiParams: new_data.setdefault(key, []).append(value) self._data = new_data + def __repr__(self): + return "".format(self._data) + def __contains__(self, key): return key in self._data diff --git a/datasette/views/base.py b/datasette/views/base.py index 2402406a..315c96fe 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -95,6 +95,7 @@ class BaseView(AsgiView): **context, **{ "database_url": self.database_url, + "csrftoken": request.scope["csrftoken"], "database_color": self.database_color, "show_messages": lambda: self.ds._show_messages(request), "select_templates": [ diff --git a/setup.py b/setup.py index 93628266..c0316deb 100644 --- a/setup.py +++ b/setup.py @@ -53,6 +53,7 @@ setup( "uvicorn~=0.11", "aiofiles>=0.4,<0.6", "janus>=0.4,<0.6", + "asgi-csrf>=0.4", "PyYAML~=5.3", "mergedeep>=1.1.1,<1.4.0", "itsdangerous~=1.1", diff --git a/tests/fixtures.py b/tests/fixtures.py index 78a54c68..a64a8295 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -1,5 +1,5 @@ from datasette.app import Datasette -from datasette.utils import sqlite3 +from datasette.utils import sqlite3, MultiParams from asgiref.testing import ApplicationCommunicator from asgiref.sync import async_to_sync from http.cookies import SimpleCookie @@ -60,10 +60,35 @@ class TestClient: @async_to_sync async def post( - self, path, post_data=None, allow_redirects=True, redirect_count=0, cookies=None + self, + path, + post_data=None, + allow_redirects=True, + redirect_count=0, + content_type="application/x-www-form-urlencoded", + cookies=None, + csrftoken_from=None, ): + cookies = cookies or {} + post_data = post_data or {} + # Maybe fetch a csrftoken first + if csrftoken_from is not None: + if csrftoken_from is True: + csrftoken_from = path + token_response = await self._request(csrftoken_from) + # Check this had a Vary: Cookie header + assert "Cookie" == token_response.headers["vary"] + csrftoken = token_response.cookies["ds_csrftoken"] + cookies["ds_csrftoken"] = csrftoken + post_data["csrftoken"] = csrftoken return await self._request( - path, allow_redirects, redirect_count, "POST", cookies, post_data + path, + allow_redirects, + redirect_count, + "POST", + cookies, + post_data, + content_type, ) async def _request( @@ -74,6 +99,7 @@ class TestClient: method="GET", cookies=None, post_data=None, + content_type=None, ): query_string = b"" if "?" 
in path: @@ -84,6 +110,8 @@ class TestClient: else: raw_path = quote(path, safe="/:,").encode("latin-1") headers = [[b"host", b"localhost"]] + if content_type: + headers.append((b"content-type", content_type.encode("utf-8"))) if cookies: sc = SimpleCookie() for key, value in cookies.items(): @@ -111,7 +139,7 @@ class TestClient: start = await instance.receive_output(2) messages.append(start) assert start["type"] == "http.response.start" - headers = dict( + response_headers = MultiParams( [(k.decode("utf8"), v.decode("utf8")) for k, v in start["headers"]] ) status = start["status"] @@ -124,7 +152,7 @@ class TestClient: body += message["body"] if not message.get("more_body"): break - response = TestResponse(status, headers, body) + response = TestResponse(status, response_headers, body) if allow_redirects and response.status in (301, 302): assert ( redirect_count < self.max_redirects diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index 692d726e..be838063 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -40,7 +40,7 @@ def canned_write_client(): def test_insert(canned_write_client): response = canned_write_client.post( - "/data/add_name", {"name": "Hello"}, allow_redirects=False + "/data/add_name", {"name": "Hello"}, allow_redirects=False, csrftoken_from=True, ) assert 302 == response.status assert "/data/add_name?success" == response.headers["Location"] @@ -52,7 +52,7 @@ def test_insert(canned_write_client): def test_custom_success_message(canned_write_client): response = canned_write_client.post( - "/data/delete_name", {"rowid": 1}, allow_redirects=False + "/data/delete_name", {"rowid": 1}, allow_redirects=False, csrftoken_from=True ) assert 302 == response.status messages = canned_write_client.ds.unsign( @@ -62,11 +62,12 @@ def test_custom_success_message(canned_write_client): def test_insert_error(canned_write_client): - canned_write_client.post("/data/add_name", {"name": "Hello"}) + canned_write_client.post("/data/add_name", {"name": "Hello"}, csrftoken_from=True) response = canned_write_client.post( "/data/add_name_specify_id", {"rowid": 1, "name": "Should fail"}, allow_redirects=False, + csrftoken_from=True, ) assert 302 == response.status assert "/data/add_name_specify_id?error" == response.headers["Location"] @@ -82,6 +83,7 @@ def test_insert_error(canned_write_client): "/data/add_name_specify_id", {"rowid": 1, "name": "Should fail"}, allow_redirects=False, + csrftoken_from=True, ) assert [["ERROR", 3]] == canned_write_client.ds.unsign( response.cookies["ds_messages"], "messages" diff --git a/tests/test_utils.py b/tests/test_utils.py index a7968e54..cf714215 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -439,15 +439,18 @@ def test_call_with_supported_arguments(): utils.call_with_supported_arguments(foo, a=1) -@pytest.mark.parametrize("data,should_raise", [ - ([["foo", "bar"], ["foo", "baz"]], False), - ([("foo", "bar"), ("foo", "baz")], False), - ((["foo", "bar"], ["foo", "baz"]), False), - ([["foo", "bar"], ["foo", "baz", "bax"]], True), - ({"foo": ["bar", "baz"]}, False), - ({"foo": ("bar", "baz")}, False), - ({"foo": "bar"}, True), -]) +@pytest.mark.parametrize( + "data,should_raise", + [ + ([["foo", "bar"], ["foo", "baz"]], False), + ([("foo", "bar"), ("foo", "baz")], False), + ((["foo", "bar"], ["foo", "baz"]), False), + ([["foo", "bar"], ["foo", "baz", "bax"]], True), + ({"foo": ["bar", "baz"]}, False), + ({"foo": ("bar", "baz")}, False), + ({"foo": "bar"}, True), + ] +) def test_multi_params(data, should_raise): 
if should_raise: with pytest.raises(AssertionError): From 033a1bb22c70a955d9fd1d3b4675a0e2e5c8b8cd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jun 2020 12:06:43 -0700 Subject: [PATCH 0044/1871] Removed rogue print() from test --- tests/test_internals_database.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 5d5520dd..2d288cc8 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -162,7 +162,6 @@ async def test_execute_write_fn_block_false(db): with conn: conn.execute("delete from roadside_attractions where pk = 1;") row = conn.execute("select count(*) from roadside_attractions").fetchone() - print("row = ", row) return row[0] task_id = await db.execute_write_fn(write_fn) From f786033a5f0098371cb1df1ce83959b27c588115 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jun 2020 16:46:37 -0700 Subject: [PATCH 0045/1871] Fixed 'datasette plugins' command, with tests - closes #802 --- datasette/app.py | 4 ++-- datasette/cli.py | 2 +- tests/fixtures.py | 56 +++++++++++++++++++++++++++++++++++++++++++ tests/test_api.py | 58 ++------------------------------------------- tests/test_cli.py | 31 +++++++++++++++++++++++- tests/test_utils.py | 2 +- 6 files changed, 92 insertions(+), 61 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 54cf02f8..444a065a 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -626,9 +626,9 @@ class Datasette: }, } - def _plugins(self, request): + def _plugins(self, request=None, all=False): ps = list(get_plugins()) - if not request.args.get("all"): + if all is False or (request is not None and request.args.get("all")): ps = [p for p in ps if p["name"] not in DEFAULT_PLUGINS] return [ { diff --git a/datasette/cli.py b/datasette/cli.py index 23f9e36b..2e3c8e36 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -126,7 +126,7 @@ pm.hook.publish_subcommand(publish=publish) def plugins(all, plugins_dir): "List currently available plugins" app = Datasette([], plugins_dir=plugins_dir) - click.echo(json.dumps(app.plugins(all), indent=4)) + click.echo(json.dumps(app._plugins(all=all), indent=4)) @cli.command() diff --git a/tests/fixtures.py b/tests/fixtures.py index a64a8295..4ca7b10f 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -22,6 +22,62 @@ TEMP_PLUGIN_SECRET_FILE = os.path.join(tempfile.gettempdir(), "plugin-secret") PLUGINS_DIR = str(pathlib.Path(__file__).parent / "plugins") +EXPECTED_PLUGINS = [ + { + "name": "messages_output_renderer.py", + "static": False, + "templates": False, + "version": None, + "hooks": ["register_output_renderer"], + }, + { + "name": "my_plugin.py", + "static": False, + "templates": False, + "version": None, + "hooks": [ + "actor_from_request", + "extra_body_script", + "extra_css_urls", + "extra_js_urls", + "extra_template_vars", + "permission_allowed", + "prepare_connection", + "prepare_jinja2_environment", + "register_facet_classes", + "render_cell", + ], + }, + { + "name": "my_plugin_2.py", + "static": False, + "templates": False, + "version": None, + "hooks": [ + "actor_from_request", + "asgi_wrapper", + "extra_js_urls", + "extra_template_vars", + "permission_allowed", + "render_cell", + ], + }, + { + "name": "register_output_renderer.py", + "static": False, + "templates": False, + "version": None, + "hooks": ["register_output_renderer"], + }, + { + "name": "view_name.py", + "static": False, + "templates": False, + "version": None, + "hooks": ["extra_template_vars"], + }, +] + 
class TestResponse: def __init__(self, status, headers, body): diff --git a/tests/test_api.py b/tests/test_api.py index 4b752f31..0aa62a95 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -15,6 +15,7 @@ from .fixtures import ( # noqa generate_compound_rows, generate_sortable_rows, make_app_client, + EXPECTED_PLUGINS, METADATA, ) import json @@ -1259,62 +1260,7 @@ def test_threads_json(app_client): def test_plugins_json(app_client): response = app_client.get("/-/plugins.json") - expected = [ - { - "name": "messages_output_renderer.py", - "static": False, - "templates": False, - "version": None, - "hooks": ["register_output_renderer"], - }, - { - "name": "my_plugin.py", - "static": False, - "templates": False, - "version": None, - "hooks": [ - "actor_from_request", - "extra_body_script", - "extra_css_urls", - "extra_js_urls", - "extra_template_vars", - "permission_allowed", - "prepare_connection", - "prepare_jinja2_environment", - "register_facet_classes", - "render_cell", - ], - }, - { - "name": "my_plugin_2.py", - "static": False, - "templates": False, - "version": None, - "hooks": [ - "actor_from_request", - "asgi_wrapper", - "extra_js_urls", - "extra_template_vars", - "permission_allowed", - "render_cell", - ], - }, - { - "name": "register_output_renderer.py", - "static": False, - "templates": False, - "version": None, - "hooks": ["register_output_renderer"], - }, - { - "name": "view_name.py", - "static": False, - "templates": False, - "version": None, - "hooks": ["extra_template_vars"], - }, - ] - assert expected == sorted(response.json, key=lambda p: p["name"]) + assert EXPECTED_PLUGINS == sorted(response.json, key=lambda p: p["name"]) def test_versions_json(app_client): diff --git a/tests/test_cli.py b/tests/test_cli.py index 529661ce..c53e9a3e 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,4 +1,9 @@ -from .fixtures import app_client, make_app_client, TestClient as _TestClient +from .fixtures import ( + app_client, + make_app_client, + TestClient as _TestClient, + EXPECTED_PLUGINS, +) from datasette.cli import cli, serve from click.testing import CliRunner import io @@ -50,6 +55,30 @@ def test_spatialite_error_if_attempt_to_open_spatialite(): assert "trying to load a SpatiaLite database" in result.output +def test_plugins_cli(app_client): + runner = CliRunner() + result1 = runner.invoke(cli, ["plugins"]) + assert sorted(EXPECTED_PLUGINS, key=lambda p: p["name"]) == sorted( + json.loads(result1.output), key=lambda p: p["name"] + ) + # Try with --all + result2 = runner.invoke(cli, ["plugins", "--all"]) + names = [p["name"] for p in json.loads(result2.output)] + # Should have all the EXPECTED_PLUGINS + assert set(names).issuperset(set(p["name"] for p in EXPECTED_PLUGINS)) + # And the following too: + assert set(names).issuperset( + [ + "datasette.sql_functions", + "datasette.actor_auth_cookie", + "datasette.facets", + "datasette.publish.cloudrun", + "datasette.default_permissions", + "datasette.publish.heroku", + ] + ) + + def test_metadata_yaml(): yaml_file = io.StringIO( textwrap.dedent( diff --git a/tests/test_utils.py b/tests/test_utils.py index cf714215..4931ef3b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -449,7 +449,7 @@ def test_call_with_supported_arguments(): ({"foo": ["bar", "baz"]}, False), ({"foo": ("bar", "baz")}, False), ({"foo": "bar"}, True), - ] + ], ) def test_multi_params(data, should_raise): if should_raise: From 75c143a84cee2fad878c6318755582522b9afff3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jun 2020 
16:55:08 -0700 Subject: [PATCH 0046/1871] Fixed /-/plugins?all=1, refs #802 --- datasette/app.py | 7 ++++++- tests/test_api.py | 6 ++++++ tests/test_cli.py | 12 ++---------- 3 files changed, 14 insertions(+), 11 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 444a065a..1624f6ea 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -628,7 +628,12 @@ class Datasette: def _plugins(self, request=None, all=False): ps = list(get_plugins()) - if all is False or (request is not None and request.args.get("all")): + should_show_all = False + if request is not None: + should_show_all = request.args.get("all") + else: + should_show_all = all + if not should_show_all: ps = [p for p in ps if p["name"] not in DEFAULT_PLUGINS] return [ { diff --git a/tests/test_api.py b/tests/test_api.py index 0aa62a95..b35c0a2d 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,3 +1,4 @@ +from datasette.plugins import DEFAULT_PLUGINS from datasette.utils import detect_json1 from .fixtures import ( # noqa app_client, @@ -1261,6 +1262,11 @@ def test_threads_json(app_client): def test_plugins_json(app_client): response = app_client.get("/-/plugins.json") assert EXPECTED_PLUGINS == sorted(response.json, key=lambda p: p["name"]) + # Try with ?all=1 + response = app_client.get("/-/plugins.json?all=1") + names = {p["name"] for p in response.json} + assert names.issuperset(p["name"] for p in EXPECTED_PLUGINS) + assert names.issuperset(DEFAULT_PLUGINS) def test_versions_json(app_client): diff --git a/tests/test_cli.py b/tests/test_cli.py index c53e9a3e..2616f1d1 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -4,6 +4,7 @@ from .fixtures import ( TestClient as _TestClient, EXPECTED_PLUGINS, ) +from datasette.plugins import DEFAULT_PLUGINS from datasette.cli import cli, serve from click.testing import CliRunner import io @@ -67,16 +68,7 @@ def test_plugins_cli(app_client): # Should have all the EXPECTED_PLUGINS assert set(names).issuperset(set(p["name"] for p in EXPECTED_PLUGINS)) # And the following too: - assert set(names).issuperset( - [ - "datasette.sql_functions", - "datasette.actor_auth_cookie", - "datasette.facets", - "datasette.publish.cloudrun", - "datasette.default_permissions", - "datasette.publish.heroku", - ] - ) + assert set(names).issuperset(DEFAULT_PLUGINS) def test_metadata_yaml(): From 9c563d6aed072f14d3d25f58e84659f9caa1a243 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 5 Jun 2020 17:15:52 -0700 Subject: [PATCH 0047/1871] Bump asgi-csrf to 0.5.1 for a bug fix Refs https://github.com/simonw/asgi-csrf/issues/10 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index c0316deb..678a022f 100644 --- a/setup.py +++ b/setup.py @@ -53,7 +53,7 @@ setup( "uvicorn~=0.11", "aiofiles>=0.4,<0.6", "janus>=0.4,<0.6", - "asgi-csrf>=0.4", + "asgi-csrf>=0.5.1", "PyYAML~=5.3", "mergedeep>=1.1.1,<1.4.0", "itsdangerous~=1.1", From 30a8132d58a89fed0e034e058b62fab5180fae0f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 11:18:46 -0700 Subject: [PATCH 0048/1871] Docs for authentication + canned query permissions, refs #800 Closes #786 --- docs/authentication.rst | 108 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 106 insertions(+), 2 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 0a9a4c0d..2c07f75a 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -4,14 +4,118 @@ Authentication and permissions ================================ -Datasette's authentication 
system is currently under construction. Follow `issue 699 `__ to track the development of this feature. +Datasette does not require authentication by default. Any visitor to a Datasette instance can explore the full data and execute SQL queries. + +Datasette's plugin system can be used to add many different styles of authentication, such as user accounts, single sign-on or API keys. + +.. _authentication_actor: + +Actors +====== + +Through plugins, Datasette can support both authenticated users (with cookies) and authenticated API agents (via authentication tokens). The word "actor" is used to cover both of these cases. + +Every request to Datasette has an associated actor value. This can be ``None`` for unauthenticated requests, or a JSON compatible Python dictionary for authenticated users or API agents. + +The only required field in an actor is ``"id"``, which must be a string. Plugins may decide to add any other fields to the actor dictionary. + +Plugins can use the :ref:`plugin_actor_from_request` hook to implement custom logic for authenticating an actor based on the incoming HTTP request. + +.. _authentication_root: + +Using the "root" actor +====================== + +Datasette currently leaves almost all forms of authentication to plugins - `datasette-auth-github `__ for example. + +The one exception is the "root" account, which you can sign into while using Datasette on your local machine. This provides access to a small number of debugging features. + +To sign in as root, start Datasette using the ``--root`` command-line option, like this:: + + $ datasette --root + http://127.0.0.1:8001/-/auth-token?token=786fc524e0199d70dc9a581d851f466244e114ca92f33aa3b42a139e9388daa7 + INFO: Started server process [25801] + INFO: Waiting for application startup. + INFO: Application startup complete. + INFO: Uvicorn running on http://127.0.0.1:8001 (Press CTRL+C to quit) + +The URL on the first line includes a one-use token which can be used to sign in as the "root" actor in your browser. Click on that link and then visit ``http://127.0.0.1:8001/-/actor`` to confirm that you are authenticated as an actor that looks like this: + +.. code-block:: json + + { + "id": "root" + } + + +.. _authentication_permissions_canned_queries: + +Setting permissions for canned queries +====================================== + +Datasette's :ref:`canned_queries` default to allowing any user to execute them. + +You can limit who is allowed to execute a specific query with the ``"allow"`` key in the :ref:`metadata` configuration for that query. + +Here's how to restrict access to a write query to just the "root" user: + +.. code-block:: json + + { + "databases": { + "mydatabase": { + "queries": { + "add_name": { + "sql": "INSERT INTO names (name) VALUES (:name)", + "write": true, + "allow": { + "id": ["root"] + } + } + } + } + } + } + +To allow any of the actors with an ``id`` matching a specific list of values, use this: + +.. code-block:: json + + { + "allow": { + "id": ["simon", "cleopaws"] + } + } + +This works for other keys as well. Imagine an actor that looks like this: + +.. code-block:: json + + { + "id": "simon", + "roles": ["staff", "developer"] + } + +You can provide access to any user that has "developer" as one of their roles like so: + +.. code-block:: json + + { + "allow": { + "roles": ["developer"] + } + } + +Note that "roles" is not a concept that is baked into Datasette - it's more of a convention that plugins can choose to implement and act on. + +These keys act as an "or" mechanism. 
A actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block. .. _PermissionsDebugView: Permissions Debug ================= -The debug tool at ``/-/permissions`` is only available to the root user. +The debug tool at ``/-/permissions`` is only available to the :ref:`authenticated root user ` (or any actor granted the ``permissions-debug`` action according to a plugin). It shows the thirty most recent permission checks that have been carried out by the Datasette instance. From d4c7b85f556230923d37ff327a068ed08aa9b62b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 11:23:54 -0700 Subject: [PATCH 0049/1871] Documentation for "id": "*", refs #800 --- docs/authentication.rst | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/authentication.rst b/docs/authentication.rst index 2c07f75a..a90dcc41 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -108,6 +108,16 @@ You can provide access to any user that has "developer" as one of their roles li Note that "roles" is not a concept that is baked into Datasette - it's more of a convention that plugins can choose to implement and act on. +If you want to provide access to any actor with a value for a specific key, use ``"*"``. For example, to spceify that a query can be accessed by any logged-in user use this: + +.. code-block:: json + + { + "allow": { + "id": "*" + } + } + These keys act as an "or" mechanism. A actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block. .. _PermissionsDebugView: From 14f6b4d200f24940a795ddc0825319ab2891bde2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 11:39:11 -0700 Subject: [PATCH 0050/1871] actor_matches_allow utility function, refs #800 --- datasette/utils/__init__.py | 19 +++++++++++++++++++ docs/authentication.rst | 18 ++++++++++++++++-- tests/test_utils.py | 27 +++++++++++++++++++++++++++ 3 files changed, 62 insertions(+), 2 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 059db184..eb118f38 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -854,3 +854,22 @@ def call_with_supported_arguments(fn, **kwargs): ) call_with.append(kwargs[parameter]) return fn(*call_with) + + +def actor_matches_allow(actor, allow): + if allow is None: + return True + for key, values in allow.items(): + if values == "*" and key in actor: + return True + if isinstance(values, str): + values = [values] + actor_values = actor.get(key) + if actor_values is None: + return False + if isinstance(actor_values, str): + actor_values = [actor_values] + actor_values = set(actor_values) + if actor_values.intersection(values): + return True + return False diff --git a/docs/authentication.rst b/docs/authentication.rst index a90dcc41..85bbbbbd 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -50,8 +50,8 @@ The URL on the first line includes a one-use token which can be used to sign in .. _authentication_permissions_canned_queries: -Setting permissions for canned queries -====================================== +Permissions for canned queries +============================== Datasette's :ref:`canned_queries` default to allowing any user to execute them. @@ -120,6 +120,20 @@ If you want to provide access to any actor with a value for a specific key, use These keys act as an "or" mechanism. 
A actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block. +.. _authentication_actor_matches_allow: + +actor_matches_allow() +===================== + +Plugins that wish to implement the same permissions scheme as canned queries can take advantage of the ``datasette.utils.actor_matches_allow(actor, allow)`` function: + +.. code-block:: python + + from datasette.utils import actor_matches_allow + + actor_matches_allow({"id": "root"}, {"id": "*"}) + # returns True + .. _PermissionsDebugView: Permissions Debug diff --git a/tests/test_utils.py b/tests/test_utils.py index 4931ef3b..7c24648a 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -459,3 +459,30 @@ def test_multi_params(data, should_raise): p1 = utils.MultiParams(data) assert "bar" == p1["foo"] assert ["bar", "baz"] == list(p1.getlist("foo")) + + +@pytest.mark.parametrize( + "actor,allow,expected", + [ + ({"id": "root"}, None, True), + ({"id": "root"}, {}, False), + # Special "*" value for any key: + ({"id": "root"}, {"id": "*"}, True), + ({}, {"id": "*"}, False), + ({"name": "root"}, {"id": "*"}, False), + # Supports single strings or list of values: + ({"id": "root"}, {"id": "bob"}, False), + ({"id": "root"}, {"id": ["bob"]}, False), + ({"id": "root"}, {"id": "root"}, True), + ({"id": "root"}, {"id": ["root"]}, True), + # Any matching role will work: + ({"id": "garry", "roles": ["staff", "dev"]}, {"roles": ["staff"]}, True), + ({"id": "garry", "roles": ["staff", "dev"]}, {"roles": ["dev"]}, True), + ({"id": "garry", "roles": ["staff", "dev"]}, {"roles": ["otter"]}, False), + ({"id": "garry", "roles": ["staff", "dev"]}, {"roles": ["dev", "otter"]}, True), + ({"id": "garry", "roles": []}, {"roles": ["staff"]}, False), + ({"id": "garry"}, {"roles": ["staff"]}, False), + ], +) +def test_actor_matches_allow(actor, allow, expected): + assert expected == utils.actor_matches_allow(actor, allow) From 3f83d4632a643266f46ccd955d951be7aacbab99 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 12:05:22 -0700 Subject: [PATCH 0051/1871] Respect query permissions on database page, refs #800 --- datasette/templates/database.html | 2 +- datasette/utils/__init__.py | 1 + datasette/views/database.py | 13 ++++++++++++- tests/test_canned_write.py | 31 ++++++++++++++++++++++++++++++- tests/test_utils.py | 3 +++ 5 files changed, 47 insertions(+), 3 deletions(-) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index e47b2418..fc88003c 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -60,7 +60,7 @@

Queries

{% endif %} diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index eb118f38..077728f4 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -857,6 +857,7 @@ def call_with_supported_arguments(fn, **kwargs): def actor_matches_allow(actor, allow): + actor = actor or {} if allow is None: return True for key, values in allow.items(): diff --git a/datasette/views/database.py b/datasette/views/database.py index 558dd0f0..abc7d3bb 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -2,6 +2,7 @@ import os import jinja2 from datasette.utils import ( + actor_matches_allow, to_css_class, validate_sql_select, is_url, @@ -53,6 +54,16 @@ class DatabaseView(DataView): ) tables.sort(key=lambda t: (t["hidden"], t["name"])) + canned_queries = [ + dict( + query, + requires_auth=not actor_matches_allow(None, query.get("allow", None)), + ) + for query in self.ds.get_canned_queries(database) + if actor_matches_allow( + request.scope.get("actor", None), query.get("allow", None) + ) + ] return ( { "database": database, @@ -60,7 +71,7 @@ class DatabaseView(DataView): "tables": tables, "hidden_count": len([t for t in tables if t["hidden"]]), "views": views, - "queries": self.ds.get_canned_queries(database), + "queries": canned_queries, }, { "show_hidden": request.args.get("_show_hidden"), diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index be838063..5b5756b0 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -24,6 +24,7 @@ def canned_write_client(): "sql": "delete from names where rowid = :rowid", "write": True, "on_success_message": "Name deleted", + "allow": {"id": "root"}, }, "update_name": { "sql": "update names set name = :name where rowid = :rowid", @@ -52,7 +53,11 @@ def test_insert(canned_write_client): def test_custom_success_message(canned_write_client): response = canned_write_client.post( - "/data/delete_name", {"rowid": 1}, allow_redirects=False, csrftoken_from=True + "/data/delete_name", + {"rowid": 1}, + cookies={"ds_actor": canned_write_client.ds.sign({"id": "root"}, "actor")}, + allow_redirects=False, + csrftoken_from=True, ) assert 302 == response.status messages = canned_write_client.ds.unsign( @@ -93,3 +98,27 @@ def test_insert_error(canned_write_client): def test_custom_params(canned_write_client): response = canned_write_client.get("/data/update_name?extra=foo") assert '' in response.text + + +def test_canned_query_permissions_on_database_page(canned_write_client): + # Without auth only shows three queries + query_names = [ + q["name"] for q in canned_write_client.get("/data.json").json["queries"] + ] + assert ["add_name", "add_name_specify_id", "update_name"] == query_names + + # With auth shows four + response = canned_write_client.get( + "/data.json", + cookies={"ds_actor": canned_write_client.ds.sign({"id": "root"}, "actor")}, + ) + assert 200 == response.status + assert [ + {"name": "add_name", "requires_auth": False}, + {"name": "add_name_specify_id", "requires_auth": False}, + {"name": "delete_name", "requires_auth": True}, + {"name": "update_name", "requires_auth": False}, + ] == [ + {"name": q["name"], "requires_auth": q["requires_auth"]} + for q in response.json["queries"] + ] diff --git a/tests/test_utils.py b/tests/test_utils.py index 7c24648a..975ed0fd 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -466,6 +466,9 @@ def test_multi_params(data, should_raise): [ ({"id": "root"}, None, True), ({"id": "root"}, {}, False), + (None, None, True), + (None, 
{}, False), + (None, {"id": "root"}, False), # Special "*" value for any key: ({"id": "root"}, {"id": "*"}, True), ({}, {"id": "*"}, False), From 070838bfa19b177f59ef3bd8f0139266adecda90 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 12:26:19 -0700 Subject: [PATCH 0052/1871] Better test for Vary header --- tests/fixtures.py | 2 -- tests/test_canned_write.py | 6 ++++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index 4ca7b10f..2268ef4d 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -132,8 +132,6 @@ class TestClient: if csrftoken_from is True: csrftoken_from = path token_response = await self._request(csrftoken_from) - # Check this had a Vary: Cookie header - assert "Cookie" == token_response.headers["vary"] csrftoken = token_response.cookies["ds_csrftoken"] cookies["ds_csrftoken"] = csrftoken post_data["csrftoken"] = csrftoken diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index 5b5756b0..aacc586f 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -100,6 +100,12 @@ def test_custom_params(canned_write_client): assert '' in response.text +def test_vary_header(canned_write_client): + # These forms embed a csrftoken so they should be served with Vary: Cookie + assert "vary" not in canned_write_client.get("/data").headers + assert "Cookie" == canned_write_client.get("/data/update_name").headers["vary"] + + def test_canned_query_permissions_on_database_page(canned_write_client): # Without auth only shows three queries query_names = [ From 966eec7f75d2e1b809b001bb7e82f35d477f77ea Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 12:27:00 -0700 Subject: [PATCH 0053/1871] Check permissions on canned query page, refs #800 --- datasette/views/database.py | 10 +++++++++- tests/test_canned_write.py | 8 ++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index abc7d3bb..4e9a6da7 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -9,7 +9,7 @@ from datasette.utils import ( path_with_added_args, path_with_removed_args, ) -from datasette.utils.asgi import AsgiFileDownload +from datasette.utils.asgi import AsgiFileDownload, Response from datasette.plugins import pm from .base import DatasetteError, DataView @@ -125,6 +125,14 @@ class QueryView(DataView): params.pop("sql") if "_shape" in params: params.pop("_shape") + + # Respect canned query permissions + if canned_query: + if not actor_matches_allow( + request.scope.get("actor", None), metadata.get("allow") + ): + return Response("Permission denied", status=403) + # Extract any :named parameters named_parameters = named_parameters or self.re_named_parameter.findall(sql) named_parameter_values = { diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index aacc586f..73b01e51 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -128,3 +128,11 @@ def test_canned_query_permissions_on_database_page(canned_write_client): {"name": q["name"], "requires_auth": q["requires_auth"]} for q in response.json["queries"] ] + + +def test_canned_query_permissions(canned_write_client): + assert 403 == canned_write_client.get("/data/delete_name").status + assert 200 == canned_write_client.get("/data/update_name").status + cookies = {"ds_actor": canned_write_client.ds.sign({"id": "root"}, "actor")} + assert 200 == canned_write_client.get("/data/delete_name", cookies=cookies).status + 
assert 200 == canned_write_client.get("/data/update_name", cookies=cookies).status From 3359d54a4eb9c9725c27a85437661b5180c4099a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 12:33:08 -0700 Subject: [PATCH 0054/1871] Use cookies when accessing csrftoken_from --- tests/fixtures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index 2268ef4d..75bd6b94 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -131,7 +131,7 @@ class TestClient: if csrftoken_from is not None: if csrftoken_from is True: csrftoken_from = path - token_response = await self._request(csrftoken_from) + token_response = await self._request(csrftoken_from, cookies=cookies) csrftoken = token_response.cookies["ds_csrftoken"] cookies["ds_csrftoken"] = csrftoken post_data["csrftoken"] = csrftoken From f1daf64e722f9aedc61bea1636a9df715c4c4a8e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 12:46:40 -0700 Subject: [PATCH 0055/1871] Link to canned query permissions documentation --- docs/authentication.rst | 2 +- docs/sql_queries.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 85bbbbbd..8b24a44a 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -53,7 +53,7 @@ The URL on the first line includes a one-use token which can be used to sign in Permissions for canned queries ============================== -Datasette's :ref:`canned_queries` default to allowing any user to execute them. +Datasette's :ref:`canned queries ` default to allowing any user to execute them. You can limit who is allowed to execute a specific query with the ``"allow"`` key in the :ref:`metadata` configuration for that query. diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index aa1edc98..5df8bdb0 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -217,7 +217,7 @@ Writable canned queries Canned queries by default are read-only. You can use the ``"write": true`` key to indicate that a canned query can write to the database. -You may wish to use this feature in conjunction with :ref:`authentication`. +See :ref:`authentication_permissions_canned_queries` for details on how to add permission checks to canned queries, using the ``"allow"`` key. .. code-block:: json From 7dc23cd71aeb5a0e194f25fd1b8e569e3bb2149b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 13:05:09 -0700 Subject: [PATCH 0056/1871] Whitespace --- docs/authentication.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 8b24a44a..730a86c8 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -47,7 +47,6 @@ The URL on the first line includes a one-use token which can be used to sign in "id": "root" } - .. _authentication_permissions_canned_queries: Permissions for canned queries From bd4de0647d660709de122303a1aece3a8ef88394 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 19:09:59 -0700 Subject: [PATCH 0057/1871] Improved permissions documentation --- docs/authentication.rst | 7 +++++++ docs/internals.rst | 2 ++ 2 files changed, 9 insertions(+) diff --git a/docs/authentication.rst b/docs/authentication.rst index 730a86c8..fd70000e 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -47,6 +47,13 @@ The URL on the first line includes a one-use token which can be used to sign in "id": "root" } +.. 
_authentication_permissions: + +Permissions +=========== + +Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. This method is also used by Datasette core code itself, which allows plugins to help make decisions on which actions are allowed by implementing the :ref:`permission_allowed(...) ` plugin hook. + .. _authentication_permissions_canned_queries: Permissions for canned queries diff --git a/docs/internals.rst b/docs/internals.rst index 4b4adc5e..25b2d875 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -116,6 +116,8 @@ This method lets you read plugin configuration values that were set in ``metadat Renders a `Jinja template `__ using Datasette's preconfigured instance of Jinja and returns the resulting string. The template will have access to Datasette's default template functions and any functions that have been made available by other plugins. +.. _datasette_permission_allowed: + await .permission_allowed(actor, action, resource_type=None, resource_identifier=None, default=False) ----------------------------------------------------------------------------------------------------- From 86dec9e8fffd6c4efec928ae9b5713748dec7e74 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 6 Jun 2020 22:30:36 -0700 Subject: [PATCH 0058/1871] Added permission check to every view, closes #808 --- datasette/app.py | 5 ++ datasette/templates/permissions_debug.html | 2 +- datasette/utils/asgi.py | 4 + datasette/views/base.py | 14 ++++ datasette/views/database.py | 8 ++ datasette/views/index.py | 1 + datasette/views/table.py | 5 ++ docs/authentication.rst | 88 ++++++++++++++++++++++ tests/conftest.py | 38 ++++++++++ tests/fixtures.py | 16 ++++ tests/test_api.py | 2 +- tests/test_auth.py | 1 + tests/test_html.py | 38 ++++++++++ 13 files changed, 220 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 1624f6ea..f433a10a 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -49,6 +49,7 @@ from .utils import ( ) from .utils.asgi import ( AsgiLifespan, + Forbidden, NotFound, Request, Response, @@ -1003,6 +1004,10 @@ class DatasetteRouter(AsgiRouter): status = 404 info = {} message = exception.args[0] + elif isinstance(exception, Forbidden): + status = 403 + info = {} + message = exception.args[0] elif isinstance(exception, DatasetteError): status = exception.status info = exception.error_dict diff --git a/datasette/templates/permissions_debug.html b/datasette/templates/permissions_debug.html index fb098c5c..dda57dfa 100644 --- a/datasette/templates/permissions_debug.html +++ b/datasette/templates/permissions_debug.html @@ -47,7 +47,7 @@

Actor: {{ check.actor|tojson }}

{% if check.resource_type %} -

Resource: {{ check.resource_type }}: {{ check.resource_identifier }}

+

Resource: {{ check.resource_type }} = {{ check.resource_identifier }}

{% endif %}
{% endfor %} diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index ba131dc8..fa78c8df 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -13,6 +13,10 @@ class NotFound(Exception): pass +class Forbidden(Exception): + pass + + class Request: def __init__(self, scope, receive): self.scope = scope diff --git a/datasette/views/base.py b/datasette/views/base.py index 315c96fe..9c2cbbcc 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -29,6 +29,7 @@ from datasette.utils.asgi import ( AsgiWriter, AsgiRouter, AsgiView, + Forbidden, NotFound, Response, ) @@ -63,6 +64,19 @@ class BaseView(AsgiView): response.body = b"" return response + async def check_permission( + self, request, action, resource_type=None, resource_identifier=None + ): + ok = await self.ds.permission_allowed( + request.scope.get("actor"), + action, + resource_type=resource_type, + resource_identifier=resource_identifier, + default=True, + ) + if not ok: + raise Forbidden(action) + def database_url(self, database): db = self.ds.databases[database] base_url = self.ds.config("base_url") diff --git a/datasette/views/database.py b/datasette/views/database.py index 4e9a6da7..eb7c29ca 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -19,6 +19,7 @@ class DatabaseView(DataView): name = "database" async def data(self, request, database, hash, default_labels=False, _size=None): + await self.check_permission(request, "view-database", "database", database) metadata = (self.ds.metadata("databases") or {}).get(database, {}) self.ds.update_with_inherited_metadata(metadata) @@ -89,6 +90,9 @@ class DatabaseDownload(DataView): name = "database_download" async def view_get(self, request, database, hash, correct_hash_present, **kwargs): + await self.check_permission( + request, "view-database-download", "database", database + ) if database not in self.ds.databases: raise DatasetteError("Invalid database", status=404) db = self.ds.databases[database] @@ -128,6 +132,10 @@ class QueryView(DataView): # Respect canned query permissions if canned_query: + await self.check_permission( + request, "view-query", "query", (database, canned_query) + ) + # TODO: fix this to use that permission check if not actor_matches_allow( request.scope.get("actor", None), metadata.get("allow") ): diff --git a/datasette/views/index.py b/datasette/views/index.py index fe88a38c..40c41002 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -22,6 +22,7 @@ class IndexView(BaseView): self.ds = datasette async def get(self, request, as_format): + await self.check_permission(request, "view-index") databases = [] for name, db in self.ds.databases.items(): table_names = await db.table_names() diff --git a/datasette/views/table.py b/datasette/views/table.py index ec1b6c7c..32c7f839 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -267,6 +267,8 @@ class TableView(RowTableShared): if not is_view and not table_exists: raise NotFound("Table not found: {}".format(table)) + await self.check_permission(request, "view-table", "table", (database, table)) + pks = await db.primary_keys(table) table_columns = await db.table_columns(table) @@ -844,6 +846,9 @@ class RowView(RowTableShared): async def data(self, request, database, hash, table, pk_path, default_labels=False): pk_values = urlsafe_components(pk_path) + await self.check_permission( + request, "view-row", "row", tuple([database, table] + list(pk_values)) + ) db = self.ds.databases[database] pks = await 
db.primary_keys(table) use_rowid = not pks diff --git a/docs/authentication.rst b/docs/authentication.rst index fd70000e..b0473ee8 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -150,3 +150,91 @@ The debug tool at ``/-/permissions`` is only available to the :ref:`authenticate It shows the thirty most recent permission checks that have been carried out by the Datasette instance. This is designed to help administrators and plugin authors understand exactly how permission checks are being carried out, in order to effectively configure Datasette's permission system. + + +.. _permissions: + +Permissions +=========== + +This section lists all of the permission checks that are carried out by Datasette core, along with their ``resource_type`` and ``resource_identifier`` if those are passed. + +.. _permissions_view_index: + +view-index +---------- + +Actor is allowed to view the index page, e.g. https://latest.datasette.io/ + + +.. _permissions_view_database: + +view-database +------------- + +Actor is allowed to view a database page, e.g. https://latest.datasette.io/fixtures + +``resource_type`` - string + "database" + +``resource_identifier`` - string + The name of the database + +.. _permissions_view_database_download: + +view-database-download +----------------------- + +Actor is allowed to download a database, e.g. https://latest.datasette.io/fixtures.db + +``resource_type`` - string + "database" + +``resource_identifier`` - string + The name of the database + +.. _permissions_view_table: + +view-table +---------- + +Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.io/fixtures/complex_foreign_keys + +``resource_type`` - string + "table" - even if this is actually a SQL view + +``resource_identifier`` - tuple: (string, string) + The name of the database, then the name of the table + +.. _permissions_view_row: + +view-row +-------- + +Actor is allowed to view a row page, e.g. https://latest.datasette.io/fixtures/compound_primary_key/a,b + +``resource_type`` - string + "row" + +``resource_identifier`` - tuple: (string, string, strings...) + The name of the database, then the name of the table, then the primary key of the row. The primary key may be a single value or multiple values, so the ``resource_identifier`` tuple may be three or more items long. + +.. _permissions_view_query: + +view-query +---------- + +Actor is allowed to view a :ref:`canned query ` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size + +``resource_type`` - string + "query" + +``resource_identifier`` - string + The name of the canned query + +.. _permissions_permissions_debug: + +permissions-debug +----------------- + +Actor is allowed to view the ``/-/permissions`` debug page. diff --git a/tests/conftest.py b/tests/conftest.py index a19ad18d..1921ae3a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,15 @@ import os +import pathlib import pytest +import re + +UNDOCUMENTED_PERMISSIONS = { + "this_is_allowed", + "this_is_denied", + "this_is_allowed_async", + "this_is_denied_async", + "no_match", +} def pytest_configure(config): @@ -39,3 +49,31 @@ def restore_working_directory(tmpdir, request): os.chdir(previous_cwd) request.addfinalizer(return_to_previous) + + +@pytest.fixture(scope="session", autouse=True) +def check_permission_actions_are_documented(): + from datasette.plugins import pm + + content = ( + (pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst") + .open() + .read() + ) + permissions_re = re.compile(r"\.\. 
_permissions_([^\s:]+):") + documented_permission_actions = set(permissions_re.findall(content)).union( + UNDOCUMENTED_PERMISSIONS + ) + + def before(hook_name, hook_impls, kwargs): + if hook_name == "permission_allowed": + action = kwargs.get("action").replace("-", "_") + assert ( + action in documented_permission_actions + ), "Undocumented permission action: {}, resource_type: {}, resource_identifier: {}".format( + action, kwargs["resource_type"], kwargs["resource_identifier"] + ) + + pm.add_hookcall_monitoring( + before=before, after=lambda outcome, hook_name, hook_impls, kwargs: None + ) diff --git a/tests/fixtures.py b/tests/fixtures.py index 75bd6b94..d175dfd5 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -840,3 +840,19 @@ if __name__ == "__main__": sys.argv[0] ) ) + + +def assert_permission_checked( + datasette, action, resource_type=None, resource_identifier=None +): + assert [ + pc + for pc in datasette._permission_checks + if pc["action"] == action + and pc["resource_type"] == resource_type + and pc["resource_identifier"] == resource_identifier + ], """Missing expected permission check: action={}, resource_type={}, resource_identifier={} + Permission checks seen: {} + """.format( + action, resource_type, resource_identifier, datasette._permission_checks + ) diff --git a/tests/test_api.py b/tests/test_api.py index b35c0a2d..555e394a 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1721,7 +1721,7 @@ def test_trace(app_client): assert isinstance(trace["traceback"], list) assert isinstance(trace["database"], str) assert isinstance(trace["sql"], str) - assert isinstance(trace["params"], (list, dict)) + assert isinstance(trace["params"], (list, dict, None.__class__)) @pytest.mark.parametrize( diff --git a/tests/test_auth.py b/tests/test_auth.py index ac8d7abe..40dc2587 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -23,6 +23,7 @@ def test_actor_cookie(app_client): def test_permissions_debug(app_client): + app_client.ds._permission_checks.clear() assert 403 == app_client.get("/-/permissions").status # With the cookie it should work cookie = app_client.ds.sign({"id": "root"}, "actor") diff --git a/tests/test_html.py b/tests/test_html.py index 2d2a141a..3569b92c 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -4,6 +4,7 @@ from .fixtures import ( # noqa app_client_shorter_time_limit, app_client_two_attached_databases, app_client_with_hash, + assert_permission_checked, make_app_client, METADATA, ) @@ -17,6 +18,7 @@ import urllib.parse def test_homepage(app_client_two_attached_databases): response = app_client_two_attached_databases.get("/") + assert_permission_checked(app_client_two_attached_databases.ds, "view-index") assert response.status == 200 assert "text/html; charset=utf-8" == response.headers["content-type"] soup = Soup(response.body, "html.parser") @@ -75,6 +77,12 @@ def test_static_mounts(): def test_memory_database_page(): for client in make_app_client(memory=True): response = client.get("/:memory:") + assert_permission_checked( + client.ds, + "view-database", + resource_type="database", + resource_identifier=":memory:", + ) assert response.status == 200 @@ -87,6 +95,12 @@ def test_database_page_redirects_with_url_hash(app_client_with_hash): def test_database_page(app_client): response = app_client.get("/fixtures") + assert_permission_checked( + app_client.ds, + "view-database", + resource_type="database", + resource_identifier="fixtures", + ) soup = Soup(response.body, "html.parser") queries_ul = soup.find("h2", 
text="Queries").find_next_sibling("ul") assert queries_ul is not None @@ -197,6 +211,12 @@ def test_row_page_does_not_truncate(): for client in make_app_client(config={"truncate_cells_html": 5}): response = client.get("/fixtures/facetable/1") assert response.status == 200 + assert_permission_checked( + client.ds, + "view-row", + resource_type="row", + resource_identifier=("fixtures", "facetable", "1"), + ) table = Soup(response.body, "html.parser").find("table") assert table["class"] == ["rows-and-columns"] assert ["Mission"] == [ @@ -506,6 +526,12 @@ def test_templates_considered(app_client, path, expected_considered): def test_table_html_simple_primary_key(app_client): response = app_client.get("/fixtures/simple_primary_key?_size=3") + assert_permission_checked( + app_client.ds, + "view-table", + resource_type="table", + resource_identifier=("fixtures", "simple_primary_key"), + ) assert response.status == 200 table = Soup(response.body, "html.parser").find("table") assert table["class"] == ["rows-and-columns"] @@ -896,6 +922,12 @@ def test_database_download_allowed_for_immutable(): assert len(soup.findAll("a", {"href": re.compile(r"\.db$")})) # Check we can actually download it assert 200 == client.get("/fixtures.db").status + assert_permission_checked( + client.ds, + "view-database-download", + resource_type="database", + resource_identifier="fixtures", + ) def test_database_download_disallowed_for_mutable(app_client): @@ -991,6 +1023,12 @@ def test_404_content_type(app_client): def test_canned_query_with_custom_metadata(app_client): response = app_client.get("/fixtures/neighborhood_search?text=town") + assert_permission_checked( + app_client.ds, + "view-query", + resource_type="query", + resource_identifier=("fixtures", "neighborhood_search"), + ) assert response.status == 200 soup = Soup(response.body, "html.parser") assert "Search neighborhoods" == soup.find("h1").text From 4340845754e90fe778a7da8668b4fd9bf6ccc2c6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 7 Jun 2020 13:03:08 -0700 Subject: [PATCH 0059/1871] Nested permission checks for all views, refs #811 --- datasette/views/database.py | 10 +++++- datasette/views/index.py | 2 +- datasette/views/table.py | 5 +++ docs/authentication.rst | 21 ++++++++--- tests/fixtures.py | 36 +++++++++++-------- tests/test_html.py | 71 ++++++++++++++++++++++--------------- 6 files changed, 97 insertions(+), 48 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index eb7c29ca..4eae9e33 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -19,6 +19,7 @@ class DatabaseView(DataView): name = "database" async def data(self, request, database, hash, default_labels=False, _size=None): + await self.check_permission(request, "view-instance") await self.check_permission(request, "view-database", "database", database) metadata = (self.ds.metadata("databases") or {}).get(database, {}) self.ds.update_with_inherited_metadata(metadata) @@ -90,6 +91,8 @@ class DatabaseDownload(DataView): name = "database_download" async def view_get(self, request, database, hash, correct_hash_present, **kwargs): + await self.check_permission(request, "view-instance") + await self.check_permission(request, "view-database", "database", database) await self.check_permission( request, "view-database-download", "database", database ) @@ -132,6 +135,8 @@ class QueryView(DataView): # Respect canned query permissions if canned_query: + await self.check_permission(request, "view-instance") + await 
self.check_permission(request, "view-database", "database", database) await self.check_permission( request, "view-query", "query", (database, canned_query) ) @@ -140,7 +145,10 @@ class QueryView(DataView): request.scope.get("actor", None), metadata.get("allow") ): return Response("Permission denied", status=403) - + else: + await self.check_permission(request, "view-instance") + await self.check_permission(request, "view-database", "database", database) + await self.check_permission(request, "execute-query", "database", database) # Extract any :named parameters named_parameters = named_parameters or self.re_named_parameter.findall(sql) named_parameter_values = { diff --git a/datasette/views/index.py b/datasette/views/index.py index 40c41002..5f903474 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -22,7 +22,7 @@ class IndexView(BaseView): self.ds = datasette async def get(self, request, as_format): - await self.check_permission(request, "view-index") + await self.check_permission(request, "view-instance") databases = [] for name, db in self.ds.databases.items(): table_names = await db.table_names() diff --git a/datasette/views/table.py b/datasette/views/table.py index 32c7f839..10d6725a 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -267,6 +267,8 @@ class TableView(RowTableShared): if not is_view and not table_exists: raise NotFound("Table not found: {}".format(table)) + await self.check_permission(request, "view-instance") + await self.check_permission(request, "view-database", "database", database) await self.check_permission(request, "view-table", "table", (database, table)) pks = await db.primary_keys(table) @@ -846,6 +848,9 @@ class RowView(RowTableShared): async def data(self, request, database, hash, table, pk_path, default_labels=False): pk_values = urlsafe_components(pk_path) + await self.check_permission(request, "view-instance") + await self.check_permission(request, "view-database", "database", database) + await self.check_permission(request, "view-table", "table", (database, table)) await self.check_permission( request, "view-row", "row", tuple([database, table] + list(pk_values)) ) diff --git a/docs/authentication.rst b/docs/authentication.rst index b0473ee8..7fa96b35 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -159,12 +159,12 @@ Permissions This section lists all of the permission checks that are carried out by Datasette core, along with their ``resource_type`` and ``resource_identifier`` if those are passed. -.. _permissions_view_index: +.. _permissions_view_instance: -view-index ----------- +view-instance +------------- -Actor is allowed to view the index page, e.g. https://latest.datasette.io/ +Top level permission - Actor is allowed to view any pages within this instance, starting at https://latest.datasette.io/ .. _permissions_view_database: @@ -232,6 +232,19 @@ Actor is allowed to view a :ref:`canned query ` page, e.g. https ``resource_identifier`` - string The name of the canned query +.. _permissions_execute_query: + +execute-query +------------- + +Actor is allowed to run arbitrary SQL queries against a specific database, e.g. https://latest.datasette.io/fixtures?sql=select+100 + +``resource_type`` - string + "database" + +``resource_identifier`` - string + The name of the database + .. 
_permissions_permissions_debug: permissions-debug diff --git a/tests/fixtures.py b/tests/fixtures.py index d175dfd5..f767dc84 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -842,17 +842,25 @@ if __name__ == "__main__": ) -def assert_permission_checked( - datasette, action, resource_type=None, resource_identifier=None -): - assert [ - pc - for pc in datasette._permission_checks - if pc["action"] == action - and pc["resource_type"] == resource_type - and pc["resource_identifier"] == resource_identifier - ], """Missing expected permission check: action={}, resource_type={}, resource_identifier={} - Permission checks seen: {} - """.format( - action, resource_type, resource_identifier, datasette._permission_checks - ) +def assert_permissions_checked(datasette, actions): + # actions is a list of "action" or (action, resource_type, resource_identifier) tuples + for action in actions: + if isinstance(action, str): + resource_type = None + resource_identifier = None + else: + action, resource_type, resource_identifier = action + assert [ + pc + for pc in datasette._permission_checks + if pc["action"] == action + and pc["resource_type"] == resource_type + and pc["resource_identifier"] == resource_identifier + ], """Missing expected permission check: action={}, resource_type={}, resource_identifier={} + Permission checks seen: {} + """.format( + action, + resource_type, + resource_identifier, + json.dumps(list(datasette._permission_checks), indent=4), + ) diff --git a/tests/test_html.py b/tests/test_html.py index 3569b92c..b41c1943 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -4,7 +4,7 @@ from .fixtures import ( # noqa app_client_shorter_time_limit, app_client_two_attached_databases, app_client_with_hash, - assert_permission_checked, + assert_permissions_checked, make_app_client, METADATA, ) @@ -18,7 +18,7 @@ import urllib.parse def test_homepage(app_client_two_attached_databases): response = app_client_two_attached_databases.get("/") - assert_permission_checked(app_client_two_attached_databases.ds, "view-index") + assert_permissions_checked(app_client_two_attached_databases.ds, ["view-instance"]) assert response.status == 200 assert "text/html; charset=utf-8" == response.headers["content-type"] soup = Soup(response.body, "html.parser") @@ -77,11 +77,8 @@ def test_static_mounts(): def test_memory_database_page(): for client in make_app_client(memory=True): response = client.get("/:memory:") - assert_permission_checked( - client.ds, - "view-database", - resource_type="database", - resource_identifier=":memory:", + assert_permissions_checked( + client.ds, ["view-instance", ("view-database", "database", ":memory:")] ) assert response.status == 200 @@ -95,11 +92,8 @@ def test_database_page_redirects_with_url_hash(app_client_with_hash): def test_database_page(app_client): response = app_client.get("/fixtures") - assert_permission_checked( - app_client.ds, - "view-database", - resource_type="database", - resource_identifier="fixtures", + assert_permissions_checked( + app_client.ds, ["view-instance", ("view-database", "database", "fixtures")] ) soup = Soup(response.body, "html.parser") queries_ul = soup.find("h2", text="Queries").find_next_sibling("ul") @@ -211,11 +205,13 @@ def test_row_page_does_not_truncate(): for client in make_app_client(config={"truncate_cells_html": 5}): response = client.get("/fixtures/facetable/1") assert response.status == 200 - assert_permission_checked( + assert_permissions_checked( client.ds, - "view-row", - resource_type="row", - 
resource_identifier=("fixtures", "facetable", "1"), + [ + "view-instance", + ("view-table", "table", ("fixtures", "facetable")), + ("view-row", "row", ("fixtures", "facetable", "1")), + ], ) table = Soup(response.body, "html.parser").find("table") assert table["class"] == ["rows-and-columns"] @@ -526,11 +522,13 @@ def test_templates_considered(app_client, path, expected_considered): def test_table_html_simple_primary_key(app_client): response = app_client.get("/fixtures/simple_primary_key?_size=3") - assert_permission_checked( + assert_permissions_checked( app_client.ds, - "view-table", - resource_type="table", - resource_identifier=("fixtures", "simple_primary_key"), + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("view-table", "table", ("fixtures", "simple_primary_key")), + ], ) assert response.status == 200 table = Soup(response.body, "html.parser").find("table") @@ -887,6 +885,19 @@ def test_database_metadata(app_client): assert_footer_links(soup) +def test_database_query_permission_checks(app_client): + response = app_client.get("/fixtures?sql=select+1") + assert response.status == 200 + assert_permissions_checked( + app_client.ds, + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("execute-query", "database", "fixtures"), + ], + ) + + def test_database_metadata_with_custom_sql(app_client): response = app_client.get("/fixtures?sql=select+*+from+simple_primary_key") assert response.status == 200 @@ -922,11 +933,13 @@ def test_database_download_allowed_for_immutable(): assert len(soup.findAll("a", {"href": re.compile(r"\.db$")})) # Check we can actually download it assert 200 == client.get("/fixtures.db").status - assert_permission_checked( + assert_permissions_checked( client.ds, - "view-database-download", - resource_type="database", - resource_identifier="fixtures", + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("view-database-download", "database", "fixtures"), + ], ) @@ -1023,11 +1036,13 @@ def test_404_content_type(app_client): def test_canned_query_with_custom_metadata(app_client): response = app_client.get("/fixtures/neighborhood_search?text=town") - assert_permission_checked( + assert_permissions_checked( app_client.ds, - "view-query", - resource_type="query", - resource_identifier=("fixtures", "neighborhood_search"), + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("view-query", "query", ("fixtures", "neighborhood_search")), + ], ) assert response.status == 200 soup = Soup(response.body, "html.parser") From a1e801453aaeb540d2aea8cccb90b425af737c44 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 7 Jun 2020 13:20:59 -0700 Subject: [PATCH 0060/1871] Renamed execute-query permission to execute-sql, refs #811 --- datasette/views/database.py | 13 +++---------- docs/authentication.rst | 4 ++-- tests/test_html.py | 2 +- 3 files changed, 6 insertions(+), 13 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index 4eae9e33..961ab61e 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -134,21 +134,14 @@ class QueryView(DataView): params.pop("_shape") # Respect canned query permissions + await self.check_permission(request, "view-instance") + await self.check_permission(request, "view-database", "database", database) if canned_query: - await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", "database", database) await self.check_permission( request, "view-query", "query", 
(database, canned_query) ) - # TODO: fix this to use that permission check - if not actor_matches_allow( - request.scope.get("actor", None), metadata.get("allow") - ): - return Response("Permission denied", status=403) else: - await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", "database", database) - await self.check_permission(request, "execute-query", "database", database) + await self.check_permission(request, "execute-sql", "database", database) # Extract any :named parameters named_parameters = named_parameters or self.re_named_parameter.findall(sql) named_parameter_values = { diff --git a/docs/authentication.rst b/docs/authentication.rst index 7fa96b35..ee8e7125 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -234,8 +234,8 @@ Actor is allowed to view a :ref:`canned query ` page, e.g. https .. _permissions_execute_query: -execute-query -------------- +execute-sql +----------- Actor is allowed to run arbitrary SQL queries against a specific database, e.g. https://latest.datasette.io/fixtures?sql=select+100 diff --git a/tests/test_html.py b/tests/test_html.py index b41c1943..ac7432d7 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -893,7 +893,7 @@ def test_database_query_permission_checks(app_client): [ "view-instance", ("view-database", "database", "fixtures"), - ("execute-query", "database", "fixtures"), + ("execute-sql", "database", "fixtures"), ], ) From 5ed2853cf3432a0f5a3511df8d2ffe9c6c79a584 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 7 Jun 2020 14:01:22 -0700 Subject: [PATCH 0061/1871] Fix permissions documenation test --- docs/authentication.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index ee8e7125..1bf2a1a5 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -232,7 +232,7 @@ Actor is allowed to view a :ref:`canned query ` page, e.g. https ``resource_identifier`` - string The name of the canned query -.. _permissions_execute_query: +.. 
_permissions_execute_sql: execute-sql ----------- From abc733912447f284b38ddc389d18ba0a8cef8bcf Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 7 Jun 2020 14:14:10 -0700 Subject: [PATCH 0062/1871] Nicer pattern for make_app_client() in tests, closes #395 --- tests/fixtures.py | 44 +++++++++++++++++++++++++------------- tests/test_api.py | 10 ++++----- tests/test_canned_write.py | 4 ++-- tests/test_cli.py | 2 +- tests/test_custom_pages.py | 2 +- tests/test_html.py | 28 ++++++++++++------------ tests/test_plugins.py | 8 +++---- 7 files changed, 56 insertions(+), 42 deletions(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index f767dc84..2ac73fb1 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -2,6 +2,7 @@ from datasette.app import Datasette from datasette.utils import sqlite3, MultiParams from asgiref.testing import ApplicationCommunicator from asgiref.sync import async_to_sync +import contextlib from http.cookies import SimpleCookie import itertools import json @@ -220,6 +221,7 @@ class TestClient: return response +@contextlib.contextmanager def make_app_client( sql_time_limit_ms=None, max_returned_rows=None, @@ -281,7 +283,8 @@ def make_app_client( @pytest.fixture(scope="session") def app_client(): - yield from make_app_client() + with make_app_client() as client: + yield client @pytest.fixture(scope="session") @@ -294,64 +297,75 @@ def app_client_no_files(): @pytest.fixture(scope="session") def app_client_two_attached_databases(): - yield from make_app_client( + with make_app_client( extra_databases={"extra database.db": EXTRA_DATABASE_SQL} - ) + ) as client: + yield client @pytest.fixture(scope="session") def app_client_conflicting_database_names(): - yield from make_app_client( + with make_app_client( extra_databases={"foo.db": EXTRA_DATABASE_SQL, "foo-bar.db": EXTRA_DATABASE_SQL} - ) + ) as client: + yield client @pytest.fixture(scope="session") def app_client_two_attached_databases_one_immutable(): - yield from make_app_client( + with make_app_client( is_immutable=True, extra_databases={"extra database.db": EXTRA_DATABASE_SQL} - ) + ) as client: + yield client @pytest.fixture(scope="session") def app_client_with_hash(): - yield from make_app_client(config={"hash_urls": True}, is_immutable=True) + with make_app_client(config={"hash_urls": True}, is_immutable=True) as client: + yield client @pytest.fixture(scope="session") def app_client_shorter_time_limit(): - yield from make_app_client(20) + with make_app_client(20) as client: + yield client @pytest.fixture(scope="session") def app_client_returned_rows_matches_page_size(): - yield from make_app_client(max_returned_rows=50) + with make_app_client(max_returned_rows=50) as client: + yield client @pytest.fixture(scope="session") def app_client_larger_cache_size(): - yield from make_app_client(config={"cache_size_kb": 2500}) + with make_app_client(config={"cache_size_kb": 2500}) as client: + yield client @pytest.fixture(scope="session") def app_client_csv_max_mb_one(): - yield from make_app_client(config={"max_csv_mb": 1}) + with make_app_client(config={"max_csv_mb": 1}) as client: + yield client @pytest.fixture(scope="session") def app_client_with_dot(): - yield from make_app_client(filename="fixtures.dot.db") + with make_app_client(filename="fixtures.dot.db") as client: + yield client @pytest.fixture(scope="session") def app_client_with_cors(): - yield from make_app_client(cors=True) + with make_app_client(cors=True) as client: + yield client @pytest.fixture(scope="session") def 
app_client_immutable_and_inspect_file(): inspect_data = {"fixtures": {"tables": {"sortable": {"count": 100}}}} - yield from make_app_client(is_immutable=True, inspect_data=inspect_data) + with make_app_client(is_immutable=True, inspect_data=inspect_data) as client: + yield client def generate_compound_rows(num): diff --git a/tests/test_api.py b/tests/test_api.py index 555e394a..22378946 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -605,7 +605,7 @@ def test_invalid_custom_sql(app_client): def test_allow_sql_off(): - for client in make_app_client(config={"allow_sql": False}): + with make_app_client(config={"allow_sql": False}) as client: response = client.get("/fixtures.json?sql=select+sleep(0.01)") assert 400 == response.status assert "sql= is not allowed" == response.json["error"] @@ -1107,7 +1107,7 @@ def test_table_filter_extra_where_invalid(app_client): def test_table_filter_extra_where_disabled_if_no_sql_allowed(): - for client in make_app_client(config={"allow_sql": False}): + with make_app_client(config={"allow_sql": False}) as client: response = client.get("/fixtures/facetable.json?_where=neighborhood='Dogpatch'") assert 400 == response.status assert "_where= is not allowed" == response.json["error"] @@ -1528,14 +1528,14 @@ def test_suggested_facets(app_client): def test_allow_facet_off(): - for client in make_app_client(config={"allow_facet": False}): + with make_app_client(config={"allow_facet": False}) as client: assert 400 == client.get("/fixtures/facetable.json?_facet=planet_int").status # Should not suggest any facets either: assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"] def test_suggest_facets_off(): - for client in make_app_client(config={"suggest_facets": False}): + with make_app_client(config={"suggest_facets": False}) as client: # Now suggested_facets should be [] assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"] @@ -1667,7 +1667,7 @@ def test_config_cache_size(app_client_larger_cache_size): def test_config_force_https_urls(): - for client in make_app_client(config={"force_https_urls": True}): + with make_app_client(config={"force_https_urls": True}) as client: response = client.get("/fixtures/facetable.json?_size=3&_facet=state") assert response.json["next_url"].startswith("https://") assert response.json["facet_results"]["state"]["results"][0][ diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index 73b01e51..c217be8f 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -4,7 +4,7 @@ from .fixtures import make_app_client @pytest.fixture def canned_write_client(): - for client in make_app_client( + with make_app_client( extra_databases={"data.db": "create table names (name text)"}, metadata={ "databases": { @@ -35,7 +35,7 @@ def canned_write_client(): } } }, - ): + ) as client: yield client diff --git a/tests/test_cli.py b/tests/test_cli.py index 2616f1d1..6939fe57 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -41,7 +41,7 @@ def test_inspect_cli_writes_to_file(app_client): def test_serve_with_inspect_file_prepopulates_table_counts_cache(): inspect_data = {"fixtures": {"tables": {"hithere": {"count": 44}}}} - for client in make_app_client(inspect_data=inspect_data, is_immutable=True): + with make_app_client(inspect_data=inspect_data, is_immutable=True) as client: assert inspect_data == client.ds.inspect_data db = client.ds.databases["fixtures"] assert {"hithere": 44} == db.cached_table_counts diff --git a/tests/test_custom_pages.py 
b/tests/test_custom_pages.py index c69facb5..4e4b2a67 100644 --- a/tests/test_custom_pages.py +++ b/tests/test_custom_pages.py @@ -27,7 +27,7 @@ def custom_pages_client(tmp_path_factory): nested_dir = pages_dir / "nested" nested_dir.mkdir() (nested_dir / "nest.html").write_text("Nest!", "utf-8") - for client in make_app_client(template_dir=str(template_dir)): + with make_app_client(template_dir=str(template_dir)) as client: yield client diff --git a/tests/test_html.py b/tests/test_html.py index ac7432d7..4e913bcf 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -63,9 +63,9 @@ def test_static(app_client): def test_static_mounts(): - for client in make_app_client( + with make_app_client( static_mounts=[("custom-static", str(pathlib.Path(__file__).parent))] - ): + ) as client: response = client.get("/custom-static/test_html.py") assert response.status == 200 response = client.get("/custom-static/not_exists.py") @@ -75,7 +75,7 @@ def test_static_mounts(): def test_memory_database_page(): - for client in make_app_client(memory=True): + with make_app_client(memory=True) as client: response = client.get("/:memory:") assert_permissions_checked( client.ds, ["view-instance", ("view-database", "database", ":memory:")] @@ -177,7 +177,7 @@ def test_definition_sql(path, expected_definition_sql, app_client): def test_table_cell_truncation(): - for client in make_app_client(config={"truncate_cells_html": 5}): + with make_app_client(config={"truncate_cells_html": 5}) as client: response = client.get("/fixtures/facetable") assert response.status == 200 table = Soup(response.body, "html.parser").find("table") @@ -202,7 +202,7 @@ def test_table_cell_truncation(): def test_row_page_does_not_truncate(): - for client in make_app_client(config={"truncate_cells_html": 5}): + with make_app_client(config={"truncate_cells_html": 5}) as client: response = client.get("/fixtures/facetable/1") assert response.status == 200 assert_permissions_checked( @@ -925,7 +925,7 @@ def test_table_metadata(app_client): def test_database_download_allowed_for_immutable(): - for client in make_app_client(is_immutable=True): + with make_app_client(is_immutable=True) as client: assert not client.ds.databases["fixtures"].is_mutable # Regular page should have a download link response = client.get("/fixtures") @@ -951,7 +951,7 @@ def test_database_download_disallowed_for_mutable(app_client): def test_database_download_disallowed_for_memory(): - for client in make_app_client(memory=True): + with make_app_client(memory=True) as client: # Memory page should NOT have a download link response = client.get("/:memory:") soup = Soup(response.body, "html.parser") @@ -960,7 +960,7 @@ def test_database_download_disallowed_for_memory(): def test_allow_download_off(): - for client in make_app_client(is_immutable=True, config={"allow_download": False}): + with make_app_client(is_immutable=True, config={"allow_download": False}) as client: response = client.get("/fixtures") soup = Soup(response.body, "html.parser") assert not len(soup.findAll("a", {"href": re.compile(r"\.db$")})) @@ -978,7 +978,7 @@ def test_allow_sql_on(app_client): def test_allow_sql_off(): - for client in make_app_client(config={"allow_sql": False}): + with make_app_client(config={"allow_sql": False}) as client: response = client.get("/fixtures") soup = Soup(response.body, "html.parser") assert not len(soup.findAll("textarea", {"name": "sql"})) @@ -1170,9 +1170,9 @@ def test_metadata_json_html(app_client): def test_custom_table_include(): - for client in make_app_client( + 
with make_app_client( template_dir=str(pathlib.Path(__file__).parent / "test_templates") - ): + ) as client: response = client.get("/fixtures/complex_foreign_keys") assert response.status == 200 assert ( @@ -1197,7 +1197,7 @@ def test_zero_results(app_client, path): def test_config_template_debug_on(): - for client in make_app_client(config={"template_debug": True}): + with make_app_client(config={"template_debug": True}) as client: response = client.get("/fixtures/facetable?_context=1") assert response.status == 200 assert response.text.startswith("
{")
@@ -1211,7 +1211,7 @@ def test_config_template_debug_off(app_client):
 
 def test_debug_context_includes_extra_template_vars():
     # https://github.com/simonw/datasette/issues/693
-    for client in make_app_client(config={"template_debug": True}):
+    with make_app_client(config={"template_debug": True}) as client:
         response = client.get("/fixtures/facetable?_context=1")
         # scope_path is added by PLUGIN1
         assert "scope_path" in response.text
@@ -1292,7 +1292,7 @@ def test_metadata_sort_desc(app_client):
     ],
 )
 def test_base_url_config(base_url, path):
-    for client in make_app_client(config={"base_url": base_url}):
+    with make_app_client(config={"base_url": base_url}) as client:
         response = client.get(base_url + path.lstrip("/"))
         soup = Soup(response.body, "html.parser")
         for el in soup.findAll(["a", "link", "script"]):
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index f69e7fa7..c782b87b 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -229,9 +229,9 @@ def test_plugins_asgi_wrapper(app_client):
 
 
 def test_plugins_extra_template_vars(restore_working_directory):
-    for client in make_app_client(
+    with make_app_client(
         template_dir=str(pathlib.Path(__file__).parent / "test_templates")
-    ):
+    ) as client:
         response = client.get("/-/metadata")
         assert response.status == 200
         extra_template_vars = json.loads(
@@ -254,9 +254,9 @@ def test_plugins_extra_template_vars(restore_working_directory):
 
 
 def test_plugins_async_template_function(restore_working_directory):
-    for client in make_app_client(
+    with make_app_client(
         template_dir=str(pathlib.Path(__file__).parent / "test_templates")
-    ):
+    ) as client:
         response = client.get("/-/metadata")
         assert response.status == 200
         extra_from_awaitable_function = (

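The patch above replaces the generator-based ``make_app_client()`` test helper with a ``contextlib.contextmanager``, so tests read ``with make_app_client(...) as client:`` instead of looping over a single-item generator. A minimal, self-contained sketch of that pattern (the setup and teardown here are stand-ins, not Datasette's real fixture code):

    import contextlib

    @contextlib.contextmanager
    def make_client_sketch(**kwargs):
        client = {"config": kwargs}   # stand-in for building a TestClient
        try:
            yield client              # the body of the `with` block runs here
        finally:
            client.clear()            # stand-in for temp-directory cleanup

    with make_client_sketch(memory=True) as client:
        assert client["config"] == {"memory": True}
    # After the block, cleanup has run even if an assertion failed inside it.
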
From ece0ba6f4bc152af6f605fc5f536ffa46af95274 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Sun, 7 Jun 2020 14:23:16 -0700
Subject: [PATCH 0063/1871] Test + default impl for view-query permission, refs
 #811

---
 datasette/default_permissions.py | 21 ++++++++++++++++++---
 tests/test_permissions.py        | 22 ++++++++++++++++++++++
 2 files changed, 40 insertions(+), 3 deletions(-)
 create mode 100644 tests/test_permissions.py

diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py
index 0b0d17f9..40ae54ab 100644
--- a/datasette/default_permissions.py
+++ b/datasette/default_permissions.py
@@ -1,7 +1,22 @@
 from datasette import hookimpl
+from datasette.utils import actor_matches_allow
 
 
 @hookimpl
-def permission_allowed(actor, action, resource_type, resource_identifier):
-    if actor and actor.get("id") == "root" and action == "permissions-debug":
-        return True
+def permission_allowed(datasette, actor, action, resource_type, resource_identifier):
+    if action == "permissions-debug":
+        if actor and actor.get("id") == "root":
+            return True
+    elif action == "view-query":
+        # Check if this query has a "allow" block in metadata
+        assert resource_type == "query"
+        database, query_name = resource_identifier
+        queries_metadata = datasette.metadata("queries", database=database)
+        assert query_name in queries_metadata
+        if isinstance(queries_metadata[query_name], str):
+            return True
+        allow = queries_metadata[query_name].get("allow")
+        print("checking allow - actor = {}, allow = {}".format(actor, allow))
+        if allow is None:
+            return True
+        return actor_matches_allow(actor, allow)
diff --git a/tests/test_permissions.py b/tests/test_permissions.py
new file mode 100644
index 00000000..c90fdf7a
--- /dev/null
+++ b/tests/test_permissions.py
@@ -0,0 +1,22 @@
+from .fixtures import make_app_client
+import pytest
+
+
+@pytest.mark.parametrize(
+    "allow,expected_anon,expected_auth",
+    [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),],
+)
+def test_execute_sql(allow, expected_anon, expected_auth):
+    with make_app_client(
+        metadata={
+            "databases": {
+                "fixtures": {"queries": {"q": {"sql": "select 1 + 1", "allow": allow}}}
+            }
+        }
+    ) as client:
+        anon_response = client.get("/fixtures/q")
+        assert expected_anon == anon_response.status
+        auth_response = client.get(
+            "/fixtures/q", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}
+        )
+        assert expected_auth == auth_response.status

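The default hook above reduces the ``view-query`` check to comparing the query's ``allow`` block from metadata against the current actor via ``actor_matches_allow()``. A rough sketch of how that comparison behaves for the allow block used in the test (the helper is the one imported at the top of the patch; the actor values are illustrative)::

    from datasette.utils import actor_matches_allow

    allow = {"id": "root"}                       # "allow" block from metadata

    actor_matches_allow({"id": "root"}, allow)   # matching actor: allowed
    actor_matches_allow({"id": "other"}, allow)  # non-matching actor: denied
    actor_matches_allow(None, allow)             # anonymous visitor: denied
    actor_matches_allow({"id": "root"}, None)    # no allow block at all: allowed
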
From 8571ce388a23dd98adbdc1b7eff6c6eef5a9d1af Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Sun, 7 Jun 2020 14:30:39 -0700
Subject: [PATCH 0064/1871] Implemented view-instance permission, refs #811

---
 datasette/default_permissions.py |  4 ++++
 tests/test_permissions.py        | 20 ++++++++++++++++++++
 2 files changed, 24 insertions(+)

diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py
index 40ae54ab..ee182c85 100644
--- a/datasette/default_permissions.py
+++ b/datasette/default_permissions.py
@@ -7,6 +7,10 @@ def permission_allowed(datasette, actor, action, resource_type, resource_identif
     if action == "permissions-debug":
         if actor and actor.get("id") == "root":
             return True
+    elif action == "view-instance":
+        allow = datasette.metadata("allow")
+        if allow is not None:
+            return actor_matches_allow(actor, allow)
     elif action == "view-query":
         # Check if this query has a "allow" block in metadata
         assert resource_type == "query"
diff --git a/tests/test_permissions.py b/tests/test_permissions.py
index c90fdf7a..b5c2e00c 100644
--- a/tests/test_permissions.py
+++ b/tests/test_permissions.py
@@ -20,3 +20,23 @@ def test_execute_sql(allow, expected_anon, expected_auth):
             "/fixtures/q", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}
         )
         assert expected_auth == auth_response.status
+
+
+@pytest.mark.parametrize(
+    "allow,expected_anon,expected_auth",
+    [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),],
+)
+def test_view_instance(allow, expected_anon, expected_auth):
+    with make_app_client(metadata={"allow": allow}) as client:
+        for path in (
+            "/",
+            "/fixtures",
+            "/fixtures/compound_three_primary_keys",
+            "/fixtures/compound_three_primary_keys/a,a,a",
+        ):
+            anon_response = client.get(path)
+            assert expected_anon == anon_response.status
+            auth_response = client.get(
+                path, cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")},
+            )
+            assert expected_auth == auth_response.status

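The instance-level check reads the top-level ``allow`` key from metadata, while the earlier patch reads per-query ``allow`` blocks, so both can live in one metadata dictionary. A hypothetical example combining the two (names and SQL are illustrative only)::

    metadata = {
        # view-instance: only the root actor can see anything at all
        "allow": {"id": "root"},
        "databases": {
            "fixtures": {
                "queries": {
                    # view-query: this canned query carries its own allow block
                    "q": {"sql": "select 1 + 1", "allow": {"id": "root"}},
                }
            }
        },
    }
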
From cd92e4fe2a47039a8c780e4e7183a0d2e7446884 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Sun, 7 Jun 2020 14:33:52 -0700
Subject: [PATCH 0065/1871] Fixed test name: this executes view-query, not
 execute-sql - refs #811

---
 tests/test_permissions.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_permissions.py b/tests/test_permissions.py
index b5c2e00c..bf66bc9c 100644
--- a/tests/test_permissions.py
+++ b/tests/test_permissions.py
@@ -6,7 +6,7 @@ import pytest
     "allow,expected_anon,expected_auth",
     [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),],
 )
-def test_execute_sql(allow, expected_anon, expected_auth):
+def test_view_query(allow, expected_anon, expected_auth):
     with make_app_client(
         metadata={
             "databases": {

From 613fa551a1be31645deb0ece4b46638c181827e0 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Sun, 7 Jun 2020 20:14:27 -0700
Subject: [PATCH 0066/1871] Removed view-row permission, for the moment - refs
 #811

https://github.com/simonw/datasette/issues/811#issuecomment-640338347
---
 datasette/views/table.py |  3 ---
 docs/authentication.rst  | 13 -------------
 tests/test_html.py       |  1 -
 3 files changed, 17 deletions(-)

diff --git a/datasette/views/table.py b/datasette/views/table.py
index 10d6725a..935fed3d 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -851,9 +851,6 @@ class RowView(RowTableShared):
         await self.check_permission(request, "view-instance")
         await self.check_permission(request, "view-database", "database", database)
         await self.check_permission(request, "view-table", "table", (database, table))
-        await self.check_permission(
-            request, "view-row", "row", tuple([database, table] + list(pk_values))
-        )
         db = self.ds.databases[database]
         pks = await db.primary_keys(table)
         use_rowid = not pks
diff --git a/docs/authentication.rst b/docs/authentication.rst
index 1bf2a1a5..2caca66f 100644
--- a/docs/authentication.rst
+++ b/docs/authentication.rst
@@ -206,19 +206,6 @@ Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.i
 ``resource_identifier`` - tuple: (string, string)
     The name of the database, then the name of the table
 
-.. _permissions_view_row:
-
-view-row
---------
-
-Actor is allowed to view a row page, e.g. https://latest.datasette.io/fixtures/compound_primary_key/a,b
-
-``resource_type`` - string
-    "row"
-
-``resource_identifier`` - tuple: (string, string, strings...)
-    The name of the database, then the name of the table, then the primary key of the row. The primary key may be a single value or multiple values, so the ``resource_identifier`` tuple may be three or more items long.
-
 .. _permissions_view_query:
 
 view-query
diff --git a/tests/test_html.py b/tests/test_html.py
index 4e913bcf..e05640d7 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -210,7 +210,6 @@ def test_row_page_does_not_truncate():
             [
                 "view-instance",
                 ("view-table", "table", ("fixtures", "facetable")),
-                ("view-row", "row", ("fixtures", "facetable", "1")),
             ],
         )
         table = Soup(response.body, "html.parser").find("table")

From 9b42e1a4f5902fb7d6ad0111189900e2656ffda3 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Sun, 7 Jun 2020 20:50:37 -0700
Subject: [PATCH 0067/1871] view-database permission
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Also now using 🔒 to indicate private resources - resources that
would not be available to the anonymous user. Refs #811
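A resource is treated as private when the anonymous actor would be refused it, which is what the templates key the padlock off. Roughly, using the ``permission_allowed()`` signature that appears in this patch (a sketch for illustration, not part of the diff; the metadata and database name are hypothetical)::

    import asyncio
    from datasette.app import Datasette

    async def main():
        ds = Datasette(
            [], memory=True,
            metadata={"databases": {"fixtures": {"allow": {"id": "root"}}}},
        )
        # Visible to the signed-in root actor?
        visible = await ds.permission_allowed(
            {"id": "root"}, "view-database",
            resource_type="database", resource_identifier="fixtures", default=True,
        )
        # Private: the anonymous (None) actor would be denied, so show the padlock
        private = not await ds.permission_allowed(
            None, "view-database",
            resource_type="database", resource_identifier="fixtures", default=True,
        )
        print(visible, private)  # expected: True True

    asyncio.run(main())
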
---
 datasette/default_permissions.py  |  7 +++++-
 datasette/templates/database.html |  2 +-
 datasette/templates/index.html    |  2 +-
 datasette/views/database.py       |  3 +--
 datasette/views/index.py          | 19 +++++++++++++++-
 tests/test_canned_write.py        | 11 +++++-----
 tests/test_html.py                |  5 +----
 tests/test_permissions.py         | 36 +++++++++++++++++++++++++++++++
 8 files changed, 69 insertions(+), 16 deletions(-)

diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py
index ee182c85..40be8d34 100644
--- a/datasette/default_permissions.py
+++ b/datasette/default_permissions.py
@@ -11,6 +11,12 @@ def permission_allowed(datasette, actor, action, resource_type, resource_identif
         allow = datasette.metadata("allow")
         if allow is not None:
             return actor_matches_allow(actor, allow)
+    elif action == "view-database":
+        assert resource_type == "database"
+        database_allow = datasette.metadata("allow", database=resource_identifier)
+        if database_allow is None:
+            return True
+        return actor_matches_allow(actor, database_allow)
     elif action == "view-query":
         # Check if this query has a "allow" block in metadata
         assert resource_type == "query"
@@ -20,7 +26,6 @@ def permission_allowed(datasette, actor, action, resource_type, resource_identif
         if isinstance(queries_metadata[query_name], str):
             return True
         allow = queries_metadata[query_name].get("allow")
-        print("checking allow - actor = {}, allow = {}".format(actor, allow))
         if allow is None:
             return True
         return actor_matches_allow(actor, allow)
diff --git a/datasette/templates/database.html b/datasette/templates/database.html
index fc88003c..eaebfdf7 100644
--- a/datasette/templates/database.html
+++ b/datasette/templates/database.html
@@ -60,7 +60,7 @@
     

Queries

{% endif %} diff --git a/datasette/templates/index.html b/datasette/templates/index.html index b394564a..3b8568b3 100644 --- a/datasette/templates/index.html +++ b/datasette/templates/index.html @@ -10,7 +10,7 @@ {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} {% for database in databases %} -

{{ database.name }}
+{{ database.name }}{% if database.private %} 🔒{% endif %}

{% if database.show_table_row_counts %}{{ "{:,}".format(database.table_rows_sum) }} rows in {% endif %}{{ database.tables_count }} table{% if database.tables_count != 1 %}s{% endif %}{% if database.tables_count and database.hidden_tables_count %}, {% endif -%} {% if database.hidden_tables_count -%} diff --git a/datasette/views/database.py b/datasette/views/database.py index 961ab61e..4804b2a9 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -58,8 +58,7 @@ class DatabaseView(DataView): tables.sort(key=lambda t: (t["hidden"], t["name"])) canned_queries = [ dict( - query, - requires_auth=not actor_matches_allow(None, query.get("allow", None)), + query, private=not actor_matches_allow(None, query.get("allow", None)), ) for query in self.ds.get_canned_queries(database) if actor_matches_allow( diff --git a/datasette/views/index.py b/datasette/views/index.py index 5f903474..7b88028b 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -2,7 +2,7 @@ import hashlib import json from datasette.utils import CustomJSONEncoder -from datasette.utils.asgi import Response +from datasette.utils.asgi import Response, Forbidden from datasette.version import __version__ from .base import BaseView @@ -25,6 +25,22 @@ class IndexView(BaseView): await self.check_permission(request, "view-instance") databases = [] for name, db in self.ds.databases.items(): + # Check permission + allowed = await self.ds.permission_allowed( + request.scope.get("actor"), + "view-database", + resource_type="database", + resource_identifier=name, + default=True, + ) + if not allowed: + continue + private = not await self.ds.permission_allowed( + None, + "view-database", + resource_type="database", + resource_identifier=name, + ) table_names = await db.table_names() hidden_table_names = set(await db.hidden_table_names()) views = await db.view_names() @@ -95,6 +111,7 @@ class IndexView(BaseView): ), "hidden_tables_count": len(hidden_tables), "views_count": len(views), + "private": private, } ) diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index c217be8f..dc3fba3f 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -120,13 +120,12 @@ def test_canned_query_permissions_on_database_page(canned_write_client): ) assert 200 == response.status assert [ - {"name": "add_name", "requires_auth": False}, - {"name": "add_name_specify_id", "requires_auth": False}, - {"name": "delete_name", "requires_auth": True}, - {"name": "update_name", "requires_auth": False}, + {"name": "add_name", "private": False}, + {"name": "add_name_specify_id", "private": False}, + {"name": "delete_name", "private": True}, + {"name": "update_name", "private": False}, ] == [ - {"name": q["name"], "requires_auth": q["requires_auth"]} - for q in response.json["queries"] + {"name": q["name"], "private": q["private"]} for q in response.json["queries"] ] diff --git a/tests/test_html.py b/tests/test_html.py index e05640d7..3f6dc4df 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -207,10 +207,7 @@ def test_row_page_does_not_truncate(): assert response.status == 200 assert_permissions_checked( client.ds, - [ - "view-instance", - ("view-table", "table", ("fixtures", "facetable")), - ], + ["view-instance", ("view-table", "table", ("fixtures", "facetable")),], ) table = Soup(response.body, "html.parser").find("table") assert table["class"] == ["rows-and-columns"] diff --git a/tests/test_permissions.py b/tests/test_permissions.py index bf66bc9c..21014a25 100644 --- 
a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -40,3 +40,39 @@ def test_view_instance(allow, expected_anon, expected_auth): path, cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, ) assert expected_auth == auth_response.status + + +@pytest.mark.parametrize( + "allow,expected_anon,expected_auth", + [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),], +) +def test_view_database(allow, expected_anon, expected_auth): + with make_app_client( + metadata={"databases": {"fixtures": {"allow": allow}}} + ) as client: + for path in ( + "/fixtures", + "/fixtures/compound_three_primary_keys", + "/fixtures/compound_three_primary_keys/a,a,a", + ): + anon_response = client.get(path) + assert expected_anon == anon_response.status + auth_response = client.get( + path, cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + ) + assert expected_auth == auth_response.status + + +def test_database_list_respects_view_database(): + with make_app_client( + metadata={"databases": {"fixtures": {"allow": {"id": "root"}}}}, + extra_databases={"data.db": "create table names (name text)"}, + ) as client: + anon_response = client.get("/") + assert 'data' in anon_response.text + assert 'fixtures' not in anon_response.text + auth_response = client.get( + "/", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + ) + assert 'data' in auth_response.text + assert 'fixtures 🔒' in auth_response.text From b26292a4582ea7fe16c59d0ac99f3bd8c3d4b1d0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 7 Jun 2020 20:56:49 -0700 Subject: [PATCH 0068/1871] Test that view-query is respected by query list, refs #811 --- datasette/templates/database.html | 2 +- tests/test_permissions.py | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index eaebfdf7..dfafc049 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -60,7 +60,7 @@

Queries

{% endif %} diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 21014a25..e66b9291 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -22,6 +22,26 @@ def test_view_query(allow, expected_anon, expected_auth): assert expected_auth == auth_response.status +def test_query_list_respects_view_query(): + with make_app_client( + metadata={ + "databases": { + "fixtures": { + "queries": {"q": {"sql": "select 1 + 1", "allow": {"id": "root"}}} + } + } + } + ) as client: + html_fragment = '
  • q 🔒
  • ' + anon_response = client.get("/fixtures") + assert html_fragment not in anon_response.text + assert '"/fixtures/q"' not in anon_response.text + auth_response = client.get( + "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} + ) + assert html_fragment in auth_response.text + + @pytest.mark.parametrize( "allow,expected_anon,expected_auth", [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),], From 9397d718345c4b35d2a5c55bfcbd1468876b5ab9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 7 Jun 2020 21:47:22 -0700 Subject: [PATCH 0069/1871] Implemented view-table, refs #811 --- datasette/default_permissions.py | 8 ++ datasette/templates/database.html | 2 +- datasette/views/database.py | 16 ++++ tests/test_permissions.py | 123 ++++++++++++++++++++---------- 4 files changed, 108 insertions(+), 41 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index 40be8d34..dd1770a3 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -17,6 +17,14 @@ def permission_allowed(datasette, actor, action, resource_type, resource_identif if database_allow is None: return True return actor_matches_allow(actor, database_allow) + elif action == "view-table": + assert resource_type == "table" + database, table = resource_identifier + tables = datasette.metadata("tables", database=database) or {} + table_allow = (tables.get(table) or {}).get("allow") + if table_allow is None: + return True + return actor_matches_allow(actor, table_allow) elif action == "view-query": # Check if this query has a "allow" block in metadata assert resource_type == "query" diff --git a/datasette/templates/database.html b/datasette/templates/database.html index dfafc049..1187267d 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -36,7 +36,7 @@ {% for table in tables %} {% if show_hidden or not table.hidden %}
    -

    {{ table.name }}{% if table.hidden %} (hidden){% endif %}

    +

    {{ table.name }}{% if table.private %} 🔒{% endif %}{% if table.hidden %} (hidden){% endif %}

    {% for column in table.columns[:9] %}{{ column }}{% if not loop.last %}, {% endif %}{% endfor %}{% if table.columns|length > 9 %}...{% endif %}

    {% if table.count is none %}Many rows{% else %}{{ "{:,}".format(table.count) }} row{% if table.count == 1 %}{% else %}s{% endif %}{% endif %}

    diff --git a/datasette/views/database.py b/datasette/views/database.py index 4804b2a9..ba3d22d9 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -42,6 +42,21 @@ class DatabaseView(DataView): tables = [] for table in table_counts: + allowed = await self.ds.permission_allowed( + request.scope.get("actor"), + "view-table", + resource_type="table", + resource_identifier=(database, table), + default=True, + ) + if not allowed: + continue + private = not await self.ds.permission_allowed( + None, + "view-table", + resource_type="table", + resource_identifier=(database, table), + ) table_columns = await db.table_columns(table) tables.append( { @@ -52,6 +67,7 @@ class DatabaseView(DataView): "hidden": table in hidden_table_names, "fts_table": await db.fts_table(table), "foreign_keys": all_foreign_keys[table], + "private": private, } ) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index e66b9291..7c5b02c0 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -2,46 +2,6 @@ from .fixtures import make_app_client import pytest -@pytest.mark.parametrize( - "allow,expected_anon,expected_auth", - [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),], -) -def test_view_query(allow, expected_anon, expected_auth): - with make_app_client( - metadata={ - "databases": { - "fixtures": {"queries": {"q": {"sql": "select 1 + 1", "allow": allow}}} - } - } - ) as client: - anon_response = client.get("/fixtures/q") - assert expected_anon == anon_response.status - auth_response = client.get( - "/fixtures/q", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} - ) - assert expected_auth == auth_response.status - - -def test_query_list_respects_view_query(): - with make_app_client( - metadata={ - "databases": { - "fixtures": { - "queries": {"q": {"sql": "select 1 + 1", "allow": {"id": "root"}}} - } - } - } - ) as client: - html_fragment = '
  • q 🔒
  • ' - anon_response = client.get("/fixtures") - assert html_fragment not in anon_response.text - assert '"/fixtures/q"' not in anon_response.text - auth_response = client.get( - "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} - ) - assert html_fragment in auth_response.text - - @pytest.mark.parametrize( "allow,expected_anon,expected_auth", [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),], @@ -96,3 +56,86 @@ def test_database_list_respects_view_database(): ) assert 'data' in auth_response.text assert 'fixtures 🔒' in auth_response.text + + +@pytest.mark.parametrize( + "allow,expected_anon,expected_auth", + [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),], +) +def test_view_table(allow, expected_anon, expected_auth): + with make_app_client( + metadata={ + "databases": { + "fixtures": { + "tables": {"compound_three_primary_keys": {"allow": allow}} + } + } + } + ) as client: + anon_response = client.get("/fixtures/compound_three_primary_keys") + assert expected_anon == anon_response.status + auth_response = client.get( + "/fixtures/compound_three_primary_keys", + cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + ) + assert expected_auth == auth_response.status + + +def test_table_list_respects_view_table(): + with make_app_client( + metadata={ + "databases": { + "fixtures": { + "tables": {"compound_three_primary_keys": {"allow": {"id": "root"}}} + } + } + } + ) as client: + html_fragment = 'compound_three_primary_keys 🔒' + anon_response = client.get("/fixtures") + assert html_fragment not in anon_response.text + assert '"/fixtures/compound_three_primary_keys"' not in anon_response.text + auth_response = client.get( + "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} + ) + assert html_fragment in auth_response.text + + +@pytest.mark.parametrize( + "allow,expected_anon,expected_auth", + [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),], +) +def test_view_query(allow, expected_anon, expected_auth): + with make_app_client( + metadata={ + "databases": { + "fixtures": {"queries": {"q": {"sql": "select 1 + 1", "allow": allow}}} + } + } + ) as client: + anon_response = client.get("/fixtures/q") + assert expected_anon == anon_response.status + auth_response = client.get( + "/fixtures/q", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} + ) + assert expected_auth == auth_response.status + + +def test_query_list_respects_view_query(): + with make_app_client( + metadata={ + "databases": { + "fixtures": { + "queries": {"q": {"sql": "select 1 + 1", "allow": {"id": "root"}}} + } + } + } + ) as client: + html_fragment = '
  • q 🔒
  • ' + anon_response = client.get("/fixtures") + assert html_fragment not in anon_response.text + assert '"/fixtures/q"' not in anon_response.text + auth_response = client.get( + "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} + ) + assert html_fragment in auth_response.text From e18f8c3f871fe1e9e00554b5c6c75409cc1a5e6d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 06:49:55 -0700 Subject: [PATCH 0070/1871] New check_visibility() utility function, refs #811 --- datasette/utils/__init__.py | 23 +++++++++++++++++++++++ datasette/views/database.py | 35 ++++++++++++++++------------------- datasette/views/index.py | 19 ++++--------------- 3 files changed, 43 insertions(+), 34 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 077728f4..3d964049 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -874,3 +874,26 @@ def actor_matches_allow(actor, allow): if actor_values.intersection(values): return True return False + + +async def check_visibility( + datasette, actor, action, resource_type, resource_identifier, default=True +): + "Returns (visible, private) - visible = can you see it, private = can others see it too" + visible = await datasette.permission_allowed( + actor, + action, + resource_type=resource_type, + resource_identifier=resource_identifier, + default=default, + ) + if not visible: + return (False, False) + private = not await datasette.permission_allowed( + None, + action, + resource_type=resource_type, + resource_identifier=resource_identifier, + default=default, + ) + return visible, private diff --git a/datasette/views/database.py b/datasette/views/database.py index ba3d22d9..afbb6b05 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -3,6 +3,7 @@ import jinja2 from datasette.utils import ( actor_matches_allow, + check_visibility, to_css_class, validate_sql_select, is_url, @@ -42,21 +43,15 @@ class DatabaseView(DataView): tables = [] for table in table_counts: - allowed = await self.ds.permission_allowed( + visible, private = await check_visibility( + self.ds, request.scope.get("actor"), "view-table", - resource_type="table", - resource_identifier=(database, table), - default=True, + "table", + (database, table), ) - if not allowed: + if not visible: continue - private = not await self.ds.permission_allowed( - None, - "view-table", - resource_type="table", - resource_identifier=(database, table), - ) table_columns = await db.table_columns(table) tables.append( { @@ -72,15 +67,17 @@ class DatabaseView(DataView): ) tables.sort(key=lambda t: (t["hidden"], t["name"])) - canned_queries = [ - dict( - query, private=not actor_matches_allow(None, query.get("allow", None)), + canned_queries = [] + for query in self.ds.get_canned_queries(database): + visible, private = await check_visibility( + self.ds, + request.scope.get("actor"), + "view-query", + "query", + (database, query["name"]), ) - for query in self.ds.get_canned_queries(database) - if actor_matches_allow( - request.scope.get("actor", None), query.get("allow", None) - ) - ] + if visible: + canned_queries.append(dict(query, private=private)) return ( { "database": database, diff --git a/datasette/views/index.py b/datasette/views/index.py index 7b88028b..0f7fb613 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -1,7 +1,7 @@ import hashlib import json -from datasette.utils import CustomJSONEncoder +from datasette.utils import check_visibility, CustomJSONEncoder from 
datasette.utils.asgi import Response, Forbidden from datasette.version import __version__ @@ -25,22 +25,11 @@ class IndexView(BaseView): await self.check_permission(request, "view-instance") databases = [] for name, db in self.ds.databases.items(): - # Check permission - allowed = await self.ds.permission_allowed( - request.scope.get("actor"), - "view-database", - resource_type="database", - resource_identifier=name, - default=True, + visible, private = await check_visibility( + self.ds, request.scope.get("actor"), "view-database", "database", name, ) - if not allowed: + if not visible: continue - private = not await self.ds.permission_allowed( - None, - "view-database", - resource_type="database", - resource_identifier=name, - ) table_names = await db.table_names() hidden_table_names = set(await db.hidden_table_names()) views = await db.view_names() From cc218fa9be55842656d030545c308392e3736053 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 07:02:31 -0700 Subject: [PATCH 0071/1871] Move assert_permissions_checked() calls from test_html.py to test_permissions.py, refs #811 --- datasette/app.py | 2 +- tests/test_html.py | 49 ------------------------------------ tests/test_permissions.py | 52 ++++++++++++++++++++++++++++++++++++++- 3 files changed, 52 insertions(+), 51 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index f433a10a..23c293c9 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -298,7 +298,7 @@ class Datasette: pm.hook.prepare_jinja2_environment(env=self.jinja_env) self._register_renderers() - self._permission_checks = collections.deque(maxlen=30) + self._permission_checks = collections.deque(maxlen=200) self._root_token = os.urandom(32).hex() def sign(self, value, namespace="default"): diff --git a/tests/test_html.py b/tests/test_html.py index 3f6dc4df..cb0e0c90 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -4,7 +4,6 @@ from .fixtures import ( # noqa app_client_shorter_time_limit, app_client_two_attached_databases, app_client_with_hash, - assert_permissions_checked, make_app_client, METADATA, ) @@ -18,7 +17,6 @@ import urllib.parse def test_homepage(app_client_two_attached_databases): response = app_client_two_attached_databases.get("/") - assert_permissions_checked(app_client_two_attached_databases.ds, ["view-instance"]) assert response.status == 200 assert "text/html; charset=utf-8" == response.headers["content-type"] soup = Soup(response.body, "html.parser") @@ -77,9 +75,6 @@ def test_static_mounts(): def test_memory_database_page(): with make_app_client(memory=True) as client: response = client.get("/:memory:") - assert_permissions_checked( - client.ds, ["view-instance", ("view-database", "database", ":memory:")] - ) assert response.status == 200 @@ -92,9 +87,6 @@ def test_database_page_redirects_with_url_hash(app_client_with_hash): def test_database_page(app_client): response = app_client.get("/fixtures") - assert_permissions_checked( - app_client.ds, ["view-instance", ("view-database", "database", "fixtures")] - ) soup = Soup(response.body, "html.parser") queries_ul = soup.find("h2", text="Queries").find_next_sibling("ul") assert queries_ul is not None @@ -205,10 +197,6 @@ def test_row_page_does_not_truncate(): with make_app_client(config={"truncate_cells_html": 5}) as client: response = client.get("/fixtures/facetable/1") assert response.status == 200 - assert_permissions_checked( - client.ds, - ["view-instance", ("view-table", "table", ("fixtures", "facetable")),], - ) table = Soup(response.body, 
"html.parser").find("table") assert table["class"] == ["rows-and-columns"] assert ["Mission"] == [ @@ -518,14 +506,6 @@ def test_templates_considered(app_client, path, expected_considered): def test_table_html_simple_primary_key(app_client): response = app_client.get("/fixtures/simple_primary_key?_size=3") - assert_permissions_checked( - app_client.ds, - [ - "view-instance", - ("view-database", "database", "fixtures"), - ("view-table", "table", ("fixtures", "simple_primary_key")), - ], - ) assert response.status == 200 table = Soup(response.body, "html.parser").find("table") assert table["class"] == ["rows-and-columns"] @@ -881,19 +861,6 @@ def test_database_metadata(app_client): assert_footer_links(soup) -def test_database_query_permission_checks(app_client): - response = app_client.get("/fixtures?sql=select+1") - assert response.status == 200 - assert_permissions_checked( - app_client.ds, - [ - "view-instance", - ("view-database", "database", "fixtures"), - ("execute-sql", "database", "fixtures"), - ], - ) - - def test_database_metadata_with_custom_sql(app_client): response = app_client.get("/fixtures?sql=select+*+from+simple_primary_key") assert response.status == 200 @@ -929,14 +896,6 @@ def test_database_download_allowed_for_immutable(): assert len(soup.findAll("a", {"href": re.compile(r"\.db$")})) # Check we can actually download it assert 200 == client.get("/fixtures.db").status - assert_permissions_checked( - client.ds, - [ - "view-instance", - ("view-database", "database", "fixtures"), - ("view-database-download", "database", "fixtures"), - ], - ) def test_database_download_disallowed_for_mutable(app_client): @@ -1032,14 +991,6 @@ def test_404_content_type(app_client): def test_canned_query_with_custom_metadata(app_client): response = app_client.get("/fixtures/neighborhood_search?text=town") - assert_permissions_checked( - app_client.ds, - [ - "view-instance", - ("view-database", "database", "fixtures"), - ("view-query", "query", ("fixtures", "neighborhood_search")), - ], - ) assert response.status == 200 soup = Soup(response.body, "html.parser") assert "Search neighborhoods" == soup.find("h1").text diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 7c5b02c0..df905aa1 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -1,4 +1,4 @@ -from .fixtures import make_app_client +from .fixtures import app_client, assert_permissions_checked, make_app_client import pytest @@ -139,3 +139,53 @@ def test_query_list_respects_view_query(): "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} ) assert html_fragment in auth_response.text + + +@pytest.mark.parametrize( + "path,permissions", + [ + ("/", ["view-instance"]), + ("/fixtures", ["view-instance", ("view-database", "database", "fixtures")]), + ( + "/fixtures/facetable/1", + ["view-instance", ("view-table", "table", ("fixtures", "facetable"))], + ), + ( + "/fixtures/simple_primary_key", + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("view-table", "table", ("fixtures", "simple_primary_key")), + ], + ), + ( + "/fixtures?sql=select+1", + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("execute-sql", "database", "fixtures"), + ], + ), + ( + "/fixtures.db", + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("view-database-download", "database", "fixtures"), + ], + ), + ( + "/fixtures/neighborhood_search", + [ + "view-instance", + ("view-database", "database", "fixtures"), + ("view-query", "query", ("fixtures", 
"neighborhood_search")), + ], + ), + ], +) +def test_permissions_checked(app_client, path, permissions): + app_client.ds._permission_checks.clear() + response = app_client.get(path) + assert response.status in (200, 403) + assert_permissions_checked(app_client.ds, permissions) From 1cf86e5eccf3f92b483bacbad860879cf39b0ad6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 07:18:37 -0700 Subject: [PATCH 0072/1871] Show padlock on private index page, refs #811 --- datasette/templates/index.html | 2 +- datasette/views/index.py | 3 +++ tests/test_permissions.py | 6 ++++++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/datasette/templates/index.html b/datasette/templates/index.html index 3b8568b3..5a8dccae 100644 --- a/datasette/templates/index.html +++ b/datasette/templates/index.html @@ -5,7 +5,7 @@ {% block body_class %}index{% endblock %} {% block content %} -

    {{ metadata.title or "Datasette" }}
+    {{ metadata.title or "Datasette" }}{% if private %} 🔒{% endif %}

    {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} diff --git a/datasette/views/index.py b/datasette/views/index.py index 0f7fb613..8cbe28f0 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -121,5 +121,8 @@ class IndexView(BaseView): "databases": databases, "metadata": self.ds.metadata(), "datasette_version": __version__, + "private": not await self.ds.permission_allowed( + None, "view-instance" + ), }, ) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index df905aa1..5dcf46ad 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -16,10 +16,16 @@ def test_view_instance(allow, expected_anon, expected_auth): ): anon_response = client.get(path) assert expected_anon == anon_response.status + if allow and path == "/" and anon_response.status == 200: + # Should be no padlock + assert "

    Datasette 🔒

    " not in anon_response.text auth_response = client.get( path, cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, ) assert expected_auth == auth_response.status + # Check for the padlock + if allow and path == "/" and expected_anon == 403 and expected_auth == 200: + assert "

    Datasette 🔒

    " in auth_response.text @pytest.mark.parametrize( From 3ce7f2e7dae010de97b67618c111ea5853164a69 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 07:23:10 -0700 Subject: [PATCH 0073/1871] Show padlock on private database page, refs #811 --- datasette/templates/database.html | 2 +- datasette/views/database.py | 3 +++ tests/test_permissions.py | 10 ++++++++++ 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 1187267d..089142e2 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -18,7 +18,7 @@ {% block content %} -

    {{ metadata.title or database }}
+    {{ metadata.title or database }}{% if private %} 🔒{% endif %}

    {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} diff --git a/datasette/views/database.py b/datasette/views/database.py index afbb6b05..2d7e6b31 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -86,6 +86,9 @@ class DatabaseView(DataView): "hidden_count": len([t for t in tables if t["hidden"]]), "views": views, "queries": canned_queries, + "private": not await self.ds.permission_allowed( + None, "view-database", "database", database + ), }, { "show_hidden": request.args.get("_show_hidden"), diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 5dcf46ad..d76d1e15 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -43,10 +43,20 @@ def test_view_database(allow, expected_anon, expected_auth): ): anon_response = client.get(path) assert expected_anon == anon_response.status + if allow and path == "/fixtures" and anon_response.status == 200: + # Should be no padlock + assert ">fixtures 🔒" not in anon_response.text auth_response = client.get( path, cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, ) assert expected_auth == auth_response.status + if ( + allow + and path == "/fixtures" + and expected_anon == 403 + and expected_auth == 200 + ): + assert ">fixtures 🔒" in auth_response.text def test_database_list_respects_view_database(): From 2a8b39800f194925658bd9e1b5e4cc12619d5e9c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 07:50:06 -0700 Subject: [PATCH 0074/1871] Updated tests, refs #811 --- tests/test_api.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/tests/test_api.py b/tests/test_api.py index 22378946..13a98b6a 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -70,6 +70,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "Table With Space In Name", @@ -79,6 +80,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "attraction_characteristic", @@ -97,6 +99,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "binary_data", @@ -106,6 +109,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "complex_foreign_keys", @@ -134,6 +138,7 @@ def test_database_page(app_client): }, ], }, + "private": False, }, { "name": "compound_primary_key", @@ -143,6 +148,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "compound_three_primary_keys", @@ -152,6 +158,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "custom_foreign_key_label", @@ -170,6 +177,7 @@ def test_database_page(app_client): } ], }, + "private": False, }, { "name": "facet_cities", @@ -188,6 +196,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "facetable", @@ -217,6 +226,7 @@ def test_database_page(app_client): } ], }, + "private": False, }, { "name": "foreign_key_references", @@ -240,6 +250,7 @@ def test_database_page(app_client): }, ], }, + "private": False, }, { "name": "infinity", @@ -249,6 +260,7 @@ def 
test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "primary_key_multiple_columns", @@ -267,6 +279,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "primary_key_multiple_columns_explicit_label", @@ -285,6 +298,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "roadside_attraction_characteristics", @@ -308,6 +322,7 @@ def test_database_page(app_client): }, ], }, + "private": False, }, { "name": "roadside_attractions", @@ -326,6 +341,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "searchable", @@ -344,6 +360,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "searchable_tags", @@ -363,6 +380,7 @@ def test_database_page(app_client): }, ], }, + "private": False, }, { "name": "select", @@ -372,6 +390,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "simple_primary_key", @@ -405,6 +424,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "sortable", @@ -422,6 +442,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "table/with/slashes.csv", @@ -431,6 +452,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "tags", @@ -449,6 +471,7 @@ def test_database_page(app_client): ], "outgoing": [], }, + "private": False, }, { "name": "units", @@ -458,6 +481,7 @@ def test_database_page(app_client): "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "no_primary_key", @@ -467,6 +491,7 @@ def test_database_page(app_client): "hidden": True, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "searchable_fts", @@ -476,6 +501,7 @@ def test_database_page(app_client): "hidden": True, "fts_table": "searchable_fts", "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "searchable_fts_content", @@ -491,6 +517,7 @@ def test_database_page(app_client): "hidden": True, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "searchable_fts_segdir", @@ -507,6 +534,7 @@ def test_database_page(app_client): "hidden": True, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, { "name": "searchable_fts_segments", @@ -516,6 +544,7 @@ def test_database_page(app_client): "hidden": True, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, + "private": False, }, ] == data["tables"] @@ -537,6 +566,7 @@ def test_no_files_uses_memory_database(app_client_no_files): "tables_and_views_more": False, "tables_and_views_truncated": [], "views_count": 0, + "private": False, } } == response.json # Try that SQL query From 177059284dc953e6c76f86213aa470db2ff3eaca Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 10:05:32 -0700 Subject: [PATCH 0075/1871] New request.actor property, refs #811 --- datasette/app.py | 2 +- datasette/utils/asgi.py | 4 ++++ datasette/views/base.py | 2 +- datasette/views/database.py | 4 ++-- 
datasette/views/index.py | 2 +- datasette/views/special.py | 2 +- docs/authentication.rst | 2 ++ docs/internals.rst | 5 ++++- 8 files changed, 16 insertions(+), 7 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 23c293c9..87e542c1 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -667,7 +667,7 @@ class Datasette: return d def _actor(self, request): - return {"actor": request.scope.get("actor", None)} + return {"actor": request.actor} def table_metadata(self, database, table): "Fetch table-specific metadata." diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index fa78c8df..bca9c9ab 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -74,6 +74,10 @@ class Request: def args(self): return MultiParams(parse_qs(qs=self.query_string)) + @property + def actor(self): + return self.scope.get("actor", None) + async def post_vars(self): body = [] body = b"" diff --git a/datasette/views/base.py b/datasette/views/base.py index 9c2cbbcc..000d354b 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -68,7 +68,7 @@ class BaseView(AsgiView): self, request, action, resource_type=None, resource_identifier=None ): ok = await self.ds.permission_allowed( - request.scope.get("actor"), + request.actor, action, resource_type=resource_type, resource_identifier=resource_identifier, diff --git a/datasette/views/database.py b/datasette/views/database.py index 2d7e6b31..dee6c9c8 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -45,7 +45,7 @@ class DatabaseView(DataView): for table in table_counts: visible, private = await check_visibility( self.ds, - request.scope.get("actor"), + request.actor, "view-table", "table", (database, table), @@ -71,7 +71,7 @@ class DatabaseView(DataView): for query in self.ds.get_canned_queries(database): visible, private = await check_visibility( self.ds, - request.scope.get("actor"), + request.actor, "view-query", "query", (database, query["name"]), diff --git a/datasette/views/index.py b/datasette/views/index.py index 8cbe28f0..609bfa6a 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -26,7 +26,7 @@ class IndexView(BaseView): databases = [] for name, db in self.ds.databases.items(): visible, private = await check_visibility( - self.ds, request.scope.get("actor"), "view-database", "database", name, + self.ds, request.actor, "view-database", "database", name, ) if not visible: continue diff --git a/datasette/views/special.py b/datasette/views/special.py index 37c04697..b8bd57c6 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -86,7 +86,7 @@ class PermissionsDebugView(BaseView): async def get(self, request): if not await self.ds.permission_allowed( - request.scope.get("actor"), "permissions-debug" + request.actor, "permissions-debug" ): return Response("Permission denied", status=403) return await self.render( diff --git a/docs/authentication.rst b/docs/authentication.rst index 2caca66f..bda6a0b7 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -140,6 +140,8 @@ Plugins that wish to implement the same permissions scheme as canned queries can actor_matches_allow({"id": "root"}, {"id": "*"}) # returns True +The currently authenticated actor is made available to plugins as ``request.actor``. + .. 
_PermissionsDebugView: Permissions Debug diff --git a/docs/internals.rst b/docs/internals.rst index 25b2d875..7498f017 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -42,6 +42,9 @@ The request object is passed to various plugin hooks. It represents an incoming ``.args`` - MultiParams An object representing the parsed querystring parameters, see below. +``.actor`` - dictionary (str -> Any) or None + The currently authenticated actor (see :ref:`actors `), or ``None`` if the request is unauthenticated. + The object also has one awaitable method: ``await request.post_vars()`` - dictionary @@ -122,7 +125,7 @@ await .permission_allowed(actor, action, resource_type=None, resource_identifier ----------------------------------------------------------------------------------------------------- ``actor`` - dictionary - The authenticated actor. This is usually ``request.scope.get("actor")``. + The authenticated actor. This is usually ``request.actor``. ``action`` - string The name of the action that is being permission checked. From ab14b20b248dafbe7f9f9487985614939c83b517 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 10:16:24 -0700 Subject: [PATCH 0076/1871] Get tests working again --- datasette/views/database.py | 6 +----- datasette/views/index.py | 2 +- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index dee6c9c8..6f6404a7 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -44,11 +44,7 @@ class DatabaseView(DataView): tables = [] for table in table_counts: visible, private = await check_visibility( - self.ds, - request.actor, - "view-table", - "table", - (database, table), + self.ds, request.actor, "view-table", "table", (database, table), ) if not visible: continue diff --git a/datasette/views/index.py b/datasette/views/index.py index 609bfa6a..59d3e042 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -122,7 +122,7 @@ class IndexView(BaseView): "metadata": self.ds.metadata(), "datasette_version": __version__, "private": not await self.ds.permission_allowed( - None, "view-instance" + None, "view-instance", default=True ), }, ) From dfff34e1987976e72f58ee7b274952840b1f4b71 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 11:03:33 -0700 Subject: [PATCH 0077/1871] Applied black, refs #811 --- datasette/views/special.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/datasette/views/special.py b/datasette/views/special.py index b8bd57c6..7a5fbe21 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -85,9 +85,7 @@ class PermissionsDebugView(BaseView): self.ds = datasette async def get(self, request): - if not await self.ds.permission_allowed( - request.actor, "permissions-debug" - ): + if not await self.ds.permission_allowed(request.actor, "permissions-debug"): return Response("Permission denied", status=403) return await self.render( ["permissions_debug.html"], From aa420009c08921d0c9a68cf60a57959be0e8a2e5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 11:07:11 -0700 Subject: [PATCH 0078/1871] Show padlock on private table page, refs #811 --- datasette/templates/table.html | 2 +- datasette/views/table.py | 5 +++++ tests/test_permissions.py | 5 +++++ 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/datasette/templates/table.html b/datasette/templates/table.html index fa6766a8..1289e125 100644 --- a/datasette/templates/table.html +++ 
b/datasette/templates/table.html @@ -26,7 +26,7 @@ {% block content %} -

    {{ metadata.title or table }}{% if is_view %} (view){% endif %}
+    {{ metadata.title or table }}{% if is_view %} (view){% endif %}{% if private %} 🔒{% endif %}

    {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} diff --git a/datasette/views/table.py b/datasette/views/table.py index 935fed3d..cd952568 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -271,6 +271,10 @@ class TableView(RowTableShared): await self.check_permission(request, "view-database", "database", database) await self.check_permission(request, "view-table", "table", (database, table)) + private = not await self.ds.permission_allowed( + None, "view-table", "table", (database, table), default=True + ) + pks = await db.primary_keys(table) table_columns = await db.table_columns(table) @@ -834,6 +838,7 @@ class TableView(RowTableShared): "suggested_facets": suggested_facets, "next": next_value and str(next_value) or None, "next_url": next_url, + "private": private, }, extra_template, ( diff --git a/tests/test_permissions.py b/tests/test_permissions.py index d76d1e15..733afd5f 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -90,11 +90,16 @@ def test_view_table(allow, expected_anon, expected_auth): ) as client: anon_response = client.get("/fixtures/compound_three_primary_keys") assert expected_anon == anon_response.status + if allow and anon_response.status == 200: + # Should be no padlock + assert ">compound_three_primary_keys 🔒" not in anon_response.text auth_response = client.get( "/fixtures/compound_three_primary_keys", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, ) assert expected_auth == auth_response.status + if allow and expected_anon == 403 and expected_auth == 200: + assert ">compound_three_primary_keys 🔒" in auth_response.text def test_table_list_respects_view_table(): From 9ac27f67fe346e753b562b711a2086e4c616d51d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 11:13:32 -0700 Subject: [PATCH 0079/1871] Show padlock on private query page, refs #811 --- datasette/templates/query.html | 2 +- datasette/views/database.py | 6 ++++++ tests/test_permissions.py | 5 +++++ 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/datasette/templates/query.html b/datasette/templates/query.html index a7cb6647..7771b101 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -28,7 +28,7 @@ {% block content %} -

    {{ metadata.title or database }}
+    {{ metadata.title or database }}{% if private %} 🔒{% endif %}

    {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} diff --git a/datasette/views/database.py b/datasette/views/database.py index 6f6404a7..30817106 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -147,10 +147,14 @@ class QueryView(DataView): # Respect canned query permissions await self.check_permission(request, "view-instance") await self.check_permission(request, "view-database", "database", database) + private = False if canned_query: await self.check_permission( request, "view-query", "query", (database, canned_query) ) + private = not await self.ds.permission_allowed( + None, "view-query", "query", (database, canned_query), default=True + ) else: await self.check_permission(request, "execute-sql", "database", database) # Extract any :named parameters @@ -214,6 +218,7 @@ class QueryView(DataView): "truncated": False, "columns": [], "query": {"sql": sql, "params": params}, + "private": private, }, extra_template, templates, @@ -282,6 +287,7 @@ class QueryView(DataView): "truncated": results.truncated, "columns": columns, "query": {"sql": sql, "params": params}, + "private": private, }, extra_template, templates, diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 733afd5f..55b2d673 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -136,10 +136,15 @@ def test_view_query(allow, expected_anon, expected_auth): ) as client: anon_response = client.get("/fixtures/q") assert expected_anon == anon_response.status + if allow and anon_response.status == 200: + # Should be no padlock + assert ">fixtures 🔒" not in anon_response.text auth_response = client.get( "/fixtures/q", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} ) assert expected_auth == auth_response.status + if allow and expected_anon == 403 and expected_auth == 200: + assert ">fixtures 🔒" in auth_response.text def test_query_list_respects_view_query(): From dcec89270a2e3b9fabed93f1d7b9be3ef86e9ed2 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 11:20:21 -0700 Subject: [PATCH 0080/1871] View list respects view-table permission, refs #811 Also makes a small change to the /fixtures.json JSON: "views": ["view_name"] Is now: "views": [{"name": "view_name", "private": true}] --- datasette/templates/database.html | 2 +- datasette/views/database.py | 11 ++++++++++- tests/test_permissions.py | 18 +++++++++++++----- 3 files changed, 24 insertions(+), 7 deletions(-) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 089142e2..100faee4 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -51,7 +51,7 @@

    Views

    {% endif %} diff --git a/datasette/views/database.py b/datasette/views/database.py index 30817106..824cb632 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -37,10 +37,19 @@ class DatabaseView(DataView): db = self.ds.databases[database] table_counts = await db.table_counts(5) - views = await db.view_names() hidden_table_names = set(await db.hidden_table_names()) all_foreign_keys = await db.get_all_foreign_keys() + views = [] + for view_name in await db.view_names(): + visible, private = await check_visibility( + self.ds, request.actor, "view-table", "table", (database, view_name), + ) + if visible: + views.append( + {"name": view_name, "private": private,} + ) + tables = [] for table in table_counts: visible, private = await check_visibility( diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 55b2d673..5c338e04 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -107,19 +107,27 @@ def test_table_list_respects_view_table(): metadata={ "databases": { "fixtures": { - "tables": {"compound_three_primary_keys": {"allow": {"id": "root"}}} + "tables": { + "compound_three_primary_keys": {"allow": {"id": "root"}}, + # And a SQL view too: + "paginated_view": {"allow": {"id": "root"}}, + } } } } ) as client: - html_fragment = 'compound_three_primary_keys 🔒' + html_fragments = [ + ">compound_three_primary_keys 🔒", + ">paginated_view 🔒", + ] anon_response = client.get("/fixtures") - assert html_fragment not in anon_response.text - assert '"/fixtures/compound_three_primary_keys"' not in anon_response.text + for html_fragment in html_fragments: + assert html_fragment not in anon_response.text auth_response = client.get( "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} ) - assert html_fragment in auth_response.text + for html_fragment in html_fragments: + assert html_fragment in auth_response.text @pytest.mark.parametrize( From 5598c5de011db95396b65b5c8c251cbe6884d6ae Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 11:34:14 -0700 Subject: [PATCH 0081/1871] Database list on index page respects table/view permissions, refs #811 --- datasette/templates/index.html | 2 +- datasette/views/index.py | 25 ++++++++++++++++++++----- tests/test_permissions.py | 31 +++++++++++++++++++++++++++++++ 3 files changed, 52 insertions(+), 6 deletions(-) diff --git a/datasette/templates/index.html b/datasette/templates/index.html index 5a8dccae..c1adfc59 100644 --- a/datasette/templates/index.html +++ b/datasette/templates/index.html @@ -22,7 +22,7 @@ {% endif %}

    {% for table in database.tables_and_views_truncated %}{{ table.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}

    + }}"{% if table.count %} title="{{ table.count }} rows"{% endif %}>{{ table.name }}{% if table.private %} 🔒{% endif %}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}

    {% endfor %} {% endblock %} diff --git a/datasette/views/index.py b/datasette/views/index.py index 59d3e042..a3e8388c 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -25,14 +25,22 @@ class IndexView(BaseView): await self.check_permission(request, "view-instance") databases = [] for name, db in self.ds.databases.items(): - visible, private = await check_visibility( + visible, database_private = await check_visibility( self.ds, request.actor, "view-database", "database", name, ) if not visible: continue table_names = await db.table_names() hidden_table_names = set(await db.hidden_table_names()) - views = await db.view_names() + + views = [] + for view_name in await db.view_names(): + visible, private = await check_visibility( + self.ds, request.actor, "view-table", "table", (name, view_name), + ) + if visible: + views.append({"name": view_name, "private": private}) + # Perform counts only for immutable or DBS with <= COUNT_TABLE_LIMIT tables table_counts = {} if not db.is_mutable or db.size < COUNT_DB_SIZE_LIMIT: @@ -40,8 +48,14 @@ class IndexView(BaseView): # If any of these are None it means at least one timed out - ignore them all if any(v is None for v in table_counts.values()): table_counts = {} + tables = {} for table in table_names: + visible, private = await check_visibility( + self.ds, request.actor, "view-table", "table", (name, table), + ) + if not visible: + continue table_columns = await db.table_columns(table) tables[table] = { "name": table, @@ -51,6 +65,7 @@ class IndexView(BaseView): "hidden": table in hidden_table_names, "fts_table": await db.fts_table(table), "num_relationships_for_sorting": 0, + "private": private, } if request.args.get("_sort") == "relationships" or not table_counts: @@ -78,8 +93,8 @@ class IndexView(BaseView): # Only add views if this is less than TRUNCATE_AT if len(tables_and_views_truncated) < TRUNCATE_AT: num_views_to_add = TRUNCATE_AT - len(tables_and_views_truncated) - for view_name in views[:num_views_to_add]: - tables_and_views_truncated.append({"name": view_name}) + for view in views[:num_views_to_add]: + tables_and_views_truncated.append(view) databases.append( { @@ -100,7 +115,7 @@ class IndexView(BaseView): ), "hidden_tables_count": len(hidden_tables), "views_count": len(views), - "private": private, + "private": database_private, } ) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 5c338e04..475f93dd 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -74,6 +74,37 @@ def test_database_list_respects_view_database(): assert 'fixtures 🔒' in auth_response.text +def test_database_list_respects_view_table(): + with make_app_client( + metadata={ + "databases": { + "data": { + "tables": { + "names": {"allow": {"id": "root"}}, + "v": {"allow": {"id": "root"}}, + } + } + } + }, + extra_databases={ + "data.db": "create table names (name text); create view v as select * from names" + }, + ) as client: + html_fragments = [ + ">names 🔒", + ">v 🔒", + ] + anon_response_text = client.get("/").text + assert "0 rows in 0 tables" in anon_response_text + for html_fragment in html_fragments: + assert html_fragment not in anon_response_text + auth_response_text = client.get( + "/", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + ).text + for html_fragment in html_fragments: + assert html_fragment in auth_response_text + + @pytest.mark.parametrize( "allow,expected_anon,expected_auth", [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),], From 
c9f1ec616e5a8c83f554baaedd38663569fb9b91 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 11:51:03 -0700 Subject: [PATCH 0082/1871] Removed resource_type from permissions system, closes #817 Refs #811, #699 --- datasette/app.py | 4 +--- datasette/default_permissions.py | 5 +--- datasette/hookspecs.py | 2 +- datasette/templates/permissions_debug.html | 4 ++-- datasette/utils/__init__.py | 16 +++---------- datasette/views/base.py | 5 +--- datasette/views/database.py | 28 ++++++++-------------- datasette/views/index.py | 6 ++--- datasette/views/table.py | 10 ++++---- docs/authentication.rst | 19 ++------------- docs/internals.rst | 7 ++---- docs/plugins.rst | 9 +++---- tests/conftest.py | 4 ++-- tests/fixtures.py | 9 +++---- 14 files changed, 39 insertions(+), 89 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 87e542c1..c12e0af0 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -465,7 +465,7 @@ class Datasette: return [] async def permission_allowed( - self, actor, action, resource_type=None, resource_identifier=None, default=False + self, actor, action, resource_identifier=None, default=False ): "Check permissions using the permissions_allowed plugin hook" result = None @@ -473,7 +473,6 @@ class Datasette: datasette=self, actor=actor, action=action, - resource_type=resource_type, resource_identifier=resource_identifier, ): if callable(check): @@ -491,7 +490,6 @@ class Datasette: "when": datetime.datetime.utcnow().isoformat(), "actor": actor, "action": action, - "resource_type": resource_type, "resource_identifier": resource_identifier, "used_default": used_default, "result": result, diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index dd1770a3..d27704aa 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -3,7 +3,7 @@ from datasette.utils import actor_matches_allow @hookimpl -def permission_allowed(datasette, actor, action, resource_type, resource_identifier): +def permission_allowed(datasette, actor, action, resource_identifier): if action == "permissions-debug": if actor and actor.get("id") == "root": return True @@ -12,13 +12,11 @@ def permission_allowed(datasette, actor, action, resource_type, resource_identif if allow is not None: return actor_matches_allow(actor, allow) elif action == "view-database": - assert resource_type == "database" database_allow = datasette.metadata("allow", database=resource_identifier) if database_allow is None: return True return actor_matches_allow(actor, database_allow) elif action == "view-table": - assert resource_type == "table" database, table = resource_identifier tables = datasette.metadata("tables", database=database) or {} table_allow = (tables.get(table) or {}).get("allow") @@ -27,7 +25,6 @@ def permission_allowed(datasette, actor, action, resource_type, resource_identif return actor_matches_allow(actor, table_allow) elif action == "view-query": # Check if this query has a "allow" block in metadata - assert resource_type == "query" database, query_name = resource_identifier queries_metadata = datasette.metadata("queries", database=database) assert query_name in queries_metadata diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 71d06661..3c202553 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -66,5 +66,5 @@ def actor_from_request(datasette, request): @hookspec -def permission_allowed(datasette, actor, action, resource_type, resource_identifier): +def permission_allowed(datasette, actor, action, 
resource_identifier): "Check if actor is allowed to perfom this action - return True, False or None" diff --git a/datasette/templates/permissions_debug.html b/datasette/templates/permissions_debug.html index dda57dfa..7d3ee712 100644 --- a/datasette/templates/permissions_debug.html +++ b/datasette/templates/permissions_debug.html @@ -46,8 +46,8 @@ {% endif %}

    Actor: {{ check.actor|tojson }}

    - {% if check.resource_type %} -

    Resource: {{ check.resource_type }} = {{ check.resource_identifier }}

    + {% if check.resource_identifier %} +

    Resource: {{ check.resource_identifier }}

    {% endif %} {% endfor %} diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 3d964049..257d1285 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -876,24 +876,14 @@ def actor_matches_allow(actor, allow): return False -async def check_visibility( - datasette, actor, action, resource_type, resource_identifier, default=True -): +async def check_visibility(datasette, actor, action, resource_identifier, default=True): "Returns (visible, private) - visible = can you see it, private = can others see it too" visible = await datasette.permission_allowed( - actor, - action, - resource_type=resource_type, - resource_identifier=resource_identifier, - default=default, + actor, action, resource_identifier=resource_identifier, default=default, ) if not visible: return (False, False) private = not await datasette.permission_allowed( - None, - action, - resource_type=resource_type, - resource_identifier=resource_identifier, - default=default, + None, action, resource_identifier=resource_identifier, default=default, ) return visible, private diff --git a/datasette/views/base.py b/datasette/views/base.py index 000d354b..2ca5e86a 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -64,13 +64,10 @@ class BaseView(AsgiView): response.body = b"" return response - async def check_permission( - self, request, action, resource_type=None, resource_identifier=None - ): + async def check_permission(self, request, action, resource_identifier=None): ok = await self.ds.permission_allowed( request.actor, action, - resource_type=resource_type, resource_identifier=resource_identifier, default=True, ) diff --git a/datasette/views/database.py b/datasette/views/database.py index 824cb632..d562ecb1 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -21,7 +21,7 @@ class DatabaseView(DataView): async def data(self, request, database, hash, default_labels=False, _size=None): await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", "database", database) + await self.check_permission(request, "view-database", database) metadata = (self.ds.metadata("databases") or {}).get(database, {}) self.ds.update_with_inherited_metadata(metadata) @@ -43,7 +43,7 @@ class DatabaseView(DataView): views = [] for view_name in await db.view_names(): visible, private = await check_visibility( - self.ds, request.actor, "view-table", "table", (database, view_name), + self.ds, request.actor, "view-table", (database, view_name), ) if visible: views.append( @@ -53,7 +53,7 @@ class DatabaseView(DataView): tables = [] for table in table_counts: visible, private = await check_visibility( - self.ds, request.actor, "view-table", "table", (database, table), + self.ds, request.actor, "view-table", (database, table), ) if not visible: continue @@ -75,11 +75,7 @@ class DatabaseView(DataView): canned_queries = [] for query in self.ds.get_canned_queries(database): visible, private = await check_visibility( - self.ds, - request.actor, - "view-query", - "query", - (database, query["name"]), + self.ds, request.actor, "view-query", (database, query["name"]), ) if visible: canned_queries.append(dict(query, private=private)) @@ -112,10 +108,8 @@ class DatabaseDownload(DataView): async def view_get(self, request, database, hash, correct_hash_present, **kwargs): await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", "database", database) - await self.check_permission( - 
request, "view-database-download", "database", database - ) + await self.check_permission(request, "view-database", database) + await self.check_permission(request, "view-database-download", database) if database not in self.ds.databases: raise DatasetteError("Invalid database", status=404) db = self.ds.databases[database] @@ -155,17 +149,15 @@ class QueryView(DataView): # Respect canned query permissions await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", "database", database) + await self.check_permission(request, "view-database", database) private = False if canned_query: - await self.check_permission( - request, "view-query", "query", (database, canned_query) - ) + await self.check_permission(request, "view-query", (database, canned_query)) private = not await self.ds.permission_allowed( - None, "view-query", "query", (database, canned_query), default=True + None, "view-query", (database, canned_query), default=True ) else: - await self.check_permission(request, "execute-sql", "database", database) + await self.check_permission(request, "execute-sql", database) # Extract any :named parameters named_parameters = named_parameters or self.re_named_parameter.findall(sql) named_parameter_values = { diff --git a/datasette/views/index.py b/datasette/views/index.py index a3e8388c..b2706251 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -26,7 +26,7 @@ class IndexView(BaseView): databases = [] for name, db in self.ds.databases.items(): visible, database_private = await check_visibility( - self.ds, request.actor, "view-database", "database", name, + self.ds, request.actor, "view-database", name, ) if not visible: continue @@ -36,7 +36,7 @@ class IndexView(BaseView): views = [] for view_name in await db.view_names(): visible, private = await check_visibility( - self.ds, request.actor, "view-table", "table", (name, view_name), + self.ds, request.actor, "view-table", (name, view_name), ) if visible: views.append({"name": view_name, "private": private}) @@ -52,7 +52,7 @@ class IndexView(BaseView): tables = {} for table in table_names: visible, private = await check_visibility( - self.ds, request.actor, "view-table", "table", (name, table), + self.ds, request.actor, "view-table", (name, table), ) if not visible: continue diff --git a/datasette/views/table.py b/datasette/views/table.py index cd952568..4cec0cda 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -268,11 +268,11 @@ class TableView(RowTableShared): raise NotFound("Table not found: {}".format(table)) await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", "database", database) - await self.check_permission(request, "view-table", "table", (database, table)) + await self.check_permission(request, "view-database", database) + await self.check_permission(request, "view-table", (database, table)) private = not await self.ds.permission_allowed( - None, "view-table", "table", (database, table), default=True + None, "view-table", (database, table), default=True ) pks = await db.primary_keys(table) @@ -854,8 +854,8 @@ class RowView(RowTableShared): async def data(self, request, database, hash, table, pk_path, default_labels=False): pk_values = urlsafe_components(pk_path) await self.check_permission(request, "view-instance") - await self.check_permission(request, "view-database", "database", database) - await self.check_permission(request, "view-table", "table", (database, table)) + await 
self.check_permission(request, "view-database", database) + await self.check_permission(request, "view-table", (database, table)) db = self.ds.databases[database] pks = await db.primary_keys(table) use_rowid = not pks diff --git a/docs/authentication.rst b/docs/authentication.rst index bda6a0b7..67112969 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -52,7 +52,7 @@ The URL on the first line includes a one-use token which can be used to sign in Permissions =========== -Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. This method is also used by Datasette core code itself, which allows plugins to help make decisions on which actions are allowed by implementing the :ref:`permission_allowed(...) ` plugin hook. +Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. This method is also used by Datasette core code itself, which allows plugins to help make decisions on which actions are allowed by implementing the :ref:`plugin_permission_allowed` plugin hook. .. _authentication_permissions_canned_queries: @@ -159,7 +159,7 @@ This is designed to help administrators and plugin authors understand exactly ho Permissions =========== -This section lists all of the permission checks that are carried out by Datasette core, along with their ``resource_type`` and ``resource_identifier`` if those are passed. +This section lists all of the permission checks that are carried out by Datasette core, along with the ``resource_identifier`` if it was passed. .. _permissions_view_instance: @@ -176,9 +176,6 @@ view-database Actor is allowed to view a database page, e.g. https://latest.datasette.io/fixtures -``resource_type`` - string - "database" - ``resource_identifier`` - string The name of the database @@ -189,9 +186,6 @@ view-database-download Actor is allowed to download a database, e.g. https://latest.datasette.io/fixtures.db -``resource_type`` - string - "database" - ``resource_identifier`` - string The name of the database @@ -202,9 +196,6 @@ view-table Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.io/fixtures/complex_foreign_keys -``resource_type`` - string - "table" - even if this is actually a SQL view - ``resource_identifier`` - tuple: (string, string) The name of the database, then the name of the table @@ -215,9 +206,6 @@ view-query Actor is allowed to view a :ref:`canned query ` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size -``resource_type`` - string - "query" - ``resource_identifier`` - string The name of the canned query @@ -228,9 +216,6 @@ execute-sql Actor is allowed to run arbitrary SQL queries against a specific database, e.g. https://latest.datasette.io/fixtures?sql=select+100 -``resource_type`` - string - "database" - ``resource_identifier`` - string The name of the database diff --git a/docs/internals.rst b/docs/internals.rst index 7498f017..1d61b6cb 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -121,8 +121,8 @@ Renders a `Jinja template `__ usin .. 
_datasette_permission_allowed: -await .permission_allowed(actor, action, resource_type=None, resource_identifier=None, default=False) ------------------------------------------------------------------------------------------------------ +await .permission_allowed(actor, action, resource_identifier=None, default=False) +--------------------------------------------------------------------------------- ``actor`` - dictionary The authenticated actor. This is usually ``request.actor``. @@ -130,9 +130,6 @@ await .permission_allowed(actor, action, resource_type=None, resource_identifier ``action`` - string The name of the action that is being permission checked. -``resource_type`` - string, optional - The type of resource being checked, e.g. ``"table"``. - ``resource_identifier`` - string, optional The resource identifier, e.g. the name of the table. diff --git a/docs/plugins.rst b/docs/plugins.rst index ecc7cbf1..118fab84 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1005,8 +1005,8 @@ Instead of returning a dictionary, this function can return an awaitable functio .. _plugin_permission_allowed: -permission_allowed(datasette, actor, action, resource_type, resource_identifier) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +permission_allowed(datasette, actor, action, resource_identifier) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. @@ -1017,10 +1017,7 @@ permission_allowed(datasette, actor, action, resource_type, resource_identifier) ``action`` - string The action to be performed, e.g. ``"edit-table"``. -``resource_type`` - string - The type of resource being acted on, e.g. ``"table"``. - -``resource`` - string +``resource_identifier`` - string An identifier for the individual resource, e.g. the name of the table. Called to check that an actor has permission to perform an action on a resource. Can return ``True`` if the action is allowed, ``False`` if the action is not allowed or ``None`` if the plugin does not have an opinion one way or the other. 
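With ``resource_type`` gone, a plugin implementing this hook receives only the action plus a single ``resource_identifier``. A minimal sketch of a plugin using this signature (the table name and actor field below are illustrative examples, not taken from this patch):

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def permission_allowed(actor, action, resource_identifier):
        # Hypothetical rule: only the "root" actor may view ("fixtures", "secret_table")
        if action == "view-table" and resource_identifier == ("fixtures", "secret_table"):
            return bool(actor and actor.get("id") == "root")
        # Returning None expresses no opinion, deferring to other plugins and the default
        return None

Unused hook arguments such as ``datasette`` can be omitted from the implementation, as in the other plugin examples in these docs.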
diff --git a/tests/conftest.py b/tests/conftest.py index 1921ae3a..7f1e9387 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -70,8 +70,8 @@ def check_permission_actions_are_documented(): action = kwargs.get("action").replace("-", "_") assert ( action in documented_permission_actions - ), "Undocumented permission action: {}, resource_type: {}, resource_identifier: {}".format( - action, kwargs["resource_type"], kwargs["resource_identifier"] + ), "Undocumented permission action: {}, resource_identifier: {}".format( + action, kwargs["resource_identifier"] ) pm.add_hookcall_monitoring( diff --git a/tests/fixtures.py b/tests/fixtures.py index 2ac73fb1..8210d34f 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -857,24 +857,21 @@ if __name__ == "__main__": def assert_permissions_checked(datasette, actions): - # actions is a list of "action" or (action, resource_type, resource_identifier) tuples + # actions is a list of "action" or (action, resource_identifier) tuples for action in actions: if isinstance(action, str): - resource_type = None resource_identifier = None else: - action, resource_type, resource_identifier = action + action, resource_identifier = action assert [ pc for pc in datasette._permission_checks if pc["action"] == action - and pc["resource_type"] == resource_type and pc["resource_identifier"] == resource_identifier - ], """Missing expected permission check: action={}, resource_type={}, resource_identifier={} + ], """Missing expected permission check: action={}, resource_identifier={} Permission checks seen: {} """.format( action, - resource_type, resource_identifier, json.dumps(list(datasette._permission_checks), indent=4), ) From 799c5d53570d773203527f19530cf772dc2eeb24 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 11:59:11 -0700 Subject: [PATCH 0083/1871] Renamed resource_identifier to resource, refs #817 --- datasette/app.py | 11 +++-------- datasette/default_permissions.py | 8 ++++---- datasette/hookspecs.py | 2 +- datasette/templates/permissions_debug.html | 4 ++-- datasette/utils/__init__.py | 6 +++--- datasette/views/base.py | 7 ++----- datasette/views/database.py | 2 +- docs/authentication.rst | 12 ++++++------ docs/internals.rst | 10 ++++++---- docs/plugins.rst | 6 ++++-- tests/conftest.py | 4 ++-- tests/fixtures.py | 15 ++++++--------- 12 files changed, 40 insertions(+), 47 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index c12e0af0..2f89d17c 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -464,16 +464,11 @@ class Datasette: else: return [] - async def permission_allowed( - self, actor, action, resource_identifier=None, default=False - ): + async def permission_allowed(self, actor, action, resource=None, default=False): "Check permissions using the permissions_allowed plugin hook" result = None for check in pm.hook.permission_allowed( - datasette=self, - actor=actor, - action=action, - resource_identifier=resource_identifier, + datasette=self, actor=actor, action=action, resource=resource, ): if callable(check): check = check() @@ -490,7 +485,7 @@ class Datasette: "when": datetime.datetime.utcnow().isoformat(), "actor": actor, "action": action, - "resource_identifier": resource_identifier, + "resource": resource, "used_default": used_default, "result": result, } diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index d27704aa..e989c0fa 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -3,7 +3,7 @@ from datasette.utils import 
actor_matches_allow @hookimpl -def permission_allowed(datasette, actor, action, resource_identifier): +def permission_allowed(datasette, actor, action, resource): if action == "permissions-debug": if actor and actor.get("id") == "root": return True @@ -12,12 +12,12 @@ def permission_allowed(datasette, actor, action, resource_identifier): if allow is not None: return actor_matches_allow(actor, allow) elif action == "view-database": - database_allow = datasette.metadata("allow", database=resource_identifier) + database_allow = datasette.metadata("allow", database=resource) if database_allow is None: return True return actor_matches_allow(actor, database_allow) elif action == "view-table": - database, table = resource_identifier + database, table = resource tables = datasette.metadata("tables", database=database) or {} table_allow = (tables.get(table) or {}).get("allow") if table_allow is None: @@ -25,7 +25,7 @@ def permission_allowed(datasette, actor, action, resource_identifier): return actor_matches_allow(actor, table_allow) elif action == "view-query": # Check if this query has a "allow" block in metadata - database, query_name = resource_identifier + database, query_name = resource queries_metadata = datasette.metadata("queries", database=database) assert query_name in queries_metadata if isinstance(queries_metadata[query_name], str): diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 3c202553..d5fd232f 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -66,5 +66,5 @@ def actor_from_request(datasette, request): @hookspec -def permission_allowed(datasette, actor, action, resource_identifier): +def permission_allowed(datasette, actor, action, resource): "Check if actor is allowed to perfom this action - return True, False or None" diff --git a/datasette/templates/permissions_debug.html b/datasette/templates/permissions_debug.html index 7d3ee712..d898ea8c 100644 --- a/datasette/templates/permissions_debug.html +++ b/datasette/templates/permissions_debug.html @@ -46,8 +46,8 @@ {% endif %}

    Actor: {{ check.actor|tojson }}

    - {% if check.resource_identifier %} -

    Resource: {{ check.resource_identifier }}

    + {% if check.resource %} +

    Resource: {{ check.resource }}

    {% endif %} {% endfor %} diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 257d1285..7c1f34e0 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -876,14 +876,14 @@ def actor_matches_allow(actor, allow): return False -async def check_visibility(datasette, actor, action, resource_identifier, default=True): +async def check_visibility(datasette, actor, action, resource, default=True): "Returns (visible, private) - visible = can you see it, private = can others see it too" visible = await datasette.permission_allowed( - actor, action, resource_identifier=resource_identifier, default=default, + actor, action, resource=resource, default=default, ) if not visible: return (False, False) private = not await datasette.permission_allowed( - None, action, resource_identifier=resource_identifier, default=default, + None, action, resource=resource, default=default, ) return visible, private diff --git a/datasette/views/base.py b/datasette/views/base.py index 2ca5e86a..f327c6cd 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -64,12 +64,9 @@ class BaseView(AsgiView): response.body = b"" return response - async def check_permission(self, request, action, resource_identifier=None): + async def check_permission(self, request, action, resource=None): ok = await self.ds.permission_allowed( - request.actor, - action, - resource_identifier=resource_identifier, - default=True, + request.actor, action, resource=resource, default=True, ) if not ok: raise Forbidden(action) diff --git a/datasette/views/database.py b/datasette/views/database.py index d562ecb1..e1b29c27 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -88,7 +88,7 @@ class DatabaseView(DataView): "views": views, "queries": canned_queries, "private": not await self.ds.permission_allowed( - None, "view-database", "database", database + None, "view-database", database ), }, { diff --git a/docs/authentication.rst b/docs/authentication.rst index 67112969..f5209dfc 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -159,7 +159,7 @@ This is designed to help administrators and plugin authors understand exactly ho Permissions =========== -This section lists all of the permission checks that are carried out by Datasette core, along with the ``resource_identifier`` if it was passed. +This section lists all of the permission checks that are carried out by Datasette core, along with the ``resource`` if it was passed. .. _permissions_view_instance: @@ -176,7 +176,7 @@ view-database Actor is allowed to view a database page, e.g. https://latest.datasette.io/fixtures -``resource_identifier`` - string +``resource`` - string The name of the database .. _permissions_view_database_download: @@ -186,7 +186,7 @@ view-database-download Actor is allowed to download a database, e.g. https://latest.datasette.io/fixtures.db -``resource_identifier`` - string +``resource`` - string The name of the database .. _permissions_view_table: @@ -196,7 +196,7 @@ view-table Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.io/fixtures/complex_foreign_keys -``resource_identifier`` - tuple: (string, string) +``resource`` - tuple: (string, string) The name of the database, then the name of the table .. _permissions_view_query: @@ -206,7 +206,7 @@ view-query Actor is allowed to view a :ref:`canned query ` page, e.g. 
https://latest.datasette.io/fixtures/pragma_cache_size -``resource_identifier`` - string +``resource`` - string The name of the canned query .. _permissions_execute_sql: @@ -216,7 +216,7 @@ execute-sql Actor is allowed to run arbitrary SQL queries against a specific database, e.g. https://latest.datasette.io/fixtures?sql=select+100 -``resource_identifier`` - string +``resource`` - string The name of the database .. _permissions_permissions_debug: diff --git a/docs/internals.rst b/docs/internals.rst index 1d61b6cb..83dbd897 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -121,8 +121,8 @@ Renders a `Jinja template `__ usin .. _datasette_permission_allowed: -await .permission_allowed(actor, action, resource_identifier=None, default=False) ---------------------------------------------------------------------------------- +await .permission_allowed(actor, action, resource=None, default=False) +---------------------------------------------------------------------- ``actor`` - dictionary The authenticated actor. This is usually ``request.actor``. @@ -130,13 +130,15 @@ await .permission_allowed(actor, action, resource_identifier=None, default=False ``action`` - string The name of the action that is being permission checked. -``resource_identifier`` - string, optional - The resource identifier, e.g. the name of the table. +``resource`` - string, optional + The resource, e.g. the name of the table. Only some permissions apply to a resource. Check if the given actor has permission to perform the given action on the given resource. This uses plugins that implement the :ref:`plugin_permission_allowed` plugin hook to decide if the action is allowed or not. If none of the plugins express an opinion, the return value will be the ``default`` argument. This is deny, but you can pass ``default=True`` to default allow instead. +See :ref:`permissions` for a full list of permissions included in Datasette core. + .. _datasette_get_database: .get_database(name) diff --git a/docs/plugins.rst b/docs/plugins.rst index 118fab84..56041d0c 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1005,7 +1005,7 @@ Instead of returning a dictionary, this function can return an awaitable functio .. _plugin_permission_allowed: -permission_allowed(datasette, actor, action, resource_identifier) +permission_allowed(datasette, actor, action, resource) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ``datasette`` - :ref:`internals_datasette` @@ -1017,7 +1017,9 @@ permission_allowed(datasette, actor, action, resource_identifier) ``action`` - string The action to be performed, e.g. ``"edit-table"``. -``resource_identifier`` - string +``resource`` - string or None An identifier for the individual resource, e.g. the name of the table. Called to check that an actor has permission to perform an action on a resource. Can return ``True`` if the action is allowed, ``False`` if the action is not allowed or ``None`` if the plugin does not have an opinion one way or the other. + +See :ref:`permissions` for a full list of permissions included in Datasette core. 
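After this rename, callers pass the resource as a single ``resource=`` keyword argument. A short sketch of an internal permission check using the new keyword (the database and table names are illustrative only, not part of this patch):

.. code-block:: python

    async def can_view_table(datasette, actor):
        # ("fixtures", "facetable") is an example resource identifier
        return await datasette.permission_allowed(
            actor, "view-table", resource=("fixtures", "facetable"), default=True
        )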
diff --git a/tests/conftest.py b/tests/conftest.py index 7f1e9387..320aa45b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -70,8 +70,8 @@ def check_permission_actions_are_documented(): action = kwargs.get("action").replace("-", "_") assert ( action in documented_permission_actions - ), "Undocumented permission action: {}, resource_identifier: {}".format( - action, kwargs["resource_identifier"] + ), "Undocumented permission action: {}, resource: {}".format( + action, kwargs["resource"] ) pm.add_hookcall_monitoring( diff --git a/tests/fixtures.py b/tests/fixtures.py index 8210d34f..e9175b57 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -857,21 +857,18 @@ if __name__ == "__main__": def assert_permissions_checked(datasette, actions): - # actions is a list of "action" or (action, resource_identifier) tuples + # actions is a list of "action" or (action, resource) tuples for action in actions: if isinstance(action, str): - resource_identifier = None + resource = None else: - action, resource_identifier = action + action, resource = action assert [ pc for pc in datasette._permission_checks - if pc["action"] == action - and pc["resource_identifier"] == resource_identifier - ], """Missing expected permission check: action={}, resource_identifier={} + if pc["action"] == action and pc["resource"] == resource + ], """Missing expected permission check: action={}, resource={} Permission checks seen: {} """.format( - action, - resource_identifier, - json.dumps(list(datasette._permission_checks), indent=4), + action, resource, json.dumps(list(datasette._permission_checks), indent=4), ) From 040fc0546f1ad602125ecdc27d9d013d830aa808 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 12:02:56 -0700 Subject: [PATCH 0084/1871] Updated tests, refs #817 --- tests/test_permissions.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 475f93dd..90ba1494 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -210,41 +210,41 @@ def test_query_list_respects_view_query(): "path,permissions", [ ("/", ["view-instance"]), - ("/fixtures", ["view-instance", ("view-database", "database", "fixtures")]), + ("/fixtures", ["view-instance", ("view-database", "fixtures")]), ( "/fixtures/facetable/1", - ["view-instance", ("view-table", "table", ("fixtures", "facetable"))], + ["view-instance", ("view-table", ("fixtures", "facetable"))], ), ( "/fixtures/simple_primary_key", [ "view-instance", - ("view-database", "database", "fixtures"), - ("view-table", "table", ("fixtures", "simple_primary_key")), + ("view-database", "fixtures"), + ("view-table", ("fixtures", "simple_primary_key")), ], ), ( "/fixtures?sql=select+1", [ "view-instance", - ("view-database", "database", "fixtures"), - ("execute-sql", "database", "fixtures"), + ("view-database", "fixtures"), + ("execute-sql", "fixtures"), ], ), ( "/fixtures.db", [ "view-instance", - ("view-database", "database", "fixtures"), - ("view-database-download", "database", "fixtures"), + ("view-database", "fixtures"), + ("view-database-download", "fixtures"), ], ), ( "/fixtures/neighborhood_search", [ "view-instance", - ("view-database", "database", "fixtures"), - ("view-query", "query", ("fixtures", "neighborhood_search")), + ("view-database", "fixtures"), + ("view-query", ("fixtures", "neighborhood_search")), ], ), ], From c7d145e016522dd6ee229d4d0b3ba79a7a8877c1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 12:06:05 
-0700 Subject: [PATCH 0085/1871] Updated example for extra_template_vars hook, closes #816 --- docs/plugins.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index 56041d0c..6b1e60f2 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -689,14 +689,14 @@ Function that returns an awaitable function that returns a dictionary Datasette runs Jinja2 in `async mode `__, which means you can add awaitable functions to the template scope and they will be automatically awaited when they are rendered by the template. -Here's an example plugin that returns an authentication object from the ASGI scope: +Here's an example plugin that adds a ``"user_agent"`` variable to the template context containing the current request's User-Agent header: .. code-block:: python @hookimpl def extra_template_vars(request): return { - "auth": request.scope.get("auth") + "user_agent": request.headers.get("user-agent") } This example returns an awaitable function which adds a list of ``hidden_table_names`` to the context: From 54370853828bdf87ca844fd0fc00900e0e2e659d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 12:32:27 -0700 Subject: [PATCH 0086/1871] Documentation for allow blocks on more stuff, closes #811 --- docs/authentication.rst | 121 ++++++++++++++++++++++++++++++++-------- docs/sql_queries.rst | 2 +- 2 files changed, 100 insertions(+), 23 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index f5209dfc..a6c4ee79 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -15,7 +15,7 @@ Actors Through plugins, Datasette can support both authenticated users (with cookies) and authenticated API agents (via authentication tokens). The word "actor" is used to cover both of these cases. -Every request to Datasette has an associated actor value. This can be ``None`` for unauthenticated requests, or a JSON compatible Python dictionary for authenticated users or API agents. +Every request to Datasette has an associated actor value, available in the code as ``request.actor``. This can be ``None`` for unauthenticated requests, or a JSON compatible Python dictionary for authenticated users or API agents. The only required field in an actor is ``"id"``, which must be a string. Plugins may decide to add any other fields to the actor dictionary. @@ -24,7 +24,7 @@ Plugins can use the :ref:`plugin_actor_from_request` hook to implement custom lo .. _authentication_root: Using the "root" actor -====================== +---------------------- Datasette currently leaves almost all forms of authentication to plugins - `datasette-auth-github `__ for example. @@ -49,37 +49,40 @@ The URL on the first line includes a one-use token which can be used to sign in .. _authentication_permissions: -Permissions -=========== +Checking permission +=================== Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. This method is also used by Datasette core code itself, which allows plugins to help make decisions on which actions are allowed by implementing the :ref:`plugin_permission_allowed` plugin hook. -.. _authentication_permissions_canned_queries: +.. _authentication_permissions_metadata: -Permissions for canned queries -============================== +Configuring permissions in metadata.json +======================================== -Datasette's :ref:`canned queries ` default to allowing any user to execute them. 
+You can limit who is allowed to view different parts of your Datasette instance using ``"allow"`` keys in your :ref:`metadata` configuration. -You can limit who is allowed to execute a specific query with the ``"allow"`` key in the :ref:`metadata` configuration for that query. +You can control the following: -Here's how to restrict access to a write query to just the "root" user: +* Access to the entire Datasette instance +* Access to specific databases +* Access to specific tables and views +* Access to specific :ref:`canned_queries` + +If a user cannot access a specific database, they will not be able to access tables, views or queries within that database. If a user cannot access the instance they will not be able to access any of the databases, tables, views or queries. + +.. _authentication_permissions_instance: + +Controlling access to an instance +--------------------------------- + +Here's how to restrict access to your entire Datasette instance to just the ``"id": "root"`` user: .. code-block:: json { - "databases": { - "mydatabase": { - "queries": { - "add_name": { - "sql": "INSERT INTO names (name) VALUES (:name)", - "write": true, - "allow": { - "id": ["root"] - } - } - } - } + "title": "My private Datasette instance", + "allow": { + "id": "root" } } @@ -126,6 +129,80 @@ If you want to provide access to any actor with a value for a specific key, use These keys act as an "or" mechanism. A actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block. +.. _authentication_permissions_database: + +Controlling access to specific databases +---------------------------------------- + +To limit access to a specific ``private.db`` database to just authenticated users, use the ``"allow"`` block like this: + +.. code-block:: json + + { + "databases": { + "private": { + "allow": { + "id": "*" + } + } + } + } + +.. _authentication_permissions_table: + +Controlling access to specific tables and views +----------------------------------------------- + +To limit access to the ``users`` table in your ``bakery.db`` database: + +.. code-block:: json + + { + "databases": { + "bakery": { + "tables": { + "users": { + "allow": { + "id": "*" + } + } + } + } + } + } + +This works for SQL views as well - you can treat them as if they are tables. + +.. warning:: + Restricting access to tables and views in this way will NOT prevent users from querying them using arbitrary SQL queries. + + If you are restricting access to specific tables you should also use the ``"allow_sql"`` block to prevent users from accessing + +.. _authentication_permissions_table: + +Controlling access to specific canned queries +--------------------------------------------- + +To limit access to the ``add_name`` canned query in your ``dogs.db`` database to just the :ref:`root user`: + +.. code-block:: json + + { + "databases": { + "dogs": { + "queries": { + "add_name": { + "sql": "INSERT INTO names (name) VALUES (:name)", + "write": true, + "allow": { + "id": ["root"] + } + } + } + } + } + } + .. _authentication_actor_matches_allow: actor_matches_allow() diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index 5df8bdb0..5295a2e0 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -217,7 +217,7 @@ Writable canned queries Canned queries by default are read-only. You can use the ``"write": true`` key to indicate that a canned query can write to the database. 
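The ``allow`` blocks shown above are evaluated against the actor by ``datasette.utils.actor_matches_allow()``. A rough illustration of the matching rules described in this section, using made-up actor dictionaries:

.. code-block:: python

    from datasette.utils import actor_matches_allow

    # Exact string match, list membership and the "*" wildcard all grant access
    assert actor_matches_allow({"id": "root"}, {"id": "root"})
    assert actor_matches_allow({"id": "simon"}, {"id": ["simon", "cleopaws"]})
    assert actor_matches_allow({"id": "anyone"}, {"id": "*"})
    # An unauthenticated (None) actor does not match an allow block
    assert not actor_matches_allow(None, {"id": "root"})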
-See :ref:`authentication_permissions_canned_queries` for details on how to add permission checks to canned queries, using the ``"allow"`` key. +See :ref:`authentication_permissions_metadata` for details on how to add permission checks to canned queries, using the ``"allow"`` key. .. code-block:: json From 8205d58316ced1d5ae589b29a5a1b5ecb6257ab0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 13:10:40 -0700 Subject: [PATCH 0087/1871] Corrected documentation for resource in view-query --- docs/authentication.rst | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index a6c4ee79..88808428 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -245,7 +245,6 @@ view-instance Top level permission - Actor is allowed to view any pages within this instance, starting at https://latest.datasette.io/ - .. _permissions_view_database: view-database @@ -283,8 +282,8 @@ view-query Actor is allowed to view a :ref:`canned query ` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size -``resource`` - string - The name of the canned query +``resource`` - tuple: (string, string) + The name of the database, then the name of the canned query .. _permissions_execute_sql: From e0a4664fbab5556454dac7f3c798253a34db2928 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 15:09:57 -0700 Subject: [PATCH 0088/1871] Better example plugin for permission_allowed Also fixed it so default permission checks run after plugin permission checks, refs #818 --- datasette/default_permissions.py | 2 +- docs/authentication.rst | 4 ++-- docs/plugins.rst | 40 ++++++++++++++++++++++++++++++-- 3 files changed, 41 insertions(+), 5 deletions(-) diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index e989c0fa..a2f4a315 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -2,7 +2,7 @@ from datasette import hookimpl from datasette.utils import actor_matches_allow -@hookimpl +@hookimpl(tryfirst=True) def permission_allowed(datasette, actor, action, resource): if action == "permissions-debug": if actor and actor.get("id") == "root": diff --git a/docs/authentication.rst b/docs/authentication.rst index 88808428..34d46511 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -174,11 +174,11 @@ To limit access to the ``users`` table in your ``bakery.db`` database: This works for SQL views as well - you can treat them as if they are tables. .. warning:: - Restricting access to tables and views in this way will NOT prevent users from querying them using arbitrary SQL queries. + Restricting access to tables and views in this way will NOT prevent users from querying them using arbitrary SQL queries, `like this `__ for example. If you are restricting access to specific tables you should also use the ``"allow_sql"`` block to prevent users from accessing -.. _authentication_permissions_table: +.. _authentication_permissions_query: Controlling access to specific canned queries --------------------------------------------- diff --git a/docs/plugins.rst b/docs/plugins.rst index 6b1e60f2..73d2eabd 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1006,7 +1006,7 @@ Instead of returning a dictionary, this function can return an awaitable functio .. 
_plugin_permission_allowed: permission_allowed(datasette, actor, action, resource) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ``datasette`` - :ref:`internals_datasette` You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. @@ -1022,4 +1022,40 @@ permission_allowed(datasette, actor, action, resource) Called to check that an actor has permission to perform an action on a resource. Can return ``True`` if the action is allowed, ``False`` if the action is not allowed or ``None`` if the plugin does not have an opinion one way or the other. -See :ref:`permissions` for a full list of permissions included in Datasette core. +Here's an example plugin which randomly selects if a permission should be allowed or denied, except for ``view-instance`` which always uses the default permission scheme instead. + +.. code-block:: python + + from datasette import hookimpl + import random + + @hookimpl + def permission_allowed(action): + if action != "view-instance": + # Return True or False at random + return random.random() > 0.5 + # Returning None falls back to default permissions + +This function can alternatively return an awaitable function which itself returns ``True``, ``False`` or ``None``. You can use this option if you need to execute additional database queries using ``await datasette.execute(...)``. + +Here's an example that allows users to view the ``admin_log`` table only if their actor ``id`` is present in the ``admin_users`` table. It aso disallows arbitrary SQL queries for the ``staff.db`` database for all users. + +.. code-block:: python + + @hookimpl + def permission_allowed(datasette, actor, action, resource): + async def inner(): + if action == "execute-sql" and resource == "staff": + return False + if action == "view-table" and resource == ("staff", "admin_log"): + if not actor: + return False + user_id = actor["id"] + return await datasette.get_database("staff").execute( + "select count(*) from admin_users where user_id = :user_id", + {"user_id": user_id}, + ) + + return inner + +See :ref:`permissions` for a full list of permissions that are included in Datasette core. From 49d6d2f7b0f6cb02e25022e1c9403811f1fa0a7c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 17:05:44 -0700 Subject: [PATCH 0089/1871] allow_sql block to control execute-sql upermission in metadata.json, closes #813 Also removed the --config allow_sql:0 mechanism in favour of the new allow_sql block. 
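A quick sketch of how the new ``allow_sql`` block might be exercised, following the ``make_app_client`` pattern used in the tests below. It assumes, as in ``test_allow_sql_off``, that the SQL ``textarea`` only appears on the database page when ``execute-sql`` is allowed:

.. code-block:: python

    from .fixtures import make_app_client

    def test_allow_sql_root_only():
        with make_app_client(metadata={"allow_sql": {"id": "root"}}) as client:
            # Anonymous visitors should not get the custom SQL textarea...
            assert "<textarea" not in client.get("/fixtures").text
            # ...but the root actor, matched by the allow_sql block, still should
            cookies = {"ds_actor": client.ds.sign({"id": "root"}, "actor")}
            assert "<textarea" in client.get("/fixtures", cookies=cookies).text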
--- datasette/app.py | 1 - datasette/default_permissions.py | 8 ++++++++ datasette/templates/database.html | 2 +- datasette/templates/query.html | 2 +- datasette/templates/table.html | 2 +- datasette/views/database.py | 8 ++++++-- datasette/views/table.py | 9 +++++++-- docs/authentication.rst | 33 ++++++++++++++++++++++++++++++- docs/config.rst | 9 --------- docs/json_api.rst | 2 +- docs/pages.rst | 2 +- docs/sql_queries.rst | 4 ++-- tests/test_api.py | 12 ++--------- tests/test_config_dir.py | 3 --- tests/test_html.py | 10 +--------- tests/test_permissions.py | 29 +++++++++++++++++++++++++++ 16 files changed, 92 insertions(+), 44 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 2f89d17c..a7c3c66a 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -110,7 +110,6 @@ CONFIG_OPTIONS = ( "Allow users to download the original SQLite database files", ), ConfigOption("suggest_facets", True, "Calculate and display suggested facets"), - ConfigOption("allow_sql", True, "Allow arbitrary SQL queries via ?sql= parameter"), ConfigOption( "default_cache_ttl", 5, diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index a2f4a315..e750acbf 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -34,3 +34,11 @@ def permission_allowed(datasette, actor, action, resource): if allow is None: return True return actor_matches_allow(actor, allow) + elif action == "execute-sql": + # Use allow_sql block from database block, or from top-level + database_allow_sql = datasette.metadata("allow_sql", database=resource) + if database_allow_sql is None: + database_allow_sql = datasette.metadata("allow_sql") + if database_allow_sql is None: + return True + return actor_matches_allow(actor, database_allow_sql) diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 100faee4..5ae51ef7 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -22,7 +22,7 @@ {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} -{% if config.allow_sql %} +{% if allow_execute_sql %}

    Custom SQL query

    diff --git a/datasette/templates/query.html b/datasette/templates/query.html index 7771b101..c65953fb 100644 --- a/datasette/templates/query.html +++ b/datasette/templates/query.html @@ -35,7 +35,7 @@

    Custom SQL query{% if display_rows %} returning {% if truncated %}more than {% endif %}{{ "{:,}".format(display_rows|length) }} row{% if display_rows|length == 1 %}{% else %}s{% endif %}{% endif %} {% if hide_sql %}(show){% else %}(hide){% endif %}

    {% if not hide_sql %} - {% if editable and config.allow_sql %} + {% if editable and allow_execute_sql %}

    {% else %}
    {% if query %}{{ query.sql }}{% endif %}
    diff --git a/datasette/templates/table.html b/datasette/templates/table.html index 1289e125..373fd576 100644 --- a/datasette/templates/table.html +++ b/datasette/templates/table.html @@ -109,7 +109,7 @@ {% endif %} -{% if query.sql and config.allow_sql %} +{% if query.sql and allow_execute_sql %}

    View and edit SQL

    {% endif %} diff --git a/datasette/views/database.py b/datasette/views/database.py index e1b29c27..ee99bc2d 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -26,8 +26,6 @@ class DatabaseView(DataView): self.ds.update_with_inherited_metadata(metadata) if request.args.get("sql"): - if not self.ds.config("allow_sql"): - raise DatasetteError("sql= is not allowed", status=400) sql = request.args.get("sql") validate_sql_select(sql) return await QueryView(self.ds).data( @@ -90,6 +88,9 @@ class DatabaseView(DataView): "private": not await self.ds.permission_allowed( None, "view-database", database ), + "allow_execute_sql": await self.ds.permission_allowed( + request.actor, "execute-sql", database, default=True + ), }, { "show_hidden": request.args.get("_show_hidden"), @@ -289,6 +290,9 @@ class QueryView(DataView): "columns": columns, "query": {"sql": sql, "params": params}, "private": private, + "allow_execute_sql": await self.ds.permission_allowed( + request.actor, "execute-sql", database, default=True + ), }, extra_template, templates, diff --git a/datasette/views/table.py b/datasette/views/table.py index 4cec0cda..91245293 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -342,8 +342,10 @@ class TableView(RowTableShared): extra_wheres_for_ui = [] # Add _where= from querystring if "_where" in request.args: - if not self.ds.config("allow_sql"): - raise DatasetteError("_where= is not allowed", status=400) + if not await self.ds.permission_allowed( + request.actor, "execute-sql", resource=database, default=True, + ): + raise DatasetteError("_where= is not allowed", status=403) else: where_clauses.extend(request.args.getlist("_where")) extra_wheres_for_ui = [ @@ -839,6 +841,9 @@ class TableView(RowTableShared): "next": next_value and str(next_value) or None, "next_url": next_url, "private": private, + "allow_execute_sql": await self.ds.permission_allowed( + request.actor, "execute-sql", database, default=True + ), }, extra_template, ( diff --git a/docs/authentication.rst b/docs/authentication.rst index 34d46511..f7281db4 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -176,7 +176,7 @@ This works for SQL views as well - you can treat them as if they are tables. .. warning:: Restricting access to tables and views in this way will NOT prevent users from querying them using arbitrary SQL queries, `like this `__ for example. - If you are restricting access to specific tables you should also use the ``"allow_sql"`` block to prevent users from accessing + If you are restricting access to specific tables you should also use the ``"allow_sql"`` block to prevent users from bypassing the limit with their own SQL queries - see :ref:`authentication_permissions_execute_sql`. .. _authentication_permissions_query: @@ -203,6 +203,37 @@ To limit access to the ``add_name`` canned query in your ``dogs.db`` database to } } +.. _authentication_permissions_execute_sql: + +Controlling the ability to execute arbitrary SQL +------------------------------------------------ + +The ``"allow_sql"`` block can be used to control who is allowed to execute arbitrary SQL queries, both using the form on the database page e.g. https://latest.datasette.io/fixtures or by appending a ``?_where=`` parameter to the table page as seen on https://latest.datasette.io/fixtures/facetable?_where=city_id=1. + +To enable just the :ref:`root user` to execute SQL for all databases in your instance, use the following: + +.. 
code-block:: json + + { + "allow_sql": { + "id": "root" + } + } + +To limit this ability for just one specific database, use this: + +.. code-block:: json + + { + "databases": { + "mydatabase": { + "allow_sql": { + "id": "root" + } + } + } + } + .. _authentication_actor_matches_allow: actor_matches_allow() diff --git a/docs/config.rst b/docs/config.rst index da93e40a..56b38613 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -150,15 +150,6 @@ Should users be able to download the original SQLite database using a link on th datasette mydatabase.db --config allow_download:off -.. _config_allow_sql: - -allow_sql -~~~~~~~~~ - -Enable/disable the ability for users to run custom SQL directly against a database. To disable this feature, run:: - - datasette mydatabase.db --config allow_sql:off - .. _config_default_cache_ttl: default_cache_ttl diff --git a/docs/json_api.rst b/docs/json_api.rst index 7d37d425..af98eecd 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -291,7 +291,7 @@ Special table arguments though this could potentially result in errors if the wrong syntax is used. ``?_where=SQL-fragment`` - If the :ref:`config_allow_sql` config option is enabled, this parameter + If the :ref:`permissions_execute_sql` permission is enabled, this parameter can be used to pass one or more additional SQL fragments to be used in the `WHERE` clause of the SQL used to query the table. diff --git a/docs/pages.rst b/docs/pages.rst index f220f94d..ce8f5d06 100644 --- a/docs/pages.rst +++ b/docs/pages.rst @@ -29,7 +29,7 @@ Database ======== Each database has a page listing the tables, views and canned queries -available for that database. If the :ref:`config_allow_sql` config option is enabled (it's turned on by default) there will also be an interface for executing arbitrary SQL select queries against the data. +available for that database. If the :ref:`permissions_execute_sql` permission is enabled (it's on by default) there will also be an interface for executing arbitrary SQL select queries against the data. Examples: diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index 5295a2e0..db72deb7 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -12,8 +12,8 @@ you like. You can also construct queries using the filter interface on the tables page, then click "View and edit SQL" to open that query in the custom SQL editor. -Note that this interface is only available if the :ref:`config_allow_sql` option -has not been disabled. +Note that this interface is only available if the :ref:`permissions_execute_sql` +permission is allowed. 
Any Datasette SQL query is reflected in the URL of the page, allowing you to bookmark them, share them with others and navigate through previous queries diff --git a/tests/test_api.py b/tests/test_api.py index 13a98b6a..1a54edec 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -634,13 +634,6 @@ def test_invalid_custom_sql(app_client): assert "Statement must be a SELECT" == response.json["error"] -def test_allow_sql_off(): - with make_app_client(config={"allow_sql": False}) as client: - response = client.get("/fixtures.json?sql=select+sleep(0.01)") - assert 400 == response.status - assert "sql= is not allowed" == response.json["error"] - - def test_table_json(app_client): response = app_client.get("/fixtures/simple_primary_key.json?_shape=objects") assert response.status == 200 @@ -1137,9 +1130,9 @@ def test_table_filter_extra_where_invalid(app_client): def test_table_filter_extra_where_disabled_if_no_sql_allowed(): - with make_app_client(config={"allow_sql": False}) as client: + with make_app_client(metadata={"allow_sql": {}}) as client: response = client.get("/fixtures/facetable.json?_where=neighborhood='Dogpatch'") - assert 400 == response.status + assert 403 == response.status assert "_where= is not allowed" == response.json["error"] @@ -1325,7 +1318,6 @@ def test_config_json(app_client): "allow_download": True, "allow_facet": True, "suggest_facets": True, - "allow_sql": True, "default_cache_ttl": 5, "default_cache_ttl_hashed": 365 * 24 * 60 * 60, "num_sql_threads": 3, diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index 490b1f1d..b1f6994f 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -10,7 +10,6 @@ from datasette import hookimpl @hookimpl def extra_template_vars(): - print("this is template vars") return { "from_plugin": "hooray" } @@ -18,7 +17,6 @@ def extra_template_vars(): METADATA = {"title": "This is from metadata"} CONFIG = { "default_cache_ttl": 60, - "allow_sql": False, } CSS = """ body { margin-top: 3em} @@ -91,7 +89,6 @@ def test_config(config_dir_client): response = config_dir_client.get("/-/config.json") assert 200 == response.status assert 60 == response.json["default_cache_ttl"] - assert not response.json["allow_sql"] def test_plugins(config_dir_client): diff --git a/tests/test_html.py b/tests/test_html.py index cb0e0c90..e6933dfe 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -924,16 +924,8 @@ def test_allow_download_off(): assert 403 == response.status -def test_allow_sql_on(app_client): - response = app_client.get("/fixtures") - soup = Soup(response.body, "html.parser") - assert len(soup.findAll("textarea", {"name": "sql"})) - response = app_client.get("/fixtures/sortable") - assert b"View and edit SQL" in response.body - - def test_allow_sql_off(): - with make_app_client(config={"allow_sql": False}) as client: + with make_app_client(metadata={"allow_sql": {}}) as client: response = client.get("/fixtures") soup = Soup(response.body, "html.parser") assert not len(soup.findAll("textarea", {"name": "sql"})) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 90ba1494..d8c98825 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -186,6 +186,35 @@ def test_view_query(allow, expected_anon, expected_auth): assert ">fixtures 🔒" in auth_response.text +@pytest.mark.parametrize( + "metadata", + [ + {"allow_sql": {"id": "root"}}, + {"databases": {"fixtures": {"allow_sql": {"id": "root"}}}}, + ], +) +def test_execute_sql(metadata): + with make_app_client(metadata=metadata) 
as client: + form_fragment = ' Date: Mon, 8 Jun 2020 17:35:23 -0700 Subject: [PATCH 0090/1871] Fixed broken CSS on 404 page, closes #777 --- datasette/app.py | 11 ++++++++++- tests/test_html.py | 12 ++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index a7c3c66a..d562e611 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1015,7 +1015,16 @@ class DatasetteRouter(AsgiRouter): templates = ["500.html"] if status != 500: templates = ["{}.html".format(status)] + templates - info.update({"ok": False, "error": message, "status": status, "title": title}) + info.update( + { + "ok": False, + "error": message, + "status": status, + "title": title, + "base_url": self.ds.config("base_url"), + "app_css_hash": self.ds.app_css_hash(), + } + ) headers = {} if self.ds.cors: headers["Access-Control-Allow-Origin"] = "*" diff --git a/tests/test_html.py b/tests/test_html.py index e6933dfe..f9b18daa 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -965,6 +965,18 @@ def inner_html(soup): return inner_html.strip() +@pytest.mark.parametrize("path", ["/404", "/fixtures/404"]) +def test_404(app_client, path): + response = app_client.get(path) + assert 404 == response.status + assert ( + ' Date: Mon, 8 Jun 2020 19:22:40 -0700 Subject: [PATCH 0091/1871] Fixed test_table_not_exists_json test --- datasette/app.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index d562e611..79f52a54 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1016,14 +1016,7 @@ class DatasetteRouter(AsgiRouter): if status != 500: templates = ["{}.html".format(status)] + templates info.update( - { - "ok": False, - "error": message, - "status": status, - "title": title, - "base_url": self.ds.config("base_url"), - "app_css_hash": self.ds.app_css_hash(), - } + {"ok": False, "error": message, "status": status, "title": title,} ) headers = {} if self.ds.cors: @@ -1033,7 +1026,16 @@ class DatasetteRouter(AsgiRouter): else: template = self.ds.jinja_env.select_template(templates) await asgi_send_html( - send, await template.render_async(info), status=status, headers=headers + send, + await template.render_async( + dict( + info, + base_url=self.ds.config("base_url"), + app_css_hash=self.ds.app_css_hash(), + ) + ), + status=status, + headers=headers, ) From f5e79adf26d0daa3831e3fba022f1b749a9efdee Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 20:12:06 -0700 Subject: [PATCH 0092/1871] register_routes() plugin hook (#819) Fixes #215 --- datasette/app.py | 21 ++++++++++++++++ datasette/hookspecs.py | 5 ++++ datasette/utils/__init__.py | 12 ++++++++- datasette/utils/asgi.py | 2 +- docs/index.rst | 2 +- docs/plugins.rst | 50 ++++++++++++++++++++++++++++++++++++- tests/fixtures.py | 1 + tests/plugins/my_plugin.py | 25 +++++++++++++++++++ tests/test_plugins.py | 15 +++++++++++ 9 files changed, 129 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 79f52a54..120091f7 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -39,6 +39,7 @@ from .renderer import json_renderer from .database import Database, QueryInterrupted from .utils import ( + async_call_with_supported_arguments, escape_css_string, escape_sqlite, format_bytes, @@ -783,6 +784,10 @@ class Datasette: "Returns an ASGI app function that serves the whole of Datasette" routes = [] + for routes_to_add in pm.hook.register_routes(): + for regex, view_fn in routes_to_add: + routes.append((regex, 
wrap_view(view_fn, self))) + def add_route(view, regex): routes.append((regex, view)) @@ -1048,3 +1053,19 @@ def _cleaner_task_str(task): # running at /Users/simonw/Dropbox/Development/datasette/venv-3.7.5/lib/python3.7/site-packages/uvicorn/main.py:361> # Clean up everything up to and including site-packages return _cleaner_task_str_re.sub("", s) + + +def wrap_view(view_fn, datasette): + async def asgi_view_fn(scope, receive, send): + response = await async_call_with_supported_arguments( + view_fn, + scope=scope, + receive=receive, + send=send, + request=Request(scope, receive), + datasette=datasette, + ) + if response is not None: + await response.asgi_send(send) + + return asgi_view_fn diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index d5fd232f..ab3e131c 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -60,6 +60,11 @@ def register_facet_classes(): "Register Facet subclasses" +@hookspec +def register_routes(): + "Register URL routes: return a list of (regex, view_function) pairs" + + @hookspec def actor_from_request(datasette, request): "Return an actor dictionary based on the incoming request" diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 7c1f34e0..49268638 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -842,7 +842,7 @@ def parse_metadata(content): raise BadMetadataError("Metadata is not valid JSON or YAML") -def call_with_supported_arguments(fn, **kwargs): +def _gather_arguments(fn, kwargs): parameters = inspect.signature(fn).parameters.keys() call_with = [] for parameter in parameters: @@ -853,9 +853,19 @@ def call_with_supported_arguments(fn, **kwargs): ) ) call_with.append(kwargs[parameter]) + return call_with + + +def call_with_supported_arguments(fn, **kwargs): + call_with = _gather_arguments(fn, kwargs) return fn(*call_with) +async def async_call_with_supported_arguments(fn, **kwargs): + call_with = _gather_arguments(fn, kwargs) + return await fn(*call_with) + + def actor_matches_allow(actor, allow): actor = actor or {} if allow is None: diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index bca9c9ab..349f2a0a 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -399,7 +399,7 @@ class Response: @classmethod def text(cls, body, status=200, headers=None): return cls( - body, + str(body), status=status, headers=headers, content_type="text/plain; charset=utf-8", diff --git a/docs/index.rst b/docs/index.rst index 03988c8e..5334386f 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -31,7 +31,7 @@ Contents -------- .. toctree:: - :maxdepth: 2 + :maxdepth: 3 getting_started installation diff --git a/docs/plugins.rst b/docs/plugins.rst index 73d2eabd..caca0019 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -835,6 +835,55 @@ And here is an example ``can_render`` function which returns ``True`` only if th Examples: `datasette-atom `_, `datasette-ics `_ +.. _plugin_register_routes: + +register_routes() +~~~~~~~~~~~~~~~~~ + +Register additional view functions to execute for specified URL routes. + +Return a list of ``(regex, async_view_function)`` pairs, something like this: + +.. 
code-block:: python + + from datasette.utils.asgi import Response + import html + + + async def hello_from(scope): + name = scope["url_route"]["kwargs"]["name"] + return Response.html("Hello from {}".format( + html.escape(name) + )) + + + @hookimpl + def register_routes(): + return [ + (r"^/hello-from/(?P.*)$"), hello_from) + ] + +The view functions can take a number of different optional arguments. The corresponding argument will be passed to your function depending on its named parameters - a form of dependency injection. + +The optional view function arguments are as follows: + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + +``request`` - Request object + The current HTTP :ref:`internals_request`. + +``scope`` - dictionary + The incoming ASGI scope dictionary. + +``send`` - function + The ASGI send function. + +``receive`` - function + The ASGI receive function. + +The function can either return a ``Response`` or it can return nothing and instead respond directly to the request using the ASGI ``receive`` function (for advanced uses only). + .. _plugin_register_facet_classes: register_facet_classes() @@ -901,7 +950,6 @@ The plugin hook can then be used to register the new facet class like this: def register_facet_classes(): return [SpecialFacet] - .. _plugin_asgi_wrapper: asgi_wrapper(datasette) diff --git a/tests/fixtures.py b/tests/fixtures.py index e9175b57..a51a869d 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -46,6 +46,7 @@ EXPECTED_PLUGINS = [ "prepare_connection", "prepare_jinja2_environment", "register_facet_classes", + "register_routes", "render_cell", ], }, diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 46893710..57803178 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -1,6 +1,7 @@ from datasette import hookimpl from datasette.facets import Facet from datasette.utils import path_with_added_args +from datasette.utils.asgi import asgi_send_json, Response import base64 import pint import json @@ -142,3 +143,27 @@ def permission_allowed(actor, action): return True elif action == "this_is_denied": return False + + +@hookimpl +def register_routes(): + async def one(datasette): + return Response.text( + (await datasette.get_database().execute("select 1 + 1")).first()[0] + ) + + async def two(request, scope): + name = scope["url_route"]["kwargs"]["name"] + greeting = request.args.get("greeting") + return Response.text("{} {}".format(greeting, name)) + + async def three(scope, send): + await asgi_send_json( + send, {"hello": "world"}, status=200, headers={"x-three": "1"} + ) + + return [ + (r"/one/$", one), + (r"/two/(?P.*)$", two), + (r"/three/$", three), + ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index c782b87b..c7bb4859 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -544,3 +544,18 @@ def test_actor_json(app_client): assert {"actor": {"id": "bot2", "1+1": 2}} == app_client.get( "/-/actor.json/?_bot2=1" ).json + + +@pytest.mark.parametrize( + "path,body", [("/one/", "2"), ("/two/Ray?greeting=Hail", "Hail Ray"),] +) +def test_register_routes(app_client, path, body): + response = app_client.get(path) + assert 200 == response.status + assert body == response.text + + +def test_register_routes_asgi(app_client): + response = app_client.get("/three/") + assert {"hello": "world"} == response.json + assert "1" == response.headers["x-three"] From 
db660db4632409334e646237c3dd214764729cd4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 20:32:10 -0700 Subject: [PATCH 0093/1871] Docs + unit tests for Response, closes #821 --- datasette/utils/asgi.py | 9 ++++++ docs/internals.rst | 48 ++++++++++++++++++++++++++++++++ docs/plugins.rst | 2 +- tests/test_internals_response.py | 28 +++++++++++++++++++ 4 files changed, 86 insertions(+), 1 deletion(-) create mode 100644 tests/test_internals_response.py diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 349f2a0a..9e6c82dd 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -405,6 +405,15 @@ class Response: content_type="text/plain; charset=utf-8", ) + @classmethod + def json(cls, body, status=200, headers=None): + return cls( + json.dumps(body), + status=status, + headers=headers, + content_type="application/json; charset=utf-8", + ) + @classmethod def redirect(cls, path, status=302, headers=None): headers = headers or {} diff --git a/docs/internals.rst b/docs/internals.rst index 83dbd897..b0096cfa 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -80,6 +80,54 @@ Consider the querystring ``?foo=1&foo=2&bar=3`` - with two values for ``foo`` an ``len(request.args)`` - integer Returns the number of keys. +.. _internals_response: + +Response class +~~~~~~~~~~~~~~ + +The ``Response`` class can be returned from view functions that have been registered using the :ref:`plugin_register_routes` hook. + +The ``Response()`` constructor takes the following arguments: + +``body`` - string + The body of the response. + +``status`` - integer (optional) + The HTTP status - defaults to 200. + +``headers`` - dictionary (optional) + A dictionary of extra HTTP headers, e.g. ``{"x-hello": "world"}``. + +``content_type`` - string (optional) + The content-type for the response. Defaults to ``text/plain``. + +For example: + +.. code-block:: python + + from datasette.utils.asgi import Response + + response = Response( + "This is XML", + content_type="application/xml; charset=utf-8" + ) + +The easiest way to create responses is using the ``Response.text(...)``, ``Response.html(...)``, ``Response.json(...)`` or ``Response.redirect(...)`` helper methods: + +.. code-block:: python + + from datasette.utils.asgi import Response + + html_response = Response.html("This is HTML") + json_response = Response.json({"this_is": "json"}) + text_response = Response.text("This will become utf-8 encoded text") + # Redirects are served as 302, unless you pass status=301: + redirect_response = Response.redirect("https://latest.datasette.io/") + +Each of these responses will use the correct corresponding content-type - ``text/html; charset=utf-8``, ``application/json; charset=utf-8`` or ``text/plain; charset=utf-8`` respectively. + +Each of the helper methods take optional ``status=`` and ``headers=`` arguments, documented above. + .. _internals_datasette: Datasette class diff --git a/docs/plugins.rst b/docs/plugins.rst index caca0019..465fcd52 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -882,7 +882,7 @@ The optional view function arguments are as follows: ``receive`` - function The ASGI receive function. -The function can either return a ``Response`` or it can return nothing and instead respond directly to the request using the ASGI ``receive`` function (for advanced uses only). 
+The function can either return a :ref:`internals_response` or it can return nothing and instead respond directly to the request using the ASGI ``send`` function (for advanced uses only). .. _plugin_register_facet_classes: diff --git a/tests/test_internals_response.py b/tests/test_internals_response.py new file mode 100644 index 00000000..7c11f858 --- /dev/null +++ b/tests/test_internals_response.py @@ -0,0 +1,28 @@ +from datasette.utils.asgi import Response + + +def test_response_html(): + response = Response.html("Hello from HTML") + assert 200 == response.status + assert "Hello from HTML" == response.body + assert "text/html; charset=utf-8" == response.content_type + + +def test_response_text(): + response = Response.text("Hello from text") + assert 200 == response.status + assert "Hello from text" == response.body + assert "text/plain; charset=utf-8" == response.content_type + + +def test_response_json(): + response = Response.json({"this_is": "json"}) + assert 200 == response.status + assert '{"this_is": "json"}' == response.body + assert "application/json; charset=utf-8" == response.content_type + + +def test_response_redirect(): + response = Response.redirect("/foo") + assert 302 == response.status + assert "/foo" == response.headers["Location"] From fac8e9381500fc02cec99281122ee8e0c72fabe1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 20:40:00 -0700 Subject: [PATCH 0094/1871] request.url_vars property, closes #822 --- datasette/utils/asgi.py | 4 ++++ docs/internals.rst | 3 +++ docs/plugins.rst | 4 ++-- tests/plugins/my_plugin.py | 4 ++-- tests/test_internals_request.py | 17 +++++++++++++++++ 5 files changed, 28 insertions(+), 4 deletions(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 9e6c82dd..cdd6b148 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -32,6 +32,10 @@ class Request: (self.scheme, self.host, self.path, None, self.query_string, None) ) + @property + def url_vars(self): + return (self.scope.get("url_route") or {}).get("kwargs") or {} + @property def scheme(self): return self.scope.get("scheme") or "http" diff --git a/docs/internals.rst b/docs/internals.rst index b0096cfa..df21eb09 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -42,6 +42,9 @@ The request object is passed to various plugin hooks. It represents an incoming ``.args`` - MultiParams An object representing the parsed querystring parameters, see below. +``.url_vars`` - dictionary (str -> str) + Variables extracted from the URL path, if that path was defined using a regular expression. See :ref:`plugin_register_routes`. + ``.actor`` - dictionary (str -> Any) or None The currently authenticated actor (see :ref:`actors `), or ``None`` if the request is unauthenticated. 
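Taken together, the new ``request.url_vars`` property and the ``Response.json()`` helper from the previous commit make small JSON endpoints easy to build with ``register_routes()``. A minimal sketch - the ``/-/greet/`` path and function name are invented for illustration:

.. code-block:: python

    from datasette import hookimpl
    from datasette.utils.asgi import Response


    async def greet(request):
        # "name" is captured by the named group in the route regex below
        return Response.json(
            {
                "name": request.url_vars["name"],
                "greeting": request.args.get("greeting") or "Hello",
            }
        )


    @hookimpl
    def register_routes():
        return [(r"^/-/greet/(?P<name>.*)$", greet)]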
diff --git a/docs/plugins.rst b/docs/plugins.rst index 465fcd52..17fd64df 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -850,8 +850,8 @@ Return a list of ``(regex, async_view_function)`` pairs, something like this: import html - async def hello_from(scope): - name = scope["url_route"]["kwargs"]["name"] + async def hello_from(request): + name = request.url_vars["name"] return Response.html("Hello from {}".format( html.escape(name) )) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 57803178..a0f7441b 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -152,8 +152,8 @@ def register_routes(): (await datasette.get_database().execute("select 1 + 1")).first()[0] ) - async def two(request, scope): - name = scope["url_route"]["kwargs"]["name"] + async def two(request): + name = request.url_vars["name"] greeting = request.args.get("greeting") return Response.text("{} {}".format(greeting, name)) diff --git a/tests/test_internals_request.py b/tests/test_internals_request.py index 433b23d5..8367a693 100644 --- a/tests/test_internals_request.py +++ b/tests/test_internals_request.py @@ -44,3 +44,20 @@ def test_request_args(): assert 2 == len(request.args) with pytest.raises(KeyError): request.args["missing"] + + +def test_request_url_vars(): + scope = { + "http_version": "1.1", + "method": "POST", + "path": "/", + "raw_path": b"/", + "query_string": b"", + "scheme": "http", + "type": "http", + "headers": [[b"content-type", b"application/x-www-form-urlencoded"]], + } + assert {} == Request(scope, None).url_vars + assert {"name": "cleo"} == Request( + dict(scope, url_route={"kwargs": {"name": "cleo"}}), None + ).url_vars From 5a6a73e3190cac103906b479d56129413e5ef190 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 21:37:35 -0700 Subject: [PATCH 0095/1871] Replace os.urandom(32).hex() with secrets.token_hex(32) --- datasette/app.py | 5 +++-- docs/config.rst | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 120091f7..633ca4fe 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -8,6 +8,7 @@ import itertools import json import os import re +import secrets import sys import threading import traceback @@ -186,7 +187,7 @@ class Datasette: assert config_dir is None or isinstance( config_dir, Path ), "config_dir= should be a pathlib.Path" - self._secret = secret or os.urandom(32).hex() + self._secret = secret or secrets.token_hex(32) self.files = tuple(files) + tuple(immutables or []) if config_dir: self.files += tuple([str(p) for p in config_dir.glob("*.db")]) @@ -299,7 +300,7 @@ class Datasette: self._register_renderers() self._permission_checks = collections.deque(maxlen=200) - self._root_token = os.urandom(32).hex() + self._root_token = secrets.token_hex(32) def sign(self, value, namespace="default"): return URLSafeSerializer(self._secret, namespace).dumps(value) diff --git a/docs/config.rst b/docs/config.rst index 56b38613..ab14ea7b 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -302,7 +302,7 @@ Or:: One way to generate a secure random secret is to use Python like this:: - $ python3 -c 'import os; print(os.urandom(32).hex())' + $ python3 -c 'import secrets; print(secrets.token_hex(32))' cdb19e94283a20f9d42cca50c5a4871c0aa07392db308755d60a1a5b9bb0fa52 Plugin authors make use of this signing mechanism in their plugins using :ref:`datasette_sign` and :ref:`datasette_unsign`. 
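Since signed values are derived from that secret, passing an explicit ``secret=`` when constructing Datasette keeps cookies and other signed tokens valid across restarts. A rough sketch of the sign/unsign round trip plugins rely on, assuming ``unsign()`` mirrors the ``sign()`` signature shown above:

.. code-block:: python

    from datasette.app import Datasette

    # Sketch: pass an explicit secret; if omitted, a fresh random one is
    # generated with secrets.token_hex(32) and previously signed values
    # will not survive a restart.
    datasette = Datasette(["fixtures.db"], secret="0f" * 32)

    signed = datasette.sign({"id": "cleopaws"}, "actor")
    assert datasette.unsign(signed, "actor") == {"id": "cleopaws"}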
From eb3ec279becd3b81e5fa509244711548c86f434f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 8 Jun 2020 23:33:06 -0700 Subject: [PATCH 0096/1871] Test for anonymous: true, refs #825 --- tests/test_utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_utils.py b/tests/test_utils.py index 975ed0fd..4bade18b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -466,6 +466,7 @@ def test_multi_params(data, should_raise): [ ({"id": "root"}, None, True), ({"id": "root"}, {}, False), + ({"anonymous": True}, {"anonymous": True}, True), (None, None, True), (None, {}, False), (None, {"id": "root"}, False), From fec750435d405ac06cb61a5ddeda7317ef24843a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 07:01:23 -0700 Subject: [PATCH 0097/1871] Support anonymous: true in actor_matches_allow, refs #825 --- datasette/utils/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 49268638..d8cde95a 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -873,12 +873,12 @@ def actor_matches_allow(actor, allow): for key, values in allow.items(): if values == "*" and key in actor: return True - if isinstance(values, str): + if not isinstance(values, list): values = [values] actor_values = actor.get(key) if actor_values is None: return False - if isinstance(actor_values, str): + if not isinstance(actor_values, list): actor_values = [actor_values] actor_values = set(actor_values) if actor_values.intersection(values): From eefeafaa27a16af3bcb3150b4fe1ef6ee8d5c19f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 07:09:39 -0700 Subject: [PATCH 0098/1871] Removed unused import --- datasette/views/database.py | 1 - 1 file changed, 1 deletion(-) diff --git a/datasette/views/database.py b/datasette/views/database.py index ee99bc2d..4fab2cfb 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -2,7 +2,6 @@ import os import jinja2 from datasette.utils import ( - actor_matches_allow, check_visibility, to_css_class, validate_sql_select, From fa87d16612ff671683f35ecc5f5e36af007599e4 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 07:10:46 -0700 Subject: [PATCH 0099/1871] Clearer docs for actor_matches_allow --- datasette/utils/__init__.py | 3 ++- docs/authentication.rst | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index d8cde95a..5873fcaa 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -867,7 +867,8 @@ async def async_call_with_supported_arguments(fn, **kwargs): def actor_matches_allow(actor, allow): - actor = actor or {} + if actor is None: + actor = {"anonymous": True} if allow is None: return True for key, values in allow.items(): diff --git a/docs/authentication.rst b/docs/authentication.rst index f7281db4..04564886 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -239,7 +239,7 @@ To limit this ability for just one specific database, use this: actor_matches_allow() ===================== -Plugins that wish to implement the same permissions scheme as canned queries can take advantage of the ``datasette.utils.actor_matches_allow(actor, allow)`` function: +Plugins that wish to implement this same ``"allow"`` block permissions scheme can take advantage of the ``datasette.utils.actor_matches_allow(actor, allow)`` function: .. 
code-block:: python From 3aa87eeaf21083e32d9e02bd857fd44707dc4113 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 07:58:12 -0700 Subject: [PATCH 0100/1871] Documentation no loger suggests that actor["id"] is required, closes #823 --- docs/authentication.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 04564886..153466ad 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -17,7 +17,7 @@ Through plugins, Datasette can support both authenticated users (with cookies) a Every request to Datasette has an associated actor value, available in the code as ``request.actor``. This can be ``None`` for unauthenticated requests, or a JSON compatible Python dictionary for authenticated users or API agents. -The only required field in an actor is ``"id"``, which must be a string. Plugins may decide to add any other fields to the actor dictionary. +The actor dictionary can be any shape - the design of that data structure is left up to the plugins. A useful convention is to include an ``"id"`` string, as demonstrated by the "root" actor below. Plugins can use the :ref:`plugin_actor_from_request` hook to implement custom logic for authenticating an actor based on the incoming HTTP request. From 70dd14876e305ddb15263ec0687e23bef5b1ab78 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 09:04:46 -0700 Subject: [PATCH 0101/1871] Improved documentation for permissions, refs #699 --- docs/authentication.rst | 37 +++++++++++++++++++++++++++++-------- docs/sql_queries.rst | 6 ++++++ 2 files changed, 35 insertions(+), 8 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 153466ad..e26c8fc5 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -4,7 +4,7 @@ Authentication and permissions ================================ -Datasette does not require authentication by default. Any visitor to a Datasette instance can explore the full data and execute SQL queries. +Datasette does not require authentication by default. Any visitor to a Datasette instance can explore the full data and execute read-only SQL queries. Datasette's plugin system can be used to add many different styles of authentication, such as user accounts, single sign-on or API keys. @@ -49,10 +49,20 @@ The URL on the first line includes a one-use token which can be used to sign in .. _authentication_permissions: -Checking permission -=================== +Permissions +=========== -Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. This method is also used by Datasette core code itself, which allows plugins to help make decisions on which actions are allowed by implementing the :ref:`plugin_permission_allowed` plugin hook. +Datasette has an extensive permissions system built-in, which can be further extended and customized by plugins. + +The key question the permissions system answers is this: + + Is this **actor** allowed to perform this **action**, optionally against this particular **resource**? + +**Actors** are :ref:`described above `. + +An **action** is a string describing the action the actor would like to perfom. A full list is :ref:`provided below ` - examples include ``view-table`` and ``execute-sql``. + +A **resource** is the item the actor wishes to interact with - for example a specific database or table. 
Some actions, such as ``permissions-debug``, are not associated with a particular resource. .. _authentication_permissions_metadata: @@ -115,7 +125,7 @@ You can provide access to any user that has "developer" as one of their roles li } } -Note that "roles" is not a concept that is baked into Datasette - it's more of a convention that plugins can choose to implement and act on. +Note that "roles" is not a concept that is baked into Datasette - it's a convention that plugins can choose to implement and act on. If you want to provide access to any actor with a value for a specific key, use ``"*"``. For example, to spceify that a query can be accessed by any logged-in user use this: @@ -171,7 +181,7 @@ To limit access to the ``users`` table in your ``bakery.db`` database: } } -This works for SQL views as well - you can treat them as if they are tables. +This works for SQL views as well - you can list their names in the ``"tables"`` block above in the same way as regular tables. .. warning:: Restricting access to tables and views in this way will NOT prevent users from querying them using arbitrary SQL queries, `like this `__ for example. @@ -183,6 +193,8 @@ This works for SQL views as well - you can treat them as if they are tables. Controlling access to specific canned queries --------------------------------------------- +:ref:`canned_queries` allow you to configure named SQL queries in your ``metadata.json`` that can be executed by users. These queries can be set up to both read and write to the database, so controlling who can execute them can be important. + To limit access to the ``add_name`` canned query in your ``dogs.db`` database to just the :ref:`root user`: .. code-block:: json @@ -234,6 +246,15 @@ To limit this ability for just one specific database, use this: } } +.. _permissions_plugins: + +Checking permissions in plugins +=============================== + +Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. + +Datasette core performs a number of permission checks, :ref:`documented below `. Plugins can implement the :ref:`plugin_permission_allowed` plugin hook to participate in decisions about whether an actor should be able to perform a specified action. + .. _authentication_actor_matches_allow: actor_matches_allow() @@ -264,8 +285,8 @@ This is designed to help administrators and plugin authors understand exactly ho .. _permissions: -Permissions -=========== +Built-in permissions +==================== This section lists all of the permission checks that are carried out by Datasette core, along with the ``resource`` if it was passed. diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index db72deb7..a73f6bc2 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -1,3 +1,5 @@ +.. _sql: + Running SQL queries =================== @@ -22,6 +24,8 @@ using your browser back button. You can also retrieve the results of any query as JSON by adding ``.json`` to the base URL. +.. _sql_parameters: + Named parameters ---------------- @@ -51,6 +55,8 @@ statements can be used to change database settings at runtime. If you need to include the string "pragma" in a query you can do so safely using a named parameter. +.. 
_sql_views: + Views ----- From 7633b9ab249b2dce5ee0b4fcf9542c13a1703ef0 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 10:01:03 -0700 Subject: [PATCH 0102/1871] unauthenticated: true method plus allow block docs, closes #825 --- datasette/utils/__init__.py | 5 +- docs/authentication.rst | 142 +++++++++++++++++++++++++----------- docs/internals.rst | 11 ++- tests/test_auth.py | 24 ------ tests/test_permissions.py | 37 ++++++++++ tests/test_utils.py | 10 ++- 6 files changed, 154 insertions(+), 75 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 5873fcaa..51373c46 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -867,10 +867,11 @@ async def async_call_with_supported_arguments(fn, **kwargs): def actor_matches_allow(actor, allow): - if actor is None: - actor = {"anonymous": True} + if actor is None and allow and allow.get("unauthenticated") is True: + return True if allow is None: return True + actor = actor or {} for key, values in allow.items(): if values == "*" and key in actor: return True diff --git a/docs/authentication.rst b/docs/authentication.rst index e26c8fc5..a9537a20 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -64,6 +64,91 @@ An **action** is a string describing the action the actor would like to perfom. A **resource** is the item the actor wishes to interact with - for example a specific database or table. Some actions, such as ``permissions-debug``, are not associated with a particular resource. +Datasette's built-in view permissions (``view-database``, ``view-table`` etc) default to *allow* - unless you :ref:`configure additional permission rules ` unauthenticated users will be allowed to access content. + +Permissions with potentially harmful effects should default to *deny*. Plugin authors should account for this when designing new plugins - for example, the `datasette-upload-csvs `__ plugin defaults to deny so that installations don't accidentally allow unauthenticated users to create new tables by uploading a CSV file. + +.. _authentication_permissions_allow: + +Defining permissions with "allow" blocks +---------------------------------------- + +The standard way to define permissions in Datasette is to use an ``"allow"`` block. This is a JSON document describing which actors are allowed to perfom a permission. + +The most basic form of allow block is this: + +.. code-block:: json + + { + "allow": { + "id": "root" + } + } + +This will match any actors with an ``"id"`` property of ``"root"`` - for example, an actor that looks like this: + +.. code-block:: json + + { + "id": "root", + "name": "Root User" + } + +Allow keys can provide a list of values. These will match any actor that has any of those values. + +.. code-block:: json + + { + "allow": { + "id": ["simon", "cleopaws"] + } + } + +This will match any actor with an ``"id"`` of either ``"simon"`` or ``"cleopaws"``. + +Actors can have properties that feature a list of values. These will be matched against the list of values in an allow block. Consider the following actor: + +.. code-block:: json + + { + "id": "simon", + "roles": ["staff", "developer"] + } + +This allow block will provide access to any actor that has ``"developer"`` as one of their roles: + +.. code-block:: json + + { + "allow": { + "roles": ["developer"] + } + } + +Note that "roles" is not a concept that is baked into Datasette - it's a convention that plugins can choose to implement and act on. 
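Because the matching logic lives in ``datasette.utils.actor_matches_allow()`` (shown above), the allow-block rules described here can be checked directly in Python. A short sketch mirroring the cases covered in ``tests/test_utils.py``:

.. code-block:: python

    from datasette.utils import actor_matches_allow

    # A single value matches actors with exactly that value
    assert actor_matches_allow({"id": "root"}, {"id": "root"})
    # A list matches any actor holding at least one of the values
    assert actor_matches_allow(
        {"id": "simon", "roles": ["staff", "developer"]}, {"roles": ["developer"]}
    )
    # "*" matches any actor that has the key at all
    assert actor_matches_allow({"id": "root"}, {"id": "*"})
    # unauthenticated: true matches requests with no actor
    assert actor_matches_allow(None, {"unauthenticated": True})
    # allow=None allows everyone; an empty allow block allows nobody
    assert actor_matches_allow(None, None)
    assert not actor_matches_allow(None, {})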
+ +If you want to provide access to any actor with a value for a specific key, use ``"*"``. For example, to match any logged-in user specify the following: + +.. code-block:: json + + { + "allow": { + "id": "*" + } + } + +You can specify that unauthenticated actors (from anynomous HTTP requests) should be allowed access using the special ``"unauthenticated": true`` key in an allow block: + +.. code-block:: json + + { + "allow": { + "unauthenticated": true + } + } + +Allow keys act as an "or" mechanism. An actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block. + .. _authentication_permissions_metadata: Configuring permissions in metadata.json @@ -96,49 +181,6 @@ Here's how to restrict access to your entire Datasette instance to just the ``"i } } -To allow any of the actors with an ``id`` matching a specific list of values, use this: - -.. code-block:: json - - { - "allow": { - "id": ["simon", "cleopaws"] - } - } - -This works for other keys as well. Imagine an actor that looks like this: - -.. code-block:: json - - { - "id": "simon", - "roles": ["staff", "developer"] - } - -You can provide access to any user that has "developer" as one of their roles like so: - -.. code-block:: json - - { - "allow": { - "roles": ["developer"] - } - } - -Note that "roles" is not a concept that is baked into Datasette - it's a convention that plugins can choose to implement and act on. - -If you want to provide access to any actor with a value for a specific key, use ``"*"``. For example, to spceify that a query can be accessed by any logged-in user use this: - -.. code-block:: json - - { - "allow": { - "id": "*" - } - } - -These keys act as an "or" mechanism. A actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block. - .. _authentication_permissions_database: Controlling access to specific databases @@ -297,6 +339,8 @@ view-instance Top level permission - Actor is allowed to view any pages within this instance, starting at https://latest.datasette.io/ +Default *allow*. + .. _permissions_view_database: view-database @@ -307,6 +351,8 @@ Actor is allowed to view a database page, e.g. https://latest.datasette.io/fixtu ``resource`` - string The name of the database +Default *allow*. + .. _permissions_view_database_download: view-database-download @@ -317,6 +363,8 @@ Actor is allowed to download a database, e.g. https://latest.datasette.io/fixtur ``resource`` - string The name of the database +Default *allow*. + .. _permissions_view_table: view-table @@ -327,6 +375,8 @@ Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.i ``resource`` - tuple: (string, string) The name of the database, then the name of the table +Default *allow*. + .. _permissions_view_query: view-query @@ -337,6 +387,8 @@ Actor is allowed to view a :ref:`canned query ` page, e.g. https ``resource`` - tuple: (string, string) The name of the database, then the name of the canned query +Default *allow*. + .. _permissions_execute_sql: execute-sql @@ -347,9 +399,13 @@ Actor is allowed to run arbitrary SQL queries against a specific database, e.g. ``resource`` - string The name of the database +Default *allow*. + .. _permissions_permissions_debug: permissions-debug ----------------- Actor is allowed to view the ``/-/permissions`` debug page. + +Default *deny*. 
\ No newline at end of file diff --git a/docs/internals.rst b/docs/internals.rst index df21eb09..8136d8ac 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -184,11 +184,16 @@ await .permission_allowed(actor, action, resource=None, default=False) ``resource`` - string, optional The resource, e.g. the name of the table. Only some permissions apply to a resource. -Check if the given actor has permission to perform the given action on the given resource. This uses plugins that implement the :ref:`plugin_permission_allowed` plugin hook to decide if the action is allowed or not. +``default`` - optional, True or False + Should this permission check be default allow or default deny. -If none of the plugins express an opinion, the return value will be the ``default`` argument. This is deny, but you can pass ``default=True`` to default allow instead. +Check if the given actor has :ref:`permission ` to perform the given action on the given resource. -See :ref:`permissions` for a full list of permissions included in Datasette core. +Some permission checks are carried out against :ref:`rules defined in metadata.json `, while other custom permissions may be decided by plugins that implement the :ref:`plugin_permission_allowed` plugin hook. + +If neither ``metadata.json`` nor any of the plugins provide an answer to the permission query the ``default`` argument will be returned. + +See :ref:`permissions` for a full list of permission actions included in Datasette core. .. _datasette_get_database: diff --git a/tests/test_auth.py b/tests/test_auth.py index 40dc2587..0e5563a3 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,5 +1,4 @@ from .fixtures import app_client -from bs4 import BeautifulSoup as Soup def test_auth_token(app_client): @@ -20,26 +19,3 @@ def test_actor_cookie(app_client): cookie = app_client.ds.sign({"id": "test"}, "actor") response = app_client.get("/", cookies={"ds_actor": cookie}) assert {"id": "test"} == app_client.ds._last_request.scope["actor"] - - -def test_permissions_debug(app_client): - app_client.ds._permission_checks.clear() - assert 403 == app_client.get("/-/permissions").status - # With the cookie it should work - cookie = app_client.ds.sign({"id": "root"}, "actor") - response = app_client.get("/-/permissions", cookies={"ds_actor": cookie}) - # Should show one failure and one success - soup = Soup(response.body, "html.parser") - check_divs = soup.findAll("div", {"class": "check"}) - checks = [ - { - "action": div.select_one(".check-action").text, - "result": bool(div.select(".check-result-true")), - "used_default": bool(div.select(".check-used-default")), - } - for div in check_divs - ] - assert [ - {"action": "permissions-debug", "result": True, "used_default": False}, - {"action": "permissions-debug", "result": False, "used_default": True}, - ] == checks diff --git a/tests/test_permissions.py b/tests/test_permissions.py index d8c98825..c088facd 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -1,4 +1,5 @@ from .fixtures import app_client, assert_permissions_checked, make_app_client +from bs4 import BeautifulSoup as Soup import pytest @@ -283,3 +284,39 @@ def test_permissions_checked(app_client, path, permissions): response = app_client.get(path) assert response.status in (200, 403) assert_permissions_checked(app_client.ds, permissions) + + +def test_permissions_debug(app_client): + app_client.ds._permission_checks.clear() + assert 403 == app_client.get("/-/permissions").status + # With the cookie it should work + cookie = 
app_client.ds.sign({"id": "root"}, "actor") + response = app_client.get("/-/permissions", cookies={"ds_actor": cookie}) + # Should show one failure and one success + soup = Soup(response.body, "html.parser") + check_divs = soup.findAll("div", {"class": "check"}) + checks = [ + { + "action": div.select_one(".check-action").text, + "result": bool(div.select(".check-result-true")), + "used_default": bool(div.select(".check-used-default")), + } + for div in check_divs + ] + assert [ + {"action": "permissions-debug", "result": True, "used_default": False}, + {"action": "permissions-debug", "result": False, "used_default": True}, + ] == checks + + +@pytest.mark.parametrize("allow,expected", [ + ({"id": "root"}, 403), + ({"id": "root", "unauthenticated": True}, 200), +]) +def test_allow_unauthenticated(allow, expected): + with make_app_client( + metadata={ + "allow": allow + } + ) as client: + assert expected == client.get("/").status diff --git a/tests/test_utils.py b/tests/test_utils.py index 4bade18b..0ffe8ae6 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -464,12 +464,16 @@ def test_multi_params(data, should_raise): @pytest.mark.parametrize( "actor,allow,expected", [ - ({"id": "root"}, None, True), - ({"id": "root"}, {}, False), - ({"anonymous": True}, {"anonymous": True}, True), (None, None, True), (None, {}, False), (None, {"id": "root"}, False), + ({"id": "root"}, None, True), + ({"id": "root"}, {}, False), + ({"id": "simon", "staff": True}, {"staff": True}, True), + ({"id": "simon", "staff": False}, {"staff": True}, False), + # Special case for "unauthenticated": true + (None, {"unauthenticated": True}, True), + (None, {"unauthenticated": False}, False), # Special "*" value for any key: ({"id": "root"}, {"id": "*"}, True), ({}, {"id": "*"}, False), From 5ef3b7b0c9b9e318af711bbd03e84af2abffdc29 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 12:25:44 -0700 Subject: [PATCH 0103/1871] Applied Black Refs #825 --- tests/test_permissions.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/tests/test_permissions.py b/tests/test_permissions.py index c088facd..477b8160 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -309,14 +309,10 @@ def test_permissions_debug(app_client): ] == checks -@pytest.mark.parametrize("allow,expected", [ - ({"id": "root"}, 403), - ({"id": "root", "unauthenticated": True}, 200), -]) +@pytest.mark.parametrize( + "allow,expected", + [({"id": "root"}, 403), ({"id": "root", "unauthenticated": True}, 200),], +) def test_allow_unauthenticated(allow, expected): - with make_app_client( - metadata={ - "allow": allow - } - ) as client: + with make_app_client(metadata={"allow": allow}) as client: assert expected == client.get("/").status From 56eb80a45925d804b443701e2c86315f194b5f7d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 12:32:52 -0700 Subject: [PATCH 0104/1871] Documented CSRF protection, closes #827 --- docs/internals.rst | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/docs/internals.rst b/docs/internals.rst index 8136d8ac..d92c985f 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -51,7 +51,7 @@ The request object is passed to various plugin hooks. It represents an incoming The object also has one awaitable method: ``await request.post_vars()`` - dictionary - Returns a dictionary of form variables that were submitted in the request body via ``POST``. 
+ Returns a dictionary of form variables that were submitted in the request body via ``POST``. Don't forget to read about :ref:`internals_csrf`! .. _internals_multiparams: @@ -500,3 +500,17 @@ The ``Database`` class also provides properties and methods for introspecting th } ] } + + +.. _internals_csrf: + +CSRF protection +~~~~~~~~~~~~~~~ + +Datasette uses `asgi-csrf `__ to guard against CSRF attacks on form POST submissions. Users receive a ``ds_csrftoken`` cookie which is compared against the ``csrftoken`` form field (or ``x-csrftoken`` HTTP header) for every incoming request. + +If your plugin implements a ```` anywhere you will need to include that token. You can do so with the following template snippet: + +.. code-block:: html + + From f240970b834d595947c8d27d46d1f19b9119376d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 12:57:54 -0700 Subject: [PATCH 0105/1871] Fixed tests/fixtures.py, closes #804 --- docs/contributing.rst | 13 +++++- tests/fixtures.py | 97 ++++++++++++++++++++++++------------------- 2 files changed, 65 insertions(+), 45 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index da4dc35a..9c44d177 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -70,11 +70,20 @@ You can also use the ``fixtures.py`` script to recreate the testing version of ` python tests/fixtures.py fixtures.db fixtures-metadata.json -(You may need to delete ``fixtures.db`` before running this command.) +Or to output the plugins used by the tests, run this:: + + python tests/fixtures.py fixtures.db fixtures-metadata.json fixtures-plugins + Test tables written to fixtures.db + - metadata written to fixtures-metadata.json + Wrote plugin: fixtures-plugins/register_output_renderer.py + Wrote plugin: fixtures-plugins/view_name.py + Wrote plugin: fixtures-plugins/my_plugin.py + Wrote plugin: fixtures-plugins/messages_output_renderer.py + Wrote plugin: fixtures-plugins/my_plugin_2.py Then run Datasette like this:: - datasette fixtures.db -m fixtures-metadata.json + datasette fixtures.db -m fixtures-metadata.json --plugins-dir=fixtures-plugins/ .. 
_contributing_documentation: diff --git a/tests/fixtures.py b/tests/fixtures.py index a51a869d..1eb1bb6e 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -2,6 +2,7 @@ from datasette.app import Datasette from datasette.utils import sqlite3, MultiParams from asgiref.testing import ApplicationCommunicator from asgiref.sync import async_to_sync +import click import contextlib from http.cookies import SimpleCookie import itertools @@ -813,49 +814,6 @@ INSERT INTO "searchable_fts" (rowid, text1, text2) SELECT rowid, text1, text2 FROM searchable; """ -if __name__ == "__main__": - # Can be called with data.db OR data.db metadata.json - arg_index = -1 - db_filename = sys.argv[arg_index] - metadata_filename = None - plugins_path = None - if db_filename.endswith("/"): - # It's the plugins dir - plugins_path = db_filename - arg_index -= 1 - db_filename = sys.argv[arg_index] - if db_filename.endswith(".json"): - metadata_filename = db_filename - arg_index -= 1 - db_filename = sys.argv[arg_index] - if db_filename.endswith(".db"): - conn = sqlite3.connect(db_filename) - conn.executescript(TABLES) - for sql, params in TABLE_PARAMETERIZED_SQL: - with conn: - conn.execute(sql, params) - print("Test tables written to {}".format(db_filename)) - if metadata_filename: - open(metadata_filename, "w").write(json.dumps(METADATA)) - print("- metadata written to {}".format(metadata_filename)) - if plugins_path: - path = pathlib.Path(plugins_path) - if not path.exists(): - path.mkdir() - for filename, content in ( - ("my_plugin.py", PLUGIN1), - ("my_plugin_2.py", PLUGIN2), - ): - filepath = path / filename - filepath.write_text(content) - print(" Wrote plugin: {}".format(filepath)) - else: - print( - "Usage: {} db_to_write.db [metadata_to_write.json] [plugins-dir/]".format( - sys.argv[0] - ) - ) - def assert_permissions_checked(datasette, actions): # actions is a list of "action" or (action, resource) tuples @@ -873,3 +831,56 @@ def assert_permissions_checked(datasette, actions): """.format( action, resource, json.dumps(list(datasette._permission_checks), indent=4), ) + + +@click.command() +@click.argument( + "db_filename", + default="fixtures.db", + type=click.Path(file_okay=True, dir_okay=False), +) +@click.argument("metadata", required=False) +@click.argument( + "plugins_path", type=click.Path(file_okay=False, dir_okay=True), required=False +) +@click.option( + "--recreate", + is_flag=True, + default=False, + help="Delete and recreate database if it exists", +) +def cli(db_filename, metadata, plugins_path, recreate): + "Write out the fixtures database used by Datasette's test suite" + if metadata and not metadata.endswith(".json"): + raise click.ClickException("Metadata should end with .json") + if not db_filename.endswith(".db"): + raise click.ClickException("Database file should end with .db") + if pathlib.Path(db_filename).exists(): + if not recreate: + raise click.ClickException( + "{} already exists, use --recreate to reset it".format(db_filename) + ) + else: + pathlib.Path(db_filename).unlink() + conn = sqlite3.connect(db_filename) + conn.executescript(TABLES) + for sql, params in TABLE_PARAMETERIZED_SQL: + with conn: + conn.execute(sql, params) + print("Test tables written to {}".format(db_filename)) + if metadata: + open(metadata, "w").write(json.dumps(METADATA, indent=4)) + print("- metadata written to {}".format(metadata)) + if plugins_path: + path = pathlib.Path(plugins_path) + if not path.exists(): + path.mkdir() + test_plugins = pathlib.Path(__file__).parent / "plugins" + for filepath in 
test_plugins.glob("*.py"): + newpath = path / filepath.name + newpath.write_text(filepath.open().read()) + print(" Wrote plugin: {}".format(newpath)) + + +if __name__ == "__main__": + cli() From 008e2f63c217aa066027a872ee706b07bd084857 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 15:19:37 -0700 Subject: [PATCH 0106/1871] response.set_cookie(), closes #795 --- datasette/actor_auth_cookie.py | 1 - datasette/app.py | 15 ++------- datasette/utils/asgi.py | 53 +++++++++++++++++++++++++++++--- datasette/views/special.py | 14 ++------- docs/internals.rst | 30 ++++++++++++++++++ tests/test_internals_response.py | 26 ++++++++++++++++ 6 files changed, 108 insertions(+), 31 deletions(-) diff --git a/datasette/actor_auth_cookie.py b/datasette/actor_auth_cookie.py index f3a0f306..a2aa6889 100644 --- a/datasette/actor_auth_cookie.py +++ b/datasette/actor_auth_cookie.py @@ -1,6 +1,5 @@ from datasette import hookimpl from itsdangerous import BadSignature -from http.cookies import SimpleCookie @hookimpl diff --git a/datasette/app.py b/datasette/app.py index 633ca4fe..71fa9afb 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -3,7 +3,6 @@ import asgi_csrf import collections import datetime import hashlib -from http.cookies import SimpleCookie import itertools import json import os @@ -442,19 +441,9 @@ class Datasette: def _write_messages_to_response(self, request, response): if getattr(request, "_messages", None): # Set those messages - cookie = SimpleCookie() - cookie["ds_messages"] = self.sign(request._messages, "messages") - cookie["ds_messages"]["path"] = "/" - # TODO: Co-exist with existing set-cookie headers - assert "set-cookie" not in response.headers - response.headers["set-cookie"] = cookie.output(header="").lstrip() + response.set_cookie("ds_messages", self.sign(request._messages, "messages")) elif getattr(request, "_messages_should_clear", False): - cookie = SimpleCookie() - cookie["ds_messages"] = "" - cookie["ds_messages"]["path"] = "/" - # TODO: Co-exist with existing set-cookie headers - assert "set-cookie" not in response.headers - response.headers["set-cookie"] = cookie.output(header="").lstrip() + response.set_cookie("ds_messages", "", expires=0, max_age=0) def _show_messages(self, request): if getattr(request, "_messages", None): diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index cdd6b148..5a152570 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -4,10 +4,15 @@ from mimetypes import guess_type from urllib.parse import parse_qs, urlunparse, parse_qsl from pathlib import Path from html import escape -from http.cookies import SimpleCookie +from http.cookies import SimpleCookie, Morsel import re import aiofiles +# Workaround for adding samesite support to pre 3.8 python +Morsel._reserved["samesite"] = "SameSite" +# Thanks, Starlette: +# https://github.com/encode/starlette/blob/519f575/starlette/responses.py#L17 + class NotFound(Exception): pass @@ -17,6 +22,9 @@ class Forbidden(Exception): pass +SAMESITE_VALUES = ("strict", "lax", "none") + + class Request: def __init__(self, scope, receive): self.scope = scope @@ -370,20 +378,24 @@ class Response: self.body = body self.status = status self.headers = headers or {} + self._set_cookie_headers = [] self.content_type = content_type async def asgi_send(self, send): headers = {} headers.update(self.headers) headers["content-type"] = self.content_type + raw_headers = [ + [key.encode("utf-8"), value.encode("utf-8")] + for key, value in headers.items() + ] + for set_cookie in 
self._set_cookie_headers: + raw_headers.append([b"set-cookie", set_cookie.encode("utf-8")]) await send( { "type": "http.response.start", "status": self.status, - "headers": [ - [key.encode("utf-8"), value.encode("utf-8")] - for key, value in headers.items() - ], + "headers": raw_headers, } ) body = self.body @@ -391,6 +403,37 @@ class Response: body = body.encode("utf-8") await send({"type": "http.response.body", "body": body}) + def set_cookie( + self, + key, + value="", + max_age=None, + expires=None, + path="/", + domain=None, + secure=False, + httponly=False, + samesite="lax", + ): + assert samesite in SAMESITE_VALUES, "samesite should be one of {}".format( + SAMESITE_VALUES + ) + cookie = SimpleCookie() + cookie[key] = value + for prop_name, prop_value in ( + ("max_age", max_age), + ("expires", expires), + ("path", path), + ("domain", domain), + ("samesite", samesite), + ): + if prop_value is not None: + cookie[key][prop_name.replace("_", "-")] = prop_value + for prop_name, prop_value in (("secure", secure), ("httponly", httponly)): + if prop_value: + cookie[key][prop_name] = True + self._set_cookie_headers.append(cookie.output(header="").strip()) + @classmethod def html(cls, body, status=200, headers=None): return cls( diff --git a/datasette/views/special.py b/datasette/views/special.py index 7a5fbe21..7f4284a1 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -1,7 +1,6 @@ import json from datasette.utils.asgi import Response from .base import BaseView -from http.cookies import SimpleCookie import secrets @@ -62,17 +61,8 @@ class AuthTokenView(BaseView): return Response("Root token has already been used", status=403) if secrets.compare_digest(token, self.ds._root_token): self.ds._root_token = None - cookie = SimpleCookie() - cookie["ds_actor"] = self.ds.sign({"id": "root"}, "actor") - cookie["ds_actor"]["path"] = "/" - response = Response( - body="", - status=302, - headers={ - "Location": "/", - "set-cookie": cookie.output(header="").lstrip(), - }, - ) + response = Response.redirect("/") + response.set_cookie("ds_actor", self.ds.sign({"id": "root"}, "actor")) return response else: return Response("Invalid token", status=403) diff --git a/docs/internals.rst b/docs/internals.rst index d92c985f..7978e3d7 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -131,6 +131,36 @@ Each of these responses will use the correct corresponding content-type - ``text Each of the helper methods take optional ``status=`` and ``headers=`` arguments, documented above. +.. _internals_response_set_cookie: + +Setting cookies with response.set_cookie() +------------------------------------------ + +To set cookies on the response, use the ``response.set_cookie(...)`` method. The method signature looks like this: + +.. code-block:: python + + def set_cookie( + self, + key, + value="", + max_age=None, + expires=None, + path="/", + domain=None, + secure=False, + httponly=False, + samesite="lax", + ): + +You can use this with :ref:`datasette.sign() ` to set signed cookies. Here's how you would set the ``ds_actor`` cookie for use with Datasette :ref:`authentication `: + +.. code-block:: python + + response = Response.redirect("/") + response.set_cookie("ds_actor", datasette.sign({"id": "cleopaws"}, "actor")) + return response + .. 
_internals_datasette: Datasette class diff --git a/tests/test_internals_response.py b/tests/test_internals_response.py index 7c11f858..820b20b2 100644 --- a/tests/test_internals_response.py +++ b/tests/test_internals_response.py @@ -1,4 +1,5 @@ from datasette.utils.asgi import Response +import pytest def test_response_html(): @@ -26,3 +27,28 @@ def test_response_redirect(): response = Response.redirect("/foo") assert 302 == response.status assert "/foo" == response.headers["Location"] + + +@pytest.mark.asyncio +async def test_response_set_cookie(): + events = [] + + async def send(event): + events.append(event) + + response = Response.redirect("/foo") + response.set_cookie("foo", "bar", max_age=10, httponly=True) + await response.asgi_send(send) + + assert [ + { + "type": "http.response.start", + "status": 302, + "headers": [ + [b"Location", b"/foo"], + [b"content-type", b"text/plain"], + [b"set-cookie", b"foo=bar; HttpOnly; Max-Age=10; Path=/; SameSite=lax"], + ], + }, + {"type": "http.response.body", "body": b""}, + ] == events From b5f04f42ab56be90735e1df9660e334089fbd6aa Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 15:32:24 -0700 Subject: [PATCH 0107/1871] ds_actor cookie documentation, closes #826 --- docs/authentication.rst | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index a9537a20..f511e373 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -315,8 +315,8 @@ The currently authenticated actor is made available to plugins as ``request.acto .. _PermissionsDebugView: -Permissions Debug -================= +The permissions debug tool +========================== The debug tool at ``/-/permissions`` is only available to the :ref:`authenticated root user ` (or any actor granted the ``permissions-debug`` action according to a plugin). @@ -324,6 +324,22 @@ It shows the thirty most recent permission checks that have been carried out by This is designed to help administrators and plugin authors understand exactly how permission checks are being carried out, in order to effectively configure Datasette's permission system. +.. _authentication_ds_actor: + +The ds_actor cookie +=================== + +Datasette includes a default authentication plugin which looks for a signed ``ds_actor`` cookie containing a JSON actor dictionary. This is how the :ref:`root actor ` mechanism works. + +Authentication plugins can set signed ``ds_actor`` cookies themselves like so: + +.. code-block:: python + + response = Response.redirect("/") + response.set_cookie("ds_actor", datasette.sign({"id": "cleopaws"}, "actor")) + return response + +Note that you need to pass ``"actor"`` as the namespace to :ref:`datasette_sign`. .. _permissions: From b3919d8059a519eb7709f0b4fa1561fec219bc98 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 16:03:42 -0700 Subject: [PATCH 0108/1871] Mostly complete release notes for 0.44, refs #806 --- docs/changelog.rst | 140 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 140 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 8b6272cb..e4e6057b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,146 @@ Changelog ========= +.. _v0_44: + +0.44 (2020-06-??) +----------------- + +Authentication and permissions, writable canned queries, flash messages, new plugin hooks and more. 
+ +Authentication +~~~~~~~~~~~~~~ + +Prior to this release the Datasette ecosystem has treated authentication as exclusively the realm of plugins, most notably through `datasette-auth-github `__. + +0.44 introduces :ref:`authentication` as core Datasette concepts (`#699 `__). This makes it easier for different plugins can share responsibility for authenticating requests - you might have one plugin that handles user accounts and another one that allows automated access via API keys, for example. + +You'll need to install plugins if you want full user accounts, but default Datasette can now authenticate a single root user with the new ``--root`` command-line option, which outputs a one-time use URL to :ref:`authenticate as a root actor ` (`#784 `__):: + + $ datasette fixtures.db --root + http://127.0.0.1:8001/-/auth-token?token=5b632f8cd44b868df625f5a6e2185d88eea5b22237fd3cc8773f107cc4fd6477 + INFO: Started server process [14973] + INFO: Waiting for application startup. + INFO: Application startup complete. + INFO: Uvicorn running on http://127.0.0.1:8001 (Press CTRL+C to quit) + +Plugins can implement new ways of authenticating users using the new :ref:`plugin_actor_from_request` hook. + +Permissions +~~~~~~~~~~~ + +Datasette also now has a built-in concept of :ref:`authentication_permissions`. The permissions system answers the following question: + + Is this **actor** allowed to perform this **action**, optionally against this particular **resource**? + +You can use the new ``"allow"`` block syntax in ``metadata.json`` (or ``metadata.yaml``) to set required permissions at the instance, database, table or canned query level. For example, to restrict access to the ``fixtures.db`` database to the ``"root"`` user: + +.. code-block:: json + + { + "databases": { + "fixtures": { + "allow": { + "id" "root" + } + } + } + } + +See :ref:`authentication_permissions_allow` for more details. + +Plugins can implement their own custom permission checks using the new :ref:`plugin_permission_allowed` hook. + +A new debug page at ``/-/permissions`` shows recent permission checks, to help administrators and plugin authors understand exactly what checks are being performed. This tool defaults to only being available to the root user, but can be exposed to other users by plugins that respond to the ``permissions-debug`` permission. (`#788 `__) + +Writable canned queries +~~~~~~~~~~~~~~~~~~~~~~~ + +Datasette's :ref:`canned_queries` feature lets you define SQL queries in ``metadata.json`` which can then be executed by users visiting a specific URL. https://latest.datasette.io/fixtures/neighborhood_search for example. + +Canned queries were previously restricted to ``SELECT``, but Datasette 0.44 introduces the ability for canned queries to execute ``INSERT`` or ``UPDATE`` queries as well, using the new ``"write": true`` property (`#800 `__): + +.. code-block:: json + + { + "databases": { + "dogs": { + "queries": { + "add_name": { + "sql": "INSERT INTO names (name) VALUES (:name)", + "write": true + } + } + } + } + } + +See :ref:`canned_queries_writable` for more details. + +Flash messages +~~~~~~~~~~~~~~ + +Writable canned queries needed a mechanism to let the user know that the query has been successfully executed. The new flash messaging system (`#790 `__) allows messages to persist in signed cookies which are then displayed to the user on the next page that they visit. Plugins can use this mechanism to display their own messages, see :ref:`datasette_add_message` for details. 
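A minimal sketch of how a plugin might queue one of these messages - assuming the ``datasette.add_message()`` method referenced above, a view wired up via ``register_routes()``, and illustrative names like ``rename_dog``:

.. code-block:: python

    from datasette.utils.asgi import Response

    async def rename_dog(datasette, request):
        # add_message() stores the message in the signed ds_messages cookie;
        # Datasette shows it to the user on the next page they visit.
        datasette.add_message(request, "Dog renamed", datasette.INFO)
        return Response.redirect("/data/dogs")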
+ +You can try out the new messages using the ``/-/messages`` debug tool, for example at https://latest.datasette.io/-/messages + +Signed values and secrets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Both flash messages and user authentication needed a way to sign values and set signed cookies. Two new methods are now available for plugins to take advantage of this mechanism: :ref:`datasette_sign` and :ref:`datasette_unsign`. + +Datasette will generate a secret automatically when it starts up, but to avoid resetting the secret (and hence invalidating any cookies) every time the server restarts you should set your own secret. You can pass a secret to Datasette using the new ``--secret`` option or with a ``DATASETTE_SECRET`` environment variable. See :ref:`config_secret` for more details. + +Plugins can now sign value and verify their signatures using the :ref:`datasette.sign() ` and :ref:`datasette.unsign() ` methods. + +CSRF protection +~~~~~~~~~~~~~~~ + +Since writable canned queries are built using POST forms, Datasette now ships with :ref:`internals_csrf` (`#798 `__). This applies automatically to any POST request, which means plugins need to include a ``csrftoken`` in any POST forms that they render. They can do that like so: + +.. code-block:: html + + + +register_routes() plugin hooks +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Plugins can now register new views and routes via the :ref:`plugin_register_routes` plugin hook (`#819 `__). View functions can be defined that accept any of the current ``datasette`` object, the current ``request``, or the ASGI ``scope``, ``send`` and ``receive`` objects. + +Smaller changes +~~~~~~~~~~~~~~~ + +- New internals documentation for :ref:`internals_request` and :ref:`internals_response`. (`#706 `__) +- ``request.url`` now respects the ``force_https_urls`` config setting. closes (`#781 `__) +- ``request.args.getlist()`` returns ``[]`` if missing. Removed ``request.raw_args`` entirely. (`#774 `__) +- New :ref:`datasette.get_database() ` method. +- Added ``_`` prefix to many private, undocumented methods of the Datasette class. (`#576 `__) +- Removed the ``db.get_outbound_foreign_keys()`` method which duplicated the behaviour of ``db.foreign_keys_for_table()``. +- New :ref:`await datasette.permission_allowed() ` method. +- ``/-/actor`` debugging endpoint for viewing the currently authenticated actor. +- New ``request.cookies`` property. +- ``/-/plugins`` endpoint now shows a list of hooks implemented by each plugin, e.g. https://latest.datasette.io/-/plugins?all=1 +- ``request.post_vars()`` method no longer discards empty values. +- New "params" canned query key for explicitly setting named parameters, see :ref:`canned_queries_named_parameters`. (`#797 `__) +- ``request.args`` is now a :ref:`MultiParams ` object. +- Fixed a bug with the ``datasette plugins`` command. (`#802 `__) +- Nicer pattern for using ``make_app_client()`` in tests. (`#395 `__) +- New ``request.actor`` property. +- Fixed broken CSS on nested 404 pages. (`#777 `__) +- New ``request.url_vars`` property. (`#822 `__) +- Fixed a bug with the ``python tests/fixtures.py`` command for outputting Datasette's testing fixtures database and plugins. (`#804 `__) + +The road to Datasette 1.0 +~~~~~~~~~~~~~~~~~~~~~~~~~ + +I've assembled a `milestone for Datasette 1.0 `__. 
The focus of the 1.0 release will be the following: + +- Signify confidence in the quality/stability of Datasette +- Give plugin authors confidence that their plugins will work for the whole 1.x release cycle +- Provide the same confidence to developers building against Datasette JSON APIs + +If you have thoughts about what you would like to see for Datasette 1.0 you can join `the conversation on issue #519 `__. + .. _v0_43: 0.43 (2020-05-28) From d94fc39e33b5eccae853e62f54bd8cc8e74688ff Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 16:43:58 -0700 Subject: [PATCH 0109/1871] Crafty JavaScript trick for generating commit references --- docs/contributing.rst | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/contributing.rst b/docs/contributing.rst index 9c44d177..6562afc8 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -156,6 +156,18 @@ To release a new version, first create a commit that updates :ref:`the changelog Referencing the issues that are part of the release in the commit message ensures the name of the release shows up on those issue pages, e.g. `here `__. +You can generate the list of issue references for a specific release by pasting the following into the browser devtools while looking at the :ref:`changelog` page (replace ``v0-44`` with the most recent version): + +.. code-block:: javascript + + [ + ...new Set( + Array.from( + document.getElementById("v0-44").querySelectorAll("a[href*=issues]") + ).map((a) => "#" + a.href.split("/issues/")[1]) + ), + ].sort().join(", "); + For non-bugfix releases you may want to update the news section of ``README.md`` as part of the same commit. To tag and push the releaes, run the following:: From f3951539f1750698976359411e19c1ccb79210ed Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 18:19:11 -0700 Subject: [PATCH 0110/1871] Hopefully fix horizontal scroll with changelog on mobile --- docs/changelog.rst | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index e4e6057b..911fb1b6 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -1051,9 +1051,7 @@ request all rows where that column is less than 50 meters or more than 20 feet f - Fix SQLite error when loading rows with no incoming FKs. [Russ Garrett] - This fixes ``ERROR: conn=, sql - = 'select ', params = {'id': '1'}`` caused by an invalid query when - loading incoming FKs. + This fixes an error caused by an invalid query when loading incoming FKs. The error was ignored due to async but it still got printed to the console. From d828abaddec0dce3ec4b4eeddc3a74384e52cf34 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 9 Jun 2020 21:20:07 -0700 Subject: [PATCH 0111/1871] Fix horizontal scrollbar on changelog, refs #828 --- docs/_static/css/custom.css | 3 +++ docs/conf.py | 5 +++++ 2 files changed, 8 insertions(+) create mode 100644 docs/_static/css/custom.css diff --git a/docs/_static/css/custom.css b/docs/_static/css/custom.css new file mode 100644 index 00000000..d7c2f164 --- /dev/null +++ b/docs/_static/css/custom.css @@ -0,0 +1,3 @@ +a.external { + overflow-wrap: anywhere; +} diff --git a/docs/conf.py b/docs/conf.py index 5e0bb328..b273afca 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -99,6 +99,11 @@ html_theme = "sphinx_rtd_theme" # so a file named "default.css" will overwrite the builtin "default.css". 
html_static_path = ["_static"] +html_css_files = [ + "css/custom.css", +] + + # Custom sidebar templates, must be a dictionary that maps document names # to template names. # From 57e812d5de9663a3c177e0344f4d1e552a74d484 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 10 Jun 2020 12:39:54 -0700 Subject: [PATCH 0112/1871] ds_author cookie can now expire, closes #829 Refs https://github.com/simonw/datasette-auth-github/issues/62#issuecomment-642152076 --- datasette/actor_auth_cookie.py | 13 ++++++++- datasette/views/special.py | 4 ++- docs/authentication.rst | 48 ++++++++++++++++++++++++++++++++-- docs/internals.rst | 4 +-- setup.py | 1 + tests/fixtures.py | 3 +++ tests/test_auth.py | 21 +++++++++++++-- tests/test_canned_write.py | 6 ++--- tests/test_permissions.py | 20 +++++++------- 9 files changed, 99 insertions(+), 21 deletions(-) diff --git a/datasette/actor_auth_cookie.py b/datasette/actor_auth_cookie.py index a2aa6889..15ecd331 100644 --- a/datasette/actor_auth_cookie.py +++ b/datasette/actor_auth_cookie.py @@ -1,5 +1,7 @@ from datasette import hookimpl from itsdangerous import BadSignature +import baseconv +import time @hookimpl @@ -7,6 +9,15 @@ def actor_from_request(datasette, request): if "ds_actor" not in request.cookies: return None try: - return datasette.unsign(request.cookies["ds_actor"], "actor") + decoded = datasette.unsign(request.cookies["ds_actor"], "actor") + # If it has "e" and "a" keys process the "e" expiry + if not isinstance(decoded, dict) or "a" not in decoded: + return None + expires_at = decoded.get("e") + if expires_at: + timestamp = int(baseconv.base62.decode(expires_at)) + if time.time() > timestamp: + return None + return decoded["a"] except BadSignature: return None diff --git a/datasette/views/special.py b/datasette/views/special.py index 7f4284a1..dc6a25dc 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -62,7 +62,9 @@ class AuthTokenView(BaseView): if secrets.compare_digest(token, self.ds._root_token): self.ds._root_token = None response = Response.redirect("/") - response.set_cookie("ds_actor", self.ds.sign({"id": "root"}, "actor")) + response.set_cookie( + "ds_actor", self.ds.sign({"a": {"id": "root"}}, "actor") + ) return response else: return Response("Invalid token", status=403) diff --git a/docs/authentication.rst b/docs/authentication.rst index f511e373..9b66132a 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -336,11 +336,55 @@ Authentication plugins can set signed ``ds_actor`` cookies themselves like so: .. code-block:: python response = Response.redirect("/") - response.set_cookie("ds_actor", datasette.sign({"id": "cleopaws"}, "actor")) - return response + response.set_cookie("ds_actor", datasette.sign({ + "a": { + "id": "cleopaws" + } + }, "actor")) Note that you need to pass ``"actor"`` as the namespace to :ref:`datasette_sign`. +The shape of data encoded in the cookie is as follows:: + + { + "a": {... actor ...} + } + +.. _authentication_ds_actor_expiry: + +Including an expiry time +------------------------ + +``ds_actor`` cookies can optionally include a signed expiry timestamp, after which the cookies will no longer be valid. Authentication plugins may chose to use this mechanism to limit the lifetime of the cookie. For example, if a plugin implements single-sign-on against another source it may decide to set short-lived cookies so that if the user is removed from the SSO system their existing Datasette cookies will stop working shortly afterwards. 
+ +To include an expiry, add a ``"e"`` key to the cookie value containing a `base62-encoded integer `__ representing the timestamp when the cookie should expire. For example, here's how to set a cookie that expires after 24 hours: + +.. code-block:: python + + import time + import baseconv + + expires_at = int(time.time()) + (24 * 60 * 60) + + response = Response.redirect("/") + response.set_cookie("ds_actor", datasette.sign({ + "a": { + "id": "cleopaws" + }, + "e": baseconv.base62.encode(expires_at), + }, "actor")) + +The resulting cookie will encode data that looks something like this: + +.. code-block:: json + + { + "a": { + "id": "cleopaws" + }, + "e": "1jjSji" + } + .. _permissions: Built-in permissions diff --git a/docs/internals.rst b/docs/internals.rst index 7978e3d7..d75544e1 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -153,12 +153,12 @@ To set cookies on the response, use the ``response.set_cookie(...)`` method. The samesite="lax", ): -You can use this with :ref:`datasette.sign() ` to set signed cookies. Here's how you would set the ``ds_actor`` cookie for use with Datasette :ref:`authentication `: +You can use this with :ref:`datasette.sign() ` to set signed cookies. Here's how you would set the :ref:`ds_actor cookie ` for use with Datasette :ref:`authentication `: .. code-block:: python response = Response.redirect("/") - response.set_cookie("ds_actor", datasette.sign({"id": "cleopaws"}, "actor")) + response.set_cookie("ds_actor", datasette.sign({"a": {"id": "cleopaws"}}, "actor")) return response .. _internals_datasette: diff --git a/setup.py b/setup.py index 678a022f..45af0253 100644 --- a/setup.py +++ b/setup.py @@ -57,6 +57,7 @@ setup( "PyYAML~=5.3", "mergedeep>=1.1.1,<1.4.0", "itsdangerous~=1.1", + "python-baseconv==1.2.2", ], entry_points=""" [console_scripts] diff --git a/tests/fixtures.py b/tests/fixtures.py index 1eb1bb6e..a846999b 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -109,6 +109,9 @@ class TestClient: def __init__(self, asgi_app): self.asgi_app = asgi_app + def actor_cookie(self, actor): + return self.ds.sign({"a": actor}, "actor") + @async_to_sync async def get( self, path, allow_redirects=True, redirect_count=0, method="GET", cookies=None diff --git a/tests/test_auth.py b/tests/test_auth.py index 0e5563a3..5e847445 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,4 +1,7 @@ from .fixtures import app_client +import baseconv +import pytest +import time def test_auth_token(app_client): @@ -8,7 +11,9 @@ def test_auth_token(app_client): response = app_client.get(path, allow_redirects=False,) assert 302 == response.status assert "/" == response.headers["Location"] - assert {"id": "root"} == app_client.ds.unsign(response.cookies["ds_actor"], "actor") + assert {"a": {"id": "root"}} == app_client.ds.unsign( + response.cookies["ds_actor"], "actor" + ) # Check that a second with same token fails assert app_client.ds._root_token is None assert 403 == app_client.get(path, allow_redirects=False,).status @@ -16,6 +21,18 @@ def test_auth_token(app_client): def test_actor_cookie(app_client): "A valid actor cookie sets request.scope['actor']" - cookie = app_client.ds.sign({"id": "test"}, "actor") + cookie = app_client.actor_cookie({"id": "test"}) response = app_client.get("/", cookies={"ds_actor": cookie}) assert {"id": "test"} == app_client.ds._last_request.scope["actor"] + + +@pytest.mark.parametrize( + "offset,expected", [((24 * 60 * 60), {"id": "test"}), (-(24 * 60 * 60), None),] +) +def test_actor_cookie_that_expires(app_client, 
offset, expected): + expires_at = int(time.time()) + offset + cookie = app_client.ds.sign( + {"a": {"id": "test"}, "e": baseconv.base62.encode(expires_at)}, "actor" + ) + response = app_client.get("/", cookies={"ds_actor": cookie}) + assert expected == app_client.ds._last_request.scope["actor"] diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index dc3fba3f..4257806e 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -55,7 +55,7 @@ def test_custom_success_message(canned_write_client): response = canned_write_client.post( "/data/delete_name", {"rowid": 1}, - cookies={"ds_actor": canned_write_client.ds.sign({"id": "root"}, "actor")}, + cookies={"ds_actor": canned_write_client.actor_cookie({"id": "root"})}, allow_redirects=False, csrftoken_from=True, ) @@ -116,7 +116,7 @@ def test_canned_query_permissions_on_database_page(canned_write_client): # With auth shows four response = canned_write_client.get( "/data.json", - cookies={"ds_actor": canned_write_client.ds.sign({"id": "root"}, "actor")}, + cookies={"ds_actor": canned_write_client.actor_cookie({"id": "root"})}, ) assert 200 == response.status assert [ @@ -132,6 +132,6 @@ def test_canned_query_permissions_on_database_page(canned_write_client): def test_canned_query_permissions(canned_write_client): assert 403 == canned_write_client.get("/data/delete_name").status assert 200 == canned_write_client.get("/data/update_name").status - cookies = {"ds_actor": canned_write_client.ds.sign({"id": "root"}, "actor")} + cookies = {"ds_actor": canned_write_client.actor_cookie({"id": "root"})} assert 200 == canned_write_client.get("/data/delete_name", cookies=cookies).status assert 200 == canned_write_client.get("/data/update_name", cookies=cookies).status diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 477b8160..1be9529a 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -21,7 +21,7 @@ def test_view_instance(allow, expected_anon, expected_auth): # Should be no padlock assert "

    Datasette 🔒

    " not in anon_response.text auth_response = client.get( - path, cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + path, cookies={"ds_actor": client.actor_cookie({"id": "root"})}, ) assert expected_auth == auth_response.status # Check for the padlock @@ -48,7 +48,7 @@ def test_view_database(allow, expected_anon, expected_auth): # Should be no padlock assert ">fixtures 🔒" not in anon_response.text auth_response = client.get( - path, cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + path, cookies={"ds_actor": client.actor_cookie({"id": "root"})}, ) assert expected_auth == auth_response.status if ( @@ -69,7 +69,7 @@ def test_database_list_respects_view_database(): assert 'data' in anon_response.text assert 'fixtures' not in anon_response.text auth_response = client.get( - "/", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + "/", cookies={"ds_actor": client.actor_cookie({"id": "root"})}, ) assert 'data' in auth_response.text assert 'fixtures 🔒' in auth_response.text @@ -100,7 +100,7 @@ def test_database_list_respects_view_table(): for html_fragment in html_fragments: assert html_fragment not in anon_response_text auth_response_text = client.get( - "/", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + "/", cookies={"ds_actor": client.actor_cookie({"id": "root"})}, ).text for html_fragment in html_fragments: assert html_fragment in auth_response_text @@ -127,7 +127,7 @@ def test_view_table(allow, expected_anon, expected_auth): assert ">compound_three_primary_keys 🔒" not in anon_response.text auth_response = client.get( "/fixtures/compound_three_primary_keys", - cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}, + cookies={"ds_actor": client.actor_cookie({"id": "root"})}, ) assert expected_auth == auth_response.status if allow and expected_anon == 403 and expected_auth == 200: @@ -156,7 +156,7 @@ def test_table_list_respects_view_table(): for html_fragment in html_fragments: assert html_fragment not in anon_response.text auth_response = client.get( - "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} + "/fixtures", cookies={"ds_actor": client.actor_cookie({"id": "root"})} ) for html_fragment in html_fragments: assert html_fragment in auth_response.text @@ -180,7 +180,7 @@ def test_view_query(allow, expected_anon, expected_auth): # Should be no padlock assert ">fixtures 🔒" not in anon_response.text auth_response = client.get( - "/fixtures/q", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} + "/fixtures/q", cookies={"ds_actor": client.actor_cookie({"id": "root"})} ) assert expected_auth == auth_response.status if allow and expected_anon == 403 and expected_auth == 200: @@ -206,7 +206,7 @@ def test_execute_sql(metadata): assert 403 == client.get("/fixtures/facet_cities?_where=id=3").status # But for logged in user all of these should work: - cookies = {"ds_actor": client.ds.sign({"id": "root"}, "actor")} + cookies = {"ds_actor": client.actor_cookie({"id": "root"})} response_text = client.get("/fixtures", cookies=cookies).text assert form_fragment in response_text assert 200 == client.get("/fixtures?sql=select+1", cookies=cookies).status @@ -231,7 +231,7 @@ def test_query_list_respects_view_query(): assert html_fragment not in anon_response.text assert '"/fixtures/q"' not in anon_response.text auth_response = client.get( - "/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")} + "/fixtures", cookies={"ds_actor": client.actor_cookie({"id": "root"})} ) assert 
html_fragment in auth_response.text @@ -290,7 +290,7 @@ def test_permissions_debug(app_client): app_client.ds._permission_checks.clear() assert 403 == app_client.get("/-/permissions").status # With the cookie it should work - cookie = app_client.ds.sign({"id": "root"}, "actor") + cookie = app_client.actor_cookie({"id": "root"}) response = app_client.get("/-/permissions", cookies={"ds_actor": cookie}) # Should show one failure and one success soup = Soup(response.body, "html.parser") From 9f236c4c00689a022fd1d508f2b809ee2305927f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 10 Jun 2020 13:06:46 -0700 Subject: [PATCH 0113/1871] Warn that register_facet_classes may change, refs #830 Also documented policy that plugin hooks should not be shipped without a real example. Refs #818 --- docs/contributing.rst | 1 + docs/plugins.rst | 3 +++ 2 files changed, 4 insertions(+) diff --git a/docs/contributing.rst b/docs/contributing.rst index 6562afc8..ba52839c 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -12,6 +12,7 @@ General guidelines * **master should always be releasable**. Incomplete features should live in branches. This ensures that any small bug fixes can be quickly released. * **The ideal commit** should bundle together the implementation, unit tests and associated documentation updates. The commit message should link to an associated issue. +* **New plugin hooks** should only be shipped if accompanied by a separate release of a non-demo plugin that uses them. .. _devenvironment: diff --git a/docs/plugins.rst b/docs/plugins.rst index 17fd64df..a28092a3 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -891,6 +891,9 @@ register_facet_classes() Return a list of additional Facet subclasses to be registered. +.. warning:: + The design of this plugin hook is unstable and may change. See `issue 830 `__. + Each Facet subclass implements a new type of facet operation. The class should look like this: .. code-block:: python From 198545733b7a34d7b36ab6510ed30fb7687bcc7e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 10 Jun 2020 16:56:53 -0700 Subject: [PATCH 0114/1871] Document that "allow": {} denies all https://github.com/simonw/datasette/issues/831#issuecomment-642324847 --- docs/authentication.rst | 19 +++++++++++++++++++ tests/test_utils.py | 11 +++++++---- 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 9b66132a..0da5a38b 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -94,6 +94,14 @@ This will match any actors with an ``"id"`` property of ``"root"`` - for example "name": "Root User" } +An allow block can specify "no-one is allowed to do this" using an empty ``{}``: + +.. code-block:: json + + { + "allow": {} + } + Allow keys can provide a list of values. These will match any actor that has any of those values. .. code-block:: json @@ -181,6 +189,17 @@ Here's how to restrict access to your entire Datasette instance to just the ``"i } } +To deny access to all users, you can use ``"allow": {}``: + +.. code-block:: json + + { + "title": "My entirely inaccessible instance", + "allow": {} + } + +One reason to do this is if you are using a Datasette plugin - such as `datasette-permissions-sql `__ - to control permissions instead. + .. 
_authentication_permissions_database: Controlling access to specific databases diff --git a/tests/test_utils.py b/tests/test_utils.py index 0ffe8ae6..b490953f 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -464,16 +464,19 @@ def test_multi_params(data, should_raise): @pytest.mark.parametrize( "actor,allow,expected", [ + # Default is to allow: (None, None, True), + # {} means deny-all: (None, {}, False), - (None, {"id": "root"}, False), - ({"id": "root"}, None, True), ({"id": "root"}, {}, False), - ({"id": "simon", "staff": True}, {"staff": True}, True), - ({"id": "simon", "staff": False}, {"staff": True}, False), # Special case for "unauthenticated": true (None, {"unauthenticated": True}, True), (None, {"unauthenticated": False}, False), + # Match on just one property: + (None, {"id": "root"}, False), + ({"id": "root"}, None, True), + ({"id": "simon", "staff": True}, {"staff": True}, True), + ({"id": "simon", "staff": False}, {"staff": True}, False), # Special "*" value for any key: ({"id": "root"}, {"id": "*"}, True), ({}, {"id": "*"}, False), From ce4958018ede00fbdadf0c37a99889b6901bfb9b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 10 Jun 2020 17:10:28 -0700 Subject: [PATCH 0115/1871] Clarify that view-query also lets you execute writable queries --- docs/authentication.rst | 2 +- docs/sql_queries.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 0da5a38b..6a526f34 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -461,7 +461,7 @@ Default *allow*. view-query ---------- -Actor is allowed to view a :ref:`canned query ` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size +Actor is allowed to view (and execute) a :ref:`canned query ` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size - this includes executing :ref:`canned_queries_writable`. ``resource`` - tuple: (string, string) The name of the database, then the name of the canned query diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst index a73f6bc2..6cc32da1 100644 --- a/docs/sql_queries.rst +++ b/docs/sql_queries.rst @@ -223,7 +223,7 @@ Writable canned queries Canned queries by default are read-only. You can use the ``"write": true`` key to indicate that a canned query can write to the database. -See :ref:`authentication_permissions_metadata` for details on how to add permission checks to canned queries, using the ``"allow"`` key. +See :ref:`authentication_permissions_query` for details on how to add permission checks to canned queries, using the ``"allow"`` key. .. 
code-block:: json From 371170eee8d1659437e42c8ee267cb4b2abcffb5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 08:44:44 -0700 Subject: [PATCH 0116/1871] publish heroku now deploys with Python 3.8.3 --- datasette/publish/heroku.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index 4db81d8e..7adf9d92 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -167,7 +167,7 @@ def temporary_heroku_directory( if metadata_content: open("metadata.json", "w").write(json.dumps(metadata_content, indent=2)) - open("runtime.txt", "w").write("python-3.8.0") + open("runtime.txt", "w").write("python-3.8.3") if branch: install = [ From 98632f0a874b7b9dac6abf0abb9fdb7e2839a4d3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 09:02:03 -0700 Subject: [PATCH 0117/1871] --secret command for datasette publish Closes #787 --- datasette/cli.py | 28 +++++++++++++++--------- datasette/publish/cloudrun.py | 2 ++ datasette/publish/common.py | 7 ++++++ datasette/publish/heroku.py | 3 +++ datasette/utils/__init__.py | 7 +++++- docs/datasette-package-help.txt | 3 +++ docs/datasette-publish-cloudrun-help.txt | 3 +++ docs/datasette-publish-heroku-help.txt | 3 +++ docs/plugins.rst | 1 + tests/test_package.py | 8 ++++--- tests/test_publish_cloudrun.py | 3 +++ tests/test_utils.py | 4 ++++ 12 files changed, 58 insertions(+), 14 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 2e3c8e36..ff9a2d5c 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -165,6 +165,12 @@ def plugins(all, plugins_dir): ) @click.option("--spatialite", is_flag=True, help="Enable SpatialLite extension") @click.option("--version-note", help="Additional note to show on /-/versions") +@click.option( + "--secret", + help="Secret used for signing secure values, such as signed cookies", + envvar="DATASETTE_PUBLISH_SECRET", + default=lambda: os.urandom(32).hex(), +) @click.option( "-p", "--port", default=8001, help="Port to run the server on, defaults to 8001", ) @@ -187,6 +193,7 @@ def package( install, spatialite, version_note, + secret, port, **extra_metadata ): @@ -203,16 +210,17 @@ def package( with temporary_docker_directory( files, "datasette", - metadata, - extra_options, - branch, - template_dir, - plugins_dir, - static, - install, - spatialite, - version_note, - extra_metadata, + metadata=metadata, + extra_options=extra_options, + branch=branch, + template_dir=template_dir, + plugins_dir=plugins_dir, + static=static, + install=install, + spatialite=spatialite, + version_note=version_note, + secret=secret, + extra_metadata=extra_metadata, port=port, ): args = ["docker", "build"] diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 8271209a..8f99dc2e 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -47,6 +47,7 @@ def publish_subcommand(publish): install, plugin_secret, version_note, + secret, title, license, license_url, @@ -120,6 +121,7 @@ def publish_subcommand(publish): install, spatialite, version_note, + secret, extra_metadata, environment_variables, ): diff --git a/datasette/publish/common.py b/datasette/publish/common.py index 2911029d..49a4798e 100644 --- a/datasette/publish/common.py +++ b/datasette/publish/common.py @@ -1,5 +1,6 @@ from ..utils import StaticMount import click +import os import shutil import sys @@ -52,6 +53,12 @@ def add_common_publish_arguments_and_options(subcommand): click.option( "--version-note", 
help="Additional note to show on /-/versions" ), + click.option( + "--secret", + help="Secret used for signing secure values, such as signed cookies", + envvar="DATASETTE_PUBLISH_SECRET", + default=lambda: os.urandom(32).hex(), + ), click.option("--title", help="Title for metadata"), click.option("--license", help="License label for metadata"), click.option("--license_url", help="License URL for metadata"), diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index 7adf9d92..6cda68da 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -35,6 +35,7 @@ def publish_subcommand(publish): install, plugin_secret, version_note, + secret, title, license, license_url, @@ -100,6 +101,7 @@ def publish_subcommand(publish): static, install, version_note, + secret, extra_metadata, ): app_name = None @@ -144,6 +146,7 @@ def temporary_heroku_directory( static, install, version_note, + secret, extra_metadata=None, ): extra_metadata = extra_metadata or {} diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 51373c46..5090f67e 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -278,10 +278,13 @@ def make_dockerfile( install, spatialite, version_note, + secret, environment_variables=None, port=8001, ): cmd = ["datasette", "serve", "--host", "0.0.0.0"] + environment_variables = environment_variables or {} + environment_variables["DATASETTE_SECRET"] = secret for filename in files: cmd.extend(["-i", filename]) cmd.extend(["--cors", "--inspect-file", "inspect-data.json"]) @@ -324,7 +327,7 @@ CMD {cmd}""".format( environment_variables="\n".join( [ "ENV {} '{}'".format(key, value) - for key, value in (environment_variables or {}).items() + for key, value in environment_variables.items() ] ), files=" ".join(files), @@ -348,6 +351,7 @@ def temporary_docker_directory( install, spatialite, version_note, + secret, extra_metadata=None, environment_variables=None, port=8001, @@ -381,6 +385,7 @@ def temporary_docker_directory( install, spatialite, version_note, + secret, environment_variables, port=port, ) diff --git a/docs/datasette-package-help.txt b/docs/datasette-package-help.txt index 326b66cb..1b14f908 100644 --- a/docs/datasette-package-help.txt +++ b/docs/datasette-package-help.txt @@ -17,6 +17,9 @@ Options: --install TEXT Additional packages (e.g. 
plugins) to install --spatialite Enable SpatialLite extension --version-note TEXT Additional note to show on /-/versions + --secret TEXT Secret used for signing secure values, such as signed + cookies + -p, --port INTEGER Port to run the server on, defaults to 8001 --title TEXT Title for metadata --license TEXT License label for metadata diff --git a/docs/datasette-publish-cloudrun-help.txt b/docs/datasette-publish-cloudrun-help.txt index 98fc9c71..a625bd10 100644 --- a/docs/datasette-publish-cloudrun-help.txt +++ b/docs/datasette-publish-cloudrun-help.txt @@ -15,6 +15,9 @@ Options: datasette-auth-github client_id xxx --version-note TEXT Additional note to show on /-/versions + --secret TEXT Secret used for signing secure values, such as signed + cookies + --title TEXT Title for metadata --license TEXT License label for metadata --license_url TEXT License URL for metadata diff --git a/docs/datasette-publish-heroku-help.txt b/docs/datasette-publish-heroku-help.txt index ec157753..b2caa2cc 100644 --- a/docs/datasette-publish-heroku-help.txt +++ b/docs/datasette-publish-heroku-help.txt @@ -15,6 +15,9 @@ Options: datasette-auth-github client_id xxx --version-note TEXT Additional note to show on /-/versions + --secret TEXT Secret used for signing secure values, such as signed + cookies + --title TEXT Title for metadata --license TEXT License label for metadata --license_url TEXT License URL for metadata diff --git a/docs/plugins.rst b/docs/plugins.rst index a28092a3..989cf672 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -536,6 +536,7 @@ Let's say you want to build a plugin that adds a ``datasette publish my_hosting_ install, plugin_secret, version_note, + secret, title, license, license_url, diff --git a/tests/test_package.py b/tests/test_package.py index f0cbe88f..3248b3a4 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -15,7 +15,7 @@ FROM python:3.8 COPY . /app WORKDIR /app - +ENV DATASETTE_SECRET 'sekrit' RUN pip install -U datasette RUN datasette inspect test.db --inspect-file inspect-data.json ENV PORT {port} @@ -33,7 +33,7 @@ def test_package(mock_call, mock_which): mock_call.side_effect = capture with runner.isolated_filesystem(): open("test.db", "w").write("data") - result = runner.invoke(cli.cli, ["package", "test.db"]) + result = runner.invoke(cli.cli, ["package", "test.db", "--secret", "sekrit"]) assert 0 == result.exit_code mock_call.assert_has_calls([mock.call(["docker", "build", "."])]) assert EXPECTED_DOCKERFILE.format(port=8001) == capture.captured @@ -48,6 +48,8 @@ def test_package_with_port(mock_call, mock_which): runner = CliRunner() with runner.isolated_filesystem(): open("test.db", "w").write("data") - result = runner.invoke(cli.cli, ["package", "test.db", "-p", "8080"]) + result = runner.invoke( + cli.cli, ["package", "test.db", "-p", "8080", "--secret", "sekrit"] + ) assert 0 == result.exit_code assert EXPECTED_DOCKERFILE.format(port=8080) == capture.captured diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py index 55c207c7..c3ed1f90 100644 --- a/tests/test_publish_cloudrun.py +++ b/tests/test_publish_cloudrun.py @@ -172,6 +172,8 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which): "client_id", "x-client-id", "--show-files", + "--secret", + "x-secret", ], ) dockerfile = ( @@ -184,6 +186,7 @@ COPY . 
/app WORKDIR /app ENV DATASETTE_AUTH_GITHUB_CLIENT_ID 'x-client-id' +ENV DATASETTE_SECRET 'x-secret' RUN pip install -U datasette RUN datasette inspect test.db --inspect-file inspect-data.json ENV PORT 8001 diff --git a/tests/test_utils.py b/tests/test_utils.py index b490953f..d613e999 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -247,6 +247,7 @@ def test_temporary_docker_directory_uses_hard_link(): install=[], spatialite=False, version_note=None, + secret="secret", ) as temp_docker: hello = os.path.join(temp_docker, "hello") assert "world" == open(hello).read() @@ -274,6 +275,7 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link): install=[], spatialite=False, version_note=None, + secret=None, ) as temp_docker: hello = os.path.join(temp_docker, "hello") assert "world" == open(hello).read() @@ -297,11 +299,13 @@ def test_temporary_docker_directory_quotes_args(): install=[], spatialite=False, version_note="$PWD", + secret="secret", ) as temp_docker: df = os.path.join(temp_docker, "Dockerfile") df_contents = open(df).read() assert "'$PWD'" in df_contents assert "'--$HOME'" in df_contents + assert "ENV DATASETTE_SECRET 'secret'" in df_contents def test_compound_keys_after_sql(): From fcc7cd6379ab62b5c2440d26935659a797133030 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 09:04:32 -0700 Subject: [PATCH 0118/1871] rST formatting --- docs/publish.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/publish.rst b/docs/publish.rst index c1024bd7..6eff74d0 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -139,7 +139,7 @@ You can now run the resulting container like so:: This exposes port 8001 inside the container as port 8081 on your host machine, so you can access the application at ``http://localhost:8081/`` -You can customize the port that is exposed by the countainer using the ``--port`` option: +You can customize the port that is exposed by the countainer using the ``--port`` option:: datasette package mydatabase.db --port 8080 From 09bf3c63225babe8e28cde880ca4399ca7dbd78b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 09:14:30 -0700 Subject: [PATCH 0119/1871] Documentation for publish --secret, refs #787 --- docs/config.rst | 13 +++++++++++++ docs/publish.rst | 2 ++ 2 files changed, 15 insertions(+) diff --git a/docs/config.rst b/docs/config.rst index ab14ea7b..bbbea822 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -306,3 +306,16 @@ One way to generate a secure random secret is to use Python like this:: cdb19e94283a20f9d42cca50c5a4871c0aa07392db308755d60a1a5b9bb0fa52 Plugin authors make use of this signing mechanism in their plugins using :ref:`datasette_sign` and :ref:`datasette_unsign`. + +.. _config_publish_secrets: + +Using secrets with datasette publish +------------------------------------ + +The :ref:`cli_publish` and :ref:`cli_package` commands both generate a secret for you automatically when Datasette is deployed. + +This means that every time you deploy a new version of a Datasette project, a new secret will be generated. This will cause signed cookies to become inalid on every fresh deploy. 
+ +You can fix this by creating a secret that will be used for multiple deploys and passing it using the ``--secret`` option:: + + datasette publish cloudrun mydb.db --service=my-service --secret=cdb19e94283a20f9d42cca5 diff --git a/docs/publish.rst b/docs/publish.rst index 6eff74d0..ebaf826a 100644 --- a/docs/publish.rst +++ b/docs/publish.rst @@ -100,6 +100,8 @@ If a plugin has any :ref:`plugins_configuration_secret` you can use the ``--plug --plugin-secret datasette-auth-github client_id your_client_id \ --plugin-secret datasette-auth-github client_secret your_client_secret +.. _cli_package: + datasette package ================= From 29c5ff493ad7918b8fc44ea7920b41530e56dd5d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 15:14:51 -0700 Subject: [PATCH 0120/1871] view-instance permission for debug URLs, closes #833 --- datasette/views/special.py | 8 ++++++-- tests/test_permissions.py | 30 ++++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 2 deletions(-) diff --git a/datasette/views/special.py b/datasette/views/special.py index dc6a25dc..6fcb6b5e 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -14,6 +14,7 @@ class JsonDataView(BaseView): self.needs_request = needs_request async def get(self, request, as_format): + await self.check_permission(request, "view-instance") if self.needs_request: data = self.data_callback(request) else: @@ -46,6 +47,7 @@ class PatternPortfolioView(BaseView): self.ds = datasette async def get(self, request): + await self.check_permission(request, "view-instance") return await self.render(["patterns.html"], request=request) @@ -77,8 +79,8 @@ class PermissionsDebugView(BaseView): self.ds = datasette async def get(self, request): - if not await self.ds.permission_allowed(request.actor, "permissions-debug"): - return Response("Permission denied", status=403) + await self.check_permission(request, "view-instance") + await self.check_permission(request, "permissions-debug") return await self.render( ["permissions_debug.html"], request, @@ -93,9 +95,11 @@ class MessagesDebugView(BaseView): self.ds = datasette async def get(self, request): + await self.check_permission(request, "view-instance") return await self.render(["messages_debug.html"], request) async def post(self, request): + await self.check_permission(request, "view-instance") post = await request.post_vars() message = post.get("message", "") message_type = post.get("message_type") or "INFO" diff --git a/tests/test_permissions.py b/tests/test_permissions.py index 1be9529a..fcc1b5ed 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -316,3 +316,33 @@ def test_permissions_debug(app_client): def test_allow_unauthenticated(allow, expected): with make_app_client(metadata={"allow": allow}) as client: assert expected == client.get("/").status + + +@pytest.fixture(scope="session") +def view_instance_client(): + with make_app_client(metadata={"allow": {}}) as client: + yield client + + +@pytest.mark.parametrize( + "path", + [ + "/", + "/fixtures", + "/fixtures/facetable", + "/-/metadata", + "/-/versions", + "/-/plugins", + "/-/config", + "/-/threads", + "/-/databases", + "/-/actor", + "/-/permissions", + "/-/messages", + "/-/patterns", + ], +) +def test_view_instance(path, view_instance_client): + assert 403 == view_instance_client.get(path).status + if path not in ("/-/permissions", "/-/messages", "/-/patterns"): + assert 403 == view_instance_client.get(path + ".json").status From f39f11133126158e28780dee91bb9c7719ef5875 Mon Sep 17 
00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 15:47:19 -0700 Subject: [PATCH 0121/1871] Fixed actor_matches_allow bug, closes #836 --- datasette/utils/__init__.py | 2 +- tests/test_utils.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 5090f67e..69cfa400 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -884,7 +884,7 @@ def actor_matches_allow(actor, allow): values = [values] actor_values = actor.get(key) if actor_values is None: - return False + continue if not isinstance(actor_values, list): actor_values = [actor_values] actor_values = set(actor_values) diff --git a/tests/test_utils.py b/tests/test_utils.py index d613e999..da1d298b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -497,6 +497,8 @@ def test_multi_params(data, should_raise): ({"id": "garry", "roles": ["staff", "dev"]}, {"roles": ["dev", "otter"]}, True), ({"id": "garry", "roles": []}, {"roles": ["staff"]}, False), ({"id": "garry"}, {"roles": ["staff"]}, False), + # Any single matching key works: + ({"id": "root"}, {"bot_id": "my-bot", "id": ["root"]}, True), ], ) def test_actor_matches_allow(actor, allow, expected): From fba8ff6e76253af2b03749ed8dd6e28985a7fb8f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 17:21:48 -0700 Subject: [PATCH 0122/1871] "$env": "X" mechanism now works with nested lists, closes #837 --- datasette/app.py | 14 ++------------ datasette/utils/__init__.py | 16 ++++++++++++++++ docs/changelog.rst | 2 ++ tests/fixtures.py | 1 + tests/test_plugins.py | 13 +++++++++++++ tests/test_utils.py | 14 ++++++++++++++ 6 files changed, 48 insertions(+), 12 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 71fa9afb..ebab3bee 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -45,6 +45,7 @@ from .utils import ( format_bytes, module_from_path, parse_metadata, + resolve_env_secrets, sqlite3, to_css_class, ) @@ -367,18 +368,7 @@ class Datasette: return None plugin_config = plugins.get(plugin_name) # Resolve any $file and $env keys - if isinstance(plugin_config, dict): - # Create a copy so we don't mutate the version visible at /-/metadata.json - plugin_config_copy = dict(plugin_config) - for key, value in plugin_config_copy.items(): - if isinstance(value, dict): - if list(value.keys()) == ["$env"]: - plugin_config_copy[key] = os.environ.get( - list(value.values())[0] - ) - elif list(value.keys()) == ["$file"]: - plugin_config_copy[key] = open(list(value.values())[0]).read() - return plugin_config_copy + plugin_config = resolve_env_secrets(plugin_config, os.environ) return plugin_config def app_css_hash(self): diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 69cfa400..ae7bbdb5 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -904,3 +904,19 @@ async def check_visibility(datasette, actor, action, resource, default=True): None, action, resource=resource, default=default, ) return visible, private + + +def resolve_env_secrets(config, environ): + 'Create copy that recursively replaces {"$env": "NAME"} with values from environ' + if isinstance(config, dict): + if list(config.keys()) == ["$env"]: + return environ.get(list(config.values())[0]) + else: + return { + key: resolve_env_secrets(value, environ) + for key, value in config.items() + } + elif isinstance(config, list): + return [resolve_env_secrets(value, environ) for value in config] + else: + return config diff --git 
a/docs/changelog.rst b/docs/changelog.rst index 911fb1b6..3a01d05e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -94,6 +94,8 @@ Both flash messages and user authentication needed a way to sign values and set Datasette will generate a secret automatically when it starts up, but to avoid resetting the secret (and hence invalidating any cookies) every time the server restarts you should set your own secret. You can pass a secret to Datasette using the new ``--secret`` option or with a ``DATASETTE_SECRET`` environment variable. See :ref:`config_secret` for more details. +You can also set a secret when you deploy Datasette using ``datasette publish`` or ``datasette package`` - see :ref:`config_publish_secrets`. + Plugins can now sign value and verify their signatures using the :ref:`datasette.sign() ` and :ref:`datasette.unsign() ` methods. CSRF protection diff --git a/tests/fixtures.py b/tests/fixtures.py index a846999b..907bf895 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -408,6 +408,7 @@ METADATA = { "plugins": { "name-of-plugin": {"depth": "root"}, "env-plugin": {"foo": {"$env": "FOO_ENV"}}, + "env-plugin-list": [{"in_a_list": {"$env": "FOO_ENV"}}], "file-plugin": {"foo": {"$file": TEMP_PLUGIN_SECRET_FILE}}, }, "databases": { diff --git a/tests/test_plugins.py b/tests/test_plugins.py index c7bb4859..0fae3740 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -173,6 +173,19 @@ def test_plugin_config_env(app_client): del os.environ["FOO_ENV"] +def test_plugin_config_env_from_list(app_client): + os.environ["FOO_ENV"] = "FROM_ENVIRONMENT" + assert [{"in_a_list": "FROM_ENVIRONMENT"}] == app_client.ds.plugin_config( + "env-plugin-list" + ) + # Ensure secrets aren't visible in /-/metadata.json + metadata = app_client.get("/-/metadata.json") + assert [{"in_a_list": {"$env": "FOO_ENV"}}] == metadata.json["plugins"][ + "env-plugin-list" + ] + del os.environ["FOO_ENV"] + + def test_plugin_config_file(app_client): open(TEMP_PLUGIN_SECRET_FILE, "w").write("FROM_FILE") assert {"foo": "FROM_FILE"} == app_client.ds.plugin_config("file-plugin") diff --git a/tests/test_utils.py b/tests/test_utils.py index da1d298b..80c6f223 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -503,3 +503,17 @@ def test_multi_params(data, should_raise): ) def test_actor_matches_allow(actor, allow, expected): assert expected == utils.actor_matches_allow(actor, allow) + + +@pytest.mark.parametrize( + "config,expected", + [ + ({"foo": "bar"}, {"foo": "bar"}), + ({"$env": "FOO"}, "x"), + ({"k": {"$env": "FOO"}}, {"k": "x"}), + ([{"k": {"$env": "FOO"}}, {"z": {"$env": "FOO"}}], [{"k": "x"}, {"z": "x"}]), + ({"k": [{"in_a_list": {"$env": "FOO"}}]}, {"k": [{"in_a_list": "x"}]}), + ], +) +def test_resolve_env_secrets(config, expected): + assert expected == utils.resolve_env_secrets(config, {"FOO": "x"}) From 308bcc8805236b8eb5a08d8045c84f68bd0ddf0e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 17:25:12 -0700 Subject: [PATCH 0123/1871] Fixed test_permissions_debug --- datasette/views/special.py | 3 ++- tests/test_permissions.py | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/datasette/views/special.py b/datasette/views/special.py index 6fcb6b5e..6c378995 100644 --- a/datasette/views/special.py +++ b/datasette/views/special.py @@ -80,7 +80,8 @@ class PermissionsDebugView(BaseView): async def get(self, request): await self.check_permission(request, "view-instance") - await self.check_permission(request, "permissions-debug") + if not await 
self.ds.permission_allowed(request.actor, "permissions-debug"): + return Response("Permission denied", status=403) return await self.render( ["permissions_debug.html"], request, diff --git a/tests/test_permissions.py b/tests/test_permissions.py index fcc1b5ed..241dd2e5 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -305,7 +305,9 @@ def test_permissions_debug(app_client): ] assert [ {"action": "permissions-debug", "result": True, "used_default": False}, + {"action": "view-instance", "result": True, "used_default": True}, {"action": "permissions-debug", "result": False, "used_default": True}, + {"action": "view-instance", "result": True, "used_default": True}, ] == checks From 1d2e8e09a00a4b695317627483f352464ea8a105 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 17:33:16 -0700 Subject: [PATCH 0124/1871] Some last touches to the 0.44 release notes, refs #806 --- docs/changelog.rst | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 3a01d05e..aca8f8c2 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -107,6 +107,13 @@ Since writable canned queries are built using POST forms, Datasette now ships wi +Cookie methods +~~~~~~~~~~~~~~ + +Plugins can now use the new :ref:`response.set_cookie() ` method to set cookies. + +A new ``request.cookies`` method on the :ref:internals_request` can be used to read incoming cookies. + register_routes() plugin hooks ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -134,6 +141,9 @@ Smaller changes - Fixed broken CSS on nested 404 pages. (`#777 `__) - New ``request.url_vars`` property. (`#822 `__) - Fixed a bug with the ``python tests/fixtures.py`` command for outputting Datasette's testing fixtures database and plugins. (`#804 `__) +- ``datasette publish heroku`` now deploys using Python 3.8.3. +- Added a warning that the :ref:`plugin_register_facet_classes` hook is unstable and may change in the future. (`#830 `__) +- The ``{"$env": "ENVIRONMENT_VARIBALE"}`` mechanism (see :ref:`plugins_configuration_secret`) now works with variables inside nested lists. (`#837 `__) The road to Datasette 1.0 ~~~~~~~~~~~~~~~~~~~~~~~~~ From 793a52b31771280a6c8660efb9e48b9b763477ff Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 17:43:51 -0700 Subject: [PATCH 0125/1871] Link to datasett-auth-tokens and datasette-permissions-sql in docs, refs #806 --- docs/authentication.rst | 4 ++-- docs/changelog.rst | 4 ++-- docs/ecosystem.rst | 10 ++++++++++ docs/internals.rst | 2 +- docs/plugins.rst | 17 ++++++++++------- 5 files changed, 25 insertions(+), 12 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 6a526f34..2a6fa9bc 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -19,7 +19,7 @@ Every request to Datasette has an associated actor value, available in the code The actor dictionary can be any shape - the design of that data structure is left up to the plugins. A useful convention is to include an ``"id"`` string, as demonstrated by the "root" actor below. -Plugins can use the :ref:`plugin_actor_from_request` hook to implement custom logic for authenticating an actor based on the incoming HTTP request. +Plugins can use the :ref:`plugin_hook_actor_from_request` hook to implement custom logic for authenticating an actor based on the incoming HTTP request. .. 
_authentication_root: @@ -314,7 +314,7 @@ Checking permissions in plugins Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. -Datasette core performs a number of permission checks, :ref:`documented below `. Plugins can implement the :ref:`plugin_permission_allowed` plugin hook to participate in decisions about whether an actor should be able to perform a specified action. +Datasette core performs a number of permission checks, :ref:`documented below `. Plugins can implement the :ref:`plugin_hook_permission_allowed` plugin hook to participate in decisions about whether an actor should be able to perform a specified action. .. _authentication_actor_matches_allow: diff --git a/docs/changelog.rst b/docs/changelog.rst index aca8f8c2..3a7f9562 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -27,7 +27,7 @@ You'll need to install plugins if you want full user accounts, but default Datas INFO: Application startup complete. INFO: Uvicorn running on http://127.0.0.1:8001 (Press CTRL+C to quit) -Plugins can implement new ways of authenticating users using the new :ref:`plugin_actor_from_request` hook. +Plugins can implement new ways of authenticating users using the new :ref:`plugin_hook_actor_from_request` hook. Permissions ~~~~~~~~~~~ @@ -52,7 +52,7 @@ You can use the new ``"allow"`` block syntax in ``metadata.json`` (or ``metadata See :ref:`authentication_permissions_allow` for more details. -Plugins can implement their own custom permission checks using the new :ref:`plugin_permission_allowed` hook. +Plugins can implement their own custom permission checks using the new :ref:`plugin_hook_permission_allowed` hook. A new debug page at ``/-/permissions`` shows recent permission checks, to help administrators and plugin authors understand exactly what checks are being performed. This tool defaults to only being available to the root user, but can be exposed to other users by plugins that respond to the ``permissions-debug`` permission. (`#788 `__) diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst index 4777cc16..dcb5a887 100644 --- a/docs/ecosystem.rst +++ b/docs/ecosystem.rst @@ -87,6 +87,16 @@ datasette-auth-github `datasette-auth-github `__ adds an authentication layer to Datasette. Users will have to sign in using their GitHub account before they can view data or interact with Datasette. You can also use it to restrict access to specific GitHub users, or to members of specified GitHub `organizations `__ or `teams `__. +datasette-auth-tokens +--------------------- + +`datasette-auth-tokens `__ provides a mechanism for creating secret API tokens that can then be used with Datasette's :ref:`authentication` system. + +datasette-permissions-sql +--------------------- + +`datasette-permissions-sql `__ lets you configure Datasette permissions checks to use custom SQL queries, which means you can make permisison decisions based on data contained within your databases. + datasette-upload-csvs --------------------- diff --git a/docs/internals.rst b/docs/internals.rst index d75544e1..ab9da410 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -219,7 +219,7 @@ await .permission_allowed(actor, action, resource=None, default=False) Check if the given actor has :ref:`permission ` to perform the given action on the given resource. 
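For illustration, the call pattern documented here is the same one the ``PermissionsDebugView`` fix above switched to. A minimal sketch of a view or plugin using it (an editorial illustration, not part of this patch) might look like:

.. code-block:: python

    # Illustrative sketch only - mirrors the documented signature
    # permission_allowed(actor, action, resource=None, default=False).
    async def actor_can_debug(datasette, request):
        return await datasette.permission_allowed(
            request.actor, "permissions-debug", default=False
        )
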
-Some permission checks are carried out against :ref:`rules defined in metadata.json `, while other custom permissions may be decided by plugins that implement the :ref:`plugin_permission_allowed` plugin hook. +Some permission checks are carried out against :ref:`rules defined in metadata.json `, while other custom permissions may be decided by plugins that implement the :ref:`plugin_hook_permission_allowed` plugin hook. If neither ``metadata.json`` nor any of the plugins provide an answer to the permission query the ``default`` argument will be returned. diff --git a/docs/plugins.rst b/docs/plugins.rst index 989cf672..608f93da 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -25,9 +25,8 @@ Things you can do with plugins include: * Customize how database values are rendered in the Datasette interface, for example `datasette-render-binary `__ and `datasette-pretty-json `__. -* Wrap the entire Datasette application in custom ASGI middleware to add new pages - or implement authentication, for example - `datasette-auth-github `__. +* Customize how Datasette's authentication and permissions systems work, for example `datasette-auth-tokens `__ and + `datasette-permissions-sql `__. .. _plugins_installing: @@ -996,7 +995,7 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att Examples: `datasette-auth-github `_, `datasette-search-all `_, `datasette-media `_ -.. _plugin_actor_from_request: +.. _plugin_hook_actor_from_request: actor_from_request(datasette, request) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1055,7 +1054,9 @@ Instead of returning a dictionary, this function can return an awaitable functio return inner -.. _plugin_permission_allowed: +Example: `datasette-auth-tokens `_ + +.. _plugin_hook_permission_allowed: permission_allowed(datasette, actor, action, resource) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1064,7 +1065,7 @@ permission_allowed(datasette, actor, action, resource) You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. ``actor`` - dictionary - The current actor, as decided by :ref:`plugin_actor_from_request`. + The current actor, as decided by :ref:`plugin_hook_actor_from_request`. ``action`` - string The action to be performed, e.g. ``"edit-table"``. @@ -1110,4 +1111,6 @@ Here's an example that allows users to view the ``admin_log`` table only if thei return inner -See :ref:`permissions` for a full list of permissions that are included in Datasette core. +See :ref:`built-in permissions ` for a full list of permissions that are included in Datasette core. 
+ +Example: `datasette-permissions-sql `_ From 9ae0d483ead93c0832142e5dc85959ae3c8f73ea Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 17:48:20 -0700 Subject: [PATCH 0126/1871] Get "$file": "../path" mechanism working again, closes #839 --- datasette/utils/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index ae7bbdb5..14060669 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -911,6 +911,8 @@ def resolve_env_secrets(config, environ): if isinstance(config, dict): if list(config.keys()) == ["$env"]: return environ.get(list(config.values())[0]) + elif list(config.keys()) == ["$file"]: + return open(list(config.values())[0]).read() else: return { key: resolve_env_secrets(value, environ) From b906030235efbdff536405d66078f4868ce0d3bd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 11 Jun 2020 18:19:30 -0700 Subject: [PATCH 0127/1871] Release Datasette 0.44 Refs #395, #519, #576, #699, #706, #774, #777, #781, #784, #788, #790, #797, #798, #800, #802, #804, #819, #822, #825, #826, #827, #828, #829, #830, #833, #836, #837, #839 Closes #806. --- README.md | 1 + docs/changelog.rst | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 90df75de..925d68d2 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover ## News + * 11th June 2020: [Datasette 0.44](http://datasette.readthedocs.io/en/latest/changelog.html#v0-44) - [Authentication and permissions](https://datasette.readthedocs.io/en/latest/authentication.html), [writable canned queries](https://datasette.readthedocs.io/en/latest/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more. * 28th May 2020: [Datasette 0.43](http://datasette.readthedocs.io/en/latest/changelog.html#v0-43) - Redesigned [register_output_renderer](https://datasette.readthedocs.io/en/latest/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. * 8th May 2020: [Datasette 0.42](http://datasette.readthedocs.io/en/latest/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. * 6th May 2020: [Datasette 0.41](http://datasette.readthedocs.io/en/latest/changelog.html#v0-41) - New mechanism for [creating custom pages](https://datasette.readthedocs.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://datasette.readthedocs.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements. diff --git a/docs/changelog.rst b/docs/changelog.rst index 3a7f9562..b1e95bb7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,7 +6,7 @@ Changelog .. _v0_44: -0.44 (2020-06-??) +0.44 (2020-06-11) ----------------- Authentication and permissions, writable canned queries, flash messages, new plugin hooks and more. 
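To make the ``$env``/``$file`` resolution added in the commits above concrete, here is a minimal sketch (mirroring ``test_resolve_env_secrets``, not part of any patch) of how ``resolve_env_secrets()`` walks nested configuration; the plugin and variable names are illustrative only:

.. code-block:: python

    from datasette.utils import resolve_env_secrets

    # Illustrative only: values shaped like {"$env": "NAME"} are replaced
    # recursively, including inside nested lists (the #837 fix above).
    config = {
        "api-plugin": {"token": {"$env": "API_TOKEN"}},
        "nested": [{"in_a_list": {"$env": "API_TOKEN"}}],
    }
    resolved = resolve_env_secrets(config, {"API_TOKEN": "xyz"})
    assert resolved == {
        "api-plugin": {"token": "xyz"},
        "nested": [{"in_a_list": "xyz"}],
    }

The same helper also resolves ``{"$file": "path"}`` values by reading the named file, as restored by the #839 fix above.
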
From 09a3479a5402df96489ed6cab6cc9fd674bf3433 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 10:55:41 -0700 Subject: [PATCH 0128/1871] New "startup" plugin hook, closes #834 --- datasette/app.py | 7 +++++++ datasette/cli.py | 3 +++ datasette/hookspecs.py | 5 +++++ docs/plugins.rst | 33 +++++++++++++++++++++++++++++++++ tests/fixtures.py | 1 + tests/plugins/my_plugin.py | 5 +++++ tests/test_cli.py | 1 + tests/test_plugins.py | 6 ++++++ 8 files changed, 61 insertions(+) diff --git a/datasette/app.py b/datasette/app.py index ebab3bee..ca2efa91 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -302,6 +302,13 @@ class Datasette: self._permission_checks = collections.deque(maxlen=200) self._root_token = secrets.token_hex(32) + async def invoke_startup(self): + for hook in pm.hook.startup(datasette=self): + if callable(hook): + hook = hook() + if asyncio.iscoroutine(hook): + hook = await hook + def sign(self, value, namespace="default"): return URLSafeSerializer(self._secret, namespace).dumps(value) diff --git a/datasette/cli.py b/datasette/cli.py index ff9a2d5c..bba72484 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -397,6 +397,9 @@ def serve( # Private utility mechanism for writing unit tests return ds + # Run the "startup" plugin hooks + asyncio.get_event_loop().run_until_complete(ds.invoke_startup()) + # Run async sanity checks - but only if we're not under pytest asyncio.get_event_loop().run_until_complete(check_databases(ds)) diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index ab3e131c..9fceee41 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -5,6 +5,11 @@ hookspec = HookspecMarker("datasette") hookimpl = HookimplMarker("datasette") +@hookspec +def startup(datasette): + "Fires directly after Datasette first starts running" + + @hookspec def asgi_wrapper(datasette): "Returns an ASGI middleware callable to wrap our ASGI application with" diff --git a/docs/plugins.rst b/docs/plugins.rst index 608f93da..289be649 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -995,6 +995,39 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att Examples: `datasette-auth-github `_, `datasette-search-all `_, `datasette-media `_ +.. _plugin_hook_startup: + +startup(datasette) +~~~~~~~~~~~~~~~~~~ + +This hook fires when the Datasette application server first starts up. You can implement a regular function, for example to validate required plugin configuration: + +.. code-block:: python + + @hookimpl + def startup(datasette): + config = datasette.plugin_config("my-plugin") or {} + assert "required-setting" in config, "my-plugin requires setting required-setting" + +Or you can return an async function which will be awaited on startup. Use this option if you need to make any database queries: + + @hookimpl + def startup(datasette): + async def inner(): + db = datasette.get_database() + if "my_table" not in await db.table_names(): + await db.execute_write(""" + create table my_table (mycol text) + """, block=True) + return inner + + +Potential use-cases: + +* Run some initialization code for the plugin +* Create database tables that a plugin needs +* Validate the metadata configuration for a plugin on startup, and raise an error if it is invalid + .. 
_plugin_hook_actor_from_request: actor_from_request(datasette, request) diff --git a/tests/fixtures.py b/tests/fixtures.py index 907bf895..09819575 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -49,6 +49,7 @@ EXPECTED_PLUGINS = [ "register_facet_classes", "register_routes", "render_cell", + "startup", ], }, { diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index a0f7441b..3f019a84 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -167,3 +167,8 @@ def register_routes(): (r"/two/(?P.*)$", two), (r"/three/$", three), ] + + +@hookimpl +def startup(datasette): + datasette._startup_hook_fired = True diff --git a/tests/test_cli.py b/tests/test_cli.py index 6939fe57..90aa990d 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -10,6 +10,7 @@ from click.testing import CliRunner import io import json import pathlib +import pytest import textwrap diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 0fae3740..c0a7438f 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -572,3 +572,9 @@ def test_register_routes_asgi(app_client): response = app_client.get("/three/") assert {"hello": "world"} == response.json assert "1" == response.headers["x-three"] + + +@pytest.mark.asyncio +async def test_startup(app_client): + await app_client.ds.invoke_startup() + assert app_client.ds._startup_hook_fired From 72ae975156a09619a808cdd03fddddcf62e6f533 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 10:58:32 -0700 Subject: [PATCH 0129/1871] Added test for async startup hook, refs #834 --- tests/plugins/my_plugin_2.py | 8 ++++++++ tests/test_plugins.py | 1 + 2 files changed, 9 insertions(+) diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index 039112f4..bdfaea8d 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -120,3 +120,11 @@ def permission_allowed(datasette, actor, action): return False return inner + + +@hookimpl +def startup(datasette): + async def inner(): + result = await datasette.get_database().execute("select 1 + 1") + datasette._startup_hook_calculation = result.first()[0] + return inner diff --git a/tests/test_plugins.py b/tests/test_plugins.py index c0a7438f..bc759385 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -578,3 +578,4 @@ def test_register_routes_asgi(app_client): async def test_startup(app_client): await app_client.ds.invoke_startup() assert app_client.ds._startup_hook_fired + assert 2 == app_client.ds._startup_hook_calculation From ae99af25361c9248c721153922c623bd5f440159 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 10:59:35 -0700 Subject: [PATCH 0130/1871] Fixed rST code formatting, refs #834 --- docs/plugins.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index 289be649..8add7352 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1011,6 +1011,8 @@ This hook fires when the Datasette application server first starts up. You can i Or you can return an async function which will be awaited on startup. Use this option if you need to make any database queries: +.. code-block:: python + @hookimpl def startup(datasette): async def inner(): @@ -1021,7 +1023,6 @@ Or you can return an async function which will be awaited on startup. 
Use this o """, block=True) return inner - Potential use-cases: * Run some initialization code for the plugin From d60bd6ad13ef908d7e66a677caee20536f3fb277 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 11:15:33 -0700 Subject: [PATCH 0131/1871] Update plugin tests, refs #834 --- tests/fixtures.py | 1 + tests/plugins/my_plugin_2.py | 1 + 2 files changed, 2 insertions(+) diff --git a/tests/fixtures.py b/tests/fixtures.py index 09819575..e2f90f09 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -64,6 +64,7 @@ EXPECTED_PLUGINS = [ "extra_template_vars", "permission_allowed", "render_cell", + "startup", ], }, { diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index bdfaea8d..f4a082a0 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -127,4 +127,5 @@ def startup(datasette): async def inner(): result = await datasette.get_database().execute("select 1 + 1") datasette._startup_hook_calculation = result.first()[0] + return inner From 0e49842e227a0f1f69d48108c87d17fe0379e548 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 11:29:14 -0700 Subject: [PATCH 0132/1871] datasette/actor_auth_cookie.py coverae to 100%, refs #841 --- tests/test_auth.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/test_auth.py b/tests/test_auth.py index 5e847445..bb4bee4b 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -26,6 +26,17 @@ def test_actor_cookie(app_client): assert {"id": "test"} == app_client.ds._last_request.scope["actor"] +def test_actor_cookie_invalid(app_client): + cookie = app_client.actor_cookie({"id": "test"}) + # Break the signature + response = app_client.get("/", cookies={"ds_actor": cookie[:-1] + "."}) + assert None == app_client.ds._last_request.scope["actor"] + # Break the cookie format + cookie = app_client.ds.sign({"b": {"id": "test"}}, "actor") + response = app_client.get("/", cookies={"ds_actor": cookie}) + assert None == app_client.ds._last_request.scope["actor"] + + @pytest.mark.parametrize( "offset,expected", [((24 * 60 * 60), {"id": "test"}), (-(24 * 60 * 60), None),] ) From 80c18a18fc444b89cc12b73599d56e091f3a3c87 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 13:48:23 -0700 Subject: [PATCH 0133/1871] Configure code coverage, refs #841, #843 --- .coveragerc | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .coveragerc diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 00000000..6ca0fac8 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,2 @@ +[run] +omit = datasette/_version.py, datasette/utils/shutil_backport.py From cf7a2bdb404734910ec07abc7571351a2d934828 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 14:36:49 -0700 Subject: [PATCH 0134/1871] Action to run tests and upload coverage to codecov.io Closes #843. 
--- .github/workflows/test-coverage.yml | 41 +++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 .github/workflows/test-coverage.yml diff --git a/.github/workflows/test-coverage.yml b/.github/workflows/test-coverage.yml new file mode 100644 index 00000000..99c0526a --- /dev/null +++ b/.github/workflows/test-coverage.yml @@ -0,0 +1,41 @@ +name: Calculate test coverage + +on: + push: + branches: + - master + pull_request: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + steps: + - name: Check out datasette + uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v1 + with: + python-version: 3.8 + - uses: actions/cache@v2 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install Python dependencies + run: | + python -m pip install -e .[test] + python -m pip install pytest-cov + - name: Run tests + run: |- + ls -lah + cat .coveragerc + pytest --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term + ls -lah + - name: Upload coverage report + uses: codecov/codecov-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + file: coverage.xml From 0c27f10f9d2124f0f534c25612b58be20441c9d8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 16:41:26 -0700 Subject: [PATCH 0135/1871] Updated plugin examples to include datasette-psutil --- docs/plugins.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index 8add7352..113e6b24 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -884,6 +884,8 @@ The optional view function arguments are as follows: The function can either return a :ref:`internals_response` or it can return nothing and instead respond directly to the request using the ASGI ``send`` function (for advanced uses only). +Examples: `datasette-auth-github `__, `datasette-psutil `__ + .. _plugin_register_facet_classes: register_facet_classes() @@ -993,7 +995,7 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att return add_x_databases_header return wrap_with_databases_header -Examples: `datasette-auth-github `_, `datasette-search-all `_, `datasette-media `_ +Examples: `datasette-search-all `_, `datasette-media `_ .. 
_plugin_hook_startup: From a4ad5a504c161bc3b1caaa40b22e46d600f7d4fc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Jun 2020 17:26:02 -0700 Subject: [PATCH 0136/1871] Workaround for 'Too many open files' in test runs, refs #846 --- tests/fixtures.py | 3 +++ tests/test_api.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index e2f90f09..a4a96919 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -268,6 +268,9 @@ def make_app_client( "default_page_size": 50, "max_returned_rows": max_returned_rows or 100, "sql_time_limit_ms": sql_time_limit_ms or 200, + # Default is 3 but this results in "too many open files" + # errors when running the full test suite: + "num_sql_threads": 1, } ) ds = Datasette( diff --git a/tests/test_api.py b/tests/test_api.py index 1a54edec..322a0001 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1320,7 +1320,7 @@ def test_config_json(app_client): "suggest_facets": True, "default_cache_ttl": 5, "default_cache_ttl_hashed": 365 * 24 * 60 * 60, - "num_sql_threads": 3, + "num_sql_threads": 1, "cache_size_kb": 0, "allow_csv_stream": True, "max_csv_mb": 100, From d2aef9f7ef30fa20b1450cd181cf803f44fb4e21 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 09:21:15 -0700 Subject: [PATCH 0137/1871] Test illustrating POST against register_routes(), closes #853 --- tests/plugins/my_plugin.py | 7 +++++++ tests/test_plugins.py | 7 +++++++ 2 files changed, 14 insertions(+) diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 3f019a84..72736e84 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -162,10 +162,17 @@ def register_routes(): send, {"hello": "world"}, status=200, headers={"x-three": "1"} ) + async def post(request): + if request.method == "GET": + return Response.html(request.scope["csrftoken"]()) + else: + return Response.json(await request.post_vars()) + return [ (r"/one/$", one), (r"/two/(?P.*)$", two), (r"/three/$", three), + (r"/post/$", post), ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index bc759385..e3a234f2 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -568,6 +568,13 @@ def test_register_routes(app_client, path, body): assert body == response.text +def test_register_routes_post(app_client): + response = app_client.post("/post/", {"this is": "post data"}, csrftoken_from=True) + assert 200 == response.status + assert "csrftoken" in response.json + assert "post data" == response.json["this is"] + + def test_register_routes_asgi(app_client): response = app_client.get("/three/") assert {"hello": "world"} == response.json From 6151c25a5a8d566c109af296244b9267c536bd9a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 11:37:28 -0700 Subject: [PATCH 0138/1871] Respect existing scope["actor"] if set, closes #854 --- datasette/app.py | 3 ++- tests/fixtures.py | 1 + tests/plugins/my_plugin.py | 14 ++++++++++++++ tests/test_plugins.py | 5 +++++ 4 files changed, 22 insertions(+), 1 deletion(-) diff --git a/datasette/app.py b/datasette/app.py index ca2efa91..c684eabc 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -908,6 +908,7 @@ class DatasetteRouter(AsgiRouter): ): scope_modifications["scheme"] = "https" # Handle authentication + default_actor = scope.get("actor") or None actor = None for actor in pm.hook.actor_from_request( datasette=self.ds, request=Request(scope, receive) @@ -918,7 +919,7 @@ class DatasetteRouter(AsgiRouter): actor = await actor if actor: break 
- scope_modifications["actor"] = actor + scope_modifications["actor"] = actor or default_actor return await super().route_path( dict(scope, **scope_modifications), receive, send, path ) diff --git a/tests/fixtures.py b/tests/fixtures.py index a4a96919..612bee99 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -39,6 +39,7 @@ EXPECTED_PLUGINS = [ "version": None, "hooks": [ "actor_from_request", + "asgi_wrapper", "extra_body_script", "extra_css_urls", "extra_js_urls", diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index 72736e84..a86e3cbf 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -137,6 +137,20 @@ def actor_from_request(datasette, request): return None +@hookimpl +def asgi_wrapper(): + def wrap(app): + async def maybe_set_actor_in_scope(scope, recieve, send): + if b"_actor_in_scope" in scope["query_string"]: + scope = dict(scope, actor={"id": "from-scope"}) + print(scope) + await app(scope, recieve, send) + + return maybe_set_actor_in_scope + + return wrap + + @hookimpl def permission_allowed(actor, action): if action == "this_is_allowed": diff --git a/tests/test_plugins.py b/tests/test_plugins.py index e3a234f2..245c60f7 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -534,6 +534,11 @@ def test_actor_from_request_async(app_client): assert {"id": "bot2", "1+1": 2} == app_client.ds._last_request.scope["actor"] +def test_existing_scope_actor_respected(app_client): + app_client.get("/?_actor_in_scope=1") + assert {"id": "from-scope"} == app_client.ds._last_request.scope["actor"] + + @pytest.mark.asyncio @pytest.mark.parametrize( "action,expected", From 13216cb6bd715b3068b917bdeb1f1f24d159c34c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 13:40:33 -0700 Subject: [PATCH 0139/1871] Don't push alpha/beta tagged releases to Docker Hub Refs #807 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 5e328d7a..5aafe398 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,7 +32,7 @@ jobs: branch: master tags: true - stage: publish docker image - if: tag IS present + if: (tag IS present) AND NOT (tag =~ [ab]) python: 3.6 script: # Build and release to Docker Hub From c81f637d862a6b13ac4b07cef5a493b62e079c81 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 13:49:52 -0700 Subject: [PATCH 0140/1871] Documentation for alpha/beta release process, refs #807 --- docs/contributing.rst | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/docs/contributing.rst b/docs/contributing.rst index ba52839c..75c1c3b2 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -147,6 +147,8 @@ We increment ``minor`` for new features. We increment ``patch`` for bugfix releass. +:ref:`contributing_release_alpha_beta` may have an additional ``a0`` or ``b0`` prefix - the integer component will be incremented with each subsequent alpha or beta. + To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. 
An example `commit can be seen here `__:: # Update changelog @@ -180,3 +182,14 @@ Final steps once the release has deployed to https://pypi.org/project/datasette/ * Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases - you can convert the release notes to Markdown by copying and pasting the rendered HTML into this tool: https://euangoddard.github.io/clipboard2markdown/ * Manually kick off a build of the `stable` branch on Read The Docs: https://readthedocs.org/projects/datasette/builds/ + +.. _contributing_release_alpha_beta: + +Alpha and beta releases +----------------------- + +Alpha and beta releases are published to preview upcoming features that may not yet be stable - in particular to preview new plugin hooks. + +You are welcome to try these out, but please be aware that details may change before the final release. + +Please join `discussions on the issue tracker `__ to share your thoughts and experiences with on alpha and beta features that you try out. From dda932d818b34ccab11730a76554f0a3748d8348 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 13:58:09 -0700 Subject: [PATCH 0141/1871] Release notes for 0.45a0 Refs #834 #846 #854 #807 --- docs/changelog.rst | 12 ++++++++++++ docs/contributing.rst | 4 ++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b1e95bb7..705ba4d4 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,18 @@ Changelog ========= +.. _v0_45 alpha: + +0.45a0 (2020-06-18) +------------------- + +.. warning:: This is an **alpha** release. See :ref:`contributing_alpha_beta`. + +- New :ref:`plugin_hook_startup` plugin hook. (`#834 `__) +- Workaround for "Too many open files" error in test runs. (`#846 `__) +- Respect existing ``scope["actor"]`` if already set by ASGI middleware. (`#854 `__) +- New process for shipping :ref:`contributing_alpha_beta`. (`#807 `__) + .. _v0_44: 0.44 (2020-06-11) diff --git a/docs/contributing.rst b/docs/contributing.rst index 75c1c3b2..03af7644 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -147,7 +147,7 @@ We increment ``minor`` for new features. We increment ``patch`` for bugfix releass. -:ref:`contributing_release_alpha_beta` may have an additional ``a0`` or ``b0`` prefix - the integer component will be incremented with each subsequent alpha or beta. +:ref:`contributing_alpha_beta` may have an additional ``a0`` or ``b0`` prefix - the integer component will be incremented with each subsequent alpha or beta. To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__:: @@ -183,7 +183,7 @@ Final steps once the release has deployed to https://pypi.org/project/datasette/ * Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases - you can convert the release notes to Markdown by copying and pasting the rendered HTML into this tool: https://euangoddard.github.io/clipboard2markdown/ * Manually kick off a build of the `stable` branch on Read The Docs: https://readthedocs.org/projects/datasette/builds/ -.. _contributing_release_alpha_beta: +.. 
_contributing_alpha_beta: Alpha and beta releases ----------------------- From d2f387591bdda3949162e1802816be6ca1bb777a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 14:01:36 -0700 Subject: [PATCH 0142/1871] Better rST label for alpha release, refs #807 --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 705ba4d4..e117663f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,7 +4,7 @@ Changelog ========= -.. _v0_45 alpha: +.. _v0_45a0: 0.45a0 (2020-06-18) ------------------- From 6c2634583627bfab750c115cb13850252821d637 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 16:22:33 -0700 Subject: [PATCH 0143/1871] New plugin hook: canned_queries(), refs #852 --- datasette/app.py | 26 +++++++---- datasette/default_permissions.py | 75 ++++++++++++++++---------------- datasette/hookspecs.py | 5 +++ datasette/views/database.py | 4 +- datasette/views/table.py | 6 ++- docs/plugins.rst | 67 ++++++++++++++++++++++++++++ tests/fixtures.py | 2 + tests/plugins/my_plugin.py | 9 ++++ tests/plugins/my_plugin_2.py | 14 ++++++ tests/test_canned_write.py | 10 ++++- tests/test_html.py | 2 + tests/test_plugins.py | 31 +++++++++++++ 12 files changed, 202 insertions(+), 49 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index c684eabc..e131ba46 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -387,18 +387,28 @@ class Datasette: ).hexdigest()[:6] return self._app_css_hash - def get_canned_queries(self, database_name): + async def get_canned_queries(self, database_name, actor): queries = self.metadata("queries", database=database_name, fallback=False) or {} - names = queries.keys() - return [self.get_canned_query(database_name, name) for name in names] + for more_queries in pm.hook.canned_queries( + datasette=self, database=database_name, actor=actor, + ): + if callable(more_queries): + more_queries = more_queries() + if asyncio.iscoroutine(more_queries): + more_queries = await more_queries + queries.update(more_queries or {}) + # Fix any {"name": "select ..."} queries to be {"name": {"sql": "select ..."}} + for key in queries: + if not isinstance(queries[key], dict): + queries[key] = {"sql": queries[key]} + # Also make sure "name" is available: + queries[key]["name"] = key + return queries - def get_canned_query(self, database_name, query_name): - queries = self.metadata("queries", database=database_name, fallback=False) or {} + async def get_canned_query(self, database_name, query_name, actor): + queries = await self.get_canned_queries(database_name, actor) query = queries.get(query_name) if query: - if not isinstance(query, dict): - query = {"sql": query} - query["name"] = query_name return query def update_with_inherited_metadata(self, metadata): diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index e750acbf..0929a17a 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -4,41 +4,42 @@ from datasette.utils import actor_matches_allow @hookimpl(tryfirst=True) def permission_allowed(datasette, actor, action, resource): - if action == "permissions-debug": - if actor and actor.get("id") == "root": - return True - elif action == "view-instance": - allow = datasette.metadata("allow") - if allow is not None: + async def inner(): + if action == "permissions-debug": + if actor and actor.get("id") == "root": + return True + elif action == "view-instance": + allow = datasette.metadata("allow") + if allow 
is not None: + return actor_matches_allow(actor, allow) + elif action == "view-database": + database_allow = datasette.metadata("allow", database=resource) + if database_allow is None: + return True + return actor_matches_allow(actor, database_allow) + elif action == "view-table": + database, table = resource + tables = datasette.metadata("tables", database=database) or {} + table_allow = (tables.get(table) or {}).get("allow") + if table_allow is None: + return True + return actor_matches_allow(actor, table_allow) + elif action == "view-query": + # Check if this query has a "allow" block in metadata + database, query_name = resource + query = await datasette.get_canned_query(database, query_name, actor) + assert query is not None + allow = query.get("allow") + if allow is None: + return True return actor_matches_allow(actor, allow) - elif action == "view-database": - database_allow = datasette.metadata("allow", database=resource) - if database_allow is None: - return True - return actor_matches_allow(actor, database_allow) - elif action == "view-table": - database, table = resource - tables = datasette.metadata("tables", database=database) or {} - table_allow = (tables.get(table) or {}).get("allow") - if table_allow is None: - return True - return actor_matches_allow(actor, table_allow) - elif action == "view-query": - # Check if this query has a "allow" block in metadata - database, query_name = resource - queries_metadata = datasette.metadata("queries", database=database) - assert query_name in queries_metadata - if isinstance(queries_metadata[query_name], str): - return True - allow = queries_metadata[query_name].get("allow") - if allow is None: - return True - return actor_matches_allow(actor, allow) - elif action == "execute-sql": - # Use allow_sql block from database block, or from top-level - database_allow_sql = datasette.metadata("allow_sql", database=resource) - if database_allow_sql is None: - database_allow_sql = datasette.metadata("allow_sql") - if database_allow_sql is None: - return True - return actor_matches_allow(actor, database_allow_sql) + elif action == "execute-sql": + # Use allow_sql block from database block, or from top-level + database_allow_sql = datasette.metadata("allow_sql", database=resource) + if database_allow_sql is None: + database_allow_sql = datasette.metadata("allow_sql") + if database_allow_sql is None: + return True + return actor_matches_allow(actor, database_allow_sql) + + return inner diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 9fceee41..91feb49b 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -78,3 +78,8 @@ def actor_from_request(datasette, request): @hookspec def permission_allowed(datasette, actor, action, resource): "Check if actor is allowed to perfom this action - return True, False or None" + + +@hookspec +def canned_queries(datasette, database, actor): + "Return a dictonary of canned query definitions or an awaitable function that returns them" diff --git a/datasette/views/database.py b/datasette/views/database.py index 4fab2cfb..ad28fb63 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -70,7 +70,9 @@ class DatabaseView(DataView): tables.sort(key=lambda t: (t["hidden"], t["name"])) canned_queries = [] - for query in self.ds.get_canned_queries(database): + for query in ( + await self.ds.get_canned_queries(database, request.actor) + ).values(): visible, private = await check_visibility( self.ds, request.actor, "view-query", (database, query["name"]), ) diff --git 
a/datasette/views/table.py b/datasette/views/table.py index 91245293..1a55a495 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -223,7 +223,9 @@ class TableView(RowTableShared): async def post(self, request, db_name, table_and_format): # Handle POST to a canned query - canned_query = self.ds.get_canned_query(db_name, table_and_format) + canned_query = await self.ds.get_canned_query( + db_name, table_and_format, request.actor + ) assert canned_query, "You may only POST to a canned query" return await QueryView(self.ds).data( request, @@ -247,7 +249,7 @@ class TableView(RowTableShared): _next=None, _size=None, ): - canned_query = self.ds.get_canned_query(database, table) + canned_query = await self.ds.get_canned_query(database, table, request.actor) if canned_query: return await QueryView(self.ds).data( request, diff --git a/docs/plugins.rst b/docs/plugins.rst index 113e6b24..8444516c 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1031,6 +1031,73 @@ Potential use-cases: * Create database tables that a plugin needs * Validate the metadata configuration for a plugin on startup, and raise an error if it is invalid +.. _plugin_hook_canned_queries: + +canned_queries(datasette, database, actor) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries. + +``database`` - string + The name of the database. + +``actor`` - dictionary or None + The currently authenticated :ref:`authentication_actor`. + +Ues this hook to return a dictionary of additional :ref:`canned query ` definitions for the specified database. The return value should be the same shape as the JSON described in the :ref:`canned query ` documentation. + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def canned_queries(datasette, database): + if database == "mydb": + return { + "my_query": { + "sql": "select * from my_table where id > :min_id" + } + } + +The hook can alternatively return an awaitable function that returns a list. Here's an example that returns queries that have been stored in the ``saved_queries`` database table, if one exists: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def canned_queries(datasette, database): + async def inner(): + db = datasette.get_database(database) + if await db.table_exists("saved_queries"): + results = await db.execute("select name, sql from saved_queries") + return {result["name"]: { + "sql": result["sql"] + } for result in results} + return inner + +The actor parameter can be used to include the currently authenticated actor in your decision. Here's an example that returns saved queries that were saved by that actor: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def canned_queries(datasette, database, actor): + async def inner(): + db = datasette.get_database(database) + if actor is not None and await db.table_exists("saved_queries"): + results = await db.execute( + "select name, sql from saved_queries where actor_id = :id", { + "id": actor["id"] + } + ) + return {result["name"]: { + "sql": result["sql"] + } for result in results} + return inner + .. 
_plugin_hook_actor_from_request: actor_from_request(datasette, request) diff --git a/tests/fixtures.py b/tests/fixtures.py index 612bee99..9b28c283 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -40,6 +40,7 @@ EXPECTED_PLUGINS = [ "hooks": [ "actor_from_request", "asgi_wrapper", + "canned_queries", "extra_body_script", "extra_css_urls", "extra_js_urls", @@ -61,6 +62,7 @@ EXPECTED_PLUGINS = [ "hooks": [ "actor_from_request", "asgi_wrapper", + "canned_queries", "extra_js_urls", "extra_template_vars", "permission_allowed", diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py index a86e3cbf..7ed26908 100644 --- a/tests/plugins/my_plugin.py +++ b/tests/plugins/my_plugin.py @@ -193,3 +193,12 @@ def register_routes(): @hookimpl def startup(datasette): datasette._startup_hook_fired = True + + +@hookimpl +def canned_queries(datasette, database, actor): + return { + "from_hook": "select 1, '{}' as actor_id".format( + actor["id"] if actor else "null" + ) + } diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py index f4a082a0..556c8090 100644 --- a/tests/plugins/my_plugin_2.py +++ b/tests/plugins/my_plugin_2.py @@ -129,3 +129,17 @@ def startup(datasette): datasette._startup_hook_calculation = result.first()[0] return inner + + +@hookimpl +def canned_queries(datasette, database): + async def inner(): + return { + "from_async_hook": "select {}".format( + ( + await datasette.get_database(database).execute("select 1 + 1") + ).first()[0] + ) + } + + return inner diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index 4257806e..c36baa09 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -111,7 +111,13 @@ def test_canned_query_permissions_on_database_page(canned_write_client): query_names = [ q["name"] for q in canned_write_client.get("/data.json").json["queries"] ] - assert ["add_name", "add_name_specify_id", "update_name"] == query_names + assert [ + "add_name", + "add_name_specify_id", + "update_name", + "from_async_hook", + "from_hook", + ] == query_names # With auth shows four response = canned_write_client.get( @@ -124,6 +130,8 @@ def test_canned_query_permissions_on_database_page(canned_write_client): {"name": "add_name_specify_id", "private": False}, {"name": "delete_name", "private": True}, {"name": "update_name", "private": False}, + {"name": "from_async_hook", "private": False}, + {"name": "from_hook", "private": False}, ] == [ {"name": q["name"], "private": q["private"]} for q in response.json["queries"] ] diff --git a/tests/test_html.py b/tests/test_html.py index f9b18daa..7bc935b0 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -97,6 +97,8 @@ def test_database_page(app_client): ), ("/fixtures/pragma_cache_size", "pragma_cache_size"), ("/fixtures/neighborhood_search#fragment-goes-here", "Search neighborhoods"), + ("/fixtures/from_async_hook", "from_async_hook"), + ("/fixtures/from_hook", "from_hook"), ] == [(a["href"], a.text) for a in queries_ul.find_all("a")] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 245c60f7..4f44430e 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -591,3 +591,34 @@ async def test_startup(app_client): await app_client.ds.invoke_startup() assert app_client.ds._startup_hook_fired assert 2 == app_client.ds._startup_hook_calculation + + +def test_canned_queries(app_client): + queries = app_client.get("/fixtures.json").json["queries"] + queries_by_name = {q["name"]: q for q in queries} + assert { + "sql": "select 2", + "name": 
"from_async_hook", + "private": False, + } == queries_by_name["from_async_hook"] + assert { + "sql": "select 1, 'null' as actor_id", + "name": "from_hook", + "private": False, + } == queries_by_name["from_hook"] + + +def test_canned_queries_non_async(app_client): + response = app_client.get("/fixtures/from_hook.json?_shape=array") + assert [{"1": 1, "actor_id": "null"}] == response.json + + +def test_canned_queries_async(app_client): + response = app_client.get("/fixtures/from_async_hook.json?_shape=array") + assert [{"2": 2}] == response.json + + +def test_canned_queries_actor(app_client): + assert [{"1": 1, "actor_id": "bot"}] == app_client.get( + "/fixtures/from_hook.json?_bot=1&_shape=array" + ).json From 9216127ace8d80493f743a4ef4c469f83a3b81ce Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 16:39:43 -0700 Subject: [PATCH 0144/1871] Documentation tweak, refs #852 --- docs/plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index 8444516c..dce1bdf0 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1043,7 +1043,7 @@ canned_queries(datasette, database, actor) The name of the database. ``actor`` - dictionary or None - The currently authenticated :ref:`authentication_actor`. + The currently authenticated :ref:`actor `. Ues this hook to return a dictionary of additional :ref:`canned query ` definitions for the specified database. The return value should be the same shape as the JSON described in the :ref:`canned query ` documentation. From 0807c4200f6b31c804c476eb546ead3f875a2ecc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 16:40:45 -0700 Subject: [PATCH 0145/1871] Release notes for 0.45a1, refs #852 --- docs/changelog.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index e117663f..6f3af8ce 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,14 +4,15 @@ Changelog ========= -.. _v0_45a0: +.. _v0_45a1: -0.45a0 (2020-06-18) +0.45a1 (2020-06-18) ------------------- .. warning:: This is an **alpha** release. See :ref:`contributing_alpha_beta`. - New :ref:`plugin_hook_startup` plugin hook. (`#834 `__) +- New :ref:`plugin_hook_canned_queries` plugin hook. (`#852 `__) - Workaround for "Too many open files" error in test runs. (`#846 `__) - Respect existing ``scope["actor"]`` if already set by ASGI middleware. (`#854 `__) - New process for shipping :ref:`contributing_alpha_beta`. 
(`#807 `__) From b59b92b1b0517cf18fa748ff9d0a0bf86298dd43 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 16:52:06 -0700 Subject: [PATCH 0146/1871] Fix for tests - order was inconsistent, refs #852 --- tests/test_canned_write.py | 20 ++++++++++++-------- tests/test_html.py | 8 +++++--- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/tests/test_canned_write.py b/tests/test_canned_write.py index c36baa09..e33eed69 100644 --- a/tests/test_canned_write.py +++ b/tests/test_canned_write.py @@ -108,16 +108,16 @@ def test_vary_header(canned_write_client): def test_canned_query_permissions_on_database_page(canned_write_client): # Without auth only shows three queries - query_names = [ + query_names = { q["name"] for q in canned_write_client.get("/data.json").json["queries"] - ] - assert [ + } + assert { "add_name", "add_name_specify_id", "update_name", "from_async_hook", "from_hook", - ] == query_names + } == query_names # With auth shows four response = canned_write_client.get( @@ -129,12 +129,16 @@ def test_canned_query_permissions_on_database_page(canned_write_client): {"name": "add_name", "private": False}, {"name": "add_name_specify_id", "private": False}, {"name": "delete_name", "private": True}, - {"name": "update_name", "private": False}, {"name": "from_async_hook", "private": False}, {"name": "from_hook", "private": False}, - ] == [ - {"name": q["name"], "private": q["private"]} for q in response.json["queries"] - ] + {"name": "update_name", "private": False}, + ] == sorted( + [ + {"name": q["name"], "private": q["private"]} + for q in response.json["queries"] + ], + key=lambda q: q["name"], + ) def test_canned_query_permissions(canned_write_client): diff --git a/tests/test_html.py b/tests/test_html.py index 7bc935b0..1c7dce90 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -95,11 +95,13 @@ def test_database_page(app_client): "/fixtures/%F0%9D%90%9C%F0%9D%90%A2%F0%9D%90%AD%F0%9D%90%A2%F0%9D%90%9E%F0%9D%90%AC", "𝐜𝐢𝐭𝐢𝐞𝐬", ), - ("/fixtures/pragma_cache_size", "pragma_cache_size"), - ("/fixtures/neighborhood_search#fragment-goes-here", "Search neighborhoods"), ("/fixtures/from_async_hook", "from_async_hook"), ("/fixtures/from_hook", "from_hook"), - ] == [(a["href"], a.text) for a in queries_ul.find_all("a")] + ("/fixtures/neighborhood_search#fragment-goes-here", "Search neighborhoods"), + ("/fixtures/pragma_cache_size", "pragma_cache_size"), + ] == sorted( + [(a["href"], a.text) for a in queries_ul.find_all("a")], key=lambda p: p[0] + ) def test_invalid_custom_sql(app_client): From 64cc536b89b988b17e3ab853e4c64d9706543116 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 18 Jun 2020 17:03:23 -0700 Subject: [PATCH 0147/1871] Don't include prereleases in changelog badge --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 925d68d2..42eaaa81 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Datasette [![PyPI](https://img.shields.io/pypi/v/datasette.svg)](https://pypi.org/project/datasette/) -[![Changelog](https://img.shields.io/github/v/release/simonw/datasette?include_prereleases&label=changelog)](https://datasette.readthedocs.io/en/stable/changelog.html) +[![Changelog](https://img.shields.io/github/v/release/simonw/datasette?label=changelog)](https://datasette.readthedocs.io/en/stable/changelog.html) [![Python 3.x](https://img.shields.io/pypi/pyversions/datasette.svg?logo=python&logoColor=white)](https://pypi.org/project/datasette/) [![Travis 
CI](https://travis-ci.org/simonw/datasette.svg?branch=master)](https://travis-ci.org/simonw/datasette) [![Documentation Status](https://readthedocs.org/projects/datasette/badge/?version=latest)](http://datasette.readthedocs.io/en/latest/?badge=latest) From 55a6ffb93c57680e71a070416baae1129a0243b8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 19 Jun 2020 20:08:30 -0700 Subject: [PATCH 0148/1871] Link to datasette-saved-queries plugin, closes #852 --- docs/changelog.rst | 2 +- docs/ecosystem.rst | 6 +++++- docs/plugins.rst | 6 +++++- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 6f3af8ce..d580f03e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -12,7 +12,7 @@ Changelog .. warning:: This is an **alpha** release. See :ref:`contributing_alpha_beta`. - New :ref:`plugin_hook_startup` plugin hook. (`#834 `__) -- New :ref:`plugin_hook_canned_queries` plugin hook. (`#852 `__) +- New :ref:`plugin_hook_canned_queries` plugin hook. See `datasette-saved-queries `__ for an example of this hook in action. (`#852 `__) - Workaround for "Too many open files" error in test runs. (`#846 `__) - Respect existing ``scope["actor"]`` if already set by ASGI middleware. (`#854 `__) - New process for shipping :ref:`contributing_alpha_beta`. (`#807 `__) diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst index dcb5a887..f2da885c 100644 --- a/docs/ecosystem.rst +++ b/docs/ecosystem.rst @@ -157,12 +157,16 @@ datasette-leaflet-geojson `datasette-leaflet-geojson `__ looks out for columns containing GeoJSON formatted geographical information and displays them on a `Leaflet-powered `__ map. - datasette-pretty-json --------------------- `datasette-pretty-json `__ seeks out JSON values in Datasette's table browsing interface and pretty-prints them, making them easier to read. +datasette-saved-queries +----------------------- + +`datasette-saved-queries `__ lets users interactively save queries to a ``saved_queries`` table. They are then made available as additional :ref:`canned queries `. + datasette-haversine ------------------- diff --git a/docs/plugins.rst b/docs/plugins.rst index dce1bdf0..d2743419 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1028,9 +1028,11 @@ Or you can return an async function which will be awaited on startup. Use this o Potential use-cases: * Run some initialization code for the plugin -* Create database tables that a plugin needs +* Create database tables that a plugin needs on startup * Validate the metadata configuration for a plugin on startup, and raise an error if it is invalid +Example: `datasette-saved-queries `__ + .. _plugin_hook_canned_queries: canned_queries(datasette, database, actor) @@ -1098,6 +1100,8 @@ The actor parameter can be used to include the currently authenticated actor in } for result in results} return inner +Example: `datasette-saved-queries `__ + .. _plugin_hook_actor_from_request: actor_from_request(datasette, request) From d1640ba76b8f10830c56d8289f476fefde3bd1fb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 20 Jun 2020 08:48:39 -0700 Subject: [PATCH 0149/1871] Don't show prereleases on changelog badge --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 5334386f..fa5d7f87 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -6,7 +6,7 @@ datasette| .. |PyPI| image:: https://img.shields.io/pypi/v/datasette.svg :target: https://pypi.org/project/datasette/ -.. 
|Changelog| image:: https://img.shields.io/github/v/release/simonw/datasette?include_prereleases&label=changelog +.. |Changelog| image:: https://img.shields.io/github/v/release/simonw/datasette?label=changelog :target: https://datasette.readthedocs.io/en/stable/changelog.html .. |Python 3.x| image:: https://img.shields.io/pypi/pyversions/datasette.svg?logo=python&logoColor=white :target: https://pypi.org/project/datasette/ From 84cbf1766083a785f5ce5154d0805654a5314d10 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 20 Jun 2020 10:40:05 -0700 Subject: [PATCH 0150/1871] News: A cookiecutter template for writing Datasette plugins --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 42eaaa81..84d1dcd4 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover ## News + * 20th June 2020: [A cookiecutter template for writing Datasette plugins](https://simonwillison.net/2020/Jun/20/cookiecutter-plugins/) * 11th June 2020: [Datasette 0.44](http://datasette.readthedocs.io/en/latest/changelog.html#v0-44) - [Authentication and permissions](https://datasette.readthedocs.io/en/latest/authentication.html), [writable canned queries](https://datasette.readthedocs.io/en/latest/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more. * 28th May 2020: [Datasette 0.43](http://datasette.readthedocs.io/en/latest/changelog.html#v0-43) - Redesigned [register_output_renderer](https://datasette.readthedocs.io/en/latest/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. * 8th May 2020: [Datasette 0.42](http://datasette.readthedocs.io/en/latest/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. From e4216ff5035f57f2fb66031f105e41c3b9728bc1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 21 Jun 2020 14:55:17 -0700 Subject: [PATCH 0151/1871] Fixed rST warning --- docs/ecosystem.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst index f2da885c..7c8959dd 100644 --- a/docs/ecosystem.rst +++ b/docs/ecosystem.rst @@ -93,7 +93,7 @@ datasette-auth-tokens `datasette-auth-tokens `__ provides a mechanism for creating secret API tokens that can then be used with Datasette's :ref:`authentication` system. datasette-permissions-sql ---------------------- +------------------------- `datasette-permissions-sql `__ lets you configure Datasette permissions checks to use custom SQL queries, which means you can make permisison decisions based on data contained within your databases. 
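The ``canned_queries`` hook referenced above takes ``datasette``, ``database`` and ``actor`` arguments and returns extra queries to attach to that database. As a rough sketch of the shape a plugin like datasette-saved-queries takes — the database name, table and SQL here are illustrative assumptions, not taken from that plugin:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def canned_queries(datasette, database, actor):
        # Hypothetical example: offer one extra query on a "data" database,
        # and only when a signed-in actor is present.
        if database != "data" or actor is None:
            return None
        return {
            "recent_saved_queries": {
                "sql": "select name, sql from saved_queries order by rowid desc limit 10",
            }
        }

As the hook documentation notes, an ``async`` function can be returned instead when the plugin needs to run its own SQL before deciding which queries to offer.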
From 36e77e100632573e1cf907aba9462debac7928e9 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 21 Jun 2020 17:33:48 -0700 Subject: [PATCH 0152/1871] Move plugin hooks docs to plugin_hooks.rst, refs #687 --- docs/index.rst | 1 + docs/plugin_hooks.rst | 888 +++++++++++++++++++++++++++++++++++++++++ docs/plugins.rst | 889 ------------------------------------------ 3 files changed, 889 insertions(+), 889 deletions(-) create mode 100644 docs/plugin_hooks.rst diff --git a/docs/index.rst b/docs/index.rst index fa5d7f87..20a55b2c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -51,6 +51,7 @@ Contents introspection custom_templates plugins + plugin_hooks internals contributing changelog diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst new file mode 100644 index 00000000..19f076b9 --- /dev/null +++ b/docs/plugin_hooks.rst @@ -0,0 +1,888 @@ +.. _plugin_hooks: + +Plugin hooks +============ + +When you implement a plugin hook you can accept any or all of the parameters that are documented as being passed to that hook. For example, you can implement a ``render_cell`` plugin hook like this even though the hook definition defines more parameters than just ``value`` and ``column``: + +.. code-block:: python + + @hookimpl + def render_cell(value, column): + if column == "stars": + return "*" * int(value) + +The full list of available plugin hooks is as follows. + +.. _plugin_hook_prepare_connection: + +prepare_connection(conn, database, datasette) +--------------------------------------------- + +``conn`` - sqlite3 connection object + The connection that is being opened + +``database`` - string + The name of the database + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` + +This hook is called when a new SQLite database connection is created. You can +use it to `register custom SQL functions `_, +aggregates and collations. For example: + +.. code-block:: python + + from datasette import hookimpl + import random + + @hookimpl + def prepare_connection(conn): + conn.create_function('random_integer', 2, random.randint) + +This registers a SQL function called ``random_integer`` which takes two +arguments and can be called like this:: + + select random_integer(1, 10); + +Examples: `datasette-jellyfish `_, `datasette-jq `_, `datasette-haversine `__, `datasette-rure `__ + +.. _plugin_hook_prepare_jinja2_environment: + +prepare_jinja2_environment(env) +------------------------------- + +``env`` - jinja2 Environment + The template environment that is being prepared + +This hook is called with the Jinja2 environment that is used to evaluate +Datasette HTML templates. You can use it to do things like `register custom +template filters `_, for +example: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def prepare_jinja2_environment(env): + env.filters['uppercase'] = lambda u: u.upper() + +You can now use this filter in your custom templates like so:: + + Table name: {{ table|uppercase }} + +.. _plugin_hook_extra_css_urls: + +extra_css_urls(template, database, table, datasette) +---------------------------------------------------- + +``template`` - string + The template that is being rendered, e.g. 
``database.html`` + +``database`` - string or None + The name of the database + +``table`` - string or None + The name of the table + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` + +Return a list of extra CSS URLs that should be included on the page. These can +take advantage of the CSS class hooks described in :ref:`customization`. + +This can be a list of URLs: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def extra_css_urls(): + return [ + 'https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css' + ] + +Or a list of dictionaries defining both a URL and an +`SRI hash `_: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def extra_css_urls(): + return [{ + 'url': 'https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css', + 'sri': 'sha384-9gVQ4dYFwwWSjIDZnLEWnxCjeSWFphJiwGPXr1jddIhOegiu1FwO5qRGvFXOdJZ4', + }] + +Examples: `datasette-cluster-map `_, `datasette-vega `_ + +.. _plugin_hook_extra_js_urls: + +extra_js_urls(template, database, table, datasette) +--------------------------------------------------- + +Same arguments as ``extra_css_urls``. + +This works in the same way as ``extra_css_urls()`` but for JavaScript. You can +return either a list of URLs or a list of dictionaries: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def extra_js_urls(): + return [{ + 'url': 'https://code.jquery.com/jquery-3.3.1.slim.min.js', + 'sri': 'sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo', + }] + +You can also return URLs to files from your plugin's ``static/`` directory, if +you have one: + +.. code-block:: python + + from datasette import hookimpl + + @hookimpl + def extra_js_urls(): + return [ + '/-/static-plugins/your-plugin/app.js' + ] + +Examples: `datasette-cluster-map `_, `datasette-vega `_ + +.. _plugin_hook_publish_subcommand: + +publish_subcommand(publish) +--------------------------- + +``publish`` - Click publish command group + The Click command group for the ``datasette publish`` subcommand + +This hook allows you to create new providers for the ``datasette publish`` +command. Datasette uses this hook internally to implement the default ``now`` +and ``heroku`` subcommands, so you can read +`their source `_ +to see examples of this hook in action. + +Let's say you want to build a plugin that adds a ``datasette publish my_hosting_provider --api_key=xxx mydatabase.db`` publish command. Your implementation would start like this: + +.. code-block:: python + + from datasette import hookimpl + from datasette.publish.common import add_common_publish_arguments_and_options + import click + + + @hookimpl + def publish_subcommand(publish): + @publish.command() + @add_common_publish_arguments_and_options + @click.option( + "-k", + "--api_key", + help="API key for talking to my hosting provider", + ) + def my_hosting_provider( + files, + metadata, + extra_options, + branch, + template_dir, + plugins_dir, + static, + install, + plugin_secret, + version_note, + secret, + title, + license, + license_url, + source, + source_url, + about, + about_url, + api_key, + ): + # Your implementation goes here + +Examples: `datasette-publish-fly `_, `datasette-publish-now `_ + +.. 
_plugin_hook_render_cell: + +render_cell(value, column, table, database, datasette) +------------------------------------------------------ + +Lets you customize the display of values within table cells in the HTML table view. + +``value`` - string, integer or None + The value that was loaded from the database + +``column`` - string + The name of the column being rendered + +``table`` - string or None + The name of the table - or ``None`` if this is a custom SQL query + +``database`` - string + The name of the database + +``datasette`` - :ref:`internals_datasette` + You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)`` + +If your hook returns ``None``, it will be ignored. Use this to indicate that your hook is not able to custom render this particular value. + +If the hook returns a string, that string will be rendered in the table cell. + +If you want to return HTML markup you can do so by returning a ``jinja2.Markup`` object. + +Datasette will loop through all available ``render_cell`` hooks and display the value returned by the first one that does not return ``None``. + +Here is an example of a custom ``render_cell()`` plugin which looks for values that are a JSON string matching the following format:: + + {"href": "https://www.example.com/", "label": "Name"} + +If the value matches that pattern, the plugin returns an HTML link element: + +.. code-block:: python + + from datasette import hookimpl + import jinja2 + import json + + + @hookimpl + def render_cell(value): + # Render {"href": "...", "label": "..."} as link + if not isinstance(value, str): + return None + stripped = value.strip() + if not stripped.startswith("{") and stripped.endswith("}"): + return None + try: + data = json.loads(value) + except ValueError: + return None + if not isinstance(data, dict): + return None + if set(data.keys()) != {"href", "label"}: + return None + href = data["href"] + if not ( + href.startswith("/") or href.startswith("http://") + or href.startswith("https://") + ): + return None + return jinja2.Markup('{label}'.format( + href=jinja2.escape(data["href"]), + label=jinja2.escape(data["label"] or "") or " " + )) + +Examples: `datasette-render-binary `_, `datasette-render-markdown `_ + +.. 
_plugin_hook_extra_body_script: + +extra_body_script(template, database, table, view_name, datasette) +------------------------------------------------------------------ + +Extra JavaScript to be added to a ``") json_data = r.search(app_client.get(path).text).group(1) actual_data = json.loads(json_data) assert expected_extra_body_script == actual_data -def test_plugins_asgi_wrapper(app_client): +def test_hook_asgi_wrapper(app_client): response = app_client.get("/fixtures") assert "fixtures" == response.headers["x-databases"] -def test_plugins_extra_template_vars(restore_working_directory): +def test_hook_extra_template_vars(restore_working_directory): with make_app_client( template_dir=str(pathlib.Path(__file__).parent / "test_templates") ) as client: @@ -380,13 +380,13 @@ def test_view_names(view_names_client, path, view_name): assert "view_name:{}".format(view_name) == response.text -def test_register_output_renderer_no_parameters(app_client): +def test_hook_register_output_renderer_no_parameters(app_client): response = app_client.get("/fixtures/facetable.testnone") assert 200 == response.status assert b"Hello" == response.body -def test_register_output_renderer_all_parameters(app_client): +def test_hook_register_output_renderer_all_parameters(app_client): response = app_client.get("/fixtures/facetable.testall") assert 200 == response.status # Lots of 'at 0x103a4a690' in here - replace those so we can do @@ -436,19 +436,19 @@ def test_register_output_renderer_all_parameters(app_client): assert "pragma_cache_size" == json.loads(query_response.body)["query_name"] -def test_register_output_renderer_custom_status_code(app_client): +def test_hook_register_output_renderer_custom_status_code(app_client): response = app_client.get("/fixtures/pragma_cache_size.testall?status_code=202") assert 202 == response.status -def test_register_output_renderer_custom_content_type(app_client): +def test_hook_register_output_renderer_custom_content_type(app_client): response = app_client.get( "/fixtures/pragma_cache_size.testall?content_type=text/blah" ) assert "text/blah" == response.headers["content-type"] -def test_register_output_renderer_custom_headers(app_client): +def test_hook_register_output_renderer_custom_headers(app_client): response = app_client.get( "/fixtures/pragma_cache_size.testall?header=x-wow:1&header=x-gosh:2" ) @@ -456,7 +456,7 @@ def test_register_output_renderer_custom_headers(app_client): assert "2" == response.headers["x-gosh"] -def test_register_output_renderer_can_render(app_client): +def test_hook_register_output_renderer_can_render(app_client): response = app_client.get("/fixtures/facetable?_no_can_render=1") assert response.status == 200 links = ( @@ -492,7 +492,7 @@ def test_register_output_renderer_can_render(app_client): @pytest.mark.asyncio -async def test_prepare_jinja2_environment(app_client): +async def test_hook_prepare_jinja2_environment(app_client): template = app_client.ds.jinja_env.from_string( "Hello there, {{ a|format_numeric }}", {"a": 3412341} ) @@ -500,7 +500,7 @@ async def test_prepare_jinja2_environment(app_client): assert "Hello there, 3,412,341" == rendered -def test_publish_subcommand(): +def test_hook_publish_subcommand(): # This is hard to test properly, because publish subcommand plugins # cannot be loaded using the --plugins-dir mechanism - they need # to be installed using "pip install". 
So I'm cheating and taking @@ -509,7 +509,7 @@ def test_publish_subcommand(): assert ["cloudrun", "heroku"] == cli.publish.list_commands({}) -def test_register_facet_classes(app_client): +def test_hook_register_facet_classes(app_client): response = app_client.get( "/fixtures/compound_three_primary_keys.json?_dummy_facet=1" ) @@ -549,7 +549,7 @@ def test_register_facet_classes(app_client): ] == response.json["suggested_facets"] -def test_actor_from_request(app_client): +def test_hook_actor_from_request(app_client): app_client.get("/") # Should have no actor assert None == app_client.ds._last_request.scope["actor"] @@ -558,7 +558,7 @@ def test_actor_from_request(app_client): assert {"id": "bot"} == app_client.ds._last_request.scope["actor"] -def test_actor_from_request_async(app_client): +def test_hook_actor_from_request_async(app_client): app_client.get("/") # Should have no actor assert None == app_client.ds._last_request.scope["actor"] @@ -583,7 +583,7 @@ def test_existing_scope_actor_respected(app_client): ("no_match", None), ], ) -async def test_permission_allowed(app_client, action, expected): +async def test_hook_permission_allowed(app_client, action, expected): actual = await app_client.ds.permission_allowed( {"id": "actor"}, action, default=None ) @@ -605,20 +605,20 @@ def test_actor_json(app_client): ("/not-async/", "This was not async"), ], ) -def test_register_routes(app_client, path, body): +def test_hook_register_routes(app_client, path, body): response = app_client.get(path) assert 200 == response.status assert body == response.text -def test_register_routes_post(app_client): +def test_hook_register_routes_post(app_client): response = app_client.post("/post/", {"this is": "post data"}, csrftoken_from=True) assert 200 == response.status assert "csrftoken" in response.json assert "post data" == response.json["this is"] -def test_register_routes_csrftoken(restore_working_directory, tmpdir_factory): +def test_hook_register_routes_csrftoken(restore_working_directory, tmpdir_factory): templates = tmpdir_factory.mktemp("templates") (templates / "csrftoken_form.html").write_text( "CSRFTOKEN: {{ csrftoken() }}", "utf-8" @@ -629,13 +629,13 @@ def test_register_routes_csrftoken(restore_working_directory, tmpdir_factory): assert "CSRFTOKEN: {}".format(expected_token) == response.text -def test_register_routes_asgi(app_client): +def test_hook_register_routes_asgi(app_client): response = app_client.get("/three/") assert {"hello": "world"} == response.json assert "1" == response.headers["x-three"] -def test_register_routes_add_message(app_client): +def test_hook_register_routes_add_message(app_client): response = app_client.get("/add-message/") assert 200 == response.status assert "Added message" == response.text @@ -643,7 +643,7 @@ def test_register_routes_add_message(app_client): assert [["Hello from messages", 1]] == decoded -def test_register_routes_render_message(restore_working_directory, tmpdir_factory): +def test_hook_register_routes_render_message(restore_working_directory, tmpdir_factory): templates = tmpdir_factory.mktemp("templates") (templates / "render_message.html").write_text('{% extends "base.html" %}', "utf-8") with make_app_client(template_dir=templates) as client: @@ -654,13 +654,13 @@ def test_register_routes_render_message(restore_working_directory, tmpdir_factor @pytest.mark.asyncio -async def test_startup(app_client): +async def test_hook_startup(app_client): await app_client.ds.invoke_startup() assert app_client.ds._startup_hook_fired assert 2 == 
app_client.ds._startup_hook_calculation -def test_canned_queries(app_client): +def test_hook_canned_queries(app_client): queries = app_client.get("/fixtures.json").json["queries"] queries_by_name = {q["name"]: q for q in queries} assert { @@ -675,23 +675,23 @@ def test_canned_queries(app_client): } == queries_by_name["from_hook"] -def test_canned_queries_non_async(app_client): +def test_hook_canned_queries_non_async(app_client): response = app_client.get("/fixtures/from_hook.json?_shape=array") assert [{"1": 1, "actor_id": "null"}] == response.json -def test_canned_queries_async(app_client): +def test_hook_canned_queries_async(app_client): response = app_client.get("/fixtures/from_async_hook.json?_shape=array") assert [{"2": 2}] == response.json -def test_canned_queries_actor(app_client): +def test_hook_canned_queries_actor(app_client): assert [{"1": 1, "actor_id": "bot"}] == app_client.get( "/fixtures/from_hook.json?_bot=1&_shape=array" ).json -def test_register_magic_parameters(restore_working_directory): +def test_hook_register_magic_parameters(restore_working_directory): with make_app_client( extra_databases={"data.db": "create table logs (line text)"}, metadata={ @@ -719,7 +719,7 @@ def test_register_magic_parameters(restore_working_directory): assert 4 == new_uuid.count("-") -def test_forbidden(restore_working_directory): +def test_hook_forbidden(restore_working_directory): with make_app_client( extra_databases={"data2.db": "create table logs (line text)"}, metadata={"allow": {}}, From 3a4c8ed36aa97211e46849d32a09f2f386f342dd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 16 Aug 2020 11:09:53 -0700 Subject: [PATCH 0263/1871] Added columns argument to various extra_ plugin hooks, closes #938 --- datasette/app.py | 5 +- datasette/hookspecs.py | 12 +- docs/plugin_hooks.rst | 254 +++++++++++++++++-------------------- tests/plugins/my_plugin.py | 13 +- tests/test_plugins.py | 25 +++- 5 files changed, 159 insertions(+), 150 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 180ba246..2185a3ab 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -713,6 +713,7 @@ class Datasette: template=template.name, database=context.get("database"), table=context.get("table"), + columns=context.get("columns"), view_name=view_name, request=request, datasette=self, @@ -729,6 +730,7 @@ class Datasette: template=template.name, database=context.get("database"), table=context.get("table"), + columns=context.get("columns"), view_name=view_name, request=request, datasette=self, @@ -779,9 +781,10 @@ class Datasette: template=template.name, database=context.get("database"), table=context.get("table"), - datasette=self, + columns=context.get("columns"), view_name=view_name, request=request, + datasette=self, ): if callable(hook): hook = hook() diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 0e9c20cf..f7e90e4e 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -26,22 +26,26 @@ def prepare_jinja2_environment(env): @hookspec -def extra_css_urls(template, database, table, view_name, request, datasette): +def extra_css_urls(template, database, table, columns, view_name, request, datasette): "Extra CSS URLs added by this plugin" @hookspec -def extra_js_urls(template, database, table, view_name, request, datasette): +def extra_js_urls(template, database, table, columns, view_name, request, datasette): "Extra JavaScript URLs added by this plugin" @hookspec -def extra_body_script(template, database, table, view_name, request, datasette): +def 
extra_body_script( + template, database, table, columns, view_name, request, datasette +): "Extra JavaScript code to be included in ") json_data = r.search(app_client.get(path).text).group(1) actual_data = json.loads(json_data) @@ -286,6 +308,7 @@ def test_hook_extra_template_vars(restore_working_directory): assert { "template": "show_json.html", "scope_path": "/-/metadata", + "columns": None, } == extra_template_vars extra_template_vars_from_awaitable = json.loads( Soup(response.body, "html.parser") From 8e7e6458a6787a06a4488798bd643dd7728b8a5b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 16 Aug 2020 11:24:39 -0700 Subject: [PATCH 0264/1871] Fix bug with ?_nl=on and binary data, closes #914 --- datasette/renderer.py | 2 +- tests/fixtures.py | 3 ++- tests/test_api.py | 31 ++++++++++++++++++++++++++++++- tests/test_html.py | 9 +++++++-- 4 files changed, 40 insertions(+), 5 deletions(-) diff --git a/datasette/renderer.py b/datasette/renderer.py index 3f921fe7..27a5092f 100644 --- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -84,7 +84,7 @@ def json_renderer(args, data, view_name): # Handle _nl option for _shape=array nl = args.get("_nl", "") if nl and shape == "array": - body = "\n".join(json.dumps(item) for item in data) + body = "\n".join(json.dumps(item, cls=CustomJSONEncoder) for item in data) content_type = "text/plain" else: body = json.dumps(data, cls=CustomJSONEncoder) diff --git a/tests/fixtures.py b/tests/fixtures.py index 139eff83..5bd063d9 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -663,7 +663,8 @@ CREATE VIEW searchable_view_configured_by_metadata AS ) ) TABLE_PARAMETERIZED_SQL = [ - ("insert into binary_data (data) values (?);", [b"this is binary data"]) + ("insert into binary_data (data) values (?);", [b"\x15\x1c\x02\xc7\xad\x05\xfe"]), + ("insert into binary_data (data) values (?);", [b"\x15\x1c\x03\xc7\xad\x05\xfe"]), ] EXTRA_DATABASE_SQL = """ diff --git a/tests/test_api.py b/tests/test_api.py index 1f93c1a7..22fa87d4 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -105,7 +105,7 @@ def test_database_page(app_client): "name": "binary_data", "columns": ["data"], "primary_keys": [], - "count": 1, + "count": 2, "hidden": False, "fts_table": None, "foreign_keys": {"incoming": [], "outgoing": []}, @@ -1793,3 +1793,32 @@ def test_null_foreign_keys_are_not_expanded(app_client): def test_inspect_file_used_for_count(app_client_immutable_and_inspect_file): response = app_client_immutable_and_inspect_file.get("/fixtures/sortable.json") assert response.json["filtered_table_rows_count"] == 100 + + +@pytest.mark.parametrize( + "path,expected_json,expected_text", + [ + ( + "/fixtures/binary_data.json?_shape=array", + [ + {"rowid": 1, "data": {"$base64": True, "encoded": "FRwCx60F/g=="}}, + {"rowid": 2, "data": {"$base64": True, "encoded": "FRwDx60F/g=="}}, + ], + None, + ), + ( + "/fixtures/binary_data.json?_shape=array&_nl=on", + None, + ( + '{"rowid": 1, "data": {"$base64": true, "encoded": "FRwCx60F/g=="}}\n' + '{"rowid": 2, "data": {"$base64": true, "encoded": "FRwDx60F/g=="}}' + ), + ), + ], +) +def test_binary_data_in_json(app_client, path, expected_json, expected_text): + response = app_client.get(path) + if expected_json: + assert response.json == expected_json + else: + assert response.text == expected_text diff --git a/tests/test_html.py b/tests/test_html.py index 89aa4d06..1a12b3ce 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1134,8 +1134,13 @@ def test_binary_data_display(app_client): [ '1', '1', - 
'<Binary\xa0data:\xa019\xa0bytes>', - ] + '<Binary\xa0data:\xa07\xa0bytes>', + ], + [ + '2', + '2', + '<Binary\xa0data:\xa07\xa0bytes>', + ], ] assert expected_tds == [ [str(td) for td in tr.select("td")] for tr in table.select("tbody tr") From 52eabb019d4051084b21524bd0fd9c2731126985 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 16 Aug 2020 11:56:31 -0700 Subject: [PATCH 0265/1871] Release 0.48 Refs #939, #938, #935, #914 --- README.md | 1 + docs/changelog.rst | 12 ++++++++++++ docs/internals.rst | 2 ++ 3 files changed, 15 insertions(+) diff --git a/README.md b/README.md index 9b49cc14..ee3246a5 100644 --- a/README.md +++ b/README.md @@ -23,6 +23,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover ## News + * 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. * 11th August 2020: [Datasette 0.47](https://docs.datasette.io/en/stable/changelog.html#v0-47) - Datasette can now be installed using Homebrew! `brew install simonw/datasette/datasette`. Also new: `datasette install name-of-plugin` and `datasette uninstall name-of-plugin` commands, and `datasette --get '/-/versions.json'` to output the result of Datasette HTTP calls on the command-line. * 9th August 2020: [Datasette 0.46](https://docs.datasette.io/en/stable/changelog.html#v0-46) - security fix relating to CSRF protection for writable canned queries, a new logo, new debugging tools, improved file downloads and more. * 6th August 2020: [GraphQL in Datasette with the new datasette-graphql plugin](https://simonwillison.net/2020/Aug/7/datasette-graphql/) diff --git a/docs/changelog.rst b/docs/changelog.rst index bf53b6f3..d18dae80 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,18 @@ Changelog ========= +.. _v0_48: + +0.48 (2020-08-16) +----------------- + +- Datasette documentation now lives at `docs.datasette.io `__. +- ``db.is_mutable`` property is now documented and tested, see :ref:`internals_database_introspection`. +- The ``extra_template_vars``, ``extra_css_urls``, ``extra_js_urls`` and ``extra_body_script`` plugin hooks now all accept the same arguments. See :ref:`plugin_hook_extra_template_vars` for details. (`#939 `__) +- Those hooks now accept a new ``columns`` argument detailing the table columns that will be rendered on that page. (`#938 `__) +- Fixed bug where plugins calling ``db.execute_write_fn()`` could hang Datasette if the connection failed. (`#935 `__) +- Fixed bug with the ``?_nl=on`` output option and binary data. (`#914 `__) + .. _v0_47_3: 0.47.3 (2020-08-15) diff --git a/docs/internals.rst b/docs/internals.rst index f8d4a136..ff7e883c 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -466,6 +466,8 @@ Here's an example of ``block=True`` in action: except Exception as e: print("An error occurred:", e) +.. 
_internals_database_introspection: + Database introspection ---------------------- From 5e0b72247ecab4ce0fcec599b77a83d73a480872 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 17 Aug 2020 22:09:34 -0700 Subject: [PATCH 0266/1871] Run CI on GitHub Actions, not Travis * Run CI on GitHub Actions, not Travis - refs #940 * Update documentation refs to Travis * Release action now runs parallel tests, then pushes to PyPI, then Docker Hub --- .dockerignore | 1 - .github/workflows/publish.yml | 72 +++++++++++++++++++++++++++++++++++ .github/workflows/test.yml | 29 ++++++++++++++ .travis.yml | 47 ----------------------- README.md | 2 +- docs/contributing.rst | 2 +- docs/index.rst | 4 +- setup.py | 2 +- 8 files changed, 106 insertions(+), 53 deletions(-) create mode 100644 .github/workflows/publish.yml create mode 100644 .github/workflows/test.yml delete mode 100644 .travis.yml diff --git a/.dockerignore b/.dockerignore index 938173e9..490f509e 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,7 +3,6 @@ .eggs .gitignore .ipynb_checkpoints -.travis.yml build *.spec *.egg-info diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000..4e554eda --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,72 @@ +name: Publish Python Package + +on: + release: + types: [created] + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.6, 3.7, 3.8] + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - uses: actions/cache@v2 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install dependencies + run: | + pip install -e '.[test]' + - name: Run tests + run: | + pytest + deploy: + runs-on: ubuntu-latest + needs: [test] + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.8' + - uses: actions/cache@v2 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-publish-pip- + - name: Install dependencies + run: | + pip install setuptools wheel twine + - name: Publish + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} + run: | + python setup.py sdist bdist_wheel + twine upload dist/* + deploy_docker: + runs-on: ubuntu-latest + needs: [deploy] + steps: + - uses: actions/checkout@v2 + - name: Build and push to Docker Hub + env: + DOCKER_USER: ${{ secrets.DOCKER_USER }} + DOCKER_PASS: ${{ secrets.DOCKER_PASS }} + run: |- + docker login -u $DOCKER_USER -p $DOCKER_PASS + export REPO=datasetteproject/datasette + docker build -f Dockerfile -t $REPO::${GITHUB_REF#refs/tags/} . 
+ docker tag $REPO::${GITHUB_REF#refs/tags/} $REPO:latest + docker push $REPO diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000..74e56e13 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,29 @@ +name: Test + +on: [push] + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.6, 3.7, 3.8] + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - uses: actions/cache@v2 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install dependencies + run: | + pip install -e '.[test]' + - name: Run tests + run: | + pytest diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 181bc3f3..00000000 --- a/.travis.yml +++ /dev/null @@ -1,47 +0,0 @@ -language: python -dist: xenial - -branches: - except: - - master - -# 3.6 is listed first so it gets used for the later build stages -python: - - "3.6" - - "3.7" - - "3.8" - -# Executed for 3.5 AND 3.5 as the first "test" stage: -script: - - pip install -U pip wheel - - pip install .[test] - - pytest - -cache: - directories: - - $HOME/.cache/pip - -# This defines further stages that execute after the tests -jobs: - include: - - stage: release tagged version - if: tag IS present - python: 3.6 - deploy: - - provider: pypi - user: simonw - distributions: "sdist bdist_wheel" - password: ${PYPI_PASSWORD} - on: - branch: master - tags: true - - stage: publish docker image - if: (tag IS present) AND NOT (tag =~ [ab]) - python: 3.6 - script: - # Build and release to Docker Hub - - docker login -u $DOCKER_USER -p $DOCKER_PASS - - export REPO=datasetteproject/datasette - - docker build -f Dockerfile -t $REPO:$TRAVIS_TAG . - - docker tag $REPO:$TRAVIS_TAG $REPO:latest - - docker push $REPO diff --git a/README.md b/README.md index ee3246a5..38ea7f79 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ [![PyPI](https://img.shields.io/pypi/v/datasette.svg)](https://pypi.org/project/datasette/) [![Changelog](https://img.shields.io/github/v/release/simonw/datasette?label=changelog)](https://docs.datasette.io/en/stable/changelog.html) [![Python 3.x](https://img.shields.io/pypi/pyversions/datasette.svg?logo=python&logoColor=white)](https://pypi.org/project/datasette/) -[![Travis CI](https://travis-ci.org/simonw/datasette.svg?branch=main)](https://travis-ci.org/simonw/datasette) +[![Tests](https://github.com/simonw/datasette/workflows/Test/badge.svg)](https://github.com/simonw/datasette/actions?query=workflow%3ATest) [![Documentation Status](https://readthedocs.org/projects/datasette/badge/?version=latest)](https://docs.datasette.io/en/latest/?badge=latest) [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/simonw/datasette/blob/main/LICENSE) [![docker: datasette](https://img.shields.io/badge/docker-datasette-blue)](https://hub.docker.com/r/datasetteproject/datasette) diff --git a/docs/contributing.rst b/docs/contributing.rst index 118146cf..95663dbc 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -126,7 +126,7 @@ Now browse to ``http://localhost:8000/`` to view the documentation. Any edits yo Release process --------------- -Datasette releases are performed using tags. 
When a new version tag is pushed to GitHub, a `Travis CI task `__ will perform the following: +Datasette releases are performed using tags. When a new release is published on GitHub, a `GitHub Action workflow `__ will perform the following: * Run the unit tests against all supported Python versions. If the tests pass... * Build a Docker image of the release and push a tag to https://hub.docker.com/r/datasetteproject/datasette diff --git a/docs/index.rst b/docs/index.rst index f9f2f0bb..946fa542 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -10,8 +10,8 @@ datasette| :target: https://docs.datasette.io/en/stable/changelog.html .. |Python 3.x| image:: https://img.shields.io/pypi/pyversions/datasette.svg?logo=python&logoColor=white :target: https://pypi.org/project/datasette/ -.. |Travis CI| image:: https://travis-ci.org/simonw/datasette.svg?branch=main - :target: https://travis-ci.org/simonw/datasette +.. |Tests| image:: https://github.com/simonw/datasette/workflows/Test/badge.svg + :target: https://github.com/simonw/datasette/actions?query=workflow%3ATest .. |License| image:: https://img.shields.io/badge/license-Apache%202.0-blue.svg :target: https://github.com/simonw/datasette/blob/main/LICENSE .. |docker: datasette| image:: https://img.shields.io/badge/docker-datasette-blue diff --git a/setup.py b/setup.py index bbd0aa8b..d9526149 100644 --- a/setup.py +++ b/setup.py @@ -38,7 +38,7 @@ setup( "Live demo": "https://latest.datasette.io/", "Source code": "https://github.com/simonw/datasette", "Issues": "https://github.com/simonw/datasette/issues", - "CI": "https://travis-ci.org/simonw/datasette", + "CI": "https://github.com/simonw/datasette/actions?query=workflow%3ATest", }, packages=find_packages(exclude=("tests",)), package_data={"datasette": ["templates/*.html"]}, From b21ed237ab940768574c834aa5a7130724bd3a2d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Tue, 18 Aug 2020 13:49:13 -0700 Subject: [PATCH 0267/1871] publish heroku now deploys with Python 3.8.5 --- datasette/publish/heroku.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index 6cda68da..24393b90 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -170,7 +170,7 @@ def temporary_heroku_directory( if metadata_content: open("metadata.json", "w").write(json.dumps(metadata_content, indent=2)) - open("runtime.txt", "w").write("python-3.8.3") + open("runtime.txt", "w").write("python-3.8.5") if branch: install = [ From 69033c6ec4a76d720e5c866aaa43b175c5ec1d8b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 19 Aug 2020 10:20:41 -0700 Subject: [PATCH 0268/1871] datasette install --upgrade option, closes #945 --- datasette/cli.py | 11 +++++++++-- docs/plugins.rst | 10 +++++++++- tests/test_cli.py | 9 +++++++++ 3 files changed, 27 insertions(+), 3 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index f3455f72..8dbc97c4 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -235,9 +235,16 @@ def package( @cli.command() @click.argument("packages", nargs=-1, required=True) -def install(packages): +@click.option( + "-U", "--upgrade", is_flag=True, help="Upgrade packages to latest version" +) +def install(packages, upgrade): "Install Python packages - e.g. 
Datasette plugins - into the same environment as Datasette" - sys.argv = ["pip", "install"] + list(packages) + args = ["pip", "install"] + if upgrade: + args += ["--upgrade"] + args += list(packages) + sys.argv = args run_module("pip", run_name="__main__") diff --git a/docs/plugins.rst b/docs/plugins.rst index e67c77b3..1c0dd588 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -43,7 +43,15 @@ You can uninstall plugins with ``datasette uninstall``:: datasette uninstall datasette-vega -These ommands are thin wrappers around ``pip install`` and ``pip uninstall``, which ensure they run ``pip`` in the same virtual environment as Datasette itself. +You can upgrade plugins with ``datasette install --upgrade`` or ``datasette install -U``:: + + datasette install -U datasette-vega + +This command can also be used to upgrade Datasette itself to the latest released version:: + + datasette install -U datasette + +These commands are thin wrappers around ``pip install`` and ``pip uninstall``, which ensure they run ``pip`` in the same virtual environment as Datasette itself. One-off plugins using --plugins-dir ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests/test_cli.py b/tests/test_cli.py index 38bb8834..dc5229cd 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -125,6 +125,15 @@ def test_install(run_module): ] +@pytest.mark.parametrize("flag", ["-U", "--upgrade"]) +@mock.patch("datasette.cli.run_module") +def test_install_upgrade(run_module, flag): + runner = CliRunner() + runner.invoke(cli, ["install", flag, "datasette"]) + run_module.assert_called_once_with("pip", run_name="__main__") + assert sys.argv == ["pip", "install", "--upgrade", "datasette"] + + @mock.patch("datasette.cli.run_module") def test_uninstall(run_module): runner = CliRunner() From 86aefc39c5aca01b00dbc57ba386a6743c21fb46 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 19 Aug 2020 10:22:33 -0700 Subject: [PATCH 0269/1871] Fixed undefined reference in index.rst --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 946fa542..db87f029 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,7 +1,7 @@ Datasette ========= -|PyPI| |Changelog| |Python 3.x| |Travis CI| |License| |docker: +|PyPI| |Changelog| |Python 3.x| |Tests| |License| |docker: datasette| .. 
|PyPI| image:: https://img.shields.io/pypi/v/datasette.svg From 799ecae94824640bdff21f86997f69844048d5c3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 27 Aug 2020 21:02:50 -0700 Subject: [PATCH 0270/1871] register_output_renderer can now return Response, closes #953 --- datasette/views/base.py | 18 +++++++++++------- docs/plugin_hooks.rst | 18 +++++++++--------- tests/plugins/register_output_renderer.py | 8 ++++++++ tests/test_html.py | 2 ++ tests/test_plugins.py | 12 ++++++++++++ 5 files changed, 42 insertions(+), 16 deletions(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index a1f38f21..fa730af8 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -455,13 +455,17 @@ class DataView(BaseView): result = await result if result is None: raise NotFound("No data") - - r = Response( - body=result.get("body"), - status=result.get("status_code", 200), - content_type=result.get("content_type", "text/plain"), - headers=result.get("headers"), - ) + if isinstance(result, dict): + r = Response( + body=result.get("body"), + status=result.get("status_code", 200), + content_type=result.get("content_type", "text/plain"), + headers=result.get("headers"), + ) + elif isinstance(result, Response): + r = result + else: + assert False, "{} should be dict or Response".format(result) else: extras = {} if callable(extra_template_data): diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst index 96a1cd7f..fc710a2b 100644 --- a/docs/plugin_hooks.rst +++ b/docs/plugin_hooks.rst @@ -455,7 +455,9 @@ When a request is received, the ``"render"`` callback function is called with ze ``view_name`` - string The name of the current view being called. ``index``, ``database``, ``table``, and ``row`` are the most important ones. -The callback function can return ``None``, if it is unable to render the data, or a dictionary with the following keys: +The callback function can return ``None``, if it is unable to render the data, or a :ref:`internals_response` that will be returned to the caller. + +It can also return a dictionary with the following keys. This format is **deprecated** as-of Datasette 0.49 and will be removed by Datasette 1.0. ``body`` - string or bytes, optional The response body, default empty @@ -474,9 +476,7 @@ A simple example of an output renderer callback function: .. 
code-block:: python def render_demo(): - return { - "body": "Hello World" - } + return Response.text("Hello World") Here is a more complex example: @@ -490,11 +490,11 @@ Here is a more complex example: lines.append("=" * len(first_row)) for row in rows: lines.append(" | ".join(row)) - return { - "body": "\n".join(lines), - "content_type": "text/plain; charset=utf-8", - "headers": {"x-sqlite-version": result.first()[0]}, - } + return Response( + "\n".join(lines), + content_type="text/plain; charset=utf-8", + headers={"x-sqlite-version": result.first()[0]} + ) And here is an example ``can_render`` function which returns ``True`` only if the query results contain the columns ``atom_id``, ``atom_title`` and ``atom_updated``: diff --git a/tests/plugins/register_output_renderer.py b/tests/plugins/register_output_renderer.py index 82b60d01..cfe15215 100644 --- a/tests/plugins/register_output_renderer.py +++ b/tests/plugins/register_output_renderer.py @@ -1,4 +1,5 @@ from datasette import hookimpl +from datasette.utils.asgi import Response import json @@ -56,6 +57,12 @@ def render_test_no_parameters(): return {"body": "Hello"} +async def render_response(request): + if request.args.get("_broken"): + return "this should break" + return Response.json({"this_is": "json"}) + + @hookimpl def register_output_renderer(datasette): return [ @@ -65,4 +72,5 @@ def register_output_renderer(datasette): "can_render": can_render, }, {"extension": "testnone", "callback": render_test_no_parameters}, + {"extension": "testresponse", "render": render_response}, ] diff --git a/tests/test_html.py b/tests/test_html.py index 1a12b3ce..aec4db1d 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -559,6 +559,7 @@ def test_table_csv_json_export_interface(app_client): "simple_primary_key.json?id__gt=2", "simple_primary_key.testall?id__gt=2", "simple_primary_key.testnone?id__gt=2", + "simple_primary_key.testresponse?id__gt=2", "simple_primary_key.csv?id__gt=2&_size=max", "#export", ] @@ -597,6 +598,7 @@ def test_csv_json_export_links_include_labels_if_foreign_keys(app_client): "facetable.json?_labels=on", "facetable.testall?_labels=on", "facetable.testnone?_labels=on", + "facetable.testresponse?_labels=on", "facetable.csv?_labels=on&_size=max", "#export", ] diff --git a/tests/test_plugins.py b/tests/test_plugins.py index c535810c..f2017f07 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -479,6 +479,18 @@ def test_hook_register_output_renderer_custom_headers(app_client): assert "2" == response.headers["x-gosh"] +def test_hook_register_output_renderer_returning_response(app_client): + response = app_client.get("/fixtures/facetable.testresponse") + assert 200 == response.status + assert response.json == {"this_is": "json"} + + +def test_hook_register_output_renderer_returning_broken_value(app_client): + response = app_client.get("/fixtures/facetable.testresponse?_broken=1") + assert 500 == response.status + assert "this should break should be dict or Response" in response.text + + def test_hook_register_output_renderer_can_render(app_client): response = app_client.get("/fixtures/facetable?_no_can_render=1") assert response.status == 200 From 7178126d902e2cfca606be0b0cff96c6c679c5b8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 28 Aug 2020 16:12:47 -0700 Subject: [PATCH 0271/1871] Release notes for 0.49a0 Refs #953, #945 --- docs/changelog.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index d18dae80..74426f52 100644 --- 
a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,17 @@ Changelog ========= +.. _v0_49a0: + +0.49a0 (2020-08-28) +------------------- + +.. warning:: This is an **alpha** release. See :ref:`contributing_alpha_beta`. + +- ``register_output_renderer()`` render functions can now return a ``Response``. (`#953 `__) +- New ``--upgrade`` option for ``datasette install``. (`#945 `__) +- ``datasette publish heroku`` now deploys using Python 3.8.5 + .. _v0_48: 0.48 (2020-08-16) From c36e287d71d68ecb2a45e9808eede15f19f931fb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 28 Aug 2020 18:18:52 -0700 Subject: [PATCH 0272/1871] Don't deploy alpha/betas to Docker Hub Refs #956 --- .github/workflows/publish.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 4e554eda..e538a463 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -58,6 +58,8 @@ jobs: deploy_docker: runs-on: ubuntu-latest needs: [deploy] + if: | + !(contains(github.ref, "a") || contains(github.ref, "b")) steps: - uses: actions/checkout@v2 - name: Build and push to Docker Hub From 44cf424a94a85b74552075272660bb96a7432661 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 28 Aug 2020 18:33:05 -0700 Subject: [PATCH 0273/1871] Remove double colon, refs #956 --- .github/workflows/publish.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index e538a463..1a94a6b3 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -69,6 +69,6 @@ jobs: run: |- docker login -u $DOCKER_USER -p $DOCKER_PASS export REPO=datasetteproject/datasette - docker build -f Dockerfile -t $REPO::${GITHUB_REF#refs/tags/} . - docker tag $REPO::${GITHUB_REF#refs/tags/} $REPO:latest + docker build -f Dockerfile -t $REPO:${GITHUB_REF#refs/tags/} . 
+ docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest docker push $REPO From 9dbbfa1f0b5cf07c91ba4c8d7b0145cf0ed4cf0f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 30 Aug 2020 10:39:16 -0700 Subject: [PATCH 0274/1871] Upgrade CodeMirror to 5.57.0, refs #948 --- datasette/static/codemirror-5.31.0-min.css | 2 - datasette/static/codemirror-5.31.0-sql.min.js | 1 - datasette/static/codemirror-5.31.0.js | 9659 ----------------- datasette/static/codemirror-5.57.0-sql.min.js | 5 + datasette/static/codemirror-5.57.0.min.css | 1 + datasette/static/codemirror-5.57.0.min.js | 11 + datasette/templates/_codemirror.html | 6 +- 7 files changed, 20 insertions(+), 9665 deletions(-) delete mode 100644 datasette/static/codemirror-5.31.0-min.css delete mode 100644 datasette/static/codemirror-5.31.0-sql.min.js delete mode 100644 datasette/static/codemirror-5.31.0.js create mode 100644 datasette/static/codemirror-5.57.0-sql.min.js create mode 100644 datasette/static/codemirror-5.57.0.min.css create mode 100644 datasette/static/codemirror-5.57.0.min.js diff --git a/datasette/static/codemirror-5.31.0-min.css b/datasette/static/codemirror-5.31.0-min.css deleted file mode 100644 index 7e162037..00000000 --- a/datasette/static/codemirror-5.31.0-min.css +++ /dev/null @@ -1,2 +0,0 @@ -.CodeMirror{font-family:monospace;height:300px;color:#000;direction:ltr}.CodeMirror-lines{padding:4px 0}.CodeMirror pre{padding:0 4px}.CodeMirror-gutter-filler,.CodeMirror-scrollbar-filler{background-color:#fff}.CodeMirror-gutters{border-right:1px solid #ddd;background-color:#f7f7f7;white-space:nowrap}.CodeMirror-linenumber{padding:0 3px 0 5px;min-width:20px;text-align:right;color:#999;white-space:nowrap}.CodeMirror-guttermarker{color:#000}.CodeMirror-guttermarker-subtle{color:#999}.CodeMirror-cursor{border-left:1px solid #000;border-right:none;width:0}.CodeMirror div.CodeMirror-secondarycursor{border-left:1px solid silver}.cm-fat-cursor .CodeMirror-cursor{width:auto;border:0!important;background:#7e7}.cm-fat-cursor div.CodeMirror-cursors{z-index:1}.cm-fat-cursor-mark{background-color:rgba(20,255,20,.5);-webkit-animation:blink 1.06s steps(1) infinite;-moz-animation:blink 1.06s steps(1) infinite;animation:blink 1.06s steps(1) infinite}.cm-animate-fat-cursor{width:auto;border:0;-webkit-animation:blink 1.06s steps(1) infinite;-moz-animation:blink 1.06s steps(1) infinite;animation:blink 1.06s steps(1) infinite;background-color:#7e7}@-moz-keyframes blink{50%{background-color:transparent}}@-webkit-keyframes blink{50%{background-color:transparent}}@keyframes blink{50%{background-color:transparent}}.cm-tab{display:inline-block;text-decoration:inherit}.CodeMirror-rulers{position:absolute;left:0;right:0;top:-50px;bottom:-20px;overflow:hidden}.CodeMirror-ruler{border-left:1px solid #ccc;top:0;bottom:0;position:absolute}.cm-s-default .cm-header{color:#00f}.cm-s-default .cm-quote{color:#090}.cm-negative{color:#d44}.cm-positive{color:#292}.cm-header,.cm-strong{font-weight:700}.cm-em{font-style:italic}.cm-link{text-decoration:underline}.cm-strikethrough{text-decoration:line-through}.cm-s-default .cm-keyword{color:#708}.cm-s-default .cm-atom{color:#219}.cm-s-default .cm-number{color:#164}.cm-s-default .cm-def{color:#00f}.cm-s-default .cm-variable-2{color:#05a}.cm-s-default .cm-type,.cm-s-default .cm-variable-3{color:#085}.cm-s-default .cm-comment{color:#a50}.cm-s-default .cm-string{color:#a11}.cm-s-default .cm-string-2{color:#f50}.cm-s-default .cm-meta{color:#555}.cm-s-default .cm-qualifier{color:#555}.cm-s-default 
.cm-builtin{color:#30a}.cm-s-default .cm-bracket{color:#997}.cm-s-default .cm-tag{color:#170}.cm-s-default .cm-attribute{color:#00c}.cm-s-default .cm-hr{color:#999}.cm-s-default .cm-link{color:#00c}.cm-s-default .cm-error{color:red}.cm-invalidchar{color:red}.CodeMirror-composing{border-bottom:2px solid}div.CodeMirror span.CodeMirror-matchingbracket{color:#0f0}div.CodeMirror span.CodeMirror-nonmatchingbracket{color:#f22}.CodeMirror-matchingtag{background:rgba(255,150,0,.3)}.CodeMirror-activeline-background{background:#e8f2ff}.CodeMirror{position:relative;overflow:hidden;background:#fff}.CodeMirror-scroll{overflow:scroll!important;margin-bottom:-30px;margin-right:-30px;padding-bottom:30px;height:100%;outline:0;position:relative}.CodeMirror-sizer{position:relative;border-right:30px solid transparent}.CodeMirror-gutter-filler,.CodeMirror-hscrollbar,.CodeMirror-scrollbar-filler,.CodeMirror-vscrollbar{position:absolute;z-index:6;display:none}.CodeMirror-vscrollbar{right:0;top:0;overflow-x:hidden;overflow-y:scroll}.CodeMirror-hscrollbar{bottom:0;left:0;overflow-y:hidden;overflow-x:scroll}.CodeMirror-scrollbar-filler{right:0;bottom:0}.CodeMirror-gutter-filler{left:0;bottom:0}.CodeMirror-gutters{position:absolute;left:0;top:0;min-height:100%;z-index:3}.CodeMirror-gutter{white-space:normal;height:100%;display:inline-block;vertical-align:top;margin-bottom:-30px}.CodeMirror-gutter-wrapper{position:absolute;z-index:4;background:0 0!important;border:none!important}.CodeMirror-gutter-background{position:absolute;top:0;bottom:0;z-index:4}.CodeMirror-gutter-elt{position:absolute;cursor:default;z-index:4}.CodeMirror-gutter-wrapper ::selection{background-color:transparent}.CodeMirror-gutter-wrapper ::-moz-selection{background-color:transparent}.CodeMirror-lines{cursor:text;min-height:1px}.CodeMirror pre{-moz-border-radius:0;-webkit-border-radius:0;border-radius:0;border-width:0;background:0 0;font-family:inherit;font-size:inherit;margin:0;white-space:pre;word-wrap:normal;line-height:inherit;color:inherit;z-index:2;position:relative;overflow:visible;-webkit-tap-highlight-color:transparent;-webkit-font-variant-ligatures:contextual;font-variant-ligatures:contextual}.CodeMirror-wrap pre{word-wrap:break-word;white-space:pre-wrap;word-break:normal}.CodeMirror-linebackground{position:absolute;left:0;right:0;top:0;bottom:0;z-index:0}.CodeMirror-linewidget{position:relative;z-index:2;overflow:auto}.CodeMirror-rtl pre{direction:rtl}.CodeMirror-code{outline:0}.CodeMirror-gutter,.CodeMirror-gutters,.CodeMirror-linenumber,.CodeMirror-scroll,.CodeMirror-sizer{-moz-box-sizing:content-box;box-sizing:content-box}.CodeMirror-measure{position:absolute;width:100%;height:0;overflow:hidden;visibility:hidden}.CodeMirror-cursor{position:absolute;pointer-events:none}.CodeMirror-measure pre{position:static}div.CodeMirror-cursors{visibility:hidden;position:relative;z-index:3}div.CodeMirror-dragcursors{visibility:visible}.CodeMirror-focused div.CodeMirror-cursors{visibility:visible}.CodeMirror-selected{background:#d9d9d9}.CodeMirror-focused .CodeMirror-selected{background:#d7d4f0}.CodeMirror-crosshair{cursor:crosshair}.CodeMirror-line::selection,.CodeMirror-line>span::selection,.CodeMirror-line>span>span::selection{background:#d7d4f0}.CodeMirror-line::-moz-selection,.CodeMirror-line>span::-moz-selection,.CodeMirror-line>span>span::-moz-selection{background:#d7d4f0}.cm-searching{background-color:#ffa;background-color:rgba(255,255,0,.4)}.cm-force-border{padding-right:.1px}@media print{.CodeMirror 
div.CodeMirror-cursors{visibility:hidden}}.cm-tab-wrap-hack:after{content:''}span.CodeMirror-selectedtext{background:0 0} -/*# sourceMappingURL=codemirror.min.css.map */ \ No newline at end of file diff --git a/datasette/static/codemirror-5.31.0-sql.min.js b/datasette/static/codemirror-5.31.0-sql.min.js deleted file mode 100644 index 1f05c0d0..00000000 --- a/datasette/static/codemirror-5.31.0-sql.min.js +++ /dev/null @@ -1 +0,0 @@ -!function(e){"object"==typeof exports&&"object"==typeof module?e(require("../../lib/codemirror")):"function"==typeof define&&define.amd?define(["../../lib/codemirror"],e):e(CodeMirror)}(function(e){"use strict";e.defineMode("sql",function(t,r){function a(e,t){var r=e.next();if(g[r]){var a=g[r](e,t);if(!1!==a)return a}if(p.hexNumber&&("0"==r&&e.match(/^[xX][0-9a-fA-F]+/)||("x"==r||"X"==r)&&e.match(/^'[0-9a-fA-F]+'/)))return"number";if(p.binaryNumber&&(("b"==r||"B"==r)&&e.match(/^'[01]+'/)||"0"==r&&e.match(/^b[01]+/)))return"number";if(r.charCodeAt(0)>47&&r.charCodeAt(0)<58)return e.match(/^[0-9]*(\.[0-9]+)?([eE][-+]?[0-9]+)?/),p.decimallessFloat&&e.match(/^\.(?!\.)/),"number";if("?"==r&&(e.eatSpace()||e.eol()||e.eat(";")))return"variable-3";if("'"==r||'"'==r&&p.doubleQuote)return t.tokenize=n(r),t.tokenize(e,t);if((p.nCharCast&&("n"==r||"N"==r)||p.charsetCast&&"_"==r&&e.match(/[a-z][a-z0-9]*/i))&&("'"==e.peek()||'"'==e.peek()))return"keyword";if(/^[\(\),\;\[\]]/.test(r))return null;if(p.commentSlashSlash&&"/"==r&&e.eat("/"))return e.skipToEnd(),"comment";if(p.commentHash&&"#"==r||"-"==r&&e.eat("-")&&(!p.commentSpaceRequired||e.eat(" ")))return e.skipToEnd(),"comment";if("/"==r&&e.eat("*"))return t.tokenize=i(1),t.tokenize(e,t);if("."!=r){if(m.test(r))return e.eatWhile(m),null;if("{"==r&&(e.match(/^( )*(d|D|t|T|ts|TS)( )*'[^']*'( )*}/)||e.match(/^( )*(d|D|t|T|ts|TS)( )*"[^"]*"( )*}/)))return"number";e.eatWhile(/^[_\w\d]/);var o=e.current().toLowerCase();return b.hasOwnProperty(o)&&(e.match(/^( )+'[^']*'/)||e.match(/^( )+"[^"]*"/))?"number":c.hasOwnProperty(o)?"atom":u.hasOwnProperty(o)?"builtin":d.hasOwnProperty(o)?"keyword":l.hasOwnProperty(o)?"string-2":null}return p.zerolessFloat&&e.match(/^(?:\d+(?:e[+-]?\d+)?)/i)?"number":e.match(/^\.+/)?null:p.ODBCdotTable&&e.match(/^[\w\d_]+/)?"variable-2":void 0}function n(e){return function(t,r){for(var n,i=!1;null!=(n=t.next());){if(n==e&&!i){r.tokenize=a;break}i=!i&&"\\"==n}return"string"}}function i(e){return function(t,r){var n=t.match(/^.*?(\/\*|\*\/)/);return n?"/*"==n[1]?r.tokenize=i(e+1):r.tokenize=e>1?i(e-1):a:t.skipToEnd(),"comment"}}function o(e,t,r){t.context={prev:t.context,indent:e.indentation(),col:e.column(),type:r}}function s(e){e.indent=e.context.indent,e.context=e.context.prev}var l=r.client||{},c=r.atoms||{false:!0,true:!0,null:!0},u=r.builtin||{},d=r.keywords||{},m=r.operatorChars||/^[*+\-%<>!=&|~^]/,p=r.support||{},g=r.hooks||{},b=r.dateSQL||{date:!0,time:!0,timestamp:!0};return{startState:function(){return{tokenize:a,context:null}},token:function(e,t){if(e.sol()&&t.context&&null==t.context.align&&(t.context.align=!1),t.tokenize==a&&e.eatSpace())return null;var r=t.tokenize(e,t);if("comment"==r)return r;t.context&&null==t.context.align&&(t.context.align=!0);var n=e.current();return"("==n?o(e,t,")"):"["==n?o(e,t,"]"):t.context&&t.context.type==n&&s(t),r},indent:function(r,a){var n=r.context;if(!n)return e.Pass;var i=a.charAt(0)==n.type;return 
n.align?n.col+(i?0:1):n.indent+(i?0:t.indentUnit)},blockCommentStart:"/*",blockCommentEnd:"*/",lineComment:p.commentSlashSlash?"//":p.commentHash?"#":"--"}}),function(){function t(e){for(var t;null!=(t=e.next());)if("`"==t&&!e.eat("`"))return"variable-2";return e.backUp(e.current().length-1),e.eatWhile(/\w/)?"variable-2":null}function r(e){return e.eat("@")&&(e.match(/^session\./),e.match(/^local\./),e.match(/^global\./)),e.eat("'")?(e.match(/^.*'/),"variable-2"):e.eat('"')?(e.match(/^.*"/),"variable-2"):e.eat("`")?(e.match(/^.*`/),"variable-2"):e.match(/^[0-9a-zA-Z$\.\_]+/)?"variable-2":null}function a(e){return e.eat("N")?"atom":e.match(/^[a-zA-Z.#!?]/)?"variable-2":null}function n(e){for(var t={},r=e.split(" "),a=0;a!=]/,dateSQL:n("date time timestamp"),support:n("ODBCdotTable doubleQuote binaryNumber hexNumber")}),e.defineMIME("text/x-mssql",{name:"sql",client:n("charset clear connect edit ego exit go help nopager notee nowarning pager print prompt quit rehash source status system tee"),keywords:n(i+"begin trigger proc view index for add constraint key primary foreign collate clustered nonclustered declare exec"),builtin:n("bigint numeric bit smallint decimal smallmoney int tinyint money float real char varchar text nchar nvarchar ntext binary varbinary image cursor timestamp hierarchyid uniqueidentifier sql_variant xml table "),atoms:n("false true null unknown"),operatorChars:/^[*+\-%<>!=]/,dateSQL:n("date datetimeoffset datetime2 smalldatetime datetime time"),hooks:{"@":r}}),e.defineMIME("text/x-mysql",{name:"sql",client:n("charset clear connect edit ego exit go help nopager notee nowarning pager print prompt quit rehash source status system tee"),keywords:n(i+"accessible action add after algorithm all analyze asensitive at authors auto_increment autocommit avg avg_row_length before binary binlog both btree cache call cascade cascaded case catalog_name chain change changed character check checkpoint checksum class_origin client_statistics close coalesce code collate collation collations column columns comment commit committed completion concurrent condition connection consistent constraint contains continue contributors convert cross current current_date current_time current_timestamp current_user cursor data database databases day_hour day_microsecond day_minute day_second deallocate dec declare default delay_key_write delayed delimiter des_key_file describe deterministic dev_pop dev_samp deviance diagnostics directory disable discard distinctrow div dual dumpfile each elseif enable enclosed end ends engine engines enum errors escape escaped even event events every execute exists exit explain extended fast fetch field fields first flush for force foreign found_rows full fulltext function general get global grant grants group group_concat handler hash help high_priority hosts hour_microsecond hour_minute hour_second if ignore ignore_server_ids import index index_statistics infile inner innodb inout insensitive insert_method install interval invoker isolation iterate key keys kill language last leading leave left level limit linear lines list load local localtime localtimestamp lock logs low_priority master master_heartbeat_period master_ssl_verify_server_cert masters match max max_rows maxvalue message_text middleint migrate min min_rows minute_microsecond minute_second mod mode modifies modify mutex mysql_errno natural next no no_write_to_binlog offline offset one online open optimize option optionally out outer outfile pack_keys parser partition partitions password phase plugin 
plugins prepare preserve prev primary privileges procedure processlist profile profiles purge query quick range read read_write reads real rebuild recover references regexp relaylog release remove rename reorganize repair repeatable replace require resignal restrict resume return returns revoke right rlike rollback rollup row row_format rtree savepoint schedule schema schema_name schemas second_microsecond security sensitive separator serializable server session share show signal slave slow smallint snapshot soname spatial specific sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_no_cache sql_small_result sqlexception sqlstate sqlwarning ssl start starting starts status std stddev stddev_pop stddev_samp storage straight_join subclass_origin sum suspend table_name table_statistics tables tablespace temporary terminated to trailing transaction trigger triggers truncate uncommitted undo uninstall unique unlock upgrade usage use use_frm user user_resources user_statistics using utc_date utc_time utc_timestamp value variables varying view views warnings when while with work write xa xor year_month zerofill begin do then else loop repeat"),builtin:n("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text bigint int int1 int2 int3 int4 int8 integer float float4 float8 double char varbinary varchar varcharacter precision date datetime year unsigned signed numeric"),atoms:n("false true null unknown"),operatorChars:/^[*+\-%<>!=&|^]/,dateSQL:n("date time timestamp"),support:n("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber doubleQuote nCharCast charsetCast commentHash commentSpaceRequired"),hooks:{"@":r,"`":t,"\\":a}}),e.defineMIME("text/x-mariadb",{name:"sql",client:n("charset clear connect edit ego exit go help nopager notee nowarning pager print prompt quit rehash source status system tee"),keywords:n(i+"accessible action add after algorithm all always analyze asensitive at authors auto_increment autocommit avg avg_row_length before binary binlog both btree cache call cascade cascaded case catalog_name chain change changed character check checkpoint checksum class_origin client_statistics close coalesce code collate collation collations column columns comment commit committed completion concurrent condition connection consistent constraint contains continue contributors convert cross current current_date current_time current_timestamp current_user cursor data database databases day_hour day_microsecond day_minute day_second deallocate dec declare default delay_key_write delayed delimiter des_key_file describe deterministic dev_pop dev_samp deviance diagnostics directory disable discard distinctrow div dual dumpfile each elseif enable enclosed end ends engine engines enum errors escape escaped even event events every execute exists exit explain extended fast fetch field fields first flush for force foreign found_rows full fulltext function general generated get global grant grants group groupby_concat handler hard hash help high_priority hosts hour_microsecond hour_minute hour_second if ignore ignore_server_ids import index index_statistics infile inner innodb inout insensitive insert_method install interval invoker isolation iterate key keys kill language last leading leave left level limit linear lines list load local localtime localtimestamp lock logs low_priority master master_heartbeat_period master_ssl_verify_server_cert masters match max max_rows maxvalue 
message_text middleint migrate min min_rows minute_microsecond minute_second mod mode modifies modify mutex mysql_errno natural next no no_write_to_binlog offline offset one online open optimize option optionally out outer outfile pack_keys parser partition partitions password persistent phase plugin plugins prepare preserve prev primary privileges procedure processlist profile profiles purge query quick range read read_write reads real rebuild recover references regexp relaylog release remove rename reorganize repair repeatable replace require resignal restrict resume return returns revoke right rlike rollback rollup row row_format rtree savepoint schedule schema schema_name schemas second_microsecond security sensitive separator serializable server session share show shutdown signal slave slow smallint snapshot soft soname spatial specific sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_no_cache sql_small_result sqlexception sqlstate sqlwarning ssl start starting starts status std stddev stddev_pop stddev_samp storage straight_join subclass_origin sum suspend table_name table_statistics tables tablespace temporary terminated to trailing transaction trigger triggers truncate uncommitted undo uninstall unique unlock upgrade usage use use_frm user user_resources user_statistics using utc_date utc_time utc_timestamp value variables varying view views virtual warnings when while with work write xa xor year_month zerofill begin do then else loop repeat"),builtin:n("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text bigint int int1 int2 int3 int4 int8 integer float float4 float8 double char varbinary varchar varcharacter precision date datetime year unsigned signed numeric"),atoms:n("false true null unknown"),operatorChars:/^[*+\-%<>!=&|^]/,dateSQL:n("date time timestamp"),support:n("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber doubleQuote nCharCast charsetCast commentHash commentSpaceRequired"),hooks:{"@":r,"`":t,"\\":a}}),e.defineMIME("text/x-sqlite",{name:"sql",client:n("auth backup bail binary changes check clone databases dbinfo dump echo eqp exit explain fullschema headers help import imposter indexes iotrace limit lint load log mode nullvalue once open output print prompt quit read restore save scanstats schema separator session shell show stats system tables testcase timeout timer trace vfsinfo vfslist vfsname width"),keywords:n(i+"abort action add after all analyze attach autoincrement before begin cascade case cast check collate column commit conflict constraint cross current_date current_time current_timestamp database default deferrable deferred detach each else end escape except exclusive exists explain fail for foreign full glob if ignore immediate index indexed initially inner instead intersect isnull key left limit match natural no notnull null of offset outer plan pragma primary query raise recursive references regexp reindex release rename replace restrict right rollback row savepoint temp temporary then to transaction trigger unique using vacuum view virtual when with without"),builtin:n("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text clob bigint int int2 int8 integer float double char varchar date datetime year unsigned signed numeric real"),atoms:n("null current_date current_time 
current_timestamp"),operatorChars:/^[*+\-%<>!=&|/~]/,dateSQL:n("date time timestamp datetime"),support:n("decimallessFloat zerolessFloat"),identifierQuote:'"',hooks:{"@":r,":":r,"?":r,$:r,'"':function(e){for(var t;null!=(t=e.next());)if('"'==t&&!e.eat('"'))return"variable-2";return e.backUp(e.current().length-1),e.eatWhile(/\w/)?"variable-2":null},"`":t}}),e.defineMIME("text/x-cassandra",{name:"sql",client:{},keywords:n("add all allow alter and any apply as asc authorize batch begin by clustering columnfamily compact consistency count create custom delete desc distinct drop each_quorum exists filtering from grant if in index insert into key keyspace keyspaces level limit local_one local_quorum modify nan norecursive nosuperuser not of on one order password permission permissions primary quorum rename revoke schema select set storage superuser table three to token truncate ttl two type unlogged update use user users using values where with writetime"),builtin:n("ascii bigint blob boolean counter decimal double float frozen inet int list map static text timestamp timeuuid tuple uuid varchar varint"),atoms:n("false true infinity NaN"),operatorChars:/^[<>=]/,dateSQL:{},support:n("commentSlashSlash decimallessFloat"),hooks:{}}),e.defineMIME("text/x-plsql",{name:"sql",client:n("appinfo arraysize autocommit autoprint autorecovery autotrace blockterminator break btitle cmdsep colsep compatibility compute concat copycommit copytypecheck define describe echo editfile embedded escape exec execute feedback flagger flush heading headsep instance linesize lno loboffset logsource long longchunksize markup native newpage numformat numwidth pagesize pause pno recsep recsepchar release repfooter repheader serveroutput shiftinout show showmode size spool sqlblanklines sqlcase sqlcode sqlcontinue sqlnumber sqlpluscompatibility sqlprefix sqlprompt sqlterminator suffix tab term termout time timing trimout trimspool ttitle underline verify version wrap"),keywords:n("abort accept access add all alter and any array arraylen as asc assert assign at attributes audit authorization avg base_table begin between binary_integer body boolean by case cast char char_base check close cluster clusters colauth column comment commit compress connect connected constant constraint crash create current currval cursor data_base database date dba deallocate debugoff debugon decimal declare default definition delay delete desc digits dispose distinct do drop else elseif elsif enable end entry escape exception exception_init exchange exclusive exists exit external fast fetch file for force form from function generic goto grant group having identified if immediate in increment index indexes indicator initial initrans insert interface intersect into is key level library like limited local lock log logging long loop master maxextents maxtrans member minextents minus mislabel mode modify multiset new next no noaudit nocompress nologging noparallel not nowait number_base object of off offline on online only open option or order out package parallel partition pctfree pctincrease pctused pls_integer positive positiven pragma primary prior private privileges procedure public raise range raw read rebuild record ref references refresh release rename replace resource restrict return returning returns reverse revoke rollback row rowid rowlabel rownum rows run savepoint schema segment select separate session set share snapshot some space split sql start statement storage subtype successful synonym tabauth table tables tablespace task terminate 
then to trigger truncate type union unique unlimited unrecoverable unusable update use using validate value values variable view views when whenever where while with work"),builtin:n("abs acos add_months ascii asin atan atan2 average bfile bfilename bigserial bit blob ceil character chartorowid chr clob concat convert cos cosh count dec decode deref dual dump dup_val_on_index empty error exp false float floor found glb greatest hextoraw initcap instr instrb int integer isopen last_day least length lengthb ln lower lpad ltrim lub make_ref max min mlslabel mod months_between natural naturaln nchar nclob new_time next_day nextval nls_charset_decl_len nls_charset_id nls_charset_name nls_initcap nls_lower nls_sort nls_upper nlssort no_data_found notfound null number numeric nvarchar2 nvl others power rawtohex real reftohex round rowcount rowidtochar rowtype rpad rtrim serial sign signtype sin sinh smallint soundex sqlcode sqlerrm sqrt stddev string substr substrb sum sysdate tan tanh to_char text to_date to_label to_multi_byte to_number to_single_byte translate true trunc uid unlogged upper user userenv varchar varchar2 variance varying vsize xml"),operatorChars:/^[*+\-%<>!=~]/,dateSQL:n("date time timestamp"),support:n("doubleQuote nCharCast zerolessFloat binaryNumber hexNumber")}),e.defineMIME("text/x-hive",{name:"sql",keywords:n("select alter $elem$ $key$ $value$ add after all analyze and archive as asc before between binary both bucket buckets by cascade case cast change cluster clustered clusterstatus collection column columns comment compute concatenate continue create cross cursor data database databases dbproperties deferred delete delimited desc describe directory disable distinct distribute drop else enable end escaped exclusive exists explain export extended external false fetch fields fileformat first format formatted from full function functions grant group having hold_ddltime idxproperties if import in index indexes inpath inputdriver inputformat insert intersect into is items join keys lateral left like limit lines load local location lock locks mapjoin materialized minus msck no_drop nocompress not of offline on option or order out outer outputdriver outputformat overwrite partition partitioned partitions percent plus preserve procedure purge range rcfile read readonly reads rebuild recordreader recordwriter recover reduce regexp rename repair replace restrict revoke right rlike row schema schemas semi sequencefile serde serdeproperties set shared show show_database sort sorted ssl statistics stored streamtable table tables tablesample tblproperties temporary terminated textfile then tmp to touch transform trigger true unarchive undo union uniquejoin unlock update use using utc utc_tmestamp view when where while with"),builtin:n("bool boolean long timestamp tinyint smallint bigint int float double date datetime unsigned string array struct map uniontype"),atoms:n("false true null unknown"),operatorChars:/^[*+\-%<>!=]/,dateSQL:n("date timestamp"),support:n("ODBCdotTable doubleQuote binaryNumber hexNumber")}),e.defineMIME("text/x-pgsql",{name:"sql",client:n("source"),keywords:n(i+"a abort abs absent absolute access according action ada add admin after aggregate all allocate also always analyse analyze any are array array_agg array_max_cardinality asensitive assertion assignment asymmetric at atomic attribute attributes authorization avg backward base64 before begin begin_frame begin_partition bernoulli binary bit_length blob blocked bom both breadth c cache call called cardinality 
cascade cascaded case cast catalog catalog_name ceil ceiling chain characteristics characters character_length character_set_catalog character_set_name character_set_schema char_length check checkpoint class class_origin clob close cluster coalesce cobol collate collation collation_catalog collation_name collation_schema collect column columns column_name command_function command_function_code comment comments commit committed concurrently condition condition_number configuration conflict connect connection connection_name constraint constraints constraint_catalog constraint_name constraint_schema constructor contains content continue control conversion convert copy corr corresponding cost covar_pop covar_samp cross csv cube cume_dist current current_catalog current_date current_default_transform_group current_path current_role current_row current_schema current_time current_timestamp current_transform_group_for_type current_user cursor cursor_name cycle data database datalink datetime_interval_code datetime_interval_precision day db deallocate dec declare default defaults deferrable deferred defined definer degree delimiter delimiters dense_rank depth deref derived describe descriptor deterministic diagnostics dictionary disable discard disconnect dispatch dlnewcopy dlpreviouscopy dlurlcomplete dlurlcompleteonly dlurlcompletewrite dlurlpath dlurlpathonly dlurlpathwrite dlurlscheme dlurlserver dlvalue do document domain dynamic dynamic_function dynamic_function_code each element else empty enable encoding encrypted end end-exec end_frame end_partition enforced enum equals escape event every except exception exclude excluding exclusive exec execute exists exp explain expression extension external extract false family fetch file filter final first first_value flag float floor following for force foreign fortran forward found frame_row free freeze fs full function functions fusion g general generated get global go goto grant granted greatest grouping groups handler header hex hierarchy hold hour id identity if ignore ilike immediate immediately immutable implementation implicit import including increment indent index indexes indicator inherit inherits initially inline inner inout input insensitive instance instantiable instead integrity intersect intersection invoker isnull isolation k key key_member key_type label lag language large last last_value lateral lead leading leakproof least left length level library like_regex link listen ln load local localtime localtimestamp location locator lock locked logged lower m map mapping match matched materialized max maxvalue max_cardinality member merge message_length message_octet_length message_text method min minute minvalue mod mode modifies module month more move multiset mumps name names namespace national natural nchar nclob nesting new next nfc nfd nfkc nfkd nil no none normalize normalized nothing notify notnull nowait nth_value ntile null nullable nullif nulls number object occurrences_regex octets octet_length of off offset oids old only open operator option options ordering ordinality others out outer output over overlaps overlay overriding owned owner p pad parallel parameter parameter_mode parameter_name parameter_ordinal_position parameter_specific_catalog parameter_specific_name parameter_specific_schema parser partial partition pascal passing passthrough password percent percentile_cont percentile_disc percent_rank period permission placing plans pli policy portion position position_regex power precedes preceding prepare prepared 
preserve primary prior privileges procedural procedure program public quote range rank read reads reassign recheck recovery recursive ref references referencing refresh regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy regr_syy reindex relative release rename repeatable replace replica requiring reset respect restart restore restrict restricted result return returned_cardinality returned_length returned_octet_length returned_sqlstate returning returns revoke right role rollback rollup routine routine_catalog routine_name routine_schema row rows row_count row_number rule savepoint scale schema schema_name scope scope_catalog scope_name scope_schema scroll search second section security selective self sensitive sequence sequences serializable server server_name session session_user setof sets share show similar simple size skip snapshot some source space specific specifictype specific_name sql sqlcode sqlerror sqlexception sqlstate sqlwarning sqrt stable standalone start state statement static statistics stddev_pop stddev_samp stdin stdout storage strict strip structure style subclass_origin submultiset substring substring_regex succeeds sum symmetric sysid system system_time system_user t tables tablesample tablespace table_name temp template temporary then ties timezone_hour timezone_minute to token top_level_count trailing transaction transactions_committed transactions_rolled_back transaction_active transform transforms translate translate_regex translation treat trigger trigger_catalog trigger_name trigger_schema trim trim_array true truncate trusted type types uescape unbounded uncommitted under unencrypted unique unknown unlink unlisten unlogged unnamed unnest until untyped upper uri usage user user_defined_type_catalog user_defined_type_code user_defined_type_name user_defined_type_schema using vacuum valid validate validator value value_of varbinary variadic var_pop var_samp verbose version versioning view views volatile when whenever whitespace width_bucket window within work wrapper write xmlagg xmlattributes xmlbinary xmlcast xmlcomment xmlconcat xmldeclaration xmldocument xmlelement xmlexists xmlforest xmliterate xmlnamespaces xmlparse xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltext xmlvalidate year yes loop repeat attach path depends detach zone"),builtin:n("bigint int8 bigserial serial8 bit varying varbit boolean bool box bytea character char varchar cidr circle date double precision float8 inet integer int int4 interval json jsonb line lseg macaddr macaddr8 money numeric decimal path pg_lsn point polygon real float4 smallint int2 smallserial serial2 serial serial4 text time without zone with timetz timestamp timestamptz tsquery tsvector txid_snapshot uuid xml"),atoms:n("false true null unknown"),operatorChars:/^[*+\-%<>!=&|^\/#@?~]/,dateSQL:n("date time timestamp"),support:n("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber nCharCast charsetCast")}),e.defineMIME("text/x-gql",{name:"sql",keywords:n("ancestor and asc by contains desc descendant distinct from group has in is limit offset on order select superset where"),atoms:n("false true"),builtin:n("blob datetime first key __key__ string integer double boolean null"),operatorChars:/^[*+\-%<>!=]/}),e.defineMIME("text/x-gpsql",{name:"sql",client:n("source"),keywords:n("abort absolute access action active add admin after aggregate all also alter always analyse analyze and any array as asc assertion assignment asymmetric at authorization backward before begin 
between bigint binary bit boolean both by cache called cascade cascaded case cast chain char character characteristics check checkpoint class close cluster coalesce codegen collate column comment commit committed concurrency concurrently configuration connection constraint constraints contains content continue conversion copy cost cpu_rate_limit create createdb createexttable createrole createuser cross csv cube current current_catalog current_date current_role current_schema current_time current_timestamp current_user cursor cycle data database day deallocate dec decimal declare decode default defaults deferrable deferred definer delete delimiter delimiters deny desc dictionary disable discard distinct distributed do document domain double drop dxl each else enable encoding encrypted end enum errors escape every except exchange exclude excluding exclusive execute exists explain extension external extract false family fetch fields filespace fill filter first float following for force foreign format forward freeze from full function global grant granted greatest group group_id grouping handler hash having header hold host hour identity if ignore ilike immediate immutable implicit in including inclusive increment index indexes inherit inherits initially inline inner inout input insensitive insert instead int integer intersect interval into invoker is isnull isolation join key language large last leading least left level like limit list listen load local localtime localtimestamp location lock log login mapping master match maxvalue median merge minute minvalue missing mode modifies modify month move name names national natural nchar new newline next no nocreatedb nocreateexttable nocreaterole nocreateuser noinherit nologin none noovercommit nosuperuser not nothing notify notnull nowait null nullif nulls numeric object of off offset oids old on only operator option options or order ordered others out outer over overcommit overlaps overlay owned owner parser partial partition partitions passing password percent percentile_cont percentile_disc placing plans position preceding precision prepare prepared preserve primary prior privileges procedural procedure protocol queue quote randomly range read readable reads real reassign recheck recursive ref references reindex reject relative release rename repeatable replace replica reset resource restart restrict returning returns revoke right role rollback rollup rootpartition row rows rule savepoint scatter schema scroll search second security segment select sequence serializable session session_user set setof sets share show similar simple smallint some split sql stable standalone start statement statistics stdin stdout storage strict strip subpartition subpartitions substring superuser symmetric sysid system table tablespace temp template temporary text then threshold ties time timestamp to trailing transaction treat trigger trim true truncate trusted type unbounded uncommitted unencrypted union unique unknown unlisten until update user using vacuum valid validation validator value values varchar variadic varying verbose version view volatile web when where whitespace window with within without work writable write xml xmlattributes xmlconcat xmlelement xmlexists xmlforest xmlparse xmlpi xmlroot xmlserialize year yes zone"),builtin:n("bigint int8 bigserial serial8 bit varying varbit boolean bool box bytea character char varchar cidr circle date double precision float float8 inet integer int int4 interval json jsonb line lseg macaddr macaddr8 money 
numeric decimal path pg_lsn point polygon real float4 smallint int2 smallserial serial2 serial serial4 text time without zone with timetz timestamp timestamptz tsquery tsvector txid_snapshot uuid xml"),atoms:n("false true null unknown"),operatorChars:/^[*+\-%<>!=&|^\/#@?~]/,dateSQL:n("date time timestamp"),support:n("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber nCharCast charsetCast")}),e.defineMIME("text/x-sparksql",{name:"sql",keywords:n("add after all alter analyze and anti archive array as asc at between bucket buckets by cache cascade case cast change clear cluster clustered codegen collection column columns comment commit compact compactions compute concatenate cost create cross cube current current_date current_timestamp database databases datata dbproperties defined delete delimited desc describe dfs directories distinct distribute drop else end escaped except exchange exists explain export extended external false fields fileformat first following for format formatted from full function functions global grant group grouping having if ignore import in index indexes inner inpath inputformat insert intersect interval into is items join keys last lateral lazy left like limit lines list load local location lock locks logical macro map minus msck natural no not null nulls of on option options or order out outer outputformat over overwrite partition partitioned partitions percent preceding principals purge range recordreader recordwriter recover reduce refresh regexp rename repair replace reset restrict revoke right rlike role roles rollback rollup row rows schema schemas select semi separated serde serdeproperties set sets show skewed sort sorted start statistics stored stratify struct table tables tablesample tblproperties temp temporary terminated then to touch transaction transactions transform true truncate unarchive unbounded uncache union unlock unset use using values view when where window with"),builtin:n("tinyint smallint int bigint boolean float double string binary timestamp decimal array map struct uniontype delimited serde sequencefile textfile rcfile inputformat outputformat"),atoms:n("false true null"),operatorChars:/^[*+\-%<>!=~&|^]/,dateSQL:n("date time timestamp"),support:n("ODBCdotTable doubleQuote zerolessFloat")}),e.defineMIME("text/x-esper",{name:"sql",client:n("source"),keywords:n("alter and as asc between by count create delete desc distinct drop from group having in insert into is join like not on or order select set table union update values where limit after all and as at asc avedev avg between by case cast coalesce count create current_timestamp day days delete define desc distinct else end escape events every exists false first from full group having hour hours in inner insert instanceof into irstream is istream join last lastweekday left limit like max match_recognize matches median measures metadatasql min minute minutes msec millisecond milliseconds not null offset on or order outer output partition pattern prev prior regexp retain-union retain-intersection right rstream sec second seconds select set some snapshot sql stddev sum then true unidirectional until update variable weekday when where window"),builtin:{},atoms:n("false true null"),operatorChars:/^[*+\-%<>!=&|^\/#@?~]/,dateSQL:n("time"),support:n("decimallessFloat zerolessFloat binaryNumber hexNumber")})}()}); \ No newline at end of file diff --git a/datasette/static/codemirror-5.31.0.js b/datasette/static/codemirror-5.31.0.js deleted file mode 100644 index a0d5d688..00000000 
--- a/datasette/static/codemirror-5.31.0.js +++ /dev/null @@ -1,9659 +0,0 @@ -// CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE - -// This is CodeMirror (http://codemirror.net), a code editor -// implemented in JavaScript on top of the browser's DOM. -// -// You can find some technical background for some of the code below -// at http://marijnhaverbeke.nl/blog/#cm-internals . - -(function (global, factory) { - typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : - typeof define === 'function' && define.amd ? define(factory) : - (global.CodeMirror = factory()); -}(this, (function () { 'use strict'; - -// Kludges for bugs and behavior differences that can't be feature -// detected are enabled based on userAgent etc sniffing. -var userAgent = navigator.userAgent; -var platform = navigator.platform; - -var gecko = /gecko\/\d/i.test(userAgent); -var ie_upto10 = /MSIE \d/.test(userAgent); -var ie_11up = /Trident\/(?:[7-9]|\d{2,})\..*rv:(\d+)/.exec(userAgent); -var edge = /Edge\/(\d+)/.exec(userAgent); -var ie = ie_upto10 || ie_11up || edge; -var ie_version = ie && (ie_upto10 ? document.documentMode || 6 : +(edge || ie_11up)[1]); -var webkit = !edge && /WebKit\//.test(userAgent); -var qtwebkit = webkit && /Qt\/\d+\.\d+/.test(userAgent); -var chrome = !edge && /Chrome\//.test(userAgent); -var presto = /Opera\//.test(userAgent); -var safari = /Apple Computer/.test(navigator.vendor); -var mac_geMountainLion = /Mac OS X 1\d\D([8-9]|\d\d)\D/.test(userAgent); -var phantom = /PhantomJS/.test(userAgent); - -var ios = !edge && /AppleWebKit/.test(userAgent) && /Mobile\/\w+/.test(userAgent); -var android = /Android/.test(userAgent); -// This is woefully incomplete. Suggestions for alternative methods welcome. -var mobile = ios || android || /webOS|BlackBerry|Opera Mini|Opera Mobi|IEMobile/i.test(userAgent); -var mac = ios || /Mac/.test(platform); -var chromeOS = /\bCrOS\b/.test(userAgent); -var windows = /win/i.test(platform); - -var presto_version = presto && userAgent.match(/Version\/(\d*\.\d*)/); -if (presto_version) { presto_version = Number(presto_version[1]); } -if (presto_version && presto_version >= 15) { presto = false; webkit = true; } -// Some browsers use the wrong event properties to signal cmd/ctrl on OS X -var flipCtrlCmd = mac && (qtwebkit || presto && (presto_version == null || presto_version < 12.11)); -var captureRightClick = gecko || (ie && ie_version >= 9); - -function classTest(cls) { return new RegExp("(^|\\s)" + cls + "(?:$|\\s)\\s*") } - -var rmClass = function(node, cls) { - var current = node.className; - var match = classTest(cls).exec(current); - if (match) { - var after = current.slice(match.index + match[0].length); - node.className = current.slice(0, match.index) + (after ? 
match[1] + after : ""); - } -}; - -function removeChildren(e) { - for (var count = e.childNodes.length; count > 0; --count) - { e.removeChild(e.firstChild); } - return e -} - -function removeChildrenAndAdd(parent, e) { - return removeChildren(parent).appendChild(e) -} - -function elt(tag, content, className, style) { - var e = document.createElement(tag); - if (className) { e.className = className; } - if (style) { e.style.cssText = style; } - if (typeof content == "string") { e.appendChild(document.createTextNode(content)); } - else if (content) { for (var i = 0; i < content.length; ++i) { e.appendChild(content[i]); } } - return e -} -// wrapper for elt, which removes the elt from the accessibility tree -function eltP(tag, content, className, style) { - var e = elt(tag, content, className, style); - e.setAttribute("role", "presentation"); - return e -} - -var range; -if (document.createRange) { range = function(node, start, end, endNode) { - var r = document.createRange(); - r.setEnd(endNode || node, end); - r.setStart(node, start); - return r -}; } -else { range = function(node, start, end) { - var r = document.body.createTextRange(); - try { r.moveToElementText(node.parentNode); } - catch(e) { return r } - r.collapse(true); - r.moveEnd("character", end); - r.moveStart("character", start); - return r -}; } - -function contains(parent, child) { - if (child.nodeType == 3) // Android browser always returns false when child is a textnode - { child = child.parentNode; } - if (parent.contains) - { return parent.contains(child) } - do { - if (child.nodeType == 11) { child = child.host; } - if (child == parent) { return true } - } while (child = child.parentNode) -} - -function activeElt() { - // IE and Edge may throw an "Unspecified Error" when accessing document.activeElement. - // IE < 10 will throw when accessed while the page is loading or in an iframe. - // IE > 9 and Edge will throw when accessed in an iframe if document.body is unavailable. - var activeElement; - try { - activeElement = document.activeElement; - } catch(e) { - activeElement = document.body || null; - } - while (activeElement && activeElement.shadowRoot && activeElement.shadowRoot.activeElement) - { activeElement = activeElement.shadowRoot.activeElement; } - return activeElement -} - -function addClass(node, cls) { - var current = node.className; - if (!classTest(cls).test(current)) { node.className += (current ? " " : "") + cls; } -} -function joinClasses(a, b) { - var as = a.split(" "); - for (var i = 0; i < as.length; i++) - { if (as[i] && !classTest(as[i]).test(b)) { b += " " + as[i]; } } - return b -} - -var selectInput = function(node) { node.select(); }; -if (ios) // Mobile Safari apparently has a bug where select() is broken. - { selectInput = function(node) { node.selectionStart = 0; node.selectionEnd = node.value.length; }; } -else if (ie) // Suppress mysterious IE10 errors - { selectInput = function(node) { try { node.select(); } catch(_e) {} }; } - -function bind(f) { - var args = Array.prototype.slice.call(arguments, 1); - return function(){return f.apply(null, args)} -} - -function copyObj(obj, target, overwrite) { - if (!target) { target = {}; } - for (var prop in obj) - { if (obj.hasOwnProperty(prop) && (overwrite !== false || !target.hasOwnProperty(prop))) - { target[prop] = obj[prop]; } } - return target -} - -// Counts the column offset in a string, taking tabs into account. -// Used mostly to find indentation. 
-function countColumn(string, end, tabSize, startIndex, startValue) { - if (end == null) { - end = string.search(/[^\s\u00a0]/); - if (end == -1) { end = string.length; } - } - for (var i = startIndex || 0, n = startValue || 0;;) { - var nextTab = string.indexOf("\t", i); - if (nextTab < 0 || nextTab >= end) - { return n + (end - i) } - n += nextTab - i; - n += tabSize - (n % tabSize); - i = nextTab + 1; - } -} - -var Delayed = function() {this.id = null;}; -Delayed.prototype.set = function (ms, f) { - clearTimeout(this.id); - this.id = setTimeout(f, ms); -}; - -function indexOf(array, elt) { - for (var i = 0; i < array.length; ++i) - { if (array[i] == elt) { return i } } - return -1 -} - -// Number of pixels added to scroller and sizer to hide scrollbar -var scrollerGap = 30; - -// Returned or thrown by various protocols to signal 'I'm not -// handling this'. -var Pass = {toString: function(){return "CodeMirror.Pass"}}; - -// Reused option objects for setSelection & friends -var sel_dontScroll = {scroll: false}; -var sel_mouse = {origin: "*mouse"}; -var sel_move = {origin: "+move"}; - -// The inverse of countColumn -- find the offset that corresponds to -// a particular column. -function findColumn(string, goal, tabSize) { - for (var pos = 0, col = 0;;) { - var nextTab = string.indexOf("\t", pos); - if (nextTab == -1) { nextTab = string.length; } - var skipped = nextTab - pos; - if (nextTab == string.length || col + skipped >= goal) - { return pos + Math.min(skipped, goal - col) } - col += nextTab - pos; - col += tabSize - (col % tabSize); - pos = nextTab + 1; - if (col >= goal) { return pos } - } -} - -var spaceStrs = [""]; -function spaceStr(n) { - while (spaceStrs.length <= n) - { spaceStrs.push(lst(spaceStrs) + " "); } - return spaceStrs[n] -} - -function lst(arr) { return arr[arr.length-1] } - -function map(array, f) { - var out = []; - for (var i = 0; i < array.length; i++) { out[i] = f(array[i], i); } - return out -} - -function insertSorted(array, value, score) { - var pos = 0, priority = score(value); - while (pos < array.length && score(array[pos]) <= priority) { pos++; } - array.splice(pos, 0, value); -} - -function nothing() {} - -function createObj(base, props) { - var inst; - if (Object.create) { - inst = Object.create(base); - } else { - nothing.prototype = base; - inst = new nothing(); - } - if (props) { copyObj(props, inst); } - return inst -} - -var nonASCIISingleCaseWordChar = /[\u00df\u0587\u0590-\u05f4\u0600-\u06ff\u3040-\u309f\u30a0-\u30ff\u3400-\u4db5\u4e00-\u9fcc\uac00-\ud7af]/; -function isWordCharBasic(ch) { - return /\w/.test(ch) || ch > "\x80" && - (ch.toUpperCase() != ch.toLowerCase() || nonASCIISingleCaseWordChar.test(ch)) -} -function isWordChar(ch, helper) { - if (!helper) { return isWordCharBasic(ch) } - if (helper.source.indexOf("\\w") > -1 && isWordCharBasic(ch)) { return true } - return helper.test(ch) -} - -function isEmpty(obj) { - for (var n in obj) { if (obj.hasOwnProperty(n) && obj[n]) { return false } } - return true -} - -// Extending unicode characters. A series of a non-extending char + -// any number of extending chars is treated as a single unit as far -// as editing and measuring is concerned. This is not fully correct, -// since some scripts/fonts/browsers also treat other configurations -// of code points as a group. 
-var extendingChars = /[\u0300-\u036f\u0483-\u0489\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u065e\u0670\u06d6-\u06dc\u06de-\u06e4\u06e7\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0900-\u0902\u093c\u0941-\u0948\u094d\u0951-\u0955\u0962\u0963\u0981\u09bc\u09be\u09c1-\u09c4\u09cd\u09d7\u09e2\u09e3\u0a01\u0a02\u0a3c\u0a41\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a70\u0a71\u0a75\u0a81\u0a82\u0abc\u0ac1-\u0ac5\u0ac7\u0ac8\u0acd\u0ae2\u0ae3\u0b01\u0b3c\u0b3e\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b57\u0b62\u0b63\u0b82\u0bbe\u0bc0\u0bcd\u0bd7\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0cbc\u0cbf\u0cc2\u0cc6\u0ccc\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0d3e\u0d41-\u0d44\u0d4d\u0d57\u0d62\u0d63\u0dca\u0dcf\u0dd2-\u0dd4\u0dd6\u0ddf\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0f18\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86\u0f87\u0f90-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039\u103a\u103d\u103e\u1058\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085\u1086\u108d\u109d\u135f\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u18a9\u1920-\u1922\u1927\u1928\u1932\u1939-\u193b\u1a17\u1a18\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80\u1b81\u1ba2-\u1ba5\u1ba8\u1ba9\u1c2c-\u1c33\u1c36\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1dc0-\u1de6\u1dfd-\u1dff\u200c\u200d\u20d0-\u20f0\u2cef-\u2cf1\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua66f-\ua672\ua67c\ua67d\ua6f0\ua6f1\ua802\ua806\ua80b\ua825\ua826\ua8c4\ua8e0-\ua8f1\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\uaa29-\uaa2e\uaa31\uaa32\uaa35\uaa36\uaa43\uaa4c\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uabe5\uabe8\uabed\udc00-\udfff\ufb1e\ufe00-\ufe0f\ufe20-\ufe26\uff9e\uff9f]/; -function isExtendingChar(ch) { return ch.charCodeAt(0) >= 768 && extendingChars.test(ch) } - -// Returns a number from the range [`0`; `str.length`] unless `pos` is outside that range. -function skipExtendingChars(str, pos, dir) { - while ((dir < 0 ? pos > 0 : pos < str.length) && isExtendingChar(str.charAt(pos))) { pos += dir; } - return pos -} - -// Returns the value from the range [`from`; `to`] that satisfies -// `pred` and is closest to `from`. Assumes that at least `to` -// satisfies `pred`. Supports `from` being greater than `to`. -function findFirst(pred, from, to) { - // At any point we are certain `to` satisfies `pred`, don't know - // whether `from` does. - var dir = from > to ? -1 : 1; - for (;;) { - if (from == to) { return from } - var midF = (from + to) / 2, mid = dir < 0 ? Math.ceil(midF) : Math.floor(midF); - if (mid == from) { return pred(mid) ? from : to } - if (pred(mid)) { to = mid; } - else { from = mid + dir; } - } -} - -// The display handles the DOM integration, both for input reading -// and content drawing. It holds references to DOM nodes and -// display-related state. - -function Display(place, doc, input) { - var d = this; - this.input = input; - - // Covers bottom-right square when both scrollbars are present. - d.scrollbarFiller = elt("div", null, "CodeMirror-scrollbar-filler"); - d.scrollbarFiller.setAttribute("cm-not-content", "true"); - // Covers bottom of gutter when coverGutterNextToScrollbar is on - // and h scrollbar is present. 
- d.gutterFiller = elt("div", null, "CodeMirror-gutter-filler"); - d.gutterFiller.setAttribute("cm-not-content", "true"); - // Will contain the actual code, positioned to cover the viewport. - d.lineDiv = eltP("div", null, "CodeMirror-code"); - // Elements are added to these to represent selection and cursors. - d.selectionDiv = elt("div", null, null, "position: relative; z-index: 1"); - d.cursorDiv = elt("div", null, "CodeMirror-cursors"); - // A visibility: hidden element used to find the size of things. - d.measure = elt("div", null, "CodeMirror-measure"); - // When lines outside of the viewport are measured, they are drawn in this. - d.lineMeasure = elt("div", null, "CodeMirror-measure"); - // Wraps everything that needs to exist inside the vertically-padded coordinate system - d.lineSpace = eltP("div", [d.measure, d.lineMeasure, d.selectionDiv, d.cursorDiv, d.lineDiv], - null, "position: relative; outline: none"); - var lines = eltP("div", [d.lineSpace], "CodeMirror-lines"); - // Moved around its parent to cover visible view. - d.mover = elt("div", [lines], null, "position: relative"); - // Set to the height of the document, allowing scrolling. - d.sizer = elt("div", [d.mover], "CodeMirror-sizer"); - d.sizerWidth = null; - // Behavior of elts with overflow: auto and padding is - // inconsistent across browsers. This is used to ensure the - // scrollable area is big enough. - d.heightForcer = elt("div", null, null, "position: absolute; height: " + scrollerGap + "px; width: 1px;"); - // Will contain the gutters, if any. - d.gutters = elt("div", null, "CodeMirror-gutters"); - d.lineGutter = null; - // Actual scrollable element. - d.scroller = elt("div", [d.sizer, d.heightForcer, d.gutters], "CodeMirror-scroll"); - d.scroller.setAttribute("tabIndex", "-1"); - // The element in which the editor lives. - d.wrapper = elt("div", [d.scrollbarFiller, d.gutterFiller, d.scroller], "CodeMirror"); - - // Work around IE7 z-index bug (not perfect, hence IE7 not really being supported) - if (ie && ie_version < 8) { d.gutters.style.zIndex = -1; d.scroller.style.paddingRight = 0; } - if (!webkit && !(gecko && mobile)) { d.scroller.draggable = true; } - - if (place) { - if (place.appendChild) { place.appendChild(d.wrapper); } - else { place(d.wrapper); } - } - - // Current rendered range (may be bigger than the view window). - d.viewFrom = d.viewTo = doc.first; - d.reportedViewFrom = d.reportedViewTo = doc.first; - // Information about the rendered lines. - d.view = []; - d.renderedView = null; - // Holds info about a single rendered line when it was rendered - // for measurement, while not in view. - d.externalMeasured = null; - // Empty space (in pixels) above the view - d.viewOffset = 0; - d.lastWrapHeight = d.lastWrapWidth = 0; - d.updateLineNumbers = null; - - d.nativeBarWidth = d.barHeight = d.barWidth = 0; - d.scrollbarsClipped = false; - - // Used to only resize the line number gutter when necessary (when - // the amount of lines crosses a boundary that makes its width change) - d.lineNumWidth = d.lineNumInnerWidth = d.lineNumChars = null; - // Set to true when a non-horizontal-scrolling line widget is - // added. As an optimization, line widget aligning is skipped when - // this is false. - d.alignWidgets = false; - - d.cachedCharWidth = d.cachedTextHeight = d.cachedPaddingH = null; - - // Tracks the maximum line length so that the horizontal scrollbar - // can be kept static when scrolling. 
- d.maxLine = null; - d.maxLineLength = 0; - d.maxLineChanged = false; - - // Used for measuring wheel scrolling granularity - d.wheelDX = d.wheelDY = d.wheelStartX = d.wheelStartY = null; - - // True when shift is held down. - d.shift = false; - - // Used to track whether anything happened since the context menu - // was opened. - d.selForContextMenu = null; - - d.activeTouch = null; - - input.init(d); -} - -// Find the line object corresponding to the given line number. -function getLine(doc, n) { - n -= doc.first; - if (n < 0 || n >= doc.size) { throw new Error("There is no line " + (n + doc.first) + " in the document.") } - var chunk = doc; - while (!chunk.lines) { - for (var i = 0;; ++i) { - var child = chunk.children[i], sz = child.chunkSize(); - if (n < sz) { chunk = child; break } - n -= sz; - } - } - return chunk.lines[n] -} - -// Get the part of a document between two positions, as an array of -// strings. -function getBetween(doc, start, end) { - var out = [], n = start.line; - doc.iter(start.line, end.line + 1, function (line) { - var text = line.text; - if (n == end.line) { text = text.slice(0, end.ch); } - if (n == start.line) { text = text.slice(start.ch); } - out.push(text); - ++n; - }); - return out -} -// Get the lines between from and to, as array of strings. -function getLines(doc, from, to) { - var out = []; - doc.iter(from, to, function (line) { out.push(line.text); }); // iter aborts when callback returns truthy value - return out -} - -// Update the height of a line, propagating the height change -// upwards to parent nodes. -function updateLineHeight(line, height) { - var diff = height - line.height; - if (diff) { for (var n = line; n; n = n.parent) { n.height += diff; } } -} - -// Given a line object, find its line number by walking up through -// its parent links. -function lineNo(line) { - if (line.parent == null) { return null } - var cur = line.parent, no = indexOf(cur.lines, line); - for (var chunk = cur.parent; chunk; cur = chunk, chunk = chunk.parent) { - for (var i = 0;; ++i) { - if (chunk.children[i] == cur) { break } - no += chunk.children[i].chunkSize(); - } - } - return no + cur.first -} - -// Find the line at the given vertical position, using the height -// information in the document tree. -function lineAtHeight(chunk, h) { - var n = chunk.first; - outer: do { - for (var i$1 = 0; i$1 < chunk.children.length; ++i$1) { - var child = chunk.children[i$1], ch = child.height; - if (h < ch) { chunk = child; continue outer } - h -= ch; - n += child.chunkSize(); - } - return n - } while (!chunk.lines) - var i = 0; - for (; i < chunk.lines.length; ++i) { - var line = chunk.lines[i], lh = line.height; - if (h < lh) { break } - h -= lh; - } - return n + i -} - -function isLine(doc, l) {return l >= doc.first && l < doc.first + doc.size} - -function lineNumberFor(options, i) { - return String(options.lineNumberFormatter(i + options.firstLineNumber)) -} - -// A Pos instance represents a position within the text. -function Pos(line, ch, sticky) { - if ( sticky === void 0 ) sticky = null; - - if (!(this instanceof Pos)) { return new Pos(line, ch, sticky) } - this.line = line; - this.ch = ch; - this.sticky = sticky; -} - -// Compare two positions, return 0 if they are the same, a negative -// number when a is less, and a positive number otherwise. 
-function cmp(a, b) { return a.line - b.line || a.ch - b.ch } - -function equalCursorPos(a, b) { return a.sticky == b.sticky && cmp(a, b) == 0 } - -function copyPos(x) {return Pos(x.line, x.ch)} -function maxPos(a, b) { return cmp(a, b) < 0 ? b : a } -function minPos(a, b) { return cmp(a, b) < 0 ? a : b } - -// Most of the external API clips given positions to make sure they -// actually exist within the document. -function clipLine(doc, n) {return Math.max(doc.first, Math.min(n, doc.first + doc.size - 1))} -function clipPos(doc, pos) { - if (pos.line < doc.first) { return Pos(doc.first, 0) } - var last = doc.first + doc.size - 1; - if (pos.line > last) { return Pos(last, getLine(doc, last).text.length) } - return clipToLen(pos, getLine(doc, pos.line).text.length) -} -function clipToLen(pos, linelen) { - var ch = pos.ch; - if (ch == null || ch > linelen) { return Pos(pos.line, linelen) } - else if (ch < 0) { return Pos(pos.line, 0) } - else { return pos } -} -function clipPosArray(doc, array) { - var out = []; - for (var i = 0; i < array.length; i++) { out[i] = clipPos(doc, array[i]); } - return out -} - -// Optimize some code when these features are not used. -var sawReadOnlySpans = false; -var sawCollapsedSpans = false; - -function seeReadOnlySpans() { - sawReadOnlySpans = true; -} - -function seeCollapsedSpans() { - sawCollapsedSpans = true; -} - -// TEXTMARKER SPANS - -function MarkedSpan(marker, from, to) { - this.marker = marker; - this.from = from; this.to = to; -} - -// Search an array of spans for a span matching the given marker. -function getMarkedSpanFor(spans, marker) { - if (spans) { for (var i = 0; i < spans.length; ++i) { - var span = spans[i]; - if (span.marker == marker) { return span } - } } -} -// Remove a span from an array, returning undefined if no spans are -// left (we don't store arrays for lines without spans). -function removeMarkedSpan(spans, span) { - var r; - for (var i = 0; i < spans.length; ++i) - { if (spans[i] != span) { (r || (r = [])).push(spans[i]); } } - return r -} -// Add a span to a line. -function addMarkedSpan(line, span) { - line.markedSpans = line.markedSpans ? line.markedSpans.concat([span]) : [span]; - span.marker.attachLine(line); -} - -// Used for the algorithm that adjusts markers for a change in the -// document. These functions cut an array of spans at a given -// character position, returning an array of remaining chunks (or -// undefined if nothing remains). -function markedSpansBefore(old, startCh, isInsert) { - var nw; - if (old) { for (var i = 0; i < old.length; ++i) { - var span = old[i], marker = span.marker; - var startsBefore = span.from == null || (marker.inclusiveLeft ? span.from <= startCh : span.from < startCh); - if (startsBefore || span.from == startCh && marker.type == "bookmark" && (!isInsert || !span.marker.insertLeft)) { - var endsAfter = span.to == null || (marker.inclusiveRight ? span.to >= startCh : span.to > startCh);(nw || (nw = [])).push(new MarkedSpan(marker, span.from, endsAfter ? null : span.to)); - } - } } - return nw -} -function markedSpansAfter(old, endCh, isInsert) { - var nw; - if (old) { for (var i = 0; i < old.length; ++i) { - var span = old[i], marker = span.marker; - var endsAfter = span.to == null || (marker.inclusiveRight ? span.to >= endCh : span.to > endCh); - if (endsAfter || span.from == endCh && marker.type == "bookmark" && (!isInsert || span.marker.insertLeft)) { - var startsBefore = span.from == null || (marker.inclusiveLeft ? 
span.from <= endCh : span.from < endCh);(nw || (nw = [])).push(new MarkedSpan(marker, startsBefore ? null : span.from - endCh, - span.to == null ? null : span.to - endCh)); - } - } } - return nw -} - -// Given a change object, compute the new set of marker spans that -// cover the line in which the change took place. Removes spans -// entirely within the change, reconnects spans belonging to the -// same marker that appear on both sides of the change, and cuts off -// spans partially within the change. Returns an array of span -// arrays with one element for each line in (after) the change. -function stretchSpansOverChange(doc, change) { - if (change.full) { return null } - var oldFirst = isLine(doc, change.from.line) && getLine(doc, change.from.line).markedSpans; - var oldLast = isLine(doc, change.to.line) && getLine(doc, change.to.line).markedSpans; - if (!oldFirst && !oldLast) { return null } - - var startCh = change.from.ch, endCh = change.to.ch, isInsert = cmp(change.from, change.to) == 0; - // Get the spans that 'stick out' on both sides - var first = markedSpansBefore(oldFirst, startCh, isInsert); - var last = markedSpansAfter(oldLast, endCh, isInsert); - - // Next, merge those two ends - var sameLine = change.text.length == 1, offset = lst(change.text).length + (sameLine ? startCh : 0); - if (first) { - // Fix up .to properties of first - for (var i = 0; i < first.length; ++i) { - var span = first[i]; - if (span.to == null) { - var found = getMarkedSpanFor(last, span.marker); - if (!found) { span.to = startCh; } - else if (sameLine) { span.to = found.to == null ? null : found.to + offset; } - } - } - } - if (last) { - // Fix up .from in last (or move them into first in case of sameLine) - for (var i$1 = 0; i$1 < last.length; ++i$1) { - var span$1 = last[i$1]; - if (span$1.to != null) { span$1.to += offset; } - if (span$1.from == null) { - var found$1 = getMarkedSpanFor(first, span$1.marker); - if (!found$1) { - span$1.from = offset; - if (sameLine) { (first || (first = [])).push(span$1); } - } - } else { - span$1.from += offset; - if (sameLine) { (first || (first = [])).push(span$1); } - } - } - } - // Make sure we didn't create any zero-length spans - if (first) { first = clearEmptySpans(first); } - if (last && last != first) { last = clearEmptySpans(last); } - - var newMarkers = [first]; - if (!sameLine) { - // Fill gap with whole-line-spans - var gap = change.text.length - 2, gapMarkers; - if (gap > 0 && first) - { for (var i$2 = 0; i$2 < first.length; ++i$2) - { if (first[i$2].to == null) - { (gapMarkers || (gapMarkers = [])).push(new MarkedSpan(first[i$2].marker, null, null)); } } } - for (var i$3 = 0; i$3 < gap; ++i$3) - { newMarkers.push(gapMarkers); } - newMarkers.push(last); - } - return newMarkers -} - -// Remove spans that are empty and don't have a clearWhenEmpty -// option of false. -function clearEmptySpans(spans) { - for (var i = 0; i < spans.length; ++i) { - var span = spans[i]; - if (span.from != null && span.from == span.to && span.marker.clearWhenEmpty !== false) - { spans.splice(i--, 1); } - } - if (!spans.length) { return null } - return spans -} - -// Used to 'clip' out readOnly ranges when making a change. 
-function removeReadOnlyRanges(doc, from, to) { - var markers = null; - doc.iter(from.line, to.line + 1, function (line) { - if (line.markedSpans) { for (var i = 0; i < line.markedSpans.length; ++i) { - var mark = line.markedSpans[i].marker; - if (mark.readOnly && (!markers || indexOf(markers, mark) == -1)) - { (markers || (markers = [])).push(mark); } - } } - }); - if (!markers) { return null } - var parts = [{from: from, to: to}]; - for (var i = 0; i < markers.length; ++i) { - var mk = markers[i], m = mk.find(0); - for (var j = 0; j < parts.length; ++j) { - var p = parts[j]; - if (cmp(p.to, m.from) < 0 || cmp(p.from, m.to) > 0) { continue } - var newParts = [j, 1], dfrom = cmp(p.from, m.from), dto = cmp(p.to, m.to); - if (dfrom < 0 || !mk.inclusiveLeft && !dfrom) - { newParts.push({from: p.from, to: m.from}); } - if (dto > 0 || !mk.inclusiveRight && !dto) - { newParts.push({from: m.to, to: p.to}); } - parts.splice.apply(parts, newParts); - j += newParts.length - 3; - } - } - return parts -} - -// Connect or disconnect spans from a line. -function detachMarkedSpans(line) { - var spans = line.markedSpans; - if (!spans) { return } - for (var i = 0; i < spans.length; ++i) - { spans[i].marker.detachLine(line); } - line.markedSpans = null; -} -function attachMarkedSpans(line, spans) { - if (!spans) { return } - for (var i = 0; i < spans.length; ++i) - { spans[i].marker.attachLine(line); } - line.markedSpans = spans; -} - -// Helpers used when computing which overlapping collapsed span -// counts as the larger one. -function extraLeft(marker) { return marker.inclusiveLeft ? -1 : 0 } -function extraRight(marker) { return marker.inclusiveRight ? 1 : 0 } - -// Returns a number indicating which of two overlapping collapsed -// spans is larger (and thus includes the other). Falls back to -// comparing ids when the spans cover exactly the same range. -function compareCollapsedMarkers(a, b) { - var lenDiff = a.lines.length - b.lines.length; - if (lenDiff != 0) { return lenDiff } - var aPos = a.find(), bPos = b.find(); - var fromCmp = cmp(aPos.from, bPos.from) || extraLeft(a) - extraLeft(b); - if (fromCmp) { return -fromCmp } - var toCmp = cmp(aPos.to, bPos.to) || extraRight(a) - extraRight(b); - if (toCmp) { return toCmp } - return b.id - a.id -} - -// Find out whether a line ends or starts in a collapsed span. If -// so, return the marker for that span. -function collapsedSpanAtSide(line, start) { - var sps = sawCollapsedSpans && line.markedSpans, found; - if (sps) { for (var sp = (void 0), i = 0; i < sps.length; ++i) { - sp = sps[i]; - if (sp.marker.collapsed && (start ? sp.from : sp.to) == null && - (!found || compareCollapsedMarkers(found, sp.marker) < 0)) - { found = sp.marker; } - } } - return found -} -function collapsedSpanAtStart(line) { return collapsedSpanAtSide(line, true) } -function collapsedSpanAtEnd(line) { return collapsedSpanAtSide(line, false) } - -// Test whether there exists a collapsed span that partially -// overlaps (covers the start or end, but not both) of a new span. -// Such overlap is not allowed. 
-function conflictingCollapsedRange(doc, lineNo$$1, from, to, marker) { - var line = getLine(doc, lineNo$$1); - var sps = sawCollapsedSpans && line.markedSpans; - if (sps) { for (var i = 0; i < sps.length; ++i) { - var sp = sps[i]; - if (!sp.marker.collapsed) { continue } - var found = sp.marker.find(0); - var fromCmp = cmp(found.from, from) || extraLeft(sp.marker) - extraLeft(marker); - var toCmp = cmp(found.to, to) || extraRight(sp.marker) - extraRight(marker); - if (fromCmp >= 0 && toCmp <= 0 || fromCmp <= 0 && toCmp >= 0) { continue } - if (fromCmp <= 0 && (sp.marker.inclusiveRight && marker.inclusiveLeft ? cmp(found.to, from) >= 0 : cmp(found.to, from) > 0) || - fromCmp >= 0 && (sp.marker.inclusiveRight && marker.inclusiveLeft ? cmp(found.from, to) <= 0 : cmp(found.from, to) < 0)) - { return true } - } } -} - -// A visual line is a line as drawn on the screen. Folding, for -// example, can cause multiple logical lines to appear on the same -// visual line. This finds the start of the visual line that the -// given line is part of (usually that is the line itself). -function visualLine(line) { - var merged; - while (merged = collapsedSpanAtStart(line)) - { line = merged.find(-1, true).line; } - return line -} - -function visualLineEnd(line) { - var merged; - while (merged = collapsedSpanAtEnd(line)) - { line = merged.find(1, true).line; } - return line -} - -// Returns an array of logical lines that continue the visual line -// started by the argument, or undefined if there are no such lines. -function visualLineContinued(line) { - var merged, lines; - while (merged = collapsedSpanAtEnd(line)) { - line = merged.find(1, true).line - ;(lines || (lines = [])).push(line); - } - return lines -} - -// Get the line number of the start of the visual line that the -// given line number is part of. -function visualLineNo(doc, lineN) { - var line = getLine(doc, lineN), vis = visualLine(line); - if (line == vis) { return lineN } - return lineNo(vis) -} - -// Get the line number of the start of the next visual line after -// the given line. -function visualLineEndNo(doc, lineN) { - if (lineN > doc.lastLine()) { return lineN } - var line = getLine(doc, lineN), merged; - if (!lineIsHidden(doc, line)) { return lineN } - while (merged = collapsedSpanAtEnd(line)) - { line = merged.find(1, true).line; } - return lineNo(line) + 1 -} - -// Compute whether a line is hidden. Lines count as hidden when they -// are part of a visual line that starts with another line, or when -// they are entirely covered by collapsed, non-widget span. 
-function lineIsHidden(doc, line) { - var sps = sawCollapsedSpans && line.markedSpans; - if (sps) { for (var sp = (void 0), i = 0; i < sps.length; ++i) { - sp = sps[i]; - if (!sp.marker.collapsed) { continue } - if (sp.from == null) { return true } - if (sp.marker.widgetNode) { continue } - if (sp.from == 0 && sp.marker.inclusiveLeft && lineIsHiddenInner(doc, line, sp)) - { return true } - } } -} -function lineIsHiddenInner(doc, line, span) { - if (span.to == null) { - var end = span.marker.find(1, true); - return lineIsHiddenInner(doc, end.line, getMarkedSpanFor(end.line.markedSpans, span.marker)) - } - if (span.marker.inclusiveRight && span.to == line.text.length) - { return true } - for (var sp = (void 0), i = 0; i < line.markedSpans.length; ++i) { - sp = line.markedSpans[i]; - if (sp.marker.collapsed && !sp.marker.widgetNode && sp.from == span.to && - (sp.to == null || sp.to != span.from) && - (sp.marker.inclusiveLeft || span.marker.inclusiveRight) && - lineIsHiddenInner(doc, line, sp)) { return true } - } -} - -// Find the height above the given line. -function heightAtLine(lineObj) { - lineObj = visualLine(lineObj); - - var h = 0, chunk = lineObj.parent; - for (var i = 0; i < chunk.lines.length; ++i) { - var line = chunk.lines[i]; - if (line == lineObj) { break } - else { h += line.height; } - } - for (var p = chunk.parent; p; chunk = p, p = chunk.parent) { - for (var i$1 = 0; i$1 < p.children.length; ++i$1) { - var cur = p.children[i$1]; - if (cur == chunk) { break } - else { h += cur.height; } - } - } - return h -} - -// Compute the character length of a line, taking into account -// collapsed ranges (see markText) that might hide parts, and join -// other lines onto it. -function lineLength(line) { - if (line.height == 0) { return 0 } - var len = line.text.length, merged, cur = line; - while (merged = collapsedSpanAtStart(cur)) { - var found = merged.find(0, true); - cur = found.from.line; - len += found.from.ch - found.to.ch; - } - cur = line; - while (merged = collapsedSpanAtEnd(cur)) { - var found$1 = merged.find(0, true); - len -= cur.text.length - found$1.from.ch; - cur = found$1.to.line; - len += cur.text.length - found$1.to.ch; - } - return len -} - -// Find the longest line in the document. -function findMaxLine(cm) { - var d = cm.display, doc = cm.doc; - d.maxLine = getLine(doc, doc.first); - d.maxLineLength = lineLength(d.maxLine); - d.maxLineChanged = true; - doc.iter(function (line) { - var len = lineLength(line); - if (len > d.maxLineLength) { - d.maxLineLength = len; - d.maxLine = line; - } - }); -} - -// BIDI HELPERS - -function iterateBidiSections(order, from, to, f) { - if (!order) { return f(from, to, "ltr", 0) } - var found = false; - for (var i = 0; i < order.length; ++i) { - var part = order[i]; - if (part.from < to && part.to > from || from == to && part.to == from) { - f(Math.max(part.from, from), Math.min(part.to, to), part.level == 1 ? "rtl" : "ltr", i); - found = true; - } - } - if (!found) { f(from, to, "ltr"); } -} - -var bidiOther = null; -function getBidiPartAt(order, ch, sticky) { - var found; - bidiOther = null; - for (var i = 0; i < order.length; ++i) { - var cur = order[i]; - if (cur.from < ch && cur.to > ch) { return i } - if (cur.to == ch) { - if (cur.from != cur.to && sticky == "before") { found = i; } - else { bidiOther = i; } - } - if (cur.from == ch) { - if (cur.from != cur.to && sticky != "before") { found = i; } - else { bidiOther = i; } - } - } - return found != null ? 
found : bidiOther -} - -// Bidirectional ordering algorithm -// See http://unicode.org/reports/tr9/tr9-13.html for the algorithm -// that this (partially) implements. - -// One-char codes used for character types: -// L (L): Left-to-Right -// R (R): Right-to-Left -// r (AL): Right-to-Left Arabic -// 1 (EN): European Number -// + (ES): European Number Separator -// % (ET): European Number Terminator -// n (AN): Arabic Number -// , (CS): Common Number Separator -// m (NSM): Non-Spacing Mark -// b (BN): Boundary Neutral -// s (B): Paragraph Separator -// t (S): Segment Separator -// w (WS): Whitespace -// N (ON): Other Neutrals - -// Returns null if characters are ordered as they appear -// (left-to-right), or an array of sections ({from, to, level} -// objects) in the order in which they occur visually. -var bidiOrdering = (function() { - // Character types for codepoints 0 to 0xff - var lowTypes = "bbbbbbbbbtstwsbbbbbbbbbbbbbbssstwNN%%%NNNNNN,N,N1111111111NNNNNNNLLLLLLLLLLLLLLLLLLLLLLLLLLNNNNNNLLLLLLLLLLLLLLLLLLLLLLLLLLNNNNbbbbbbsbbbbbbbbbbbbbbbbbbbbbbbbbb,N%%%%NNNNLNNNNN%%11NLNNN1LNNNNNLLLLLLLLLLLLLLLLLLLLLLLNLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLN"; - // Character types for codepoints 0x600 to 0x6f9 - var arabicTypes = "nnnnnnNNr%%r,rNNmmmmmmmmmmmrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrmmmmmmmmmmmmmmmmmmmmmnnnnnnnnnn%nnrrrmrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrmmmmmmmnNmmmmmmrrmmNmmmmrr1111111111"; - function charType(code) { - if (code <= 0xf7) { return lowTypes.charAt(code) } - else if (0x590 <= code && code <= 0x5f4) { return "R" } - else if (0x600 <= code && code <= 0x6f9) { return arabicTypes.charAt(code - 0x600) } - else if (0x6ee <= code && code <= 0x8ac) { return "r" } - else if (0x2000 <= code && code <= 0x200b) { return "w" } - else if (code == 0x200c) { return "b" } - else { return "L" } - } - - var bidiRE = /[\u0590-\u05f4\u0600-\u06ff\u0700-\u08ac]/; - var isNeutral = /[stwN]/, isStrong = /[LRr]/, countsAsLeft = /[Lb1n]/, countsAsNum = /[1n]/; - - function BidiSpan(level, from, to) { - this.level = level; - this.from = from; this.to = to; - } - - return function(str, direction) { - var outerType = direction == "ltr" ? "L" : "R"; - - if (str.length == 0 || direction == "ltr" && !bidiRE.test(str)) { return false } - var len = str.length, types = []; - for (var i = 0; i < len; ++i) - { types.push(charType(str.charCodeAt(i))); } - - // W1. Examine each non-spacing mark (NSM) in the level run, and - // change the type of the NSM to the type of the previous - // character. If the NSM is at the start of the level run, it will - // get the type of sor. - for (var i$1 = 0, prev = outerType; i$1 < len; ++i$1) { - var type = types[i$1]; - if (type == "m") { types[i$1] = prev; } - else { prev = type; } - } - - // W2. Search backwards from each instance of a European number - // until the first strong type (R, L, AL, or sor) is found. If an - // AL is found, change the type of the European number to Arabic - // number. - // W3. Change all ALs to R. - for (var i$2 = 0, cur = outerType; i$2 < len; ++i$2) { - var type$1 = types[i$2]; - if (type$1 == "1" && cur == "r") { types[i$2] = "n"; } - else if (isStrong.test(type$1)) { cur = type$1; if (type$1 == "r") { types[i$2] = "R"; } } - } - - // W4. A single European separator between two European numbers - // changes to a European number. A single common separator between - // two numbers of the same type changes to that type. 
- for (var i$3 = 1, prev$1 = types[0]; i$3 < len - 1; ++i$3) { - var type$2 = types[i$3]; - if (type$2 == "+" && prev$1 == "1" && types[i$3+1] == "1") { types[i$3] = "1"; } - else if (type$2 == "," && prev$1 == types[i$3+1] && - (prev$1 == "1" || prev$1 == "n")) { types[i$3] = prev$1; } - prev$1 = type$2; - } - - // W5. A sequence of European terminators adjacent to European - // numbers changes to all European numbers. - // W6. Otherwise, separators and terminators change to Other - // Neutral. - for (var i$4 = 0; i$4 < len; ++i$4) { - var type$3 = types[i$4]; - if (type$3 == ",") { types[i$4] = "N"; } - else if (type$3 == "%") { - var end = (void 0); - for (end = i$4 + 1; end < len && types[end] == "%"; ++end) {} - var replace = (i$4 && types[i$4-1] == "!") || (end < len && types[end] == "1") ? "1" : "N"; - for (var j = i$4; j < end; ++j) { types[j] = replace; } - i$4 = end - 1; - } - } - - // W7. Search backwards from each instance of a European number - // until the first strong type (R, L, or sor) is found. If an L is - // found, then change the type of the European number to L. - for (var i$5 = 0, cur$1 = outerType; i$5 < len; ++i$5) { - var type$4 = types[i$5]; - if (cur$1 == "L" && type$4 == "1") { types[i$5] = "L"; } - else if (isStrong.test(type$4)) { cur$1 = type$4; } - } - - // N1. A sequence of neutrals takes the direction of the - // surrounding strong text if the text on both sides has the same - // direction. European and Arabic numbers act as if they were R in - // terms of their influence on neutrals. Start-of-level-run (sor) - // and end-of-level-run (eor) are used at level run boundaries. - // N2. Any remaining neutrals take the embedding direction. - for (var i$6 = 0; i$6 < len; ++i$6) { - if (isNeutral.test(types[i$6])) { - var end$1 = (void 0); - for (end$1 = i$6 + 1; end$1 < len && isNeutral.test(types[end$1]); ++end$1) {} - var before = (i$6 ? types[i$6-1] : outerType) == "L"; - var after = (end$1 < len ? types[end$1] : outerType) == "L"; - var replace$1 = before == after ? (before ? "L" : "R") : outerType; - for (var j$1 = i$6; j$1 < end$1; ++j$1) { types[j$1] = replace$1; } - i$6 = end$1 - 1; - } - } - - // Here we depart from the documented algorithm, in order to avoid - // building up an actual levels array. Since there are only three - // levels (0, 1, 2) in an implementation that doesn't take - // explicit embedding into account, we can build up the order on - // the fly, without following the level-based algorithm. 
- var order = [], m; - for (var i$7 = 0; i$7 < len;) { - if (countsAsLeft.test(types[i$7])) { - var start = i$7; - for (++i$7; i$7 < len && countsAsLeft.test(types[i$7]); ++i$7) {} - order.push(new BidiSpan(0, start, i$7)); - } else { - var pos = i$7, at = order.length; - for (++i$7; i$7 < len && types[i$7] != "L"; ++i$7) {} - for (var j$2 = pos; j$2 < i$7;) { - if (countsAsNum.test(types[j$2])) { - if (pos < j$2) { order.splice(at, 0, new BidiSpan(1, pos, j$2)); } - var nstart = j$2; - for (++j$2; j$2 < i$7 && countsAsNum.test(types[j$2]); ++j$2) {} - order.splice(at, 0, new BidiSpan(2, nstart, j$2)); - pos = j$2; - } else { ++j$2; } - } - if (pos < i$7) { order.splice(at, 0, new BidiSpan(1, pos, i$7)); } - } - } - if (direction == "ltr") { - if (order[0].level == 1 && (m = str.match(/^\s+/))) { - order[0].from = m[0].length; - order.unshift(new BidiSpan(0, 0, m[0].length)); - } - if (lst(order).level == 1 && (m = str.match(/\s+$/))) { - lst(order).to -= m[0].length; - order.push(new BidiSpan(0, len - m[0].length, len)); - } - } - - return direction == "rtl" ? order.reverse() : order - } -})(); - -// Get the bidi ordering for the given line (and cache it). Returns -// false for lines that are fully left-to-right, and an array of -// BidiSpan objects otherwise. -function getOrder(line, direction) { - var order = line.order; - if (order == null) { order = line.order = bidiOrdering(line.text, direction); } - return order -} - -// EVENT HANDLING - -// Lightweight event framework. on/off also work on DOM nodes, -// registering native DOM handlers. - -var noHandlers = []; - -var on = function(emitter, type, f) { - if (emitter.addEventListener) { - emitter.addEventListener(type, f, false); - } else if (emitter.attachEvent) { - emitter.attachEvent("on" + type, f); - } else { - var map$$1 = emitter._handlers || (emitter._handlers = {}); - map$$1[type] = (map$$1[type] || noHandlers).concat(f); - } -}; - -function getHandlers(emitter, type) { - return emitter._handlers && emitter._handlers[type] || noHandlers -} - -function off(emitter, type, f) { - if (emitter.removeEventListener) { - emitter.removeEventListener(type, f, false); - } else if (emitter.detachEvent) { - emitter.detachEvent("on" + type, f); - } else { - var map$$1 = emitter._handlers, arr = map$$1 && map$$1[type]; - if (arr) { - var index = indexOf(arr, f); - if (index > -1) - { map$$1[type] = arr.slice(0, index).concat(arr.slice(index + 1)); } - } - } -} - -function signal(emitter, type /*, values...*/) { - var handlers = getHandlers(emitter, type); - if (!handlers.length) { return } - var args = Array.prototype.slice.call(arguments, 2); - for (var i = 0; i < handlers.length; ++i) { handlers[i].apply(null, args); } -} - -// The DOM events that CodeMirror handles can be overridden by -// registering a (non-DOM) handler on the editor for the event name, -// and preventDefault-ing the event in that handler. 
-function signalDOMEvent(cm, e, override) { - if (typeof e == "string") - { e = {type: e, preventDefault: function() { this.defaultPrevented = true; }}; } - signal(cm, override || e.type, cm, e); - return e_defaultPrevented(e) || e.codemirrorIgnore -} - -function signalCursorActivity(cm) { - var arr = cm._handlers && cm._handlers.cursorActivity; - if (!arr) { return } - var set = cm.curOp.cursorActivityHandlers || (cm.curOp.cursorActivityHandlers = []); - for (var i = 0; i < arr.length; ++i) { if (indexOf(set, arr[i]) == -1) - { set.push(arr[i]); } } -} - -function hasHandler(emitter, type) { - return getHandlers(emitter, type).length > 0 -} - -// Add on and off methods to a constructor's prototype, to make -// registering events on such objects more convenient. -function eventMixin(ctor) { - ctor.prototype.on = function(type, f) {on(this, type, f);}; - ctor.prototype.off = function(type, f) {off(this, type, f);}; -} - -// Due to the fact that we still support jurassic IE versions, some -// compatibility wrappers are needed. - -function e_preventDefault(e) { - if (e.preventDefault) { e.preventDefault(); } - else { e.returnValue = false; } -} -function e_stopPropagation(e) { - if (e.stopPropagation) { e.stopPropagation(); } - else { e.cancelBubble = true; } -} -function e_defaultPrevented(e) { - return e.defaultPrevented != null ? e.defaultPrevented : e.returnValue == false -} -function e_stop(e) {e_preventDefault(e); e_stopPropagation(e);} - -function e_target(e) {return e.target || e.srcElement} -function e_button(e) { - var b = e.which; - if (b == null) { - if (e.button & 1) { b = 1; } - else if (e.button & 2) { b = 3; } - else if (e.button & 4) { b = 2; } - } - if (mac && e.ctrlKey && b == 1) { b = 3; } - return b -} - -// Detect drag-and-drop -var dragAndDrop = function() { - // There is *some* kind of drag-and-drop support in IE6-8, but I - // couldn't get it to work yet. - if (ie && ie_version < 9) { return false } - var div = elt('div'); - return "draggable" in div || "dragDrop" in div -}(); - -var zwspSupported; -function zeroWidthElement(measure) { - if (zwspSupported == null) { - var test = elt("span", "\u200b"); - removeChildrenAndAdd(measure, elt("span", [test, document.createTextNode("x")])); - if (measure.firstChild.offsetHeight != 0) - { zwspSupported = test.offsetWidth <= 1 && test.offsetHeight > 2 && !(ie && ie_version < 8); } - } - var node = zwspSupported ? elt("span", "\u200b") : - elt("span", "\u00a0", null, "display: inline-block; width: 1px; margin-right: -1px"); - node.setAttribute("cm-text", ""); - return node -} - -// Feature-detect IE's crummy client rect reporting for bidi text -var badBidiRects; -function hasBadBidiRects(measure) { - if (badBidiRects != null) { return badBidiRects } - var txt = removeChildrenAndAdd(measure, document.createTextNode("A\u062eA")); - var r0 = range(txt, 0, 1).getBoundingClientRect(); - var r1 = range(txt, 1, 2).getBoundingClientRect(); - removeChildren(measure); - if (!r0 || r0.left == r0.right) { return false } // Safari returns null in some cases (#2780) - return badBidiRects = (r1.right - r0.right < 3) -} - -// See if "".split is the broken IE version, if so, provide an -// alternative way to split lines. -var splitLinesAuto = "\n\nb".split(/\n/).length != 3 ? function (string) { - var pos = 0, result = [], l = string.length; - while (pos <= l) { - var nl = string.indexOf("\n", pos); - if (nl == -1) { nl = string.length; } - var line = string.slice(pos, string.charAt(nl - 1) == "\r" ? 
nl - 1 : nl); - var rt = line.indexOf("\r"); - if (rt != -1) { - result.push(line.slice(0, rt)); - pos += rt + 1; - } else { - result.push(line); - pos = nl + 1; - } - } - return result -} : function (string) { return string.split(/\r\n?|\n/); }; - -var hasSelection = window.getSelection ? function (te) { - try { return te.selectionStart != te.selectionEnd } - catch(e) { return false } -} : function (te) { - var range$$1; - try {range$$1 = te.ownerDocument.selection.createRange();} - catch(e) {} - if (!range$$1 || range$$1.parentElement() != te) { return false } - return range$$1.compareEndPoints("StartToEnd", range$$1) != 0 -}; - -var hasCopyEvent = (function () { - var e = elt("div"); - if ("oncopy" in e) { return true } - e.setAttribute("oncopy", "return;"); - return typeof e.oncopy == "function" -})(); - -var badZoomedRects = null; -function hasBadZoomedRects(measure) { - if (badZoomedRects != null) { return badZoomedRects } - var node = removeChildrenAndAdd(measure, elt("span", "x")); - var normal = node.getBoundingClientRect(); - var fromRange = range(node, 0, 1).getBoundingClientRect(); - return badZoomedRects = Math.abs(normal.left - fromRange.left) > 1 -} - -// Known modes, by name and by MIME -var modes = {}; -var mimeModes = {}; - -// Extra arguments are stored as the mode's dependencies, which is -// used by (legacy) mechanisms like loadmode.js to automatically -// load a mode. (Preferred mechanism is the require/define calls.) -function defineMode(name, mode) { - if (arguments.length > 2) - { mode.dependencies = Array.prototype.slice.call(arguments, 2); } - modes[name] = mode; -} - -function defineMIME(mime, spec) { - mimeModes[mime] = spec; -} - -// Given a MIME type, a {name, ...options} config object, or a name -// string, return a mode config object. -function resolveMode(spec) { - if (typeof spec == "string" && mimeModes.hasOwnProperty(spec)) { - spec = mimeModes[spec]; - } else if (spec && typeof spec.name == "string" && mimeModes.hasOwnProperty(spec.name)) { - var found = mimeModes[spec.name]; - if (typeof found == "string") { found = {name: found}; } - spec = createObj(found, spec); - spec.name = found.name; - } else if (typeof spec == "string" && /^[\w\-]+\/[\w\-]+\+xml$/.test(spec)) { - return resolveMode("application/xml") - } else if (typeof spec == "string" && /^[\w\-]+\/[\w\-]+\+json$/.test(spec)) { - return resolveMode("application/json") - } - if (typeof spec == "string") { return {name: spec} } - else { return spec || {name: "null"} } -} - -// Given a mode spec (anything that resolveMode accepts), find and -// initialize an actual mode object. -function getMode(options, spec) { - spec = resolveMode(spec); - var mfactory = modes[spec.name]; - if (!mfactory) { return getMode(options, "text/plain") } - var modeObj = mfactory(options, spec); - if (modeExtensions.hasOwnProperty(spec.name)) { - var exts = modeExtensions[spec.name]; - for (var prop in exts) { - if (!exts.hasOwnProperty(prop)) { continue } - if (modeObj.hasOwnProperty(prop)) { modeObj["_" + prop] = modeObj[prop]; } - modeObj[prop] = exts[prop]; - } - } - modeObj.name = spec.name; - if (spec.helperType) { modeObj.helperType = spec.helperType; } - if (spec.modeProps) { for (var prop$1 in spec.modeProps) - { modeObj[prop$1] = spec.modeProps[prop$1]; } } - - return modeObj -} - -// This can be used to attach properties to mode objects from -// outside the actual mode definition. -var modeExtensions = {}; -function extendMode(mode, properties) { - var exts = modeExtensions.hasOwnProperty(mode) ? 
modeExtensions[mode] : (modeExtensions[mode] = {}); - copyObj(properties, exts); -} - -function copyState(mode, state) { - if (state === true) { return state } - if (mode.copyState) { return mode.copyState(state) } - var nstate = {}; - for (var n in state) { - var val = state[n]; - if (val instanceof Array) { val = val.concat([]); } - nstate[n] = val; - } - return nstate -} - -// Given a mode and a state (for that mode), find the inner mode and -// state at the position that the state refers to. -function innerMode(mode, state) { - var info; - while (mode.innerMode) { - info = mode.innerMode(state); - if (!info || info.mode == mode) { break } - state = info.state; - mode = info.mode; - } - return info || {mode: mode, state: state} -} - -function startState(mode, a1, a2) { - return mode.startState ? mode.startState(a1, a2) : true -} - -// STRING STREAM - -// Fed to the mode parsers, provides helper functions to make -// parsers more succinct. - -var StringStream = function(string, tabSize, lineOracle) { - this.pos = this.start = 0; - this.string = string; - this.tabSize = tabSize || 8; - this.lastColumnPos = this.lastColumnValue = 0; - this.lineStart = 0; - this.lineOracle = lineOracle; -}; - -StringStream.prototype.eol = function () {return this.pos >= this.string.length}; -StringStream.prototype.sol = function () {return this.pos == this.lineStart}; -StringStream.prototype.peek = function () {return this.string.charAt(this.pos) || undefined}; -StringStream.prototype.next = function () { - if (this.pos < this.string.length) - { return this.string.charAt(this.pos++) } -}; -StringStream.prototype.eat = function (match) { - var ch = this.string.charAt(this.pos); - var ok; - if (typeof match == "string") { ok = ch == match; } - else { ok = ch && (match.test ? match.test(ch) : match(ch)); } - if (ok) {++this.pos; return ch} -}; -StringStream.prototype.eatWhile = function (match) { - var start = this.pos; - while (this.eat(match)){} - return this.pos > start -}; -StringStream.prototype.eatSpace = function () { - var this$1 = this; - - var start = this.pos; - while (/[\s\u00a0]/.test(this.string.charAt(this.pos))) { ++this$1.pos; } - return this.pos > start -}; -StringStream.prototype.skipToEnd = function () {this.pos = this.string.length;}; -StringStream.prototype.skipTo = function (ch) { - var found = this.string.indexOf(ch, this.pos); - if (found > -1) {this.pos = found; return true} -}; -StringStream.prototype.backUp = function (n) {this.pos -= n;}; -StringStream.prototype.column = function () { - if (this.lastColumnPos < this.start) { - this.lastColumnValue = countColumn(this.string, this.start, this.tabSize, this.lastColumnPos, this.lastColumnValue); - this.lastColumnPos = this.start; - } - return this.lastColumnValue - (this.lineStart ? countColumn(this.string, this.lineStart, this.tabSize) : 0) -}; -StringStream.prototype.indentation = function () { - return countColumn(this.string, null, this.tabSize) - - (this.lineStart ? countColumn(this.string, this.lineStart, this.tabSize) : 0) -}; -StringStream.prototype.match = function (pattern, consume, caseInsensitive) { - if (typeof pattern == "string") { - var cased = function (str) { return caseInsensitive ? 
str.toLowerCase() : str; }; - var substr = this.string.substr(this.pos, pattern.length); - if (cased(substr) == cased(pattern)) { - if (consume !== false) { this.pos += pattern.length; } - return true - } - } else { - var match = this.string.slice(this.pos).match(pattern); - if (match && match.index > 0) { return null } - if (match && consume !== false) { this.pos += match[0].length; } - return match - } -}; -StringStream.prototype.current = function (){return this.string.slice(this.start, this.pos)}; -StringStream.prototype.hideFirstChars = function (n, inner) { - this.lineStart += n; - try { return inner() } - finally { this.lineStart -= n; } -}; -StringStream.prototype.lookAhead = function (n) { - var oracle = this.lineOracle; - return oracle && oracle.lookAhead(n) -}; -StringStream.prototype.baseToken = function () { - var oracle = this.lineOracle; - return oracle && oracle.baseToken(this.pos) -}; - -var SavedContext = function(state, lookAhead) { - this.state = state; - this.lookAhead = lookAhead; -}; - -var Context = function(doc, state, line, lookAhead) { - this.state = state; - this.doc = doc; - this.line = line; - this.maxLookAhead = lookAhead || 0; - this.baseTokens = null; - this.baseTokenPos = 1; -}; - -Context.prototype.lookAhead = function (n) { - var line = this.doc.getLine(this.line + n); - if (line != null && n > this.maxLookAhead) { this.maxLookAhead = n; } - return line -}; - -Context.prototype.baseToken = function (n) { - var this$1 = this; - - if (!this.baseTokens) { return null } - while (this.baseTokens[this.baseTokenPos] <= n) - { this$1.baseTokenPos += 2; } - var type = this.baseTokens[this.baseTokenPos + 1]; - return {type: type && type.replace(/( |^)overlay .*/, ""), - size: this.baseTokens[this.baseTokenPos] - n} -}; - -Context.prototype.nextLine = function () { - this.line++; - if (this.maxLookAhead > 0) { this.maxLookAhead--; } -}; - -Context.fromSaved = function (doc, saved, line) { - if (saved instanceof SavedContext) - { return new Context(doc, copyState(doc.mode, saved.state), line, saved.lookAhead) } - else - { return new Context(doc, copyState(doc.mode, saved), line) } -}; - -Context.prototype.save = function (copy) { - var state = copy !== false ? copyState(this.doc.mode, this.state) : this.state; - return this.maxLookAhead > 0 ? new SavedContext(state, this.maxLookAhead) : state -}; - - -// Compute a style array (an array starting with a mode generation -// -- for invalidation -- followed by pairs of end positions and -// style strings), which is used to highlight the tokens on the -// line. -function highlightLine(cm, line, context, forceToEnd) { - // A styles array always starts with a number identifying the - // mode/overlays that it is based on (for easy invalidation). - var st = [cm.state.modeGen], lineClasses = {}; - // Compute the base array of styles - runMode(cm, line.text, cm.doc.mode, context, function (end, style) { return st.push(end, style); }, - lineClasses, forceToEnd); - var state = context.state; - - // Run overlays, adjust style array. 
- var loop = function ( o ) { - context.baseTokens = st; - var overlay = cm.state.overlays[o], i = 1, at = 0; - context.state = true; - runMode(cm, line.text, overlay.mode, context, function (end, style) { - var start = i; - // Ensure there's a token end at the current position, and that i points at it - while (at < end) { - var i_end = st[i]; - if (i_end > end) - { st.splice(i, 1, end, st[i+1], i_end); } - i += 2; - at = Math.min(end, i_end); - } - if (!style) { return } - if (overlay.opaque) { - st.splice(start, i - start, end, "overlay " + style); - i = start + 2; - } else { - for (; start < i; start += 2) { - var cur = st[start+1]; - st[start+1] = (cur ? cur + " " : "") + "overlay " + style; - } - } - }, lineClasses); - context.state = state; - context.baseTokens = null; - context.baseTokenPos = 1; - }; - - for (var o = 0; o < cm.state.overlays.length; ++o) loop( o ); - - return {styles: st, classes: lineClasses.bgClass || lineClasses.textClass ? lineClasses : null} -} - -function getLineStyles(cm, line, updateFrontier) { - if (!line.styles || line.styles[0] != cm.state.modeGen) { - var context = getContextBefore(cm, lineNo(line)); - var resetState = line.text.length > cm.options.maxHighlightLength && copyState(cm.doc.mode, context.state); - var result = highlightLine(cm, line, context); - if (resetState) { context.state = resetState; } - line.stateAfter = context.save(!resetState); - line.styles = result.styles; - if (result.classes) { line.styleClasses = result.classes; } - else if (line.styleClasses) { line.styleClasses = null; } - if (updateFrontier === cm.doc.highlightFrontier) - { cm.doc.modeFrontier = Math.max(cm.doc.modeFrontier, ++cm.doc.highlightFrontier); } - } - return line.styles -} - -function getContextBefore(cm, n, precise) { - var doc = cm.doc, display = cm.display; - if (!doc.mode.startState) { return new Context(doc, true, n) } - var start = findStartLine(cm, n, precise); - var saved = start > doc.first && getLine(doc, start - 1).stateAfter; - var context = saved ? Context.fromSaved(doc, saved, start) : new Context(doc, startState(doc.mode), start); - - doc.iter(start, n, function (line) { - processLine(cm, line.text, context); - var pos = context.line; - line.stateAfter = pos == n - 1 || pos % 5 == 0 || pos >= display.viewFrom && pos < display.viewTo ? context.save() : null; - context.nextLine(); - }); - if (precise) { doc.modeFrontier = context.line; } - return context -} - -// Lightweight form of highlight -- proceed over this line and -// update state, but don't save a style array. Used for lines that -// aren't currently visible. 
-function processLine(cm, text, context, startAt) { - var mode = cm.doc.mode; - var stream = new StringStream(text, cm.options.tabSize, context); - stream.start = stream.pos = startAt || 0; - if (text == "") { callBlankLine(mode, context.state); } - while (!stream.eol()) { - readToken(mode, stream, context.state); - stream.start = stream.pos; - } -} - -function callBlankLine(mode, state) { - if (mode.blankLine) { return mode.blankLine(state) } - if (!mode.innerMode) { return } - var inner = innerMode(mode, state); - if (inner.mode.blankLine) { return inner.mode.blankLine(inner.state) } -} - -function readToken(mode, stream, state, inner) { - for (var i = 0; i < 10; i++) { - if (inner) { inner[0] = innerMode(mode, state).mode; } - var style = mode.token(stream, state); - if (stream.pos > stream.start) { return style } - } - throw new Error("Mode " + mode.name + " failed to advance stream.") -} - -var Token = function(stream, type, state) { - this.start = stream.start; this.end = stream.pos; - this.string = stream.current(); - this.type = type || null; - this.state = state; -}; - -// Utility for getTokenAt and getLineTokens -function takeToken(cm, pos, precise, asArray) { - var doc = cm.doc, mode = doc.mode, style; - pos = clipPos(doc, pos); - var line = getLine(doc, pos.line), context = getContextBefore(cm, pos.line, precise); - var stream = new StringStream(line.text, cm.options.tabSize, context), tokens; - if (asArray) { tokens = []; } - while ((asArray || stream.pos < pos.ch) && !stream.eol()) { - stream.start = stream.pos; - style = readToken(mode, stream, context.state); - if (asArray) { tokens.push(new Token(stream, style, copyState(doc.mode, context.state))); } - } - return asArray ? tokens : new Token(stream, style, context.state) -} - -function extractLineClasses(type, output) { - if (type) { for (;;) { - var lineClass = type.match(/(?:^|\s+)line-(background-)?(\S+)/); - if (!lineClass) { break } - type = type.slice(0, lineClass.index) + type.slice(lineClass.index + lineClass[0].length); - var prop = lineClass[1] ? "bgClass" : "textClass"; - if (output[prop] == null) - { output[prop] = lineClass[2]; } - else if (!(new RegExp("(?:^|\s)" + lineClass[2] + "(?:$|\s)")).test(output[prop])) - { output[prop] += " " + lineClass[2]; } - } } - return type -} - -// Run the given mode's parser over a line, calling f for each token. -function runMode(cm, text, mode, context, f, lineClasses, forceToEnd) { - var flattenSpans = mode.flattenSpans; - if (flattenSpans == null) { flattenSpans = cm.options.flattenSpans; } - var curStart = 0, curStyle = null; - var stream = new StringStream(text, cm.options.tabSize, context), style; - var inner = cm.options.addModeClass && [null]; - if (text == "") { extractLineClasses(callBlankLine(mode, context.state), lineClasses); } - while (!stream.eol()) { - if (stream.pos > cm.options.maxHighlightLength) { - flattenSpans = false; - if (forceToEnd) { processLine(cm, text, context, stream.pos); } - stream.pos = text.length; - style = null; - } else { - style = extractLineClasses(readToken(mode, stream, context.state, inner), lineClasses); - } - if (inner) { - var mName = inner[0].name; - if (mName) { style = "m-" + (style ? 
mName + " " + style : mName); } - } - if (!flattenSpans || curStyle != style) { - while (curStart < stream.start) { - curStart = Math.min(stream.start, curStart + 5000); - f(curStart, curStyle); - } - curStyle = style; - } - stream.start = stream.pos; - } - while (curStart < stream.pos) { - // Webkit seems to refuse to render text nodes longer than 57444 - // characters, and returns inaccurate measurements in nodes - // starting around 5000 chars. - var pos = Math.min(stream.pos, curStart + 5000); - f(pos, curStyle); - curStart = pos; - } -} - -// Finds the line to start with when starting a parse. Tries to -// find a line with a stateAfter, so that it can start with a -// valid state. If that fails, it returns the line with the -// smallest indentation, which tends to need the least context to -// parse correctly. -function findStartLine(cm, n, precise) { - var minindent, minline, doc = cm.doc; - var lim = precise ? -1 : n - (cm.doc.mode.innerMode ? 1000 : 100); - for (var search = n; search > lim; --search) { - if (search <= doc.first) { return doc.first } - var line = getLine(doc, search - 1), after = line.stateAfter; - if (after && (!precise || search + (after instanceof SavedContext ? after.lookAhead : 0) <= doc.modeFrontier)) - { return search } - var indented = countColumn(line.text, null, cm.options.tabSize); - if (minline == null || minindent > indented) { - minline = search - 1; - minindent = indented; - } - } - return minline -} - -function retreatFrontier(doc, n) { - doc.modeFrontier = Math.min(doc.modeFrontier, n); - if (doc.highlightFrontier < n - 10) { return } - var start = doc.first; - for (var line = n - 1; line > start; line--) { - var saved = getLine(doc, line).stateAfter; - // change is on 3 - // state on line 1 looked ahead 2 -- so saw 3 - // test 1 + 2 < 3 should cover this - if (saved && (!(saved instanceof SavedContext) || line + saved.lookAhead < n)) { - start = line + 1; - break - } - } - doc.highlightFrontier = Math.min(doc.highlightFrontier, start); -} - -// LINE DATA STRUCTURE - -// Line objects. These hold state related to a line, including -// highlighting info (the styles array). -var Line = function(text, markedSpans, estimateHeight) { - this.text = text; - attachMarkedSpans(this, markedSpans); - this.height = estimateHeight ? estimateHeight(this) : 1; -}; - -Line.prototype.lineNo = function () { return lineNo(this) }; -eventMixin(Line); - -// Change the content (text, markers) of a line. Automatically -// invalidates cached information and tries to re-estimate the -// line's height. -function updateLine(line, text, markedSpans, estimateHeight) { - line.text = text; - if (line.stateAfter) { line.stateAfter = null; } - if (line.styles) { line.styles = null; } - if (line.order != null) { line.order = null; } - detachMarkedSpans(line); - attachMarkedSpans(line, markedSpans); - var estHeight = estimateHeight ? estimateHeight(line) : 1; - if (estHeight != line.height) { updateLineHeight(line, estHeight); } -} - -// Detach a line from the document tree and its markers. -function cleanUpLine(line) { - line.parent = null; - detachMarkedSpans(line); -} - -// Convert a style as returned by a mode (either null, or a string -// containing one or more styles) to a CSS style. This is cached, -// and also looks for line-wide styles. -var styleToClassCache = {}; -var styleToClassCacheWithMode = {}; -function interpretTokenStyle(style, options) { - if (!style || /^\s*$/.test(style)) { return null } - var cache = options.addModeClass ? 
styleToClassCacheWithMode : styleToClassCache; - return cache[style] || - (cache[style] = style.replace(/\S+/g, "cm-$&")) -} - -// Render the DOM representation of the text of a line. Also builds -// up a 'line map', which points at the DOM nodes that represent -// specific stretches of text, and is used by the measuring code. -// The returned object contains the DOM node, this map, and -// information about line-wide styles that were set by the mode. -function buildLineContent(cm, lineView) { - // The padding-right forces the element to have a 'border', which - // is needed on Webkit to be able to get line-level bounding - // rectangles for it (in measureChar). - var content = eltP("span", null, null, webkit ? "padding-right: .1px" : null); - var builder = {pre: eltP("pre", [content], "CodeMirror-line"), content: content, - col: 0, pos: 0, cm: cm, - trailingSpace: false, - splitSpaces: (ie || webkit) && cm.getOption("lineWrapping")}; - lineView.measure = {}; - - // Iterate over the logical lines that make up this visual line. - for (var i = 0; i <= (lineView.rest ? lineView.rest.length : 0); i++) { - var line = i ? lineView.rest[i - 1] : lineView.line, order = (void 0); - builder.pos = 0; - builder.addToken = buildToken; - // Optionally wire in some hacks into the token-rendering - // algorithm, to deal with browser quirks. - if (hasBadBidiRects(cm.display.measure) && (order = getOrder(line, cm.doc.direction))) - { builder.addToken = buildTokenBadBidi(builder.addToken, order); } - builder.map = []; - var allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line); - insertLineContent(line, builder, getLineStyles(cm, line, allowFrontierUpdate)); - if (line.styleClasses) { - if (line.styleClasses.bgClass) - { builder.bgClass = joinClasses(line.styleClasses.bgClass, builder.bgClass || ""); } - if (line.styleClasses.textClass) - { builder.textClass = joinClasses(line.styleClasses.textClass, builder.textClass || ""); } - } - - // Ensure at least a single node is present, for measuring. - if (builder.map.length == 0) - { builder.map.push(0, 0, builder.content.appendChild(zeroWidthElement(cm.display.measure))); } - - // Store the map and a cache object for the current logical line - if (i == 0) { - lineView.measure.map = builder.map; - lineView.measure.cache = {}; - } else { - (lineView.measure.maps || (lineView.measure.maps = [])).push(builder.map) - ;(lineView.measure.caches || (lineView.measure.caches = [])).push({}); - } - } - - // See issue #2901 - if (webkit) { - var last = builder.content.lastChild; - if (/\bcm-tab\b/.test(last.className) || (last.querySelector && last.querySelector(".cm-tab"))) - { builder.content.className = "cm-tab-wrap-hack"; } - } - - signal(cm, "renderLine", cm, lineView.line, builder.pre); - if (builder.pre.className) - { builder.textClass = joinClasses(builder.pre.className, builder.textClass || ""); } - - return builder -} - -function defaultSpecialCharPlaceholder(ch) { - var token = elt("span", "\u2022", "cm-invalidchar"); - token.title = "\\u" + ch.charCodeAt(0).toString(16); - token.setAttribute("aria-label", token.title); - return token -} - -// Build up the DOM representation for a single token, and add it to -// the line map. Takes care to render special characters separately. -function buildToken(builder, text, style, startStyle, endStyle, title, css) { - if (!text) { return } - var displayText = builder.splitSpaces ? 
splitSpaces(text, builder.trailingSpace) : text; - var special = builder.cm.state.specialChars, mustWrap = false; - var content; - if (!special.test(text)) { - builder.col += text.length; - content = document.createTextNode(displayText); - builder.map.push(builder.pos, builder.pos + text.length, content); - if (ie && ie_version < 9) { mustWrap = true; } - builder.pos += text.length; - } else { - content = document.createDocumentFragment(); - var pos = 0; - while (true) { - special.lastIndex = pos; - var m = special.exec(text); - var skipped = m ? m.index - pos : text.length - pos; - if (skipped) { - var txt = document.createTextNode(displayText.slice(pos, pos + skipped)); - if (ie && ie_version < 9) { content.appendChild(elt("span", [txt])); } - else { content.appendChild(txt); } - builder.map.push(builder.pos, builder.pos + skipped, txt); - builder.col += skipped; - builder.pos += skipped; - } - if (!m) { break } - pos += skipped + 1; - var txt$1 = (void 0); - if (m[0] == "\t") { - var tabSize = builder.cm.options.tabSize, tabWidth = tabSize - builder.col % tabSize; - txt$1 = content.appendChild(elt("span", spaceStr(tabWidth), "cm-tab")); - txt$1.setAttribute("role", "presentation"); - txt$1.setAttribute("cm-text", "\t"); - builder.col += tabWidth; - } else if (m[0] == "\r" || m[0] == "\n") { - txt$1 = content.appendChild(elt("span", m[0] == "\r" ? "\u240d" : "\u2424", "cm-invalidchar")); - txt$1.setAttribute("cm-text", m[0]); - builder.col += 1; - } else { - txt$1 = builder.cm.options.specialCharPlaceholder(m[0]); - txt$1.setAttribute("cm-text", m[0]); - if (ie && ie_version < 9) { content.appendChild(elt("span", [txt$1])); } - else { content.appendChild(txt$1); } - builder.col += 1; - } - builder.map.push(builder.pos, builder.pos + 1, txt$1); - builder.pos++; - } - } - builder.trailingSpace = displayText.charCodeAt(text.length - 1) == 32; - if (style || startStyle || endStyle || mustWrap || css) { - var fullStyle = style || ""; - if (startStyle) { fullStyle += startStyle; } - if (endStyle) { fullStyle += endStyle; } - var token = elt("span", [content], fullStyle, css); - if (title) { token.title = title; } - return builder.content.appendChild(token) - } - builder.content.appendChild(content); -} - -function splitSpaces(text, trailingBefore) { - if (text.length > 1 && !/ /.test(text)) { return text } - var spaceBefore = trailingBefore, result = ""; - for (var i = 0; i < text.length; i++) { - var ch = text.charAt(i); - if (ch == " " && spaceBefore && (i == text.length - 1 || text.charCodeAt(i + 1) == 32)) - { ch = "\u00a0"; } - result += ch; - spaceBefore = ch == " "; - } - return result -} - -// Work around nonsense dimensions being reported for stretches of -// right-to-left text. -function buildTokenBadBidi(inner, order) { - return function (builder, text, style, startStyle, endStyle, title, css) { - style = style ? 
style + " cm-force-border" : "cm-force-border"; - var start = builder.pos, end = start + text.length; - for (;;) { - // Find the part that overlaps with the start of this text - var part = (void 0); - for (var i = 0; i < order.length; i++) { - part = order[i]; - if (part.to > start && part.from <= start) { break } - } - if (part.to >= end) { return inner(builder, text, style, startStyle, endStyle, title, css) } - inner(builder, text.slice(0, part.to - start), style, startStyle, null, title, css); - startStyle = null; - text = text.slice(part.to - start); - start = part.to; - } - } -} - -function buildCollapsedSpan(builder, size, marker, ignoreWidget) { - var widget = !ignoreWidget && marker.widgetNode; - if (widget) { builder.map.push(builder.pos, builder.pos + size, widget); } - if (!ignoreWidget && builder.cm.display.input.needsContentAttribute) { - if (!widget) - { widget = builder.content.appendChild(document.createElement("span")); } - widget.setAttribute("cm-marker", marker.id); - } - if (widget) { - builder.cm.display.input.setUneditable(widget); - builder.content.appendChild(widget); - } - builder.pos += size; - builder.trailingSpace = false; -} - -// Outputs a number of spans to make up a line, taking highlighting -// and marked text into account. -function insertLineContent(line, builder, styles) { - var spans = line.markedSpans, allText = line.text, at = 0; - if (!spans) { - for (var i$1 = 1; i$1 < styles.length; i$1+=2) - { builder.addToken(builder, allText.slice(at, at = styles[i$1]), interpretTokenStyle(styles[i$1+1], builder.cm.options)); } - return - } - - var len = allText.length, pos = 0, i = 1, text = "", style, css; - var nextChange = 0, spanStyle, spanEndStyle, spanStartStyle, title, collapsed; - for (;;) { - if (nextChange == pos) { // Update current marker set - spanStyle = spanEndStyle = spanStartStyle = title = css = ""; - collapsed = null; nextChange = Infinity; - var foundBookmarks = [], endStyles = (void 0); - for (var j = 0; j < spans.length; ++j) { - var sp = spans[j], m = sp.marker; - if (m.type == "bookmark" && sp.from == pos && m.widgetNode) { - foundBookmarks.push(m); - } else if (sp.from <= pos && (sp.to == null || sp.to > pos || m.collapsed && sp.to == pos && sp.from == pos)) { - if (sp.to != null && sp.to != pos && nextChange > sp.to) { - nextChange = sp.to; - spanEndStyle = ""; - } - if (m.className) { spanStyle += " " + m.className; } - if (m.css) { css = (css ? css + ";" : "") + m.css; } - if (m.startStyle && sp.from == pos) { spanStartStyle += " " + m.startStyle; } - if (m.endStyle && sp.to == nextChange) { (endStyles || (endStyles = [])).push(m.endStyle, sp.to); } - if (m.title && !title) { title = m.title; } - if (m.collapsed && (!collapsed || compareCollapsedMarkers(collapsed.marker, m) < 0)) - { collapsed = sp; } - } else if (sp.from > pos && nextChange > sp.from) { - nextChange = sp.from; - } - } - if (endStyles) { for (var j$1 = 0; j$1 < endStyles.length; j$1 += 2) - { if (endStyles[j$1 + 1] == nextChange) { spanEndStyle += " " + endStyles[j$1]; } } } - - if (!collapsed || collapsed.from == pos) { for (var j$2 = 0; j$2 < foundBookmarks.length; ++j$2) - { buildCollapsedSpan(builder, 0, foundBookmarks[j$2]); } } - if (collapsed && (collapsed.from || 0) == pos) { - buildCollapsedSpan(builder, (collapsed.to == null ? 
len + 1 : collapsed.to) - pos, - collapsed.marker, collapsed.from == null); - if (collapsed.to == null) { return } - if (collapsed.to == pos) { collapsed = false; } - } - } - if (pos >= len) { break } - - var upto = Math.min(len, nextChange); - while (true) { - if (text) { - var end = pos + text.length; - if (!collapsed) { - var tokenText = end > upto ? text.slice(0, upto - pos) : text; - builder.addToken(builder, tokenText, style ? style + spanStyle : spanStyle, - spanStartStyle, pos + tokenText.length == nextChange ? spanEndStyle : "", title, css); - } - if (end >= upto) {text = text.slice(upto - pos); pos = upto; break} - pos = end; - spanStartStyle = ""; - } - text = allText.slice(at, at = styles[i++]); - style = interpretTokenStyle(styles[i++], builder.cm.options); - } - } -} - - -// These objects are used to represent the visible (currently drawn) -// part of the document. A LineView may correspond to multiple -// logical lines, if those are connected by collapsed ranges. -function LineView(doc, line, lineN) { - // The starting line - this.line = line; - // Continuing lines, if any - this.rest = visualLineContinued(line); - // Number of logical lines in this visual line - this.size = this.rest ? lineNo(lst(this.rest)) - lineN + 1 : 1; - this.node = this.text = null; - this.hidden = lineIsHidden(doc, line); -} - -// Create a range of LineView objects for the given lines. -function buildViewArray(cm, from, to) { - var array = [], nextPos; - for (var pos = from; pos < to; pos = nextPos) { - var view = new LineView(cm.doc, getLine(cm.doc, pos), pos); - nextPos = pos + view.size; - array.push(view); - } - return array -} - -var operationGroup = null; - -function pushOperation(op) { - if (operationGroup) { - operationGroup.ops.push(op); - } else { - op.ownsGroup = operationGroup = { - ops: [op], - delayedCallbacks: [] - }; - } -} - -function fireCallbacksForOps(group) { - // Calls delayed callbacks and cursorActivity handlers until no - // new ones appear - var callbacks = group.delayedCallbacks, i = 0; - do { - for (; i < callbacks.length; i++) - { callbacks[i].call(null); } - for (var j = 0; j < group.ops.length; j++) { - var op = group.ops[j]; - if (op.cursorActivityHandlers) - { while (op.cursorActivityCalled < op.cursorActivityHandlers.length) - { op.cursorActivityHandlers[op.cursorActivityCalled++].call(null, op.cm); } } - } - } while (i < callbacks.length) -} - -function finishOperation(op, endCb) { - var group = op.ownsGroup; - if (!group) { return } - - try { fireCallbacksForOps(group); } - finally { - operationGroup = null; - endCb(group); - } -} - -var orphanDelayedCallbacks = null; - -// Often, we want to signal events at a point where we are in the -// middle of some work, but don't want the handler to start calling -// other methods on the editor, which might be in an inconsistent -// state or simply not expect any other events to happen. -// signalLater looks whether there are any handlers, and schedules -// them to be executed when the last operation ends, or, if no -// operation is active, when a timeout fires. 
-function signalLater(emitter, type /*, values...*/) { - var arr = getHandlers(emitter, type); - if (!arr.length) { return } - var args = Array.prototype.slice.call(arguments, 2), list; - if (operationGroup) { - list = operationGroup.delayedCallbacks; - } else if (orphanDelayedCallbacks) { - list = orphanDelayedCallbacks; - } else { - list = orphanDelayedCallbacks = []; - setTimeout(fireOrphanDelayed, 0); - } - var loop = function ( i ) { - list.push(function () { return arr[i].apply(null, args); }); - }; - - for (var i = 0; i < arr.length; ++i) - loop( i ); -} - -function fireOrphanDelayed() { - var delayed = orphanDelayedCallbacks; - orphanDelayedCallbacks = null; - for (var i = 0; i < delayed.length; ++i) { delayed[i](); } -} - -// When an aspect of a line changes, a string is added to -// lineView.changes. This updates the relevant part of the line's -// DOM structure. -function updateLineForChanges(cm, lineView, lineN, dims) { - for (var j = 0; j < lineView.changes.length; j++) { - var type = lineView.changes[j]; - if (type == "text") { updateLineText(cm, lineView); } - else if (type == "gutter") { updateLineGutter(cm, lineView, lineN, dims); } - else if (type == "class") { updateLineClasses(cm, lineView); } - else if (type == "widget") { updateLineWidgets(cm, lineView, dims); } - } - lineView.changes = null; -} - -// Lines with gutter elements, widgets or a background class need to -// be wrapped, and have the extra elements added to the wrapper div -function ensureLineWrapped(lineView) { - if (lineView.node == lineView.text) { - lineView.node = elt("div", null, null, "position: relative"); - if (lineView.text.parentNode) - { lineView.text.parentNode.replaceChild(lineView.node, lineView.text); } - lineView.node.appendChild(lineView.text); - if (ie && ie_version < 8) { lineView.node.style.zIndex = 2; } - } - return lineView.node -} - -function updateLineBackground(cm, lineView) { - var cls = lineView.bgClass ? lineView.bgClass + " " + (lineView.line.bgClass || "") : lineView.line.bgClass; - if (cls) { cls += " CodeMirror-linebackground"; } - if (lineView.background) { - if (cls) { lineView.background.className = cls; } - else { lineView.background.parentNode.removeChild(lineView.background); lineView.background = null; } - } else if (cls) { - var wrap = ensureLineWrapped(lineView); - lineView.background = wrap.insertBefore(elt("div", null, cls), wrap.firstChild); - cm.display.input.setUneditable(lineView.background); - } -} - -// Wrapper around buildLineContent which will reuse the structure -// in display.externalMeasured when possible. -function getLineContent(cm, lineView) { - var ext = cm.display.externalMeasured; - if (ext && ext.line == lineView.line) { - cm.display.externalMeasured = null; - lineView.measure = ext.measure; - return ext.built - } - return buildLineContent(cm, lineView) -} - -// Redraw the line's text. Interacts with the background and text -// classes because the mode may output tokens that influence these -// classes. 
-function updateLineText(cm, lineView) { - var cls = lineView.text.className; - var built = getLineContent(cm, lineView); - if (lineView.text == lineView.node) { lineView.node = built.pre; } - lineView.text.parentNode.replaceChild(built.pre, lineView.text); - lineView.text = built.pre; - if (built.bgClass != lineView.bgClass || built.textClass != lineView.textClass) { - lineView.bgClass = built.bgClass; - lineView.textClass = built.textClass; - updateLineClasses(cm, lineView); - } else if (cls) { - lineView.text.className = cls; - } -} - -function updateLineClasses(cm, lineView) { - updateLineBackground(cm, lineView); - if (lineView.line.wrapClass) - { ensureLineWrapped(lineView).className = lineView.line.wrapClass; } - else if (lineView.node != lineView.text) - { lineView.node.className = ""; } - var textClass = lineView.textClass ? lineView.textClass + " " + (lineView.line.textClass || "") : lineView.line.textClass; - lineView.text.className = textClass || ""; -} - -function updateLineGutter(cm, lineView, lineN, dims) { - if (lineView.gutter) { - lineView.node.removeChild(lineView.gutter); - lineView.gutter = null; - } - if (lineView.gutterBackground) { - lineView.node.removeChild(lineView.gutterBackground); - lineView.gutterBackground = null; - } - if (lineView.line.gutterClass) { - var wrap = ensureLineWrapped(lineView); - lineView.gutterBackground = elt("div", null, "CodeMirror-gutter-background " + lineView.line.gutterClass, - ("left: " + (cm.options.fixedGutter ? dims.fixedPos : -dims.gutterTotalWidth) + "px; width: " + (dims.gutterTotalWidth) + "px")); - cm.display.input.setUneditable(lineView.gutterBackground); - wrap.insertBefore(lineView.gutterBackground, lineView.text); - } - var markers = lineView.line.gutterMarkers; - if (cm.options.lineNumbers || markers) { - var wrap$1 = ensureLineWrapped(lineView); - var gutterWrap = lineView.gutter = elt("div", null, "CodeMirror-gutter-wrapper", ("left: " + (cm.options.fixedGutter ? 
dims.fixedPos : -dims.gutterTotalWidth) + "px")); - cm.display.input.setUneditable(gutterWrap); - wrap$1.insertBefore(gutterWrap, lineView.text); - if (lineView.line.gutterClass) - { gutterWrap.className += " " + lineView.line.gutterClass; } - if (cm.options.lineNumbers && (!markers || !markers["CodeMirror-linenumbers"])) - { lineView.lineNumber = gutterWrap.appendChild( - elt("div", lineNumberFor(cm.options, lineN), - "CodeMirror-linenumber CodeMirror-gutter-elt", - ("left: " + (dims.gutterLeft["CodeMirror-linenumbers"]) + "px; width: " + (cm.display.lineNumInnerWidth) + "px"))); } - if (markers) { for (var k = 0; k < cm.options.gutters.length; ++k) { - var id = cm.options.gutters[k], found = markers.hasOwnProperty(id) && markers[id]; - if (found) - { gutterWrap.appendChild(elt("div", [found], "CodeMirror-gutter-elt", - ("left: " + (dims.gutterLeft[id]) + "px; width: " + (dims.gutterWidth[id]) + "px"))); } - } } - } -} - -function updateLineWidgets(cm, lineView, dims) { - if (lineView.alignable) { lineView.alignable = null; } - for (var node = lineView.node.firstChild, next = (void 0); node; node = next) { - next = node.nextSibling; - if (node.className == "CodeMirror-linewidget") - { lineView.node.removeChild(node); } - } - insertLineWidgets(cm, lineView, dims); -} - -// Build a line's DOM representation from scratch -function buildLineElement(cm, lineView, lineN, dims) { - var built = getLineContent(cm, lineView); - lineView.text = lineView.node = built.pre; - if (built.bgClass) { lineView.bgClass = built.bgClass; } - if (built.textClass) { lineView.textClass = built.textClass; } - - updateLineClasses(cm, lineView); - updateLineGutter(cm, lineView, lineN, dims); - insertLineWidgets(cm, lineView, dims); - return lineView.node -} - -// A lineView may contain multiple logical lines (when merged by -// collapsed spans). The widgets for all of them need to be drawn. 
-function insertLineWidgets(cm, lineView, dims) { - insertLineWidgetsFor(cm, lineView.line, lineView, dims, true); - if (lineView.rest) { for (var i = 0; i < lineView.rest.length; i++) - { insertLineWidgetsFor(cm, lineView.rest[i], lineView, dims, false); } } -} - -function insertLineWidgetsFor(cm, line, lineView, dims, allowAbove) { - if (!line.widgets) { return } - var wrap = ensureLineWrapped(lineView); - for (var i = 0, ws = line.widgets; i < ws.length; ++i) { - var widget = ws[i], node = elt("div", [widget.node], "CodeMirror-linewidget"); - if (!widget.handleMouseEvents) { node.setAttribute("cm-ignore-events", "true"); } - positionLineWidget(widget, node, lineView, dims); - cm.display.input.setUneditable(node); - if (allowAbove && widget.above) - { wrap.insertBefore(node, lineView.gutter || lineView.text); } - else - { wrap.appendChild(node); } - signalLater(widget, "redraw"); - } -} - -function positionLineWidget(widget, node, lineView, dims) { - if (widget.noHScroll) { - (lineView.alignable || (lineView.alignable = [])).push(node); - var width = dims.wrapperWidth; - node.style.left = dims.fixedPos + "px"; - if (!widget.coverGutter) { - width -= dims.gutterTotalWidth; - node.style.paddingLeft = dims.gutterTotalWidth + "px"; - } - node.style.width = width + "px"; - } - if (widget.coverGutter) { - node.style.zIndex = 5; - node.style.position = "relative"; - if (!widget.noHScroll) { node.style.marginLeft = -dims.gutterTotalWidth + "px"; } - } -} - -function widgetHeight(widget) { - if (widget.height != null) { return widget.height } - var cm = widget.doc.cm; - if (!cm) { return 0 } - if (!contains(document.body, widget.node)) { - var parentStyle = "position: relative;"; - if (widget.coverGutter) - { parentStyle += "margin-left: -" + cm.display.gutters.offsetWidth + "px;"; } - if (widget.noHScroll) - { parentStyle += "width: " + cm.display.wrapper.clientWidth + "px;"; } - removeChildrenAndAdd(cm.display.measure, elt("div", [widget.node], null, parentStyle)); - } - return widget.height = widget.node.parentNode.offsetHeight -} - -// Return true when the given mouse event happened in a widget -function eventInWidget(display, e) { - for (var n = e_target(e); n != display.wrapper; n = n.parentNode) { - if (!n || (n.nodeType == 1 && n.getAttribute("cm-ignore-events") == "true") || - (n.parentNode == display.sizer && n != display.mover)) - { return true } - } -} - -// POSITION MEASUREMENT - -function paddingTop(display) {return display.lineSpace.offsetTop} -function paddingVert(display) {return display.mover.offsetHeight - display.lineSpace.offsetHeight} -function paddingH(display) { - if (display.cachedPaddingH) { return display.cachedPaddingH } - var e = removeChildrenAndAdd(display.measure, elt("pre", "x")); - var style = window.getComputedStyle ? window.getComputedStyle(e) : e.currentStyle; - var data = {left: parseInt(style.paddingLeft), right: parseInt(style.paddingRight)}; - if (!isNaN(data.left) && !isNaN(data.right)) { display.cachedPaddingH = data; } - return data -} - -function scrollGap(cm) { return scrollerGap - cm.display.nativeBarWidth } -function displayWidth(cm) { - return cm.display.scroller.clientWidth - scrollGap(cm) - cm.display.barWidth -} -function displayHeight(cm) { - return cm.display.scroller.clientHeight - scrollGap(cm) - cm.display.barHeight -} - -// Ensure the lineView.wrapping.heights array is populated. This is -// an array of bottom offsets for the lines that make up a drawn -// line. When lineWrapping is on, there might be more than one -// height. 
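// (Illustrative sketch, not part of the original file, assuming cm is a CodeMirror
// instance.) The widget options read by the helpers above (above, coverGutter,
// noHScroll, handleMouseEvents) come in through the public addLineWidget API defined
// elsewhere in this file, for example:
var note = document.createElement("div");
note.textContent = "inline note";            // hypothetical widget content
cm.addLineWidget(5, note, {above: true, coverGutter: false, noHScroll: true});
// positionLineWidget then pins the node horizontally, and insertLineWidgetsFor places
// it before or after the line's text depending on options.above.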
-function ensureLineHeights(cm, lineView, rect) { - var wrapping = cm.options.lineWrapping; - var curWidth = wrapping && displayWidth(cm); - if (!lineView.measure.heights || wrapping && lineView.measure.width != curWidth) { - var heights = lineView.measure.heights = []; - if (wrapping) { - lineView.measure.width = curWidth; - var rects = lineView.text.firstChild.getClientRects(); - for (var i = 0; i < rects.length - 1; i++) { - var cur = rects[i], next = rects[i + 1]; - if (Math.abs(cur.bottom - next.bottom) > 2) - { heights.push((cur.bottom + next.top) / 2 - rect.top); } - } - } - heights.push(rect.bottom - rect.top); - } -} - -// Find a line map (mapping character offsets to text nodes) and a -// measurement cache for the given line number. (A line view might -// contain multiple lines when collapsed ranges are present.) -function mapFromLineView(lineView, line, lineN) { - if (lineView.line == line) - { return {map: lineView.measure.map, cache: lineView.measure.cache} } - for (var i = 0; i < lineView.rest.length; i++) - { if (lineView.rest[i] == line) - { return {map: lineView.measure.maps[i], cache: lineView.measure.caches[i]} } } - for (var i$1 = 0; i$1 < lineView.rest.length; i$1++) - { if (lineNo(lineView.rest[i$1]) > lineN) - { return {map: lineView.measure.maps[i$1], cache: lineView.measure.caches[i$1], before: true} } } -} - -// Render a line into the hidden node display.externalMeasured. Used -// when measurement is needed for a line that's not in the viewport. -function updateExternalMeasurement(cm, line) { - line = visualLine(line); - var lineN = lineNo(line); - var view = cm.display.externalMeasured = new LineView(cm.doc, line, lineN); - view.lineN = lineN; - var built = view.built = buildLineContent(cm, view); - view.text = built.pre; - removeChildrenAndAdd(cm.display.lineMeasure, built.pre); - return view -} - -// Get a {top, bottom, left, right} box (in line-local coordinates) -// for a given character. -function measureChar(cm, line, ch, bias) { - return measureCharPrepared(cm, prepareMeasureForLine(cm, line), ch, bias) -} - -// Find a line view that corresponds to the given line number. -function findViewForLine(cm, lineN) { - if (lineN >= cm.display.viewFrom && lineN < cm.display.viewTo) - { return cm.display.view[findViewIndex(cm, lineN)] } - var ext = cm.display.externalMeasured; - if (ext && lineN >= ext.lineN && lineN < ext.lineN + ext.size) - { return ext } -} - -// Measurement can be split in two steps, the set-up work that -// applies to the whole line, and the measurement of the actual -// character. Functions like coordsChar, that need to do a lot of -// measurements in a row, can thus ensure that the set-up work is -// only done once. -function prepareMeasureForLine(cm, line) { - var lineN = lineNo(line); - var view = findViewForLine(cm, lineN); - if (view && !view.text) { - view = null; - } else if (view && view.changes) { - updateLineForChanges(cm, view, lineN, getDimensions(cm)); - cm.curOp.forceUpdate = true; - } - if (!view) - { view = updateExternalMeasurement(cm, line); } - - var info = mapFromLineView(view, line, lineN); - return { - line: line, view: view, rect: null, - map: info.map, cache: info.cache, before: info.before, - hasHeights: false - } -} - -// Given a prepared measurement object, measures the position of an -// actual character (or fetches it from the cache). 
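// (Illustrative sketch, not part of the original file, assuming cm is a CodeMirror
// instance.) The two-step measurement described above lets a caller pay the per-line
// set-up cost once and then measure many characters cheaply:
var lineObj = getLine(cm.doc, 0);
var prepared = prepareMeasureForLine(cm, lineObj);
for (var ch = 0; ch < lineObj.text.length; ch++) {
  var box = measureCharPrepared(cm, prepared, ch, "left");
  // box.{left,right,top,bottom} are pixel offsets local to the line
}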
-function measureCharPrepared(cm, prepared, ch, bias, varHeight) { - if (prepared.before) { ch = -1; } - var key = ch + (bias || ""), found; - if (prepared.cache.hasOwnProperty(key)) { - found = prepared.cache[key]; - } else { - if (!prepared.rect) - { prepared.rect = prepared.view.text.getBoundingClientRect(); } - if (!prepared.hasHeights) { - ensureLineHeights(cm, prepared.view, prepared.rect); - prepared.hasHeights = true; - } - found = measureCharInner(cm, prepared, ch, bias); - if (!found.bogus) { prepared.cache[key] = found; } - } - return {left: found.left, right: found.right, - top: varHeight ? found.rtop : found.top, - bottom: varHeight ? found.rbottom : found.bottom} -} - -var nullRect = {left: 0, right: 0, top: 0, bottom: 0}; - -function nodeAndOffsetInLineMap(map$$1, ch, bias) { - var node, start, end, collapse, mStart, mEnd; - // First, search the line map for the text node corresponding to, - // or closest to, the target character. - for (var i = 0; i < map$$1.length; i += 3) { - mStart = map$$1[i]; - mEnd = map$$1[i + 1]; - if (ch < mStart) { - start = 0; end = 1; - collapse = "left"; - } else if (ch < mEnd) { - start = ch - mStart; - end = start + 1; - } else if (i == map$$1.length - 3 || ch == mEnd && map$$1[i + 3] > ch) { - end = mEnd - mStart; - start = end - 1; - if (ch >= mEnd) { collapse = "right"; } - } - if (start != null) { - node = map$$1[i + 2]; - if (mStart == mEnd && bias == (node.insertLeft ? "left" : "right")) - { collapse = bias; } - if (bias == "left" && start == 0) - { while (i && map$$1[i - 2] == map$$1[i - 3] && map$$1[i - 1].insertLeft) { - node = map$$1[(i -= 3) + 2]; - collapse = "left"; - } } - if (bias == "right" && start == mEnd - mStart) - { while (i < map$$1.length - 3 && map$$1[i + 3] == map$$1[i + 4] && !map$$1[i + 5].insertLeft) { - node = map$$1[(i += 3) + 2]; - collapse = "right"; - } } - break - } - } - return {node: node, start: start, end: end, collapse: collapse, coverStart: mStart, coverEnd: mEnd} -} - -function getUsefulRect(rects, bias) { - var rect = nullRect; - if (bias == "left") { for (var i = 0; i < rects.length; i++) { - if ((rect = rects[i]).left != rect.right) { break } - } } else { for (var i$1 = rects.length - 1; i$1 >= 0; i$1--) { - if ((rect = rects[i$1]).left != rect.right) { break } - } } - return rect -} - -function measureCharInner(cm, prepared, ch, bias) { - var place = nodeAndOffsetInLineMap(prepared.map, ch, bias); - var node = place.node, start = place.start, end = place.end, collapse = place.collapse; - - var rect; - if (node.nodeType == 3) { // If it is a text node, use a range to retrieve the coordinates. - for (var i$1 = 0; i$1 < 4; i$1++) { // Retry a maximum of 4 times when nonsense rectangles are returned - while (start && isExtendingChar(prepared.line.text.charAt(place.coverStart + start))) { --start; } - while (place.coverStart + end < place.coverEnd && isExtendingChar(prepared.line.text.charAt(place.coverStart + end))) { ++end; } - if (ie && ie_version < 9 && start == 0 && end == place.coverEnd - place.coverStart) - { rect = node.parentNode.getBoundingClientRect(); } - else - { rect = getUsefulRect(range(node, start, end).getClientRects(), bias); } - if (rect.left || rect.right || start == 0) { break } - end = start; - start = start - 1; - collapse = "right"; - } - if (ie && ie_version < 11) { rect = maybeUpdateRectForZooming(cm.display.measure, rect); } - } else { // If it is a widget, simply get the box for the whole widget. 
- if (start > 0) { collapse = bias = "right"; } - var rects; - if (cm.options.lineWrapping && (rects = node.getClientRects()).length > 1) - { rect = rects[bias == "right" ? rects.length - 1 : 0]; } - else - { rect = node.getBoundingClientRect(); } - } - if (ie && ie_version < 9 && !start && (!rect || !rect.left && !rect.right)) { - var rSpan = node.parentNode.getClientRects()[0]; - if (rSpan) - { rect = {left: rSpan.left, right: rSpan.left + charWidth(cm.display), top: rSpan.top, bottom: rSpan.bottom}; } - else - { rect = nullRect; } - } - - var rtop = rect.top - prepared.rect.top, rbot = rect.bottom - prepared.rect.top; - var mid = (rtop + rbot) / 2; - var heights = prepared.view.measure.heights; - var i = 0; - for (; i < heights.length - 1; i++) - { if (mid < heights[i]) { break } } - var top = i ? heights[i - 1] : 0, bot = heights[i]; - var result = {left: (collapse == "right" ? rect.right : rect.left) - prepared.rect.left, - right: (collapse == "left" ? rect.left : rect.right) - prepared.rect.left, - top: top, bottom: bot}; - if (!rect.left && !rect.right) { result.bogus = true; } - if (!cm.options.singleCursorHeightPerLine) { result.rtop = rtop; result.rbottom = rbot; } - - return result -} - -// Work around problem with bounding client rects on ranges being -// returned incorrectly when zoomed on IE10 and below. -function maybeUpdateRectForZooming(measure, rect) { - if (!window.screen || screen.logicalXDPI == null || - screen.logicalXDPI == screen.deviceXDPI || !hasBadZoomedRects(measure)) - { return rect } - var scaleX = screen.logicalXDPI / screen.deviceXDPI; - var scaleY = screen.logicalYDPI / screen.deviceYDPI; - return {left: rect.left * scaleX, right: rect.right * scaleX, - top: rect.top * scaleY, bottom: rect.bottom * scaleY} -} - -function clearLineMeasurementCacheFor(lineView) { - if (lineView.measure) { - lineView.measure.cache = {}; - lineView.measure.heights = null; - if (lineView.rest) { for (var i = 0; i < lineView.rest.length; i++) - { lineView.measure.caches[i] = {}; } } - } -} - -function clearLineMeasurementCache(cm) { - cm.display.externalMeasure = null; - removeChildren(cm.display.lineMeasure); - for (var i = 0; i < cm.display.view.length; i++) - { clearLineMeasurementCacheFor(cm.display.view[i]); } -} - -function clearCaches(cm) { - clearLineMeasurementCache(cm); - cm.display.cachedCharWidth = cm.display.cachedTextHeight = cm.display.cachedPaddingH = null; - if (!cm.options.lineWrapping) { cm.display.maxLineChanged = true; } - cm.display.lineNumChars = null; -} - -function pageScrollX() { - // Work around https://bugs.chromium.org/p/chromium/issues/detail?id=489206 - // which causes page_Offset and bounding client rects to use - // different reference viewports and invalidate our calculations. 
- if (chrome && android) { return -(document.body.getBoundingClientRect().left - parseInt(getComputedStyle(document.body).marginLeft)) } - return window.pageXOffset || (document.documentElement || document.body).scrollLeft -} -function pageScrollY() { - if (chrome && android) { return -(document.body.getBoundingClientRect().top - parseInt(getComputedStyle(document.body).marginTop)) } - return window.pageYOffset || (document.documentElement || document.body).scrollTop -} - -function widgetTopHeight(lineObj) { - var height = 0; - if (lineObj.widgets) { for (var i = 0; i < lineObj.widgets.length; ++i) { if (lineObj.widgets[i].above) - { height += widgetHeight(lineObj.widgets[i]); } } } - return height -} - -// Converts a {top, bottom, left, right} box from line-local -// coordinates into another coordinate system. Context may be one of -// "line", "div" (display.lineDiv), "local"./null (editor), "window", -// or "page". -function intoCoordSystem(cm, lineObj, rect, context, includeWidgets) { - if (!includeWidgets) { - var height = widgetTopHeight(lineObj); - rect.top += height; rect.bottom += height; - } - if (context == "line") { return rect } - if (!context) { context = "local"; } - var yOff = heightAtLine(lineObj); - if (context == "local") { yOff += paddingTop(cm.display); } - else { yOff -= cm.display.viewOffset; } - if (context == "page" || context == "window") { - var lOff = cm.display.lineSpace.getBoundingClientRect(); - yOff += lOff.top + (context == "window" ? 0 : pageScrollY()); - var xOff = lOff.left + (context == "window" ? 0 : pageScrollX()); - rect.left += xOff; rect.right += xOff; - } - rect.top += yOff; rect.bottom += yOff; - return rect -} - -// Coverts a box from "div" coords to another coordinate system. -// Context may be "window", "page", "div", or "local"./null. -function fromCoordSystem(cm, coords, context) { - if (context == "div") { return coords } - var left = coords.left, top = coords.top; - // First move into "page" coordinate system - if (context == "page") { - left -= pageScrollX(); - top -= pageScrollY(); - } else if (context == "local" || !context) { - var localBox = cm.display.sizer.getBoundingClientRect(); - left += localBox.left; - top += localBox.top; - } - - var lineSpaceBox = cm.display.lineSpace.getBoundingClientRect(); - return {left: left - lineSpaceBox.left, top: top - lineSpaceBox.top} -} - -function charCoords(cm, pos, context, lineObj, bias) { - if (!lineObj) { lineObj = getLine(cm.doc, pos.line); } - return intoCoordSystem(cm, lineObj, measureChar(cm, lineObj, pos.ch, bias), context) -} - -// Returns a box for a given cursor position, which may have an -// 'other' property containing the position of the secondary cursor -// on a bidi boundary. -// A cursor Pos(line, char, "before") is on the same visual line as `char - 1` -// and after `char - 1` in writing order of `char - 1` -// A cursor Pos(line, char, "after") is on the same visual line as `char` -// and before `char` in writing order of `char` -// Examples (upper-case letters are RTL, lower-case are LTR): -// Pos(0, 1, ...) -// before after -// ab a|b a|b -// aB a|B aB| -// Ab |Ab A|b -// AB B|A B|A -// Every position after the last character on a line is considered to stick -// to the last character on the line. 
-function cursorCoords(cm, pos, context, lineObj, preparedMeasure, varHeight) { - lineObj = lineObj || getLine(cm.doc, pos.line); - if (!preparedMeasure) { preparedMeasure = prepareMeasureForLine(cm, lineObj); } - function get(ch, right) { - var m = measureCharPrepared(cm, preparedMeasure, ch, right ? "right" : "left", varHeight); - if (right) { m.left = m.right; } else { m.right = m.left; } - return intoCoordSystem(cm, lineObj, m, context) - } - var order = getOrder(lineObj, cm.doc.direction), ch = pos.ch, sticky = pos.sticky; - if (ch >= lineObj.text.length) { - ch = lineObj.text.length; - sticky = "before"; - } else if (ch <= 0) { - ch = 0; - sticky = "after"; - } - if (!order) { return get(sticky == "before" ? ch - 1 : ch, sticky == "before") } - - function getBidi(ch, partPos, invert) { - var part = order[partPos], right = part.level == 1; - return get(invert ? ch - 1 : ch, right != invert) - } - var partPos = getBidiPartAt(order, ch, sticky); - var other = bidiOther; - var val = getBidi(ch, partPos, sticky == "before"); - if (other != null) { val.other = getBidi(ch, other, sticky != "before"); } - return val -} - -// Used to cheaply estimate the coordinates for a position. Used for -// intermediate scroll updates. -function estimateCoords(cm, pos) { - var left = 0; - pos = clipPos(cm.doc, pos); - if (!cm.options.lineWrapping) { left = charWidth(cm.display) * pos.ch; } - var lineObj = getLine(cm.doc, pos.line); - var top = heightAtLine(lineObj) + paddingTop(cm.display); - return {left: left, right: left, top: top, bottom: top + lineObj.height} -} - -// Positions returned by coordsChar contain some extra information. -// xRel is the relative x position of the input coordinates compared -// to the found position (so xRel > 0 means the coordinates are to -// the right of the character position, for example). When outside -// is true, that means the coordinates lie outside the line's -// vertical range. -function PosWithInfo(line, ch, sticky, outside, xRel) { - var pos = Pos(line, ch, sticky); - pos.xRel = xRel; - if (outside) { pos.outside = true; } - return pos -} - -// Compute the character position closest to the given coordinates. -// Input must be lineSpace-local ("div" coordinate system). 
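// (Illustrative sketch, not part of the original file, assuming cm is a CodeMirror
// instance and e is a mouse event.) coordsChar below expects lineSpace-local
// coordinates, so a mouse event is typically translated first; this is essentially
// what posFromMouse further down does:
var space = cm.display.lineSpace.getBoundingClientRect();
var pos = coordsChar(cm, e.clientX - space.left, e.clientY - space.top);
// pos.xRel > 0: the point fell to the right of the returned character;
// pos.outside: the y coordinate fell outside the line's vertical range.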
-function coordsChar(cm, x, y) { - var doc = cm.doc; - y += cm.display.viewOffset; - if (y < 0) { return PosWithInfo(doc.first, 0, null, true, -1) } - var lineN = lineAtHeight(doc, y), last = doc.first + doc.size - 1; - if (lineN > last) - { return PosWithInfo(doc.first + doc.size - 1, getLine(doc, last).text.length, null, true, 1) } - if (x < 0) { x = 0; } - - var lineObj = getLine(doc, lineN); - for (;;) { - var found = coordsCharInner(cm, lineObj, lineN, x, y); - var merged = collapsedSpanAtEnd(lineObj); - var mergedPos = merged && merged.find(0, true); - if (merged && (found.ch > mergedPos.from.ch || found.ch == mergedPos.from.ch && found.xRel > 0)) - { lineN = lineNo(lineObj = mergedPos.to.line); } - else - { return found } - } -} - -function wrappedLineExtent(cm, lineObj, preparedMeasure, y) { - y -= widgetTopHeight(lineObj); - var end = lineObj.text.length; - var begin = findFirst(function (ch) { return measureCharPrepared(cm, preparedMeasure, ch - 1).bottom <= y; }, end, 0); - end = findFirst(function (ch) { return measureCharPrepared(cm, preparedMeasure, ch).top > y; }, begin, end); - return {begin: begin, end: end} -} - -function wrappedLineExtentChar(cm, lineObj, preparedMeasure, target) { - if (!preparedMeasure) { preparedMeasure = prepareMeasureForLine(cm, lineObj); } - var targetTop = intoCoordSystem(cm, lineObj, measureCharPrepared(cm, preparedMeasure, target), "line").top; - return wrappedLineExtent(cm, lineObj, preparedMeasure, targetTop) -} - -// Returns true if the given side of a box is after the given -// coordinates, in top-to-bottom, left-to-right order. -function boxIsAfter(box, x, y, left) { - return box.bottom <= y ? false : box.top > y ? true : (left ? box.left : box.right) > x -} - -function coordsCharInner(cm, lineObj, lineNo$$1, x, y) { - // Move y into line-local coordinate space - y -= heightAtLine(lineObj); - var preparedMeasure = prepareMeasureForLine(cm, lineObj); - // When directly calling `measureCharPrepared`, we have to adjust - // for the widgets at this line. - var widgetHeight$$1 = widgetTopHeight(lineObj); - var begin = 0, end = lineObj.text.length, ltr = true; - - var order = getOrder(lineObj, cm.doc.direction); - // If the line isn't plain left-to-right text, first figure out - // which bidi section the coordinates fall into. - if (order) { - var part = (cm.options.lineWrapping ? coordsBidiPartWrapped : coordsBidiPart) - (cm, lineObj, lineNo$$1, preparedMeasure, order, x, y); - ltr = part.level != 1; - // The awkward -1 offsets are needed because findFirst (called - // on these below) will treat its first bound as inclusive, - // second as exclusive, but we want to actually address the - // characters in the part's range - begin = ltr ? part.from : part.to - 1; - end = ltr ? part.to : part.from - 1; - } - - // A binary search to find the first character whose bounding box - // starts after the coordinates. If we run across any whose box wrap - // the coordinates, store that. 
- var chAround = null, boxAround = null; - var ch = findFirst(function (ch) { - var box = measureCharPrepared(cm, preparedMeasure, ch); - box.top += widgetHeight$$1; box.bottom += widgetHeight$$1; - if (!boxIsAfter(box, x, y, false)) { return false } - if (box.top <= y && box.left <= x) { - chAround = ch; - boxAround = box; - } - return true - }, begin, end); - - var baseX, sticky, outside = false; - // If a box around the coordinates was found, use that - if (boxAround) { - // Distinguish coordinates nearer to the left or right side of the box - var atLeft = x - boxAround.left < boxAround.right - x, atStart = atLeft == ltr; - ch = chAround + (atStart ? 0 : 1); - sticky = atStart ? "after" : "before"; - baseX = atLeft ? boxAround.left : boxAround.right; - } else { - // (Adjust for extended bound, if necessary.) - if (!ltr && (ch == end || ch == begin)) { ch++; } - // To determine which side to associate with, get the box to the - // left of the character and compare it's vertical position to the - // coordinates - sticky = ch == 0 ? "after" : ch == lineObj.text.length ? "before" : - (measureCharPrepared(cm, preparedMeasure, ch - (ltr ? 1 : 0)).bottom + widgetHeight$$1 <= y) == ltr ? - "after" : "before"; - // Now get accurate coordinates for this place, in order to get a - // base X position - var coords = cursorCoords(cm, Pos(lineNo$$1, ch, sticky), "line", lineObj, preparedMeasure); - baseX = coords.left; - outside = y < coords.top || y >= coords.bottom; - } - - ch = skipExtendingChars(lineObj.text, ch, 1); - return PosWithInfo(lineNo$$1, ch, sticky, outside, x - baseX) -} - -function coordsBidiPart(cm, lineObj, lineNo$$1, preparedMeasure, order, x, y) { - // Bidi parts are sorted left-to-right, and in a non-line-wrapping - // situation, we can take this ordering to correspond to the visual - // ordering. This finds the first part whose end is after the given - // coordinates. - var index = findFirst(function (i) { - var part = order[i], ltr = part.level != 1; - return boxIsAfter(cursorCoords(cm, Pos(lineNo$$1, ltr ? part.to : part.from, ltr ? "before" : "after"), - "line", lineObj, preparedMeasure), x, y, true) - }, 0, order.length - 1); - var part = order[index]; - // If this isn't the first part, the part's start is also after - // the coordinates, and the coordinates aren't on the same line as - // that start, move one part back. - if (index > 0) { - var ltr = part.level != 1; - var start = cursorCoords(cm, Pos(lineNo$$1, ltr ? part.from : part.to, ltr ? "after" : "before"), - "line", lineObj, preparedMeasure); - if (boxIsAfter(start, x, y, true) && start.top > y) - { part = order[index - 1]; } - } - return part -} - -function coordsBidiPartWrapped(cm, lineObj, _lineNo, preparedMeasure, order, x, y) { - // In a wrapped line, rtl text on wrapping boundaries can do things - // that don't correspond to the ordering in our `order` array at - // all, so a binary search doesn't work, and we want to return a - // part that only spans one line so that the binary search in - // coordsCharInner is safe. As such, we first find the extent of the - // wrapped line, and then do a flat search in which we discard any - // spans that aren't on the line. 
- var ref = wrappedLineExtent(cm, lineObj, preparedMeasure, y); - var begin = ref.begin; - var end = ref.end; - if (/\s/.test(lineObj.text.charAt(end - 1))) { end--; } - var part = null, closestDist = null; - for (var i = 0; i < order.length; i++) { - var p = order[i]; - if (p.from >= end || p.to <= begin) { continue } - var ltr = p.level != 1; - var endX = measureCharPrepared(cm, preparedMeasure, ltr ? Math.min(end, p.to) - 1 : Math.max(begin, p.from)).right; - // Weigh against spans ending before this, so that they are only - // picked if nothing ends after - var dist = endX < x ? x - endX + 1e9 : endX - x; - if (!part || closestDist > dist) { - part = p; - closestDist = dist; - } - } - if (!part) { part = order[order.length - 1]; } - // Clip the part to the wrapped line. - if (part.from < begin) { part = {from: begin, to: part.to, level: part.level}; } - if (part.to > end) { part = {from: part.from, to: end, level: part.level}; } - return part -} - -var measureText; -// Compute the default text height. -function textHeight(display) { - if (display.cachedTextHeight != null) { return display.cachedTextHeight } - if (measureText == null) { - measureText = elt("pre"); - // Measure a bunch of lines, for browsers that compute - // fractional heights. - for (var i = 0; i < 49; ++i) { - measureText.appendChild(document.createTextNode("x")); - measureText.appendChild(elt("br")); - } - measureText.appendChild(document.createTextNode("x")); - } - removeChildrenAndAdd(display.measure, measureText); - var height = measureText.offsetHeight / 50; - if (height > 3) { display.cachedTextHeight = height; } - removeChildren(display.measure); - return height || 1 -} - -// Compute the default character width. -function charWidth(display) { - if (display.cachedCharWidth != null) { return display.cachedCharWidth } - var anchor = elt("span", "xxxxxxxxxx"); - var pre = elt("pre", [anchor]); - removeChildrenAndAdd(display.measure, pre); - var rect = anchor.getBoundingClientRect(), width = (rect.right - rect.left) / 10; - if (width > 2) { display.cachedCharWidth = width; } - return width || 10 -} - -// Do a bulk-read of the DOM positions and sizes needed to draw the -// view, so that we don't interleave reading and writing to the DOM. -function getDimensions(cm) { - var d = cm.display, left = {}, width = {}; - var gutterLeft = d.gutters.clientLeft; - for (var n = d.gutters.firstChild, i = 0; n; n = n.nextSibling, ++i) { - left[cm.options.gutters[i]] = n.offsetLeft + n.clientLeft + gutterLeft; - width[cm.options.gutters[i]] = n.clientWidth; - } - return {fixedPos: compensateForHScroll(d), - gutterTotalWidth: d.gutters.offsetWidth, - gutterLeft: left, - gutterWidth: width, - wrapperWidth: d.wrapper.clientWidth} -} - -// Computes display.scroller.scrollLeft + display.gutters.offsetWidth, -// but using getBoundingClientRect to get a sub-pixel-accurate -// result. -function compensateForHScroll(display) { - return display.scroller.getBoundingClientRect().left - display.sizer.getBoundingClientRect().left -} - -// Returns a function that estimates the height of a line, to use as -// first approximation until the line becomes visible (and is thus -// properly measurable). 
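// (Illustrative worked example, not part of the original file.) For the wrapping
// estimate computed below: with an 800px scroller, an 8px character width and a
// 19px text height, perLine = Math.max(5, 800 / 8 - 3) = 97, so a 250-character
// line with no widgets is estimated at Math.ceil(250 / 97) * 19 = 3 * 19 = 57px.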
-function estimateHeight(cm) { - var th = textHeight(cm.display), wrapping = cm.options.lineWrapping; - var perLine = wrapping && Math.max(5, cm.display.scroller.clientWidth / charWidth(cm.display) - 3); - return function (line) { - if (lineIsHidden(cm.doc, line)) { return 0 } - - var widgetsHeight = 0; - if (line.widgets) { for (var i = 0; i < line.widgets.length; i++) { - if (line.widgets[i].height) { widgetsHeight += line.widgets[i].height; } - } } - - if (wrapping) - { return widgetsHeight + (Math.ceil(line.text.length / perLine) || 1) * th } - else - { return widgetsHeight + th } - } -} - -function estimateLineHeights(cm) { - var doc = cm.doc, est = estimateHeight(cm); - doc.iter(function (line) { - var estHeight = est(line); - if (estHeight != line.height) { updateLineHeight(line, estHeight); } - }); -} - -// Given a mouse event, find the corresponding position. If liberal -// is false, it checks whether a gutter or scrollbar was clicked, -// and returns null if it was. forRect is used by rectangular -// selections, and tries to estimate a character position even for -// coordinates beyond the right of the text. -function posFromMouse(cm, e, liberal, forRect) { - var display = cm.display; - if (!liberal && e_target(e).getAttribute("cm-not-content") == "true") { return null } - - var x, y, space = display.lineSpace.getBoundingClientRect(); - // Fails unpredictably on IE[67] when mouse is dragged around quickly. - try { x = e.clientX - space.left; y = e.clientY - space.top; } - catch (e) { return null } - var coords = coordsChar(cm, x, y), line; - if (forRect && coords.xRel == 1 && (line = getLine(cm.doc, coords.line).text).length == coords.ch) { - var colDiff = countColumn(line, line.length, cm.options.tabSize) - line.length; - coords = Pos(coords.line, Math.max(0, Math.round((x - paddingH(cm.display).left) / charWidth(cm.display)) - colDiff)); - } - return coords -} - -// Find the view element corresponding to a given line. Return null -// when the line isn't visible. 
-function findViewIndex(cm, n) { - if (n >= cm.display.viewTo) { return null } - n -= cm.display.viewFrom; - if (n < 0) { return null } - var view = cm.display.view; - for (var i = 0; i < view.length; i++) { - n -= view[i].size; - if (n < 0) { return i } - } -} - -function updateSelection(cm) { - cm.display.input.showSelection(cm.display.input.prepareSelection()); -} - -function prepareSelection(cm, primary) { - if ( primary === void 0 ) primary = true; - - var doc = cm.doc, result = {}; - var curFragment = result.cursors = document.createDocumentFragment(); - var selFragment = result.selection = document.createDocumentFragment(); - - for (var i = 0; i < doc.sel.ranges.length; i++) { - if (!primary && i == doc.sel.primIndex) { continue } - var range$$1 = doc.sel.ranges[i]; - if (range$$1.from().line >= cm.display.viewTo || range$$1.to().line < cm.display.viewFrom) { continue } - var collapsed = range$$1.empty(); - if (collapsed || cm.options.showCursorWhenSelecting) - { drawSelectionCursor(cm, range$$1.head, curFragment); } - if (!collapsed) - { drawSelectionRange(cm, range$$1, selFragment); } - } - return result -} - -// Draws a cursor for the given range -function drawSelectionCursor(cm, head, output) { - var pos = cursorCoords(cm, head, "div", null, null, !cm.options.singleCursorHeightPerLine); - - var cursor = output.appendChild(elt("div", "\u00a0", "CodeMirror-cursor")); - cursor.style.left = pos.left + "px"; - cursor.style.top = pos.top + "px"; - cursor.style.height = Math.max(0, pos.bottom - pos.top) * cm.options.cursorHeight + "px"; - - if (pos.other) { - // Secondary cursor, shown when on a 'jump' in bi-directional text - var otherCursor = output.appendChild(elt("div", "\u00a0", "CodeMirror-cursor CodeMirror-secondarycursor")); - otherCursor.style.display = ""; - otherCursor.style.left = pos.other.left + "px"; - otherCursor.style.top = pos.other.top + "px"; - otherCursor.style.height = (pos.other.bottom - pos.other.top) * .85 + "px"; - } -} - -function cmpCoords(a, b) { return a.top - b.top || a.left - b.left } - -// Draws the given range as a highlighted selection -function drawSelectionRange(cm, range$$1, output) { - var display = cm.display, doc = cm.doc; - var fragment = document.createDocumentFragment(); - var padding = paddingH(cm.display), leftSide = padding.left; - var rightSide = Math.max(display.sizerWidth, displayWidth(cm) - display.sizer.offsetLeft) - padding.right; - var docLTR = doc.direction == "ltr"; - - function add(left, top, width, bottom) { - if (top < 0) { top = 0; } - top = Math.round(top); - bottom = Math.round(bottom); - fragment.appendChild(elt("div", null, "CodeMirror-selected", ("position: absolute; left: " + left + "px;\n top: " + top + "px; width: " + (width == null ? rightSide - left : width) + "px;\n height: " + (bottom - top) + "px"))); - } - - function drawForLine(line, fromArg, toArg) { - var lineObj = getLine(doc, line); - var lineLen = lineObj.text.length; - var start, end; - function coords(ch, bias) { - return charCoords(cm, Pos(line, ch), "div", lineObj, bias) - } - - function wrapX(pos, dir, side) { - var extent = wrappedLineExtentChar(cm, lineObj, null, pos); - var prop = (dir == "ltr") == (side == "after") ? "left" : "right"; - var ch = side == "after" ? extent.begin : extent.end - (/\s/.test(lineObj.text.charAt(extent.end - 1)) ? 2 : 1); - return coords(ch, prop)[prop] - } - - var order = getOrder(lineObj, doc.direction); - iterateBidiSections(order, fromArg || 0, toArg == null ? 
lineLen : toArg, function (from, to, dir, i) { - var ltr = dir == "ltr"; - var fromPos = coords(from, ltr ? "left" : "right"); - var toPos = coords(to - 1, ltr ? "right" : "left"); - - var openStart = fromArg == null && from == 0, openEnd = toArg == null && to == lineLen; - var first = i == 0, last = !order || i == order.length - 1; - if (toPos.top - fromPos.top <= 3) { // Single line - var openLeft = (docLTR ? openStart : openEnd) && first; - var openRight = (docLTR ? openEnd : openStart) && last; - var left = openLeft ? leftSide : (ltr ? fromPos : toPos).left; - var right = openRight ? rightSide : (ltr ? toPos : fromPos).right; - add(left, fromPos.top, right - left, fromPos.bottom); - } else { // Multiple lines - var topLeft, topRight, botLeft, botRight; - if (ltr) { - topLeft = docLTR && openStart && first ? leftSide : fromPos.left; - topRight = docLTR ? rightSide : wrapX(from, dir, "before"); - botLeft = docLTR ? leftSide : wrapX(to, dir, "after"); - botRight = docLTR && openEnd && last ? rightSide : toPos.right; - } else { - topLeft = !docLTR ? leftSide : wrapX(from, dir, "before"); - topRight = !docLTR && openStart && first ? rightSide : fromPos.right; - botLeft = !docLTR && openEnd && last ? leftSide : toPos.left; - botRight = !docLTR ? rightSide : wrapX(to, dir, "after"); - } - add(topLeft, fromPos.top, topRight - topLeft, fromPos.bottom); - if (fromPos.bottom < toPos.top) { add(leftSide, fromPos.bottom, null, toPos.top); } - add(botLeft, toPos.top, botRight - botLeft, toPos.bottom); - } - - if (!start || cmpCoords(fromPos, start) < 0) { start = fromPos; } - if (cmpCoords(toPos, start) < 0) { start = toPos; } - if (!end || cmpCoords(fromPos, end) < 0) { end = fromPos; } - if (cmpCoords(toPos, end) < 0) { end = toPos; } - }); - return {start: start, end: end} - } - - var sFrom = range$$1.from(), sTo = range$$1.to(); - if (sFrom.line == sTo.line) { - drawForLine(sFrom.line, sFrom.ch, sTo.ch); - } else { - var fromLine = getLine(doc, sFrom.line), toLine = getLine(doc, sTo.line); - var singleVLine = visualLine(fromLine) == visualLine(toLine); - var leftEnd = drawForLine(sFrom.line, sFrom.ch, singleVLine ? fromLine.text.length + 1 : null).end; - var rightStart = drawForLine(sTo.line, singleVLine ? 0 : null, sTo.ch).start; - if (singleVLine) { - if (leftEnd.top < rightStart.top - 2) { - add(leftEnd.right, leftEnd.top, null, leftEnd.bottom); - add(leftSide, rightStart.top, rightStart.left, rightStart.bottom); - } else { - add(leftEnd.right, leftEnd.top, rightStart.left - leftEnd.right, leftEnd.bottom); - } - } - if (leftEnd.bottom < rightStart.top) - { add(leftSide, leftEnd.bottom, null, rightStart.top); } - } - - output.appendChild(fragment); -} - -// Cursor-blinking -function restartBlink(cm) { - if (!cm.state.focused) { return } - var display = cm.display; - clearInterval(display.blinker); - var on = true; - display.cursorDiv.style.visibility = ""; - if (cm.options.cursorBlinkRate > 0) - { display.blinker = setInterval(function () { return display.cursorDiv.style.visibility = (on = !on) ? 
"" : "hidden"; }, - cm.options.cursorBlinkRate); } - else if (cm.options.cursorBlinkRate < 0) - { display.cursorDiv.style.visibility = "hidden"; } -} - -function ensureFocus(cm) { - if (!cm.state.focused) { cm.display.input.focus(); onFocus(cm); } -} - -function delayBlurEvent(cm) { - cm.state.delayingBlurEvent = true; - setTimeout(function () { if (cm.state.delayingBlurEvent) { - cm.state.delayingBlurEvent = false; - onBlur(cm); - } }, 100); -} - -function onFocus(cm, e) { - if (cm.state.delayingBlurEvent) { cm.state.delayingBlurEvent = false; } - - if (cm.options.readOnly == "nocursor") { return } - if (!cm.state.focused) { - signal(cm, "focus", cm, e); - cm.state.focused = true; - addClass(cm.display.wrapper, "CodeMirror-focused"); - // This test prevents this from firing when a context - // menu is closed (since the input reset would kill the - // select-all detection hack) - if (!cm.curOp && cm.display.selForContextMenu != cm.doc.sel) { - cm.display.input.reset(); - if (webkit) { setTimeout(function () { return cm.display.input.reset(true); }, 20); } // Issue #1730 - } - cm.display.input.receivedFocus(); - } - restartBlink(cm); -} -function onBlur(cm, e) { - if (cm.state.delayingBlurEvent) { return } - - if (cm.state.focused) { - signal(cm, "blur", cm, e); - cm.state.focused = false; - rmClass(cm.display.wrapper, "CodeMirror-focused"); - } - clearInterval(cm.display.blinker); - setTimeout(function () { if (!cm.state.focused) { cm.display.shift = false; } }, 150); -} - -// Read the actual heights of the rendered lines, and update their -// stored heights to match. -function updateHeightsInViewport(cm) { - var display = cm.display; - var prevBottom = display.lineDiv.offsetTop; - for (var i = 0; i < display.view.length; i++) { - var cur = display.view[i], height = (void 0); - if (cur.hidden) { continue } - if (ie && ie_version < 8) { - var bot = cur.node.offsetTop + cur.node.offsetHeight; - height = bot - prevBottom; - prevBottom = bot; - } else { - var box = cur.node.getBoundingClientRect(); - height = box.bottom - box.top; - } - var diff = cur.line.height - height; - if (height < 2) { height = textHeight(display); } - if (diff > .005 || diff < -.005) { - updateLineHeight(cur.line, height); - updateWidgetHeight(cur.line); - if (cur.rest) { for (var j = 0; j < cur.rest.length; j++) - { updateWidgetHeight(cur.rest[j]); } } - } - } -} - -// Read and store the height of line widgets associated with the -// given line. -function updateWidgetHeight(line) { - if (line.widgets) { for (var i = 0; i < line.widgets.length; ++i) - { line.widgets[i].height = line.widgets[i].node.parentNode.offsetHeight; } } -} - -// Compute the lines that are visible in a given viewport (defaults -// the the current scroll position). viewport may contain top, -// height, and ensure (see op.scrollToPos) properties. -function visibleLines(display, doc, viewport) { - var top = viewport && viewport.top != null ? Math.max(0, viewport.top) : display.scroller.scrollTop; - top = Math.floor(top - paddingTop(display)); - var bottom = viewport && viewport.bottom != null ? viewport.bottom : top + display.wrapper.clientHeight; - - var from = lineAtHeight(doc, top), to = lineAtHeight(doc, bottom); - // Ensure is a {from: {line, ch}, to: {line, ch}} object, and - // forces those lines into the viewport (if possible). 
- if (viewport && viewport.ensure) { - var ensureFrom = viewport.ensure.from.line, ensureTo = viewport.ensure.to.line; - if (ensureFrom < from) { - from = ensureFrom; - to = lineAtHeight(doc, heightAtLine(getLine(doc, ensureFrom)) + display.wrapper.clientHeight); - } else if (Math.min(ensureTo, doc.lastLine()) >= to) { - from = lineAtHeight(doc, heightAtLine(getLine(doc, ensureTo)) - display.wrapper.clientHeight); - to = ensureTo; - } - } - return {from: from, to: Math.max(to, from + 1)} -} - -// Re-align line numbers and gutter marks to compensate for -// horizontal scrolling. -function alignHorizontally(cm) { - var display = cm.display, view = display.view; - if (!display.alignWidgets && (!display.gutters.firstChild || !cm.options.fixedGutter)) { return } - var comp = compensateForHScroll(display) - display.scroller.scrollLeft + cm.doc.scrollLeft; - var gutterW = display.gutters.offsetWidth, left = comp + "px"; - for (var i = 0; i < view.length; i++) { if (!view[i].hidden) { - if (cm.options.fixedGutter) { - if (view[i].gutter) - { view[i].gutter.style.left = left; } - if (view[i].gutterBackground) - { view[i].gutterBackground.style.left = left; } - } - var align = view[i].alignable; - if (align) { for (var j = 0; j < align.length; j++) - { align[j].style.left = left; } } - } } - if (cm.options.fixedGutter) - { display.gutters.style.left = (comp + gutterW) + "px"; } -} - -// Used to ensure that the line number gutter is still the right -// size for the current document size. Returns true when an update -// is needed. -function maybeUpdateLineNumberWidth(cm) { - if (!cm.options.lineNumbers) { return false } - var doc = cm.doc, last = lineNumberFor(cm.options, doc.first + doc.size - 1), display = cm.display; - if (last.length != display.lineNumChars) { - var test = display.measure.appendChild(elt("div", [elt("div", last)], - "CodeMirror-linenumber CodeMirror-gutter-elt")); - var innerW = test.firstChild.offsetWidth, padding = test.offsetWidth - innerW; - display.lineGutter.style.width = ""; - display.lineNumInnerWidth = Math.max(innerW, display.lineGutter.offsetWidth - padding) + 1; - display.lineNumWidth = display.lineNumInnerWidth + padding; - display.lineNumChars = display.lineNumInnerWidth ? last.length : -1; - display.lineGutter.style.width = display.lineNumWidth + "px"; - updateGutterSpace(cm); - return true - } - return false -} - -// SCROLLING THINGS INTO VIEW - -// If an editor sits on the top or bottom of the window, partially -// scrolled out of view, this ensures that the cursor is visible. 
-function maybeScrollWindow(cm, rect) { - if (signalDOMEvent(cm, "scrollCursorIntoView")) { return } - - var display = cm.display, box = display.sizer.getBoundingClientRect(), doScroll = null; - if (rect.top + box.top < 0) { doScroll = true; } - else if (rect.bottom + box.top > (window.innerHeight || document.documentElement.clientHeight)) { doScroll = false; } - if (doScroll != null && !phantom) { - var scrollNode = elt("div", "\u200b", null, ("position: absolute;\n top: " + (rect.top - display.viewOffset - paddingTop(cm.display)) + "px;\n height: " + (rect.bottom - rect.top + scrollGap(cm) + display.barHeight) + "px;\n left: " + (rect.left) + "px; width: " + (Math.max(2, rect.right - rect.left)) + "px;")); - cm.display.lineSpace.appendChild(scrollNode); - scrollNode.scrollIntoView(doScroll); - cm.display.lineSpace.removeChild(scrollNode); - } -} - -// Scroll a given position into view (immediately), verifying that -// it actually became visible (as line heights are accurately -// measured, the position of something may 'drift' during drawing). -function scrollPosIntoView(cm, pos, end, margin) { - if (margin == null) { margin = 0; } - var rect; - if (!cm.options.lineWrapping && pos == end) { - // Set pos and end to the cursor positions around the character pos sticks to - // If pos.sticky == "before", that is around pos.ch - 1, otherwise around pos.ch - // If pos == Pos(_, 0, "before"), pos and end are unchanged - pos = pos.ch ? Pos(pos.line, pos.sticky == "before" ? pos.ch - 1 : pos.ch, "after") : pos; - end = pos.sticky == "before" ? Pos(pos.line, pos.ch + 1, "before") : pos; - } - for (var limit = 0; limit < 5; limit++) { - var changed = false; - var coords = cursorCoords(cm, pos); - var endCoords = !end || end == pos ? coords : cursorCoords(cm, end); - rect = {left: Math.min(coords.left, endCoords.left), - top: Math.min(coords.top, endCoords.top) - margin, - right: Math.max(coords.left, endCoords.left), - bottom: Math.max(coords.bottom, endCoords.bottom) + margin}; - var scrollPos = calculateScrollPos(cm, rect); - var startTop = cm.doc.scrollTop, startLeft = cm.doc.scrollLeft; - if (scrollPos.scrollTop != null) { - updateScrollTop(cm, scrollPos.scrollTop); - if (Math.abs(cm.doc.scrollTop - startTop) > 1) { changed = true; } - } - if (scrollPos.scrollLeft != null) { - setScrollLeft(cm, scrollPos.scrollLeft); - if (Math.abs(cm.doc.scrollLeft - startLeft) > 1) { changed = true; } - } - if (!changed) { break } - } - return rect -} - -// Scroll a given set of coordinates into view (immediately). -function scrollIntoView(cm, rect) { - var scrollPos = calculateScrollPos(cm, rect); - if (scrollPos.scrollTop != null) { updateScrollTop(cm, scrollPos.scrollTop); } - if (scrollPos.scrollLeft != null) { setScrollLeft(cm, scrollPos.scrollLeft); } -} - -// Calculate a new scroll position needed to scroll the given -// rectangle into view. Returns an object with scrollTop and -// scrollLeft properties. When these are undefined, the -// vertical/horizontal position does not need to be adjusted. -function calculateScrollPos(cm, rect) { - var display = cm.display, snapMargin = textHeight(cm.display); - if (rect.top < 0) { rect.top = 0; } - var screentop = cm.curOp && cm.curOp.scrollTop != null ? 
cm.curOp.scrollTop : display.scroller.scrollTop; - var screen = displayHeight(cm), result = {}; - if (rect.bottom - rect.top > screen) { rect.bottom = rect.top + screen; } - var docBottom = cm.doc.height + paddingVert(display); - var atTop = rect.top < snapMargin, atBottom = rect.bottom > docBottom - snapMargin; - if (rect.top < screentop) { - result.scrollTop = atTop ? 0 : rect.top; - } else if (rect.bottom > screentop + screen) { - var newTop = Math.min(rect.top, (atBottom ? docBottom : rect.bottom) - screen); - if (newTop != screentop) { result.scrollTop = newTop; } - } - - var screenleft = cm.curOp && cm.curOp.scrollLeft != null ? cm.curOp.scrollLeft : display.scroller.scrollLeft; - var screenw = displayWidth(cm) - (cm.options.fixedGutter ? display.gutters.offsetWidth : 0); - var tooWide = rect.right - rect.left > screenw; - if (tooWide) { rect.right = rect.left + screenw; } - if (rect.left < 10) - { result.scrollLeft = 0; } - else if (rect.left < screenleft) - { result.scrollLeft = Math.max(0, rect.left - (tooWide ? 0 : 10)); } - else if (rect.right > screenw + screenleft - 3) - { result.scrollLeft = rect.right + (tooWide ? 0 : 10) - screenw; } - return result -} - -// Store a relative adjustment to the scroll position in the current -// operation (to be applied when the operation finishes). -function addToScrollTop(cm, top) { - if (top == null) { return } - resolveScrollToPos(cm); - cm.curOp.scrollTop = (cm.curOp.scrollTop == null ? cm.doc.scrollTop : cm.curOp.scrollTop) + top; -} - -// Make sure that at the end of the operation the current cursor is -// shown. -function ensureCursorVisible(cm) { - resolveScrollToPos(cm); - var cur = cm.getCursor(); - cm.curOp.scrollToPos = {from: cur, to: cur, margin: cm.options.cursorScrollMargin}; -} - -function scrollToCoords(cm, x, y) { - if (x != null || y != null) { resolveScrollToPos(cm); } - if (x != null) { cm.curOp.scrollLeft = x; } - if (y != null) { cm.curOp.scrollTop = y; } -} - -function scrollToRange(cm, range$$1) { - resolveScrollToPos(cm); - cm.curOp.scrollToPos = range$$1; -} - -// When an operation has its scrollToPos property set, and another -// scroll action is applied before the end of the operation, this -// 'simulates' scrolling that position into view in a cheap way, so -// that the effect of intermediate scroll commands is not ignored. -function resolveScrollToPos(cm) { - var range$$1 = cm.curOp.scrollToPos; - if (range$$1) { - cm.curOp.scrollToPos = null; - var from = estimateCoords(cm, range$$1.from), to = estimateCoords(cm, range$$1.to); - scrollToCoordsRange(cm, from, to, range$$1.margin); - } -} - -function scrollToCoordsRange(cm, from, to, margin) { - var sPos = calculateScrollPos(cm, { - left: Math.min(from.left, to.left), - top: Math.min(from.top, to.top) - margin, - right: Math.max(from.right, to.right), - bottom: Math.max(from.bottom, to.bottom) + margin - }); - scrollToCoords(cm, sPos.scrollLeft, sPos.scrollTop); -} - -// Sync the scrollable area and scrollbars, ensure the viewport -// covers the visible area. 
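// (Illustrative sketch, not part of the original file, assuming cm is a CodeMirror
// instance.) The immediate-scroll helpers above (scrollIntoView / calculateScrollPos)
// can be driven directly with a box in editor-local coordinates, e.g. to reveal the
// primary cursor with a 10px vertical margin:
var c = cursorCoords(cm, cm.getCursor());   // "local" coordinates by default
scrollIntoView(cm, {left: c.left, top: c.top - 10, right: c.right, bottom: c.bottom + 10});
// calculateScrollPos then decides whether scrollTop / scrollLeft actually change.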
-function updateScrollTop(cm, val) { - if (Math.abs(cm.doc.scrollTop - val) < 2) { return } - if (!gecko) { updateDisplaySimple(cm, {top: val}); } - setScrollTop(cm, val, true); - if (gecko) { updateDisplaySimple(cm); } - startWorker(cm, 100); -} - -function setScrollTop(cm, val, forceScroll) { - val = Math.min(cm.display.scroller.scrollHeight - cm.display.scroller.clientHeight, val); - if (cm.display.scroller.scrollTop == val && !forceScroll) { return } - cm.doc.scrollTop = val; - cm.display.scrollbars.setScrollTop(val); - if (cm.display.scroller.scrollTop != val) { cm.display.scroller.scrollTop = val; } -} - -// Sync scroller and scrollbar, ensure the gutter elements are -// aligned. -function setScrollLeft(cm, val, isScroller, forceScroll) { - val = Math.min(val, cm.display.scroller.scrollWidth - cm.display.scroller.clientWidth); - if ((isScroller ? val == cm.doc.scrollLeft : Math.abs(cm.doc.scrollLeft - val) < 2) && !forceScroll) { return } - cm.doc.scrollLeft = val; - alignHorizontally(cm); - if (cm.display.scroller.scrollLeft != val) { cm.display.scroller.scrollLeft = val; } - cm.display.scrollbars.setScrollLeft(val); -} - -// SCROLLBARS - -// Prepare DOM reads needed to update the scrollbars. Done in one -// shot to minimize update/measure roundtrips. -function measureForScrollbars(cm) { - var d = cm.display, gutterW = d.gutters.offsetWidth; - var docH = Math.round(cm.doc.height + paddingVert(cm.display)); - return { - clientHeight: d.scroller.clientHeight, - viewHeight: d.wrapper.clientHeight, - scrollWidth: d.scroller.scrollWidth, clientWidth: d.scroller.clientWidth, - viewWidth: d.wrapper.clientWidth, - barLeft: cm.options.fixedGutter ? gutterW : 0, - docHeight: docH, - scrollHeight: docH + scrollGap(cm) + d.barHeight, - nativeBarWidth: d.nativeBarWidth, - gutterWidth: gutterW - } -} - -var NativeScrollbars = function(place, scroll, cm) { - this.cm = cm; - var vert = this.vert = elt("div", [elt("div", null, null, "min-width: 1px")], "CodeMirror-vscrollbar"); - var horiz = this.horiz = elt("div", [elt("div", null, null, "height: 100%; min-height: 1px")], "CodeMirror-hscrollbar"); - place(vert); place(horiz); - - on(vert, "scroll", function () { - if (vert.clientHeight) { scroll(vert.scrollTop, "vertical"); } - }); - on(horiz, "scroll", function () { - if (horiz.clientWidth) { scroll(horiz.scrollLeft, "horizontal"); } - }); - - this.checkedZeroWidth = false; - // Need to set a minimum width to see the scrollbar on IE7 (but must not set it on IE8). - if (ie && ie_version < 8) { this.horiz.style.minHeight = this.vert.style.minWidth = "18px"; } -}; - -NativeScrollbars.prototype.update = function (measure) { - var needsH = measure.scrollWidth > measure.clientWidth + 1; - var needsV = measure.scrollHeight > measure.clientHeight + 1; - var sWidth = measure.nativeBarWidth; - - if (needsV) { - this.vert.style.display = "block"; - this.vert.style.bottom = needsH ? sWidth + "px" : "0"; - var totalHeight = measure.viewHeight - (needsH ? sWidth : 0); - // A bug in IE8 can cause this value to be negative, so guard it. - this.vert.firstChild.style.height = - Math.max(0, measure.scrollHeight - measure.clientHeight + totalHeight) + "px"; - } else { - this.vert.style.display = ""; - this.vert.firstChild.style.height = "0"; - } - - if (needsH) { - this.horiz.style.display = "block"; - this.horiz.style.right = needsV ? sWidth + "px" : "0"; - this.horiz.style.left = measure.barLeft + "px"; - var totalWidth = measure.viewWidth - measure.barLeft - (needsV ? 
sWidth : 0); - this.horiz.firstChild.style.width = - Math.max(0, measure.scrollWidth - measure.clientWidth + totalWidth) + "px"; - } else { - this.horiz.style.display = ""; - this.horiz.firstChild.style.width = "0"; - } - - if (!this.checkedZeroWidth && measure.clientHeight > 0) { - if (sWidth == 0) { this.zeroWidthHack(); } - this.checkedZeroWidth = true; - } - - return {right: needsV ? sWidth : 0, bottom: needsH ? sWidth : 0} -}; - -NativeScrollbars.prototype.setScrollLeft = function (pos) { - if (this.horiz.scrollLeft != pos) { this.horiz.scrollLeft = pos; } - if (this.disableHoriz) { this.enableZeroWidthBar(this.horiz, this.disableHoriz, "horiz"); } -}; - -NativeScrollbars.prototype.setScrollTop = function (pos) { - if (this.vert.scrollTop != pos) { this.vert.scrollTop = pos; } - if (this.disableVert) { this.enableZeroWidthBar(this.vert, this.disableVert, "vert"); } -}; - -NativeScrollbars.prototype.zeroWidthHack = function () { - var w = mac && !mac_geMountainLion ? "12px" : "18px"; - this.horiz.style.height = this.vert.style.width = w; - this.horiz.style.pointerEvents = this.vert.style.pointerEvents = "none"; - this.disableHoriz = new Delayed; - this.disableVert = new Delayed; -}; - -NativeScrollbars.prototype.enableZeroWidthBar = function (bar, delay, type) { - bar.style.pointerEvents = "auto"; - function maybeDisable() { - // To find out whether the scrollbar is still visible, we - // check whether the element under the pixel in the bottom - // right corner of the scrollbar box is the scrollbar box - // itself (when the bar is still visible) or its filler child - // (when the bar is hidden). If it is still visible, we keep - // it enabled, if it's hidden, we disable pointer events. - var box = bar.getBoundingClientRect(); - var elt$$1 = type == "vert" ? document.elementFromPoint(box.right - 1, (box.top + box.bottom) / 2) - : document.elementFromPoint((box.right + box.left) / 2, box.bottom - 1); - if (elt$$1 != bar) { bar.style.pointerEvents = "none"; } - else { delay.set(1000, maybeDisable); } - } - delay.set(1000, maybeDisable); -}; - -NativeScrollbars.prototype.clear = function () { - var parent = this.horiz.parentNode; - parent.removeChild(this.horiz); - parent.removeChild(this.vert); -}; - -var NullScrollbars = function () {}; - -NullScrollbars.prototype.update = function () { return {bottom: 0, right: 0} }; -NullScrollbars.prototype.setScrollLeft = function () {}; -NullScrollbars.prototype.setScrollTop = function () {}; -NullScrollbars.prototype.clear = function () {}; - -function updateScrollbars(cm, measure) { - if (!measure) { measure = measureForScrollbars(cm); } - var startWidth = cm.display.barWidth, startHeight = cm.display.barHeight; - updateScrollbarsInner(cm, measure); - for (var i = 0; i < 4 && startWidth != cm.display.barWidth || startHeight != cm.display.barHeight; i++) { - if (startWidth != cm.display.barWidth && cm.options.lineWrapping) - { updateHeightsInViewport(cm); } - updateScrollbarsInner(cm, measureForScrollbars(cm)); - startWidth = cm.display.barWidth; startHeight = cm.display.barHeight; - } -} - -// Re-synchronize the fake scrollbars with the actual size of the -// content. 
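// (Illustrative sketch, not part of the original file.) Which of the two scrollbar
// models above is used comes from the public scrollbarStyle option, looked up in the
// scrollbarModel map below, for example:
var editor = CodeMirror(document.getElementById("editor"), {  // hypothetical container id
  scrollbarStyle: "null"   // suppress CodeMirror's own scrollbars entirely
});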
-function updateScrollbarsInner(cm, measure) { - var d = cm.display; - var sizes = d.scrollbars.update(measure); - - d.sizer.style.paddingRight = (d.barWidth = sizes.right) + "px"; - d.sizer.style.paddingBottom = (d.barHeight = sizes.bottom) + "px"; - d.heightForcer.style.borderBottom = sizes.bottom + "px solid transparent"; - - if (sizes.right && sizes.bottom) { - d.scrollbarFiller.style.display = "block"; - d.scrollbarFiller.style.height = sizes.bottom + "px"; - d.scrollbarFiller.style.width = sizes.right + "px"; - } else { d.scrollbarFiller.style.display = ""; } - if (sizes.bottom && cm.options.coverGutterNextToScrollbar && cm.options.fixedGutter) { - d.gutterFiller.style.display = "block"; - d.gutterFiller.style.height = sizes.bottom + "px"; - d.gutterFiller.style.width = measure.gutterWidth + "px"; - } else { d.gutterFiller.style.display = ""; } -} - -var scrollbarModel = {"native": NativeScrollbars, "null": NullScrollbars}; - -function initScrollbars(cm) { - if (cm.display.scrollbars) { - cm.display.scrollbars.clear(); - if (cm.display.scrollbars.addClass) - { rmClass(cm.display.wrapper, cm.display.scrollbars.addClass); } - } - - cm.display.scrollbars = new scrollbarModel[cm.options.scrollbarStyle](function (node) { - cm.display.wrapper.insertBefore(node, cm.display.scrollbarFiller); - // Prevent clicks in the scrollbars from killing focus - on(node, "mousedown", function () { - if (cm.state.focused) { setTimeout(function () { return cm.display.input.focus(); }, 0); } - }); - node.setAttribute("cm-not-content", "true"); - }, function (pos, axis) { - if (axis == "horizontal") { setScrollLeft(cm, pos); } - else { updateScrollTop(cm, pos); } - }, cm); - if (cm.display.scrollbars.addClass) - { addClass(cm.display.wrapper, cm.display.scrollbars.addClass); } -} - -// Operations are used to wrap a series of changes to the editor -// state in such a way that each change won't have to update the -// cursor and display (which would be awkward, slow, and -// error-prone). Instead, display updates are batched and then all -// combined and executed at once. - -var nextOpId = 0; -// Start a new operation. 
-function startOperation(cm) { - cm.curOp = { - cm: cm, - viewChanged: false, // Flag that indicates that lines might need to be redrawn - startHeight: cm.doc.height, // Used to detect need to update scrollbar - forceUpdate: false, // Used to force a redraw - updateInput: null, // Whether to reset the input textarea - typing: false, // Whether this reset should be careful to leave existing text (for compositing) - changeObjs: null, // Accumulated changes, for firing change events - cursorActivityHandlers: null, // Set of handlers to fire cursorActivity on - cursorActivityCalled: 0, // Tracks which cursorActivity handlers have been called already - selectionChanged: false, // Whether the selection needs to be redrawn - updateMaxLine: false, // Set when the widest line needs to be determined anew - scrollLeft: null, scrollTop: null, // Intermediate scroll position, not pushed to DOM yet - scrollToPos: null, // Used to scroll to a specific position - focus: false, - id: ++nextOpId // Unique ID - }; - pushOperation(cm.curOp); -} - -// Finish an operation, updating the display and signalling delayed events -function endOperation(cm) { - var op = cm.curOp; - finishOperation(op, function (group) { - for (var i = 0; i < group.ops.length; i++) - { group.ops[i].cm.curOp = null; } - endOperations(group); - }); -} - -// The DOM updates done when an operation finishes are batched so -// that the minimum number of relayouts are required. -function endOperations(group) { - var ops = group.ops; - for (var i = 0; i < ops.length; i++) // Read DOM - { endOperation_R1(ops[i]); } - for (var i$1 = 0; i$1 < ops.length; i$1++) // Write DOM (maybe) - { endOperation_W1(ops[i$1]); } - for (var i$2 = 0; i$2 < ops.length; i$2++) // Read DOM - { endOperation_R2(ops[i$2]); } - for (var i$3 = 0; i$3 < ops.length; i$3++) // Write DOM (maybe) - { endOperation_W2(ops[i$3]); } - for (var i$4 = 0; i$4 < ops.length; i$4++) // Read DOM - { endOperation_finish(ops[i$4]); } -} - -function endOperation_R1(op) { - var cm = op.cm, display = cm.display; - maybeClipScrollbars(cm); - if (op.updateMaxLine) { findMaxLine(cm); } - - op.mustUpdate = op.viewChanged || op.forceUpdate || op.scrollTop != null || - op.scrollToPos && (op.scrollToPos.from.line < display.viewFrom || - op.scrollToPos.to.line >= display.viewTo) || - display.maxLineChanged && cm.options.lineWrapping; - op.update = op.mustUpdate && - new DisplayUpdate(cm, op.mustUpdate && {top: op.scrollTop, ensure: op.scrollToPos}, op.forceUpdate); -} - -function endOperation_W1(op) { - op.updatedDisplay = op.mustUpdate && updateDisplayIfNeeded(op.cm, op.update); -} - -function endOperation_R2(op) { - var cm = op.cm, display = cm.display; - if (op.updatedDisplay) { updateHeightsInViewport(cm); } - - op.barMeasure = measureForScrollbars(cm); - - // If the max line changed since it was last measured, measure it, - // and ensure the document's width matches it. 
- // updateDisplay_W2 will use these properties to do the actual resizing - if (display.maxLineChanged && !cm.options.lineWrapping) { - op.adjustWidthTo = measureChar(cm, display.maxLine, display.maxLine.text.length).left + 3; - cm.display.sizerWidth = op.adjustWidthTo; - op.barMeasure.scrollWidth = - Math.max(display.scroller.clientWidth, display.sizer.offsetLeft + op.adjustWidthTo + scrollGap(cm) + cm.display.barWidth); - op.maxScrollLeft = Math.max(0, display.sizer.offsetLeft + op.adjustWidthTo - displayWidth(cm)); - } - - if (op.updatedDisplay || op.selectionChanged) - { op.preparedSelection = display.input.prepareSelection(); } -} - -function endOperation_W2(op) { - var cm = op.cm; - - if (op.adjustWidthTo != null) { - cm.display.sizer.style.minWidth = op.adjustWidthTo + "px"; - if (op.maxScrollLeft < cm.doc.scrollLeft) - { setScrollLeft(cm, Math.min(cm.display.scroller.scrollLeft, op.maxScrollLeft), true); } - cm.display.maxLineChanged = false; - } - - var takeFocus = op.focus && op.focus == activeElt(); - if (op.preparedSelection) - { cm.display.input.showSelection(op.preparedSelection, takeFocus); } - if (op.updatedDisplay || op.startHeight != cm.doc.height) - { updateScrollbars(cm, op.barMeasure); } - if (op.updatedDisplay) - { setDocumentHeight(cm, op.barMeasure); } - - if (op.selectionChanged) { restartBlink(cm); } - - if (cm.state.focused && op.updateInput) - { cm.display.input.reset(op.typing); } - if (takeFocus) { ensureFocus(op.cm); } -} - -function endOperation_finish(op) { - var cm = op.cm, display = cm.display, doc = cm.doc; - - if (op.updatedDisplay) { postUpdateDisplay(cm, op.update); } - - // Abort mouse wheel delta measurement, when scrolling explicitly - if (display.wheelStartX != null && (op.scrollTop != null || op.scrollLeft != null || op.scrollToPos)) - { display.wheelStartX = display.wheelStartY = null; } - - // Propagate the scroll position to the actual DOM scroller - if (op.scrollTop != null) { setScrollTop(cm, op.scrollTop, op.forceScroll); } - - if (op.scrollLeft != null) { setScrollLeft(cm, op.scrollLeft, true, true); } - // If we need to scroll a specific position into view, do so. - if (op.scrollToPos) { - var rect = scrollPosIntoView(cm, clipPos(doc, op.scrollToPos.from), - clipPos(doc, op.scrollToPos.to), op.scrollToPos.margin); - maybeScrollWindow(cm, rect); - } - - // Fire events for markers that are hidden/unidden by editing or - // undoing - var hidden = op.maybeHiddenMarkers, unhidden = op.maybeUnhiddenMarkers; - if (hidden) { for (var i = 0; i < hidden.length; ++i) - { if (!hidden[i].lines.length) { signal(hidden[i], "hide"); } } } - if (unhidden) { for (var i$1 = 0; i$1 < unhidden.length; ++i$1) - { if (unhidden[i$1].lines.length) { signal(unhidden[i$1], "unhide"); } } } - - if (display.wrapper.offsetHeight) - { doc.scrollTop = cm.display.scroller.scrollTop; } - - // Fire change events, and delayed event handlers - if (op.changeObjs) - { signal(cm, "changes", cm, op.changeObjs); } - if (op.update) - { op.update.finish(); } -} - -// Run the given function in an operation -function runInOp(cm, f) { - if (cm.curOp) { return f() } - startOperation(cm); - try { return f() } - finally { endOperation(cm); } -} -// Wraps a function in an operation. Returns the wrapped function. 
-function operation(cm, f) { - return function() { - if (cm.curOp) { return f.apply(cm, arguments) } - startOperation(cm); - try { return f.apply(cm, arguments) } - finally { endOperation(cm); } - } -} -// Used to add methods to editor and doc instances, wrapping them in -// operations. -function methodOp(f) { - return function() { - if (this.curOp) { return f.apply(this, arguments) } - startOperation(this); - try { return f.apply(this, arguments) } - finally { endOperation(this); } - } -} -function docMethodOp(f) { - return function() { - var cm = this.cm; - if (!cm || cm.curOp) { return f.apply(this, arguments) } - startOperation(cm); - try { return f.apply(this, arguments) } - finally { endOperation(cm); } - } -} - -// Updates the display.view data structure for a given change to the -// document. From and to are in pre-change coordinates. Lendiff is -// the amount of lines added or subtracted by the change. This is -// used for changes that span multiple lines, or change the way -// lines are divided into visual lines. regLineChange (below) -// registers single-line changes. -function regChange(cm, from, to, lendiff) { - if (from == null) { from = cm.doc.first; } - if (to == null) { to = cm.doc.first + cm.doc.size; } - if (!lendiff) { lendiff = 0; } - - var display = cm.display; - if (lendiff && to < display.viewTo && - (display.updateLineNumbers == null || display.updateLineNumbers > from)) - { display.updateLineNumbers = from; } - - cm.curOp.viewChanged = true; - - if (from >= display.viewTo) { // Change after - if (sawCollapsedSpans && visualLineNo(cm.doc, from) < display.viewTo) - { resetView(cm); } - } else if (to <= display.viewFrom) { // Change before - if (sawCollapsedSpans && visualLineEndNo(cm.doc, to + lendiff) > display.viewFrom) { - resetView(cm); - } else { - display.viewFrom += lendiff; - display.viewTo += lendiff; - } - } else if (from <= display.viewFrom && to >= display.viewTo) { // Full overlap - resetView(cm); - } else if (from <= display.viewFrom) { // Top overlap - var cut = viewCuttingPoint(cm, to, to + lendiff, 1); - if (cut) { - display.view = display.view.slice(cut.index); - display.viewFrom = cut.lineN; - display.viewTo += lendiff; - } else { - resetView(cm); - } - } else if (to >= display.viewTo) { // Bottom overlap - var cut$1 = viewCuttingPoint(cm, from, from, -1); - if (cut$1) { - display.view = display.view.slice(0, cut$1.index); - display.viewTo = cut$1.lineN; - } else { - resetView(cm); - } - } else { // Gap in the middle - var cutTop = viewCuttingPoint(cm, from, from, -1); - var cutBot = viewCuttingPoint(cm, to, to + lendiff, 1); - if (cutTop && cutBot) { - display.view = display.view.slice(0, cutTop.index) - .concat(buildViewArray(cm, cutTop.lineN, cutBot.lineN)) - .concat(display.view.slice(cutBot.index)); - display.viewTo += lendiff; - } else { - resetView(cm); - } - } - - var ext = display.externalMeasured; - if (ext) { - if (to < ext.lineN) - { ext.lineN += lendiff; } - else if (from < ext.lineN + ext.size) - { display.externalMeasured = null; } - } -} - -// Register a change to a single line. 
Type must be one of "text", -// "gutter", "class", "widget" -function regLineChange(cm, line, type) { - cm.curOp.viewChanged = true; - var display = cm.display, ext = cm.display.externalMeasured; - if (ext && line >= ext.lineN && line < ext.lineN + ext.size) - { display.externalMeasured = null; } - - if (line < display.viewFrom || line >= display.viewTo) { return } - var lineView = display.view[findViewIndex(cm, line)]; - if (lineView.node == null) { return } - var arr = lineView.changes || (lineView.changes = []); - if (indexOf(arr, type) == -1) { arr.push(type); } -} - -// Clear the view. -function resetView(cm) { - cm.display.viewFrom = cm.display.viewTo = cm.doc.first; - cm.display.view = []; - cm.display.viewOffset = 0; -} - -function viewCuttingPoint(cm, oldN, newN, dir) { - var index = findViewIndex(cm, oldN), diff, view = cm.display.view; - if (!sawCollapsedSpans || newN == cm.doc.first + cm.doc.size) - { return {index: index, lineN: newN} } - var n = cm.display.viewFrom; - for (var i = 0; i < index; i++) - { n += view[i].size; } - if (n != oldN) { - if (dir > 0) { - if (index == view.length - 1) { return null } - diff = (n + view[index].size) - oldN; - index++; - } else { - diff = n - oldN; - } - oldN += diff; newN += diff; - } - while (visualLineNo(cm.doc, newN) != newN) { - if (index == (dir < 0 ? 0 : view.length - 1)) { return null } - newN += dir * view[index - (dir < 0 ? 1 : 0)].size; - index += dir; - } - return {index: index, lineN: newN} -} - -// Force the view to cover a given range, adding empty view element -// or clipping off existing ones as needed. -function adjustView(cm, from, to) { - var display = cm.display, view = display.view; - if (view.length == 0 || from >= display.viewTo || to <= display.viewFrom) { - display.view = buildViewArray(cm, from, to); - display.viewFrom = from; - } else { - if (display.viewFrom > from) - { display.view = buildViewArray(cm, from, display.viewFrom).concat(display.view); } - else if (display.viewFrom < from) - { display.view = display.view.slice(findViewIndex(cm, from)); } - display.viewFrom = from; - if (display.viewTo < to) - { display.view = display.view.concat(buildViewArray(cm, display.viewTo, to)); } - else if (display.viewTo > to) - { display.view = display.view.slice(0, findViewIndex(cm, to)); } - } - display.viewTo = to; -} - -// Count the number of lines in the view whose DOM representation is -// out of date (or nonexistent). -function countDirtyView(cm) { - var view = cm.display.view, dirty = 0; - for (var i = 0; i < view.length; i++) { - var lineView = view[i]; - if (!lineView.hidden && (!lineView.node || lineView.changes)) { ++dirty; } - } - return dirty -} - -// HIGHLIGHT WORKER - -function startWorker(cm, time) { - if (cm.doc.highlightFrontier < cm.display.viewTo) - { cm.state.highlight.set(time, bind(highlightWorker, cm)); } -} - -function highlightWorker(cm) { - var doc = cm.doc; - if (doc.highlightFrontier >= cm.display.viewTo) { return } - var end = +new Date + cm.options.workTime; - var context = getContextBefore(cm, doc.highlightFrontier); - var changedLines = []; - - doc.iter(context.line, Math.min(doc.first + doc.size, cm.display.viewTo + 500), function (line) { - if (context.line >= cm.display.viewFrom) { // Visible - var oldStyles = line.styles; - var resetState = line.text.length > cm.options.maxHighlightLength ? 
copyState(doc.mode, context.state) : null; - var highlighted = highlightLine(cm, line, context, true); - if (resetState) { context.state = resetState; } - line.styles = highlighted.styles; - var oldCls = line.styleClasses, newCls = highlighted.classes; - if (newCls) { line.styleClasses = newCls; } - else if (oldCls) { line.styleClasses = null; } - var ischange = !oldStyles || oldStyles.length != line.styles.length || - oldCls != newCls && (!oldCls || !newCls || oldCls.bgClass != newCls.bgClass || oldCls.textClass != newCls.textClass); - for (var i = 0; !ischange && i < oldStyles.length; ++i) { ischange = oldStyles[i] != line.styles[i]; } - if (ischange) { changedLines.push(context.line); } - line.stateAfter = context.save(); - context.nextLine(); - } else { - if (line.text.length <= cm.options.maxHighlightLength) - { processLine(cm, line.text, context); } - line.stateAfter = context.line % 5 == 0 ? context.save() : null; - context.nextLine(); - } - if (+new Date > end) { - startWorker(cm, cm.options.workDelay); - return true - } - }); - doc.highlightFrontier = context.line; - doc.modeFrontier = Math.max(doc.modeFrontier, context.line); - if (changedLines.length) { runInOp(cm, function () { - for (var i = 0; i < changedLines.length; i++) - { regLineChange(cm, changedLines[i], "text"); } - }); } -} - -// DISPLAY DRAWING - -var DisplayUpdate = function(cm, viewport, force) { - var display = cm.display; - - this.viewport = viewport; - // Store some values that we'll need later (but don't want to force a relayout for) - this.visible = visibleLines(display, cm.doc, viewport); - this.editorIsHidden = !display.wrapper.offsetWidth; - this.wrapperHeight = display.wrapper.clientHeight; - this.wrapperWidth = display.wrapper.clientWidth; - this.oldDisplayWidth = displayWidth(cm); - this.force = force; - this.dims = getDimensions(cm); - this.events = []; -}; - -DisplayUpdate.prototype.signal = function (emitter, type) { - if (hasHandler(emitter, type)) - { this.events.push(arguments); } -}; -DisplayUpdate.prototype.finish = function () { - var this$1 = this; - - for (var i = 0; i < this.events.length; i++) - { signal.apply(null, this$1.events[i]); } -}; - -function maybeClipScrollbars(cm) { - var display = cm.display; - if (!display.scrollbarsClipped && display.scroller.offsetWidth) { - display.nativeBarWidth = display.scroller.offsetWidth - display.scroller.clientWidth; - display.heightForcer.style.height = scrollGap(cm) + "px"; - display.sizer.style.marginBottom = -display.nativeBarWidth + "px"; - display.sizer.style.borderRightWidth = scrollGap(cm) + "px"; - display.scrollbarsClipped = true; - } -} - -function selectionSnapshot(cm) { - if (cm.hasFocus()) { return null } - var active = activeElt(); - if (!active || !contains(cm.display.lineDiv, active)) { return null } - var result = {activeElt: active}; - if (window.getSelection) { - var sel = window.getSelection(); - if (sel.anchorNode && sel.extend && contains(cm.display.lineDiv, sel.anchorNode)) { - result.anchorNode = sel.anchorNode; - result.anchorOffset = sel.anchorOffset; - result.focusNode = sel.focusNode; - result.focusOffset = sel.focusOffset; - } - } - return result -} - -function restoreSelection(snapshot) { - if (!snapshot || !snapshot.activeElt || snapshot.activeElt == activeElt()) { return } - snapshot.activeElt.focus(); - if (snapshot.anchorNode && contains(document.body, snapshot.anchorNode) && contains(document.body, snapshot.focusNode)) { - var sel = window.getSelection(), range$$1 = document.createRange(); - 
range$$1.setEnd(snapshot.anchorNode, snapshot.anchorOffset); - range$$1.collapse(false); - sel.removeAllRanges(); - sel.addRange(range$$1); - sel.extend(snapshot.focusNode, snapshot.focusOffset); - } -} - -// Does the actual updating of the line display. Bails out -// (returning false) when there is nothing to be done and forced is -// false. -function updateDisplayIfNeeded(cm, update) { - var display = cm.display, doc = cm.doc; - - if (update.editorIsHidden) { - resetView(cm); - return false - } - - // Bail out if the visible area is already rendered and nothing changed. - if (!update.force && - update.visible.from >= display.viewFrom && update.visible.to <= display.viewTo && - (display.updateLineNumbers == null || display.updateLineNumbers >= display.viewTo) && - display.renderedView == display.view && countDirtyView(cm) == 0) - { return false } - - if (maybeUpdateLineNumberWidth(cm)) { - resetView(cm); - update.dims = getDimensions(cm); - } - - // Compute a suitable new viewport (from & to) - var end = doc.first + doc.size; - var from = Math.max(update.visible.from - cm.options.viewportMargin, doc.first); - var to = Math.min(end, update.visible.to + cm.options.viewportMargin); - if (display.viewFrom < from && from - display.viewFrom < 20) { from = Math.max(doc.first, display.viewFrom); } - if (display.viewTo > to && display.viewTo - to < 20) { to = Math.min(end, display.viewTo); } - if (sawCollapsedSpans) { - from = visualLineNo(cm.doc, from); - to = visualLineEndNo(cm.doc, to); - } - - var different = from != display.viewFrom || to != display.viewTo || - display.lastWrapHeight != update.wrapperHeight || display.lastWrapWidth != update.wrapperWidth; - adjustView(cm, from, to); - - display.viewOffset = heightAtLine(getLine(cm.doc, display.viewFrom)); - // Position the mover div to align with the current scroll position - cm.display.mover.style.top = display.viewOffset + "px"; - - var toUpdate = countDirtyView(cm); - if (!different && toUpdate == 0 && !update.force && display.renderedView == display.view && - (display.updateLineNumbers == null || display.updateLineNumbers >= display.viewTo)) - { return false } - - // For big changes, we hide the enclosing element during the - // update, since that speeds up the operations on most browsers. - var selSnapshot = selectionSnapshot(cm); - if (toUpdate > 4) { display.lineDiv.style.display = "none"; } - patchDisplay(cm, display.updateLineNumbers, update.dims); - if (toUpdate > 4) { display.lineDiv.style.display = ""; } - display.renderedView = display.view; - // There might have been a widget with a focused element that got - // hidden or updated, if so re-focus it. - restoreSelection(selSnapshot); - - // Prevent selection and cursors from interfering with the scroll - // width and height. - removeChildren(display.cursorDiv); - removeChildren(display.selectionDiv); - display.gutters.style.height = display.sizer.style.minHeight = 0; - - if (different) { - display.lastWrapHeight = update.wrapperHeight; - display.lastWrapWidth = update.wrapperWidth; - startWorker(cm, 400); - } - - display.updateLineNumbers = null; - - return true -} - -function postUpdateDisplay(cm, update) { - var viewport = update.viewport; - - for (var first = true;; first = false) { - if (!first || !cm.options.lineWrapping || update.oldDisplayWidth == displayWidth(cm)) { - // Clip forced viewport to actual scrollable area. 
- if (viewport && viewport.top != null) - { viewport = {top: Math.min(cm.doc.height + paddingVert(cm.display) - displayHeight(cm), viewport.top)}; } - // Updated line heights might result in the drawn area not - // actually covering the viewport. Keep looping until it does. - update.visible = visibleLines(cm.display, cm.doc, viewport); - if (update.visible.from >= cm.display.viewFrom && update.visible.to <= cm.display.viewTo) - { break } - } - if (!updateDisplayIfNeeded(cm, update)) { break } - updateHeightsInViewport(cm); - var barMeasure = measureForScrollbars(cm); - updateSelection(cm); - updateScrollbars(cm, barMeasure); - setDocumentHeight(cm, barMeasure); - update.force = false; - } - - update.signal(cm, "update", cm); - if (cm.display.viewFrom != cm.display.reportedViewFrom || cm.display.viewTo != cm.display.reportedViewTo) { - update.signal(cm, "viewportChange", cm, cm.display.viewFrom, cm.display.viewTo); - cm.display.reportedViewFrom = cm.display.viewFrom; cm.display.reportedViewTo = cm.display.viewTo; - } -} - -function updateDisplaySimple(cm, viewport) { - var update = new DisplayUpdate(cm, viewport); - if (updateDisplayIfNeeded(cm, update)) { - updateHeightsInViewport(cm); - postUpdateDisplay(cm, update); - var barMeasure = measureForScrollbars(cm); - updateSelection(cm); - updateScrollbars(cm, barMeasure); - setDocumentHeight(cm, barMeasure); - update.finish(); - } -} - -// Sync the actual display DOM structure with display.view, removing -// nodes for lines that are no longer in view, and creating the ones -// that are not there yet, and updating the ones that are out of -// date. -function patchDisplay(cm, updateNumbersFrom, dims) { - var display = cm.display, lineNumbers = cm.options.lineNumbers; - var container = display.lineDiv, cur = container.firstChild; - - function rm(node) { - var next = node.nextSibling; - // Works around a throw-scroll bug in OS X Webkit - if (webkit && mac && cm.display.currentWheelTarget == node) - { node.style.display = "none"; } - else - { node.parentNode.removeChild(node); } - return next - } - - var view = display.view, lineN = display.viewFrom; - // Loop over the elements in the view, syncing cur (the DOM nodes - // in display.lineDiv) with the view as we go. 
- for (var i = 0; i < view.length; i++) { - var lineView = view[i]; - if (lineView.hidden) { - } else if (!lineView.node || lineView.node.parentNode != container) { // Not drawn yet - var node = buildLineElement(cm, lineView, lineN, dims); - container.insertBefore(node, cur); - } else { // Already drawn - while (cur != lineView.node) { cur = rm(cur); } - var updateNumber = lineNumbers && updateNumbersFrom != null && - updateNumbersFrom <= lineN && lineView.lineNumber; - if (lineView.changes) { - if (indexOf(lineView.changes, "gutter") > -1) { updateNumber = false; } - updateLineForChanges(cm, lineView, lineN, dims); - } - if (updateNumber) { - removeChildren(lineView.lineNumber); - lineView.lineNumber.appendChild(document.createTextNode(lineNumberFor(cm.options, lineN))); - } - cur = lineView.node.nextSibling; - } - lineN += lineView.size; - } - while (cur) { cur = rm(cur); } -} - -function updateGutterSpace(cm) { - var width = cm.display.gutters.offsetWidth; - cm.display.sizer.style.marginLeft = width + "px"; -} - -function setDocumentHeight(cm, measure) { - cm.display.sizer.style.minHeight = measure.docHeight + "px"; - cm.display.heightForcer.style.top = measure.docHeight + "px"; - cm.display.gutters.style.height = (measure.docHeight + cm.display.barHeight + scrollGap(cm)) + "px"; -} - -// Rebuild the gutter elements, ensure the margin to the left of the -// code matches their width. -function updateGutters(cm) { - var gutters = cm.display.gutters, specs = cm.options.gutters; - removeChildren(gutters); - var i = 0; - for (; i < specs.length; ++i) { - var gutterClass = specs[i]; - var gElt = gutters.appendChild(elt("div", null, "CodeMirror-gutter " + gutterClass)); - if (gutterClass == "CodeMirror-linenumbers") { - cm.display.lineGutter = gElt; - gElt.style.width = (cm.display.lineNumWidth || 1) + "px"; - } - } - gutters.style.display = i ? "" : "none"; - updateGutterSpace(cm); -} - -// Make sure the gutters options contains the element -// "CodeMirror-linenumbers" when the lineNumbers option is true. -function setGuttersForLineNumbers(options) { - var found = indexOf(options.gutters, "CodeMirror-linenumbers"); - if (found == -1 && options.lineNumbers) { - options.gutters = options.gutters.concat(["CodeMirror-linenumbers"]); - } else if (found > -1 && !options.lineNumbers) { - options.gutters = options.gutters.slice(0); - options.gutters.splice(found, 1); - } -} - -// Since the delta values reported on mouse wheel events are -// unstandardized between browsers and even browser versions, and -// generally horribly unpredictable, this code starts by measuring -// the scroll effect that the first few mouse wheel events have, -// and, from that, detects the way it can convert deltas to pixel -// offsets afterwards. -// -// The reason we want to know the amount a wheel event will scroll -// is that it gives us a chance to update the display before the -// actual scrolling happens, reducing flickering. - -var wheelSamples = 0; -var wheelPixelsPerUnit = null; -// Fill in a browser-detected starting value on browsers where we -// know one. These don't have to be accurate -- the result of them -// being wrong would just be a slight flicker on the first wheel -// scroll (if it is large enough). 
-if (ie) { wheelPixelsPerUnit = -.53; } -else if (gecko) { wheelPixelsPerUnit = 15; } -else if (chrome) { wheelPixelsPerUnit = -.7; } -else if (safari) { wheelPixelsPerUnit = -1/3; } - -function wheelEventDelta(e) { - var dx = e.wheelDeltaX, dy = e.wheelDeltaY; - if (dx == null && e.detail && e.axis == e.HORIZONTAL_AXIS) { dx = e.detail; } - if (dy == null && e.detail && e.axis == e.VERTICAL_AXIS) { dy = e.detail; } - else if (dy == null) { dy = e.wheelDelta; } - return {x: dx, y: dy} -} -function wheelEventPixels(e) { - var delta = wheelEventDelta(e); - delta.x *= wheelPixelsPerUnit; - delta.y *= wheelPixelsPerUnit; - return delta -} - -function onScrollWheel(cm, e) { - var delta = wheelEventDelta(e), dx = delta.x, dy = delta.y; - - var display = cm.display, scroll = display.scroller; - // Quit if there's nothing to scroll here - var canScrollX = scroll.scrollWidth > scroll.clientWidth; - var canScrollY = scroll.scrollHeight > scroll.clientHeight; - if (!(dx && canScrollX || dy && canScrollY)) { return } - - // Webkit browsers on OS X abort momentum scrolls when the target - // of the scroll event is removed from the scrollable element. - // This hack (see related code in patchDisplay) makes sure the - // element is kept around. - if (dy && mac && webkit) { - outer: for (var cur = e.target, view = display.view; cur != scroll; cur = cur.parentNode) { - for (var i = 0; i < view.length; i++) { - if (view[i].node == cur) { - cm.display.currentWheelTarget = cur; - break outer - } - } - } - } - - // On some browsers, horizontal scrolling will cause redraws to - // happen before the gutter has been realigned, causing it to - // wriggle around in a most unseemly way. When we have an - // estimated pixels/delta value, we just handle horizontal - // scrolling entirely here. It'll be slightly off from native, but - // better than glitching out. - if (dx && !gecko && !presto && wheelPixelsPerUnit != null) { - if (dy && canScrollY) - { updateScrollTop(cm, Math.max(0, scroll.scrollTop + dy * wheelPixelsPerUnit)); } - setScrollLeft(cm, Math.max(0, scroll.scrollLeft + dx * wheelPixelsPerUnit)); - // Only prevent default scrolling if vertical scrolling is - // actually possible. Otherwise, it causes vertical scroll - // jitter on OSX trackpads when deltaX is small and deltaY - // is large (issue #3579) - if (!dy || (dy && canScrollY)) - { e_preventDefault(e); } - display.wheelStartX = null; // Abort measurement, if in progress - return - } - - // 'Project' the visible viewport to cover the area that is being - // scrolled into view (if we know enough to estimate it). 
- if (dy && wheelPixelsPerUnit != null) { - var pixels = dy * wheelPixelsPerUnit; - var top = cm.doc.scrollTop, bot = top + display.wrapper.clientHeight; - if (pixels < 0) { top = Math.max(0, top + pixels - 50); } - else { bot = Math.min(cm.doc.height, bot + pixels + 50); } - updateDisplaySimple(cm, {top: top, bottom: bot}); - } - - if (wheelSamples < 20) { - if (display.wheelStartX == null) { - display.wheelStartX = scroll.scrollLeft; display.wheelStartY = scroll.scrollTop; - display.wheelDX = dx; display.wheelDY = dy; - setTimeout(function () { - if (display.wheelStartX == null) { return } - var movedX = scroll.scrollLeft - display.wheelStartX; - var movedY = scroll.scrollTop - display.wheelStartY; - var sample = (movedY && display.wheelDY && movedY / display.wheelDY) || - (movedX && display.wheelDX && movedX / display.wheelDX); - display.wheelStartX = display.wheelStartY = null; - if (!sample) { return } - wheelPixelsPerUnit = (wheelPixelsPerUnit * wheelSamples + sample) / (wheelSamples + 1); - ++wheelSamples; - }, 200); - } else { - display.wheelDX += dx; display.wheelDY += dy; - } - } -} - -// Selection objects are immutable. A new one is created every time -// the selection changes. A selection is one or more non-overlapping -// (and non-touching) ranges, sorted, and an integer that indicates -// which one is the primary selection (the one that's scrolled into -// view, that getCursor returns, etc). -var Selection = function(ranges, primIndex) { - this.ranges = ranges; - this.primIndex = primIndex; -}; - -Selection.prototype.primary = function () { return this.ranges[this.primIndex] }; - -Selection.prototype.equals = function (other) { - var this$1 = this; - - if (other == this) { return true } - if (other.primIndex != this.primIndex || other.ranges.length != this.ranges.length) { return false } - for (var i = 0; i < this.ranges.length; i++) { - var here = this$1.ranges[i], there = other.ranges[i]; - if (!equalCursorPos(here.anchor, there.anchor) || !equalCursorPos(here.head, there.head)) { return false } - } - return true -}; - -Selection.prototype.deepCopy = function () { - var this$1 = this; - - var out = []; - for (var i = 0; i < this.ranges.length; i++) - { out[i] = new Range(copyPos(this$1.ranges[i].anchor), copyPos(this$1.ranges[i].head)); } - return new Selection(out, this.primIndex) -}; - -Selection.prototype.somethingSelected = function () { - var this$1 = this; - - for (var i = 0; i < this.ranges.length; i++) - { if (!this$1.ranges[i].empty()) { return true } } - return false -}; - -Selection.prototype.contains = function (pos, end) { - var this$1 = this; - - if (!end) { end = pos; } - for (var i = 0; i < this.ranges.length; i++) { - var range = this$1.ranges[i]; - if (cmp(end, range.from()) >= 0 && cmp(pos, range.to()) <= 0) - { return i } - } - return -1 -}; - -var Range = function(anchor, head) { - this.anchor = anchor; this.head = head; -}; - -Range.prototype.from = function () { return minPos(this.anchor, this.head) }; -Range.prototype.to = function () { return maxPos(this.anchor, this.head) }; -Range.prototype.empty = function () { return this.head.line == this.anchor.line && this.head.ch == this.anchor.ch }; - -// Take an unsorted, potentially overlapping set of ranges, and -// build a selection out of it. 'Consumes' ranges array (modifying -// it). 
-function normalizeSelection(ranges, primIndex) { - var prim = ranges[primIndex]; - ranges.sort(function (a, b) { return cmp(a.from(), b.from()); }); - primIndex = indexOf(ranges, prim); - for (var i = 1; i < ranges.length; i++) { - var cur = ranges[i], prev = ranges[i - 1]; - if (cmp(prev.to(), cur.from()) >= 0) { - var from = minPos(prev.from(), cur.from()), to = maxPos(prev.to(), cur.to()); - var inv = prev.empty() ? cur.from() == cur.head : prev.from() == prev.head; - if (i <= primIndex) { --primIndex; } - ranges.splice(--i, 2, new Range(inv ? to : from, inv ? from : to)); - } - } - return new Selection(ranges, primIndex) -} - -function simpleSelection(anchor, head) { - return new Selection([new Range(anchor, head || anchor)], 0) -} - -// Compute the position of the end of a change (its 'to' property -// refers to the pre-change end). -function changeEnd(change) { - if (!change.text) { return change.to } - return Pos(change.from.line + change.text.length - 1, - lst(change.text).length + (change.text.length == 1 ? change.from.ch : 0)) -} - -// Adjust a position to refer to the post-change position of the -// same text, or the end of the change if the change covers it. -function adjustForChange(pos, change) { - if (cmp(pos, change.from) < 0) { return pos } - if (cmp(pos, change.to) <= 0) { return changeEnd(change) } - - var line = pos.line + change.text.length - (change.to.line - change.from.line) - 1, ch = pos.ch; - if (pos.line == change.to.line) { ch += changeEnd(change).ch - change.to.ch; } - return Pos(line, ch) -} - -function computeSelAfterChange(doc, change) { - var out = []; - for (var i = 0; i < doc.sel.ranges.length; i++) { - var range = doc.sel.ranges[i]; - out.push(new Range(adjustForChange(range.anchor, change), - adjustForChange(range.head, change))); - } - return normalizeSelection(out, doc.sel.primIndex) -} - -function offsetPos(pos, old, nw) { - if (pos.line == old.line) - { return Pos(nw.line, pos.ch - old.ch + nw.ch) } - else - { return Pos(nw.line + (pos.line - old.line), pos.ch) } -} - -// Used by replaceSelections to allow moving the selection to the -// start or around the replaced test. Hint may be "start" or "around". -function computeReplacedSel(doc, changes, hint) { - var out = []; - var oldPrev = Pos(doc.first, 0), newPrev = oldPrev; - for (var i = 0; i < changes.length; i++) { - var change = changes[i]; - var from = offsetPos(change.from, oldPrev, newPrev); - var to = offsetPos(changeEnd(change), oldPrev, newPrev); - oldPrev = change.to; - newPrev = to; - if (hint == "around") { - var range = doc.sel.ranges[i], inv = cmp(range.head, range.anchor) < 0; - out[i] = new Range(inv ? to : from, inv ? from : to); - } else { - out[i] = new Range(from, from); - } - } - return new Selection(out, doc.sel.primIndex) -} - -// Used to get the editor into a consistent state again when options change. - -function loadMode(cm) { - cm.doc.mode = getMode(cm.options, cm.doc.modeOption); - resetModeState(cm); -} - -function resetModeState(cm) { - cm.doc.iter(function (line) { - if (line.stateAfter) { line.stateAfter = null; } - if (line.styles) { line.styles = null; } - }); - cm.doc.modeFrontier = cm.doc.highlightFrontier = cm.doc.first; - startWorker(cm, 100); - cm.state.modeGen++; - if (cm.curOp) { regChange(cm); } -} - -// DOCUMENT DATA STRUCTURE - -// By default, updates that start and end at the beginning of a line -// are treated specially, in order to make the association of line -// widgets and marker elements with the text behave more intuitive. 
-function isWholeLineUpdate(doc, change) { - return change.from.ch == 0 && change.to.ch == 0 && lst(change.text) == "" && - (!doc.cm || doc.cm.options.wholeLineUpdateBefore) -} - -// Perform a change on the document data structure. -function updateDoc(doc, change, markedSpans, estimateHeight$$1) { - function spansFor(n) {return markedSpans ? markedSpans[n] : null} - function update(line, text, spans) { - updateLine(line, text, spans, estimateHeight$$1); - signalLater(line, "change", line, change); - } - function linesFor(start, end) { - var result = []; - for (var i = start; i < end; ++i) - { result.push(new Line(text[i], spansFor(i), estimateHeight$$1)); } - return result - } - - var from = change.from, to = change.to, text = change.text; - var firstLine = getLine(doc, from.line), lastLine = getLine(doc, to.line); - var lastText = lst(text), lastSpans = spansFor(text.length - 1), nlines = to.line - from.line; - - // Adjust the line structure - if (change.full) { - doc.insert(0, linesFor(0, text.length)); - doc.remove(text.length, doc.size - text.length); - } else if (isWholeLineUpdate(doc, change)) { - // This is a whole-line replace. Treated specially to make - // sure line objects move the way they are supposed to. - var added = linesFor(0, text.length - 1); - update(lastLine, lastLine.text, lastSpans); - if (nlines) { doc.remove(from.line, nlines); } - if (added.length) { doc.insert(from.line, added); } - } else if (firstLine == lastLine) { - if (text.length == 1) { - update(firstLine, firstLine.text.slice(0, from.ch) + lastText + firstLine.text.slice(to.ch), lastSpans); - } else { - var added$1 = linesFor(1, text.length - 1); - added$1.push(new Line(lastText + firstLine.text.slice(to.ch), lastSpans, estimateHeight$$1)); - update(firstLine, firstLine.text.slice(0, from.ch) + text[0], spansFor(0)); - doc.insert(from.line + 1, added$1); - } - } else if (text.length == 1) { - update(firstLine, firstLine.text.slice(0, from.ch) + text[0] + lastLine.text.slice(to.ch), spansFor(0)); - doc.remove(from.line + 1, nlines); - } else { - update(firstLine, firstLine.text.slice(0, from.ch) + text[0], spansFor(0)); - update(lastLine, lastText + lastLine.text.slice(to.ch), lastSpans); - var added$2 = linesFor(1, text.length - 1); - if (nlines > 1) { doc.remove(from.line + 1, nlines - 1); } - doc.insert(from.line + 1, added$2); - } - - signalLater(doc, "change", doc, change); -} - -// Call f for all linked documents. -function linkedDocs(doc, f, sharedHistOnly) { - function propagate(doc, skip, sharedHist) { - if (doc.linked) { for (var i = 0; i < doc.linked.length; ++i) { - var rel = doc.linked[i]; - if (rel.doc == skip) { continue } - var shared = sharedHist && rel.sharedHist; - if (sharedHistOnly && !shared) { continue } - f(rel.doc, shared); - propagate(rel.doc, doc, shared); - } } - } - propagate(doc, null, true); -} - -// Attach a document to an editor. -function attachDoc(cm, doc) { - if (doc.cm) { throw new Error("This document is already in use.") } - cm.doc = doc; - doc.cm = cm; - estimateLineHeights(cm); - loadMode(cm); - setDirectionClass(cm); - if (!cm.options.lineWrapping) { findMaxLine(cm); } - cm.options.mode = doc.modeOption; - regChange(cm); -} - -function setDirectionClass(cm) { - (cm.doc.direction == "rtl" ? addClass : rmClass)(cm.display.lineDiv, "CodeMirror-rtl"); -} - -function directionChanged(cm) { - runInOp(cm, function () { - setDirectionClass(cm); - regChange(cm); - }); -} - -function History(startGen) { - // Arrays of change events and selections. 
Doing something adds an - // event to done and clears undo. Undoing moves events from done - // to undone, redoing moves them in the other direction. - this.done = []; this.undone = []; - this.undoDepth = Infinity; - // Used to track when changes can be merged into a single undo - // event - this.lastModTime = this.lastSelTime = 0; - this.lastOp = this.lastSelOp = null; - this.lastOrigin = this.lastSelOrigin = null; - // Used by the isClean() method - this.generation = this.maxGeneration = startGen || 1; -} - -// Create a history change event from an updateDoc-style change -// object. -function historyChangeFromChange(doc, change) { - var histChange = {from: copyPos(change.from), to: changeEnd(change), text: getBetween(doc, change.from, change.to)}; - attachLocalSpans(doc, histChange, change.from.line, change.to.line + 1); - linkedDocs(doc, function (doc) { return attachLocalSpans(doc, histChange, change.from.line, change.to.line + 1); }, true); - return histChange -} - -// Pop all selection events off the end of a history array. Stop at -// a change event. -function clearSelectionEvents(array) { - while (array.length) { - var last = lst(array); - if (last.ranges) { array.pop(); } - else { break } - } -} - -// Find the top change event in the history. Pop off selection -// events that are in the way. -function lastChangeEvent(hist, force) { - if (force) { - clearSelectionEvents(hist.done); - return lst(hist.done) - } else if (hist.done.length && !lst(hist.done).ranges) { - return lst(hist.done) - } else if (hist.done.length > 1 && !hist.done[hist.done.length - 2].ranges) { - hist.done.pop(); - return lst(hist.done) - } -} - -// Register a change in the history. Merges changes that are within -// a single operation, or are close together with an origin that -// allows merging (starting with "+") into a single event. -function addChangeToHistory(doc, change, selAfter, opId) { - var hist = doc.history; - hist.undone.length = 0; - var time = +new Date, cur; - var last; - - if ((hist.lastOp == opId || - hist.lastOrigin == change.origin && change.origin && - ((change.origin.charAt(0) == "+" && doc.cm && hist.lastModTime > time - doc.cm.options.historyEventDelay) || - change.origin.charAt(0) == "*")) && - (cur = lastChangeEvent(hist, hist.lastOp == opId))) { - // Merge this change into the last event - last = lst(cur.changes); - if (cmp(change.from, change.to) == 0 && cmp(change.from, last.to) == 0) { - // Optimized case for simple insertion -- don't want to add - // new changesets for every character typed - last.to = changeEnd(change); - } else { - // Add new sub-event - cur.changes.push(historyChangeFromChange(doc, change)); - } - } else { - // Can not be merged, start a new event. 
- var before = lst(hist.done); - if (!before || !before.ranges) - { pushSelectionToHistory(doc.sel, hist.done); } - cur = {changes: [historyChangeFromChange(doc, change)], - generation: hist.generation}; - hist.done.push(cur); - while (hist.done.length > hist.undoDepth) { - hist.done.shift(); - if (!hist.done[0].ranges) { hist.done.shift(); } - } - } - hist.done.push(selAfter); - hist.generation = ++hist.maxGeneration; - hist.lastModTime = hist.lastSelTime = time; - hist.lastOp = hist.lastSelOp = opId; - hist.lastOrigin = hist.lastSelOrigin = change.origin; - - if (!last) { signal(doc, "historyAdded"); } -} - -function selectionEventCanBeMerged(doc, origin, prev, sel) { - var ch = origin.charAt(0); - return ch == "*" || - ch == "+" && - prev.ranges.length == sel.ranges.length && - prev.somethingSelected() == sel.somethingSelected() && - new Date - doc.history.lastSelTime <= (doc.cm ? doc.cm.options.historyEventDelay : 500) -} - -// Called whenever the selection changes, sets the new selection as -// the pending selection in the history, and pushes the old pending -// selection into the 'done' array when it was significantly -// different (in number of selected ranges, emptiness, or time). -function addSelectionToHistory(doc, sel, opId, options) { - var hist = doc.history, origin = options && options.origin; - - // A new event is started when the previous origin does not match - // the current, or the origins don't allow matching. Origins - // starting with * are always merged, those starting with + are - // merged when similar and close together in time. - if (opId == hist.lastSelOp || - (origin && hist.lastSelOrigin == origin && - (hist.lastModTime == hist.lastSelTime && hist.lastOrigin == origin || - selectionEventCanBeMerged(doc, origin, lst(hist.done), sel)))) - { hist.done[hist.done.length - 1] = sel; } - else - { pushSelectionToHistory(sel, hist.done); } - - hist.lastSelTime = +new Date; - hist.lastSelOrigin = origin; - hist.lastSelOp = opId; - if (options && options.clearRedo !== false) - { clearSelectionEvents(hist.undone); } -} - -function pushSelectionToHistory(sel, dest) { - var top = lst(dest); - if (!(top && top.ranges && top.equals(sel))) - { dest.push(sel); } -} - -// Used to store marked span information in the history. -function attachLocalSpans(doc, change, from, to) { - var existing = change["spans_" + doc.id], n = 0; - doc.iter(Math.max(doc.first, from), Math.min(doc.first + doc.size, to), function (line) { - if (line.markedSpans) - { (existing || (existing = change["spans_" + doc.id] = {}))[n] = line.markedSpans; } - ++n; - }); -} - -// When un/re-doing restores text containing marked spans, those -// that have been explicitly cleared should not be restored. -function removeClearedSpans(spans) { - if (!spans) { return null } - var out; - for (var i = 0; i < spans.length; ++i) { - if (spans[i].marker.explicitlyCleared) { if (!out) { out = spans.slice(0, i); } } - else if (out) { out.push(spans[i]); } - } - return !out ? spans : out.length ? out : null -} - -// Retrieve and filter the old marked spans stored in a change event. -function getOldSpans(doc, change) { - var found = change["spans_" + doc.id]; - if (!found) { return null } - var nw = []; - for (var i = 0; i < change.text.length; ++i) - { nw.push(removeClearedSpans(found[i])); } - return nw -} - -// Used for un/re-doing changes from the history. 
Combines the -// result of computing the existing spans with the set of spans that -// existed in the history (so that deleting around a span and then -// undoing brings back the span). -function mergeOldSpans(doc, change) { - var old = getOldSpans(doc, change); - var stretched = stretchSpansOverChange(doc, change); - if (!old) { return stretched } - if (!stretched) { return old } - - for (var i = 0; i < old.length; ++i) { - var oldCur = old[i], stretchCur = stretched[i]; - if (oldCur && stretchCur) { - spans: for (var j = 0; j < stretchCur.length; ++j) { - var span = stretchCur[j]; - for (var k = 0; k < oldCur.length; ++k) - { if (oldCur[k].marker == span.marker) { continue spans } } - oldCur.push(span); - } - } else if (stretchCur) { - old[i] = stretchCur; - } - } - return old -} - -// Used both to provide a JSON-safe object in .getHistory, and, when -// detaching a document, to split the history in two -function copyHistoryArray(events, newGroup, instantiateSel) { - var copy = []; - for (var i = 0; i < events.length; ++i) { - var event = events[i]; - if (event.ranges) { - copy.push(instantiateSel ? Selection.prototype.deepCopy.call(event) : event); - continue - } - var changes = event.changes, newChanges = []; - copy.push({changes: newChanges}); - for (var j = 0; j < changes.length; ++j) { - var change = changes[j], m = (void 0); - newChanges.push({from: change.from, to: change.to, text: change.text}); - if (newGroup) { for (var prop in change) { if (m = prop.match(/^spans_(\d+)$/)) { - if (indexOf(newGroup, Number(m[1])) > -1) { - lst(newChanges)[prop] = change[prop]; - delete change[prop]; - } - } } } - } - } - return copy -} - -// The 'scroll' parameter given to many of these indicated whether -// the new cursor position should be scrolled into view after -// modifying the selection. - -// If shift is held or the extend flag is set, extends a range to -// include a given position (and optionally a second position). -// Otherwise, simply returns the range between the given positions. -// Used for cursor motion and such. -function extendRange(range, head, other, extend) { - if (extend) { - var anchor = range.anchor; - if (other) { - var posBefore = cmp(head, anchor) < 0; - if (posBefore != (cmp(other, anchor) < 0)) { - anchor = head; - head = other; - } else if (posBefore != (cmp(head, other) < 0)) { - head = other; - } - } - return new Range(anchor, head) - } else { - return new Range(other || head, head) - } -} - -// Extend the primary selection range, discard the rest. -function extendSelection(doc, head, other, options, extend) { - if (extend == null) { extend = doc.cm && (doc.cm.display.shift || doc.extend); } - setSelection(doc, new Selection([extendRange(doc.sel.primary(), head, other, extend)], 0), options); -} - -// Extend all selections (pos is an array of selections with length -// equal the number of selections) -function extendSelections(doc, heads, options) { - var out = []; - var extend = doc.cm && (doc.cm.display.shift || doc.extend); - for (var i = 0; i < doc.sel.ranges.length; i++) - { out[i] = extendRange(doc.sel.ranges[i], heads[i], null, extend); } - var newSel = normalizeSelection(out, doc.sel.primIndex); - setSelection(doc, newSel, options); -} - -// Updates a single range in the selection. -function replaceOneSelection(doc, i, range, options) { - var ranges = doc.sel.ranges.slice(0); - ranges[i] = range; - setSelection(doc, normalizeSelection(ranges, doc.sel.primIndex), options); -} - -// Reset the selection to a single range. 
-function setSimpleSelection(doc, anchor, head, options) { - setSelection(doc, simpleSelection(anchor, head), options); -} - -// Give beforeSelectionChange handlers a change to influence a -// selection update. -function filterSelectionChange(doc, sel, options) { - var obj = { - ranges: sel.ranges, - update: function(ranges) { - var this$1 = this; - - this.ranges = []; - for (var i = 0; i < ranges.length; i++) - { this$1.ranges[i] = new Range(clipPos(doc, ranges[i].anchor), - clipPos(doc, ranges[i].head)); } - }, - origin: options && options.origin - }; - signal(doc, "beforeSelectionChange", doc, obj); - if (doc.cm) { signal(doc.cm, "beforeSelectionChange", doc.cm, obj); } - if (obj.ranges != sel.ranges) { return normalizeSelection(obj.ranges, obj.ranges.length - 1) } - else { return sel } -} - -function setSelectionReplaceHistory(doc, sel, options) { - var done = doc.history.done, last = lst(done); - if (last && last.ranges) { - done[done.length - 1] = sel; - setSelectionNoUndo(doc, sel, options); - } else { - setSelection(doc, sel, options); - } -} - -// Set a new selection. -function setSelection(doc, sel, options) { - setSelectionNoUndo(doc, sel, options); - addSelectionToHistory(doc, doc.sel, doc.cm ? doc.cm.curOp.id : NaN, options); -} - -function setSelectionNoUndo(doc, sel, options) { - if (hasHandler(doc, "beforeSelectionChange") || doc.cm && hasHandler(doc.cm, "beforeSelectionChange")) - { sel = filterSelectionChange(doc, sel, options); } - - var bias = options && options.bias || - (cmp(sel.primary().head, doc.sel.primary().head) < 0 ? -1 : 1); - setSelectionInner(doc, skipAtomicInSelection(doc, sel, bias, true)); - - if (!(options && options.scroll === false) && doc.cm) - { ensureCursorVisible(doc.cm); } -} - -function setSelectionInner(doc, sel) { - if (sel.equals(doc.sel)) { return } - - doc.sel = sel; - - if (doc.cm) { - doc.cm.curOp.updateInput = doc.cm.curOp.selectionChanged = true; - signalCursorActivity(doc.cm); - } - signalLater(doc, "cursorActivity", doc); -} - -// Verify that the selection does not partially select any atomic -// marked ranges. -function reCheckSelection(doc) { - setSelectionInner(doc, skipAtomicInSelection(doc, doc.sel, null, false)); -} - -// Return a selection that does not partially select any atomic -// ranges. -function skipAtomicInSelection(doc, sel, bias, mayClear) { - var out; - for (var i = 0; i < sel.ranges.length; i++) { - var range = sel.ranges[i]; - var old = sel.ranges.length == doc.sel.ranges.length && doc.sel.ranges[i]; - var newAnchor = skipAtomic(doc, range.anchor, old && old.anchor, bias, mayClear); - var newHead = skipAtomic(doc, range.head, old && old.head, bias, mayClear); - if (out || newAnchor != range.anchor || newHead != range.head) { - if (!out) { out = sel.ranges.slice(0, i); } - out[i] = new Range(newAnchor, newHead); - } - } - return out ? normalizeSelection(out, sel.primIndex) : sel -} - -function skipAtomicInner(doc, pos, oldPos, dir, mayClear) { - var line = getLine(doc, pos.line); - if (line.markedSpans) { for (var i = 0; i < line.markedSpans.length; ++i) { - var sp = line.markedSpans[i], m = sp.marker; - if ((sp.from == null || (m.inclusiveLeft ? sp.from <= pos.ch : sp.from < pos.ch)) && - (sp.to == null || (m.inclusiveRight ? sp.to >= pos.ch : sp.to > pos.ch))) { - if (mayClear) { - signal(m, "beforeCursorEnter"); - if (m.explicitlyCleared) { - if (!line.markedSpans) { break } - else {--i; continue} - } - } - if (!m.atomic) { continue } - - if (oldPos) { - var near = m.find(dir < 0 ? 
1 : -1), diff = (void 0); - if (dir < 0 ? m.inclusiveRight : m.inclusiveLeft) - { near = movePos(doc, near, -dir, near && near.line == pos.line ? line : null); } - if (near && near.line == pos.line && (diff = cmp(near, oldPos)) && (dir < 0 ? diff < 0 : diff > 0)) - { return skipAtomicInner(doc, near, pos, dir, mayClear) } - } - - var far = m.find(dir < 0 ? -1 : 1); - if (dir < 0 ? m.inclusiveLeft : m.inclusiveRight) - { far = movePos(doc, far, dir, far.line == pos.line ? line : null); } - return far ? skipAtomicInner(doc, far, pos, dir, mayClear) : null - } - } } - return pos -} - -// Ensure a given position is not inside an atomic range. -function skipAtomic(doc, pos, oldPos, bias, mayClear) { - var dir = bias || 1; - var found = skipAtomicInner(doc, pos, oldPos, dir, mayClear) || - (!mayClear && skipAtomicInner(doc, pos, oldPos, dir, true)) || - skipAtomicInner(doc, pos, oldPos, -dir, mayClear) || - (!mayClear && skipAtomicInner(doc, pos, oldPos, -dir, true)); - if (!found) { - doc.cantEdit = true; - return Pos(doc.first, 0) - } - return found -} - -function movePos(doc, pos, dir, line) { - if (dir < 0 && pos.ch == 0) { - if (pos.line > doc.first) { return clipPos(doc, Pos(pos.line - 1)) } - else { return null } - } else if (dir > 0 && pos.ch == (line || getLine(doc, pos.line)).text.length) { - if (pos.line < doc.first + doc.size - 1) { return Pos(pos.line + 1, 0) } - else { return null } - } else { - return new Pos(pos.line, pos.ch + dir) - } -} - -function selectAll(cm) { - cm.setSelection(Pos(cm.firstLine(), 0), Pos(cm.lastLine()), sel_dontScroll); -} - -// UPDATING - -// Allow "beforeChange" event handlers to influence a change -function filterChange(doc, change, update) { - var obj = { - canceled: false, - from: change.from, - to: change.to, - text: change.text, - origin: change.origin, - cancel: function () { return obj.canceled = true; } - }; - if (update) { obj.update = function (from, to, text, origin) { - if (from) { obj.from = clipPos(doc, from); } - if (to) { obj.to = clipPos(doc, to); } - if (text) { obj.text = text; } - if (origin !== undefined) { obj.origin = origin; } - }; } - signal(doc, "beforeChange", doc, obj); - if (doc.cm) { signal(doc.cm, "beforeChange", doc.cm, obj); } - - if (obj.canceled) { return null } - return {from: obj.from, to: obj.to, text: obj.text, origin: obj.origin} -} - -// Apply a change to a document, and add it to the document's -// history, and propagating it to all linked documents. -function makeChange(doc, change, ignoreReadOnly) { - if (doc.cm) { - if (!doc.cm.curOp) { return operation(doc.cm, makeChange)(doc, change, ignoreReadOnly) } - if (doc.cm.state.suppressEdits) { return } - } - - if (hasHandler(doc, "beforeChange") || doc.cm && hasHandler(doc.cm, "beforeChange")) { - change = filterChange(doc, change, true); - if (!change) { return } - } - - // Possibly split or suppress the update based on the presence - // of read-only spans in its range. - var split = sawReadOnlySpans && !ignoreReadOnly && removeReadOnlyRanges(doc, change.from, change.to); - if (split) { - for (var i = split.length - 1; i >= 0; --i) - { makeChangeInner(doc, {from: split[i].from, to: split[i].to, text: i ? [""] : change.text, origin: change.origin}); } - } else { - makeChangeInner(doc, change); - } -} - -function makeChangeInner(doc, change) { - if (change.text.length == 1 && change.text[0] == "" && cmp(change.from, change.to) == 0) { return } - var selAfter = computeSelAfterChange(doc, change); - addChangeToHistory(doc, change, selAfter, doc.cm ? 
doc.cm.curOp.id : NaN); - - makeChangeSingleDoc(doc, change, selAfter, stretchSpansOverChange(doc, change)); - var rebased = []; - - linkedDocs(doc, function (doc, sharedHist) { - if (!sharedHist && indexOf(rebased, doc.history) == -1) { - rebaseHist(doc.history, change); - rebased.push(doc.history); - } - makeChangeSingleDoc(doc, change, null, stretchSpansOverChange(doc, change)); - }); -} - -// Revert a change stored in a document's history. -function makeChangeFromHistory(doc, type, allowSelectionOnly) { - if (doc.cm && doc.cm.state.suppressEdits && !allowSelectionOnly) { return } - - var hist = doc.history, event, selAfter = doc.sel; - var source = type == "undo" ? hist.done : hist.undone, dest = type == "undo" ? hist.undone : hist.done; - - // Verify that there is a useable event (so that ctrl-z won't - // needlessly clear selection events) - var i = 0; - for (; i < source.length; i++) { - event = source[i]; - if (allowSelectionOnly ? event.ranges && !event.equals(doc.sel) : !event.ranges) - { break } - } - if (i == source.length) { return } - hist.lastOrigin = hist.lastSelOrigin = null; - - for (;;) { - event = source.pop(); - if (event.ranges) { - pushSelectionToHistory(event, dest); - if (allowSelectionOnly && !event.equals(doc.sel)) { - setSelection(doc, event, {clearRedo: false}); - return - } - selAfter = event; - } - else { break } - } - - // Build up a reverse change object to add to the opposite history - // stack (redo when undoing, and vice versa). - var antiChanges = []; - pushSelectionToHistory(selAfter, dest); - dest.push({changes: antiChanges, generation: hist.generation}); - hist.generation = event.generation || ++hist.maxGeneration; - - var filter = hasHandler(doc, "beforeChange") || doc.cm && hasHandler(doc.cm, "beforeChange"); - - var loop = function ( i ) { - var change = event.changes[i]; - change.origin = type; - if (filter && !filterChange(doc, change, false)) { - source.length = 0; - return {} - } - - antiChanges.push(historyChangeFromChange(doc, change)); - - var after = i ? computeSelAfterChange(doc, change) : lst(source); - makeChangeSingleDoc(doc, change, after, mergeOldSpans(doc, change)); - if (!i && doc.cm) { doc.cm.scrollIntoView({from: change.from, to: changeEnd(change)}); } - var rebased = []; - - // Propagate to the linked documents - linkedDocs(doc, function (doc, sharedHist) { - if (!sharedHist && indexOf(rebased, doc.history) == -1) { - rebaseHist(doc.history, change); - rebased.push(doc.history); - } - makeChangeSingleDoc(doc, change, null, mergeOldSpans(doc, change)); - }); - }; - - for (var i$1 = event.changes.length - 1; i$1 >= 0; --i$1) { - var returned = loop( i$1 ); - - if ( returned ) return returned.v; - } -} - -// Sub-views need their line numbers shifted when text is added -// above or below them in the parent document. -function shiftDoc(doc, distance) { - if (distance == 0) { return } - doc.first += distance; - doc.sel = new Selection(map(doc.sel.ranges, function (range) { return new Range( - Pos(range.anchor.line + distance, range.anchor.ch), - Pos(range.head.line + distance, range.head.ch) - ); }), doc.sel.primIndex); - if (doc.cm) { - regChange(doc.cm, doc.first, doc.first - distance, distance); - for (var d = doc.cm.display, l = d.viewFrom; l < d.viewTo; l++) - { regLineChange(doc.cm, l, "gutter"); } - } -} - -// More lower-level change function, handling only a single document -// (not linked ones). 
-function makeChangeSingleDoc(doc, change, selAfter, spans) { - if (doc.cm && !doc.cm.curOp) - { return operation(doc.cm, makeChangeSingleDoc)(doc, change, selAfter, spans) } - - if (change.to.line < doc.first) { - shiftDoc(doc, change.text.length - 1 - (change.to.line - change.from.line)); - return - } - if (change.from.line > doc.lastLine()) { return } - - // Clip the change to the size of this doc - if (change.from.line < doc.first) { - var shift = change.text.length - 1 - (doc.first - change.from.line); - shiftDoc(doc, shift); - change = {from: Pos(doc.first, 0), to: Pos(change.to.line + shift, change.to.ch), - text: [lst(change.text)], origin: change.origin}; - } - var last = doc.lastLine(); - if (change.to.line > last) { - change = {from: change.from, to: Pos(last, getLine(doc, last).text.length), - text: [change.text[0]], origin: change.origin}; - } - - change.removed = getBetween(doc, change.from, change.to); - - if (!selAfter) { selAfter = computeSelAfterChange(doc, change); } - if (doc.cm) { makeChangeSingleDocInEditor(doc.cm, change, spans); } - else { updateDoc(doc, change, spans); } - setSelectionNoUndo(doc, selAfter, sel_dontScroll); -} - -// Handle the interaction of a change to a document with the editor -// that this document is part of. -function makeChangeSingleDocInEditor(cm, change, spans) { - var doc = cm.doc, display = cm.display, from = change.from, to = change.to; - - var recomputeMaxLength = false, checkWidthStart = from.line; - if (!cm.options.lineWrapping) { - checkWidthStart = lineNo(visualLine(getLine(doc, from.line))); - doc.iter(checkWidthStart, to.line + 1, function (line) { - if (line == display.maxLine) { - recomputeMaxLength = true; - return true - } - }); - } - - if (doc.sel.contains(change.from, change.to) > -1) - { signalCursorActivity(cm); } - - updateDoc(doc, change, spans, estimateHeight(cm)); - - if (!cm.options.lineWrapping) { - doc.iter(checkWidthStart, from.line + change.text.length, function (line) { - var len = lineLength(line); - if (len > display.maxLineLength) { - display.maxLine = line; - display.maxLineLength = len; - display.maxLineChanged = true; - recomputeMaxLength = false; - } - }); - if (recomputeMaxLength) { cm.curOp.updateMaxLine = true; } - } - - retreatFrontier(doc, from.line); - startWorker(cm, 400); - - var lendiff = change.text.length - (to.line - from.line) - 1; - // Remember that these lines changed, for updating the display - if (change.full) - { regChange(cm); } - else if (from.line == to.line && change.text.length == 1 && !isWholeLineUpdate(cm.doc, change)) - { regLineChange(cm, from.line, "text"); } - else - { regChange(cm, from.line, to.line + 1, lendiff); } - - var changesHandler = hasHandler(cm, "changes"), changeHandler = hasHandler(cm, "change"); - if (changeHandler || changesHandler) { - var obj = { - from: from, to: to, - text: change.text, - removed: change.removed, - origin: change.origin - }; - if (changeHandler) { signalLater(cm, "change", cm, obj); } - if (changesHandler) { (cm.curOp.changeObjs || (cm.curOp.changeObjs = [])).push(obj); } - } - cm.display.selForContextMenu = null; -} - -function replaceRange(doc, code, from, to, origin) { - if (!to) { to = from; } - if (cmp(to, from) < 0) { var assign; - (assign = [to, from], from = assign[0], to = assign[1], assign); } - if (typeof code == "string") { code = doc.splitLines(code); } - makeChange(doc, {from: from, to: to, text: code, origin: origin}); -} - -// Rebasing/resetting history to deal with externally-sourced changes - -function 
rebaseHistSelSingle(pos, from, to, diff) { - if (to < pos.line) { - pos.line += diff; - } else if (from < pos.line) { - pos.line = from; - pos.ch = 0; - } -} - -// Tries to rebase an array of history events given a change in the -// document. If the change touches the same lines as the event, the -// event, and everything 'behind' it, is discarded. If the change is -// before the event, the event's positions are updated. Uses a -// copy-on-write scheme for the positions, to avoid having to -// reallocate them all on every rebase, but also avoid problems with -// shared position objects being unsafely updated. -function rebaseHistArray(array, from, to, diff) { - for (var i = 0; i < array.length; ++i) { - var sub = array[i], ok = true; - if (sub.ranges) { - if (!sub.copied) { sub = array[i] = sub.deepCopy(); sub.copied = true; } - for (var j = 0; j < sub.ranges.length; j++) { - rebaseHistSelSingle(sub.ranges[j].anchor, from, to, diff); - rebaseHistSelSingle(sub.ranges[j].head, from, to, diff); - } - continue - } - for (var j$1 = 0; j$1 < sub.changes.length; ++j$1) { - var cur = sub.changes[j$1]; - if (to < cur.from.line) { - cur.from = Pos(cur.from.line + diff, cur.from.ch); - cur.to = Pos(cur.to.line + diff, cur.to.ch); - } else if (from <= cur.to.line) { - ok = false; - break - } - } - if (!ok) { - array.splice(0, i + 1); - i = 0; - } - } -} - -function rebaseHist(hist, change) { - var from = change.from.line, to = change.to.line, diff = change.text.length - (to - from) - 1; - rebaseHistArray(hist.done, from, to, diff); - rebaseHistArray(hist.undone, from, to, diff); -} - -// Utility for applying a change to a line by handle or number, -// returning the number and optionally registering the line as -// changed. -function changeLine(doc, handle, changeType, op) { - var no = handle, line = handle; - if (typeof handle == "number") { line = getLine(doc, clipLine(doc, handle)); } - else { no = lineNo(handle); } - if (no == null) { return null } - if (op(line, no) && doc.cm) { regLineChange(doc.cm, no, changeType); } - return line -} - -// The document is represented as a BTree consisting of leaves, with -// chunk of lines in them, and branches, with up to ten leaves or -// other branch nodes below them. The top node is always a branch -// node, and is the document object itself (meaning it has -// additional methods and properties). -// -// All nodes have parent links. The tree is used both to go from -// line numbers to line objects, and to go from objects to numbers. -// It also indexes by height, and is used to convert between height -// and line object, and to find the total height of the document. -// -// See also http://marijnhaverbeke.nl/blog/codemirror-line-tree.html - -function LeafChunk(lines) { - var this$1 = this; - - this.lines = lines; - this.parent = null; - var height = 0; - for (var i = 0; i < lines.length; ++i) { - lines[i].parent = this$1; - height += lines[i].height; - } - this.height = height; -} - -LeafChunk.prototype = { - chunkSize: function chunkSize() { return this.lines.length }, - - // Remove the n lines at offset 'at'. - removeInner: function removeInner(at, n) { - var this$1 = this; - - for (var i = at, e = at + n; i < e; ++i) { - var line = this$1.lines[i]; - this$1.height -= line.height; - cleanUpLine(line); - signalLater(line, "delete"); - } - this.lines.splice(at, n); - }, - - // Helper used to collapse a small branch into a single leaf. 
- collapse: function collapse(lines) { - lines.push.apply(lines, this.lines); - }, - - // Insert the given array of lines at offset 'at', count them as - // having the given height. - insertInner: function insertInner(at, lines, height) { - var this$1 = this; - - this.height += height; - this.lines = this.lines.slice(0, at).concat(lines).concat(this.lines.slice(at)); - for (var i = 0; i < lines.length; ++i) { lines[i].parent = this$1; } - }, - - // Used to iterate over a part of the tree. - iterN: function iterN(at, n, op) { - var this$1 = this; - - for (var e = at + n; at < e; ++at) - { if (op(this$1.lines[at])) { return true } } - } -}; - -function BranchChunk(children) { - var this$1 = this; - - this.children = children; - var size = 0, height = 0; - for (var i = 0; i < children.length; ++i) { - var ch = children[i]; - size += ch.chunkSize(); height += ch.height; - ch.parent = this$1; - } - this.size = size; - this.height = height; - this.parent = null; -} - -BranchChunk.prototype = { - chunkSize: function chunkSize() { return this.size }, - - removeInner: function removeInner(at, n) { - var this$1 = this; - - this.size -= n; - for (var i = 0; i < this.children.length; ++i) { - var child = this$1.children[i], sz = child.chunkSize(); - if (at < sz) { - var rm = Math.min(n, sz - at), oldHeight = child.height; - child.removeInner(at, rm); - this$1.height -= oldHeight - child.height; - if (sz == rm) { this$1.children.splice(i--, 1); child.parent = null; } - if ((n -= rm) == 0) { break } - at = 0; - } else { at -= sz; } - } - // If the result is smaller than 25 lines, ensure that it is a - // single leaf node. - if (this.size - n < 25 && - (this.children.length > 1 || !(this.children[0] instanceof LeafChunk))) { - var lines = []; - this.collapse(lines); - this.children = [new LeafChunk(lines)]; - this.children[0].parent = this; - } - }, - - collapse: function collapse(lines) { - var this$1 = this; - - for (var i = 0; i < this.children.length; ++i) { this$1.children[i].collapse(lines); } - }, - - insertInner: function insertInner(at, lines, height) { - var this$1 = this; - - this.size += lines.length; - this.height += height; - for (var i = 0; i < this.children.length; ++i) { - var child = this$1.children[i], sz = child.chunkSize(); - if (at <= sz) { - child.insertInner(at, lines, height); - if (child.lines && child.lines.length > 50) { - // To avoid memory thrashing when child.lines is huge (e.g. first view of a large file), it's never spliced. - // Instead, small slices are taken. They're taken in order because sequential memory accesses are fastest. - var remaining = child.lines.length % 25 + 25; - for (var pos = remaining; pos < child.lines.length;) { - var leaf = new LeafChunk(child.lines.slice(pos, pos += 25)); - child.height -= leaf.height; - this$1.children.splice(++i, 0, leaf); - leaf.parent = this$1; - } - child.lines = child.lines.slice(0, remaining); - this$1.maybeSpill(); - } - break - } - at -= sz; - } - }, - - // When a node has grown, check whether it should be split. 
- maybeSpill: function maybeSpill() { - if (this.children.length <= 10) { return } - var me = this; - do { - var spilled = me.children.splice(me.children.length - 5, 5); - var sibling = new BranchChunk(spilled); - if (!me.parent) { // Become the parent node - var copy = new BranchChunk(me.children); - copy.parent = me; - me.children = [copy, sibling]; - me = copy; - } else { - me.size -= sibling.size; - me.height -= sibling.height; - var myIndex = indexOf(me.parent.children, me); - me.parent.children.splice(myIndex + 1, 0, sibling); - } - sibling.parent = me.parent; - } while (me.children.length > 10) - me.parent.maybeSpill(); - }, - - iterN: function iterN(at, n, op) { - var this$1 = this; - - for (var i = 0; i < this.children.length; ++i) { - var child = this$1.children[i], sz = child.chunkSize(); - if (at < sz) { - var used = Math.min(n, sz - at); - if (child.iterN(at, used, op)) { return true } - if ((n -= used) == 0) { break } - at = 0; - } else { at -= sz; } - } - } -}; - -// Line widgets are block elements displayed above or below a line. - -var LineWidget = function(doc, node, options) { - var this$1 = this; - - if (options) { for (var opt in options) { if (options.hasOwnProperty(opt)) - { this$1[opt] = options[opt]; } } } - this.doc = doc; - this.node = node; -}; - -LineWidget.prototype.clear = function () { - var this$1 = this; - - var cm = this.doc.cm, ws = this.line.widgets, line = this.line, no = lineNo(line); - if (no == null || !ws) { return } - for (var i = 0; i < ws.length; ++i) { if (ws[i] == this$1) { ws.splice(i--, 1); } } - if (!ws.length) { line.widgets = null; } - var height = widgetHeight(this); - updateLineHeight(line, Math.max(0, line.height - height)); - if (cm) { - runInOp(cm, function () { - adjustScrollWhenAboveVisible(cm, line, -height); - regLineChange(cm, no, "widget"); - }); - signalLater(cm, "lineWidgetCleared", cm, this, no); - } -}; - -LineWidget.prototype.changed = function () { - var this$1 = this; - - var oldH = this.height, cm = this.doc.cm, line = this.line; - this.height = null; - var diff = widgetHeight(this) - oldH; - if (!diff) { return } - updateLineHeight(line, line.height + diff); - if (cm) { - runInOp(cm, function () { - cm.curOp.forceUpdate = true; - adjustScrollWhenAboveVisible(cm, line, diff); - signalLater(cm, "lineWidgetChanged", cm, this$1, lineNo(line)); - }); - } -}; -eventMixin(LineWidget); - -function adjustScrollWhenAboveVisible(cm, line, diff) { - if (heightAtLine(line) < ((cm.curOp && cm.curOp.scrollTop) || cm.doc.scrollTop)) - { addToScrollTop(cm, diff); } -} - -function addLineWidget(doc, handle, node, options) { - var widget = new LineWidget(doc, node, options); - var cm = doc.cm; - if (cm && widget.noHScroll) { cm.display.alignWidgets = true; } - changeLine(doc, handle, "widget", function (line) { - var widgets = line.widgets || (line.widgets = []); - if (widget.insertAt == null) { widgets.push(widget); } - else { widgets.splice(Math.min(widgets.length - 1, Math.max(0, widget.insertAt)), 0, widget); } - widget.line = line; - if (cm && !lineIsHidden(doc, line)) { - var aboveVisible = heightAtLine(line) < doc.scrollTop; - updateLineHeight(line, line.height + widgetHeight(widget)); - if (aboveVisible) { addToScrollTop(cm, widget.height); } - cm.curOp.forceUpdate = true; - } - return true - }); - signalLater(cm, "lineWidgetAdded", cm, widget, typeof handle == "number" ? handle : lineNo(handle)); - return widget -} - -// TEXTMARKERS - -// Created with markText and setBookmark methods. 
A TextMarker is a -// handle that can be used to clear or find a marked position in the -// document. Line objects hold arrays (markedSpans) containing -// {from, to, marker} object pointing to such marker objects, and -// indicating that such a marker is present on that line. Multiple -// lines may point to the same marker when it spans across lines. -// The spans will have null for their from/to properties when the -// marker continues beyond the start/end of the line. Markers have -// links back to the lines they currently touch. - -// Collapsed markers have unique ids, in order to be able to order -// them, which is needed for uniquely determining an outer marker -// when they overlap (they may nest, but not partially overlap). -var nextMarkerId = 0; - -var TextMarker = function(doc, type) { - this.lines = []; - this.type = type; - this.doc = doc; - this.id = ++nextMarkerId; -}; - -// Clear the marker. -TextMarker.prototype.clear = function () { - var this$1 = this; - - if (this.explicitlyCleared) { return } - var cm = this.doc.cm, withOp = cm && !cm.curOp; - if (withOp) { startOperation(cm); } - if (hasHandler(this, "clear")) { - var found = this.find(); - if (found) { signalLater(this, "clear", found.from, found.to); } - } - var min = null, max = null; - for (var i = 0; i < this.lines.length; ++i) { - var line = this$1.lines[i]; - var span = getMarkedSpanFor(line.markedSpans, this$1); - if (cm && !this$1.collapsed) { regLineChange(cm, lineNo(line), "text"); } - else if (cm) { - if (span.to != null) { max = lineNo(line); } - if (span.from != null) { min = lineNo(line); } - } - line.markedSpans = removeMarkedSpan(line.markedSpans, span); - if (span.from == null && this$1.collapsed && !lineIsHidden(this$1.doc, line) && cm) - { updateLineHeight(line, textHeight(cm.display)); } - } - if (cm && this.collapsed && !cm.options.lineWrapping) { for (var i$1 = 0; i$1 < this.lines.length; ++i$1) { - var visual = visualLine(this$1.lines[i$1]), len = lineLength(visual); - if (len > cm.display.maxLineLength) { - cm.display.maxLine = visual; - cm.display.maxLineLength = len; - cm.display.maxLineChanged = true; - } - } } - - if (min != null && cm && this.collapsed) { regChange(cm, min, max + 1); } - this.lines.length = 0; - this.explicitlyCleared = true; - if (this.atomic && this.doc.cantEdit) { - this.doc.cantEdit = false; - if (cm) { reCheckSelection(cm.doc); } - } - if (cm) { signalLater(cm, "markerCleared", cm, this, min, max); } - if (withOp) { endOperation(cm); } - if (this.parent) { this.parent.clear(); } -}; - -// Find the position of the marker in the document. Returns a {from, -// to} object by default. Side can be passed to get a specific side -// -- 0 (both), -1 (left), or 1 (right). When lineObj is true, the -// Pos objects returned contain a line object, rather than a line -// number (used to prevent looking up the same line twice). -TextMarker.prototype.find = function (side, lineObj) { - var this$1 = this; - - if (side == null && this.type == "bookmark") { side = 1; } - var from, to; - for (var i = 0; i < this.lines.length; ++i) { - var line = this$1.lines[i]; - var span = getMarkedSpanFor(line.markedSpans, this$1); - if (span.from != null) { - from = Pos(lineObj ? line : lineNo(line), span.from); - if (side == -1) { return from } - } - if (span.to != null) { - to = Pos(lineObj ? line : lineNo(line), span.to); - if (side == 1) { return to } - } - } - return from && {from: from, to: to} -}; - -// Signals that the marker's widget changed, and surrounding layout -// should be recomputed. 
-TextMarker.prototype.changed = function () { - var this$1 = this; - - var pos = this.find(-1, true), widget = this, cm = this.doc.cm; - if (!pos || !cm) { return } - runInOp(cm, function () { - var line = pos.line, lineN = lineNo(pos.line); - var view = findViewForLine(cm, lineN); - if (view) { - clearLineMeasurementCacheFor(view); - cm.curOp.selectionChanged = cm.curOp.forceUpdate = true; - } - cm.curOp.updateMaxLine = true; - if (!lineIsHidden(widget.doc, line) && widget.height != null) { - var oldHeight = widget.height; - widget.height = null; - var dHeight = widgetHeight(widget) - oldHeight; - if (dHeight) - { updateLineHeight(line, line.height + dHeight); } - } - signalLater(cm, "markerChanged", cm, this$1); - }); -}; - -TextMarker.prototype.attachLine = function (line) { - if (!this.lines.length && this.doc.cm) { - var op = this.doc.cm.curOp; - if (!op.maybeHiddenMarkers || indexOf(op.maybeHiddenMarkers, this) == -1) - { (op.maybeUnhiddenMarkers || (op.maybeUnhiddenMarkers = [])).push(this); } - } - this.lines.push(line); -}; - -TextMarker.prototype.detachLine = function (line) { - this.lines.splice(indexOf(this.lines, line), 1); - if (!this.lines.length && this.doc.cm) { - var op = this.doc.cm.curOp;(op.maybeHiddenMarkers || (op.maybeHiddenMarkers = [])).push(this); - } -}; -eventMixin(TextMarker); - -// Create a marker, wire it up to the right lines, and -function markText(doc, from, to, options, type) { - // Shared markers (across linked documents) are handled separately - // (markTextShared will call out to this again, once per - // document). - if (options && options.shared) { return markTextShared(doc, from, to, options, type) } - // Ensure we are in an operation. - if (doc.cm && !doc.cm.curOp) { return operation(doc.cm, markText)(doc, from, to, options, type) } - - var marker = new TextMarker(doc, type), diff = cmp(from, to); - if (options) { copyObj(options, marker, false); } - // Don't connect empty markers unless clearWhenEmpty is false - if (diff > 0 || diff == 0 && marker.clearWhenEmpty !== false) - { return marker } - if (marker.replacedWith) { - // Showing up as a widget implies collapsed (widget replaces text) - marker.collapsed = true; - marker.widgetNode = eltP("span", [marker.replacedWith], "CodeMirror-widget"); - if (!options.handleMouseEvents) { marker.widgetNode.setAttribute("cm-ignore-events", "true"); } - if (options.insertLeft) { marker.widgetNode.insertLeft = true; } - } - if (marker.collapsed) { - if (conflictingCollapsedRange(doc, from.line, from, to, marker) || - from.line != to.line && conflictingCollapsedRange(doc, to.line, from, to, marker)) - { throw new Error("Inserting collapsed marker partially overlapping an existing one") } - seeCollapsedSpans(); - } - - if (marker.addToHistory) - { addChangeToHistory(doc, {from: from, to: to, origin: "markText"}, doc.sel, NaN); } - - var curLine = from.line, cm = doc.cm, updateMaxLine; - doc.iter(curLine, to.line + 1, function (line) { - if (cm && marker.collapsed && !cm.options.lineWrapping && visualLine(line) == cm.display.maxLine) - { updateMaxLine = true; } - if (marker.collapsed && curLine != from.line) { updateLineHeight(line, 0); } - addMarkedSpan(line, new MarkedSpan(marker, - curLine == from.line ? from.ch : null, - curLine == to.line ? 
to.ch : null)); - ++curLine; - }); - // lineIsHidden depends on the presence of the spans, so needs a second pass - if (marker.collapsed) { doc.iter(from.line, to.line + 1, function (line) { - if (lineIsHidden(doc, line)) { updateLineHeight(line, 0); } - }); } - - if (marker.clearOnEnter) { on(marker, "beforeCursorEnter", function () { return marker.clear(); }); } - - if (marker.readOnly) { - seeReadOnlySpans(); - if (doc.history.done.length || doc.history.undone.length) - { doc.clearHistory(); } - } - if (marker.collapsed) { - marker.id = ++nextMarkerId; - marker.atomic = true; - } - if (cm) { - // Sync editor state - if (updateMaxLine) { cm.curOp.updateMaxLine = true; } - if (marker.collapsed) - { regChange(cm, from.line, to.line + 1); } - else if (marker.className || marker.title || marker.startStyle || marker.endStyle || marker.css) - { for (var i = from.line; i <= to.line; i++) { regLineChange(cm, i, "text"); } } - if (marker.atomic) { reCheckSelection(cm.doc); } - signalLater(cm, "markerAdded", cm, marker); - } - return marker -} - -// SHARED TEXTMARKERS - -// A shared marker spans multiple linked documents. It is -// implemented as a meta-marker-object controlling multiple normal -// markers. -var SharedTextMarker = function(markers, primary) { - var this$1 = this; - - this.markers = markers; - this.primary = primary; - for (var i = 0; i < markers.length; ++i) - { markers[i].parent = this$1; } -}; - -SharedTextMarker.prototype.clear = function () { - var this$1 = this; - - if (this.explicitlyCleared) { return } - this.explicitlyCleared = true; - for (var i = 0; i < this.markers.length; ++i) - { this$1.markers[i].clear(); } - signalLater(this, "clear"); -}; - -SharedTextMarker.prototype.find = function (side, lineObj) { - return this.primary.find(side, lineObj) -}; -eventMixin(SharedTextMarker); - -function markTextShared(doc, from, to, options, type) { - options = copyObj(options); - options.shared = false; - var markers = [markText(doc, from, to, options, type)], primary = markers[0]; - var widget = options.widgetNode; - linkedDocs(doc, function (doc) { - if (widget) { options.widgetNode = widget.cloneNode(true); } - markers.push(markText(doc, clipPos(doc, from), clipPos(doc, to), options, type)); - for (var i = 0; i < doc.linked.length; ++i) - { if (doc.linked[i].isParent) { return } } - primary = lst(markers); - }); - return new SharedTextMarker(markers, primary) -} - -function findSharedMarkers(doc) { - return doc.findMarks(Pos(doc.first, 0), doc.clipPos(Pos(doc.lastLine())), function (m) { return m.parent; }) -} - -function copySharedMarkers(doc, markers) { - for (var i = 0; i < markers.length; i++) { - var marker = markers[i], pos = marker.find(); - var mFrom = doc.clipPos(pos.from), mTo = doc.clipPos(pos.to); - if (cmp(mFrom, mTo)) { - var subMark = markText(doc, mFrom, mTo, marker.primary, marker.primary.type); - marker.markers.push(subMark); - subMark.parent = marker; - } - } -} - -function detachSharedMarkers(markers) { - var loop = function ( i ) { - var marker = markers[i], linked = [marker.primary.doc]; - linkedDocs(marker.primary.doc, function (d) { return linked.push(d); }); - for (var j = 0; j < marker.markers.length; j++) { - var subMarker = marker.markers[j]; - if (indexOf(linked, subMarker.doc) == -1) { - subMarker.parent = null; - marker.markers.splice(j--, 1); - } - } - }; - - for (var i = 0; i < markers.length; i++) loop( i ); -} - -var nextDocId = 0; -var Doc = function(text, mode, firstLine, lineSep, direction) { - if (!(this instanceof Doc)) { return new 
Doc(text, mode, firstLine, lineSep, direction) } - if (firstLine == null) { firstLine = 0; } - - BranchChunk.call(this, [new LeafChunk([new Line("", null)])]); - this.first = firstLine; - this.scrollTop = this.scrollLeft = 0; - this.cantEdit = false; - this.cleanGeneration = 1; - this.modeFrontier = this.highlightFrontier = firstLine; - var start = Pos(firstLine, 0); - this.sel = simpleSelection(start); - this.history = new History(null); - this.id = ++nextDocId; - this.modeOption = mode; - this.lineSep = lineSep; - this.direction = (direction == "rtl") ? "rtl" : "ltr"; - this.extend = false; - - if (typeof text == "string") { text = this.splitLines(text); } - updateDoc(this, {from: start, to: start, text: text}); - setSelection(this, simpleSelection(start), sel_dontScroll); -}; - -Doc.prototype = createObj(BranchChunk.prototype, { - constructor: Doc, - // Iterate over the document. Supports two forms -- with only one - // argument, it calls that for each line in the document. With - // three, it iterates over the range given by the first two (with - // the second being non-inclusive). - iter: function(from, to, op) { - if (op) { this.iterN(from - this.first, to - from, op); } - else { this.iterN(this.first, this.first + this.size, from); } - }, - - // Non-public interface for adding and removing lines. - insert: function(at, lines) { - var height = 0; - for (var i = 0; i < lines.length; ++i) { height += lines[i].height; } - this.insertInner(at - this.first, lines, height); - }, - remove: function(at, n) { this.removeInner(at - this.first, n); }, - - // From here, the methods are part of the public interface. Most - // are also available from CodeMirror (editor) instances. - - getValue: function(lineSep) { - var lines = getLines(this, this.first, this.first + this.size); - if (lineSep === false) { return lines } - return lines.join(lineSep || this.lineSeparator()) - }, - setValue: docMethodOp(function(code) { - var top = Pos(this.first, 0), last = this.first + this.size - 1; - makeChange(this, {from: top, to: Pos(last, getLine(this, last).text.length), - text: this.splitLines(code), origin: "setValue", full: true}, true); - if (this.cm) { scrollToCoords(this.cm, 0, 0); } - setSelection(this, simpleSelection(top), sel_dontScroll); - }), - replaceRange: function(code, from, to, origin) { - from = clipPos(this, from); - to = to ? 
clipPos(this, to) : from; - replaceRange(this, code, from, to, origin); - }, - getRange: function(from, to, lineSep) { - var lines = getBetween(this, clipPos(this, from), clipPos(this, to)); - if (lineSep === false) { return lines } - return lines.join(lineSep || this.lineSeparator()) - }, - - getLine: function(line) {var l = this.getLineHandle(line); return l && l.text}, - - getLineHandle: function(line) {if (isLine(this, line)) { return getLine(this, line) }}, - getLineNumber: function(line) {return lineNo(line)}, - - getLineHandleVisualStart: function(line) { - if (typeof line == "number") { line = getLine(this, line); } - return visualLine(line) - }, - - lineCount: function() {return this.size}, - firstLine: function() {return this.first}, - lastLine: function() {return this.first + this.size - 1}, - - clipPos: function(pos) {return clipPos(this, pos)}, - - getCursor: function(start) { - var range$$1 = this.sel.primary(), pos; - if (start == null || start == "head") { pos = range$$1.head; } - else if (start == "anchor") { pos = range$$1.anchor; } - else if (start == "end" || start == "to" || start === false) { pos = range$$1.to(); } - else { pos = range$$1.from(); } - return pos - }, - listSelections: function() { return this.sel.ranges }, - somethingSelected: function() {return this.sel.somethingSelected()}, - - setCursor: docMethodOp(function(line, ch, options) { - setSimpleSelection(this, clipPos(this, typeof line == "number" ? Pos(line, ch || 0) : line), null, options); - }), - setSelection: docMethodOp(function(anchor, head, options) { - setSimpleSelection(this, clipPos(this, anchor), clipPos(this, head || anchor), options); - }), - extendSelection: docMethodOp(function(head, other, options) { - extendSelection(this, clipPos(this, head), other && clipPos(this, other), options); - }), - extendSelections: docMethodOp(function(heads, options) { - extendSelections(this, clipPosArray(this, heads), options); - }), - extendSelectionsBy: docMethodOp(function(f, options) { - var heads = map(this.sel.ranges, f); - extendSelections(this, clipPosArray(this, heads), options); - }), - setSelections: docMethodOp(function(ranges, primary, options) { - var this$1 = this; - - if (!ranges.length) { return } - var out = []; - for (var i = 0; i < ranges.length; i++) - { out[i] = new Range(clipPos(this$1, ranges[i].anchor), - clipPos(this$1, ranges[i].head)); } - if (primary == null) { primary = Math.min(ranges.length - 1, this.sel.primIndex); } - setSelection(this, normalizeSelection(out, primary), options); - }), - addSelection: docMethodOp(function(anchor, head, options) { - var ranges = this.sel.ranges.slice(0); - ranges.push(new Range(clipPos(this, anchor), clipPos(this, head || anchor))); - setSelection(this, normalizeSelection(ranges, ranges.length - 1), options); - }), - - getSelection: function(lineSep) { - var this$1 = this; - - var ranges = this.sel.ranges, lines; - for (var i = 0; i < ranges.length; i++) { - var sel = getBetween(this$1, ranges[i].from(), ranges[i].to()); - lines = lines ? 
lines.concat(sel) : sel; - } - if (lineSep === false) { return lines } - else { return lines.join(lineSep || this.lineSeparator()) } - }, - getSelections: function(lineSep) { - var this$1 = this; - - var parts = [], ranges = this.sel.ranges; - for (var i = 0; i < ranges.length; i++) { - var sel = getBetween(this$1, ranges[i].from(), ranges[i].to()); - if (lineSep !== false) { sel = sel.join(lineSep || this$1.lineSeparator()); } - parts[i] = sel; - } - return parts - }, - replaceSelection: function(code, collapse, origin) { - var dup = []; - for (var i = 0; i < this.sel.ranges.length; i++) - { dup[i] = code; } - this.replaceSelections(dup, collapse, origin || "+input"); - }, - replaceSelections: docMethodOp(function(code, collapse, origin) { - var this$1 = this; - - var changes = [], sel = this.sel; - for (var i = 0; i < sel.ranges.length; i++) { - var range$$1 = sel.ranges[i]; - changes[i] = {from: range$$1.from(), to: range$$1.to(), text: this$1.splitLines(code[i]), origin: origin}; - } - var newSel = collapse && collapse != "end" && computeReplacedSel(this, changes, collapse); - for (var i$1 = changes.length - 1; i$1 >= 0; i$1--) - { makeChange(this$1, changes[i$1]); } - if (newSel) { setSelectionReplaceHistory(this, newSel); } - else if (this.cm) { ensureCursorVisible(this.cm); } - }), - undo: docMethodOp(function() {makeChangeFromHistory(this, "undo");}), - redo: docMethodOp(function() {makeChangeFromHistory(this, "redo");}), - undoSelection: docMethodOp(function() {makeChangeFromHistory(this, "undo", true);}), - redoSelection: docMethodOp(function() {makeChangeFromHistory(this, "redo", true);}), - - setExtending: function(val) {this.extend = val;}, - getExtending: function() {return this.extend}, - - historySize: function() { - var hist = this.history, done = 0, undone = 0; - for (var i = 0; i < hist.done.length; i++) { if (!hist.done[i].ranges) { ++done; } } - for (var i$1 = 0; i$1 < hist.undone.length; i$1++) { if (!hist.undone[i$1].ranges) { ++undone; } } - return {undo: done, redo: undone} - }, - clearHistory: function() {this.history = new History(this.history.maxGeneration);}, - - markClean: function() { - this.cleanGeneration = this.changeGeneration(true); - }, - changeGeneration: function(forceSplit) { - if (forceSplit) - { this.history.lastOp = this.history.lastSelOp = this.history.lastOrigin = null; } - return this.history.generation - }, - isClean: function (gen) { - return this.history.generation == (gen || this.cleanGeneration) - }, - - getHistory: function() { - return {done: copyHistoryArray(this.history.done), - undone: copyHistoryArray(this.history.undone)} - }, - setHistory: function(histData) { - var hist = this.history = new History(this.history.maxGeneration); - hist.done = copyHistoryArray(histData.done.slice(0), null, true); - hist.undone = copyHistoryArray(histData.undone.slice(0), null, true); - }, - - setGutterMarker: docMethodOp(function(line, gutterID, value) { - return changeLine(this, line, "gutter", function (line) { - var markers = line.gutterMarkers || (line.gutterMarkers = {}); - markers[gutterID] = value; - if (!value && isEmpty(markers)) { line.gutterMarkers = null; } - return true - }) - }), - - clearGutter: docMethodOp(function(gutterID) { - var this$1 = this; - - this.iter(function (line) { - if (line.gutterMarkers && line.gutterMarkers[gutterID]) { - changeLine(this$1, line, "gutter", function () { - line.gutterMarkers[gutterID] = null; - if (isEmpty(line.gutterMarkers)) { line.gutterMarkers = null; } - return true - }); - } - }); - }), - - 
lineInfo: function(line) { - var n; - if (typeof line == "number") { - if (!isLine(this, line)) { return null } - n = line; - line = getLine(this, line); - if (!line) { return null } - } else { - n = lineNo(line); - if (n == null) { return null } - } - return {line: n, handle: line, text: line.text, gutterMarkers: line.gutterMarkers, - textClass: line.textClass, bgClass: line.bgClass, wrapClass: line.wrapClass, - widgets: line.widgets} - }, - - addLineClass: docMethodOp(function(handle, where, cls) { - return changeLine(this, handle, where == "gutter" ? "gutter" : "class", function (line) { - var prop = where == "text" ? "textClass" - : where == "background" ? "bgClass" - : where == "gutter" ? "gutterClass" : "wrapClass"; - if (!line[prop]) { line[prop] = cls; } - else if (classTest(cls).test(line[prop])) { return false } - else { line[prop] += " " + cls; } - return true - }) - }), - removeLineClass: docMethodOp(function(handle, where, cls) { - return changeLine(this, handle, where == "gutter" ? "gutter" : "class", function (line) { - var prop = where == "text" ? "textClass" - : where == "background" ? "bgClass" - : where == "gutter" ? "gutterClass" : "wrapClass"; - var cur = line[prop]; - if (!cur) { return false } - else if (cls == null) { line[prop] = null; } - else { - var found = cur.match(classTest(cls)); - if (!found) { return false } - var end = found.index + found[0].length; - line[prop] = cur.slice(0, found.index) + (!found.index || end == cur.length ? "" : " ") + cur.slice(end) || null; - } - return true - }) - }), - - addLineWidget: docMethodOp(function(handle, node, options) { - return addLineWidget(this, handle, node, options) - }), - removeLineWidget: function(widget) { widget.clear(); }, - - markText: function(from, to, options) { - return markText(this, clipPos(this, from), clipPos(this, to), options, options && options.type || "range") - }, - setBookmark: function(pos, options) { - var realOpts = {replacedWith: options && (options.nodeType == null ? 
options.widget : options), - insertLeft: options && options.insertLeft, - clearWhenEmpty: false, shared: options && options.shared, - handleMouseEvents: options && options.handleMouseEvents}; - pos = clipPos(this, pos); - return markText(this, pos, pos, realOpts, "bookmark") - }, - findMarksAt: function(pos) { - pos = clipPos(this, pos); - var markers = [], spans = getLine(this, pos.line).markedSpans; - if (spans) { for (var i = 0; i < spans.length; ++i) { - var span = spans[i]; - if ((span.from == null || span.from <= pos.ch) && - (span.to == null || span.to >= pos.ch)) - { markers.push(span.marker.parent || span.marker); } - } } - return markers - }, - findMarks: function(from, to, filter) { - from = clipPos(this, from); to = clipPos(this, to); - var found = [], lineNo$$1 = from.line; - this.iter(from.line, to.line + 1, function (line) { - var spans = line.markedSpans; - if (spans) { for (var i = 0; i < spans.length; i++) { - var span = spans[i]; - if (!(span.to != null && lineNo$$1 == from.line && from.ch >= span.to || - span.from == null && lineNo$$1 != from.line || - span.from != null && lineNo$$1 == to.line && span.from >= to.ch) && - (!filter || filter(span.marker))) - { found.push(span.marker.parent || span.marker); } - } } - ++lineNo$$1; - }); - return found - }, - getAllMarks: function() { - var markers = []; - this.iter(function (line) { - var sps = line.markedSpans; - if (sps) { for (var i = 0; i < sps.length; ++i) - { if (sps[i].from != null) { markers.push(sps[i].marker); } } } - }); - return markers - }, - - posFromIndex: function(off) { - var ch, lineNo$$1 = this.first, sepSize = this.lineSeparator().length; - this.iter(function (line) { - var sz = line.text.length + sepSize; - if (sz > off) { ch = off; return true } - off -= sz; - ++lineNo$$1; - }); - return clipPos(this, Pos(lineNo$$1, ch)) - }, - indexFromPos: function (coords) { - coords = clipPos(this, coords); - var index = coords.ch; - if (coords.line < this.first || coords.ch < 0) { return 0 } - var sepSize = this.lineSeparator().length; - this.iter(this.first, coords.line, function (line) { // iter aborts when callback returns a truthy value - index += line.text.length + sepSize; - }); - return index - }, - - copy: function(copyHistory) { - var doc = new Doc(getLines(this, this.first, this.first + this.size), - this.modeOption, this.first, this.lineSep, this.direction); - doc.scrollTop = this.scrollTop; doc.scrollLeft = this.scrollLeft; - doc.sel = this.sel; - doc.extend = false; - if (copyHistory) { - doc.history.undoDepth = this.history.undoDepth; - doc.setHistory(this.getHistory()); - } - return doc - }, - - linkedDoc: function(options) { - if (!options) { options = {}; } - var from = this.first, to = this.first + this.size; - if (options.from != null && options.from > from) { from = options.from; } - if (options.to != null && options.to < to) { to = options.to; } - var copy = new Doc(getLines(this, from, to), options.mode || this.modeOption, from, this.lineSep, this.direction); - if (options.sharedHist) { copy.history = this.history - ; }(this.linked || (this.linked = [])).push({doc: copy, sharedHist: options.sharedHist}); - copy.linked = [{doc: this, isParent: true, sharedHist: options.sharedHist}]; - copySharedMarkers(copy, findSharedMarkers(this)); - return copy - }, - unlinkDoc: function(other) { - var this$1 = this; - - if (other instanceof CodeMirror$1) { other = other.doc; } - if (this.linked) { for (var i = 0; i < this.linked.length; ++i) { - var link = this$1.linked[i]; - if (link.doc != other) { 
continue } - this$1.linked.splice(i, 1); - other.unlinkDoc(this$1); - detachSharedMarkers(findSharedMarkers(this$1)); - break - } } - // If the histories were shared, split them again - if (other.history == this.history) { - var splitIds = [other.id]; - linkedDocs(other, function (doc) { return splitIds.push(doc.id); }, true); - other.history = new History(null); - other.history.done = copyHistoryArray(this.history.done, splitIds); - other.history.undone = copyHistoryArray(this.history.undone, splitIds); - } - }, - iterLinkedDocs: function(f) {linkedDocs(this, f);}, - - getMode: function() {return this.mode}, - getEditor: function() {return this.cm}, - - splitLines: function(str) { - if (this.lineSep) { return str.split(this.lineSep) } - return splitLinesAuto(str) - }, - lineSeparator: function() { return this.lineSep || "\n" }, - - setDirection: docMethodOp(function (dir) { - if (dir != "rtl") { dir = "ltr"; } - if (dir == this.direction) { return } - this.direction = dir; - this.iter(function (line) { return line.order = null; }); - if (this.cm) { directionChanged(this.cm); } - }) -}); - -// Public alias. -Doc.prototype.eachLine = Doc.prototype.iter; - -// Kludge to work around strange IE behavior where it'll sometimes -// re-fire a series of drag-related events right after the drop (#1551) -var lastDrop = 0; - -function onDrop(e) { - var cm = this; - clearDragCursor(cm); - if (signalDOMEvent(cm, e) || eventInWidget(cm.display, e)) - { return } - e_preventDefault(e); - if (ie) { lastDrop = +new Date; } - var pos = posFromMouse(cm, e, true), files = e.dataTransfer.files; - if (!pos || cm.isReadOnly()) { return } - // Might be a file drop, in which case we simply extract the text - // and insert it. - if (files && files.length && window.FileReader && window.File) { - var n = files.length, text = Array(n), read = 0; - var loadFile = function (file, i) { - if (cm.options.allowDropFileTypes && - indexOf(cm.options.allowDropFileTypes, file.type) == -1) - { return } - - var reader = new FileReader; - reader.onload = operation(cm, function () { - var content = reader.result; - if (/[\x00-\x08\x0e-\x1f]{2}/.test(content)) { content = ""; } - text[i] = content; - if (++read == n) { - pos = clipPos(cm.doc, pos); - var change = {from: pos, to: pos, - text: cm.doc.splitLines(text.join(cm.doc.lineSeparator())), - origin: "paste"}; - makeChange(cm.doc, change); - setSelectionReplaceHistory(cm.doc, simpleSelection(pos, changeEnd(change))); - } - }); - reader.readAsText(file); - }; - for (var i = 0; i < n; ++i) { loadFile(files[i], i); } - } else { // Normal drop - // Don't do a replace if the drop happened inside of the selected text. 
- if (cm.state.draggingText && cm.doc.sel.contains(pos) > -1) { - cm.state.draggingText(e); - // Ensure the editor is re-focused - setTimeout(function () { return cm.display.input.focus(); }, 20); - return - } - try { - var text$1 = e.dataTransfer.getData("Text"); - if (text$1) { - var selected; - if (cm.state.draggingText && !cm.state.draggingText.copy) - { selected = cm.listSelections(); } - setSelectionNoUndo(cm.doc, simpleSelection(pos, pos)); - if (selected) { for (var i$1 = 0; i$1 < selected.length; ++i$1) - { replaceRange(cm.doc, "", selected[i$1].anchor, selected[i$1].head, "drag"); } } - cm.replaceSelection(text$1, "around", "paste"); - cm.display.input.focus(); - } - } - catch(e){} - } -} - -function onDragStart(cm, e) { - if (ie && (!cm.state.draggingText || +new Date - lastDrop < 100)) { e_stop(e); return } - if (signalDOMEvent(cm, e) || eventInWidget(cm.display, e)) { return } - - e.dataTransfer.setData("Text", cm.getSelection()); - e.dataTransfer.effectAllowed = "copyMove"; - - // Use dummy image instead of default browsers image. - // Recent Safari (~6.0.2) have a tendency to segfault when this happens, so we don't do it there. - if (e.dataTransfer.setDragImage && !safari) { - var img = elt("img", null, null, "position: fixed; left: 0; top: 0;"); - img.src = "data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw=="; - if (presto) { - img.width = img.height = 1; - cm.display.wrapper.appendChild(img); - // Force a relayout, or Opera won't use our image for some obscure reason - img._top = img.offsetTop; - } - e.dataTransfer.setDragImage(img, 0, 0); - if (presto) { img.parentNode.removeChild(img); } - } -} - -function onDragOver(cm, e) { - var pos = posFromMouse(cm, e); - if (!pos) { return } - var frag = document.createDocumentFragment(); - drawSelectionCursor(cm, pos, frag); - if (!cm.display.dragCursor) { - cm.display.dragCursor = elt("div", null, "CodeMirror-cursors CodeMirror-dragcursors"); - cm.display.lineSpace.insertBefore(cm.display.dragCursor, cm.display.cursorDiv); - } - removeChildrenAndAdd(cm.display.dragCursor, frag); -} - -function clearDragCursor(cm) { - if (cm.display.dragCursor) { - cm.display.lineSpace.removeChild(cm.display.dragCursor); - cm.display.dragCursor = null; - } -} - -// These must be handled carefully, because naively registering a -// handler for each editor will cause the editors to never be -// garbage collected. - -function forEachCodeMirror(f) { - if (!document.getElementsByClassName) { return } - var byClass = document.getElementsByClassName("CodeMirror"); - for (var i = 0; i < byClass.length; i++) { - var cm = byClass[i].CodeMirror; - if (cm) { f(cm); } - } -} - -var globalsRegistered = false; -function ensureGlobalHandlers() { - if (globalsRegistered) { return } - registerGlobalHandlers(); - globalsRegistered = true; -} -function registerGlobalHandlers() { - // When the window resizes, we need to refresh active editors. - var resizeTimer; - on(window, "resize", function () { - if (resizeTimer == null) { resizeTimer = setTimeout(function () { - resizeTimer = null; - forEachCodeMirror(onResize); - }, 100); } - }); - // When the window loses focus, we want to show the editor as blurred - on(window, "blur", function () { return forEachCodeMirror(onBlur); }); -} -// Called when the window resizes -function onResize(cm) { - var d = cm.display; - if (d.lastWrapHeight == d.wrapper.clientHeight && d.lastWrapWidth == d.wrapper.clientWidth) - { return } - // Might be a text scaling operation, clear size caches. 
- d.cachedCharWidth = d.cachedTextHeight = d.cachedPaddingH = null; - d.scrollbarsClipped = false; - cm.setSize(); -} - -var keyNames = { - 3: "Enter", 8: "Backspace", 9: "Tab", 13: "Enter", 16: "Shift", 17: "Ctrl", 18: "Alt", - 19: "Pause", 20: "CapsLock", 27: "Esc", 32: "Space", 33: "PageUp", 34: "PageDown", 35: "End", - 36: "Home", 37: "Left", 38: "Up", 39: "Right", 40: "Down", 44: "PrintScrn", 45: "Insert", - 46: "Delete", 59: ";", 61: "=", 91: "Mod", 92: "Mod", 93: "Mod", - 106: "*", 107: "=", 109: "-", 110: ".", 111: "/", 127: "Delete", - 173: "-", 186: ";", 187: "=", 188: ",", 189: "-", 190: ".", 191: "/", 192: "`", 219: "[", 220: "\\", - 221: "]", 222: "'", 63232: "Up", 63233: "Down", 63234: "Left", 63235: "Right", 63272: "Delete", - 63273: "Home", 63275: "End", 63276: "PageUp", 63277: "PageDown", 63302: "Insert" -}; - -// Number keys -for (var i = 0; i < 10; i++) { keyNames[i + 48] = keyNames[i + 96] = String(i); } -// Alphabetic keys -for (var i$1 = 65; i$1 <= 90; i$1++) { keyNames[i$1] = String.fromCharCode(i$1); } -// Function keys -for (var i$2 = 1; i$2 <= 12; i$2++) { keyNames[i$2 + 111] = keyNames[i$2 + 63235] = "F" + i$2; } - -var keyMap = {}; - -keyMap.basic = { - "Left": "goCharLeft", "Right": "goCharRight", "Up": "goLineUp", "Down": "goLineDown", - "End": "goLineEnd", "Home": "goLineStartSmart", "PageUp": "goPageUp", "PageDown": "goPageDown", - "Delete": "delCharAfter", "Backspace": "delCharBefore", "Shift-Backspace": "delCharBefore", - "Tab": "defaultTab", "Shift-Tab": "indentAuto", - "Enter": "newlineAndIndent", "Insert": "toggleOverwrite", - "Esc": "singleSelection" -}; -// Note that the save and find-related commands aren't defined by -// default. User code or addons can define them. Unknown commands -// are simply ignored. -keyMap.pcDefault = { - "Ctrl-A": "selectAll", "Ctrl-D": "deleteLine", "Ctrl-Z": "undo", "Shift-Ctrl-Z": "redo", "Ctrl-Y": "redo", - "Ctrl-Home": "goDocStart", "Ctrl-End": "goDocEnd", "Ctrl-Up": "goLineUp", "Ctrl-Down": "goLineDown", - "Ctrl-Left": "goGroupLeft", "Ctrl-Right": "goGroupRight", "Alt-Left": "goLineStart", "Alt-Right": "goLineEnd", - "Ctrl-Backspace": "delGroupBefore", "Ctrl-Delete": "delGroupAfter", "Ctrl-S": "save", "Ctrl-F": "find", - "Ctrl-G": "findNext", "Shift-Ctrl-G": "findPrev", "Shift-Ctrl-F": "replace", "Shift-Ctrl-R": "replaceAll", - "Ctrl-[": "indentLess", "Ctrl-]": "indentMore", - "Ctrl-U": "undoSelection", "Shift-Ctrl-U": "redoSelection", "Alt-U": "redoSelection", - fallthrough: "basic" -}; -// Very basic readline/emacs-style bindings, which are standard on Mac. 
-keyMap.emacsy = { - "Ctrl-F": "goCharRight", "Ctrl-B": "goCharLeft", "Ctrl-P": "goLineUp", "Ctrl-N": "goLineDown", - "Alt-F": "goWordRight", "Alt-B": "goWordLeft", "Ctrl-A": "goLineStart", "Ctrl-E": "goLineEnd", - "Ctrl-V": "goPageDown", "Shift-Ctrl-V": "goPageUp", "Ctrl-D": "delCharAfter", "Ctrl-H": "delCharBefore", - "Alt-D": "delWordAfter", "Alt-Backspace": "delWordBefore", "Ctrl-K": "killLine", "Ctrl-T": "transposeChars", - "Ctrl-O": "openLine" -}; -keyMap.macDefault = { - "Cmd-A": "selectAll", "Cmd-D": "deleteLine", "Cmd-Z": "undo", "Shift-Cmd-Z": "redo", "Cmd-Y": "redo", - "Cmd-Home": "goDocStart", "Cmd-Up": "goDocStart", "Cmd-End": "goDocEnd", "Cmd-Down": "goDocEnd", "Alt-Left": "goGroupLeft", - "Alt-Right": "goGroupRight", "Cmd-Left": "goLineLeft", "Cmd-Right": "goLineRight", "Alt-Backspace": "delGroupBefore", - "Ctrl-Alt-Backspace": "delGroupAfter", "Alt-Delete": "delGroupAfter", "Cmd-S": "save", "Cmd-F": "find", - "Cmd-G": "findNext", "Shift-Cmd-G": "findPrev", "Cmd-Alt-F": "replace", "Shift-Cmd-Alt-F": "replaceAll", - "Cmd-[": "indentLess", "Cmd-]": "indentMore", "Cmd-Backspace": "delWrappedLineLeft", "Cmd-Delete": "delWrappedLineRight", - "Cmd-U": "undoSelection", "Shift-Cmd-U": "redoSelection", "Ctrl-Up": "goDocStart", "Ctrl-Down": "goDocEnd", - fallthrough: ["basic", "emacsy"] -}; -keyMap["default"] = mac ? keyMap.macDefault : keyMap.pcDefault; - -// KEYMAP DISPATCH - -function normalizeKeyName(name) { - var parts = name.split(/-(?!$)/); - name = parts[parts.length - 1]; - var alt, ctrl, shift, cmd; - for (var i = 0; i < parts.length - 1; i++) { - var mod = parts[i]; - if (/^(cmd|meta|m)$/i.test(mod)) { cmd = true; } - else if (/^a(lt)?$/i.test(mod)) { alt = true; } - else if (/^(c|ctrl|control)$/i.test(mod)) { ctrl = true; } - else if (/^s(hift)?$/i.test(mod)) { shift = true; } - else { throw new Error("Unrecognized modifier name: " + mod) } - } - if (alt) { name = "Alt-" + name; } - if (ctrl) { name = "Ctrl-" + name; } - if (cmd) { name = "Cmd-" + name; } - if (shift) { name = "Shift-" + name; } - return name -} - -// This is a kludge to keep keymaps mostly working as raw objects -// (backwards compatibility) while at the same time support features -// like normalization and multi-stroke key bindings. It compiles a -// new normalized keymap, and then updates the old object to reflect -// this. -function normalizeKeyMap(keymap) { - var copy = {}; - for (var keyname in keymap) { if (keymap.hasOwnProperty(keyname)) { - var value = keymap[keyname]; - if (/^(name|fallthrough|(de|at)tach)$/.test(keyname)) { continue } - if (value == "...") { delete keymap[keyname]; continue } - - var keys = map(keyname.split(" "), normalizeKeyName); - for (var i = 0; i < keys.length; i++) { - var val = (void 0), name = (void 0); - if (i == keys.length - 1) { - name = keys.join(" "); - val = value; - } else { - name = keys.slice(0, i + 1).join(" "); - val = "..."; - } - var prev = copy[name]; - if (!prev) { copy[name] = val; } - else if (prev != val) { throw new Error("Inconsistent bindings for " + name) } - } - delete keymap[keyname]; - } } - for (var prop in copy) { keymap[prop] = copy[prop]; } - return keymap -} - -function lookupKey(key, map$$1, handle, context) { - map$$1 = getKeyMap(map$$1); - var found = map$$1.call ? 
map$$1.call(key, context) : map$$1[key]; - if (found === false) { return "nothing" } - if (found === "...") { return "multi" } - if (found != null && handle(found)) { return "handled" } - - if (map$$1.fallthrough) { - if (Object.prototype.toString.call(map$$1.fallthrough) != "[object Array]") - { return lookupKey(key, map$$1.fallthrough, handle, context) } - for (var i = 0; i < map$$1.fallthrough.length; i++) { - var result = lookupKey(key, map$$1.fallthrough[i], handle, context); - if (result) { return result } - } - } -} - -// Modifier key presses don't count as 'real' key presses for the -// purpose of keymap fallthrough. -function isModifierKey(value) { - var name = typeof value == "string" ? value : keyNames[value.keyCode]; - return name == "Ctrl" || name == "Alt" || name == "Shift" || name == "Mod" -} - -function addModifierNames(name, event, noShift) { - var base = name; - if (event.altKey && base != "Alt") { name = "Alt-" + name; } - if ((flipCtrlCmd ? event.metaKey : event.ctrlKey) && base != "Ctrl") { name = "Ctrl-" + name; } - if ((flipCtrlCmd ? event.ctrlKey : event.metaKey) && base != "Cmd") { name = "Cmd-" + name; } - if (!noShift && event.shiftKey && base != "Shift") { name = "Shift-" + name; } - return name -} - -// Look up the name of a key as indicated by an event object. -function keyName(event, noShift) { - if (presto && event.keyCode == 34 && event["char"]) { return false } - var name = keyNames[event.keyCode]; - if (name == null || event.altGraphKey) { return false } - return addModifierNames(name, event, noShift) -} - -function getKeyMap(val) { - return typeof val == "string" ? keyMap[val] : val -} - -// Helper for deleting text near the selection(s), used to implement -// backspace, delete, and similar functionality. -function deleteNearSelection(cm, compute) { - var ranges = cm.doc.sel.ranges, kill = []; - // Build up a set of ranges to kill first, merging overlapping - // ranges. - for (var i = 0; i < ranges.length; i++) { - var toKill = compute(ranges[i]); - while (kill.length && cmp(toKill.from, lst(kill).to) <= 0) { - var replaced = kill.pop(); - if (cmp(replaced.from, toKill.from) < 0) { - toKill.from = replaced.from; - break - } - } - kill.push(toKill); - } - // Next, remove those actual ranges. - runInOp(cm, function () { - for (var i = kill.length - 1; i >= 0; i--) - { replaceRange(cm.doc, "", kill[i].from, kill[i].to, "+delete"); } - ensureCursorVisible(cm); - }); -} - -function moveCharLogically(line, ch, dir) { - var target = skipExtendingChars(line.text, ch + dir, dir); - return target < 0 || target > line.text.length ? null : target -} - -function moveLogically(line, start, dir) { - var ch = moveCharLogically(line, start.ch, dir); - return ch == null ? null : new Pos(start.line, ch, dir < 0 ? "after" : "before") -} - -function endOfLine(visually, cm, lineObj, lineNo, dir) { - if (visually) { - var order = getOrder(lineObj, cm.doc.direction); - if (order) { - var part = dir < 0 ? lst(order) : order[0]; - var moveInStorageOrder = (dir < 0) == (part.level == 1); - var sticky = moveInStorageOrder ? "after" : "before"; - var ch; - // With a wrapped rtl chunk (possibly spanning multiple bidi parts), - // it could be that the last bidi part is not on the last visual line, - // since visual lines contain content order-consecutive chunks. - // Thus, in rtl, we are looking for the first (content-order) character - // in the rtl chunk that is on the last line (that is, the same line - // as the last (content-order) character). 
- if (part.level > 0 || cm.doc.direction == "rtl") { - var prep = prepareMeasureForLine(cm, lineObj); - ch = dir < 0 ? lineObj.text.length - 1 : 0; - var targetTop = measureCharPrepared(cm, prep, ch).top; - ch = findFirst(function (ch) { return measureCharPrepared(cm, prep, ch).top == targetTop; }, (dir < 0) == (part.level == 1) ? part.from : part.to - 1, ch); - if (sticky == "before") { ch = moveCharLogically(lineObj, ch, 1); } - } else { ch = dir < 0 ? part.to : part.from; } - return new Pos(lineNo, ch, sticky) - } - } - return new Pos(lineNo, dir < 0 ? lineObj.text.length : 0, dir < 0 ? "before" : "after") -} - -function moveVisually(cm, line, start, dir) { - var bidi = getOrder(line, cm.doc.direction); - if (!bidi) { return moveLogically(line, start, dir) } - if (start.ch >= line.text.length) { - start.ch = line.text.length; - start.sticky = "before"; - } else if (start.ch <= 0) { - start.ch = 0; - start.sticky = "after"; - } - var partPos = getBidiPartAt(bidi, start.ch, start.sticky), part = bidi[partPos]; - if (cm.doc.direction == "ltr" && part.level % 2 == 0 && (dir > 0 ? part.to > start.ch : part.from < start.ch)) { - // Case 1: We move within an ltr part in an ltr editor. Even with wrapped lines, - // nothing interesting happens. - return moveLogically(line, start, dir) - } - - var mv = function (pos, dir) { return moveCharLogically(line, pos instanceof Pos ? pos.ch : pos, dir); }; - var prep; - var getWrappedLineExtent = function (ch) { - if (!cm.options.lineWrapping) { return {begin: 0, end: line.text.length} } - prep = prep || prepareMeasureForLine(cm, line); - return wrappedLineExtentChar(cm, line, prep, ch) - }; - var wrappedLineExtent = getWrappedLineExtent(start.sticky == "before" ? mv(start, -1) : start.ch); - - if (cm.doc.direction == "rtl" || part.level == 1) { - var moveInStorageOrder = (part.level == 1) == (dir < 0); - var ch = mv(start, moveInStorageOrder ? 1 : -1); - if (ch != null && (!moveInStorageOrder ? ch >= part.from && ch >= wrappedLineExtent.begin : ch <= part.to && ch <= wrappedLineExtent.end)) { - // Case 2: We move within an rtl part or in an rtl editor on the same visual line - var sticky = moveInStorageOrder ? "before" : "after"; - return new Pos(start.line, ch, sticky) - } - } - - // Case 3: Could not move within this bidi part in this visual line, so leave - // the current bidi part - - var searchInVisualLine = function (partPos, dir, wrappedLineExtent) { - var getRes = function (ch, moveInStorageOrder) { return moveInStorageOrder - ? new Pos(start.line, mv(ch, 1), "before") - : new Pos(start.line, ch, "after"); }; - - for (; partPos >= 0 && partPos < bidi.length; partPos += dir) { - var part = bidi[partPos]; - var moveInStorageOrder = (dir > 0) == (part.level != 1); - var ch = moveInStorageOrder ? wrappedLineExtent.begin : mv(wrappedLineExtent.end, -1); - if (part.from <= ch && ch < part.to) { return getRes(ch, moveInStorageOrder) } - ch = moveInStorageOrder ? part.from : mv(part.to, -1); - if (wrappedLineExtent.begin <= ch && ch < wrappedLineExtent.end) { return getRes(ch, moveInStorageOrder) } - } - }; - - // Case 3a: Look for other bidi parts on the same visual line - var res = searchInVisualLine(partPos + dir, dir, wrappedLineExtent); - if (res) { return res } - - // Case 3b: Look for other bidi parts on the next visual line - var nextCh = dir > 0 ? wrappedLineExtent.end : mv(wrappedLineExtent.begin, -1); - if (nextCh != null && !(dir > 0 && nextCh == line.text.length)) { - res = searchInVisualLine(dir > 0 ? 
0 : bidi.length - 1, dir, getWrappedLineExtent(nextCh)); - if (res) { return res } - } - - // Case 4: Nowhere to move - return null -} - -// Commands are parameter-less actions that can be performed on an -// editor, mostly used for keybindings. -var commands = { - selectAll: selectAll, - singleSelection: function (cm) { return cm.setSelection(cm.getCursor("anchor"), cm.getCursor("head"), sel_dontScroll); }, - killLine: function (cm) { return deleteNearSelection(cm, function (range) { - if (range.empty()) { - var len = getLine(cm.doc, range.head.line).text.length; - if (range.head.ch == len && range.head.line < cm.lastLine()) - { return {from: range.head, to: Pos(range.head.line + 1, 0)} } - else - { return {from: range.head, to: Pos(range.head.line, len)} } - } else { - return {from: range.from(), to: range.to()} - } - }); }, - deleteLine: function (cm) { return deleteNearSelection(cm, function (range) { return ({ - from: Pos(range.from().line, 0), - to: clipPos(cm.doc, Pos(range.to().line + 1, 0)) - }); }); }, - delLineLeft: function (cm) { return deleteNearSelection(cm, function (range) { return ({ - from: Pos(range.from().line, 0), to: range.from() - }); }); }, - delWrappedLineLeft: function (cm) { return deleteNearSelection(cm, function (range) { - var top = cm.charCoords(range.head, "div").top + 5; - var leftPos = cm.coordsChar({left: 0, top: top}, "div"); - return {from: leftPos, to: range.from()} - }); }, - delWrappedLineRight: function (cm) { return deleteNearSelection(cm, function (range) { - var top = cm.charCoords(range.head, "div").top + 5; - var rightPos = cm.coordsChar({left: cm.display.lineDiv.offsetWidth + 100, top: top}, "div"); - return {from: range.from(), to: rightPos } - }); }, - undo: function (cm) { return cm.undo(); }, - redo: function (cm) { return cm.redo(); }, - undoSelection: function (cm) { return cm.undoSelection(); }, - redoSelection: function (cm) { return cm.redoSelection(); }, - goDocStart: function (cm) { return cm.extendSelection(Pos(cm.firstLine(), 0)); }, - goDocEnd: function (cm) { return cm.extendSelection(Pos(cm.lastLine())); }, - goLineStart: function (cm) { return cm.extendSelectionsBy(function (range) { return lineStart(cm, range.head.line); }, - {origin: "+move", bias: 1} - ); }, - goLineStartSmart: function (cm) { return cm.extendSelectionsBy(function (range) { return lineStartSmart(cm, range.head); }, - {origin: "+move", bias: 1} - ); }, - goLineEnd: function (cm) { return cm.extendSelectionsBy(function (range) { return lineEnd(cm, range.head.line); }, - {origin: "+move", bias: -1} - ); }, - goLineRight: function (cm) { return cm.extendSelectionsBy(function (range) { - var top = cm.cursorCoords(range.head, "div").top + 5; - return cm.coordsChar({left: cm.display.lineDiv.offsetWidth + 100, top: top}, "div") - }, sel_move); }, - goLineLeft: function (cm) { return cm.extendSelectionsBy(function (range) { - var top = cm.cursorCoords(range.head, "div").top + 5; - return cm.coordsChar({left: 0, top: top}, "div") - }, sel_move); }, - goLineLeftSmart: function (cm) { return cm.extendSelectionsBy(function (range) { - var top = cm.cursorCoords(range.head, "div").top + 5; - var pos = cm.coordsChar({left: 0, top: top}, "div"); - if (pos.ch < cm.getLine(pos.line).search(/\S/)) { return lineStartSmart(cm, range.head) } - return pos - }, sel_move); }, - goLineUp: function (cm) { return cm.moveV(-1, "line"); }, - goLineDown: function (cm) { return cm.moveV(1, "line"); }, - goPageUp: function (cm) { return cm.moveV(-1, "page"); }, - goPageDown: function (cm) 
{ return cm.moveV(1, "page"); }, - goCharLeft: function (cm) { return cm.moveH(-1, "char"); }, - goCharRight: function (cm) { return cm.moveH(1, "char"); }, - goColumnLeft: function (cm) { return cm.moveH(-1, "column"); }, - goColumnRight: function (cm) { return cm.moveH(1, "column"); }, - goWordLeft: function (cm) { return cm.moveH(-1, "word"); }, - goGroupRight: function (cm) { return cm.moveH(1, "group"); }, - goGroupLeft: function (cm) { return cm.moveH(-1, "group"); }, - goWordRight: function (cm) { return cm.moveH(1, "word"); }, - delCharBefore: function (cm) { return cm.deleteH(-1, "char"); }, - delCharAfter: function (cm) { return cm.deleteH(1, "char"); }, - delWordBefore: function (cm) { return cm.deleteH(-1, "word"); }, - delWordAfter: function (cm) { return cm.deleteH(1, "word"); }, - delGroupBefore: function (cm) { return cm.deleteH(-1, "group"); }, - delGroupAfter: function (cm) { return cm.deleteH(1, "group"); }, - indentAuto: function (cm) { return cm.indentSelection("smart"); }, - indentMore: function (cm) { return cm.indentSelection("add"); }, - indentLess: function (cm) { return cm.indentSelection("subtract"); }, - insertTab: function (cm) { return cm.replaceSelection("\t"); }, - insertSoftTab: function (cm) { - var spaces = [], ranges = cm.listSelections(), tabSize = cm.options.tabSize; - for (var i = 0; i < ranges.length; i++) { - var pos = ranges[i].from(); - var col = countColumn(cm.getLine(pos.line), pos.ch, tabSize); - spaces.push(spaceStr(tabSize - col % tabSize)); - } - cm.replaceSelections(spaces); - }, - defaultTab: function (cm) { - if (cm.somethingSelected()) { cm.indentSelection("add"); } - else { cm.execCommand("insertTab"); } - }, - // Swap the two chars left and right of each selection's head. - // Move cursor behind the two swapped characters afterwards. - // - // Doesn't consider line feeds a character. - // Doesn't scan more than one line above to find a character. - // Doesn't do anything on an empty line. - // Doesn't do anything with non-empty selections. 
- transposeChars: function (cm) { return runInOp(cm, function () { - var ranges = cm.listSelections(), newSel = []; - for (var i = 0; i < ranges.length; i++) { - if (!ranges[i].empty()) { continue } - var cur = ranges[i].head, line = getLine(cm.doc, cur.line).text; - if (line) { - if (cur.ch == line.length) { cur = new Pos(cur.line, cur.ch - 1); } - if (cur.ch > 0) { - cur = new Pos(cur.line, cur.ch + 1); - cm.replaceRange(line.charAt(cur.ch - 1) + line.charAt(cur.ch - 2), - Pos(cur.line, cur.ch - 2), cur, "+transpose"); - } else if (cur.line > cm.doc.first) { - var prev = getLine(cm.doc, cur.line - 1).text; - if (prev) { - cur = new Pos(cur.line, 1); - cm.replaceRange(line.charAt(0) + cm.doc.lineSeparator() + - prev.charAt(prev.length - 1), - Pos(cur.line - 1, prev.length - 1), cur, "+transpose"); - } - } - } - newSel.push(new Range(cur, cur)); - } - cm.setSelections(newSel); - }); }, - newlineAndIndent: function (cm) { return runInOp(cm, function () { - var sels = cm.listSelections(); - for (var i = sels.length - 1; i >= 0; i--) - { cm.replaceRange(cm.doc.lineSeparator(), sels[i].anchor, sels[i].head, "+input"); } - sels = cm.listSelections(); - for (var i$1 = 0; i$1 < sels.length; i$1++) - { cm.indentLine(sels[i$1].from().line, null, true); } - ensureCursorVisible(cm); - }); }, - openLine: function (cm) { return cm.replaceSelection("\n", "start"); }, - toggleOverwrite: function (cm) { return cm.toggleOverwrite(); } -}; - - -function lineStart(cm, lineN) { - var line = getLine(cm.doc, lineN); - var visual = visualLine(line); - if (visual != line) { lineN = lineNo(visual); } - return endOfLine(true, cm, visual, lineN, 1) -} -function lineEnd(cm, lineN) { - var line = getLine(cm.doc, lineN); - var visual = visualLineEnd(line); - if (visual != line) { lineN = lineNo(visual); } - return endOfLine(true, cm, line, lineN, -1) -} -function lineStartSmart(cm, pos) { - var start = lineStart(cm, pos.line); - var line = getLine(cm.doc, start.line); - var order = getOrder(line, cm.doc.direction); - if (!order || order[0].level == 0) { - var firstNonWS = Math.max(0, line.text.search(/\S/)); - var inWS = pos.line == start.line && pos.ch <= firstNonWS && pos.ch; - return Pos(start.line, inWS ? 0 : firstNonWS, start.sticky) - } - return start -} - -// Run a handler that was bound to a key. -function doHandleBinding(cm, bound, dropShift) { - if (typeof bound == "string") { - bound = commands[bound]; - if (!bound) { return false } - } - // Ensure previous input has been read, so that the handler sees a - // consistent view of the document - cm.display.input.ensurePolled(); - var prevShift = cm.display.shift, done = false; - try { - if (cm.isReadOnly()) { cm.state.suppressEdits = true; } - if (dropShift) { cm.display.shift = false; } - done = bound(cm) != Pass; - } finally { - cm.display.shift = prevShift; - cm.state.suppressEdits = false; - } - return done -} - -function lookupKeyForEditor(cm, name, handle) { - for (var i = 0; i < cm.state.keyMaps.length; i++) { - var result = lookupKey(name, cm.state.keyMaps[i], handle, cm); - if (result) { return result } - } - return (cm.options.extraKeys && lookupKey(name, cm.options.extraKeys, handle, cm)) - || lookupKey(name, cm.options.keyMap, handle, cm) -} - -// Note that, despite the name, this function is also used to check -// for bound mouse clicks. 
- -var stopSeq = new Delayed; -function dispatchKey(cm, name, e, handle) { - var seq = cm.state.keySeq; - if (seq) { - if (isModifierKey(name)) { return "handled" } - stopSeq.set(50, function () { - if (cm.state.keySeq == seq) { - cm.state.keySeq = null; - cm.display.input.reset(); - } - }); - name = seq + " " + name; - } - var result = lookupKeyForEditor(cm, name, handle); - - if (result == "multi") - { cm.state.keySeq = name; } - if (result == "handled") - { signalLater(cm, "keyHandled", cm, name, e); } - - if (result == "handled" || result == "multi") { - e_preventDefault(e); - restartBlink(cm); - } - - if (seq && !result && /\'$/.test(name)) { - e_preventDefault(e); - return true - } - return !!result -} - -// Handle a key from the keydown event. -function handleKeyBinding(cm, e) { - var name = keyName(e, true); - if (!name) { return false } - - if (e.shiftKey && !cm.state.keySeq) { - // First try to resolve full name (including 'Shift-'). Failing - // that, see if there is a cursor-motion command (starting with - // 'go') bound to the keyname without 'Shift-'. - return dispatchKey(cm, "Shift-" + name, e, function (b) { return doHandleBinding(cm, b, true); }) - || dispatchKey(cm, name, e, function (b) { - if (typeof b == "string" ? /^go[A-Z]/.test(b) : b.motion) - { return doHandleBinding(cm, b) } - }) - } else { - return dispatchKey(cm, name, e, function (b) { return doHandleBinding(cm, b); }) - } -} - -// Handle a key from the keypress event -function handleCharBinding(cm, e, ch) { - return dispatchKey(cm, "'" + ch + "'", e, function (b) { return doHandleBinding(cm, b, true); }) -} - -var lastStoppedKey = null; -function onKeyDown(e) { - var cm = this; - cm.curOp.focus = activeElt(); - if (signalDOMEvent(cm, e)) { return } - // IE does strange things with escape. - if (ie && ie_version < 11 && e.keyCode == 27) { e.returnValue = false; } - var code = e.keyCode; - cm.display.shift = code == 16 || e.shiftKey; - var handled = handleKeyBinding(cm, e); - if (presto) { - lastStoppedKey = handled ? code : null; - // Opera has no cut event... we try to at least catch the key combo - if (!handled && code == 88 && !hasCopyEvent && (mac ? e.metaKey : e.ctrlKey)) - { cm.replaceSelection("", null, "cut"); } - } - - // Turn mouse into crosshair when Alt is held on Mac. - if (code == 18 && !/\bCodeMirror-crosshair\b/.test(cm.display.lineDiv.className)) - { showCrossHair(cm); } -} - -function showCrossHair(cm) { - var lineDiv = cm.display.lineDiv; - addClass(lineDiv, "CodeMirror-crosshair"); - - function up(e) { - if (e.keyCode == 18 || !e.altKey) { - rmClass(lineDiv, "CodeMirror-crosshair"); - off(document, "keyup", up); - off(document, "mouseover", up); - } - } - on(document, "keyup", up); - on(document, "mouseover", up); -} - -function onKeyUp(e) { - if (e.keyCode == 16) { this.doc.sel.shift = false; } - signalDOMEvent(this, e); -} - -function onKeyPress(e) { - var cm = this; - if (eventInWidget(cm.display, e) || signalDOMEvent(cm, e) || e.ctrlKey && !e.altKey || mac && e.metaKey) { return } - var keyCode = e.keyCode, charCode = e.charCode; - if (presto && keyCode == lastStoppedKey) {lastStoppedKey = null; e_preventDefault(e); return} - if ((presto && (!e.which || e.which < 10)) && handleKeyBinding(cm, e)) { return } - var ch = String.fromCharCode(charCode == null ? 
keyCode : charCode); - // Some browsers fire keypress events for backspace - if (ch == "\x08") { return } - if (handleCharBinding(cm, e, ch)) { return } - cm.display.input.onKeyPress(e); -} - -var DOUBLECLICK_DELAY = 400; - -var PastClick = function(time, pos, button) { - this.time = time; - this.pos = pos; - this.button = button; -}; - -PastClick.prototype.compare = function (time, pos, button) { - return this.time + DOUBLECLICK_DELAY > time && - cmp(pos, this.pos) == 0 && button == this.button -}; - -var lastClick; -var lastDoubleClick; -function clickRepeat(pos, button) { - var now = +new Date; - if (lastDoubleClick && lastDoubleClick.compare(now, pos, button)) { - lastClick = lastDoubleClick = null; - return "triple" - } else if (lastClick && lastClick.compare(now, pos, button)) { - lastDoubleClick = new PastClick(now, pos, button); - lastClick = null; - return "double" - } else { - lastClick = new PastClick(now, pos, button); - lastDoubleClick = null; - return "single" - } -} - -// A mouse down can be a single click, double click, triple click, -// start of selection drag, start of text drag, new cursor -// (ctrl-click), rectangle drag (alt-drag), or xwin -// middle-click-paste. Or it might be a click on something we should -// not interfere with, such as a scrollbar or widget. -function onMouseDown(e) { - var cm = this, display = cm.display; - if (signalDOMEvent(cm, e) || display.activeTouch && display.input.supportsTouch()) { return } - display.input.ensurePolled(); - display.shift = e.shiftKey; - - if (eventInWidget(display, e)) { - if (!webkit) { - // Briefly turn off draggability, to allow widgets to do - // normal dragging things. - display.scroller.draggable = false; - setTimeout(function () { return display.scroller.draggable = true; }, 100); - } - return - } - if (clickInGutter(cm, e)) { return } - var pos = posFromMouse(cm, e), button = e_button(e), repeat = pos ? clickRepeat(pos, button) : "single"; - window.focus(); - - // #3261: make sure, that we're not starting a second selection - if (button == 1 && cm.state.selectingText) - { cm.state.selectingText(e); } - - if (pos && handleMappedButton(cm, button, pos, repeat, e)) { return } - - if (button == 1) { - if (pos) { leftButtonDown(cm, pos, repeat, e); } - else if (e_target(e) == display.scroller) { e_preventDefault(e); } - } else if (button == 2) { - if (pos) { extendSelection(cm.doc, pos); } - setTimeout(function () { return display.input.focus(); }, 20); - } else if (button == 3) { - if (captureRightClick) { onContextMenu(cm, e); } - else { delayBlurEvent(cm); } - } -} - -function handleMappedButton(cm, button, pos, repeat, event) { - var name = "Click"; - if (repeat == "double") { name = "Double" + name; } - else if (repeat == "triple") { name = "Triple" + name; } - name = (button == 1 ? "Left" : button == 2 ? "Middle" : "Right") + name; - - return dispatchKey(cm, addModifierNames(name, event), event, function (bound) { - if (typeof bound == "string") { bound = commands[bound]; } - if (!bound) { return false } - var done = false; - try { - if (cm.isReadOnly()) { cm.state.suppressEdits = true; } - done = bound(cm, pos) != Pass; - } finally { - cm.state.suppressEdits = false; - } - return done - }) -} - -function configureMouse(cm, repeat, event) { - var option = cm.getOption("configureMouse"); - var value = option ? option(cm, repeat, event) : {}; - if (value.unit == null) { - var rect = chromeOS ? event.shiftKey && event.metaKey : event.altKey; - value.unit = rect ? "rectangle" : repeat == "single" ? 
"char" : repeat == "double" ? "word" : "line"; - } - if (value.extend == null || cm.doc.extend) { value.extend = cm.doc.extend || event.shiftKey; } - if (value.addNew == null) { value.addNew = mac ? event.metaKey : event.ctrlKey; } - if (value.moveOnDrag == null) { value.moveOnDrag = !(mac ? event.altKey : event.ctrlKey); } - return value -} - -function leftButtonDown(cm, pos, repeat, event) { - if (ie) { setTimeout(bind(ensureFocus, cm), 0); } - else { cm.curOp.focus = activeElt(); } - - var behavior = configureMouse(cm, repeat, event); - - var sel = cm.doc.sel, contained; - if (cm.options.dragDrop && dragAndDrop && !cm.isReadOnly() && - repeat == "single" && (contained = sel.contains(pos)) > -1 && - (cmp((contained = sel.ranges[contained]).from(), pos) < 0 || pos.xRel > 0) && - (cmp(contained.to(), pos) > 0 || pos.xRel < 0)) - { leftButtonStartDrag(cm, event, pos, behavior); } - else - { leftButtonSelect(cm, event, pos, behavior); } -} - -// Start a text drag. When it ends, see if any dragging actually -// happen, and treat as a click if it didn't. -function leftButtonStartDrag(cm, event, pos, behavior) { - var display = cm.display, moved = false; - var dragEnd = operation(cm, function (e) { - if (webkit) { display.scroller.draggable = false; } - cm.state.draggingText = false; - off(document, "mouseup", dragEnd); - off(document, "mousemove", mouseMove); - off(display.scroller, "dragstart", dragStart); - off(display.scroller, "drop", dragEnd); - if (!moved) { - e_preventDefault(e); - if (!behavior.addNew) - { extendSelection(cm.doc, pos, null, null, behavior.extend); } - // Work around unexplainable focus problem in IE9 (#2127) and Chrome (#3081) - if (webkit || ie && ie_version == 9) - { setTimeout(function () {document.body.focus(); display.input.focus();}, 20); } - else - { display.input.focus(); } - } - }); - var mouseMove = function(e2) { - moved = moved || Math.abs(event.clientX - e2.clientX) + Math.abs(event.clientY - e2.clientY) >= 10; - }; - var dragStart = function () { return moved = true; }; - // Let the drag handler handle this. - if (webkit) { display.scroller.draggable = true; } - cm.state.draggingText = dragEnd; - dragEnd.copy = !behavior.moveOnDrag; - // IE's approach to draggable - if (display.scroller.dragDrop) { display.scroller.dragDrop(); } - on(document, "mouseup", dragEnd); - on(document, "mousemove", mouseMove); - on(display.scroller, "dragstart", dragStart); - on(display.scroller, "drop", dragEnd); - - delayBlurEvent(cm); - setTimeout(function () { return display.input.focus(); }, 20); -} - -function rangeForUnit(cm, pos, unit) { - if (unit == "char") { return new Range(pos, pos) } - if (unit == "word") { return cm.findWordAt(pos) } - if (unit == "line") { return new Range(Pos(pos.line, 0), clipPos(cm.doc, Pos(pos.line + 1, 0))) } - var result = unit(cm, pos); - return new Range(result.from, result.to) -} - -// Normal selection, as opposed to text dragging. 
-function leftButtonSelect(cm, event, start, behavior) { - var display = cm.display, doc = cm.doc; - e_preventDefault(event); - - var ourRange, ourIndex, startSel = doc.sel, ranges = startSel.ranges; - if (behavior.addNew && !behavior.extend) { - ourIndex = doc.sel.contains(start); - if (ourIndex > -1) - { ourRange = ranges[ourIndex]; } - else - { ourRange = new Range(start, start); } - } else { - ourRange = doc.sel.primary(); - ourIndex = doc.sel.primIndex; - } - - if (behavior.unit == "rectangle") { - if (!behavior.addNew) { ourRange = new Range(start, start); } - start = posFromMouse(cm, event, true, true); - ourIndex = -1; - } else { - var range$$1 = rangeForUnit(cm, start, behavior.unit); - if (behavior.extend) - { ourRange = extendRange(ourRange, range$$1.anchor, range$$1.head, behavior.extend); } - else - { ourRange = range$$1; } - } - - if (!behavior.addNew) { - ourIndex = 0; - setSelection(doc, new Selection([ourRange], 0), sel_mouse); - startSel = doc.sel; - } else if (ourIndex == -1) { - ourIndex = ranges.length; - setSelection(doc, normalizeSelection(ranges.concat([ourRange]), ourIndex), - {scroll: false, origin: "*mouse"}); - } else if (ranges.length > 1 && ranges[ourIndex].empty() && behavior.unit == "char" && !behavior.extend) { - setSelection(doc, normalizeSelection(ranges.slice(0, ourIndex).concat(ranges.slice(ourIndex + 1)), 0), - {scroll: false, origin: "*mouse"}); - startSel = doc.sel; - } else { - replaceOneSelection(doc, ourIndex, ourRange, sel_mouse); - } - - var lastPos = start; - function extendTo(pos) { - if (cmp(lastPos, pos) == 0) { return } - lastPos = pos; - - if (behavior.unit == "rectangle") { - var ranges = [], tabSize = cm.options.tabSize; - var startCol = countColumn(getLine(doc, start.line).text, start.ch, tabSize); - var posCol = countColumn(getLine(doc, pos.line).text, pos.ch, tabSize); - var left = Math.min(startCol, posCol), right = Math.max(startCol, posCol); - for (var line = Math.min(start.line, pos.line), end = Math.min(cm.lastLine(), Math.max(start.line, pos.line)); - line <= end; line++) { - var text = getLine(doc, line).text, leftPos = findColumn(text, left, tabSize); - if (left == right) - { ranges.push(new Range(Pos(line, leftPos), Pos(line, leftPos))); } - else if (text.length > leftPos) - { ranges.push(new Range(Pos(line, leftPos), Pos(line, findColumn(text, right, tabSize)))); } - } - if (!ranges.length) { ranges.push(new Range(start, start)); } - setSelection(doc, normalizeSelection(startSel.ranges.slice(0, ourIndex).concat(ranges), ourIndex), - {origin: "*mouse", scroll: false}); - cm.scrollIntoView(pos); - } else { - var oldRange = ourRange; - var range$$1 = rangeForUnit(cm, pos, behavior.unit); - var anchor = oldRange.anchor, head; - if (cmp(range$$1.anchor, anchor) > 0) { - head = range$$1.head; - anchor = minPos(oldRange.from(), range$$1.anchor); - } else { - head = range$$1.anchor; - anchor = maxPos(oldRange.to(), range$$1.head); - } - var ranges$1 = startSel.ranges.slice(0); - ranges$1[ourIndex] = bidiSimplify(cm, new Range(clipPos(doc, anchor), head)); - setSelection(doc, normalizeSelection(ranges$1, ourIndex), sel_mouse); - } - } - - var editorSize = display.wrapper.getBoundingClientRect(); - // Used to ensure timeout re-tries don't fire when another extend - // happened in the meantime (clearTimeout isn't reliable -- at - // least on Chrome, the timeouts still happen even when cleared, - // if the clear happens after their scheduled firing time). 
- var counter = 0; - - function extend(e) { - var curCount = ++counter; - var cur = posFromMouse(cm, e, true, behavior.unit == "rectangle"); - if (!cur) { return } - if (cmp(cur, lastPos) != 0) { - cm.curOp.focus = activeElt(); - extendTo(cur); - var visible = visibleLines(display, doc); - if (cur.line >= visible.to || cur.line < visible.from) - { setTimeout(operation(cm, function () {if (counter == curCount) { extend(e); }}), 150); } - } else { - var outside = e.clientY < editorSize.top ? -20 : e.clientY > editorSize.bottom ? 20 : 0; - if (outside) { setTimeout(operation(cm, function () { - if (counter != curCount) { return } - display.scroller.scrollTop += outside; - extend(e); - }), 50); } - } - } - - function done(e) { - cm.state.selectingText = false; - counter = Infinity; - e_preventDefault(e); - display.input.focus(); - off(document, "mousemove", move); - off(document, "mouseup", up); - doc.history.lastSelOrigin = null; - } - - var move = operation(cm, function (e) { - if (!e_button(e)) { done(e); } - else { extend(e); } - }); - var up = operation(cm, done); - cm.state.selectingText = up; - on(document, "mousemove", move); - on(document, "mouseup", up); -} - -// Used when mouse-selecting to adjust the anchor to the proper side -// of a bidi jump depending on the visual position of the head. -function bidiSimplify(cm, range$$1) { - var anchor = range$$1.anchor; - var head = range$$1.head; - var anchorLine = getLine(cm.doc, anchor.line); - if (cmp(anchor, head) == 0 && anchor.sticky == head.sticky) { return range$$1 } - var order = getOrder(anchorLine); - if (!order) { return range$$1 } - var index = getBidiPartAt(order, anchor.ch, anchor.sticky), part = order[index]; - if (part.from != anchor.ch && part.to != anchor.ch) { return range$$1 } - var boundary = index + ((part.from == anchor.ch) == (part.level != 1) ? 0 : 1); - if (boundary == 0 || boundary == order.length) { return range$$1 } - - // Compute the relative visual position of the head compared to the - // anchor (<0 is to the left, >0 to the right) - var leftSide; - if (head.line != anchor.line) { - leftSide = (head.line - anchor.line) * (cm.doc.direction == "ltr" ? 1 : -1) > 0; - } else { - var headIndex = getBidiPartAt(order, head.ch, head.sticky); - var dir = headIndex - index || (head.ch - anchor.ch) * (part.level == 1 ? -1 : 1); - if (headIndex == boundary - 1 || headIndex == boundary) - { leftSide = dir < 0; } - else - { leftSide = dir > 0; } - } - - var usePart = order[boundary + (leftSide ? -1 : 0)]; - var from = leftSide == (usePart.level == 1); - var ch = from ? usePart.from : usePart.to, sticky = from ? "after" : "before"; - return anchor.ch == ch && anchor.sticky == sticky ? range$$1 : new Range(new Pos(anchor.line, ch, sticky), head) -} - - -// Determines whether an event happened in the gutter, and fires the -// handlers for the corresponding event. 
-function gutterEvent(cm, e, type, prevent) { - var mX, mY; - if (e.touches) { - mX = e.touches[0].clientX; - mY = e.touches[0].clientY; - } else { - try { mX = e.clientX; mY = e.clientY; } - catch(e) { return false } - } - if (mX >= Math.floor(cm.display.gutters.getBoundingClientRect().right)) { return false } - if (prevent) { e_preventDefault(e); } - - var display = cm.display; - var lineBox = display.lineDiv.getBoundingClientRect(); - - if (mY > lineBox.bottom || !hasHandler(cm, type)) { return e_defaultPrevented(e) } - mY -= lineBox.top - display.viewOffset; - - for (var i = 0; i < cm.options.gutters.length; ++i) { - var g = display.gutters.childNodes[i]; - if (g && g.getBoundingClientRect().right >= mX) { - var line = lineAtHeight(cm.doc, mY); - var gutter = cm.options.gutters[i]; - signal(cm, type, cm, line, gutter, e); - return e_defaultPrevented(e) - } - } -} - -function clickInGutter(cm, e) { - return gutterEvent(cm, e, "gutterClick", true) -} - -// CONTEXT MENU HANDLING - -// To make the context menu work, we need to briefly unhide the -// textarea (making it as unobtrusive as possible) to let the -// right-click take effect on it. -function onContextMenu(cm, e) { - if (eventInWidget(cm.display, e) || contextMenuInGutter(cm, e)) { return } - if (signalDOMEvent(cm, e, "contextmenu")) { return } - cm.display.input.onContextMenu(e); -} - -function contextMenuInGutter(cm, e) { - if (!hasHandler(cm, "gutterContextMenu")) { return false } - return gutterEvent(cm, e, "gutterContextMenu", false) -} - -function themeChanged(cm) { - cm.display.wrapper.className = cm.display.wrapper.className.replace(/\s*cm-s-\S+/g, "") + - cm.options.theme.replace(/(^|\s)\s*/g, " cm-s-"); - clearCaches(cm); -} - -var Init = {toString: function(){return "CodeMirror.Init"}}; - -var defaults = {}; -var optionHandlers = {}; - -function defineOptions(CodeMirror) { - var optionHandlers = CodeMirror.optionHandlers; - - function option(name, deflt, handle, notOnInit) { - CodeMirror.defaults[name] = deflt; - if (handle) { optionHandlers[name] = - notOnInit ? function (cm, val, old) {if (old != Init) { handle(cm, val, old); }} : handle; } - } - - CodeMirror.defineOption = option; - - // Passed to option handlers when there is no old value. - CodeMirror.Init = Init; - - // These two are, on init, called from the constructor because they - // have to be initialized before the editor can start at all. - option("value", "", function (cm, val) { return cm.setValue(val); }, true); - option("mode", null, function (cm, val) { - cm.doc.modeOption = val; - loadMode(cm); - }, true); - - option("indentUnit", 2, loadMode, true); - option("indentWithTabs", false); - option("smartIndent", true); - option("tabSize", 4, function (cm) { - resetModeState(cm); - clearCaches(cm); - regChange(cm); - }, true); - option("lineSeparator", null, function (cm, val) { - cm.doc.lineSep = val; - if (!val) { return } - var newBreaks = [], lineNo = cm.doc.first; - cm.doc.iter(function (line) { - for (var pos = 0;;) { - var found = line.text.indexOf(val, pos); - if (found == -1) { break } - pos = found + val.length; - newBreaks.push(Pos(lineNo, found)); - } - lineNo++; - }); - for (var i = newBreaks.length - 1; i >= 0; i--) - { replaceRange(cm.doc, val, newBreaks[i], Pos(newBreaks[i].line, newBreaks[i].ch + val.length)); } - }); - option("specialChars", /[\u0000-\u001f\u007f-\u009f\u00ad\u061c\u200b-\u200f\u2028\u2029\ufeff]/g, function (cm, val, old) { - cm.state.specialChars = new RegExp(val.source + (val.test("\t") ? 
"" : "|\t"), "g"); - if (old != Init) { cm.refresh(); } - }); - option("specialCharPlaceholder", defaultSpecialCharPlaceholder, function (cm) { return cm.refresh(); }, true); - option("electricChars", true); - option("inputStyle", mobile ? "contenteditable" : "textarea", function () { - throw new Error("inputStyle can not (yet) be changed in a running editor") // FIXME - }, true); - option("spellcheck", false, function (cm, val) { return cm.getInputField().spellcheck = val; }, true); - option("rtlMoveVisually", !windows); - option("wholeLineUpdateBefore", true); - - option("theme", "default", function (cm) { - themeChanged(cm); - guttersChanged(cm); - }, true); - option("keyMap", "default", function (cm, val, old) { - var next = getKeyMap(val); - var prev = old != Init && getKeyMap(old); - if (prev && prev.detach) { prev.detach(cm, next); } - if (next.attach) { next.attach(cm, prev || null); } - }); - option("extraKeys", null); - option("configureMouse", null); - - option("lineWrapping", false, wrappingChanged, true); - option("gutters", [], function (cm) { - setGuttersForLineNumbers(cm.options); - guttersChanged(cm); - }, true); - option("fixedGutter", true, function (cm, val) { - cm.display.gutters.style.left = val ? compensateForHScroll(cm.display) + "px" : "0"; - cm.refresh(); - }, true); - option("coverGutterNextToScrollbar", false, function (cm) { return updateScrollbars(cm); }, true); - option("scrollbarStyle", "native", function (cm) { - initScrollbars(cm); - updateScrollbars(cm); - cm.display.scrollbars.setScrollTop(cm.doc.scrollTop); - cm.display.scrollbars.setScrollLeft(cm.doc.scrollLeft); - }, true); - option("lineNumbers", false, function (cm) { - setGuttersForLineNumbers(cm.options); - guttersChanged(cm); - }, true); - option("firstLineNumber", 1, guttersChanged, true); - option("lineNumberFormatter", function (integer) { return integer; }, guttersChanged, true); - option("showCursorWhenSelecting", false, updateSelection, true); - - option("resetSelectionOnContextMenu", true); - option("lineWiseCopyCut", true); - option("pasteLinesPerSelection", true); - - option("readOnly", false, function (cm, val) { - if (val == "nocursor") { - onBlur(cm); - cm.display.input.blur(); - } - cm.display.input.readOnlyChanged(val); - }); - option("disableInput", false, function (cm, val) {if (!val) { cm.display.input.reset(); }}, true); - option("dragDrop", true, dragDropChanged); - option("allowDropFileTypes", null); - - option("cursorBlinkRate", 530); - option("cursorScrollMargin", 0); - option("cursorHeight", 1, updateSelection, true); - option("singleCursorHeightPerLine", true, updateSelection, true); - option("workTime", 100); - option("workDelay", 100); - option("flattenSpans", true, resetModeState, true); - option("addModeClass", false, resetModeState, true); - option("pollInterval", 100); - option("undoDepth", 200, function (cm, val) { return cm.doc.history.undoDepth = val; }); - option("historyEventDelay", 1250); - option("viewportMargin", 10, function (cm) { return cm.refresh(); }, true); - option("maxHighlightLength", 10000, resetModeState, true); - option("moveInputWithCursor", true, function (cm, val) { - if (!val) { cm.display.input.resetPosition(); } - }); - - option("tabindex", null, function (cm, val) { return cm.display.input.getField().tabIndex = val || ""; }); - option("autofocus", null); - option("direction", "ltr", function (cm, val) { return cm.doc.setDirection(val); }, true); -} - -function guttersChanged(cm) { - updateGutters(cm); - regChange(cm); - 
alignHorizontally(cm); -} - -function dragDropChanged(cm, value, old) { - var wasOn = old && old != Init; - if (!value != !wasOn) { - var funcs = cm.display.dragFunctions; - var toggle = value ? on : off; - toggle(cm.display.scroller, "dragstart", funcs.start); - toggle(cm.display.scroller, "dragenter", funcs.enter); - toggle(cm.display.scroller, "dragover", funcs.over); - toggle(cm.display.scroller, "dragleave", funcs.leave); - toggle(cm.display.scroller, "drop", funcs.drop); - } -} - -function wrappingChanged(cm) { - if (cm.options.lineWrapping) { - addClass(cm.display.wrapper, "CodeMirror-wrap"); - cm.display.sizer.style.minWidth = ""; - cm.display.sizerWidth = null; - } else { - rmClass(cm.display.wrapper, "CodeMirror-wrap"); - findMaxLine(cm); - } - estimateLineHeights(cm); - regChange(cm); - clearCaches(cm); - setTimeout(function () { return updateScrollbars(cm); }, 100); -} - -// A CodeMirror instance represents an editor. This is the object -// that user code is usually dealing with. - -function CodeMirror$1(place, options) { - var this$1 = this; - - if (!(this instanceof CodeMirror$1)) { return new CodeMirror$1(place, options) } - - this.options = options = options ? copyObj(options) : {}; - // Determine effective options based on given values and defaults. - copyObj(defaults, options, false); - setGuttersForLineNumbers(options); - - var doc = options.value; - if (typeof doc == "string") { doc = new Doc(doc, options.mode, null, options.lineSeparator, options.direction); } - this.doc = doc; - - var input = new CodeMirror$1.inputStyles[options.inputStyle](this); - var display = this.display = new Display(place, doc, input); - display.wrapper.CodeMirror = this; - updateGutters(this); - themeChanged(this); - if (options.lineWrapping) - { this.display.wrapper.className += " CodeMirror-wrap"; } - initScrollbars(this); - - this.state = { - keyMaps: [], // stores maps added by addKeyMap - overlays: [], // highlighting overlays, as added by addOverlay - modeGen: 0, // bumped when mode/overlay changes, used to invalidate highlighting info - overwrite: false, - delayingBlurEvent: false, - focused: false, - suppressEdits: false, // used to disable editing during key handlers when in readOnly mode - pasteIncoming: false, cutIncoming: false, // help recognize paste/cut edits in input.poll - selectingText: false, - draggingText: false, - highlight: new Delayed(), // stores highlight worker timeout - keySeq: null, // Unfinished key sequence - specialChars: null - }; - - if (options.autofocus && !mobile) { display.input.focus(); } - - // Override magic textarea content restore that IE sometimes does - // on our hidden textarea on reload - if (ie && ie_version < 11) { setTimeout(function () { return this$1.display.input.reset(true); }, 20); } - - registerEventHandlers(this); - ensureGlobalHandlers(); - - startOperation(this); - this.curOp.forceUpdate = true; - attachDoc(this, doc); - - if ((options.autofocus && !mobile) || this.hasFocus()) - { setTimeout(bind(onFocus, this), 20); } - else - { onBlur(this); } - - for (var opt in optionHandlers) { if (optionHandlers.hasOwnProperty(opt)) - { optionHandlers[opt](this$1, options[opt], Init); } } - maybeUpdateLineNumberWidth(this); - if (options.finishInit) { options.finishInit(this); } - for (var i = 0; i < initHooks.length; ++i) { initHooks[i](this$1); } - endOperation(this); - // Suppress optimizelegibility in Webkit, since it breaks text - // measuring on line wrapping boundaries. 
- if (webkit && options.lineWrapping && - getComputedStyle(display.lineDiv).textRendering == "optimizelegibility") - { display.lineDiv.style.textRendering = "auto"; } -} - -// The default configuration options. -CodeMirror$1.defaults = defaults; -// Functions to run when options are changed. -CodeMirror$1.optionHandlers = optionHandlers; - -// Attach the necessary event handlers when initializing the editor -function registerEventHandlers(cm) { - var d = cm.display; - on(d.scroller, "mousedown", operation(cm, onMouseDown)); - // Older IE's will not fire a second mousedown for a double click - if (ie && ie_version < 11) - { on(d.scroller, "dblclick", operation(cm, function (e) { - if (signalDOMEvent(cm, e)) { return } - var pos = posFromMouse(cm, e); - if (!pos || clickInGutter(cm, e) || eventInWidget(cm.display, e)) { return } - e_preventDefault(e); - var word = cm.findWordAt(pos); - extendSelection(cm.doc, word.anchor, word.head); - })); } - else - { on(d.scroller, "dblclick", function (e) { return signalDOMEvent(cm, e) || e_preventDefault(e); }); } - // Some browsers fire contextmenu *after* opening the menu, at - // which point we can't mess with it anymore. Context menu is - // handled in onMouseDown for these browsers. - if (!captureRightClick) { on(d.scroller, "contextmenu", function (e) { return onContextMenu(cm, e); }); } - - // Used to suppress mouse event handling when a touch happens - var touchFinished, prevTouch = {end: 0}; - function finishTouch() { - if (d.activeTouch) { - touchFinished = setTimeout(function () { return d.activeTouch = null; }, 1000); - prevTouch = d.activeTouch; - prevTouch.end = +new Date; - } - } - function isMouseLikeTouchEvent(e) { - if (e.touches.length != 1) { return false } - var touch = e.touches[0]; - return touch.radiusX <= 1 && touch.radiusY <= 1 - } - function farAway(touch, other) { - if (other.left == null) { return true } - var dx = other.left - touch.left, dy = other.top - touch.top; - return dx * dx + dy * dy > 20 * 20 - } - on(d.scroller, "touchstart", function (e) { - if (!signalDOMEvent(cm, e) && !isMouseLikeTouchEvent(e) && !clickInGutter(cm, e)) { - d.input.ensurePolled(); - clearTimeout(touchFinished); - var now = +new Date; - d.activeTouch = {start: now, moved: false, - prev: now - prevTouch.end <= 300 ? prevTouch : null}; - if (e.touches.length == 1) { - d.activeTouch.left = e.touches[0].pageX; - d.activeTouch.top = e.touches[0].pageY; - } - } - }); - on(d.scroller, "touchmove", function () { - if (d.activeTouch) { d.activeTouch.moved = true; } - }); - on(d.scroller, "touchend", function (e) { - var touch = d.activeTouch; - if (touch && !eventInWidget(d, e) && touch.left != null && - !touch.moved && new Date - touch.start < 300) { - var pos = cm.coordsChar(d.activeTouch, "page"), range; - if (!touch.prev || farAway(touch, touch.prev)) // Single tap - { range = new Range(pos, pos); } - else if (!touch.prev.prev || farAway(touch, touch.prev.prev)) // Double tap - { range = cm.findWordAt(pos); } - else // Triple tap - { range = new Range(Pos(pos.line, 0), clipPos(cm.doc, Pos(pos.line + 1, 0))); } - cm.setSelection(range.anchor, range.head); - cm.focus(); - e_preventDefault(e); - } - finishTouch(); - }); - on(d.scroller, "touchcancel", finishTouch); - - // Sync scrolling between fake scrollbars and real scrollable - // area, ensure viewport is updated when scrolling. 
- on(d.scroller, "scroll", function () { - if (d.scroller.clientHeight) { - updateScrollTop(cm, d.scroller.scrollTop); - setScrollLeft(cm, d.scroller.scrollLeft, true); - signal(cm, "scroll", cm); - } - }); - - // Listen to wheel events in order to try and update the viewport on time. - on(d.scroller, "mousewheel", function (e) { return onScrollWheel(cm, e); }); - on(d.scroller, "DOMMouseScroll", function (e) { return onScrollWheel(cm, e); }); - - // Prevent wrapper from ever scrolling - on(d.wrapper, "scroll", function () { return d.wrapper.scrollTop = d.wrapper.scrollLeft = 0; }); - - d.dragFunctions = { - enter: function (e) {if (!signalDOMEvent(cm, e)) { e_stop(e); }}, - over: function (e) {if (!signalDOMEvent(cm, e)) { onDragOver(cm, e); e_stop(e); }}, - start: function (e) { return onDragStart(cm, e); }, - drop: operation(cm, onDrop), - leave: function (e) {if (!signalDOMEvent(cm, e)) { clearDragCursor(cm); }} - }; - - var inp = d.input.getField(); - on(inp, "keyup", function (e) { return onKeyUp.call(cm, e); }); - on(inp, "keydown", operation(cm, onKeyDown)); - on(inp, "keypress", operation(cm, onKeyPress)); - on(inp, "focus", function (e) { return onFocus(cm, e); }); - on(inp, "blur", function (e) { return onBlur(cm, e); }); -} - -var initHooks = []; -CodeMirror$1.defineInitHook = function (f) { return initHooks.push(f); }; - -// Indent the given line. The how parameter can be "smart", -// "add"/null, "subtract", or "prev". When aggressive is false -// (typically set to true for forced single-line indents), empty -// lines are not indented, and places where the mode returns Pass -// are left alone. -function indentLine(cm, n, how, aggressive) { - var doc = cm.doc, state; - if (how == null) { how = "add"; } - if (how == "smart") { - // Fall back to "prev" when the mode doesn't have an indentation - // method. - if (!doc.mode.indent) { how = "prev"; } - else { state = getContextBefore(cm, n).state; } - } - - var tabSize = cm.options.tabSize; - var line = getLine(doc, n), curSpace = countColumn(line.text, null, tabSize); - if (line.stateAfter) { line.stateAfter = null; } - var curSpaceString = line.text.match(/^\s*/)[0], indentation; - if (!aggressive && !/\S/.test(line.text)) { - indentation = 0; - how = "not"; - } else if (how == "smart") { - indentation = doc.mode.indent(state, line.text.slice(curSpaceString.length), line.text); - if (indentation == Pass || indentation > 150) { - if (!aggressive) { return } - how = "prev"; - } - } - if (how == "prev") { - if (n > doc.first) { indentation = countColumn(getLine(doc, n-1).text, null, tabSize); } - else { indentation = 0; } - } else if (how == "add") { - indentation = curSpace + cm.options.indentUnit; - } else if (how == "subtract") { - indentation = curSpace - cm.options.indentUnit; - } else if (typeof how == "number") { - indentation = curSpace + how; - } - indentation = Math.max(0, indentation); - - var indentString = "", pos = 0; - if (cm.options.indentWithTabs) - { for (var i = Math.floor(indentation / tabSize); i; --i) {pos += tabSize; indentString += "\t";} } - if (pos < indentation) { indentString += spaceStr(indentation - pos); } - - if (indentString != curSpaceString) { - replaceRange(doc, indentString, Pos(n, 0), Pos(n, curSpaceString.length), "+input"); - line.stateAfter = null; - return true - } else { - // Ensure that, if the cursor was in the whitespace at the start - // of the line, it is moved to the end of that space. 
- for (var i$1 = 0; i$1 < doc.sel.ranges.length; i$1++) { - var range = doc.sel.ranges[i$1]; - if (range.head.line == n && range.head.ch < curSpaceString.length) { - var pos$1 = Pos(n, curSpaceString.length); - replaceOneSelection(doc, i$1, new Range(pos$1, pos$1)); - break - } - } - } -} - -// This will be set to a {lineWise: bool, text: [string]} object, so -// that, when pasting, we know what kind of selections the copied -// text was made out of. -var lastCopied = null; - -function setLastCopied(newLastCopied) { - lastCopied = newLastCopied; -} - -function applyTextInput(cm, inserted, deleted, sel, origin) { - var doc = cm.doc; - cm.display.shift = false; - if (!sel) { sel = doc.sel; } - - var paste = cm.state.pasteIncoming || origin == "paste"; - var textLines = splitLinesAuto(inserted), multiPaste = null; - // When pasing N lines into N selections, insert one line per selection - if (paste && sel.ranges.length > 1) { - if (lastCopied && lastCopied.text.join("\n") == inserted) { - if (sel.ranges.length % lastCopied.text.length == 0) { - multiPaste = []; - for (var i = 0; i < lastCopied.text.length; i++) - { multiPaste.push(doc.splitLines(lastCopied.text[i])); } - } - } else if (textLines.length == sel.ranges.length && cm.options.pasteLinesPerSelection) { - multiPaste = map(textLines, function (l) { return [l]; }); - } - } - - var updateInput; - // Normal behavior is to insert the new text into every selection - for (var i$1 = sel.ranges.length - 1; i$1 >= 0; i$1--) { - var range$$1 = sel.ranges[i$1]; - var from = range$$1.from(), to = range$$1.to(); - if (range$$1.empty()) { - if (deleted && deleted > 0) // Handle deletion - { from = Pos(from.line, from.ch - deleted); } - else if (cm.state.overwrite && !paste) // Handle overwrite - { to = Pos(to.line, Math.min(getLine(doc, to.line).text.length, to.ch + lst(textLines).length)); } - else if (lastCopied && lastCopied.lineWise && lastCopied.text.join("\n") == inserted) - { from = to = Pos(from.line, 0); } - } - updateInput = cm.curOp.updateInput; - var changeEvent = {from: from, to: to, text: multiPaste ? multiPaste[i$1 % multiPaste.length] : textLines, - origin: origin || (paste ? "paste" : cm.state.cutIncoming ? 
"cut" : "+input")}; - makeChange(cm.doc, changeEvent); - signalLater(cm, "inputRead", cm, changeEvent); - } - if (inserted && !paste) - { triggerElectric(cm, inserted); } - - ensureCursorVisible(cm); - cm.curOp.updateInput = updateInput; - cm.curOp.typing = true; - cm.state.pasteIncoming = cm.state.cutIncoming = false; -} - -function handlePaste(e, cm) { - var pasted = e.clipboardData && e.clipboardData.getData("Text"); - if (pasted) { - e.preventDefault(); - if (!cm.isReadOnly() && !cm.options.disableInput) - { runInOp(cm, function () { return applyTextInput(cm, pasted, 0, null, "paste"); }); } - return true - } -} - -function triggerElectric(cm, inserted) { - // When an 'electric' character is inserted, immediately trigger a reindent - if (!cm.options.electricChars || !cm.options.smartIndent) { return } - var sel = cm.doc.sel; - - for (var i = sel.ranges.length - 1; i >= 0; i--) { - var range$$1 = sel.ranges[i]; - if (range$$1.head.ch > 100 || (i && sel.ranges[i - 1].head.line == range$$1.head.line)) { continue } - var mode = cm.getModeAt(range$$1.head); - var indented = false; - if (mode.electricChars) { - for (var j = 0; j < mode.electricChars.length; j++) - { if (inserted.indexOf(mode.electricChars.charAt(j)) > -1) { - indented = indentLine(cm, range$$1.head.line, "smart"); - break - } } - } else if (mode.electricInput) { - if (mode.electricInput.test(getLine(cm.doc, range$$1.head.line).text.slice(0, range$$1.head.ch))) - { indented = indentLine(cm, range$$1.head.line, "smart"); } - } - if (indented) { signalLater(cm, "electricInput", cm, range$$1.head.line); } - } -} - -function copyableRanges(cm) { - var text = [], ranges = []; - for (var i = 0; i < cm.doc.sel.ranges.length; i++) { - var line = cm.doc.sel.ranges[i].head.line; - var lineRange = {anchor: Pos(line, 0), head: Pos(line + 1, 0)}; - ranges.push(lineRange); - text.push(cm.getRange(lineRange.anchor, lineRange.head)); - } - return {text: text, ranges: ranges} -} - -function disableBrowserMagic(field, spellcheck) { - field.setAttribute("autocorrect", "off"); - field.setAttribute("autocapitalize", "off"); - field.setAttribute("spellcheck", !!spellcheck); -} - -function hiddenTextarea() { - var te = elt("textarea", null, null, "position: absolute; bottom: -1em; padding: 0; width: 1px; height: 1em; outline: none"); - var div = elt("div", [te], null, "overflow: hidden; position: relative; width: 3px; height: 0px;"); - // The textarea is kept positioned near the cursor to prevent the - // fact that it'll be scrolled into view on input from scrolling - // our fake cursor out of view. On webkit, when wrap=off, paste is - // very slow. So make the area wide instead. - if (webkit) { te.style.width = "1000px"; } - else { te.setAttribute("wrap", "off"); } - // If border: 0; -- iOS fails to open keyboard (issue #1287) - if (ios) { te.style.border = "1px solid black"; } - disableBrowserMagic(te); - return div -} - -// The publicly visible API. Note that methodOp(f) means -// 'wrap f in an operation, performed on its `this` parameter'. - -// This is not the complete set of editor methods. Most of the -// methods defined on the Doc type are also injected into -// CodeMirror.prototype, for backwards compatibility and -// convenience. 
- -var addEditorMethods = function(CodeMirror) { - var optionHandlers = CodeMirror.optionHandlers; - - var helpers = CodeMirror.helpers = {}; - - CodeMirror.prototype = { - constructor: CodeMirror, - focus: function(){window.focus(); this.display.input.focus();}, - - setOption: function(option, value) { - var options = this.options, old = options[option]; - if (options[option] == value && option != "mode") { return } - options[option] = value; - if (optionHandlers.hasOwnProperty(option)) - { operation(this, optionHandlers[option])(this, value, old); } - signal(this, "optionChange", this, option); - }, - - getOption: function(option) {return this.options[option]}, - getDoc: function() {return this.doc}, - - addKeyMap: function(map$$1, bottom) { - this.state.keyMaps[bottom ? "push" : "unshift"](getKeyMap(map$$1)); - }, - removeKeyMap: function(map$$1) { - var maps = this.state.keyMaps; - for (var i = 0; i < maps.length; ++i) - { if (maps[i] == map$$1 || maps[i].name == map$$1) { - maps.splice(i, 1); - return true - } } - }, - - addOverlay: methodOp(function(spec, options) { - var mode = spec.token ? spec : CodeMirror.getMode(this.options, spec); - if (mode.startState) { throw new Error("Overlays may not be stateful.") } - insertSorted(this.state.overlays, - {mode: mode, modeSpec: spec, opaque: options && options.opaque, - priority: (options && options.priority) || 0}, - function (overlay) { return overlay.priority; }); - this.state.modeGen++; - regChange(this); - }), - removeOverlay: methodOp(function(spec) { - var this$1 = this; - - var overlays = this.state.overlays; - for (var i = 0; i < overlays.length; ++i) { - var cur = overlays[i].modeSpec; - if (cur == spec || typeof spec == "string" && cur.name == spec) { - overlays.splice(i, 1); - this$1.state.modeGen++; - regChange(this$1); - return - } - } - }), - - indentLine: methodOp(function(n, dir, aggressive) { - if (typeof dir != "string" && typeof dir != "number") { - if (dir == null) { dir = this.options.smartIndent ? "smart" : "prev"; } - else { dir = dir ? "add" : "subtract"; } - } - if (isLine(this.doc, n)) { indentLine(this, n, dir, aggressive); } - }), - indentSelection: methodOp(function(how) { - var this$1 = this; - - var ranges = this.doc.sel.ranges, end = -1; - for (var i = 0; i < ranges.length; i++) { - var range$$1 = ranges[i]; - if (!range$$1.empty()) { - var from = range$$1.from(), to = range$$1.to(); - var start = Math.max(end, from.line); - end = Math.min(this$1.lastLine(), to.line - (to.ch ? 0 : 1)) + 1; - for (var j = start; j < end; ++j) - { indentLine(this$1, j, how); } - var newRanges = this$1.doc.sel.ranges; - if (from.ch == 0 && ranges.length == newRanges.length && newRanges[i].from().ch > 0) - { replaceOneSelection(this$1.doc, i, new Range(from, newRanges[i].to()), sel_dontScroll); } - } else if (range$$1.head.line > end) { - indentLine(this$1, range$$1.head.line, how, true); - end = range$$1.head.line; - if (i == this$1.doc.sel.primIndex) { ensureCursorVisible(this$1); } - } - } - }), - - // Fetch the parser token for a given character. Useful for hacks - // that want to inspect the mode state (say, for completion). 
- getTokenAt: function(pos, precise) { - return takeToken(this, pos, precise) - }, - - getLineTokens: function(line, precise) { - return takeToken(this, Pos(line), precise, true) - }, - - getTokenTypeAt: function(pos) { - pos = clipPos(this.doc, pos); - var styles = getLineStyles(this, getLine(this.doc, pos.line)); - var before = 0, after = (styles.length - 1) / 2, ch = pos.ch; - var type; - if (ch == 0) { type = styles[2]; } - else { for (;;) { - var mid = (before + after) >> 1; - if ((mid ? styles[mid * 2 - 1] : 0) >= ch) { after = mid; } - else if (styles[mid * 2 + 1] < ch) { before = mid + 1; } - else { type = styles[mid * 2 + 2]; break } - } } - var cut = type ? type.indexOf("overlay ") : -1; - return cut < 0 ? type : cut == 0 ? null : type.slice(0, cut - 1) - }, - - getModeAt: function(pos) { - var mode = this.doc.mode; - if (!mode.innerMode) { return mode } - return CodeMirror.innerMode(mode, this.getTokenAt(pos).state).mode - }, - - getHelper: function(pos, type) { - return this.getHelpers(pos, type)[0] - }, - - getHelpers: function(pos, type) { - var this$1 = this; - - var found = []; - if (!helpers.hasOwnProperty(type)) { return found } - var help = helpers[type], mode = this.getModeAt(pos); - if (typeof mode[type] == "string") { - if (help[mode[type]]) { found.push(help[mode[type]]); } - } else if (mode[type]) { - for (var i = 0; i < mode[type].length; i++) { - var val = help[mode[type][i]]; - if (val) { found.push(val); } - } - } else if (mode.helperType && help[mode.helperType]) { - found.push(help[mode.helperType]); - } else if (help[mode.name]) { - found.push(help[mode.name]); - } - for (var i$1 = 0; i$1 < help._global.length; i$1++) { - var cur = help._global[i$1]; - if (cur.pred(mode, this$1) && indexOf(found, cur.val) == -1) - { found.push(cur.val); } - } - return found - }, - - getStateAfter: function(line, precise) { - var doc = this.doc; - line = clipLine(doc, line == null ? doc.first + doc.size - 1: line); - return getContextBefore(this, line + 1, precise).state - }, - - cursorCoords: function(start, mode) { - var pos, range$$1 = this.doc.sel.primary(); - if (start == null) { pos = range$$1.head; } - else if (typeof start == "object") { pos = clipPos(this.doc, start); } - else { pos = start ? range$$1.from() : range$$1.to(); } - return cursorCoords(this, pos, mode || "page") - }, - - charCoords: function(pos, mode) { - return charCoords(this, clipPos(this.doc, pos), mode || "page") - }, - - coordsChar: function(coords, mode) { - coords = fromCoordSystem(this, coords, mode || "page"); - return coordsChar(this, coords.left, coords.top) - }, - - lineAtHeight: function(height, mode) { - height = fromCoordSystem(this, {top: height, left: 0}, mode || "page").top; - return lineAtHeight(this.doc, height + this.display.viewOffset) - }, - heightAtLine: function(line, mode, includeWidgets) { - var end = false, lineObj; - if (typeof line == "number") { - var last = this.doc.first + this.doc.size - 1; - if (line < this.doc.first) { line = this.doc.first; } - else if (line > last) { line = last; end = true; } - lineObj = getLine(this.doc, line); - } else { - lineObj = line; - } - return intoCoordSystem(this, lineObj, {top: 0, left: 0}, mode || "page", includeWidgets || end).top + - (end ? 
this.doc.height - heightAtLine(lineObj) : 0) - }, - - defaultTextHeight: function() { return textHeight(this.display) }, - defaultCharWidth: function() { return charWidth(this.display) }, - - getViewport: function() { return {from: this.display.viewFrom, to: this.display.viewTo}}, - - addWidget: function(pos, node, scroll, vert, horiz) { - var display = this.display; - pos = cursorCoords(this, clipPos(this.doc, pos)); - var top = pos.bottom, left = pos.left; - node.style.position = "absolute"; - node.setAttribute("cm-ignore-events", "true"); - this.display.input.setUneditable(node); - display.sizer.appendChild(node); - if (vert == "over") { - top = pos.top; - } else if (vert == "above" || vert == "near") { - var vspace = Math.max(display.wrapper.clientHeight, this.doc.height), - hspace = Math.max(display.sizer.clientWidth, display.lineSpace.clientWidth); - // Default to positioning above (if specified and possible); otherwise default to positioning below - if ((vert == 'above' || pos.bottom + node.offsetHeight > vspace) && pos.top > node.offsetHeight) - { top = pos.top - node.offsetHeight; } - else if (pos.bottom + node.offsetHeight <= vspace) - { top = pos.bottom; } - if (left + node.offsetWidth > hspace) - { left = hspace - node.offsetWidth; } - } - node.style.top = top + "px"; - node.style.left = node.style.right = ""; - if (horiz == "right") { - left = display.sizer.clientWidth - node.offsetWidth; - node.style.right = "0px"; - } else { - if (horiz == "left") { left = 0; } - else if (horiz == "middle") { left = (display.sizer.clientWidth - node.offsetWidth) / 2; } - node.style.left = left + "px"; - } - if (scroll) - { scrollIntoView(this, {left: left, top: top, right: left + node.offsetWidth, bottom: top + node.offsetHeight}); } - }, - - triggerOnKeyDown: methodOp(onKeyDown), - triggerOnKeyPress: methodOp(onKeyPress), - triggerOnKeyUp: onKeyUp, - triggerOnMouseDown: methodOp(onMouseDown), - - execCommand: function(cmd) { - if (commands.hasOwnProperty(cmd)) - { return commands[cmd].call(null, this) } - }, - - triggerElectric: methodOp(function(text) { triggerElectric(this, text); }), - - findPosH: function(from, amount, unit, visually) { - var this$1 = this; - - var dir = 1; - if (amount < 0) { dir = -1; amount = -amount; } - var cur = clipPos(this.doc, from); - for (var i = 0; i < amount; ++i) { - cur = findPosH(this$1.doc, cur, dir, unit, visually); - if (cur.hitSide) { break } - } - return cur - }, - - moveH: methodOp(function(dir, unit) { - var this$1 = this; - - this.extendSelectionsBy(function (range$$1) { - if (this$1.display.shift || this$1.doc.extend || range$$1.empty()) - { return findPosH(this$1.doc, range$$1.head, dir, unit, this$1.options.rtlMoveVisually) } - else - { return dir < 0 ? range$$1.from() : range$$1.to() } - }, sel_move); - }), - - deleteH: methodOp(function(dir, unit) { - var sel = this.doc.sel, doc = this.doc; - if (sel.somethingSelected()) - { doc.replaceSelection("", null, "+delete"); } - else - { deleteNearSelection(this, function (range$$1) { - var other = findPosH(doc, range$$1.head, dir, unit, false); - return dir < 0 ? 
{from: other, to: range$$1.head} : {from: range$$1.head, to: other} - }); } - }), - - findPosV: function(from, amount, unit, goalColumn) { - var this$1 = this; - - var dir = 1, x = goalColumn; - if (amount < 0) { dir = -1; amount = -amount; } - var cur = clipPos(this.doc, from); - for (var i = 0; i < amount; ++i) { - var coords = cursorCoords(this$1, cur, "div"); - if (x == null) { x = coords.left; } - else { coords.left = x; } - cur = findPosV(this$1, coords, dir, unit); - if (cur.hitSide) { break } - } - return cur - }, - - moveV: methodOp(function(dir, unit) { - var this$1 = this; - - var doc = this.doc, goals = []; - var collapse = !this.display.shift && !doc.extend && doc.sel.somethingSelected(); - doc.extendSelectionsBy(function (range$$1) { - if (collapse) - { return dir < 0 ? range$$1.from() : range$$1.to() } - var headPos = cursorCoords(this$1, range$$1.head, "div"); - if (range$$1.goalColumn != null) { headPos.left = range$$1.goalColumn; } - goals.push(headPos.left); - var pos = findPosV(this$1, headPos, dir, unit); - if (unit == "page" && range$$1 == doc.sel.primary()) - { addToScrollTop(this$1, charCoords(this$1, pos, "div").top - headPos.top); } - return pos - }, sel_move); - if (goals.length) { for (var i = 0; i < doc.sel.ranges.length; i++) - { doc.sel.ranges[i].goalColumn = goals[i]; } } - }), - - // Find the word at the given position (as returned by coordsChar). - findWordAt: function(pos) { - var doc = this.doc, line = getLine(doc, pos.line).text; - var start = pos.ch, end = pos.ch; - if (line) { - var helper = this.getHelper(pos, "wordChars"); - if ((pos.sticky == "before" || end == line.length) && start) { --start; } else { ++end; } - var startChar = line.charAt(start); - var check = isWordChar(startChar, helper) - ? function (ch) { return isWordChar(ch, helper); } - : /\s/.test(startChar) ? 
function (ch) { return /\s/.test(ch); } - : function (ch) { return (!/\s/.test(ch) && !isWordChar(ch)); }; - while (start > 0 && check(line.charAt(start - 1))) { --start; } - while (end < line.length && check(line.charAt(end))) { ++end; } - } - return new Range(Pos(pos.line, start), Pos(pos.line, end)) - }, - - toggleOverwrite: function(value) { - if (value != null && value == this.state.overwrite) { return } - if (this.state.overwrite = !this.state.overwrite) - { addClass(this.display.cursorDiv, "CodeMirror-overwrite"); } - else - { rmClass(this.display.cursorDiv, "CodeMirror-overwrite"); } - - signal(this, "overwriteToggle", this, this.state.overwrite); - }, - hasFocus: function() { return this.display.input.getField() == activeElt() }, - isReadOnly: function() { return !!(this.options.readOnly || this.doc.cantEdit) }, - - scrollTo: methodOp(function (x, y) { scrollToCoords(this, x, y); }), - getScrollInfo: function() { - var scroller = this.display.scroller; - return {left: scroller.scrollLeft, top: scroller.scrollTop, - height: scroller.scrollHeight - scrollGap(this) - this.display.barHeight, - width: scroller.scrollWidth - scrollGap(this) - this.display.barWidth, - clientHeight: displayHeight(this), clientWidth: displayWidth(this)} - }, - - scrollIntoView: methodOp(function(range$$1, margin) { - if (range$$1 == null) { - range$$1 = {from: this.doc.sel.primary().head, to: null}; - if (margin == null) { margin = this.options.cursorScrollMargin; } - } else if (typeof range$$1 == "number") { - range$$1 = {from: Pos(range$$1, 0), to: null}; - } else if (range$$1.from == null) { - range$$1 = {from: range$$1, to: null}; - } - if (!range$$1.to) { range$$1.to = range$$1.from; } - range$$1.margin = margin || 0; - - if (range$$1.from.line != null) { - scrollToRange(this, range$$1); - } else { - scrollToCoordsRange(this, range$$1.from, range$$1.to, range$$1.margin); - } - }), - - setSize: methodOp(function(width, height) { - var this$1 = this; - - var interpret = function (val) { return typeof val == "number" || /^\d+$/.test(String(val)) ? 
val + "px" : val; }; - if (width != null) { this.display.wrapper.style.width = interpret(width); } - if (height != null) { this.display.wrapper.style.height = interpret(height); } - if (this.options.lineWrapping) { clearLineMeasurementCache(this); } - var lineNo$$1 = this.display.viewFrom; - this.doc.iter(lineNo$$1, this.display.viewTo, function (line) { - if (line.widgets) { for (var i = 0; i < line.widgets.length; i++) - { if (line.widgets[i].noHScroll) { regLineChange(this$1, lineNo$$1, "widget"); break } } } - ++lineNo$$1; - }); - this.curOp.forceUpdate = true; - signal(this, "refresh", this); - }), - - operation: function(f){return runInOp(this, f)}, - startOperation: function(){return startOperation(this)}, - endOperation: function(){return endOperation(this)}, - - refresh: methodOp(function() { - var oldHeight = this.display.cachedTextHeight; - regChange(this); - this.curOp.forceUpdate = true; - clearCaches(this); - scrollToCoords(this, this.doc.scrollLeft, this.doc.scrollTop); - updateGutterSpace(this); - if (oldHeight == null || Math.abs(oldHeight - textHeight(this.display)) > .5) - { estimateLineHeights(this); } - signal(this, "refresh", this); - }), - - swapDoc: methodOp(function(doc) { - var old = this.doc; - old.cm = null; - attachDoc(this, doc); - clearCaches(this); - this.display.input.reset(); - scrollToCoords(this, doc.scrollLeft, doc.scrollTop); - this.curOp.forceScroll = true; - signalLater(this, "swapDoc", this, old); - return old - }), - - getInputField: function(){return this.display.input.getField()}, - getWrapperElement: function(){return this.display.wrapper}, - getScrollerElement: function(){return this.display.scroller}, - getGutterElement: function(){return this.display.gutters} - }; - eventMixin(CodeMirror); - - CodeMirror.registerHelper = function(type, name, value) { - if (!helpers.hasOwnProperty(type)) { helpers[type] = CodeMirror[type] = {_global: []}; } - helpers[type][name] = value; - }; - CodeMirror.registerGlobalHelper = function(type, name, predicate, value) { - CodeMirror.registerHelper(type, name, value); - helpers[type]._global.push({pred: predicate, val: value}); - }; -}; - -// Used for horizontal relative motion. Dir is -1 or 1 (left or -// right), unit can be "char", "column" (like char, but doesn't -// cross line boundaries), "word" (across next word), or "group" (to -// the start of next group of word or non-word-non-whitespace -// chars). The visually param controls whether, in right-to-left -// text, direction 1 means to move towards the next index in the -// string, or towards the character to the right of the current -// position. The resulting position will have a hitSide=true -// property if it reached the end of the document. 
-function findPosH(doc, pos, dir, unit, visually) { - var oldPos = pos; - var origDir = dir; - var lineObj = getLine(doc, pos.line); - function findNextLine() { - var l = pos.line + dir; - if (l < doc.first || l >= doc.first + doc.size) { return false } - pos = new Pos(l, pos.ch, pos.sticky); - return lineObj = getLine(doc, l) - } - function moveOnce(boundToLine) { - var next; - if (visually) { - next = moveVisually(doc.cm, lineObj, pos, dir); - } else { - next = moveLogically(lineObj, pos, dir); - } - if (next == null) { - if (!boundToLine && findNextLine()) - { pos = endOfLine(visually, doc.cm, lineObj, pos.line, dir); } - else - { return false } - } else { - pos = next; - } - return true - } - - if (unit == "char") { - moveOnce(); - } else if (unit == "column") { - moveOnce(true); - } else if (unit == "word" || unit == "group") { - var sawType = null, group = unit == "group"; - var helper = doc.cm && doc.cm.getHelper(pos, "wordChars"); - for (var first = true;; first = false) { - if (dir < 0 && !moveOnce(!first)) { break } - var cur = lineObj.text.charAt(pos.ch) || "\n"; - var type = isWordChar(cur, helper) ? "w" - : group && cur == "\n" ? "n" - : !group || /\s/.test(cur) ? null - : "p"; - if (group && !first && !type) { type = "s"; } - if (sawType && sawType != type) { - if (dir < 0) {dir = 1; moveOnce(); pos.sticky = "after";} - break - } - - if (type) { sawType = type; } - if (dir > 0 && !moveOnce(!first)) { break } - } - } - var result = skipAtomic(doc, pos, oldPos, origDir, true); - if (equalCursorPos(oldPos, result)) { result.hitSide = true; } - return result -} - -// For relative vertical movement. Dir may be -1 or 1. Unit can be -// "page" or "line". The resulting position will have a hitSide=true -// property if it reached the end of the document. -function findPosV(cm, pos, dir, unit) { - var doc = cm.doc, x = pos.left, y; - if (unit == "page") { - var pageSize = Math.min(cm.display.wrapper.clientHeight, window.innerHeight || document.documentElement.clientHeight); - var moveAmount = Math.max(pageSize - .5 * textHeight(cm.display), 3); - y = (dir > 0 ? pos.bottom : pos.top) + dir * moveAmount; - - } else if (unit == "line") { - y = dir > 0 ? pos.bottom + 3 : pos.top - 3; - } - var target; - for (;;) { - target = coordsChar(cm, x, y); - if (!target.outside) { break } - if (dir < 0 ? 
y <= 0 : y >= doc.height) { target.hitSide = true; break } - y += dir * 5; - } - return target -} - -// CONTENTEDITABLE INPUT STYLE - -var ContentEditableInput = function(cm) { - this.cm = cm; - this.lastAnchorNode = this.lastAnchorOffset = this.lastFocusNode = this.lastFocusOffset = null; - this.polling = new Delayed(); - this.composing = null; - this.gracePeriod = false; - this.readDOMTimeout = null; -}; - -ContentEditableInput.prototype.init = function (display) { - var this$1 = this; - - var input = this, cm = input.cm; - var div = input.div = display.lineDiv; - disableBrowserMagic(div, cm.options.spellcheck); - - on(div, "paste", function (e) { - if (signalDOMEvent(cm, e) || handlePaste(e, cm)) { return } - // IE doesn't fire input events, so we schedule a read for the pasted content in this way - if (ie_version <= 11) { setTimeout(operation(cm, function () { return this$1.updateFromDOM(); }), 20); } - }); - - on(div, "compositionstart", function (e) { - this$1.composing = {data: e.data, done: false}; - }); - on(div, "compositionupdate", function (e) { - if (!this$1.composing) { this$1.composing = {data: e.data, done: false}; } - }); - on(div, "compositionend", function (e) { - if (this$1.composing) { - if (e.data != this$1.composing.data) { this$1.readFromDOMSoon(); } - this$1.composing.done = true; - } - }); - - on(div, "touchstart", function () { return input.forceCompositionEnd(); }); - - on(div, "input", function () { - if (!this$1.composing) { this$1.readFromDOMSoon(); } - }); - - function onCopyCut(e) { - if (signalDOMEvent(cm, e)) { return } - if (cm.somethingSelected()) { - setLastCopied({lineWise: false, text: cm.getSelections()}); - if (e.type == "cut") { cm.replaceSelection("", null, "cut"); } - } else if (!cm.options.lineWiseCopyCut) { - return - } else { - var ranges = copyableRanges(cm); - setLastCopied({lineWise: true, text: ranges.text}); - if (e.type == "cut") { - cm.operation(function () { - cm.setSelections(ranges.ranges, 0, sel_dontScroll); - cm.replaceSelection("", null, "cut"); - }); - } - } - if (e.clipboardData) { - e.clipboardData.clearData(); - var content = lastCopied.text.join("\n"); - // iOS exposes the clipboard API, but seems to discard content inserted into it - e.clipboardData.setData("Text", content); - if (e.clipboardData.getData("Text") == content) { - e.preventDefault(); - return - } - } - // Old-fashioned briefly-focus-a-textarea hack - var kludge = hiddenTextarea(), te = kludge.firstChild; - cm.display.lineSpace.insertBefore(kludge, cm.display.lineSpace.firstChild); - te.value = lastCopied.text.join("\n"); - var hadFocus = document.activeElement; - selectInput(te); - setTimeout(function () { - cm.display.lineSpace.removeChild(kludge); - hadFocus.focus(); - if (hadFocus == div) { input.showPrimarySelection(); } - }, 50); - } - on(div, "copy", onCopyCut); - on(div, "cut", onCopyCut); -}; - -ContentEditableInput.prototype.prepareSelection = function () { - var result = prepareSelection(this.cm, false); - result.focus = this.cm.state.focused; - return result -}; - -ContentEditableInput.prototype.showSelection = function (info, takeFocus) { - if (!info || !this.cm.display.view.length) { return } - if (info.focus || takeFocus) { this.showPrimarySelection(); } - this.showMultipleSelections(info); -}; - -ContentEditableInput.prototype.showPrimarySelection = function () { - var sel = window.getSelection(), cm = this.cm, prim = cm.doc.sel.primary(); - var from = prim.from(), to = prim.to(); - - if (cm.display.viewTo == cm.display.viewFrom || from.line >= 
cm.display.viewTo || to.line < cm.display.viewFrom) { - sel.removeAllRanges(); - return - } - - var curAnchor = domToPos(cm, sel.anchorNode, sel.anchorOffset); - var curFocus = domToPos(cm, sel.focusNode, sel.focusOffset); - if (curAnchor && !curAnchor.bad && curFocus && !curFocus.bad && - cmp(minPos(curAnchor, curFocus), from) == 0 && - cmp(maxPos(curAnchor, curFocus), to) == 0) - { return } - - var view = cm.display.view; - var start = (from.line >= cm.display.viewFrom && posToDOM(cm, from)) || - {node: view[0].measure.map[2], offset: 0}; - var end = to.line < cm.display.viewTo && posToDOM(cm, to); - if (!end) { - var measure = view[view.length - 1].measure; - var map$$1 = measure.maps ? measure.maps[measure.maps.length - 1] : measure.map; - end = {node: map$$1[map$$1.length - 1], offset: map$$1[map$$1.length - 2] - map$$1[map$$1.length - 3]}; - } - - if (!start || !end) { - sel.removeAllRanges(); - return - } - - var old = sel.rangeCount && sel.getRangeAt(0), rng; - try { rng = range(start.node, start.offset, end.offset, end.node); } - catch(e) {} // Our model of the DOM might be outdated, in which case the range we try to set can be impossible - if (rng) { - if (!gecko && cm.state.focused) { - sel.collapse(start.node, start.offset); - if (!rng.collapsed) { - sel.removeAllRanges(); - sel.addRange(rng); - } - } else { - sel.removeAllRanges(); - sel.addRange(rng); - } - if (old && sel.anchorNode == null) { sel.addRange(old); } - else if (gecko) { this.startGracePeriod(); } - } - this.rememberSelection(); -}; - -ContentEditableInput.prototype.startGracePeriod = function () { - var this$1 = this; - - clearTimeout(this.gracePeriod); - this.gracePeriod = setTimeout(function () { - this$1.gracePeriod = false; - if (this$1.selectionChanged()) - { this$1.cm.operation(function () { return this$1.cm.curOp.selectionChanged = true; }); } - }, 20); -}; - -ContentEditableInput.prototype.showMultipleSelections = function (info) { - removeChildrenAndAdd(this.cm.display.cursorDiv, info.cursors); - removeChildrenAndAdd(this.cm.display.selectionDiv, info.selection); -}; - -ContentEditableInput.prototype.rememberSelection = function () { - var sel = window.getSelection(); - this.lastAnchorNode = sel.anchorNode; this.lastAnchorOffset = sel.anchorOffset; - this.lastFocusNode = sel.focusNode; this.lastFocusOffset = sel.focusOffset; -}; - -ContentEditableInput.prototype.selectionInEditor = function () { - var sel = window.getSelection(); - if (!sel.rangeCount) { return false } - var node = sel.getRangeAt(0).commonAncestorContainer; - return contains(this.div, node) -}; - -ContentEditableInput.prototype.focus = function () { - if (this.cm.options.readOnly != "nocursor") { - if (!this.selectionInEditor()) - { this.showSelection(this.prepareSelection(), true); } - this.div.focus(); - } -}; -ContentEditableInput.prototype.blur = function () { this.div.blur(); }; -ContentEditableInput.prototype.getField = function () { return this.div }; - -ContentEditableInput.prototype.supportsTouch = function () { return true }; - -ContentEditableInput.prototype.receivedFocus = function () { - var input = this; - if (this.selectionInEditor()) - { this.pollSelection(); } - else - { runInOp(this.cm, function () { return input.cm.curOp.selectionChanged = true; }); } - - function poll() { - if (input.cm.state.focused) { - input.pollSelection(); - input.polling.set(input.cm.options.pollInterval, poll); - } - } - this.polling.set(this.cm.options.pollInterval, poll); -}; - -ContentEditableInput.prototype.selectionChanged = function () { 
- var sel = window.getSelection(); - return sel.anchorNode != this.lastAnchorNode || sel.anchorOffset != this.lastAnchorOffset || - sel.focusNode != this.lastFocusNode || sel.focusOffset != this.lastFocusOffset -}; - -ContentEditableInput.prototype.pollSelection = function () { - if (this.readDOMTimeout != null || this.gracePeriod || !this.selectionChanged()) { return } - var sel = window.getSelection(), cm = this.cm; - // On Android Chrome (version 56, at least), backspacing into an - // uneditable block element will put the cursor in that element, - // and then, because it's not editable, hide the virtual keyboard. - // Because Android doesn't allow us to actually detect backspace - // presses in a sane way, this code checks for when that happens - // and simulates a backspace press in this case. - if (android && chrome && this.cm.options.gutters.length && isInGutter(sel.anchorNode)) { - this.cm.triggerOnKeyDown({type: "keydown", keyCode: 8, preventDefault: Math.abs}); - this.blur(); - this.focus(); - return - } - if (this.composing) { return } - this.rememberSelection(); - var anchor = domToPos(cm, sel.anchorNode, sel.anchorOffset); - var head = domToPos(cm, sel.focusNode, sel.focusOffset); - if (anchor && head) { runInOp(cm, function () { - setSelection(cm.doc, simpleSelection(anchor, head), sel_dontScroll); - if (anchor.bad || head.bad) { cm.curOp.selectionChanged = true; } - }); } -}; - -ContentEditableInput.prototype.pollContent = function () { - if (this.readDOMTimeout != null) { - clearTimeout(this.readDOMTimeout); - this.readDOMTimeout = null; - } - - var cm = this.cm, display = cm.display, sel = cm.doc.sel.primary(); - var from = sel.from(), to = sel.to(); - if (from.ch == 0 && from.line > cm.firstLine()) - { from = Pos(from.line - 1, getLine(cm.doc, from.line - 1).length); } - if (to.ch == getLine(cm.doc, to.line).text.length && to.line < cm.lastLine()) - { to = Pos(to.line + 1, 0); } - if (from.line < display.viewFrom || to.line > display.viewTo - 1) { return false } - - var fromIndex, fromLine, fromNode; - if (from.line == display.viewFrom || (fromIndex = findViewIndex(cm, from.line)) == 0) { - fromLine = lineNo(display.view[0].line); - fromNode = display.view[0].node; - } else { - fromLine = lineNo(display.view[fromIndex].line); - fromNode = display.view[fromIndex - 1].node.nextSibling; - } - var toIndex = findViewIndex(cm, to.line); - var toLine, toNode; - if (toIndex == display.view.length - 1) { - toLine = display.viewTo - 1; - toNode = display.lineDiv.lastChild; - } else { - toLine = lineNo(display.view[toIndex + 1].line) - 1; - toNode = display.view[toIndex + 1].node.previousSibling; - } - - if (!fromNode) { return false } - var newText = cm.doc.splitLines(domTextBetween(cm, fromNode, toNode, fromLine, toLine)); - var oldText = getBetween(cm.doc, Pos(fromLine, 0), Pos(toLine, getLine(cm.doc, toLine).text.length)); - while (newText.length > 1 && oldText.length > 1) { - if (lst(newText) == lst(oldText)) { newText.pop(); oldText.pop(); toLine--; } - else if (newText[0] == oldText[0]) { newText.shift(); oldText.shift(); fromLine++; } - else { break } - } - - var cutFront = 0, cutEnd = 0; - var newTop = newText[0], oldTop = oldText[0], maxCutFront = Math.min(newTop.length, oldTop.length); - while (cutFront < maxCutFront && newTop.charCodeAt(cutFront) == oldTop.charCodeAt(cutFront)) - { ++cutFront; } - var newBot = lst(newText), oldBot = lst(oldText); - var maxCutEnd = Math.min(newBot.length - (newText.length == 1 ? cutFront : 0), - oldBot.length - (oldText.length == 1 ? 
cutFront : 0)); - while (cutEnd < maxCutEnd && - newBot.charCodeAt(newBot.length - cutEnd - 1) == oldBot.charCodeAt(oldBot.length - cutEnd - 1)) - { ++cutEnd; } - // Try to move start of change to start of selection if ambiguous - if (newText.length == 1 && oldText.length == 1 && fromLine == from.line) { - while (cutFront && cutFront > from.ch && - newBot.charCodeAt(newBot.length - cutEnd - 1) == oldBot.charCodeAt(oldBot.length - cutEnd - 1)) { - cutFront--; - cutEnd++; - } - } - - newText[newText.length - 1] = newBot.slice(0, newBot.length - cutEnd).replace(/^\u200b+/, ""); - newText[0] = newText[0].slice(cutFront).replace(/\u200b+$/, ""); - - var chFrom = Pos(fromLine, cutFront); - var chTo = Pos(toLine, oldText.length ? lst(oldText).length - cutEnd : 0); - if (newText.length > 1 || newText[0] || cmp(chFrom, chTo)) { - replaceRange(cm.doc, newText, chFrom, chTo, "+input"); - return true - } -}; - -ContentEditableInput.prototype.ensurePolled = function () { - this.forceCompositionEnd(); -}; -ContentEditableInput.prototype.reset = function () { - this.forceCompositionEnd(); -}; -ContentEditableInput.prototype.forceCompositionEnd = function () { - if (!this.composing) { return } - clearTimeout(this.readDOMTimeout); - this.composing = null; - this.updateFromDOM(); - this.div.blur(); - this.div.focus(); -}; -ContentEditableInput.prototype.readFromDOMSoon = function () { - var this$1 = this; - - if (this.readDOMTimeout != null) { return } - this.readDOMTimeout = setTimeout(function () { - this$1.readDOMTimeout = null; - if (this$1.composing) { - if (this$1.composing.done) { this$1.composing = null; } - else { return } - } - this$1.updateFromDOM(); - }, 80); -}; - -ContentEditableInput.prototype.updateFromDOM = function () { - var this$1 = this; - - if (this.cm.isReadOnly() || !this.pollContent()) - { runInOp(this.cm, function () { return regChange(this$1.cm); }); } -}; - -ContentEditableInput.prototype.setUneditable = function (node) { - node.contentEditable = "false"; -}; - -ContentEditableInput.prototype.onKeyPress = function (e) { - if (e.charCode == 0) { return } - e.preventDefault(); - if (!this.cm.isReadOnly()) - { operation(this.cm, applyTextInput)(this.cm, String.fromCharCode(e.charCode == null ? e.keyCode : e.charCode), 0); } -}; - -ContentEditableInput.prototype.readOnlyChanged = function (val) { - this.div.contentEditable = String(val != "nocursor"); -}; - -ContentEditableInput.prototype.onContextMenu = function () {}; -ContentEditableInput.prototype.resetPosition = function () {}; - -ContentEditableInput.prototype.needsContentAttribute = true; - -function posToDOM(cm, pos) { - var view = findViewForLine(cm, pos.line); - if (!view || view.hidden) { return null } - var line = getLine(cm.doc, pos.line); - var info = mapFromLineView(view, line, pos.line); - - var order = getOrder(line, cm.doc.direction), side = "left"; - if (order) { - var partPos = getBidiPartAt(order, pos.ch); - side = partPos % 2 ? "right" : "left"; - } - var result = nodeAndOffsetInLineMap(info.map, pos.ch, side); - result.offset = result.collapse == "right" ? 
result.end : result.start; - return result -} - -function isInGutter(node) { - for (var scan = node; scan; scan = scan.parentNode) - { if (/CodeMirror-gutter-wrapper/.test(scan.className)) { return true } } - return false -} - -function badPos(pos, bad) { if (bad) { pos.bad = true; } return pos } - -function domTextBetween(cm, from, to, fromLine, toLine) { - var text = "", closing = false, lineSep = cm.doc.lineSeparator(); - function recognizeMarker(id) { return function (marker) { return marker.id == id; } } - function close() { - if (closing) { - text += lineSep; - closing = false; - } - } - function addText(str) { - if (str) { - close(); - text += str; - } - } - function walk(node) { - if (node.nodeType == 1) { - var cmText = node.getAttribute("cm-text"); - if (cmText != null) { - addText(cmText || node.textContent.replace(/\u200b/g, "")); - return - } - var markerID = node.getAttribute("cm-marker"), range$$1; - if (markerID) { - var found = cm.findMarks(Pos(fromLine, 0), Pos(toLine + 1, 0), recognizeMarker(+markerID)); - if (found.length && (range$$1 = found[0].find(0))) - { addText(getBetween(cm.doc, range$$1.from, range$$1.to).join(lineSep)); } - return - } - if (node.getAttribute("contenteditable") == "false") { return } - var isBlock = /^(pre|div|p)$/i.test(node.nodeName); - if (isBlock) { close(); } - for (var i = 0; i < node.childNodes.length; i++) - { walk(node.childNodes[i]); } - if (isBlock) { closing = true; } - } else if (node.nodeType == 3) { - addText(node.nodeValue); - } - } - for (;;) { - walk(from); - if (from == to) { break } - from = from.nextSibling; - } - return text -} - -function domToPos(cm, node, offset) { - var lineNode; - if (node == cm.display.lineDiv) { - lineNode = cm.display.lineDiv.childNodes[offset]; - if (!lineNode) { return badPos(cm.clipPos(Pos(cm.display.viewTo - 1)), true) } - node = null; offset = 0; - } else { - for (lineNode = node;; lineNode = lineNode.parentNode) { - if (!lineNode || lineNode == cm.display.lineDiv) { return null } - if (lineNode.parentNode && lineNode.parentNode == cm.display.lineDiv) { break } - } - } - for (var i = 0; i < cm.display.view.length; i++) { - var lineView = cm.display.view[i]; - if (lineView.node == lineNode) - { return locateNodeInLineView(lineView, node, offset) } - } -} - -function locateNodeInLineView(lineView, node, offset) { - var wrapper = lineView.text.firstChild, bad = false; - if (!node || !contains(wrapper, node)) { return badPos(Pos(lineNo(lineView.line), 0), true) } - if (node == wrapper) { - bad = true; - node = wrapper.childNodes[offset]; - offset = 0; - if (!node) { - var line = lineView.rest ? lst(lineView.rest) : lineView.line; - return badPos(Pos(lineNo(line), line.text.length), bad) - } - } - - var textNode = node.nodeType == 3 ? node : null, topNode = node; - if (!textNode && node.childNodes.length == 1 && node.firstChild.nodeType == 3) { - textNode = node.firstChild; - if (offset) { offset = textNode.nodeValue.length; } - } - while (topNode.parentNode != wrapper) { topNode = topNode.parentNode; } - var measure = lineView.measure, maps = measure.maps; - - function find(textNode, topNode, offset) { - for (var i = -1; i < (maps ? maps.length : 0); i++) { - var map$$1 = i < 0 ? measure.map : maps[i]; - for (var j = 0; j < map$$1.length; j += 3) { - var curNode = map$$1[j + 2]; - if (curNode == textNode || curNode == topNode) { - var line = lineNo(i < 0 ? lineView.line : lineView.rest[i]); - var ch = map$$1[j] + offset; - if (offset < 0 || curNode != textNode) { ch = map$$1[j + (offset ? 
1 : 0)]; } - return Pos(line, ch) - } - } - } - } - var found = find(textNode, topNode, offset); - if (found) { return badPos(found, bad) } - - // FIXME this is all really shaky. might handle the few cases it needs to handle, but likely to cause problems - for (var after = topNode.nextSibling, dist = textNode ? textNode.nodeValue.length - offset : 0; after; after = after.nextSibling) { - found = find(after, after.firstChild, 0); - if (found) - { return badPos(Pos(found.line, found.ch - dist), bad) } - else - { dist += after.textContent.length; } - } - for (var before = topNode.previousSibling, dist$1 = offset; before; before = before.previousSibling) { - found = find(before, before.firstChild, -1); - if (found) - { return badPos(Pos(found.line, found.ch + dist$1), bad) } - else - { dist$1 += before.textContent.length; } - } -} - -// TEXTAREA INPUT STYLE - -var TextareaInput = function(cm) { - this.cm = cm; - // See input.poll and input.reset - this.prevInput = ""; - - // Flag that indicates whether we expect input to appear real soon - // now (after some event like 'keypress' or 'input') and are - // polling intensively. - this.pollingFast = false; - // Self-resetting timeout for the poller - this.polling = new Delayed(); - // Used to work around IE issue with selection being forgotten when focus moves away from textarea - this.hasSelection = false; - this.composing = null; -}; - -TextareaInput.prototype.init = function (display) { - var this$1 = this; - - var input = this, cm = this.cm; - - // Wraps and hides input textarea - var div = this.wrapper = hiddenTextarea(); - // The semihidden textarea that is focused when the editor is - // focused, and receives input. - var te = this.textarea = div.firstChild; - display.wrapper.insertBefore(div, display.wrapper.firstChild); - - // Needed to hide big blue blinking cursor on Mobile Safari (doesn't seem to work in iOS 8 anymore) - if (ios) { te.style.width = "0px"; } - - on(te, "input", function () { - if (ie && ie_version >= 9 && this$1.hasSelection) { this$1.hasSelection = null; } - input.poll(); - }); - - on(te, "paste", function (e) { - if (signalDOMEvent(cm, e) || handlePaste(e, cm)) { return } - - cm.state.pasteIncoming = true; - input.fastPoll(); - }); - - function prepareCopyCut(e) { - if (signalDOMEvent(cm, e)) { return } - if (cm.somethingSelected()) { - setLastCopied({lineWise: false, text: cm.getSelections()}); - } else if (!cm.options.lineWiseCopyCut) { - return - } else { - var ranges = copyableRanges(cm); - setLastCopied({lineWise: true, text: ranges.text}); - if (e.type == "cut") { - cm.setSelections(ranges.ranges, null, sel_dontScroll); - } else { - input.prevInput = ""; - te.value = ranges.text.join("\n"); - selectInput(te); - } - } - if (e.type == "cut") { cm.state.cutIncoming = true; } - } - on(te, "cut", prepareCopyCut); - on(te, "copy", prepareCopyCut); - - on(display.scroller, "paste", function (e) { - if (eventInWidget(display, e) || signalDOMEvent(cm, e)) { return } - cm.state.pasteIncoming = true; - input.focus(); - }); - - // Prevent normal selection in the editor (we handle our own) - on(display.lineSpace, "selectstart", function (e) { - if (!eventInWidget(display, e)) { e_preventDefault(e); } - }); - - on(te, "compositionstart", function () { - var start = cm.getCursor("from"); - if (input.composing) { input.composing.range.clear(); } - input.composing = { - start: start, - range: cm.markText(start, cm.getCursor("to"), {className: "CodeMirror-composing"}) - }; - }); - on(te, "compositionend", function () { - if 
(input.composing) { - input.poll(); - input.composing.range.clear(); - input.composing = null; - } - }); -}; - -TextareaInput.prototype.prepareSelection = function () { - // Redraw the selection and/or cursor - var cm = this.cm, display = cm.display, doc = cm.doc; - var result = prepareSelection(cm); - - // Move the hidden textarea near the cursor to prevent scrolling artifacts - if (cm.options.moveInputWithCursor) { - var headPos = cursorCoords(cm, doc.sel.primary().head, "div"); - var wrapOff = display.wrapper.getBoundingClientRect(), lineOff = display.lineDiv.getBoundingClientRect(); - result.teTop = Math.max(0, Math.min(display.wrapper.clientHeight - 10, - headPos.top + lineOff.top - wrapOff.top)); - result.teLeft = Math.max(0, Math.min(display.wrapper.clientWidth - 10, - headPos.left + lineOff.left - wrapOff.left)); - } - - return result -}; - -TextareaInput.prototype.showSelection = function (drawn) { - var cm = this.cm, display = cm.display; - removeChildrenAndAdd(display.cursorDiv, drawn.cursors); - removeChildrenAndAdd(display.selectionDiv, drawn.selection); - if (drawn.teTop != null) { - this.wrapper.style.top = drawn.teTop + "px"; - this.wrapper.style.left = drawn.teLeft + "px"; - } -}; - -// Reset the input to correspond to the selection (or to be empty, -// when not typing and nothing is selected) -TextareaInput.prototype.reset = function (typing) { - if (this.contextMenuPending || this.composing) { return } - var cm = this.cm; - if (cm.somethingSelected()) { - this.prevInput = ""; - var content = cm.getSelection(); - this.textarea.value = content; - if (cm.state.focused) { selectInput(this.textarea); } - if (ie && ie_version >= 9) { this.hasSelection = content; } - } else if (!typing) { - this.prevInput = this.textarea.value = ""; - if (ie && ie_version >= 9) { this.hasSelection = null; } - } -}; - -TextareaInput.prototype.getField = function () { return this.textarea }; - -TextareaInput.prototype.supportsTouch = function () { return false }; - -TextareaInput.prototype.focus = function () { - if (this.cm.options.readOnly != "nocursor" && (!mobile || activeElt() != this.textarea)) { - try { this.textarea.focus(); } - catch (e) {} // IE8 will throw if the textarea is display: none or not in DOM - } -}; - -TextareaInput.prototype.blur = function () { this.textarea.blur(); }; - -TextareaInput.prototype.resetPosition = function () { - this.wrapper.style.top = this.wrapper.style.left = 0; -}; - -TextareaInput.prototype.receivedFocus = function () { this.slowPoll(); }; - -// Poll for input changes, using the normal rate of polling. This -// runs as long as the editor is focused. -TextareaInput.prototype.slowPoll = function () { - var this$1 = this; - - if (this.pollingFast) { return } - this.polling.set(this.cm.options.pollInterval, function () { - this$1.poll(); - if (this$1.cm.state.focused) { this$1.slowPoll(); } - }); -}; - -// When an event has just come in that is likely to add or change -// something in the input textarea, we poll faster, to ensure that -// the change appears on the screen quickly. -TextareaInput.prototype.fastPoll = function () { - var missed = false, input = this; - input.pollingFast = true; - function p() { - var changed = input.poll(); - if (!changed && !missed) {missed = true; input.polling.set(60, p);} - else {input.pollingFast = false; input.slowPoll();} - } - input.polling.set(20, p); -}; - -// Read input from the textarea, and update the document to match. 
-// When something is selected, it is present in the textarea, and -// selected (unless it is huge, in which case a placeholder is -// used). When nothing is selected, the cursor sits after previously -// seen text (can be empty), which is stored in prevInput (we must -// not reset the textarea when typing, because that breaks IME). -TextareaInput.prototype.poll = function () { - var this$1 = this; - - var cm = this.cm, input = this.textarea, prevInput = this.prevInput; - // Since this is called a *lot*, try to bail out as cheaply as - // possible when it is clear that nothing happened. hasSelection - // will be the case when there is a lot of text in the textarea, - // in which case reading its value would be expensive. - if (this.contextMenuPending || !cm.state.focused || - (hasSelection(input) && !prevInput && !this.composing) || - cm.isReadOnly() || cm.options.disableInput || cm.state.keySeq) - { return false } - - var text = input.value; - // If nothing changed, bail. - if (text == prevInput && !cm.somethingSelected()) { return false } - // Work around nonsensical selection resetting in IE9/10, and - // inexplicable appearance of private area unicode characters on - // some key combos in Mac (#2689). - if (ie && ie_version >= 9 && this.hasSelection === text || - mac && /[\uf700-\uf7ff]/.test(text)) { - cm.display.input.reset(); - return false - } - - if (cm.doc.sel == cm.display.selForContextMenu) { - var first = text.charCodeAt(0); - if (first == 0x200b && !prevInput) { prevInput = "\u200b"; } - if (first == 0x21da) { this.reset(); return this.cm.execCommand("undo") } - } - // Find the part of the input that is actually new - var same = 0, l = Math.min(prevInput.length, text.length); - while (same < l && prevInput.charCodeAt(same) == text.charCodeAt(same)) { ++same; } - - runInOp(cm, function () { - applyTextInput(cm, text.slice(same), prevInput.length - same, - null, this$1.composing ? "*compose" : null); - - // Don't leave long text in the textarea, since it makes further polling slow - if (text.length > 1000 || text.indexOf("\n") > -1) { input.value = this$1.prevInput = ""; } - else { this$1.prevInput = text; } - - if (this$1.composing) { - this$1.composing.range.clear(); - this$1.composing.range = cm.markText(this$1.composing.start, cm.getCursor("to"), - {className: "CodeMirror-composing"}); - } - }); - return true -}; - -TextareaInput.prototype.ensurePolled = function () { - if (this.pollingFast && this.poll()) { this.pollingFast = false; } -}; - -TextareaInput.prototype.onKeyPress = function () { - if (ie && ie_version >= 9) { this.hasSelection = null; } - this.fastPoll(); -}; - -TextareaInput.prototype.onContextMenu = function (e) { - var input = this, cm = input.cm, display = cm.display, te = input.textarea; - var pos = posFromMouse(cm, e), scrollPos = display.scroller.scrollTop; - if (!pos || presto) { return } // Opera is difficult. - - // Reset the current text selection only if the click is done outside of the selection - // and 'resetSelectionOnContextMenu' option is true. 
- var reset = cm.options.resetSelectionOnContextMenu; - if (reset && cm.doc.sel.contains(pos) == -1) - { operation(cm, setSelection)(cm.doc, simpleSelection(pos), sel_dontScroll); } - - var oldCSS = te.style.cssText, oldWrapperCSS = input.wrapper.style.cssText; - input.wrapper.style.cssText = "position: absolute"; - var wrapperBox = input.wrapper.getBoundingClientRect(); - te.style.cssText = "position: absolute; width: 30px; height: 30px;\n top: " + (e.clientY - wrapperBox.top - 5) + "px; left: " + (e.clientX - wrapperBox.left - 5) + "px;\n z-index: 1000; background: " + (ie ? "rgba(255, 255, 255, .05)" : "transparent") + ";\n outline: none; border-width: 0; outline: none; overflow: hidden; opacity: .05; filter: alpha(opacity=5);"; - var oldScrollY; - if (webkit) { oldScrollY = window.scrollY; } // Work around Chrome issue (#2712) - display.input.focus(); - if (webkit) { window.scrollTo(null, oldScrollY); } - display.input.reset(); - // Adds "Select all" to context menu in FF - if (!cm.somethingSelected()) { te.value = input.prevInput = " "; } - input.contextMenuPending = true; - display.selForContextMenu = cm.doc.sel; - clearTimeout(display.detectingSelectAll); - - // Select-all will be greyed out if there's nothing to select, so - // this adds a zero-width space so that we can later check whether - // it got selected. - function prepareSelectAllHack() { - if (te.selectionStart != null) { - var selected = cm.somethingSelected(); - var extval = "\u200b" + (selected ? te.value : ""); - te.value = "\u21da"; // Used to catch context-menu undo - te.value = extval; - input.prevInput = selected ? "" : "\u200b"; - te.selectionStart = 1; te.selectionEnd = extval.length; - // Re-set this, in case some other handler touched the - // selection in the meantime. - display.selForContextMenu = cm.doc.sel; - } - } - function rehide() { - input.contextMenuPending = false; - input.wrapper.style.cssText = oldWrapperCSS; - te.style.cssText = oldCSS; - if (ie && ie_version < 9) { display.scrollbars.setScrollTop(display.scroller.scrollTop = scrollPos); } - - // Try to detect the user choosing select-all - if (te.selectionStart != null) { - if (!ie || (ie && ie_version < 9)) { prepareSelectAllHack(); } - var i = 0, poll = function () { - if (display.selForContextMenu == cm.doc.sel && te.selectionStart == 0 && - te.selectionEnd > 0 && input.prevInput == "\u200b") { - operation(cm, selectAll)(cm); - } else if (i++ < 10) { - display.detectingSelectAll = setTimeout(poll, 500); - } else { - display.selForContextMenu = null; - display.input.reset(); - } - }; - display.detectingSelectAll = setTimeout(poll, 200); - } - } - - if (ie && ie_version >= 9) { prepareSelectAllHack(); } - if (captureRightClick) { - e_stop(e); - var mouseup = function () { - off(window, "mouseup", mouseup); - setTimeout(rehide, 20); - }; - on(window, "mouseup", mouseup); - } else { - setTimeout(rehide, 50); - } -}; - -TextareaInput.prototype.readOnlyChanged = function (val) { - if (!val) { this.reset(); } - this.textarea.disabled = val == "nocursor"; -}; - -TextareaInput.prototype.setUneditable = function () {}; - -TextareaInput.prototype.needsContentAttribute = false; - -function fromTextArea(textarea, options) { - options = options ? 
copyObj(options) : {}; - options.value = textarea.value; - if (!options.tabindex && textarea.tabIndex) - { options.tabindex = textarea.tabIndex; } - if (!options.placeholder && textarea.placeholder) - { options.placeholder = textarea.placeholder; } - // Set autofocus to true if this textarea is focused, or if it has - // autofocus and no other element is focused. - if (options.autofocus == null) { - var hasFocus = activeElt(); - options.autofocus = hasFocus == textarea || - textarea.getAttribute("autofocus") != null && hasFocus == document.body; - } - - function save() {textarea.value = cm.getValue();} - - var realSubmit; - if (textarea.form) { - on(textarea.form, "submit", save); - // Deplorable hack to make the submit method do the right thing. - if (!options.leaveSubmitMethodAlone) { - var form = textarea.form; - realSubmit = form.submit; - try { - var wrappedSubmit = form.submit = function () { - save(); - form.submit = realSubmit; - form.submit(); - form.submit = wrappedSubmit; - }; - } catch(e) {} - } - } - - options.finishInit = function (cm) { - cm.save = save; - cm.getTextArea = function () { return textarea; }; - cm.toTextArea = function () { - cm.toTextArea = isNaN; // Prevent this from being ran twice - save(); - textarea.parentNode.removeChild(cm.getWrapperElement()); - textarea.style.display = ""; - if (textarea.form) { - off(textarea.form, "submit", save); - if (typeof textarea.form.submit == "function") - { textarea.form.submit = realSubmit; } - } - }; - }; - - textarea.style.display = "none"; - var cm = CodeMirror$1(function (node) { return textarea.parentNode.insertBefore(node, textarea.nextSibling); }, - options); - return cm -} - -function addLegacyProps(CodeMirror) { - CodeMirror.off = off; - CodeMirror.on = on; - CodeMirror.wheelEventPixels = wheelEventPixels; - CodeMirror.Doc = Doc; - CodeMirror.splitLines = splitLinesAuto; - CodeMirror.countColumn = countColumn; - CodeMirror.findColumn = findColumn; - CodeMirror.isWordChar = isWordCharBasic; - CodeMirror.Pass = Pass; - CodeMirror.signal = signal; - CodeMirror.Line = Line; - CodeMirror.changeEnd = changeEnd; - CodeMirror.scrollbarModel = scrollbarModel; - CodeMirror.Pos = Pos; - CodeMirror.cmpPos = cmp; - CodeMirror.modes = modes; - CodeMirror.mimeModes = mimeModes; - CodeMirror.resolveMode = resolveMode; - CodeMirror.getMode = getMode; - CodeMirror.modeExtensions = modeExtensions; - CodeMirror.extendMode = extendMode; - CodeMirror.copyState = copyState; - CodeMirror.startState = startState; - CodeMirror.innerMode = innerMode; - CodeMirror.commands = commands; - CodeMirror.keyMap = keyMap; - CodeMirror.keyName = keyName; - CodeMirror.isModifierKey = isModifierKey; - CodeMirror.lookupKey = lookupKey; - CodeMirror.normalizeKeyMap = normalizeKeyMap; - CodeMirror.StringStream = StringStream; - CodeMirror.SharedTextMarker = SharedTextMarker; - CodeMirror.TextMarker = TextMarker; - CodeMirror.LineWidget = LineWidget; - CodeMirror.e_preventDefault = e_preventDefault; - CodeMirror.e_stopPropagation = e_stopPropagation; - CodeMirror.e_stop = e_stop; - CodeMirror.addClass = addClass; - CodeMirror.contains = contains; - CodeMirror.rmClass = rmClass; - CodeMirror.keyNames = keyNames; -} - -// EDITOR CONSTRUCTOR - -defineOptions(CodeMirror$1); - -addEditorMethods(CodeMirror$1); - -// Set up methods on CodeMirror's prototype to redirect to the editor's document. 
-var dontDelegate = "iter insert remove copy getEditor constructor".split(" "); -for (var prop in Doc.prototype) { if (Doc.prototype.hasOwnProperty(prop) && indexOf(dontDelegate, prop) < 0) - { CodeMirror$1.prototype[prop] = (function(method) { - return function() {return method.apply(this.doc, arguments)} - })(Doc.prototype[prop]); } } - -eventMixin(Doc); - -// INPUT HANDLING - -CodeMirror$1.inputStyles = {"textarea": TextareaInput, "contenteditable": ContentEditableInput}; - -// MODE DEFINITION AND QUERYING - -// Extra arguments are stored as the mode's dependencies, which is -// used by (legacy) mechanisms like loadmode.js to automatically -// load a mode. (Preferred mechanism is the require/define calls.) -CodeMirror$1.defineMode = function(name/*, mode, …*/) { - if (!CodeMirror$1.defaults.mode && name != "null") { CodeMirror$1.defaults.mode = name; } - defineMode.apply(this, arguments); -}; - -CodeMirror$1.defineMIME = defineMIME; - -// Minimal default mode. -CodeMirror$1.defineMode("null", function () { return ({token: function (stream) { return stream.skipToEnd(); }}); }); -CodeMirror$1.defineMIME("text/plain", "null"); - -// EXTENSIONS - -CodeMirror$1.defineExtension = function (name, func) { - CodeMirror$1.prototype[name] = func; -}; -CodeMirror$1.defineDocExtension = function (name, func) { - Doc.prototype[name] = func; -}; - -CodeMirror$1.fromTextArea = fromTextArea; - -addLegacyProps(CodeMirror$1); - -CodeMirror$1.version = "5.31.0"; - -return CodeMirror$1; - -}))); diff --git a/datasette/static/codemirror-5.57.0-sql.min.js b/datasette/static/codemirror-5.57.0-sql.min.js new file mode 100644 index 00000000..13f667c6 --- /dev/null +++ b/datasette/static/codemirror-5.57.0-sql.min.js @@ -0,0 +1,5 @@ +/* + CodeMirror, copyright (c) by Marijn Haverbeke and others + Distributed under an MIT license: https://codemirror.net/LICENSE +*/ +(function(mod){if(typeof exports=="object"&&typeof module=="object")mod(require("../../lib/codemirror"));else if(typeof define=="function"&&define.amd)define(["../../lib/codemirror"],mod);else mod(CodeMirror)})(function(CodeMirror){"use strict";CodeMirror.defineMode("sql",function(config,parserConfig){var client=parserConfig.client||{},atoms=parserConfig.atoms||{false:true,true:true,null:true},builtin=parserConfig.builtin||set(defaultBuiltin),keywords=parserConfig.keywords||set(sqlKeywords),operatorChars=parserConfig.operatorChars||/^[*+\-%<>!=&|~^\/]/,support=parserConfig.support||{},hooks=parserConfig.hooks||{},dateSQL=parserConfig.dateSQL||{date:true,time:true,timestamp:true},backslashStringEscapes=parserConfig.backslashStringEscapes!==false,brackets=parserConfig.brackets||/^[\{}\(\)\[\]]/,punctuation=parserConfig.punctuation||/^[;.,:]/;function tokenBase(stream,state){var ch=stream.next();if(hooks[ch]){var result=hooks[ch](stream,state);if(result!==false)return result}if(support.hexNumber&&(ch=="0"&&stream.match(/^[xX][0-9a-fA-F]+/)||(ch=="x"||ch=="X")&&stream.match(/^'[0-9a-fA-F]+'/))){return"number"}else if(support.binaryNumber&&((ch=="b"||ch=="B")&&stream.match(/^'[01]+'/)||ch=="0"&&stream.match(/^b[01]+/))){return"number"}else if(ch.charCodeAt(0)>47&&ch.charCodeAt(0)<58){stream.match(/^[0-9]*(\.[0-9]+)?([eE][-+]?[0-9]+)?/);support.decimallessFloat&&stream.match(/^\.(?!\.)/);return"number"}else if(ch=="?"&&(stream.eatSpace()||stream.eol()||stream.eat(";"))){return"variable-3"}else if(ch=="'"||ch=='"'&&support.doubleQuote){state.tokenize=tokenLiteral(ch);return state.tokenize(stream,state)}else 
if((support.nCharCast&&(ch=="n"||ch=="N")||support.charsetCast&&ch=="_"&&stream.match(/[a-z][a-z0-9]*/i))&&(stream.peek()=="'"||stream.peek()=='"')){return"keyword"}else if(support.escapeConstant&&(ch=="e"||ch=="E")&&(stream.peek()=="'"||stream.peek()=='"'&&support.doubleQuote)){state.tokenize=function(stream,state){return(state.tokenize=tokenLiteral(stream.next(),true))(stream,state)};return"keyword"}else if(support.commentSlashSlash&&ch=="/"&&stream.eat("/")){stream.skipToEnd();return"comment"}else if(support.commentHash&&ch=="#"||ch=="-"&&stream.eat("-")&&(!support.commentSpaceRequired||stream.eat(" "))){stream.skipToEnd();return"comment"}else if(ch=="/"&&stream.eat("*")){state.tokenize=tokenComment(1);return state.tokenize(stream,state)}else if(ch=="."){if(support.zerolessFloat&&stream.match(/^(?:\d+(?:e[+-]?\d+)?)/i))return"number";if(stream.match(/^\.+/))return null;if(support.ODBCdotTable&&stream.match(/^[\w\d_$#]+/))return"variable-2"}else if(operatorChars.test(ch)){stream.eatWhile(operatorChars);return"operator"}else if(brackets.test(ch)){return"bracket"}else if(punctuation.test(ch)){stream.eatWhile(punctuation);return"punctuation"}else if(ch=="{"&&(stream.match(/^( )*(d|D|t|T|ts|TS)( )*'[^']*'( )*}/)||stream.match(/^( )*(d|D|t|T|ts|TS)( )*"[^"]*"( )*}/))){return"number"}else{stream.eatWhile(/^[_\w\d]/);var word=stream.current().toLowerCase();if(dateSQL.hasOwnProperty(word)&&(stream.match(/^( )+'[^']*'/)||stream.match(/^( )+"[^"]*"/)))return"number";if(atoms.hasOwnProperty(word))return"atom";if(builtin.hasOwnProperty(word))return"builtin";if(keywords.hasOwnProperty(word))return"keyword";if(client.hasOwnProperty(word))return"string-2";return null}}function tokenLiteral(quote,backslashEscapes){return function(stream,state){var escaped=false,ch;while((ch=stream.next())!=null){if(ch==quote&&!escaped){state.tokenize=tokenBase;break}escaped=(backslashStringEscapes||backslashEscapes)&&!escaped&&ch=="\\"}return"string"}}function tokenComment(depth){return function(stream,state){var m=stream.match(/^.*?(\/\*|\*\/)/);if(!m)stream.skipToEnd();else if(m[1]=="/*")state.tokenize=tokenComment(depth+1);else if(depth>1)state.tokenize=tokenComment(depth-1);else state.tokenize=tokenBase;return"comment"}}function pushContext(stream,state,type){state.context={prev:state.context,indent:stream.indentation(),col:stream.column(),type:type}}function popContext(state){state.indent=state.context.indent;state.context=state.context.prev}return{startState:function(){return{tokenize:tokenBase,context:null}},token:function(stream,state){if(stream.sol()){if(state.context&&state.context.align==null)state.context.align=false}if(state.tokenize==tokenBase&&stream.eatSpace())return null;var style=state.tokenize(stream,state);if(style=="comment")return style;if(state.context&&state.context.align==null)state.context.align=true;var tok=stream.current();if(tok=="(")pushContext(stream,state,")");else if(tok=="[")pushContext(stream,state,"]");else if(state.context&&state.context.type==tok)popContext(state);return style},indent:function(state,textAfter){var cx=state.context;if(!cx)return CodeMirror.Pass;var closing=textAfter.charAt(0)==cx.type;if(cx.align)return cx.col+(closing?0:1);else return cx.indent+(closing?0:config.indentUnit)},blockCommentStart:"/*",blockCommentEnd:"*/",lineComment:support.commentSlashSlash?"//":support.commentHash?"#":"--",closeBrackets:"()[]{}''\"\"``"}});function hookIdentifier(stream){var 
ch;while((ch=stream.next())!=null){if(ch=="`"&&!stream.eat("`"))return"variable-2"}stream.backUp(stream.current().length-1);return stream.eatWhile(/\w/)?"variable-2":null}function hookIdentifierDoublequote(stream){var ch;while((ch=stream.next())!=null){if(ch=='"'&&!stream.eat('"'))return"variable-2"}stream.backUp(stream.current().length-1);return stream.eatWhile(/\w/)?"variable-2":null}function hookVar(stream){if(stream.eat("@")){stream.match(/^session\./);stream.match(/^local\./);stream.match(/^global\./)}if(stream.eat("'")){stream.match(/^.*'/);return"variable-2"}else if(stream.eat('"')){stream.match(/^.*"/);return"variable-2"}else if(stream.eat("`")){stream.match(/^.*`/);return"variable-2"}else if(stream.match(/^[0-9a-zA-Z$\.\_]+/)){return"variable-2"}return null}function hookClient(stream){if(stream.eat("N")){return"atom"}return stream.match(/^[a-zA-Z.#!?]/)?"variable-2":null}var sqlKeywords="alter and as asc between by count create delete desc distinct drop from group having in insert into is join like not on or order select set table union update values where limit ";function set(str){var obj={},words=str.split(" ");for(var i=0;i!=^\&|\/]/,brackets:/^[\{}\(\)]/,punctuation:/^[;.,:/]/,backslashStringEscapes:false,dateSQL:set("date datetimeoffset datetime2 smalldatetime datetime time"),hooks:{"@":hookVar}});CodeMirror.defineMIME("text/x-mysql",{name:"sql",client:set("charset clear connect edit ego exit go help nopager notee nowarning pager print prompt quit rehash source status system tee"),keywords:set(sqlKeywords+"accessible action add after algorithm all analyze asensitive at authors auto_increment autocommit avg avg_row_length before binary binlog both btree cache call cascade cascaded case catalog_name chain change changed character check checkpoint checksum class_origin client_statistics close coalesce code collate collation collations column columns comment commit committed completion concurrent condition connection consistent constraint contains continue contributors convert cross current current_date current_time current_timestamp current_user cursor data database databases day_hour day_microsecond day_minute day_second deallocate dec declare default delay_key_write delayed delimiter des_key_file describe deterministic dev_pop dev_samp deviance diagnostics directory disable discard distinctrow div dual dumpfile each elseif enable enclosed end ends engine engines enum errors escape escaped even event events every execute exists exit explain extended fast fetch field fields first flush for force foreign found_rows full fulltext function general get global grant grants group group_concat handler hash help high_priority hosts hour_microsecond hour_minute hour_second if ignore ignore_server_ids import index index_statistics infile inner innodb inout insensitive insert_method install interval invoker isolation iterate key keys kill language last leading leave left level limit linear lines list load local localtime localtimestamp lock logs low_priority master master_heartbeat_period master_ssl_verify_server_cert masters match max max_rows maxvalue message_text middleint migrate min min_rows minute_microsecond minute_second mod mode modifies modify mutex mysql_errno natural next no no_write_to_binlog offline offset one online open optimize option optionally out outer outfile pack_keys parser partition partitions password phase plugin plugins prepare preserve prev primary privileges procedure processlist profile profiles purge query quick range read read_write reads real rebuild 
recover references regexp relaylog release remove rename reorganize repair repeatable replace require resignal restrict resume return returns revoke right rlike rollback rollup row row_format rtree savepoint schedule schema schema_name schemas second_microsecond security sensitive separator serializable server session share show signal slave slow smallint snapshot soname spatial specific sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_no_cache sql_small_result sqlexception sqlstate sqlwarning ssl start starting starts status std stddev stddev_pop stddev_samp storage straight_join subclass_origin sum suspend table_name table_statistics tables tablespace temporary terminated to trailing transaction trigger triggers truncate uncommitted undo uninstall unique unlock upgrade usage use use_frm user user_resources user_statistics using utc_date utc_time utc_timestamp value variables varying view views warnings when while with work write xa xor year_month zerofill begin do then else loop repeat"),builtin:set("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text bigint int int1 int2 int3 int4 int8 integer float float4 float8 double char varbinary varchar varcharacter precision date datetime year unsigned signed numeric"),atoms:set("false true null unknown"),operatorChars:/^[*+\-%<>!=&|^]/,dateSQL:set("date time timestamp"),support:set("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber doubleQuote nCharCast charsetCast commentHash commentSpaceRequired"),hooks:{"@":hookVar,"`":hookIdentifier,"\\":hookClient}});CodeMirror.defineMIME("text/x-mariadb",{name:"sql",client:set("charset clear connect edit ego exit go help nopager notee nowarning pager print prompt quit rehash source status system tee"),keywords:set(sqlKeywords+"accessible action add after algorithm all always analyze asensitive at authors auto_increment autocommit avg avg_row_length before binary binlog both btree cache call cascade cascaded case catalog_name chain change changed character check checkpoint checksum class_origin client_statistics close coalesce code collate collation collations column columns comment commit committed completion concurrent condition connection consistent constraint contains continue contributors convert cross current current_date current_time current_timestamp current_user cursor data database databases day_hour day_microsecond day_minute day_second deallocate dec declare default delay_key_write delayed delimiter des_key_file describe deterministic dev_pop dev_samp deviance diagnostics directory disable discard distinctrow div dual dumpfile each elseif enable enclosed end ends engine engines enum errors escape escaped even event events every execute exists exit explain extended fast fetch field fields first flush for force foreign found_rows full fulltext function general generated get global grant grants group groupby_concat handler hard hash help high_priority hosts hour_microsecond hour_minute hour_second if ignore ignore_server_ids import index index_statistics infile inner innodb inout insensitive insert_method install interval invoker isolation iterate key keys kill language last leading leave left level limit linear lines list load local localtime localtimestamp lock logs low_priority master master_heartbeat_period master_ssl_verify_server_cert masters match max max_rows maxvalue message_text middleint migrate min min_rows minute_microsecond minute_second mod mode 
modifies modify mutex mysql_errno natural next no no_write_to_binlog offline offset one online open optimize option optionally out outer outfile pack_keys parser partition partitions password persistent phase plugin plugins prepare preserve prev primary privileges procedure processlist profile profiles purge query quick range read read_write reads real rebuild recover references regexp relaylog release remove rename reorganize repair repeatable replace require resignal restrict resume return returns revoke right rlike rollback rollup row row_format rtree savepoint schedule schema schema_name schemas second_microsecond security sensitive separator serializable server session share show shutdown signal slave slow smallint snapshot soft soname spatial specific sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_no_cache sql_small_result sqlexception sqlstate sqlwarning ssl start starting starts status std stddev stddev_pop stddev_samp storage straight_join subclass_origin sum suspend table_name table_statistics tables tablespace temporary terminated to trailing transaction trigger triggers truncate uncommitted undo uninstall unique unlock upgrade usage use use_frm user user_resources user_statistics using utc_date utc_time utc_timestamp value variables varying view views virtual warnings when while with work write xa xor year_month zerofill begin do then else loop repeat"),builtin:set("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text bigint int int1 int2 int3 int4 int8 integer float float4 float8 double char varbinary varchar varcharacter precision date datetime year unsigned signed numeric"),atoms:set("false true null unknown"),operatorChars:/^[*+\-%<>!=&|^]/,dateSQL:set("date time timestamp"),support:set("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber doubleQuote nCharCast charsetCast commentHash commentSpaceRequired"),hooks:{"@":hookVar,"`":hookIdentifier,"\\":hookClient}});CodeMirror.defineMIME("text/x-sqlite",{name:"sql",client:set("auth backup bail binary changes check clone databases dbinfo dump echo eqp exit explain fullschema headers help import imposter indexes iotrace limit lint load log mode nullvalue once open output print prompt quit read restore save scanstats schema separator session shell show stats system tables testcase timeout timer trace vfsinfo vfslist vfsname width"),keywords:set(sqlKeywords+"abort action add after all analyze attach autoincrement before begin cascade case cast check collate column commit conflict constraint cross current_date current_time current_timestamp database default deferrable deferred detach each else end escape except exclusive exists explain fail for foreign full glob if ignore immediate index indexed initially inner instead intersect isnull key left limit match natural no notnull null of offset outer plan pragma primary query raise recursive references regexp reindex release rename replace restrict right rollback row savepoint temp temporary then to transaction trigger unique using vacuum view virtual when with without"),builtin:set("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text clob bigint int int2 int8 integer float double char varchar date datetime year unsigned signed numeric real"),atoms:set("null current_date current_time current_timestamp"),operatorChars:/^[*+\-%<>!=&|/~]/,dateSQL:set("date 
time timestamp datetime"),support:set("decimallessFloat zerolessFloat"),identifierQuote:'"',hooks:{"@":hookVar,":":hookVar,"?":hookVar,$:hookVar,'"':hookIdentifierDoublequote,"`":hookIdentifier}});CodeMirror.defineMIME("text/x-cassandra",{name:"sql",client:{},keywords:set("add all allow alter and any apply as asc authorize batch begin by clustering columnfamily compact consistency count create custom delete desc distinct drop each_quorum exists filtering from grant if in index insert into key keyspace keyspaces level limit local_one local_quorum modify nan norecursive nosuperuser not of on one order password permission permissions primary quorum rename revoke schema select set storage superuser table three to token truncate ttl two type unlogged update use user users using values where with writetime"),builtin:set("ascii bigint blob boolean counter decimal double float frozen inet int list map static text timestamp timeuuid tuple uuid varchar varint"),atoms:set("false true infinity NaN"),operatorChars:/^[<>=]/,dateSQL:{},support:set("commentSlashSlash decimallessFloat"),hooks:{}});CodeMirror.defineMIME("text/x-plsql",{name:"sql",client:set("appinfo arraysize autocommit autoprint autorecovery autotrace blockterminator break btitle cmdsep colsep compatibility compute concat copycommit copytypecheck define describe echo editfile embedded escape exec execute feedback flagger flush heading headsep instance linesize lno loboffset logsource long longchunksize markup native newpage numformat numwidth pagesize pause pno recsep recsepchar release repfooter repheader serveroutput shiftinout show showmode size spool sqlblanklines sqlcase sqlcode sqlcontinue sqlnumber sqlpluscompatibility sqlprefix sqlprompt sqlterminator suffix tab term termout time timing trimout trimspool ttitle underline verify version wrap"),keywords:set("abort accept access add all alter and any array arraylen as asc assert assign at attributes audit authorization avg base_table begin between binary_integer body boolean by case cast char char_base check close cluster clusters colauth column comment commit compress connect connected constant constraint crash create current currval cursor data_base database date dba deallocate debugoff debugon decimal declare default definition delay delete desc digits dispose distinct do drop else elseif elsif enable end entry escape exception exception_init exchange exclusive exists exit external fast fetch file for force form from function generic goto grant group having identified if immediate in increment index indexes indicator initial initrans insert interface intersect into is key level library like limited local lock log logging long loop master maxextents maxtrans member minextents minus mislabel mode modify multiset new next no noaudit nocompress nologging noparallel not nowait number_base object of off offline on online only open option or order out package parallel partition pctfree pctincrease pctused pls_integer positive positiven pragma primary prior private privileges procedure public raise range raw read rebuild record ref references refresh release rename replace resource restrict return returning returns reverse revoke rollback row rowid rowlabel rownum rows run savepoint schema segment select separate session set share snapshot some space split sql start statement storage subtype successful synonym tabauth table tables tablespace task terminate then to trigger truncate type union unique unlimited unrecoverable unusable update use using validate value values variable view views 
when whenever where while with work"),builtin:set("abs acos add_months ascii asin atan atan2 average bfile bfilename bigserial bit blob ceil character chartorowid chr clob concat convert cos cosh count dec decode deref dual dump dup_val_on_index empty error exp false float floor found glb greatest hextoraw initcap instr instrb int integer isopen last_day least length lengthb ln lower lpad ltrim lub make_ref max min mlslabel mod months_between natural naturaln nchar nclob new_time next_day nextval nls_charset_decl_len nls_charset_id nls_charset_name nls_initcap nls_lower nls_sort nls_upper nlssort no_data_found notfound null number numeric nvarchar2 nvl others power rawtohex real reftohex round rowcount rowidtochar rowtype rpad rtrim serial sign signtype sin sinh smallint soundex sqlcode sqlerrm sqrt stddev string substr substrb sum sysdate tan tanh to_char text to_date to_label to_multi_byte to_number to_single_byte translate true trunc uid unlogged upper user userenv varchar varchar2 variance varying vsize xml"),operatorChars:/^[*\/+\-%<>!=~]/,dateSQL:set("date time timestamp"),support:set("doubleQuote nCharCast zerolessFloat binaryNumber hexNumber")});CodeMirror.defineMIME("text/x-hive",{name:"sql",keywords:set("select alter $elem$ $key$ $value$ add after all analyze and archive as asc before between binary both bucket buckets by cascade case cast change cluster clustered clusterstatus collection column columns comment compute concatenate continue create cross cursor data database databases dbproperties deferred delete delimited desc describe directory disable distinct distribute drop else enable end escaped exclusive exists explain export extended external fetch fields fileformat first format formatted from full function functions grant group having hold_ddltime idxproperties if import in index indexes inpath inputdriver inputformat insert intersect into is items join keys lateral left like limit lines load local location lock locks mapjoin materialized minus msck no_drop nocompress not of offline on option or order out outer outputdriver outputformat overwrite partition partitioned partitions percent plus preserve procedure purge range rcfile read readonly reads rebuild recordreader recordwriter recover reduce regexp rename repair replace restrict revoke right rlike row schema schemas semi sequencefile serde serdeproperties set shared show show_database sort sorted ssl statistics stored streamtable table tables tablesample tblproperties temporary terminated textfile then tmp to touch transform trigger unarchive undo union uniquejoin unlock update use using utc utc_tmestamp view when where while with admin authorization char compact compactions conf cube current current_date current_timestamp day decimal defined dependency directories elem_type exchange file following for grouping hour ignore inner interval jar less logical macro minute month more none noscan over owner partialscan preceding pretty principals protection reload rewrite role roles rollup rows second server sets skewed transactions truncate unbounded unset uri user values window year"),builtin:set("bool boolean long timestamp tinyint smallint bigint int float double date datetime unsigned string array struct map uniontype key_type utctimestamp value_type varchar"),atoms:set("false true null unknown"),operatorChars:/^[*+\-%<>!=]/,dateSQL:set("date timestamp"),support:set("ODBCdotTable doubleQuote binaryNumber hexNumber")});CodeMirror.defineMIME("text/x-pgsql",{name:"sql",client:set("source"),keywords:set(sqlKeywords+"a 
abort abs absent absolute access according action ada add admin after aggregate alias all allocate also alter always analyse analyze and any are array array_agg array_max_cardinality as asc asensitive assert assertion assignment asymmetric at atomic attach attribute attributes authorization avg backward base64 before begin begin_frame begin_partition bernoulli between bigint binary bit bit_length blob blocked bom boolean both breadth by c cache call called cardinality cascade cascaded case cast catalog catalog_name ceil ceiling chain char char_length character character_length character_set_catalog character_set_name character_set_schema characteristics characters check checkpoint class class_origin clob close cluster coalesce cobol collate collation collation_catalog collation_name collation_schema collect column column_name columns command_function command_function_code comment comments commit committed concurrently condition condition_number configuration conflict connect connection connection_name constant constraint constraint_catalog constraint_name constraint_schema constraints constructor contains content continue control conversion convert copy corr corresponding cost count covar_pop covar_samp create cross csv cube cume_dist current current_catalog current_date current_default_transform_group current_path current_role current_row current_schema current_time current_timestamp current_transform_group_for_type current_user cursor cursor_name cycle data database datalink datatype date datetime_interval_code datetime_interval_precision day db deallocate debug dec decimal declare default defaults deferrable deferred defined definer degree delete delimiter delimiters dense_rank depends depth deref derived desc describe descriptor detach detail deterministic diagnostics dictionary disable discard disconnect dispatch distinct dlnewcopy dlpreviouscopy dlurlcomplete dlurlcompleteonly dlurlcompletewrite dlurlpath dlurlpathonly dlurlpathwrite dlurlscheme dlurlserver dlvalue do document domain double drop dump dynamic dynamic_function dynamic_function_code each element else elseif elsif empty enable encoding encrypted end end_frame end_partition endexec enforced enum equals errcode error escape event every except exception exclude excluding exclusive exec execute exists exit exp explain expression extension external extract false family fetch file filter final first first_value flag float floor following for force foreach foreign fortran forward found frame_row free freeze from fs full function functions fusion g general generated get global go goto grant granted greatest group grouping groups handler having header hex hierarchy hint hold hour id identity if ignore ilike immediate immediately immutable implementation implicit import in include including increment indent index indexes indicator info inherit inherits initially inline inner inout input insensitive insert instance instantiable instead int integer integrity intersect intersection interval into invoker is isnull isolation join k key key_member key_type label lag language large last last_value lateral lead leading leakproof least left length level library like like_regex limit link listen ln load local localtime localtimestamp location locator lock locked log logged loop lower m map mapping match matched materialized max max_cardinality maxvalue member merge message message_length message_octet_length message_text method min minute minvalue mod mode modifies module month more move multiset mumps name names namespace national natural 
nchar nclob nesting new next nfc nfd nfkc nfkd nil no none normalize normalized not nothing notice notify notnull nowait nth_value ntile null nullable nullif nulls number numeric object occurrences_regex octet_length octets of off offset oids old on only open operator option options or order ordering ordinality others out outer output over overlaps overlay overriding owned owner p pad parallel parameter parameter_mode parameter_name parameter_ordinal_position parameter_specific_catalog parameter_specific_name parameter_specific_schema parser partial partition pascal passing passthrough password path percent percent_rank percentile_cont percentile_disc perform period permission pg_context pg_datatype_name pg_exception_context pg_exception_detail pg_exception_hint placing plans pli policy portion position position_regex power precedes preceding precision prepare prepared preserve primary print_strict_params prior privileges procedural procedure procedures program public publication query quote raise range rank read reads real reassign recheck recovery recursive ref references referencing refresh regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy regr_syy reindex relative release rename repeatable replace replica requiring reset respect restart restore restrict result result_oid return returned_cardinality returned_length returned_octet_length returned_sqlstate returning returns reverse revoke right role rollback rollup routine routine_catalog routine_name routine_schema routines row row_count row_number rows rowtype rule savepoint scale schema schema_name schemas scope scope_catalog scope_name scope_schema scroll search second section security select selective self sensitive sequence sequences serializable server server_name session session_user set setof sets share show similar simple size skip slice smallint snapshot some source space specific specific_name specifictype sql sqlcode sqlerror sqlexception sqlstate sqlwarning sqrt stable stacked standalone start state statement static statistics stddev_pop stddev_samp stdin stdout storage strict strip structure style subclass_origin submultiset subscription substring substring_regex succeeds sum symmetric sysid system system_time system_user t table table_name tables tablesample tablespace temp template temporary text then ties time timestamp timezone_hour timezone_minute to token top_level_count trailing transaction transaction_active transactions_committed transactions_rolled_back transform transforms translate translate_regex translation treat trigger trigger_catalog trigger_name trigger_schema trim trim_array true truncate trusted type types uescape unbounded uncommitted under unencrypted union unique unknown unlink unlisten unlogged unnamed unnest until untyped update upper uri usage use_column use_variable user user_defined_type_catalog user_defined_type_code user_defined_type_name user_defined_type_schema using vacuum valid validate validator value value_of values var_pop var_samp varbinary varchar variable_conflict variadic varying verbose version versioning view views volatile warning when whenever where while whitespace width_bucket window with within without work wrapper write xml xmlagg xmlattributes xmlbinary xmlcast xmlcomment xmlconcat xmldeclaration xmldocument xmlelement xmlexists xmlforest xmliterate xmlnamespaces xmlparse xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltext xmlvalidate year yes zone"),builtin:set("bigint int8 bigserial serial8 bit varying varbit boolean bool box 
bytea character char varchar cidr circle date double precision float8 inet integer int int4 interval json jsonb line lseg macaddr macaddr8 money numeric decimal path pg_lsn point polygon real float4 smallint int2 smallserial serial2 serial serial4 text time without zone with timetz timestamp timestamptz tsquery tsvector txid_snapshot uuid xml"),atoms:set("false true null unknown"),operatorChars:/^[*\/+\-%<>!=&|^\/#@?~]/,backslashStringEscapes:false,dateSQL:set("date time timestamp"),support:set("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber nCharCast charsetCast escapeConstant")});CodeMirror.defineMIME("text/x-gql",{name:"sql",keywords:set("ancestor and asc by contains desc descendant distinct from group has in is limit offset on order select superset where"),atoms:set("false true"),builtin:set("blob datetime first key __key__ string integer double boolean null"),operatorChars:/^[*+\-%<>!=]/});CodeMirror.defineMIME("text/x-gpsql",{name:"sql",client:set("source"),keywords:set("abort absolute access action active add admin after aggregate all also alter always analyse analyze and any array as asc assertion assignment asymmetric at authorization backward before begin between bigint binary bit boolean both by cache called cascade cascaded case cast chain char character characteristics check checkpoint class close cluster coalesce codegen collate column comment commit committed concurrency concurrently configuration connection constraint constraints contains content continue conversion copy cost cpu_rate_limit create createdb createexttable createrole createuser cross csv cube current current_catalog current_date current_role current_schema current_time current_timestamp current_user cursor cycle data database day deallocate dec decimal declare decode default defaults deferrable deferred definer delete delimiter delimiters deny desc dictionary disable discard distinct distributed do document domain double drop dxl each else enable encoding encrypted end enum errors escape every except exchange exclude excluding exclusive execute exists explain extension external extract false family fetch fields filespace fill filter first float following for force foreign format forward freeze from full function global grant granted greatest group group_id grouping handler hash having header hold host hour identity if ignore ilike immediate immutable implicit in including inclusive increment index indexes inherit inherits initially inline inner inout input insensitive insert instead int integer intersect interval into invoker is isnull isolation join key language large last leading least left level like limit list listen load local localtime localtimestamp location lock log login mapping master match maxvalue median merge minute minvalue missing mode modifies modify month move name names national natural nchar new newline next no nocreatedb nocreateexttable nocreaterole nocreateuser noinherit nologin none noovercommit nosuperuser not nothing notify notnull nowait null nullif nulls numeric object of off offset oids old on only operator option options or order ordered others out outer over overcommit overlaps overlay owned owner parser partial partition partitions passing password percent percentile_cont percentile_disc placing plans position preceding precision prepare prepared preserve primary prior privileges procedural procedure protocol queue quote randomly range read readable reads real reassign recheck recursive ref references reindex reject relative release rename repeatable replace 
replica reset resource restart restrict returning returns revoke right role rollback rollup rootpartition row rows rule savepoint scatter schema scroll search second security segment select sequence serializable session session_user set setof sets share show similar simple smallint some split sql stable standalone start statement statistics stdin stdout storage strict strip subpartition subpartitions substring superuser symmetric sysid system table tablespace temp template temporary text then threshold ties time timestamp to trailing transaction treat trigger trim true truncate trusted type unbounded uncommitted unencrypted union unique unknown unlisten until update user using vacuum valid validation validator value values varchar variadic varying verbose version view volatile web when where whitespace window with within without work writable write xml xmlattributes xmlconcat xmlelement xmlexists xmlforest xmlparse xmlpi xmlroot xmlserialize year yes zone"),builtin:set("bigint int8 bigserial serial8 bit varying varbit boolean bool box bytea character char varchar cidr circle date double precision float float8 inet integer int int4 interval json jsonb line lseg macaddr macaddr8 money numeric decimal path pg_lsn point polygon real float4 smallint int2 smallserial serial2 serial serial4 text time without zone with timetz timestamp timestamptz tsquery tsvector txid_snapshot uuid xml"),atoms:set("false true null unknown"),operatorChars:/^[*+\-%<>!=&|^\/#@?~]/,dateSQL:set("date time timestamp"),support:set("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber nCharCast charsetCast")});CodeMirror.defineMIME("text/x-sparksql",{name:"sql",keywords:set("add after all alter analyze and anti archive array as asc at between bucket buckets by cache cascade case cast change clear cluster clustered codegen collection column columns comment commit compact compactions compute concatenate cost create cross cube current current_date current_timestamp database databases datata dbproperties defined delete delimited deny desc describe dfs directories distinct distribute drop else end escaped except exchange exists explain export extended external false fields fileformat first following for format formatted from full function functions global grant group grouping having if ignore import in index indexes inner inpath inputformat insert intersect interval into is items join keys last lateral lazy left like limit lines list load local location lock locks logical macro map minus msck natural no not null nulls of on optimize option options or order out outer outputformat over overwrite partition partitioned partitions percent preceding principals purge range recordreader recordwriter recover reduce refresh regexp rename repair replace reset restrict revoke right rlike role roles rollback rollup row rows schema schemas select semi separated serde serdeproperties set sets show skewed sort sorted start statistics stored stratify struct table tables tablesample tblproperties temp temporary terminated then to touch transaction transactions transform true truncate unarchive unbounded uncache union unlock unset use using values view when where window with"),builtin:set("tinyint smallint int bigint boolean float double string binary timestamp decimal array map struct uniontype delimited serde sequencefile textfile rcfile inputformat outputformat"),atoms:set("false true null"),operatorChars:/^[*\/+\-%<>!=~&|^]/,dateSQL:set("date time timestamp"),support:set("ODBCdotTable doubleQuote 
zerolessFloat")});CodeMirror.defineMIME("text/x-esper",{name:"sql",client:set("source"),keywords:set("alter and as asc between by count create delete desc distinct drop from group having in insert into is join like not on or order select set table union update values where limit after all and as at asc avedev avg between by case cast coalesce count create current_timestamp day days delete define desc distinct else end escape events every exists false first from full group having hour hours in inner insert instanceof into irstream is istream join last lastweekday left limit like max match_recognize matches median measures metadatasql min minute minutes msec millisecond milliseconds not null offset on or order outer output partition pattern prev prior regexp retain-union retain-intersection right rstream sec second seconds select set some snapshot sql stddev sum then true unidirectional until update variable weekday when where window"),builtin:{},atoms:set("false true null"),operatorChars:/^[*+\-%<>!=&|^\/#@?~]/,dateSQL:set("time"),support:set("decimallessFloat zerolessFloat binaryNumber hexNumber")})}); \ No newline at end of file diff --git a/datasette/static/codemirror-5.57.0.min.css b/datasette/static/codemirror-5.57.0.min.css new file mode 100644 index 00000000..0adf786f --- /dev/null +++ b/datasette/static/codemirror-5.57.0.min.css @@ -0,0 +1 @@ +.CodeMirror{font-family:monospace;height:300px;color:#000;direction:ltr}.CodeMirror-lines{padding:4px 0}.CodeMirror pre.CodeMirror-line,.CodeMirror pre.CodeMirror-line-like{padding:0 4px}.CodeMirror-gutter-filler,.CodeMirror-scrollbar-filler{background-color:#fff}.CodeMirror-gutters{border-right:1px solid #ddd;background-color:#f7f7f7;white-space:nowrap}.CodeMirror-linenumber{padding:0 3px 0 5px;min-width:20px;text-align:right;color:#999;white-space:nowrap}.CodeMirror-guttermarker{color:#000}.CodeMirror-guttermarker-subtle{color:#999}.CodeMirror-cursor{border-left:1px solid #000;border-right:none;width:0}.CodeMirror div.CodeMirror-secondarycursor{border-left:1px solid silver}.cm-fat-cursor .CodeMirror-cursor{width:auto;border:0!important;background:#7e7}.cm-fat-cursor div.CodeMirror-cursors{z-index:1}.cm-fat-cursor-mark{background-color:rgba(20,255,20,.5);-webkit-animation:blink 1.06s steps(1) infinite;-moz-animation:blink 1.06s steps(1) infinite;animation:blink 1.06s steps(1) infinite}.cm-animate-fat-cursor{width:auto;border:0;-webkit-animation:blink 1.06s steps(1) infinite;-moz-animation:blink 1.06s steps(1) infinite;animation:blink 1.06s steps(1) infinite;background-color:#7e7}@-moz-keyframes blink{50%{background-color:transparent}}@-webkit-keyframes blink{50%{background-color:transparent}}@keyframes blink{50%{background-color:transparent}}.cm-tab{display:inline-block;text-decoration:inherit}.CodeMirror-rulers{position:absolute;left:0;right:0;top:-50px;bottom:0;overflow:hidden}.CodeMirror-ruler{border-left:1px solid #ccc;top:0;bottom:0;position:absolute}.cm-s-default .cm-header{color:#00f}.cm-s-default .cm-quote{color:#090}.cm-negative{color:#d44}.cm-positive{color:#292}.cm-header,.cm-strong{font-weight:700}.cm-em{font-style:italic}.cm-link{text-decoration:underline}.cm-strikethrough{text-decoration:line-through}.cm-s-default .cm-keyword{color:#708}.cm-s-default .cm-atom{color:#219}.cm-s-default .cm-number{color:#164}.cm-s-default .cm-def{color:#00f}.cm-s-default .cm-variable-2{color:#05a}.cm-s-default .cm-type,.cm-s-default .cm-variable-3{color:#085}.cm-s-default .cm-comment{color:#a50}.cm-s-default .cm-string{color:#a11}.cm-s-default 
.cm-string-2{color:#f50}.cm-s-default .cm-meta{color:#555}.cm-s-default .cm-qualifier{color:#555}.cm-s-default .cm-builtin{color:#30a}.cm-s-default .cm-bracket{color:#997}.cm-s-default .cm-tag{color:#170}.cm-s-default .cm-attribute{color:#00c}.cm-s-default .cm-hr{color:#999}.cm-s-default .cm-link{color:#00c}.cm-s-default .cm-error{color:red}.cm-invalidchar{color:red}.CodeMirror-composing{border-bottom:2px solid}div.CodeMirror span.CodeMirror-matchingbracket{color:#0b0}div.CodeMirror span.CodeMirror-nonmatchingbracket{color:#a22}.CodeMirror-matchingtag{background:rgba(255,150,0,.3)}.CodeMirror-activeline-background{background:#e8f2ff}.CodeMirror{position:relative;overflow:hidden;background:#fff}.CodeMirror-scroll{overflow:scroll!important;margin-bottom:-50px;margin-right:-50px;padding-bottom:50px;height:100%;outline:0;position:relative}.CodeMirror-sizer{position:relative;border-right:50px solid transparent}.CodeMirror-gutter-filler,.CodeMirror-hscrollbar,.CodeMirror-scrollbar-filler,.CodeMirror-vscrollbar{position:absolute;z-index:6;display:none}.CodeMirror-vscrollbar{right:0;top:0;overflow-x:hidden;overflow-y:scroll}.CodeMirror-hscrollbar{bottom:0;left:0;overflow-y:hidden;overflow-x:scroll}.CodeMirror-scrollbar-filler{right:0;bottom:0}.CodeMirror-gutter-filler{left:0;bottom:0}.CodeMirror-gutters{position:absolute;left:0;top:0;min-height:100%;z-index:3}.CodeMirror-gutter{white-space:normal;height:100%;display:inline-block;vertical-align:top;margin-bottom:-50px}.CodeMirror-gutter-wrapper{position:absolute;z-index:4;background:0 0!important;border:none!important}.CodeMirror-gutter-background{position:absolute;top:0;bottom:0;z-index:4}.CodeMirror-gutter-elt{position:absolute;cursor:default;z-index:4}.CodeMirror-gutter-wrapper ::selection{background-color:transparent}.CodeMirror-gutter-wrapper ::-moz-selection{background-color:transparent}.CodeMirror-lines{cursor:text;min-height:1px}.CodeMirror pre.CodeMirror-line,.CodeMirror pre.CodeMirror-line-like{-moz-border-radius:0;-webkit-border-radius:0;border-radius:0;border-width:0;background:0 0;font-family:inherit;font-size:inherit;margin:0;white-space:pre;word-wrap:normal;line-height:inherit;color:inherit;z-index:2;position:relative;overflow:visible;-webkit-tap-highlight-color:transparent;-webkit-font-variant-ligatures:contextual;font-variant-ligatures:contextual}.CodeMirror-wrap pre.CodeMirror-line,.CodeMirror-wrap pre.CodeMirror-line-like{word-wrap:break-word;white-space:pre-wrap;word-break:normal}.CodeMirror-linebackground{position:absolute;left:0;right:0;top:0;bottom:0;z-index:0}.CodeMirror-linewidget{position:relative;z-index:2;padding:.1px}.CodeMirror-rtl pre{direction:rtl}.CodeMirror-code{outline:0}.CodeMirror-gutter,.CodeMirror-gutters,.CodeMirror-linenumber,.CodeMirror-scroll,.CodeMirror-sizer{-moz-box-sizing:content-box;box-sizing:content-box}.CodeMirror-measure{position:absolute;width:100%;height:0;overflow:hidden;visibility:hidden}.CodeMirror-cursor{position:absolute;pointer-events:none}.CodeMirror-measure pre{position:static}div.CodeMirror-cursors{visibility:hidden;position:relative;z-index:3}div.CodeMirror-dragcursors{visibility:visible}.CodeMirror-focused div.CodeMirror-cursors{visibility:visible}.CodeMirror-selected{background:#d9d9d9}.CodeMirror-focused 
.CodeMirror-selected{background:#d7d4f0}.CodeMirror-crosshair{cursor:crosshair}.CodeMirror-line::selection,.CodeMirror-line>span::selection,.CodeMirror-line>span>span::selection{background:#d7d4f0}.CodeMirror-line::-moz-selection,.CodeMirror-line>span::-moz-selection,.CodeMirror-line>span>span::-moz-selection{background:#d7d4f0}.cm-searching{background-color:#ffa;background-color:rgba(255,255,0,.4)}.cm-force-border{padding-right:.1px}@media print{.CodeMirror div.CodeMirror-cursors{visibility:hidden}}.cm-tab-wrap-hack:after{content:''}span.CodeMirror-selectedtext{background:0 0} \ No newline at end of file diff --git a/datasette/static/codemirror-5.57.0.min.js b/datasette/static/codemirror-5.57.0.min.js new file mode 100644 index 00000000..a8ef1854 --- /dev/null +++ b/datasette/static/codemirror-5.57.0.min.js @@ -0,0 +1,11 @@ +/* + CodeMirror, copyright (c) by Marijn Haverbeke and others + Distributed under an MIT license: https://codemirror.net/LICENSE + + This is CodeMirror (https://codemirror.net), a code editor + implemented in JavaScript on top of the browser's DOM. + + You can find some technical background for some of the code below + at http://marijnhaverbeke.nl/blog/#cm-internals . +*/ +(function(global,factory){typeof exports==="object"&&typeof module!=="undefined"?module.exports=factory():typeof define==="function"&&define.amd?define(factory):(global=global||self,global.CodeMirror=factory())})(this,function(){"use strict";var userAgent=navigator.userAgent;var platform=navigator.platform;var gecko=/gecko\/\d/i.test(userAgent);var ie_upto10=/MSIE \d/.test(userAgent);var ie_11up=/Trident\/(?:[7-9]|\d{2,})\..*rv:(\d+)/.exec(userAgent);var edge=/Edge\/(\d+)/.exec(userAgent);var ie=ie_upto10||ie_11up||edge;var ie_version=ie&&(ie_upto10?document.documentMode||6:+(edge||ie_11up)[1]);var webkit=!edge&&/WebKit\//.test(userAgent);var qtwebkit=webkit&&/Qt\/\d+\.\d+/.test(userAgent);var chrome=!edge&&/Chrome\//.test(userAgent);var presto=/Opera\//.test(userAgent);var safari=/Apple Computer/.test(navigator.vendor);var mac_geMountainLion=/Mac OS X 1\d\D([8-9]|\d\d)\D/.test(userAgent);var phantom=/PhantomJS/.test(userAgent);var ios=!edge&&/AppleWebKit/.test(userAgent)&&/Mobile\/\w+/.test(userAgent);var android=/Android/.test(userAgent);var mobile=ios||android||/webOS|BlackBerry|Opera Mini|Opera Mobi|IEMobile/i.test(userAgent);var mac=ios||/Mac/.test(platform);var chromeOS=/\bCrOS\b/.test(userAgent);var windows=/win/i.test(platform);var presto_version=presto&&userAgent.match(/Version\/(\d*\.\d*)/);if(presto_version){presto_version=Number(presto_version[1])}if(presto_version&&presto_version>=15){presto=false;webkit=true}var flipCtrlCmd=mac&&(qtwebkit||presto&&(presto_version==null||presto_version<12.11));var captureRightClick=gecko||ie&&ie_version>=9;function classTest(cls){return new RegExp("(^|\\s)"+cls+"(?:$|\\s)\\s*")}var rmClass=function(node,cls){var current=node.className;var match=classTest(cls).exec(current);if(match){var after=current.slice(match.index+match[0].length);node.className=current.slice(0,match.index)+(after?match[1]+after:"")}};function removeChildren(e){for(var count=e.childNodes.length;count>0;--count){e.removeChild(e.firstChild)}return e}function removeChildrenAndAdd(parent,e){return removeChildren(parent).appendChild(e)}function elt(tag,content,className,style){var e=document.createElement(tag);if(className){e.className=className}if(style){e.style.cssText=style}if(typeof content=="string"){e.appendChild(document.createTextNode(content))}else if(content){for(var 
i=0;i=end){return n+(end-i)}n+=nextTab-i;n+=tabSize-n%tabSize;i=nextTab+1}}var Delayed=function(){this.id=null;this.f=null;this.time=0;this.handler=bind(this.onTimeout,this)};Delayed.prototype.onTimeout=function(self){self.id=0;if(self.time<=+new Date){self.f()}else{setTimeout(self.handler,self.time-+new Date)}};Delayed.prototype.set=function(ms,f){this.f=f;var time=+new Date+ms;if(!this.id||time=goal){return pos+Math.min(skipped,goal-col)}col+=nextTab-pos;col+=tabSize-col%tabSize;pos=nextTab+1;if(col>=goal){return pos}}}var spaceStrs=[""];function spaceStr(n){while(spaceStrs.length<=n){spaceStrs.push(lst(spaceStrs)+" ")}return spaceStrs[n]}function lst(arr){return arr[arr.length-1]}function map(array,f){var out=[];for(var i=0;i"€"&&(ch.toUpperCase()!=ch.toLowerCase()||nonASCIISingleCaseWordChar.test(ch))}function isWordChar(ch,helper){if(!helper){return isWordCharBasic(ch)}if(helper.source.indexOf("\\w")>-1&&isWordCharBasic(ch)){return true}return helper.test(ch)}function isEmpty(obj){for(var n in obj){if(obj.hasOwnProperty(n)&&obj[n]){return false}}return true}var extendingChars=/[\u0300-\u036f\u0483-\u0489\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u065e\u0670\u06d6-\u06dc\u06de-\u06e4\u06e7\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0900-\u0902\u093c\u0941-\u0948\u094d\u0951-\u0955\u0962\u0963\u0981\u09bc\u09be\u09c1-\u09c4\u09cd\u09d7\u09e2\u09e3\u0a01\u0a02\u0a3c\u0a41\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a70\u0a71\u0a75\u0a81\u0a82\u0abc\u0ac1-\u0ac5\u0ac7\u0ac8\u0acd\u0ae2\u0ae3\u0b01\u0b3c\u0b3e\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b57\u0b62\u0b63\u0b82\u0bbe\u0bc0\u0bcd\u0bd7\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0cbc\u0cbf\u0cc2\u0cc6\u0ccc\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0d3e\u0d41-\u0d44\u0d4d\u0d57\u0d62\u0d63\u0dca\u0dcf\u0dd2-\u0dd4\u0dd6\u0ddf\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0f18\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86\u0f87\u0f90-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039\u103a\u103d\u103e\u1058\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085\u1086\u108d\u109d\u135f\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u18a9\u1920-\u1922\u1927\u1928\u1932\u1939-\u193b\u1a17\u1a18\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80\u1b81\u1ba2-\u1ba5\u1ba8\u1ba9\u1c2c-\u1c33\u1c36\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1dc0-\u1de6\u1dfd-\u1dff\u200c\u200d\u20d0-\u20f0\u2cef-\u2cf1\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua66f-\ua672\ua67c\ua67d\ua6f0\ua6f1\ua802\ua806\ua80b\ua825\ua826\ua8c4\ua8e0-\ua8f1\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\uaa29-\uaa2e\uaa31\uaa32\uaa35\uaa36\uaa43\uaa4c\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uabe5\uabe8\uabed\udc00-\udfff\ufb1e\ufe00-\ufe0f\ufe20-\ufe26\uff9e\uff9f]/;function isExtendingChar(ch){return ch.charCodeAt(0)>=768&&extendingChars.test(ch)}function skipExtendingChars(str,pos,dir){while((dir<0?pos>0:posto?-1:1;for(;;){if(from==to){return from}var midF=(from+to)/2,mid=dir<0?Math.ceil(midF):Math.floor(midF);if(mid==from){return pred(mid)?from:to}if(pred(mid)){to=mid}else{from=mid+dir}}}function iterateBidiSections(order,from,to,f){if(!order){return f(from,to,"ltr",0)}var found=false;for(var 
i=0;ifrom||from==to&&part.to==from){f(Math.max(part.from,from),Math.min(part.to,to),part.level==1?"rtl":"ltr",i);found=true}}if(!found){f(from,to,"ltr")}}var bidiOther=null;function getBidiPartAt(order,ch,sticky){var found;bidiOther=null;for(var i=0;ich){return i}if(cur.to==ch){if(cur.from!=cur.to&&sticky=="before"){found=i}else{bidiOther=i}}if(cur.from==ch){if(cur.from!=cur.to&&sticky!="before"){found=i}else{bidiOther=i}}}return found!=null?found:bidiOther}var bidiOrdering=function(){var lowTypes="bbbbbbbbbtstwsbbbbbbbbbbbbbbssstwNN%%%NNNNNN,N,N1111111111NNNNNNNLLLLLLLLLLLLLLLLLLLLLLLLLLNNNNNNLLLLLLLLLLLLLLLLLLLLLLLLLLNNNNbbbbbbsbbbbbbbbbbbbbbbbbbbbbbbbbb,N%%%%NNNNLNNNNN%%11NLNNN1LNNNNNLLLLLLLLLLLLLLLLLLLLLLLNLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLN";var arabicTypes="nnnnnnNNr%%r,rNNmmmmmmmmmmmrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrmmmmmmmmmmmmmmmmmmmmmnnnnnnnnnn%nnrrrmrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrmmmmmmmnNmmmmmmrrmmNmmmmrr1111111111";function charType(code){if(code<=247){return lowTypes.charAt(code)}else if(1424<=code&&code<=1524){return"R"}else if(1536<=code&&code<=1785){return arabicTypes.charAt(code-1536)}else if(1774<=code&&code<=2220){return"r"}else if(8192<=code&&code<=8203){return"w"}else if(code==8204){return"b"}else{return"L"}}var bidiRE=/[\u0590-\u05f4\u0600-\u06ff\u0700-\u08ac]/;var isNeutral=/[stwN]/,isStrong=/[LRr]/,countsAsLeft=/[Lb1n]/,countsAsNum=/[1n]/;function BidiSpan(level,from,to){this.level=level;this.from=from;this.to=to}return function(str,direction){var outerType=direction=="ltr"?"L":"R";if(str.length==0||direction=="ltr"&&!bidiRE.test(str)){return false}var len=str.length,types=[];for(var i=0;i-1){map[type]=arr.slice(0,index).concat(arr.slice(index+1))}}}}function signal(emitter,type){var handlers=getHandlers(emitter,type);if(!handlers.length){return}var args=Array.prototype.slice.call(arguments,2);for(var i=0;i0}function eventMixin(ctor){ctor.prototype.on=function(type,f){on(this,type,f)};ctor.prototype.off=function(type,f){off(this,type,f)}}function e_preventDefault(e){if(e.preventDefault){e.preventDefault()}else{e.returnValue=false}}function e_stopPropagation(e){if(e.stopPropagation){e.stopPropagation()}else{e.cancelBubble=true}}function e_defaultPrevented(e){return e.defaultPrevented!=null?e.defaultPrevented:e.returnValue==false}function e_stop(e){e_preventDefault(e);e_stopPropagation(e)}function e_target(e){return e.target||e.srcElement}function e_button(e){var b=e.which;if(b==null){if(e.button&1){b=1}else if(e.button&2){b=3}else if(e.button&4){b=2}}if(mac&&e.ctrlKey&&b==1){b=3}return b}var dragAndDrop=function(){if(ie&&ie_version<9){return false}var div=elt("div");return"draggable"in div||"dragDrop"in div}();var zwspSupported;function zeroWidthElement(measure){if(zwspSupported==null){var test=elt("span","​");removeChildrenAndAdd(measure,elt("span",[test,document.createTextNode("x")]));if(measure.firstChild.offsetHeight!=0){zwspSupported=test.offsetWidth<=1&&test.offsetHeight>2&&!(ie&&ie_version<8)}}var node=zwspSupported?elt("span","​"):elt("span"," ",null,"display: inline-block; width: 1px; margin-right: -1px");node.setAttribute("cm-text","");return node}var badBidiRects;function hasBadBidiRects(measure){if(badBidiRects!=null){return badBidiRects}var txt=removeChildrenAndAdd(measure,document.createTextNode("AخA"));var r0=range(txt,0,1).getBoundingClientRect();var r1=range(txt,1,2).getBoundingClientRect();removeChildren(measure);if(!r0||r0.left==r0.right){return false}return 
badBidiRects=r1.right-r0.right<3}var splitLinesAuto="\n\nb".split(/\n/).length!=3?function(string){var pos=0,result=[],l=string.length;while(pos<=l){var nl=string.indexOf("\n",pos);if(nl==-1){nl=string.length}var line=string.slice(pos,string.charAt(nl-1)=="\r"?nl-1:nl);var rt=line.indexOf("\r");if(rt!=-1){result.push(line.slice(0,rt));pos+=rt+1}else{result.push(line);pos=nl+1}}return result}:function(string){return string.split(/\r\n?|\n/)};var hasSelection=window.getSelection?function(te){try{return te.selectionStart!=te.selectionEnd}catch(e){return false}}:function(te){var range;try{range=te.ownerDocument.selection.createRange()}catch(e){}if(!range||range.parentElement()!=te){return false}return range.compareEndPoints("StartToEnd",range)!=0};var hasCopyEvent=function(){var e=elt("div");if("oncopy"in e){return true}e.setAttribute("oncopy","return;");return typeof e.oncopy=="function"}();var badZoomedRects=null;function hasBadZoomedRects(measure){if(badZoomedRects!=null){return badZoomedRects}var node=removeChildrenAndAdd(measure,elt("span","x"));var normal=node.getBoundingClientRect();var fromRange=range(node,0,1).getBoundingClientRect();return badZoomedRects=Math.abs(normal.left-fromRange.left)>1}var modes={},mimeModes={};function defineMode(name,mode){if(arguments.length>2){mode.dependencies=Array.prototype.slice.call(arguments,2)}modes[name]=mode}function defineMIME(mime,spec){mimeModes[mime]=spec}function resolveMode(spec){if(typeof spec=="string"&&mimeModes.hasOwnProperty(spec)){spec=mimeModes[spec]}else if(spec&&typeof spec.name=="string"&&mimeModes.hasOwnProperty(spec.name)){var found=mimeModes[spec.name];if(typeof found=="string"){found={name:found}}spec=createObj(found,spec);spec.name=found.name}else if(typeof spec=="string"&&/^[\w\-]+\/[\w\-]+\+xml$/.test(spec)){return resolveMode("application/xml")}else if(typeof spec=="string"&&/^[\w\-]+\/[\w\-]+\+json$/.test(spec)){return resolveMode("application/json")}if(typeof spec=="string"){return{name:spec}}else{return spec||{name:"null"}}}function getMode(options,spec){spec=resolveMode(spec);var mfactory=modes[spec.name];if(!mfactory){return getMode(options,"text/plain")}var modeObj=mfactory(options,spec);if(modeExtensions.hasOwnProperty(spec.name)){var exts=modeExtensions[spec.name];for(var prop in exts){if(!exts.hasOwnProperty(prop)){continue}if(modeObj.hasOwnProperty(prop)){modeObj["_"+prop]=modeObj[prop]}modeObj[prop]=exts[prop]}}modeObj.name=spec.name;if(spec.helperType){modeObj.helperType=spec.helperType}if(spec.modeProps){for(var prop$1 in spec.modeProps){modeObj[prop$1]=spec.modeProps[prop$1]}}return modeObj}var modeExtensions={};function extendMode(mode,properties){var exts=modeExtensions.hasOwnProperty(mode)?modeExtensions[mode]:modeExtensions[mode]={};copyObj(properties,exts)}function copyState(mode,state){if(state===true){return state}if(mode.copyState){return mode.copyState(state)}var nstate={};for(var n in state){var val=state[n];if(val instanceof Array){val=val.concat([])}nstate[n]=val}return nstate}function innerMode(mode,state){var info;while(mode.innerMode){info=mode.innerMode(state);if(!info||info.mode==mode){break}state=info.state;mode=info.mode}return info||{mode:mode,state:state}}function startState(mode,a1,a2){return mode.startState?mode.startState(a1,a2):true}var StringStream=function(string,tabSize,lineOracle){this.pos=this.start=0;this.string=string;this.tabSize=tabSize||8;this.lastColumnPos=this.lastColumnValue=0;this.lineStart=0;this.lineOracle=lineOracle};StringStream.prototype.eol=function(){return 
this.pos>=this.string.length};StringStream.prototype.sol=function(){return this.pos==this.lineStart};StringStream.prototype.peek=function(){return this.string.charAt(this.pos)||undefined};StringStream.prototype.next=function(){if(this.posstart};StringStream.prototype.eatSpace=function(){var start=this.pos;while(/[\s\u00a0]/.test(this.string.charAt(this.pos))){++this.pos}return this.pos>start};StringStream.prototype.skipToEnd=function(){this.pos=this.string.length};StringStream.prototype.skipTo=function(ch){var found=this.string.indexOf(ch,this.pos);if(found>-1){this.pos=found;return true}};StringStream.prototype.backUp=function(n){this.pos-=n};StringStream.prototype.column=function(){if(this.lastColumnPos0){return null}if(match&&consume!==false){this.pos+=match[0].length}return match}};StringStream.prototype.current=function(){return this.string.slice(this.start,this.pos)};StringStream.prototype.hideFirstChars=function(n,inner){this.lineStart+=n;try{return inner()}finally{this.lineStart-=n}};StringStream.prototype.lookAhead=function(n){var oracle=this.lineOracle;return oracle&&oracle.lookAhead(n)};StringStream.prototype.baseToken=function(){var oracle=this.lineOracle;return oracle&&oracle.baseToken(this.pos)};function getLine(doc,n){n-=doc.first;if(n<0||n>=doc.size){throw new Error("There is no line "+(n+doc.first)+" in the document.")}var chunk=doc;while(!chunk.lines){for(var i=0;;++i){var child=chunk.children[i],sz=child.chunkSize();if(n=doc.first&&llast){return Pos(last,getLine(doc,last).text.length)}return clipToLen(pos,getLine(doc,pos.line).text.length)}function clipToLen(pos,linelen){var ch=pos.ch;if(ch==null||ch>linelen){return Pos(pos.line,linelen)}else if(ch<0){return Pos(pos.line,0)}else{return pos}}function clipPosArray(doc,array){var out=[];for(var i=0;ithis.maxLookAhead){this.maxLookAhead=n}return line};Context.prototype.baseToken=function(n){if(!this.baseTokens){return null}while(this.baseTokens[this.baseTokenPos]<=n){this.baseTokenPos+=2}var type=this.baseTokens[this.baseTokenPos+1];return{type:type&&type.replace(/( |^)overlay .*/,""),size:this.baseTokens[this.baseTokenPos]-n}};Context.prototype.nextLine=function(){this.line++;if(this.maxLookAhead>0){this.maxLookAhead--}};Context.fromSaved=function(doc,saved,line){if(saved instanceof SavedContext){return new Context(doc,copyState(doc.mode,saved.state),line,saved.lookAhead)}else{return new Context(doc,copyState(doc.mode,saved),line)}};Context.prototype.save=function(copy){var state=copy!==false?copyState(this.doc.mode,this.state):this.state;return this.maxLookAhead>0?new SavedContext(state,this.maxLookAhead):state};function highlightLine(cm,line,context,forceToEnd){var st=[cm.state.modeGen],lineClasses={};runMode(cm,line.text,cm.doc.mode,context,function(end,style){return st.push(end,style)},lineClasses,forceToEnd);var state=context.state;var loop=function(o){context.baseTokens=st;var overlay=cm.state.overlays[o],i=1,at=0;context.state=true;runMode(cm,line.text,overlay.mode,context,function(end,style){var start=i;while(atend){st.splice(i,1,end,st[i+1],i_end)}i+=2;at=Math.min(end,i_end)}if(!style){return}if(overlay.opaque){st.splice(start,i-start,end,"overlay "+style);i=start+2}else{for(;startcm.options.maxHighlightLength&©State(cm.doc.mode,context.state);var result=highlightLine(cm,line,context);if(resetState){context.state=resetState}line.stateAfter=context.save(!resetState);line.styles=result.styles;if(result.classes){line.styleClasses=result.classes}else 
if(line.styleClasses){line.styleClasses=null}if(updateFrontier===cm.doc.highlightFrontier){cm.doc.modeFrontier=Math.max(cm.doc.modeFrontier,++cm.doc.highlightFrontier)}}return line.styles}function getContextBefore(cm,n,precise){var doc=cm.doc,display=cm.display;if(!doc.mode.startState){return new Context(doc,true,n)}var start=findStartLine(cm,n,precise);var saved=start>doc.first&&getLine(doc,start-1).stateAfter;var context=saved?Context.fromSaved(doc,saved,start):new Context(doc,startState(doc.mode),start);doc.iter(start,n,function(line){processLine(cm,line.text,context);var pos=context.line;line.stateAfter=pos==n-1||pos%5==0||pos>=display.viewFrom&&posstream.start){return style}}throw new Error("Mode "+mode.name+" failed to advance stream.")}var Token=function(stream,type,state){this.start=stream.start;this.end=stream.pos;this.string=stream.current();this.type=type||null;this.state=state};function takeToken(cm,pos,precise,asArray){var doc=cm.doc,mode=doc.mode,style;pos=clipPos(doc,pos);var line=getLine(doc,pos.line),context=getContextBefore(cm,pos.line,precise);var stream=new StringStream(line.text,cm.options.tabSize,context),tokens;if(asArray){tokens=[]}while((asArray||stream.poscm.options.maxHighlightLength){flattenSpans=false;if(forceToEnd){processLine(cm,text,context,stream.pos)}stream.pos=text.length;style=null}else{style=extractLineClasses(readToken(mode,stream,context.state,inner),lineClasses)}if(inner){var mName=inner[0].name;if(mName){style="m-"+(style?mName+" "+style:mName)}}if(!flattenSpans||curStyle!=style){while(curStartlim;--search){if(search<=doc.first){return doc.first}var line=getLine(doc,search-1),after=line.stateAfter;if(after&&(!precise||search+(after instanceof SavedContext?after.lookAhead:0)<=doc.modeFrontier)){return search}var indented=countColumn(line.text,null,cm.options.tabSize);if(minline==null||minindent>indented){minline=search-1;minindent=indented}}return minline}function retreatFrontier(doc,n){doc.modeFrontier=Math.min(doc.modeFrontier,n);if(doc.highlightFrontierstart;line--){var saved=getLine(doc,line).stateAfter;if(saved&&(!(saved instanceof SavedContext)||line+saved.lookAhead=startCh:span.to>startCh);(nw||(nw=[])).push(new MarkedSpan(marker,span.from,endsAfter?null:span.to))}}}return nw}function markedSpansAfter(old,endCh,isInsert){var nw;if(old){for(var i=0;i=endCh:span.to>endCh);if(endsAfter||span.from==endCh&&marker.type=="bookmark"&&(!isInsert||span.marker.insertLeft)){var startsBefore=span.from==null||(marker.inclusiveLeft?span.from<=endCh:span.from0&&first){for(var i$2=0;i$20){continue}var newParts=[j,1],dfrom=cmp(p.from,m.from),dto=cmp(p.to,m.to);if(dfrom<0||!mk.inclusiveLeft&&!dfrom){newParts.push({from:p.from,to:m.from})}if(dto>0||!mk.inclusiveRight&&!dto){newParts.push({from:m.to,to:p.to})}parts.splice.apply(parts,newParts);j+=newParts.length-3}}return parts}function detachMarkedSpans(line){var spans=line.markedSpans;if(!spans){return}for(var i=0;ich)&&(!found||compareCollapsedMarkers(found,sp.marker)<0)){found=sp.marker}}}return found}function conflictingCollapsedRange(doc,lineNo,from,to,marker){var line=getLine(doc,lineNo);var sps=sawCollapsedSpans&&line.markedSpans;if(sps){for(var i=0;i=0&&toCmp<=0||fromCmp<=0&&toCmp>=0){continue}if(fromCmp<=0&&(sp.marker.inclusiveRight&&marker.inclusiveLeft?cmp(found.to,from)>=0:cmp(found.to,from)>0)||fromCmp>=0&&(sp.marker.inclusiveRight&&marker.inclusiveLeft?cmp(found.from,to)<=0:cmp(found.from,to)<0)){return true}}}}function visualLine(line){var 
merged;while(merged=collapsedSpanAtStart(line)){line=merged.find(-1,true).line}return line}function visualLineEnd(line){var merged;while(merged=collapsedSpanAtEnd(line)){line=merged.find(1,true).line}return line}function visualLineContinued(line){var merged,lines;while(merged=collapsedSpanAtEnd(line)){line=merged.find(1,true).line;(lines||(lines=[])).push(line)}return lines}function visualLineNo(doc,lineN){var line=getLine(doc,lineN),vis=visualLine(line);if(line==vis){return lineN}return lineNo(vis)}function visualLineEndNo(doc,lineN){if(lineN>doc.lastLine()){return lineN}var line=getLine(doc,lineN),merged;if(!lineIsHidden(doc,line)){return lineN}while(merged=collapsedSpanAtEnd(line)){line=merged.find(1,true).line}return lineNo(line)+1}function lineIsHidden(doc,line){var sps=sawCollapsedSpans&&line.markedSpans;if(sps){for(var sp=void 0,i=0;id.maxLineLength){d.maxLineLength=len;d.maxLine=line}})}var Line=function(text,markedSpans,estimateHeight){this.text=text;attachMarkedSpans(this,markedSpans);this.height=estimateHeight?estimateHeight(this):1};Line.prototype.lineNo=function(){return lineNo(this)};eventMixin(Line);function updateLine(line,text,markedSpans,estimateHeight){line.text=text;if(line.stateAfter){line.stateAfter=null}if(line.styles){line.styles=null}if(line.order!=null){line.order=null}detachMarkedSpans(line);attachMarkedSpans(line,markedSpans);var estHeight=estimateHeight?estimateHeight(line):1;if(estHeight!=line.height){updateLineHeight(line,estHeight)}}function cleanUpLine(line){line.parent=null;detachMarkedSpans(line)}var styleToClassCache={},styleToClassCacheWithMode={};function interpretTokenStyle(style,options){if(!style||/^\s*$/.test(style)){return null}var cache=options.addModeClass?styleToClassCacheWithMode:styleToClassCache;return cache[style]||(cache[style]=style.replace(/\S+/g,"cm-$&"))}function buildLineContent(cm,lineView){var content=eltP("span",null,null,webkit?"padding-right: .1px":null);var builder={pre:eltP("pre",[content],"CodeMirror-line"),content:content,col:0,pos:0,cm:cm,trailingSpace:false,splitSpaces:cm.getOption("lineWrapping")};lineView.measure={};for(var i=0;i<=(lineView.rest?lineView.rest.length:0);i++){var line=i?lineView.rest[i-1]:lineView.line,order=void 0;builder.pos=0;builder.addToken=buildToken;if(hasBadBidiRects(cm.display.measure)&&(order=getOrder(line,cm.doc.direction))){builder.addToken=buildTokenBadBidi(builder.addToken,order)}builder.map=[];var allowFrontierUpdate=lineView!=cm.display.externalMeasured&&lineNo(line);insertLineContent(line,builder,getLineStyles(cm,line,allowFrontierUpdate));if(line.styleClasses){if(line.styleClasses.bgClass){builder.bgClass=joinClasses(line.styleClasses.bgClass,builder.bgClass||"")}if(line.styleClasses.textClass){builder.textClass=joinClasses(line.styleClasses.textClass,builder.textClass||"")}}if(builder.map.length==0){builder.map.push(0,0,builder.content.appendChild(zeroWidthElement(cm.display.measure)))}if(i==0){lineView.measure.map=builder.map;lineView.measure.cache={}}else{(lineView.measure.maps||(lineView.measure.maps=[])).push(builder.map);(lineView.measure.caches||(lineView.measure.caches=[])).push({})}}if(webkit){var last=builder.content.lastChild;if(/\bcm-tab\b/.test(last.className)||last.querySelector&&last.querySelector(".cm-tab")){builder.content.className="cm-tab-wrap-hack"}}signal(cm,"renderLine",cm,lineView.line,builder.pre);if(builder.pre.className){builder.textClass=joinClasses(builder.pre.className,builder.textClass||"")}return builder}function defaultSpecialCharPlaceholder(ch){var 
token=elt("span","•","cm-invalidchar");token.title="\\u"+ch.charCodeAt(0).toString(16);token.setAttribute("aria-label",token.title);return token}function buildToken(builder,text,style,startStyle,endStyle,css,attributes){if(!text){return}var displayText=builder.splitSpaces?splitSpaces(text,builder.trailingSpace):text;var special=builder.cm.state.specialChars,mustWrap=false;var content;if(!special.test(text)){builder.col+=text.length;content=document.createTextNode(displayText);builder.map.push(builder.pos,builder.pos+text.length,content);if(ie&&ie_version<9){mustWrap=true}builder.pos+=text.length}else{content=document.createDocumentFragment();var pos=0;while(true){special.lastIndex=pos;var m=special.exec(text);var skipped=m?m.index-pos:text.length-pos;if(skipped){var txt=document.createTextNode(displayText.slice(pos,pos+skipped));if(ie&&ie_version<9){content.appendChild(elt("span",[txt]))}else{content.appendChild(txt)}builder.map.push(builder.pos,builder.pos+skipped,txt);builder.col+=skipped;builder.pos+=skipped}if(!m){break}pos+=skipped+1;var txt$1=void 0;if(m[0]=="\t"){var tabSize=builder.cm.options.tabSize,tabWidth=tabSize-builder.col%tabSize;txt$1=content.appendChild(elt("span",spaceStr(tabWidth),"cm-tab"));txt$1.setAttribute("role","presentation");txt$1.setAttribute("cm-text","\t");builder.col+=tabWidth}else if(m[0]=="\r"||m[0]=="\n"){txt$1=content.appendChild(elt("span",m[0]=="\r"?"␍":"␤","cm-invalidchar"));txt$1.setAttribute("cm-text",m[0]);builder.col+=1}else{txt$1=builder.cm.options.specialCharPlaceholder(m[0]);txt$1.setAttribute("cm-text",m[0]);if(ie&&ie_version<9){content.appendChild(elt("span",[txt$1]))}else{content.appendChild(txt$1)}builder.col+=1}builder.map.push(builder.pos,builder.pos+1,txt$1);builder.pos++}}builder.trailingSpace=displayText.charCodeAt(text.length-1)==32;if(style||startStyle||endStyle||mustWrap||css){var fullStyle=style||"";if(startStyle){fullStyle+=startStyle}if(endStyle){fullStyle+=endStyle}var token=elt("span",[content],fullStyle,css);if(attributes){for(var attr in attributes){if(attributes.hasOwnProperty(attr)&&attr!="style"&&attr!="class"){token.setAttribute(attr,attributes[attr])}}}return builder.content.appendChild(token)}builder.content.appendChild(content)}function splitSpaces(text,trailingBefore){if(text.length>1&&!/ /.test(text)){return text}var spaceBefore=trailingBefore,result="";for(var i=0;istart&&part.from<=start){break}}if(part.to>=end){return inner(builder,text,style,startStyle,endStyle,css,attributes)}inner(builder,text.slice(0,part.to-start),style,startStyle,null,css,attributes);startStyle=null;text=text.slice(part.to-start);start=part.to}}}function buildCollapsedSpan(builder,size,marker,ignoreWidget){var widget=!ignoreWidget&&marker.widgetNode;if(widget){builder.map.push(builder.pos,builder.pos+size,widget)}if(!ignoreWidget&&builder.cm.display.input.needsContentAttribute){if(!widget){widget=builder.content.appendChild(document.createElement("span"))}widget.setAttribute("cm-marker",marker.id)}if(widget){builder.cm.display.input.setUneditable(widget);builder.content.appendChild(widget)}builder.pos+=size;builder.trailingSpace=false}function insertLineContent(line,builder,styles){var spans=line.markedSpans,allText=line.text,at=0;if(!spans){for(var i$1=1;i$1pos||m.collapsed&&sp.to==pos&&sp.from==pos)){if(sp.to!=null&&sp.to!=pos&&nextChange>sp.to){nextChange=sp.to;spanEndStyle=""}if(m.className){spanStyle+=" "+m.className}if(m.css){css=(css?css+";":"")+m.css}if(m.startStyle&&sp.from==pos){spanStartStyle+=" 
"+m.startStyle}if(m.endStyle&&sp.to==nextChange){(endStyles||(endStyles=[])).push(m.endStyle,sp.to)}if(m.title){(attributes||(attributes={})).title=m.title}if(m.attributes){for(var attr in m.attributes){(attributes||(attributes={}))[attr]=m.attributes[attr]}}if(m.collapsed&&(!collapsed||compareCollapsedMarkers(collapsed.marker,m)<0)){collapsed=sp}}else if(sp.from>pos&&nextChange>sp.from){nextChange=sp.from}}if(endStyles){for(var j$1=0;j$1=len){break}var upto=Math.min(len,nextChange);while(true){if(text){var end=pos+text.length;if(!collapsed){var tokenText=end>upto?text.slice(0,upto-pos):text;builder.addToken(builder,tokenText,style?style+spanStyle:spanStyle,spanStartStyle,pos+tokenText.length==nextChange?spanEndStyle:"",css,attributes)}if(end>=upto){text=text.slice(upto-pos);pos=upto;break}pos=end;spanStartStyle=""}text=allText.slice(at,at=styles[i++]);style=interpretTokenStyle(styles[i++],builder.cm.options)}}}function LineView(doc,line,lineN){this.line=line;this.rest=visualLineContinued(line);this.size=this.rest?lineNo(lst(this.rest))-lineN+1:1;this.node=this.text=null;this.hidden=lineIsHidden(doc,line)}function buildViewArray(cm,from,to){var array=[],nextPos;for(var pos=from;pos2){heights.push((cur.bottom+next.top)/2-rect.top)}}}heights.push(rect.bottom-rect.top)}}function mapFromLineView(lineView,line,lineN){if(lineView.line==line){return{map:lineView.measure.map,cache:lineView.measure.cache}}for(var i=0;ilineN){return{map:lineView.measure.maps[i$1],cache:lineView.measure.caches[i$1],before:true}}}}function updateExternalMeasurement(cm,line){line=visualLine(line);var lineN=lineNo(line);var view=cm.display.externalMeasured=new LineView(cm.doc,line,lineN);view.lineN=lineN;var built=view.built=buildLineContent(cm,view);view.text=built.pre;removeChildrenAndAdd(cm.display.lineMeasure,built.pre);return view}function measureChar(cm,line,ch,bias){return measureCharPrepared(cm,prepareMeasureForLine(cm,line),ch,bias)}function findViewForLine(cm,lineN){if(lineN>=cm.display.viewFrom&&lineN=ext.lineN&&lineNch){end=mEnd-mStart;start=end-1;if(ch>=mEnd){collapse="right"}}if(start!=null){node=map[i+2];if(mStart==mEnd&&bias==(node.insertLeft?"left":"right")){collapse=bias}if(bias=="left"&&start==0){while(i&&map[i-2]==map[i-3]&&map[i-1].insertLeft){node=map[(i-=3)+2];collapse="left"}}if(bias=="right"&&start==mEnd-mStart){while(i=0;i$1--){if((rect=rects[i$1]).left!=rect.right){break}}}return rect}function measureCharInner(cm,prepared,ch,bias){var place=nodeAndOffsetInLineMap(prepared.map,ch,bias);var node=place.node,start=place.start,end=place.end,collapse=place.collapse;var rect;if(node.nodeType==3){for(var i$1=0;i$1<4;i$1++){while(start&&isExtendingChar(prepared.line.text.charAt(place.coverStart+start))){--start}while(place.coverStart+end0){collapse=bias="right"}var rects;if(cm.options.lineWrapping&&(rects=node.getClientRects()).length>1){rect=rects[bias=="right"?rects.length-1:0]}else{rect=node.getBoundingClientRect()}}if(ie&&ie_version<9&&!start&&(!rect||!rect.left&&!rect.right)){var rSpan=node.parentNode.getClientRects()[0];if(rSpan){rect={left:rSpan.left,right:rSpan.left+charWidth(cm.display),top:rSpan.top,bottom:rSpan.bottom}}else{rect=nullRect}}var rtop=rect.top-prepared.rect.top,rbot=rect.bottom-prepared.rect.top;var mid=(rtop+rbot)/2;var heights=prepared.view.measure.heights;var i=0;for(;i=lineObj.text.length){ch=lineObj.text.length;sticky="before"}else if(ch<=0){ch=0;sticky="after"}if(!order){return get(sticky=="before"?ch-1:ch,sticky=="before")}function getBidi(ch,partPos,invert){var 
part=order[partPos],right=part.level==1;return get(invert?ch-1:ch,right!=invert)}var partPos=getBidiPartAt(order,ch,sticky);var other=bidiOther;var val=getBidi(ch,partPos,sticky=="before");if(other!=null){val.other=getBidi(ch,other,sticky!="before")}return val}function estimateCoords(cm,pos){var left=0;pos=clipPos(cm.doc,pos);if(!cm.options.lineWrapping){left=charWidth(cm.display)*pos.ch}var lineObj=getLine(cm.doc,pos.line);var top=heightAtLine(lineObj)+paddingTop(cm.display);return{left:left,right:left,top:top,bottom:top+lineObj.height}}function PosWithInfo(line,ch,sticky,outside,xRel){var pos=Pos(line,ch,sticky);pos.xRel=xRel;if(outside){pos.outside=outside}return pos}function coordsChar(cm,x,y){var doc=cm.doc;y+=cm.display.viewOffset;if(y<0){return PosWithInfo(doc.first,0,null,-1,-1)}var lineN=lineAtHeight(doc,y),last=doc.first+doc.size-1;if(lineN>last){return PosWithInfo(doc.first+doc.size-1,getLine(doc,last).text.length,null,1,1)}if(x<0){x=0}var lineObj=getLine(doc,lineN);for(;;){var found=coordsCharInner(cm,lineObj,lineN,x,y);var collapsed=collapsedSpanAround(lineObj,found.ch+(found.xRel>0||found.outside>0?1:0));if(!collapsed){return found}var rangeEnd=collapsed.find(1);if(rangeEnd.line==lineN){return rangeEnd}lineObj=getLine(doc,lineN=rangeEnd.line)}}function wrappedLineExtent(cm,lineObj,preparedMeasure,y){y-=widgetTopHeight(lineObj);var end=lineObj.text.length;var begin=findFirst(function(ch){return measureCharPrepared(cm,preparedMeasure,ch-1).bottom<=y},end,0);end=findFirst(function(ch){return measureCharPrepared(cm,preparedMeasure,ch).top>y},begin,end);return{begin:begin,end:end}}function wrappedLineExtentChar(cm,lineObj,preparedMeasure,target){if(!preparedMeasure){preparedMeasure=prepareMeasureForLine(cm,lineObj)}var targetTop=intoCoordSystem(cm,lineObj,measureCharPrepared(cm,preparedMeasure,target),"line").top;return wrappedLineExtent(cm,lineObj,preparedMeasure,targetTop)}function boxIsAfter(box,x,y,left){return box.bottom<=y?false:box.top>y?true:(left?box.left:box.right)>x}function coordsCharInner(cm,lineObj,lineNo,x,y){y-=heightAtLine(lineObj);var preparedMeasure=prepareMeasureForLine(cm,lineObj);var widgetHeight=widgetTopHeight(lineObj);var begin=0,end=lineObj.text.length,ltr=true;var order=getOrder(lineObj,cm.doc.direction);if(order){var part=(cm.options.lineWrapping?coordsBidiPartWrapped:coordsBidiPart)(cm,lineObj,lineNo,preparedMeasure,order,x,y);ltr=part.level!=1;begin=ltr?part.from:part.to-1;end=ltr?part.to:part.from-1}var chAround=null,boxAround=null;var ch=findFirst(function(ch){var box=measureCharPrepared(cm,preparedMeasure,ch);box.top+=widgetHeight;box.bottom+=widgetHeight;if(!boxIsAfter(box,x,y,false)){return false}if(box.top<=y&&box.left<=x){chAround=ch;boxAround=box}return true},begin,end);var baseX,sticky,outside=false;if(boxAround){var atLeft=x-boxAround.left=coords.bottom?1:0}ch=skipExtendingChars(lineObj.text,ch,1);return PosWithInfo(lineNo,ch,sticky,outside,x-baseX)}function coordsBidiPart(cm,lineObj,lineNo,preparedMeasure,order,x,y){var index=findFirst(function(i){var part=order[i],ltr=part.level!=1;return boxIsAfter(cursorCoords(cm,Pos(lineNo,ltr?part.to:part.from,ltr?"before":"after"),"line",lineObj,preparedMeasure),x,y,true)},0,order.length-1);var part=order[index];if(index>0){var ltr=part.level!=1;var start=cursorCoords(cm,Pos(lineNo,ltr?part.from:part.to,ltr?"after":"before"),"line",lineObj,preparedMeasure);if(boxIsAfter(start,x,y,true)&&start.top>y){part=order[index-1]}}return part}function 
coordsBidiPartWrapped(cm,lineObj,_lineNo,preparedMeasure,order,x,y){var ref=wrappedLineExtent(cm,lineObj,preparedMeasure,y);var begin=ref.begin;var end=ref.end;if(/\s/.test(lineObj.text.charAt(end-1))){end--}var part=null,closestDist=null;for(var i=0;i=end||p.to<=begin){continue}var ltr=p.level!=1;var endX=measureCharPrepared(cm,preparedMeasure,ltr?Math.min(end,p.to)-1:Math.max(begin,p.from)).right;var dist=endXdist){part=p;closestDist=dist}}if(!part){part=order[order.length-1]}if(part.fromend){part={from:part.from,to:end,level:part.level}}return part}var measureText;function textHeight(display){if(display.cachedTextHeight!=null){return display.cachedTextHeight}if(measureText==null){measureText=elt("pre",null,"CodeMirror-line-like");for(var i=0;i<49;++i){measureText.appendChild(document.createTextNode("x"));measureText.appendChild(elt("br"))}measureText.appendChild(document.createTextNode("x"))}removeChildrenAndAdd(display.measure,measureText);var height=measureText.offsetHeight/50;if(height>3){display.cachedTextHeight=height}removeChildren(display.measure);return height||1}function charWidth(display){if(display.cachedCharWidth!=null){return display.cachedCharWidth}var anchor=elt("span","xxxxxxxxxx");var pre=elt("pre",[anchor],"CodeMirror-line-like");removeChildrenAndAdd(display.measure,pre);var rect=anchor.getBoundingClientRect(),width=(rect.right-rect.left)/10;if(width>2){display.cachedCharWidth=width}return width||10}function getDimensions(cm){var d=cm.display,left={},width={};var gutterLeft=d.gutters.clientLeft;for(var n=d.gutters.firstChild,i=0;n;n=n.nextSibling,++i){var id=cm.display.gutterSpecs[i].className;left[id]=n.offsetLeft+n.clientLeft+gutterLeft;width[id]=n.clientWidth}return{fixedPos:compensateForHScroll(d),gutterTotalWidth:d.gutters.offsetWidth,gutterLeft:left,gutterWidth:width,wrapperWidth:d.wrapper.clientWidth}}function compensateForHScroll(display){return display.scroller.getBoundingClientRect().left-display.sizer.getBoundingClientRect().left}function estimateHeight(cm){var th=textHeight(cm.display),wrapping=cm.options.lineWrapping;var perLine=wrapping&&Math.max(5,cm.display.scroller.clientWidth/charWidth(cm.display)-3);return function(line){if(lineIsHidden(cm.doc,line)){return 0}var widgetsHeight=0;if(line.widgets){for(var i=0;i0&&(line=getLine(cm.doc,coords.line).text).length==coords.ch){var colDiff=countColumn(line,line.length,cm.options.tabSize)-line.length;coords=Pos(coords.line,Math.max(0,Math.round((x-paddingH(cm.display).left)/charWidth(cm.display))-colDiff))}return coords}function findViewIndex(cm,n){if(n>=cm.display.viewTo){return null}n-=cm.display.viewFrom;if(n<0){return null}var view=cm.display.view;for(var i=0;ifrom)){display.updateLineNumbers=from}cm.curOp.viewChanged=true;if(from>=display.viewTo){if(sawCollapsedSpans&&visualLineNo(cm.doc,from)display.viewFrom){resetView(cm)}else{display.viewFrom+=lendiff;display.viewTo+=lendiff}}else if(from<=display.viewFrom&&to>=display.viewTo){resetView(cm)}else if(from<=display.viewFrom){var cut=viewCuttingPoint(cm,to,to+lendiff,1);if(cut){display.view=display.view.slice(cut.index);display.viewFrom=cut.lineN;display.viewTo+=lendiff}else{resetView(cm)}}else if(to>=display.viewTo){var cut$1=viewCuttingPoint(cm,from,from,-1);if(cut$1){display.view=display.view.slice(0,cut$1.index);display.viewTo=cut$1.lineN}else{resetView(cm)}}else{var cutTop=viewCuttingPoint(cm,from,from,-1);var 
cutBot=viewCuttingPoint(cm,to,to+lendiff,1);if(cutTop&&cutBot){display.view=display.view.slice(0,cutTop.index).concat(buildViewArray(cm,cutTop.lineN,cutBot.lineN)).concat(display.view.slice(cutBot.index));display.viewTo+=lendiff}else{resetView(cm)}}var ext=display.externalMeasured;if(ext){if(to=ext.lineN&&line=display.viewTo){return}var lineView=display.view[findViewIndex(cm,line)];if(lineView.node==null){return}var arr=lineView.changes||(lineView.changes=[]);if(indexOf(arr,type)==-1){arr.push(type)}}function resetView(cm){cm.display.viewFrom=cm.display.viewTo=cm.doc.first;cm.display.view=[];cm.display.viewOffset=0}function viewCuttingPoint(cm,oldN,newN,dir){var index=findViewIndex(cm,oldN),diff,view=cm.display.view;if(!sawCollapsedSpans||newN==cm.doc.first+cm.doc.size){return{index:index,lineN:newN}}var n=cm.display.viewFrom;for(var i=0;i0){if(index==view.length-1){return null}diff=n+view[index].size-oldN;index++}else{diff=n-oldN}oldN+=diff;newN+=diff}while(visualLineNo(cm.doc,newN)!=newN){if(index==(dir<0?0:view.length-1)){return null}newN+=dir*view[index-(dir<0?1:0)].size;index+=dir}return{index:index,lineN:newN}}function adjustView(cm,from,to){var display=cm.display,view=display.view;if(view.length==0||from>=display.viewTo||to<=display.viewFrom){display.view=buildViewArray(cm,from,to);display.viewFrom=from}else{if(display.viewFrom>from){display.view=buildViewArray(cm,from,display.viewFrom).concat(display.view)}else if(display.viewFromto){display.view=display.view.slice(0,findViewIndex(cm,to))}}display.viewTo=to}function countDirtyView(cm){var view=cm.display.view,dirty=0;for(var i=0;i=cm.display.viewTo||range.to().line0){display.blinker=setInterval(function(){return display.cursorDiv.style.visibility=(on=!on)?"":"hidden"},cm.options.cursorBlinkRate)}else if(cm.options.cursorBlinkRate<0){display.cursorDiv.style.visibility="hidden"}}function ensureFocus(cm){if(!cm.state.focused){cm.display.input.focus();onFocus(cm)}}function delayBlurEvent(cm){cm.state.delayingBlurEvent=true;setTimeout(function(){if(cm.state.delayingBlurEvent){cm.state.delayingBlurEvent=false;onBlur(cm)}},100)}function onFocus(cm,e){if(cm.state.delayingBlurEvent){cm.state.delayingBlurEvent=false}if(cm.options.readOnly=="nocursor"){return}if(!cm.state.focused){signal(cm,"focus",cm,e);cm.state.focused=true;addClass(cm.display.wrapper,"CodeMirror-focused");if(!cm.curOp&&cm.display.selForContextMenu!=cm.doc.sel){cm.display.input.reset();if(webkit){setTimeout(function(){return cm.display.input.reset(true)},20)}}cm.display.input.receivedFocus()}restartBlink(cm)}function onBlur(cm,e){if(cm.state.delayingBlurEvent){return}if(cm.state.focused){signal(cm,"blur",cm,e);cm.state.focused=false;rmClass(cm.display.wrapper,"CodeMirror-focused")}clearInterval(cm.display.blinker);setTimeout(function(){if(!cm.state.focused){cm.display.shift=false}},150)}function updateHeightsInViewport(cm){var display=cm.display;var prevBottom=display.lineDiv.offsetTop;for(var i=0;i.005||diff<-.005){updateLineHeight(cur.line,height);updateWidgetHeight(cur.line);if(cur.rest){for(var j=0;jcm.display.sizerWidth){var chWidth=Math.ceil(width/charWidth(cm.display));if(chWidth>cm.display.maxLineLength){cm.display.maxLineLength=chWidth;cm.display.maxLine=cur.line;cm.display.maxLineChanged=true}}}}function updateWidgetHeight(line){if(line.widgets){for(var i=0;i=to){from=lineAtHeight(doc,heightAtLine(getLine(doc,ensureTo))-display.wrapper.clientHeight);to=ensureTo}}return{from:from,to:Math.max(to,from+1)}}function 
maybeScrollWindow(cm,rect){if(signalDOMEvent(cm,"scrollCursorIntoView")){return}var display=cm.display,box=display.sizer.getBoundingClientRect(),doScroll=null;if(rect.top+box.top<0){doScroll=true}else if(rect.bottom+box.top>(window.innerHeight||document.documentElement.clientHeight)){doScroll=false}if(doScroll!=null&&!phantom){var scrollNode=elt("div","​",null,"position: absolute;\n top: "+(rect.top-display.viewOffset-paddingTop(cm.display))+"px;\n height: "+(rect.bottom-rect.top+scrollGap(cm)+display.barHeight)+"px;\n left: "+rect.left+"px; width: "+Math.max(2,rect.right-rect.left)+"px;");cm.display.lineSpace.appendChild(scrollNode);scrollNode.scrollIntoView(doScroll);cm.display.lineSpace.removeChild(scrollNode)}}function scrollPosIntoView(cm,pos,end,margin){if(margin==null){margin=0}var rect;if(!cm.options.lineWrapping&&pos==end){pos=pos.ch?Pos(pos.line,pos.sticky=="before"?pos.ch-1:pos.ch,"after"):pos;end=pos.sticky=="before"?Pos(pos.line,pos.ch+1,"before"):pos}for(var limit=0;limit<5;limit++){var changed=false;var coords=cursorCoords(cm,pos);var endCoords=!end||end==pos?coords:cursorCoords(cm,end);rect={left:Math.min(coords.left,endCoords.left),top:Math.min(coords.top,endCoords.top)-margin,right:Math.max(coords.left,endCoords.left),bottom:Math.max(coords.bottom,endCoords.bottom)+margin};var scrollPos=calculateScrollPos(cm,rect);var startTop=cm.doc.scrollTop,startLeft=cm.doc.scrollLeft;if(scrollPos.scrollTop!=null){updateScrollTop(cm,scrollPos.scrollTop);if(Math.abs(cm.doc.scrollTop-startTop)>1){changed=true}}if(scrollPos.scrollLeft!=null){setScrollLeft(cm,scrollPos.scrollLeft);if(Math.abs(cm.doc.scrollLeft-startLeft)>1){changed=true}}if(!changed){break}}return rect}function scrollIntoView(cm,rect){var scrollPos=calculateScrollPos(cm,rect);if(scrollPos.scrollTop!=null){updateScrollTop(cm,scrollPos.scrollTop)}if(scrollPos.scrollLeft!=null){setScrollLeft(cm,scrollPos.scrollLeft)}}function calculateScrollPos(cm,rect){var display=cm.display,snapMargin=textHeight(cm.display);if(rect.top<0){rect.top=0}var screentop=cm.curOp&&cm.curOp.scrollTop!=null?cm.curOp.scrollTop:display.scroller.scrollTop;var screen=displayHeight(cm),result={};if(rect.bottom-rect.top>screen){rect.bottom=rect.top+screen}var docBottom=cm.doc.height+paddingVert(display);var atTop=rect.topdocBottom-snapMargin;if(rect.topscreentop+screen){var newTop=Math.min(rect.top,(atBottom?docBottom:rect.bottom)-screen);if(newTop!=screentop){result.scrollTop=newTop}}var screenleft=cm.curOp&&cm.curOp.scrollLeft!=null?cm.curOp.scrollLeft:display.scroller.scrollLeft;var screenw=displayWidth(cm)-(cm.options.fixedGutter?display.gutters.offsetWidth:0);var tooWide=rect.right-rect.left>screenw;if(tooWide){rect.right=rect.left+screenw}if(rect.left<10){result.scrollLeft=0}else if(rect.leftscreenw+screenleft-3){result.scrollLeft=rect.right+(tooWide?0:10)-screenw}return result}function addToScrollTop(cm,top){if(top==null){return}resolveScrollToPos(cm);cm.curOp.scrollTop=(cm.curOp.scrollTop==null?cm.doc.scrollTop:cm.curOp.scrollTop)+top}function ensureCursorVisible(cm){resolveScrollToPos(cm);var cur=cm.getCursor();cm.curOp.scrollToPos={from:cur,to:cur,margin:cm.options.cursorScrollMargin}}function scrollToCoords(cm,x,y){if(x!=null||y!=null){resolveScrollToPos(cm)}if(x!=null){cm.curOp.scrollLeft=x}if(y!=null){cm.curOp.scrollTop=y}}function scrollToRange(cm,range){resolveScrollToPos(cm);cm.curOp.scrollToPos=range}function resolveScrollToPos(cm){var range=cm.curOp.scrollToPos;if(range){cm.curOp.scrollToPos=null;var 
from=estimateCoords(cm,range.from),to=estimateCoords(cm,range.to);scrollToCoordsRange(cm,from,to,range.margin)}}function scrollToCoordsRange(cm,from,to,margin){var sPos=calculateScrollPos(cm,{left:Math.min(from.left,to.left),top:Math.min(from.top,to.top)-margin,right:Math.max(from.right,to.right),bottom:Math.max(from.bottom,to.bottom)+margin});scrollToCoords(cm,sPos.scrollLeft,sPos.scrollTop)}function updateScrollTop(cm,val){if(Math.abs(cm.doc.scrollTop-val)<2){return}if(!gecko){updateDisplaySimple(cm,{top:val})}setScrollTop(cm,val,true);if(gecko){updateDisplaySimple(cm)}startWorker(cm,100)}function setScrollTop(cm,val,forceScroll){val=Math.max(0,Math.min(cm.display.scroller.scrollHeight-cm.display.scroller.clientHeight,val));if(cm.display.scroller.scrollTop==val&&!forceScroll){return}cm.doc.scrollTop=val;cm.display.scrollbars.setScrollTop(val);if(cm.display.scroller.scrollTop!=val){cm.display.scroller.scrollTop=val}}function setScrollLeft(cm,val,isScroller,forceScroll){val=Math.max(0,Math.min(val,cm.display.scroller.scrollWidth-cm.display.scroller.clientWidth));if((isScroller?val==cm.doc.scrollLeft:Math.abs(cm.doc.scrollLeft-val)<2)&&!forceScroll){return}cm.doc.scrollLeft=val;alignHorizontally(cm);if(cm.display.scroller.scrollLeft!=val){cm.display.scroller.scrollLeft=val}cm.display.scrollbars.setScrollLeft(val)}function measureForScrollbars(cm){var d=cm.display,gutterW=d.gutters.offsetWidth;var docH=Math.round(cm.doc.height+paddingVert(cm.display));return{clientHeight:d.scroller.clientHeight,viewHeight:d.wrapper.clientHeight,scrollWidth:d.scroller.scrollWidth,clientWidth:d.scroller.clientWidth,viewWidth:d.wrapper.clientWidth,barLeft:cm.options.fixedGutter?gutterW:0,docHeight:docH,scrollHeight:docH+scrollGap(cm)+d.barHeight,nativeBarWidth:d.nativeBarWidth,gutterWidth:gutterW}}var NativeScrollbars=function(place,scroll,cm){this.cm=cm;var vert=this.vert=elt("div",[elt("div",null,null,"min-width: 1px")],"CodeMirror-vscrollbar");var horiz=this.horiz=elt("div",[elt("div",null,null,"height: 100%; min-height: 1px")],"CodeMirror-hscrollbar");vert.tabIndex=horiz.tabIndex=-1;place(vert);place(horiz);on(vert,"scroll",function(){if(vert.clientHeight){scroll(vert.scrollTop,"vertical")}});on(horiz,"scroll",function(){if(horiz.clientWidth){scroll(horiz.scrollLeft,"horizontal")}});this.checkedZeroWidth=false;if(ie&&ie_version<8){this.horiz.style.minHeight=this.vert.style.minWidth="18px"}};NativeScrollbars.prototype.update=function(measure){var needsH=measure.scrollWidth>measure.clientWidth+1;var needsV=measure.scrollHeight>measure.clientHeight+1;var sWidth=measure.nativeBarWidth;if(needsV){this.vert.style.display="block";this.vert.style.bottom=needsH?sWidth+"px":"0";var totalHeight=measure.viewHeight-(needsH?sWidth:0);this.vert.firstChild.style.height=Math.max(0,measure.scrollHeight-measure.clientHeight+totalHeight)+"px"}else{this.vert.style.display="";this.vert.firstChild.style.height="0"}if(needsH){this.horiz.style.display="block";this.horiz.style.right=needsV?sWidth+"px":"0";this.horiz.style.left=measure.barLeft+"px";var 
totalWidth=measure.viewWidth-measure.barLeft-(needsV?sWidth:0);this.horiz.firstChild.style.width=Math.max(0,measure.scrollWidth-measure.clientWidth+totalWidth)+"px"}else{this.horiz.style.display="";this.horiz.firstChild.style.width="0"}if(!this.checkedZeroWidth&&measure.clientHeight>0){if(sWidth==0){this.zeroWidthHack()}this.checkedZeroWidth=true}return{right:needsV?sWidth:0,bottom:needsH?sWidth:0}};NativeScrollbars.prototype.setScrollLeft=function(pos){if(this.horiz.scrollLeft!=pos){this.horiz.scrollLeft=pos}if(this.disableHoriz){this.enableZeroWidthBar(this.horiz,this.disableHoriz,"horiz")}};NativeScrollbars.prototype.setScrollTop=function(pos){if(this.vert.scrollTop!=pos){this.vert.scrollTop=pos}if(this.disableVert){this.enableZeroWidthBar(this.vert,this.disableVert,"vert")}};NativeScrollbars.prototype.zeroWidthHack=function(){var w=mac&&!mac_geMountainLion?"12px":"18px";this.horiz.style.height=this.vert.style.width=w;this.horiz.style.pointerEvents=this.vert.style.pointerEvents="none";this.disableHoriz=new Delayed;this.disableVert=new Delayed};NativeScrollbars.prototype.enableZeroWidthBar=function(bar,delay,type){bar.style.pointerEvents="auto";function maybeDisable(){var box=bar.getBoundingClientRect();var elt=type=="vert"?document.elementFromPoint(box.right-1,(box.top+box.bottom)/2):document.elementFromPoint((box.right+box.left)/2,box.bottom-1);if(elt!=bar){bar.style.pointerEvents="none"}else{delay.set(1e3,maybeDisable)}}delay.set(1e3,maybeDisable)};NativeScrollbars.prototype.clear=function(){var parent=this.horiz.parentNode;parent.removeChild(this.horiz);parent.removeChild(this.vert)};var NullScrollbars=function(){};NullScrollbars.prototype.update=function(){return{bottom:0,right:0}};NullScrollbars.prototype.setScrollLeft=function(){};NullScrollbars.prototype.setScrollTop=function(){};NullScrollbars.prototype.clear=function(){};function updateScrollbars(cm,measure){if(!measure){measure=measureForScrollbars(cm)}var startWidth=cm.display.barWidth,startHeight=cm.display.barHeight;updateScrollbarsInner(cm,measure);for(var i=0;i<4&&startWidth!=cm.display.barWidth||startHeight!=cm.display.barHeight;i++){if(startWidth!=cm.display.barWidth&&cm.options.lineWrapping){updateHeightsInViewport(cm)}updateScrollbarsInner(cm,measureForScrollbars(cm));startWidth=cm.display.barWidth;startHeight=cm.display.barHeight}}function updateScrollbarsInner(cm,measure){var d=cm.display;var sizes=d.scrollbars.update(measure);d.sizer.style.paddingRight=(d.barWidth=sizes.right)+"px";d.sizer.style.paddingBottom=(d.barHeight=sizes.bottom)+"px";d.heightForcer.style.borderBottom=sizes.bottom+"px solid transparent";if(sizes.right&&sizes.bottom){d.scrollbarFiller.style.display="block";d.scrollbarFiller.style.height=sizes.bottom+"px";d.scrollbarFiller.style.width=sizes.right+"px"}else{d.scrollbarFiller.style.display=""}if(sizes.bottom&&cm.options.coverGutterNextToScrollbar&&cm.options.fixedGutter){d.gutterFiller.style.display="block";d.gutterFiller.style.height=sizes.bottom+"px";d.gutterFiller.style.width=measure.gutterWidth+"px"}else{d.gutterFiller.style.display=""}}var scrollbarModel={native:NativeScrollbars,null:NullScrollbars};function initScrollbars(cm){if(cm.display.scrollbars){cm.display.scrollbars.clear();if(cm.display.scrollbars.addClass){rmClass(cm.display.wrapper,cm.display.scrollbars.addClass)}}cm.display.scrollbars=new 
scrollbarModel[cm.options.scrollbarStyle](function(node){cm.display.wrapper.insertBefore(node,cm.display.scrollbarFiller);on(node,"mousedown",function(){if(cm.state.focused){setTimeout(function(){return cm.display.input.focus()},0)}});node.setAttribute("cm-not-content","true")},function(pos,axis){if(axis=="horizontal"){setScrollLeft(cm,pos)}else{updateScrollTop(cm,pos)}},cm);if(cm.display.scrollbars.addClass){addClass(cm.display.wrapper,cm.display.scrollbars.addClass)}}var nextOpId=0;function startOperation(cm){cm.curOp={cm:cm,viewChanged:false,startHeight:cm.doc.height,forceUpdate:false,updateInput:0,typing:false,changeObjs:null,cursorActivityHandlers:null,cursorActivityCalled:0,selectionChanged:false,updateMaxLine:false,scrollLeft:null,scrollTop:null,scrollToPos:null,focus:false,id:++nextOpId};pushOperation(cm.curOp)}function endOperation(cm){var op=cm.curOp;if(op){finishOperation(op,function(group){for(var i=0;i=display.viewTo)||display.maxLineChanged&&cm.options.lineWrapping;op.update=op.mustUpdate&&new DisplayUpdate(cm,op.mustUpdate&&{top:op.scrollTop,ensure:op.scrollToPos},op.forceUpdate)}function endOperation_W1(op){op.updatedDisplay=op.mustUpdate&&updateDisplayIfNeeded(op.cm,op.update)}function endOperation_R2(op){var cm=op.cm,display=cm.display;if(op.updatedDisplay){updateHeightsInViewport(cm)}op.barMeasure=measureForScrollbars(cm);if(display.maxLineChanged&&!cm.options.lineWrapping){op.adjustWidthTo=measureChar(cm,display.maxLine,display.maxLine.text.length).left+3;cm.display.sizerWidth=op.adjustWidthTo;op.barMeasure.scrollWidth=Math.max(display.scroller.clientWidth,display.sizer.offsetLeft+op.adjustWidthTo+scrollGap(cm)+cm.display.barWidth);op.maxScrollLeft=Math.max(0,display.sizer.offsetLeft+op.adjustWidthTo-displayWidth(cm))}if(op.updatedDisplay||op.selectionChanged){op.preparedSelection=display.input.prepareSelection()}}function endOperation_W2(op){var cm=op.cm;if(op.adjustWidthTo!=null){cm.display.sizer.style.minWidth=op.adjustWidthTo+"px";if(op.maxScrollLeft