From 2b847240bb37cf21dc0e8d87f206e1c089670d72 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 25 Jun 2019 05:02:42 -0700
Subject: [PATCH 0001/2124] New experimental Row() for templates, refs #521
---
datasette/views/table.py | 43 +++++++++++++++++++++++++++++++++-------
1 file changed, 36 insertions(+), 7 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 06be5671..f2f5fda0 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -33,6 +33,35 @@ LINK_WITH_LABEL = (
LINK_WITH_VALUE = '{id}'
+class Row:
+ def __init__(self, cells):
+ self.cells = cells
+
+ def __iter__(self):
+ return iter(self.cells)
+
+ def __getitem__(self, key):
+ for cell in self.cells:
+ if cell["column"] == key:
+ return cell["value"]
+ raise KeyError
+
+ def raw(self, key):
+ for cell in self.cells:
+ if cell["column"] == key:
+ return cell["raw"]
+ return None
+
+ def __str__(self):
+ d = {
+ key: self[key]
+ for key in [
+ c["column"] for c in self.cells if not c.get("is_special_link_column")
+ ]
+ }
+ return json.dumps(d, default=repr, indent=2)
+
+
class RowTableShared(DataView):
async def sortable_columns_for_table(self, database, table, use_rowid):
db = self.ds.databases[database]
@@ -76,18 +105,18 @@ class RowTableShared(DataView):
# Unless we are a view, the first column is a link - either to the rowid
# or to the simple or compound primary key
if link_column:
+ is_special_link_column = len(pks) != 1
+ pk_path = path_from_row_pks(row, pks, not pks, False)
cells.append(
{
"column": pks[0] if len(pks) == 1 else "Link",
+ "is_special_link_column": is_special_link_column,
+ "raw": pk_path,
"value": jinja2.Markup(
'<a href="/{database}/{table}/{flat_pks_quoted}">{flat_pks}</a>'.format(
database=database,
table=urllib.parse.quote_plus(table),
- flat_pks=str(
- jinja2.escape(
- path_from_row_pks(row, pks, not pks, False)
- )
- ),
+ flat_pks=str(jinja2.escape(pk_path)),
flat_pks_quoted=path_from_row_pks(row, pks, not pks),
)
),
@@ -159,8 +188,8 @@ class RowTableShared(DataView):
if truncate_cells and len(display_value) > truncate_cells:
display_value = display_value[:truncate_cells] + u"\u2026"
- cells.append({"column": column, "value": display_value})
- cell_rows.append(cells)
+ cells.append({"column": column, "value": display_value, "raw": value})
+ cell_rows.append(Row(cells))
if link_column:
# Add the link column header.
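A minimal sketch of how the new Row wrapper behaves, with the class reproduced from the diff above so it runs standalone (the cell dicts are made up, but mirror the shape built in table.py)::

    class Row:
        def __init__(self, cells):
            self.cells = cells

        def __iter__(self):
            return iter(self.cells)

        def __getitem__(self, key):
            for cell in self.cells:
                if cell["column"] == key:
                    return cell["value"]
            raise KeyError

        def raw(self, key):
            for cell in self.cells:
                if cell["column"] == key:
                    return cell["raw"]
            return None

    row = Row([
        {"column": "id", "value": "1", "raw": 1},
        {"column": "title", "value": "Hello", "raw": "Hello"},
    ])
    print(row["title"])                # rendered display value: "Hello"
    print(row.raw("id"))               # underlying database value: 1
    print([c["column"] for c in row])  # __iter__ yields the cell dicts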
From 9e97b725f11be3f4dca077fe5569078a62ec2761 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 25 Jun 2019 05:08:04 -0700
Subject: [PATCH 0002/2124] pip install -e .[docs] for docs dependencies
---
docs/contributing.rst | 12 +++++-------
setup.py | 7 ++-----
2 files changed, 7 insertions(+), 12 deletions(-)
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 993d01d8..27e3b0db 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -91,7 +91,7 @@ You can build it locally by installing ``sphinx`` and ``sphinx_rtd_theme`` in yo
source venv/bin/activate
# Install the dependencies needed to build the docs
- pip install sphinx sphinx_rtd_theme
+ pip install -e .[docs]
# Now build the docs
cd docs/
@@ -103,16 +103,14 @@ This will create the HTML version of the documentation in ``docs/_build/html``.
Any time you make changes to a ``.rst`` file you can re-run ``make html`` to update the built documents, then refresh them in your browser.
-For added productivity, you can run Sphinx in auto-build mode. This will run a local webserver serving the docs that automatically rebuilds them and refreshes the page any time you hit save in your editor.
+For added productivity, you can use use `sphinx-autobuild `__ to run Sphinx in auto-build mode. This will run a local webserver serving the docs that automatically rebuilds them and refreshes the page any time you hit save in your editor.
-To enable auto-build mode, first install `sphinx-autobuild `__::
-
- pip install sphinx-autobuild
-
-Now start the server by running::
+``sphinx-autobuild`` will have been installed when you ran ``pip install -e .[docs]``. In your ``docs/`` directory you can start the server by running the following::
make livehtml
+Now browse to ``http://localhost:8000/`` to view the documentation. Any edits you make should be instantly relected in your browser.
+
.. _contributing_release:
Release process
diff --git a/setup.py b/setup.py
index edb8d51e..fdbb948e 100644
--- a/setup.py
+++ b/setup.py
@@ -56,10 +56,7 @@ setup(
""",
setup_requires=["pytest-runner"],
extras_require={
- "docs": [
- "sphinx_rtd_theme",
- "sphinx-autobuild",
- ],
+ "docs": ["sphinx_rtd_theme", "sphinx-autobuild"],
"test": [
"pytest==4.6.1",
"pytest-asyncio==0.10.0",
@@ -67,7 +64,7 @@ setup(
"beautifulsoup4==4.6.1",
"asgiref==3.1.2",
]
- + maybe_black
+ + maybe_black,
},
tests_require=["datasette[test]"],
classifiers=[
From 43a5567be8bb8f963b2ef1507fa4ed26209840c2 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 25 Jun 2019 05:21:10 -0700
Subject: [PATCH 0003/2124] Default to raw value, use Row.display(key) for
display, refs #521
---
datasette/views/table.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index f2f5fda0..c41bc305 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -43,13 +43,13 @@ class Row:
def __getitem__(self, key):
for cell in self.cells:
if cell["column"] == key:
- return cell["value"]
+ return cell["raw"]
raise KeyError
- def raw(self, key):
+ def display(self, key):
for cell in self.cells:
if cell["column"] == key:
- return cell["raw"]
+ return cell["value"]
return None
def __str__(self):
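After this patch the template-facing contract flips: ``row[key]`` returns the raw database value and ``row.display(key)`` returns the rendered one. A standalone sketch with hypothetical cell data::

    class Row:
        def __init__(self, cells):
            self.cells = cells

        def __getitem__(self, key):  # now returns the raw value
            for cell in self.cells:
                if cell["column"] == key:
                    return cell["raw"]
            raise KeyError

        def display(self, key):  # renamed from raw(); the rendered value
            for cell in self.cells:
                if cell["column"] == key:
                    return cell["value"]
            return None

    row = Row([{"column": "city_id", "raw": 1, "value": "San Francisco"}])
    assert row["city_id"] == 1
    assert row.display("city_id") == "San Francisco"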
From 76882830548e16905348ee75acb0044cb8e1fd20 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 25 Jun 2019 08:36:39 -0700
Subject: [PATCH 0004/2124] Typo
---
docs/contributing.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 27e3b0db..43834edc 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -109,7 +109,7 @@ For added productivity, you can use use `sphinx-autobuild
From: Simon Willison
Date: Tue, 2 Jul 2019 17:50:45 -0700
Subject: [PATCH 0005/2124] Rename _rows_and_columns.html to _table.html, refs
#521
---
.../{_rows_and_columns.html => _table.html} | 0
datasette/templates/row.html | 2 +-
datasette/templates/table.html | 2 +-
datasette/views/table.py | 16 +++++++--------
docs/custom_templates.rst | 20 +++++++++----------
5 files changed, 20 insertions(+), 20 deletions(-)
rename datasette/templates/{_rows_and_columns.html => _table.html} (100%)
diff --git a/datasette/templates/_rows_and_columns.html b/datasette/templates/_table.html
similarity index 100%
rename from datasette/templates/_rows_and_columns.html
rename to datasette/templates/_table.html
diff --git a/datasette/templates/row.html b/datasette/templates/row.html
index baffaf96..bda1e4e2 100644
--- a/datasette/templates/row.html
+++ b/datasette/templates/row.html
@@ -24,7 +24,7 @@
This data as {% for name, url in renderers.items() %}{{ name }}{{ ", " if not loop.last }}{% endfor %}
-{% include custom_rows_and_columns_templates %}
+{% include custom_table_templates %}
{% if foreign_key_tables %}
Links from other tables
diff --git a/datasette/templates/table.html b/datasette/templates/table.html
index 5ba3ff6d..2287e901 100644
--- a/datasette/templates/table.html
+++ b/datasette/templates/table.html
@@ -145,7 +145,7 @@
{% endif %}
-{% include custom_rows_and_columns_templates %}
+{% include custom_table_templates %}
{% if next_url %}
diff --git a/datasette/views/table.py b/datasette/views/table.py
index c41bc305..8ba3abe4 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -744,14 +744,14 @@ class TableView(RowTableShared):
"sort": sort,
"sort_desc": sort_desc,
"disable_sort": is_view,
- "custom_rows_and_columns_templates": [
- "_rows_and_columns-{}-{}.html".format(
+ "custom_table_templates": [
+ "_table-{}-{}.html".format(
to_css_class(database), to_css_class(table)
),
- "_rows_and_columns-table-{}-{}.html".format(
+ "_table-table-{}-{}.html".format(
to_css_class(database), to_css_class(table)
),
- "_rows_and_columns.html",
+ "_table.html",
],
"metadata": metadata,
"view_definition": await db.get_view_definition(table),
@@ -828,14 +828,14 @@ class RowView(RowTableShared):
),
"display_columns": display_columns,
"display_rows": display_rows,
- "custom_rows_and_columns_templates": [
- "_rows_and_columns-{}-{}.html".format(
+ "custom_table_templates": [
+ "_table-{}-{}.html".format(
to_css_class(database), to_css_class(table)
),
- "_rows_and_columns-row-{}-{}.html".format(
+ "_table-row-{}-{}.html".format(
to_css_class(database), to_css_class(table)
),
- "_rows_and_columns.html",
+ "_table.html",
],
"metadata": (self.ds.metadata("databases") or {})
.get(database, {})
diff --git a/docs/custom_templates.rst b/docs/custom_templates.rst
index b0863381..1dfaf892 100644
--- a/docs/custom_templates.rst
+++ b/docs/custom_templates.rst
@@ -145,14 +145,14 @@ The lookup rules Datasette uses are as follows::
row.html
Rows and columns include on table page:
- _rows_and_columns-table-mydatabase-mytable.html
- _rows_and_columns-mydatabase-mytable.html
- _rows_and_columns.html
+ _table-table-mydatabase-mytable.html
+ _table-mydatabase-mytable.html
+ _table.html
Rows and columns include on row page:
- _rows_and_columns-row-mydatabase-mytable.html
- _rows_and_columns-mydatabase-mytable.html
- _rows_and_columns.html
+ _table-row-mydatabase-mytable.html
+ _table-mydatabase-mytable.html
+ _table.html
If a table name has spaces or other unexpected characters in it, the template
filename will follow the same rules as our custom ```` CSS classes - for
@@ -189,16 +189,16 @@ content you can do so by creating a ``row.html`` template like this::
Note the ``default:row.html`` template name, which ensures Jinja will inherit
from the default template.
-The ``_rows_and_columns.html`` template is included on both the row and the table
-page, and displays the content of the row. The default ``_rows_and_columns.html`` template
-`can be seen here `_.
+The ``_table.html`` template is included on both the row and the table
+page, and displays the content of the row. The default ``_table.html`` template
+`can be seen here `_.
You can provide a custom template that applies to all of your databases and
tables, or you can provide custom templates for specific tables using the
template naming scheme described above.
Say for example you want to output a certain column as unescaped HTML. You could
-provide a custom ``_rows_and_columns.html`` template like this::
+provide a custom ``_table.html`` template like this::
+{% endfor %}
From 1add905532b7bc4f681318b8f22b9b74cca2b2a0 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 2 Jul 2019 20:13:34 -0700
Subject: [PATCH 0007/2124] Updated custom template docs, refs #521
---
docs/custom_templates.rst | 52 ++++++++++++++++-----------------------
1 file changed, 21 insertions(+), 31 deletions(-)
diff --git a/docs/custom_templates.rst b/docs/custom_templates.rst
index 1dfaf892..47271542 100644
--- a/docs/custom_templates.rst
+++ b/docs/custom_templates.rst
@@ -144,12 +144,12 @@ The lookup rules Datasette uses are as follows::
row-mydatabase-mytable.html
row.html
- Rows and columns include on table page:
+ Table of rows and columns include on table page:
_table-table-mydatabase-mytable.html
_table-mydatabase-mytable.html
_table.html
- Rows and columns include on row page:
+ Table of rows and columns include on row page:
_table-row-mydatabase-mytable.html
_table-mydatabase-mytable.html
_table.html
@@ -189,38 +189,28 @@ content you can do so by creating a ``row.html`` template like this::
Note the ``default:row.html`` template name, which ensures Jinja will inherit
from the default template.
-The ``_table.html`` template is included on both the row and the table
-page, and displays the content of the row. The default ``_table.html`` template
-`can be seen here `_.
+The ``_table.html`` template is included by both the row and the table pages,
+and a list of rows. The default ``_table.html`` template renders them as an
+HTML template and `can be seen here `_.
You can provide a custom template that applies to all of your databases and
tables, or you can provide custom templates for specific tables using the
template naming scheme described above.
-Say for example you want to output a certain column as unescaped HTML. You could
-provide a custom ``_table.html`` template like this::
+If you want to present your data in a format other than an HTML table, you
+can do so by looping through ``display_rows`` in your own ``_table.html``
+template. You can use ``{{ row["column_name"] }}`` to output the raw value
+of a specific column.
-
+If you want to output the rendered HTML version of a column, including any
+links to foreign keys, you can use ``{{ row.display("column_name") }}``.
+
+Here is an example of a custom ``_table.html`` template::
+
+ {% for row in display_rows %}
+
+
{{ row["title"] }}
+
{{ row["description"] }}
+
Category: {{ row.display("category_id") }}
+
+ {% endfor %}
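A sketch of how a custom ``_table.html`` consumes ``display_rows``, using a stand-in Row class (real templates get ``display_rows`` from Datasette, and ``display()`` returns the rendered cell rather than this invented markup)::

    import jinja2

    class Row(dict):
        def display(self, key):
            # stand-in for the rendered cell (foreign key labels, links etc.)
            return "<em>{}</em>".format(self[key])

    template = jinja2.Template("""
    {% for row in display_rows %}
    <div>
      <h2>{{ row["title"] }}</h2>
      <p>Category: {{ row.display("category_id") }}</p>
    </div>
    {% endfor %}
    """)

    print(template.render(display_rows=[Row(title="First post", category_id=1)]))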
From 4d2fdafe39159c9a8aa83f7e9bfe768bbbbb56a3 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 2 Jul 2019 20:57:28 -0700
Subject: [PATCH 0013/2124] Added asgi_wrapper plugin hook, closes #520
---
datasette/app.py | 5 ++++-
datasette/hookspecs.py | 5 +++++
docs/plugins.rst | 41 +++++++++++++++++++++++++++++++++++++++++
tests/fixtures.py | 23 +++++++++++++++++++++++
tests/test_plugins.py | 5 +++++
5 files changed, 78 insertions(+), 1 deletion(-)
diff --git a/datasette/app.py b/datasette/app.py
index 4a8ead1d..16a29e20 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -651,9 +651,12 @@ class Datasette:
if not database.is_mutable:
await database.table_counts(limit=60 * 60 * 1000)
- return AsgiLifespan(
+ asgi = AsgiLifespan(
AsgiTracer(DatasetteRouter(self, routes)), on_startup=setup_db
)
+ for wrapper in pm.hook.asgi_wrapper(datasette=self):
+ asgi = wrapper(asgi)
+ return asgi
class DatasetteRouter(AsgiRouter):
diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py
index 61523a31..42adaae8 100644
--- a/datasette/hookspecs.py
+++ b/datasette/hookspecs.py
@@ -5,6 +5,11 @@ hookspec = HookspecMarker("datasette")
hookimpl = HookimplMarker("datasette")
+@hookspec
+def asgi_wrapper(datasette):
+ "Returns an ASGI middleware callable to wrap our ASGI application with"
+
+
@hookspec
def prepare_connection(conn):
"Modify SQLite connection in some way e.g. register custom SQL functions"
diff --git a/docs/plugins.rst b/docs/plugins.rst
index bd32b3a6..be335546 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -666,3 +666,44 @@ The plugin hook can then be used to register the new facet class like this:
@hookimpl
def register_facet_classes():
return [SpecialFacet]
+
+
+.. _plugin_asgi_wrapper:
+
+asgi_wrapper(datasette)
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Return an `ASGI `__ middleware wrapper function that will be applied to the Datasette ASGI application.
+
+This is a very powerful hook. You can use it to manipulate the entire Datasette response, or even to configure new URL routes that will be handled by your own custom code.
+
+You can write your ASGI code directly against the low-level specification, or you can use the middleware utilities provided by an ASGI framework such as `Starlette `__.
+
+This example plugin adds a ``x-databases`` HTTP header listing the currently attached databases:
+
+.. code-block:: python
+
+ from datasette import hookimpl
+ from functools import wraps
+
+
+ @hookimpl
+ def asgi_wrapper(datasette):
+ def wrap_with_databases_header(app):
+ @wraps(app)
+ async def add_x_databases_header(scope, receive, send):
+ async def wrapped_send(event):
+ if event["type"] == "http.response.start":
+ original_headers = event.get("headers") or []
+ event = {
+ "type": event["type"],
+ "status": event["status"],
+ "headers": original_headers + [
+ [b"x-databases",
+ ", ".join(datasette.databases.keys()).encode("utf-8")]
+ ],
+ }
+ await send(event)
+ await app(scope, receive, wrapped_send)
+ return add_x_databases_header
+ return wrap_with_databases_header
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 0330c8ed..fab6509e 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -372,6 +372,7 @@ def render_cell(value, column, table, database, datasette):
PLUGIN2 = """
from datasette import hookimpl
+from functools import wraps
import jinja2
import json
@@ -413,6 +414,28 @@ def render_cell(value, database):
label=jinja2.escape(data["label"] or "") or " "
)
)
+
+
+@hookimpl
+def asgi_wrapper(datasette):
+ def wrap_with_databases_header(app):
+ @wraps(app)
+ async def add_x_databases_header(scope, receive, send):
+ async def wrapped_send(event):
+ if event["type"] == "http.response.start":
+ original_headers = event.get("headers") or []
+ event = {
+ "type": event["type"],
+ "status": event["status"],
+ "headers": original_headers + [
+ [b"x-databases",
+ ", ".join(datasette.databases.keys()).encode("utf-8")]
+ ],
+ }
+ await send(event)
+ await app(scope, receive, wrapped_send)
+ return add_x_databases_header
+ return wrap_with_databases_header
"""
TABLES = (
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 56033bdd..9bdd491a 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -162,3 +162,8 @@ def test_plugins_extra_body_script(app_client, path, expected_extra_body_script)
json_data = r.search(app_client.get(path).body.decode("utf8")).group(1)
actual_data = json.loads(json_data)
assert expected_extra_body_script == actual_data
+
+
+def test_plugins_asgi_wrapper(app_client):
+ response = app_client.get("/fixtures")
+ assert "fixtures" == response.headers["x-databases"]
From f0d32da0a9af87bcb15e34e35424f0c0053be83a Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 2 Jul 2019 21:32:55 -0700
Subject: [PATCH 0014/2124] Switch to ~= dependencies, closes #532 (#536)
* Switch to ~= dependencies, closes #532
* Bump click and click-default-group
* imp. is deprecated, use types.ModuleType instead - thanks https://stackoverflow.com/a/32175781
* Upgrade to pytest 5
---
datasette/utils/__init__.py | 4 ++--
setup.py | 26 +++++++++++++-------------
2 files changed, 15 insertions(+), 15 deletions(-)
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 94ccc23e..17a4d595 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -3,7 +3,6 @@ from collections import OrderedDict
import base64
import click
import hashlib
-import imp
import json
import os
import pkg_resources
@@ -11,6 +10,7 @@ import re
import shlex
import tempfile
import time
+import types
import shutil
import urllib
import numbers
@@ -588,7 +588,7 @@ def link_or_copy_directory(src, dst):
def module_from_path(path, name):
# Adapted from http://sayspy.blogspot.com/2011/07/how-to-import-module-from-just-file.html
- mod = imp.new_module(name)
+ mod = types.ModuleType(name)
mod.__file__ = path
with open(path, "r") as file:
code = compile(file.read(), path, "exec", dont_inherit=True)
diff --git a/setup.py b/setup.py
index fdbb948e..254859b0 100644
--- a/setup.py
+++ b/setup.py
@@ -41,14 +41,14 @@ setup(
package_data={"datasette": ["templates/*.html"]},
include_package_data=True,
install_requires=[
- "click>=6.7",
- "click-default-group==1.2",
- "Jinja2==2.10.1",
- "hupper==1.0",
- "pint==0.8.1",
- "pluggy>=0.12.0",
- "uvicorn>=0.8.1",
- "aiofiles==0.4.0",
+ "click~=7.0",
+ "click-default-group~=1.2.1",
+ "Jinja2~=2.10.1",
+ "hupper~=1.0",
+ "pint~=0.8.1",
+ "pluggy~=0.12.0",
+ "uvicorn~=0.8.1",
+ "aiofiles~=0.4.0",
],
entry_points="""
[console_scripts]
@@ -58,11 +58,11 @@ setup(
extras_require={
"docs": ["sphinx_rtd_theme", "sphinx-autobuild"],
"test": [
- "pytest==4.6.1",
- "pytest-asyncio==0.10.0",
- "aiohttp==3.5.3",
- "beautifulsoup4==4.6.1",
- "asgiref==3.1.2",
+ "pytest~=5.0.0",
+ "pytest-asyncio~=0.10.0",
+ "aiohttp~=3.5.3",
+ "beautifulsoup4~=4.6.1",
+ "asgiref~=3.1.2",
]
+ maybe_black,
},
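``~=`` is pip's "compatible release" operator: it pins the release series while allowing upgrades in the final version component. A quick check using the ``packaging`` library, which is not a Datasette dependency and appears here only for illustration::

    from packaging.specifiers import SpecifierSet

    print(SpecifierSet("~=7.0").contains("7.4"))      # True  (>=7.0, <8.0)
    print(SpecifierSet("~=7.0").contains("8.0"))      # False
    print(SpecifierSet("~=1.2.1").contains("1.2.9"))  # True  (>=1.2.1, <1.3.0)
    print(SpecifierSet("~=1.2.1").contains("1.3.0"))  # False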
From a2d45931935f6bb73605a94afedf9e78308c95d6 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 3 Jul 2019 22:36:44 -0700
Subject: [PATCH 0015/2124] Secret plugin configuration options (#539)
Closes #538
---
datasette/app.py | 11 ++++++++++-
docs/plugins.rst | 33 +++++++++++++++++++++++++++++++++
tests/fixtures.py | 10 +++++++++-
tests/test_plugins.py | 15 ++++++++++++++-
4 files changed, 66 insertions(+), 3 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 16a29e20..70bd3c12 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -268,7 +268,16 @@ class Datasette:
)
if plugins is None:
return None
- return plugins.get(plugin_name)
+ plugin_config = plugins.get(plugin_name)
+ # Resolve any $file and $env keys
+ if isinstance(plugin_config, dict):
+ for key, value in plugin_config.items():
+ if isinstance(value, dict):
+ if list(value.keys()) == ["$env"]:
+ plugin_config[key] = os.environ.get(list(value.values())[0])
+ elif list(value.keys()) == ["$file"]:
+ plugin_config[key] = open(list(value.values())[0]).read()
+ return plugin_config
def app_css_hash(self):
if not hasattr(self, "_app_css_hash"):
diff --git a/docs/plugins.rst b/docs/plugins.rst
index be335546..609fa844 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -219,6 +219,39 @@ Here is an example of some plugin configuration for a specific table::
This tells the ``datasette-cluster-map`` plugin which latitude and longitude columns should be used for a table called ``Street_Tree_List`` inside a database file called ``sf-trees.db``.
+Secret configuration values
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Any values embedded in ``metadata.json`` will be visible to anyone who views the ``/-/metadata`` page of your Datasette instance. Some plugins may need configuration that should stay secret - API keys for example. There are two ways in which you can store secret configuration values.
+
+**As environment variables**. If your secret lives in an environment variable that is available to the Datasette process, you can indicate that the configuration value should be read from that environment variable like so::
+
+ {
+ "plugins": {
+ "datasette-auth-github": {
+ "client_secret": {
+ "$env": "GITHUB_CLIENT_SECRET"
+ }
+ }
+ }
+ }
+
+
+**As values in separate files**. Your secrets can also live in files on disk. To specify that a secret should be read from a file, provide the full file path like this::
+
+ {
+ "plugins": {
+ "datasette-auth-github": {
+ "client_secret": {
+ "$file": "/secrets/client-secret"
+ }
+ }
+ }
+ }
+
+Writing plugins that accept configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
When you are writing plugins, you can access plugin configuration like this using the ``datasette.plugin_config()`` method. If you know you need plugin configuration for a specific table, you can access it like this::
plugin_config = datasette.plugin_config(
diff --git a/tests/fixtures.py b/tests/fixtures.py
index fab6509e..db5f06e2 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -15,6 +15,10 @@ import time
from urllib.parse import unquote
+# This temp file is used by one of the plugin config tests
+TEMP_PLUGIN_SECRET_FILE = os.path.join(tempfile.gettempdir(), "plugin-secret")
+
+
class TestResponse:
def __init__(self, status, headers, body):
self.status = status
@@ -246,7 +250,11 @@ METADATA = {
"source_url": "https://github.com/simonw/datasette/blob/master/tests/fixtures.py",
"about": "About Datasette",
"about_url": "https://github.com/simonw/datasette",
- "plugins": {"name-of-plugin": {"depth": "root"}},
+ "plugins": {
+ "name-of-plugin": {"depth": "root"},
+ "env-plugin": {"foo": {"$env": "FOO_ENV"}},
+ "file-plugin": {"foo": {"$file": TEMP_PLUGIN_SECRET_FILE}},
+ },
"databases": {
"fixtures": {
"description": "Test tables description",
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 9bdd491a..f42eebd7 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -1,7 +1,8 @@
from bs4 import BeautifulSoup as Soup
-from .fixtures import app_client # noqa
+from .fixtures import app_client, make_app_client, TEMP_PLUGIN_SECRET_FILE # noqa
import base64
import json
+import os
import re
import pytest
import urllib
@@ -125,6 +126,18 @@ def test_plugin_config(app_client):
assert None is app_client.ds.plugin_config("unknown-plugin")
+def test_plugin_config_env(app_client):
+ os.environ["FOO_ENV"] = "FROM_ENVIRONMENT"
+ assert {"foo": "FROM_ENVIRONMENT"} == app_client.ds.plugin_config("env-plugin")
+ del os.environ["FOO_ENV"]
+
+
+def test_plugin_config_file(app_client):
+ open(TEMP_PLUGIN_SECRET_FILE, "w").write("FROM_FILE")
+ assert {"foo": "FROM_FILE"} == app_client.ds.plugin_config("file-plugin")
+ os.remove(TEMP_PLUGIN_SECRET_FILE)
+
+
@pytest.mark.parametrize(
"path,expected_extra_body_script",
[
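A standalone re-creation of the ``$env``/``$file`` resolution added above (a sketch, not the actual Datasette method)::

    import os

    def resolve(plugin_config):
        for key, value in plugin_config.items():
            if isinstance(value, dict):
                if list(value.keys()) == ["$env"]:
                    plugin_config[key] = os.environ.get(list(value.values())[0])
                elif list(value.keys()) == ["$file"]:
                    plugin_config[key] = open(list(value.values())[0]).read()
        return plugin_config

    os.environ["GITHUB_CLIENT_SECRET"] = "its-a-secret"
    print(resolve({"client_secret": {"$env": "GITHUB_CLIENT_SECRET"}}))
    # {'client_secret': 'its-a-secret'}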
From 25ff0a8ba6b2e3247a66048ad173ba5ed8a38b80 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 3 Jul 2019 22:47:45 -0700
Subject: [PATCH 0016/2124] Fix for accidentally leaking secrets in
/-/metadata, closes #538
---
datasette/app.py | 9 ++++++---
tests/test_plugins.py | 8 ++++++++
2 files changed, 14 insertions(+), 3 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 70bd3c12..56b60533 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -271,12 +271,15 @@ class Datasette:
plugin_config = plugins.get(plugin_name)
# Resolve any $file and $env keys
if isinstance(plugin_config, dict):
- for key, value in plugin_config.items():
+ # Create a copy so we don't mutate the version visible at /-/metadata.json
+ plugin_config_copy = dict(plugin_config)
+ for key, value in plugin_config_copy.items():
if isinstance(value, dict):
if list(value.keys()) == ["$env"]:
- plugin_config[key] = os.environ.get(list(value.values())[0])
+ plugin_config_copy[key] = os.environ.get(list(value.values())[0])
elif list(value.keys()) == ["$file"]:
- plugin_config[key] = open(list(value.values())[0]).read()
+ plugin_config_copy[key] = open(list(value.values())[0]).read()
+ return plugin_config_copy
return plugin_config
def app_css_hash(self):
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index f42eebd7..9af2a430 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -129,12 +129,20 @@ def test_plugin_config(app_client):
def test_plugin_config_env(app_client):
os.environ["FOO_ENV"] = "FROM_ENVIRONMENT"
assert {"foo": "FROM_ENVIRONMENT"} == app_client.ds.plugin_config("env-plugin")
+ # Ensure secrets aren't visible in /-/metadata.json
+ metadata = app_client.get("/-/metadata.json")
+ assert {"foo": {"$env": "FOO_ENV"}} == metadata.json["plugins"]["env-plugin"]
del os.environ["FOO_ENV"]
def test_plugin_config_file(app_client):
open(TEMP_PLUGIN_SECRET_FILE, "w").write("FROM_FILE")
assert {"foo": "FROM_FILE"} == app_client.ds.plugin_config("file-plugin")
+ # Ensure secrets aren't visible in /-/metadata.json
+ metadata = app_client.get("/-/metadata.json")
+ assert {"foo": {"$file": TEMP_PLUGIN_SECRET_FILE}} == metadata.json["plugins"][
+ "file-plugin"
+ ]
os.remove(TEMP_PLUGIN_SECRET_FILE)
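Note that ``dict(plugin_config)`` is a shallow copy, which is enough here because the loop rebinds top-level keys rather than mutating the nested ``{"$env": ...}`` dicts, so the original stays intact for ``/-/metadata.json``. A sketch of why::

    original = {"client_secret": {"$env": "GITHUB_CLIENT_SECRET"}}
    copy = dict(original)
    copy["client_secret"] = "resolved-secret"  # rebinds the key in the copy only
    assert original["client_secret"] == {"$env": "GITHUB_CLIENT_SECRET"}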
From 107d47567dedd472eebec7f35bc34f5b58285ba8 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 3 Jul 2019 22:56:13 -0700
Subject: [PATCH 0017/2124] Black
---
datasette/app.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/datasette/app.py b/datasette/app.py
index 56b60533..1a41c1c6 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -276,7 +276,9 @@ class Datasette:
for key, value in plugin_config_copy.items():
if isinstance(value, dict):
if list(value.keys()) == ["$env"]:
- plugin_config_copy[key] = os.environ.get(list(value.values())[0])
+ plugin_config_copy[key] = os.environ.get(
+ list(value.values())[0]
+ )
elif list(value.keys()) == ["$file"]:
plugin_config_copy[key] = open(list(value.values())[0]).read()
return plugin_config_copy
From 16fdabda978fa659bed0e8670a385dab3c2cd197 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 4 Jul 2019 07:03:02 -0700
Subject: [PATCH 0018/2124] Better robustness in face of missing raw_path
---
datasette/utils/asgi.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py
index fdf330ae..38ffc072 100644
--- a/datasette/utils/asgi.py
+++ b/datasette/utils/asgi.py
@@ -88,7 +88,10 @@ class AsgiRouter:
async def __call__(self, scope, receive, send):
# Because we care about "foo/bar" v.s. "foo%2Fbar" we decode raw_path ourselves
- path = scope["raw_path"].decode("ascii")
+ path = scope["path"]
+ raw_path = scope.get("raw_path")
+ if raw_path:
+ path = raw_path.decode("ascii")
for regex, view in self.routes:
match = regex.match(path)
if match is not None:
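The fallback is needed because ``raw_path`` is optional in the ASGI spec and some servers omit it. A sketch of the lookup, with made-up scopes::

    def path_from_scope(scope):
        path = scope["path"]
        raw_path = scope.get("raw_path")
        if raw_path:
            path = raw_path.decode("ascii")
        return path

    print(path_from_scope({"path": "/foo/bar"}))
    # /foo/bar
    print(path_from_scope({"path": "/foo/bar", "raw_path": b"/foo%2Fbar"}))
    # /foo%2Fbar  (percent-encoding preserved, so routing can tell the two apart)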
From a18e0964ecd04593f227616538a80dee08768057 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 5 Jul 2019 13:34:41 -0700
Subject: [PATCH 0019/2124] Refactor templates for better top nav
customization, refs #540
---
datasette/static/app.css | 18 +++++++++++++++++-
datasette/templates/_footer.html | 21 +++++++++++++++++++++
datasette/templates/base.html | 28 +++++-----------------------
datasette/templates/database.html | 8 +++++++-
datasette/templates/index.html | 3 ++-
datasette/templates/row.html | 11 +++++++++--
datasette/templates/table.html | 9 ++++++++-
7 files changed, 69 insertions(+), 29 deletions(-)
create mode 100644 datasette/templates/_footer.html
diff --git a/datasette/static/app.css b/datasette/static/app.css
index 468c15f6..76ecdd8d 100644
--- a/datasette/static/app.css
+++ b/datasette/static/app.css
@@ -1,5 +1,6 @@
body {
- margin: 0 1em;
+ margin: 0;
+ padding: 0;
font-family: "Helvetica Neue", sans-serif;
font-size: 1rem;
font-weight: 400;
@@ -8,6 +9,9 @@ body {
text-align: left;
background-color: #fff;
}
+.bd {
+ margin: 0 1em;
+}
table {
border-collapse: collapse;
}
@@ -82,9 +86,21 @@ table a:visited {
.hd {
border-bottom: 2px solid #ccc;
+ padding: 0.2em 1em;
+ background-color: #eee;
+ overflow: hidden;
+ box-sizing: border-box;
+}
+.hd p {
+ margin: 0;
+ padding: 0;
+}
+.hd .crumbs {
+ float: left;
}
.ft {
margin: 1em 0;
+ padding: 0.5em 1em 0 1em;
border-top: 1px solid #ccc;
font-size: 0.8em;
}
diff --git a/datasette/templates/_footer.html b/datasette/templates/_footer.html
new file mode 100644
index 00000000..f930f445
--- /dev/null
+++ b/datasette/templates/_footer.html
@@ -0,0 +1,21 @@
+Powered by Datasette
+{% if query_ms %}· Query took {{ query_ms|round(3) }}ms{% endif %}
+{% if metadata %}
+ {% if metadata.license or metadata.license_url %}· Data license:
+ {% if metadata.license_url %}
+ {{ metadata.license or metadata.license_url }}
+ {% else %}
+ {{ metadata.license }}
+ {% endif %}
+ {% endif %}
+ {% if metadata.source or metadata.source_url %}·
+ Data source: {% if metadata.source_url %}
+
+ {% endif %}{{ metadata.source or metadata.source_url }}{% if metadata.source_url %}{% endif %}
+ {% endif %}
+ {% if metadata.about or metadata.about_url %}·
+ About: {% if metadata.about_url %}
+
+ {% endif %}{{ metadata.about or metadata.about_url }}{% if metadata.about_url %}{% endif %}
+ {% endif %}
+{% endif %}
diff --git a/datasette/templates/base.html b/datasette/templates/base.html
index 0ea41d7e..d26043f8 100644
--- a/datasette/templates/base.html
+++ b/datasette/templates/base.html
@@ -14,33 +14,15 @@
+
+
+
{% for table in database.tables_and_views_truncated %}{{ table.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}
+
{% for table in database.tables_and_views_truncated %}{{ table.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}
',
+ ],
]
assert expected == [
[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")
@@ -611,7 +616,7 @@ def test_table_html_foreign_key_links(app_client):
def test_table_html_disable_foreign_key_links_with_labels(app_client):
- response = app_client.get("/fixtures/foreign_key_references?_labels=off")
+ response = app_client.get("/fixtures/foreign_key_references?_labels=off&_size=1")
assert response.status == 200
table = Soup(response.body, "html.parser").find("table")
expected = [
From c3181d9a840dff7be8c990b21f5749db393a4ea0 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 2 Nov 2019 15:47:20 -0700
Subject: [PATCH 0071/2124] Release notes for 0.30.2
---
docs/changelog.rst | 13 ++++++++++++-
1 file changed, 12 insertions(+), 1 deletion(-)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 8ac32c45..f4761efe 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,16 @@
Changelog
=========
+.. _v0_30_2:
+
+0.30.2 (2019-11-02)
+-------------------
+
+- ``/-/plugins`` page now uses distribution name e.g. ``datasette-cluster-map`` instead of the name of the underlying Python package (``datasette_cluster_map``) (`#606 `__)
+- Array faceting is now only suggested for columns that contain arrays of strings (`#562 `__)
+- Better documentation for the ``--host`` argument (`#574 `__)
+- Don't show ``None`` with a broken link for the label on a nullable foreign key (`#406 `__)
+
.. _v0_30_1:
0.30.1 (2019-10-30)
@@ -14,6 +24,7 @@ Changelog
.. _v0_30:
+
0.30 (2019-10-18)
-----------------
@@ -82,7 +93,7 @@ Two new plugins take advantage of this hook:
New plugin hook: extra_template_vars
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The :ref:`plugin_extra_template_vars` plugin hook allows plugins to inject their own additional variables into the Datasette template context. This can be used in conjunction with custom templates to customize the Datasette interface. `datasette-auth-github `__ uses this hook to add custom HTML to the new top navigation bar (which is designed to be modified by plugins, see `#540 `__).
+The :ref:`plugin_hook_extra_template_vars` plugin hook allows plugins to inject their own additional variables into the Datasette template context. This can be used in conjunction with custom templates to customize the Datasette interface. `datasette-auth-github `__ uses this hook to add custom HTML to the new top navigation bar (which is designed to be modified by plugins, see `#540 `__).
Secret plugin configuration options
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
From 2bf7ce5f517d772a16d7855a35a8a75d4456aad7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 2 Nov 2019 16:12:46 -0700
Subject: [PATCH 0072/2124] Fix CSV export for nullable foreign keys, closes
#612
---
datasette/views/base.py | 12 ++++++++----
tests/test_csv.py | 15 +++++++++++++++
2 files changed, 23 insertions(+), 4 deletions(-)
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 1568b084..94945304 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -330,10 +330,14 @@ class DataView(BaseView):
else:
# Look for {"value": "label": } dicts and expand
new_row = []
- for cell in row:
- if isinstance(cell, dict):
- new_row.append(cell["value"])
- new_row.append(cell["label"])
+ for heading, cell in zip(data["columns"], row):
+ if heading in expanded_columns:
+ if cell is None:
+ new_row.extend(("", ""))
+ else:
+ assert isinstance(cell, dict)
+ new_row.append(cell["value"])
+ new_row.append(cell["label"])
else:
new_row.append(cell)
await writer.writerow(new_row)
diff --git a/tests/test_csv.py b/tests/test_csv.py
index b148b6db..13aca489 100644
--- a/tests/test_csv.py
+++ b/tests/test_csv.py
@@ -41,6 +41,14 @@ pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,com
"\n", "\r\n"
)
+EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV = """
+pk,foreign_key_with_label,foreign_key_with_label_label,foreign_key_with_no_label,foreign_key_with_no_label_label
+1,1,hello,1,1
+2,,,,
+""".lstrip().replace(
+ "\n", "\r\n"
+)
+
def test_table_csv(app_client):
response = app_client.get("/fixtures/simple_primary_key.csv")
@@ -63,6 +71,13 @@ def test_table_csv_with_labels(app_client):
assert EXPECTED_TABLE_WITH_LABELS_CSV == response.text
+def test_table_csv_with_nullable_labels(app_client):
+ response = app_client.get("/fixtures/foreign_key_references.csv?_labels=1")
+ assert response.status == 200
+ assert "text/plain; charset=utf-8" == response.headers["content-type"]
+ assert EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV == response.text
+
+
def test_custom_sql_csv(app_client):
response = app_client.get(
"/fixtures.csv?sql=select+content+from+simple_primary_key+limit+2"
From ee330222f4c3ee66c2fe41ebc76fed56b9cb9a00 Mon Sep 17 00:00:00 2001
From: Tobias Kunze
Date: Mon, 4 Nov 2019 03:39:55 +0100
Subject: [PATCH 0073/2124] Offer to format readonly SQL (#602)
Following discussion in #601, this PR adds a "Format SQL" button to
read-only SQL (if the SQL actually differs from the formatting result).
It also removes a console error on readonly SQL queries.
Thanks, @rixx!
---
datasette/templates/_codemirror_foot.html | 41 ++++++++++++++---------
1 file changed, 26 insertions(+), 15 deletions(-)
diff --git a/datasette/templates/_codemirror_foot.html b/datasette/templates/_codemirror_foot.html
index 9aba61ab..4019d448 100644
--- a/datasette/templates/_codemirror_foot.html
+++ b/datasette/templates/_codemirror_foot.html
@@ -6,21 +6,32 @@ window.onload = () => {
if (sqlFormat && !readOnly) {
sqlFormat.hidden = false;
}
- var editor = CodeMirror.fromTextArea(sqlInput, {
- lineNumbers: true,
- mode: "text/x-sql",
- lineWrapping: true,
- });
- editor.setOption("extraKeys", {
- "Shift-Enter": function() {
- document.getElementsByClassName("sql")[0].submit();
- },
- Tab: false
- });
- if (sqlInput && sqlFormat) {
- sqlFormat.addEventListener("click", ev => {
- editor.setValue(sqlFormatter.format(editor.getValue()));
- })
+ if (sqlInput) {
+ var editor = CodeMirror.fromTextArea(sqlInput, {
+ lineNumbers: true,
+ mode: "text/x-sql",
+ lineWrapping: true,
+ });
+ editor.setOption("extraKeys", {
+ "Shift-Enter": function() {
+ document.getElementsByClassName("sql")[0].submit();
+ },
+ Tab: false
+ });
+ if (sqlFormat) {
+ sqlFormat.addEventListener("click", ev => {
+ editor.setValue(sqlFormatter.format(editor.getValue()));
+ })
+ }
+ }
+ if (sqlFormat && readOnly) {
+ const formatted = sqlFormatter.format(readOnly.innerHTML);
+ if (formatted != readOnly.innerHTML) {
+ sqlFormat.hidden = false;
+ sqlFormat.addEventListener("click", ev => {
+ readOnly.innerHTML = formatted;
+ })
+ }
}
}
From 9db22cdf1809fb78a7b183cd2f617cd5e26efc68 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 3 Nov 2019 20:11:55 -0800
Subject: [PATCH 0074/2124] pk__notin= filter, closes #614
---
datasette/filters.py | 15 +++++++++++++++
docs/json_api.rst | 3 +++
tests/test_filters.py | 3 +++
3 files changed, 21 insertions(+)
diff --git a/datasette/filters.py b/datasette/filters.py
index efe014ae..5897a3ed 100644
--- a/datasette/filters.py
+++ b/datasette/filters.py
@@ -77,6 +77,20 @@ class InFilter(Filter):
return "{} in {}".format(column, json.dumps(self.split_value(value)))
+class NotInFilter(InFilter):
+ key = "notin"
+ display = "not in"
+
+ def where_clause(self, table, column, value, param_counter):
+ values = self.split_value(value)
+ params = [":p{}".format(param_counter + i) for i in range(len(values))]
+ sql = "{} not in ({})".format(escape_sqlite(column), ", ".join(params))
+ return sql, values
+
+ def human_clause(self, column, value):
+ return "{} not in {}".format(column, json.dumps(self.split_value(value)))
+
+
class Filters:
_filters = (
[
@@ -125,6 +139,7 @@ class Filters:
TemplatedFilter("like", "like", '"{c}" like :{p}', '{c} like "{v}"'),
TemplatedFilter("glob", "glob", '"{c}" glob :{p}', '{c} glob "{v}"'),
InFilter(),
+ NotInFilter(),
]
+ (
[
diff --git a/docs/json_api.rst b/docs/json_api.rst
index 4b365e14..de70362c 100644
--- a/docs/json_api.rst
+++ b/docs/json_api.rst
@@ -228,6 +228,9 @@ You can filter the data returned by the table based on column values using a que
``?column__in=["value","value,with,commas"]``
+``?column__notin=value1,value2,value3``
+ Rows where column does not match any of the provided values. The inverse of ``__in=``. Also supports JSON arrays.
+
``?column__arraycontains=value``
Works against columns that contain JSON arrays - matches if any of the values in that array match.
diff --git a/tests/test_filters.py b/tests/test_filters.py
index fd682cd9..8598087f 100644
--- a/tests/test_filters.py
+++ b/tests/test_filters.py
@@ -47,6 +47,9 @@ import pytest
["foo in (:p0, :p1)"],
["dog,cat", "cat[dog]"],
),
+ # Not in, and JSON array not in
+ ((("foo__notin", "1,2,3"),), ["foo not in (:p0, :p1, :p2)"], ["1", "2", "3"]),
+ ((("foo__notin", "[1,2,3]"),), ["foo not in (:p0, :p1, :p2)"], [1, 2, 3]),
],
)
def test_build_where(args, expected_where, expected_params):
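A simplified re-creation of the new filter's ``where_clause()`` (``escape_sqlite()`` is omitted; in the real code it only quotes column names that need it)::

    def notin_where(column, values, param_counter=0):
        params = [":p{}".format(param_counter + i) for i in range(len(values))]
        return "{} not in ({})".format(column, ", ".join(params)), values

    sql, params = notin_where("foo", ["1", "2", "3"])
    print(sql)     # foo not in (:p0, :p1, :p2)
    print(params)  # ['1', '2', '3']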
From 52fa79c6075f0830ff635b81d957c64d877a05aa Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 4 Nov 2019 15:03:48 -0800
Subject: [PATCH 0075/2124] Use select colnames, not select * for table view -
refs #615
---
datasette/views/table.py | 8 ++++++--
tests/test_api.py | 3 ++-
2 files changed, 8 insertions(+), 3 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 326c11ae..139ff80b 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -235,13 +235,17 @@ class TableView(RowTableShared):
raise NotFound("Table not found: {}".format(table))
pks = await db.primary_keys(table)
+ table_columns = await db.table_columns(table)
+
+ select_columns = ", ".join(escape_sqlite(t) for t in table_columns)
+
use_rowid = not pks and not is_view
if use_rowid:
- select = "rowid, *"
+ select = "rowid, {}".format(select_columns)
order_by = "rowid"
order_by_pks = "rowid"
else:
- select = "*"
+ select = select_columns
order_by_pks = ", ".join([escape_sqlite(pk) for pk in pks])
order_by = order_by_pks
diff --git a/tests/test_api.py b/tests/test_api.py
index c6acbab1..4a09b238 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -610,7 +610,8 @@ def test_table_json(app_client):
assert response.status == 200
data = response.json
assert (
- data["query"]["sql"] == "select * from simple_primary_key order by id limit 51"
+ data["query"]["sql"]
+ == "select id, content from simple_primary_key order by id limit 51"
)
assert data["query"]["params"] == {}
assert data["rows"] == [
From 931bfc66613aa3e22f8314df5c0d0758baf31f38 Mon Sep 17 00:00:00 2001
From: Tobias Kunze
Date: Tue, 5 Nov 2019 00:16:30 +0100
Subject: [PATCH 0076/2124] Handle spaces in DB names (#590)
Closes #503 - thanks, @rixx
---
datasette/views/base.py | 3 ++-
tests/fixtures.py | 4 ++--
tests/test_api.py | 19 ++++++++++++++++++-
tests/test_html.py | 8 ++++----
4 files changed, 26 insertions(+), 8 deletions(-)
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 94945304..062c6956 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -203,12 +203,13 @@ class DataView(BaseView):
hash = hash_bit
else:
name = db_name
- # Verify the hash
+ name = urllib.parse.unquote_plus(name)
try:
db = self.ds.databases[name]
except KeyError:
raise NotFound("Database not found: {}".format(name))
+ # Verify the hash
expected = "000"
if db.hash is not None:
expected = db.hash[:HASH_LENGTH]
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 8aa44687..dcc414bf 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -174,7 +174,7 @@ def app_client_no_files():
@pytest.fixture(scope="session")
def app_client_two_attached_databases():
yield from make_app_client(
- extra_databases={"extra_database.db": EXTRA_DATABASE_SQL}
+ extra_databases={"extra database.db": EXTRA_DATABASE_SQL}
)
@@ -188,7 +188,7 @@ def app_client_conflicting_database_names():
@pytest.fixture(scope="session")
def app_client_two_attached_databases_one_immutable():
yield from make_app_client(
- is_immutable=True, extra_databases={"extra_database.db": EXTRA_DATABASE_SQL}
+ is_immutable=True, extra_databases={"extra database.db": EXTRA_DATABASE_SQL}
)
diff --git a/tests/test_api.py b/tests/test_api.py
index 4a09b238..1fa8642f 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -6,6 +6,7 @@ from .fixtures import ( # noqa
app_client_shorter_time_limit,
app_client_larger_cache_size,
app_client_returned_rows_matches_page_size,
+ app_client_two_attached_databases,
app_client_two_attached_databases_one_immutable,
app_client_conflicting_database_names,
app_client_with_cors,
@@ -1188,7 +1189,7 @@ def test_databases_json(app_client_two_attached_databases_one_immutable):
databases = response.json
assert 2 == len(databases)
extra_database, fixtures_database = databases
- assert "extra_database" == extra_database["name"]
+ assert "extra database" == extra_database["name"]
assert None == extra_database["hash"]
assert True == extra_database["is_mutable"]
assert False == extra_database["is_memory"]
@@ -1679,6 +1680,22 @@ def test_cors(app_client_with_cors, path, status_code):
assert "*" == response.headers["Access-Control-Allow-Origin"]
+@pytest.mark.parametrize(
+ "path",
+ (
+ "/",
+ ".json",
+ "/searchable",
+ "/searchable.json",
+ "/searchable_view",
+ "/searchable_view.json",
+ ),
+)
+def test_database_with_space_in_name(app_client_two_attached_databases, path):
+ response = app_client_two_attached_databases.get("/extra database" + path)
+ assert response.status == 200
+
+
def test_common_prefix_database_names(app_client_conflicting_database_names):
# https://github.com/simonw/datasette/issues/597
assert ["fixtures", "foo", "foo-bar"] == [
diff --git a/tests/test_html.py b/tests/test_html.py
index f63e595b..7f1af86e 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -27,11 +27,11 @@ def test_homepage(app_client_two_attached_databases):
# Should be two attached databases
assert [
{"href": "/fixtures", "text": "fixtures"},
- {"href": "/extra_database", "text": "extra_database"},
+ {"href": "/extra database", "text": "extra database"},
] == [{"href": a["href"], "text": a.text.strip()} for a in soup.select("h2 a")]
# The first attached database should show count text and attached tables
h2 = soup.select("h2")[1]
- assert "extra_database" == h2.text.strip()
+ assert "extra database" == h2.text.strip()
counts_p, links_p = h2.find_all_next("p")[:2]
assert (
"2 rows in 1 table, 5 rows in 4 hidden tables, 1 view" == counts_p.text.strip()
@@ -41,8 +41,8 @@ def test_homepage(app_client_two_attached_databases):
{"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a")
]
assert [
- {"href": "/extra_database/searchable", "text": "searchable"},
- {"href": "/extra_database/searchable_view", "text": "searchable_view"},
+ {"href": "/extra database/searchable", "text": "searchable"},
+ {"href": "/extra database/searchable_view", "text": "searchable_view"},
] == table_links
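The key change is the ``unquote_plus()`` call in base.py: a database file named ``extra database.db`` reaches Datasette with the space URL-encoded, and has to be mapped back to the real name before the lookup::

    from urllib.parse import unquote_plus

    print(unquote_plus("extra+database"))    # extra database
    print(unquote_plus("extra%20database"))  # extra database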
From c30f07c58e410ee296b28aeabe4dc461dd40b435 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 5 Nov 2019 21:12:55 -0800
Subject: [PATCH 0077/2124] Removed _group_count=col feature, closes #504
---
datasette/views/table.py | 12 ------------
docs/json_api.rst | 9 ---------
2 files changed, 21 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 139ff80b..920693d7 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -499,18 +499,6 @@ class TableView(RowTableShared):
if order_by:
order_by = "order by {} ".format(order_by)
- # _group_count=col1&_group_count=col2
- group_count = special_args_lists.get("_group_count") or []
- if group_count:
- sql = 'select {group_cols}, count(*) as "count" from {table_name} {where} group by {group_cols} order by "count" desc limit 100'.format(
- group_cols=", ".join(
- '"{}"'.format(group_count_col) for group_count_col in group_count
- ),
- table_name=escape_sqlite(table),
- where=where_clause,
- )
- return await self.custom_sql(request, database, hash, sql, editable=True)
-
extra_args = {}
# Handle ?_size=500
page_size = _size or request.raw_args.get("_size")
diff --git a/docs/json_api.rst b/docs/json_api.rst
index de70362c..e369bee7 100644
--- a/docs/json_api.rst
+++ b/docs/json_api.rst
@@ -321,15 +321,6 @@ Special table arguments
Here's `an example `__.
-
-``?_group_count=COLUMN``
- Executes a SQL query that returns a count of the number of rows matching
- each unique value in that column, with the most common ordered first.
-
-``?_group_count=COLUMN1&_group_count=column2``
- You can pass multiple ``_group_count`` columns to return counts against
- unique combinations of those columns.
-
``?_next=TOKEN``
Pagination by continuation token - pass the token that was returned in the
``"next"`` property by the previous page.
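
The removed feature was a thin wrapper over a plain aggregate query, so the same result remains available by running SQL directly. A minimal sketch of the query shape the deleted branch generated, against a hypothetical ``facetable`` table::

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript(
        "create table facetable (state text);"
        "insert into facetable values ('CA'), ('CA'), ('MI');"
    )
    # Same shape as the SQL the removed _group_count branch built
    sql = (
        'select "state", count(*) as "count" from facetable '
        'group by "state" order by "count" desc limit 100'
    )
    print(conn.execute(sql).fetchall())  # [('CA', 2), ('MI', 1)]
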
From f9c146b893856a48afa810ebcce1714f30d0d3a2 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 6 Nov 2019 16:55:44 -0800
Subject: [PATCH 0078/2124] Removed unused special_args_lists variable
---
datasette/views/table.py | 2 --
1 file changed, 2 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 920693d7..a60a3941 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -261,12 +261,10 @@ class TableView(RowTableShared):
# That's so if there is a column that starts with _
# it can still be queried using ?_col__exact=blah
special_args = {}
- special_args_lists = {}
other_args = []
for key, value in args.items():
if key.startswith("_") and "__" not in key:
special_args[key] = value[0]
- special_args_lists[key] = value
else:
for v in value:
other_args.append((key, v))
From 83fc5165ac724f69cd57d8f15cd3038e7b30f878 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 7 Nov 2019 18:48:39 -0800
Subject: [PATCH 0079/2124] Improved UI for publish cloudrun, closes #608
---
datasette/publish/cloudrun.py | 39 ++++++++++++++++++++++--
tests/test_publish_cloudrun.py | 55 ++++++++++++++++++++++++++++++++--
2 files changed, 90 insertions(+), 4 deletions(-)
diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py
index c2d77746..a833a32b 100644
--- a/datasette/publish/cloudrun.py
+++ b/datasette/publish/cloudrun.py
@@ -60,6 +60,23 @@ def publish_subcommand(publish):
"gcloud config get-value project", shell=True, universal_newlines=True
).strip()
+ if not service:
+ # Show the user their current services, then prompt for one
+ click.echo("Please provide a service name for this deployment\n")
+ click.echo("Using an existing service name will over-write it")
+ click.echo("")
+ existing_services = get_existing_services()
+ if existing_services:
+ click.echo("Your existing services:\n")
+ for existing_service in existing_services:
+ click.echo(
+ " {name} - created {created} - {url}".format(
+ **existing_service
+ )
+ )
+ click.echo("")
+ service = click.prompt("Service name", type=str)
+
extra_metadata = {
"title": title,
"license": license,
@@ -110,8 +127,26 @@ def publish_subcommand(publish):
image_id = "gcr.io/{project}/{name}".format(project=project, name=name)
check_call("gcloud builds submit --tag {}".format(image_id), shell=True)
check_call(
- "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {}{}".format(
- image_id, " {}".format(service) if service else ""
+ "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} {}".format(
+ image_id, service,
),
shell=True,
)
+
+
+def get_existing_services():
+ services = json.loads(
+ check_output(
+ "gcloud beta run services list --platform=managed --format json",
+ shell=True,
+ universal_newlines=True,
+ )
+ )
+ return [
+ {
+ "name": service["metadata"]["name"],
+ "created": service["metadata"]["creationTimestamp"],
+ "url": service["status"]["address"]["url"],
+ }
+ for service in services
+ ]
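
``get_existing_services()`` depends on the JSON emitted by ``gcloud beta run services list``. A hedged sketch of the (heavily abridged, hypothetical) document shape it consumes - only the three keys the code actually reads, with values borrowed from the test later in this patch::

    import json

    # Abridged, hypothetical gcloud output - just the keys the code reads
    raw = '''[{
        "metadata": {"name": "existing", "creationTimestamp": "2019-01-01"},
        "status": {"address": {"url": "http://www.example.com/"}}
    }]'''
    services = json.loads(raw)
    print([
        {
            "name": s["metadata"]["name"],
            "created": s["metadata"]["creationTimestamp"],
            "url": s["status"]["address"]["url"],
        }
        for s in services
    ])
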
diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py
index 481ac04d..a038b60e 100644
--- a/tests/test_publish_cloudrun.py
+++ b/tests/test_publish_cloudrun.py
@@ -24,6 +24,53 @@ def test_publish_cloudrun_invalid_database(mock_which):
assert 'Path "woop.db" does not exist' in result.output
+@mock.patch("shutil.which")
+@mock.patch("datasette.publish.cloudrun.check_output")
+@mock.patch("datasette.publish.cloudrun.check_call")
+@mock.patch("datasette.publish.cloudrun.get_existing_services")
+def test_publish_cloudrun_prompts_for_service(
+ mock_get_existing_services, mock_call, mock_output, mock_which
+):
+ mock_get_existing_services.return_value = [
+ {"name": "existing", "created": "2019-01-01", "url": "http://www.example.com/"}
+ ]
+ mock_output.return_value = "myproject"
+ mock_which.return_value = True
+ runner = CliRunner()
+ with runner.isolated_filesystem():
+ open("test.db", "w").write("data")
+ result = runner.invoke(
+ cli.cli, ["publish", "cloudrun", "test.db"], input="input-service"
+ )
+ assert (
+ """
+Please provide a service name for this deployment
+
+Using an existing service name will over-write it
+
+Your existing services:
+
+ existing - created 2019-01-01 - http://www.example.com/
+
+Service name: input-service
+""".strip()
+ == result.output.strip()
+ )
+ assert 0 == result.exit_code
+ tag = "gcr.io/myproject/datasette"
+ mock_call.assert_has_calls(
+ [
+ mock.call("gcloud builds submit --tag {}".format(tag), shell=True),
+ mock.call(
+ "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} input-service".format(
+ tag
+ ),
+ shell=True,
+ ),
+ ]
+ )
+
+
@mock.patch("shutil.which")
@mock.patch("datasette.publish.cloudrun.check_output")
@mock.patch("datasette.publish.cloudrun.check_call")
@@ -33,14 +80,16 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which):
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
- result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"])
+ result = runner.invoke(
+ cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"]
+ )
assert 0 == result.exit_code
tag = "gcr.io/{}/datasette".format(mock_output.return_value)
mock_call.assert_has_calls(
[
mock.call("gcloud builds submit --tag {}".format(tag), shell=True),
mock.call(
- "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {}".format(
+ "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} test".format(
tag
),
shell=True,
@@ -65,6 +114,8 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which):
"publish",
"cloudrun",
"test.db",
+ "--service",
+ "datasette",
"--plugin-secret",
"datasette-auth-github",
"client_id",
From 9f5d19c254d1bfbd99f576dff47a6e32e01c76ed Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 Nov 2019 18:12:20 -0800
Subject: [PATCH 0080/2124] Improved documentation for "publish cloudrun"
---
docs/publish.rst | 18 ++++++++++--------
1 file changed, 10 insertions(+), 8 deletions(-)
diff --git a/docs/publish.rst b/docs/publish.rst
index 304be8ef..89d33085 100644
--- a/docs/publish.rst
+++ b/docs/publish.rst
@@ -43,14 +43,16 @@ You will first need to install and configure the Google Cloud CLI tools by follo
You can then publish a database to Google Cloud Run using the following command::
- datasette publish cloudrun mydatabase.db
+ datasette publish cloudrun mydatabase.db --service=my-database
+
+A Cloud Run **service** is a single hosted application. The service name you specify will be used as part of the Cloud Run URL. If you deploy to a service name that you have used in the past your new deployment will replace the previous one.
+
+If you omit the ``--service`` option you will be asked to pick a service name interactively during the deploy.
You may need to interact with prompts from the tool. Once it has finished it will output a URL like this one::
- Service [datasette] revision [datasette-00001] has been deployed
- and is serving traffic at https://datasette-j7hipcg4aq-uc.a.run.app
-
-During the deployment the tool will prompt you for the name of your service. You can reuse an existing name to replace your previous deployment with your new version, or pick a new name to deploy to a new URL.
+ Service [my-service] revision [my-service-00001] has been deployed
+ and is serving traffic at https://my-service-j7hipcg4aq-uc.a.run.app
.. literalinclude:: datasette-publish-cloudrun-help.txt
@@ -90,18 +92,18 @@ Custom metadata and plugins
You can define your own :ref:`metadata` and deploy that with your instance like so::
- datasette publish nowv1 mydatabase.db -m metadata.json
+ datasette publish cloudrun --service=my-service mydatabase.db -m metadata.json
If you just want to set the title, license or source information you can do that directly using extra options to ``datasette publish``::
- datasette publish nowv1 mydatabase.db \
+ datasette publish cloudrun mydatabase.db --service=my-service \
--title="Title of my database" \
--source="Where the data originated" \
--source_url="http://www.example.com/"
You can also specify plugins you would like to install. For example, if you want to include the `datasette-vega <https://github.com/simonw/datasette-vega>`_ visualization plugin you can use the following::
- datasette publish nowv1 mydatabase.db --install=datasette-vega
+ datasette publish cloudrun mydatabase.db --service=my-service --install=datasette-vega
If a plugin has any :ref:`plugins_configuration_secret` you can use the ``--plugin-secret`` option to set those secrets at publish time. For example, using Heroku with `datasette-auth-github <https://github.com/simonw/datasette-auth-github>`__ you might run the following command::
From 10b9d85edaaf198879344aa1c498000cfb27dff8 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 Nov 2019 18:15:13 -0800
Subject: [PATCH 0081/2124] datasette-csvs on Glitch now uses sqlite-utils
It previously used csvs-to-sqlite but that had heavy dependencies.
See https://support.glitch.com/t/can-you-upgrade-python-to-latest-version/7980/33
---
docs/getting_started.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/getting_started.rst b/docs/getting_started.rst
index d0c22583..fdf7d23c 100644
--- a/docs/getting_started.rst
+++ b/docs/getting_started.rst
@@ -25,7 +25,7 @@ Glitch allows you to "remix" any project to create your own copy and start editi
.. image:: https://cdn.glitch.com/2703baf2-b643-4da7-ab91-7ee2a2d00b5b%2Fremix-button.svg
:target: https://glitch.com/edit/#!/remix/datasette-csvs
-Find a CSV file and drag it onto the Glitch file explorer panel - ``datasette-csvs`` will automatically convert it to a SQLite database (using `csvs-to-sqlite <https://github.com/simonw/csvs-to-sqlite>`__) and allow you to start exploring it using Datasette.
+Find a CSV file and drag it onto the Glitch file explorer panel - ``datasette-csvs`` will automatically convert it to a SQLite database (using `sqlite-utils <https://github.com/simonw/sqlite-utils>`__) and allow you to start exploring it using Datasette.
If your CSV file has a ``latitude`` and ``longitude`` column you can visualize it on a map by uncommenting the ``datasette-cluster-map`` line in the ``requirements.txt`` file using the Glitch file editor.
From 28c4a6db5b5e512db630d7ba6127196185de67c7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 9 Nov 2019 17:29:36 -0800
Subject: [PATCH 0082/2124] CREATE INDEX statements on table page, closes #618
---
datasette/database.py | 13 ++++++++++++-
tests/fixtures.py | 1 +
tests/test_html.py | 33 +++++++++++++++++++++++++++++++++
3 files changed, 46 insertions(+), 1 deletion(-)
diff --git a/datasette/database.py b/datasette/database.py
index 7e6f7245..3a1cea94 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -232,7 +232,18 @@ class Database:
)
if not table_definition_rows:
return None
- return table_definition_rows[0][0]
+ bits = [table_definition_rows[0][0] + ";"]
+ # Add on any indexes
+ index_rows = list(
+ await self.ds.execute(
+ self.name,
+ "select sql from sqlite_master where tbl_name = :n and type='index' and sql is not null",
+ {"n": table},
+ )
+ )
+ for index_row in index_rows:
+ bits.append(index_row[0] + ";")
+ return "\n".join(bits)
async def get_view_definition(self, view):
return await self.get_table_definition(view, "view")
diff --git a/tests/fixtures.py b/tests/fixtures.py
index dcc414bf..87e66f99 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -514,6 +514,7 @@ CREATE TABLE compound_three_primary_keys (
content text,
PRIMARY KEY (pk1, pk2, pk3)
);
+CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_keys(content);
CREATE TABLE foreign_key_references (
pk varchar(30) primary key,
diff --git a/tests/test_html.py b/tests/test_html.py
index 7f1af86e..44627cdc 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -119,6 +119,39 @@ def test_row_strange_table_name_with_url_hash(app_client_with_hash):
assert response.status == 200
+@pytest.mark.parametrize(
+ "path,expected_definition_sql",
+ [
+ (
+ "/fixtures/facet_cities",
+ """
+CREATE TABLE facet_cities (
+ id integer primary key,
+ name text
+);
+ """.strip(),
+ ),
+ (
+ "/fixtures/compound_three_primary_keys",
+ """
+CREATE TABLE compound_three_primary_keys (
+ pk1 varchar(30),
+ pk2 varchar(30),
+ pk3 varchar(30),
+ content text,
+ PRIMARY KEY (pk1, pk2, pk3)
+);
+CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_keys(content);
+ """.strip(),
+ ),
+ ],
+)
+def test_definition_sql(path, expected_definition_sql, app_client):
+ response = app_client.get(path)
+ pre = Soup(response.body, "html.parser").select_one("pre.wrapped-sql")
+ assert expected_definition_sql == pre.string
+
+
def test_table_cell_truncation():
for client in make_app_client(config={"truncate_cells_html": 5}):
response = client.get("/fixtures/facetable")
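
The new ``get_table_definition()`` behaviour - the ``CREATE TABLE`` statement followed by any ``CREATE INDEX`` statements for the same table - can be reproduced against any SQLite database via ``sqlite_master``. A standalone sketch::

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript("""
        create table t (id integer primary key, content text);
        create index idx_t_content on t(content);
    """)
    table_sql = conn.execute(
        "select sql from sqlite_master where name = :n and type = 'table'",
        {"n": "t"},
    ).fetchone()[0]
    bits = [table_sql + ";"]
    # sql is null in sqlite_master for auto-created indexes, hence the filter
    for (index_sql,) in conn.execute(
        "select sql from sqlite_master where tbl_name = :n "
        "and type = 'index' and sql is not null",
        {"n": "t"},
    ):
        bits.append(index_sql + ";")
    print("\n".join(bits))
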
From 1c063fae9dba70f70244db010d55a18846640f07 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 10 Nov 2019 19:45:34 -0800
Subject: [PATCH 0083/2124] Test against Python 3.8 in Travis (#623)
* Test against Python 3.8 in Travis
* Avoid current_task warnings in Python 3.8
---
.travis.yml | 1 +
datasette/tracer.py | 9 ++++++++-
2 files changed, 9 insertions(+), 1 deletion(-)
diff --git a/.travis.yml b/.travis.yml
index 29388bc1..a6b15b7e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -5,6 +5,7 @@ dist: xenial
python:
- "3.6"
- "3.7"
+ - "3.8"
- "3.5"
# Executed for 3.5 AND 3.5 as the first "test" stage:
diff --git a/datasette/tracer.py b/datasette/tracer.py
index e46a6fda..a638b140 100644
--- a/datasette/tracer.py
+++ b/datasette/tracer.py
@@ -9,12 +9,19 @@ tracers = {}
TRACE_RESERVED_KEYS = {"type", "start", "end", "duration_ms", "traceback"}
+# asyncio.current_task was introduced in Python 3.7:
+for obj in (asyncio, asyncio.Task):
+ current_task = getattr(obj, "current_task", None)
+ if current_task is not None:
+ break
+
+
def get_task_id():
try:
loop = asyncio.get_event_loop()
except RuntimeError:
return None
- return id(asyncio.Task.current_task(loop=loop))
+ return id(current_task(loop=loop))
@contextmanager
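
The loop above selects whichever spelling of ``current_task`` the running interpreter provides. A self-contained sketch of the same fallback pattern, assuming Python 3.7+ for ``asyncio.run()``::

    import asyncio

    # asyncio.current_task() arrived in Python 3.7; older interpreters
    # only offer asyncio.Task.current_task()
    for obj in (asyncio, asyncio.Task):
        current_task = getattr(obj, "current_task", None)
        if current_task is not None:
            break

    async def demo():
        # id() of the running task makes a cheap per-task trace key
        return id(current_task())

    print(asyncio.run(demo()))
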
From 42ee3e16a9ba7cc513b8da944cc1609a5407cf42 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 10 Nov 2019 20:19:01 -0800
Subject: [PATCH 0084/2124] Bump pint to 0.9 (#624)
This fixes 2 deprecation warnings in Python 3.8 - refs #623 #622
---
setup.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.py b/setup.py
index 9ae56306..e8229de1 100644
--- a/setup.py
+++ b/setup.py
@@ -45,7 +45,7 @@ setup(
"click-default-group~=1.2.1",
"Jinja2~=2.10.1",
"hupper~=1.0",
- "pint~=0.8.1",
+ "pint~=0.9",
"pluggy~=0.12.0",
"uvicorn~=0.8.4",
"aiofiles~=0.4.0",
From 5bc2570121aea8141ff88790e214765472882b08 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 20:45:12 -0800
Subject: [PATCH 0085/2124] Include uvicorn version in /-/versions, refs #622
---
datasette/app.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/datasette/app.py b/datasette/app.py
index 203e0991..4ba4adfb 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -12,6 +12,7 @@ from pathlib import Path
import click
from markupsafe import Markup
from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader
+import uvicorn
from .views.base import DatasetteError, ureg, AsgiRouter
from .views.database import DatabaseDownload, DatabaseView
@@ -433,6 +434,7 @@ class Datasette:
},
"datasette": datasette_version,
"asgi": "3.0",
+ "uvicorn": uvicorn.__version__,
"sqlite": {
"version": sqlite_version,
"fts_versions": fts_versions,
From cf7776d36fbacefa874cbd6e5fcdc9fff7661203 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:09:11 -0800
Subject: [PATCH 0086/2124] Support Python 3.8, stop supporting Python 3.5
(#627)
* Upgrade to uvicorn 0.10.4
* Drop support for Python 3.5
* Bump all dependencies to latest releases
* Update docs to reflect we no longer support 3.5
* Removed code that skipped black unit test on 3.5
Closes #622
---
.travis.yml | 1 -
README.md | 2 +-
docs/contributing.rst | 2 +-
docs/installation.rst | 7 +++++--
setup.py | 20 ++++++++++----------
tests/test_black.py | 7 +------
6 files changed, 18 insertions(+), 21 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index a6b15b7e..0fc87d93 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -6,7 +6,6 @@ python:
- "3.6"
- "3.7"
- "3.8"
- - "3.5"
# Executed for 3.5 AND 3.5 as the first "test" stage:
script:
diff --git a/README.md b/README.md
index 9f85f1ba..14c9cfd6 100644
--- a/README.md
+++ b/README.md
@@ -69,7 +69,7 @@ sqlite-utils: a Python library and CLI tool for building SQLite databases](https
pip3 install datasette
-Datasette requires Python 3.5 or higher. We also have [detailed installation instructions](https://datasette.readthedocs.io/en/stable/installation.html) covering other options such as Docker.
+Datasette requires Python 3.6 or higher. We also have [detailed installation instructions](https://datasette.readthedocs.io/en/stable/installation.html) covering other options such as Docker.
## Basic usage
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 43834edc..078fd841 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -18,7 +18,7 @@ General guidelines
Setting up a development environment
------------------------------------
-If you have Python 3.5 or higher installed on your computer (on OS X the easiest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps.
+If you have Python 3.6 or higher installed on your computer (on OS X the easiest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps.
If you want to use GitHub to publish your changes, first `create a fork of datasette `__ under your own GitHub account.
diff --git a/docs/installation.rst b/docs/installation.rst
index e65d8ee3..9ee7eb4e 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -69,16 +69,19 @@ You can now run the new custom image like so::
You can confirm that the plugins are installed by visiting
http://127.0.0.1:8001/-/plugins
-
Install using pip
-----------------
-To run Datasette without Docker you will need Python 3.5 or higher.
+To run Datasette without Docker you will need Python 3.6 or higher.
You can install Datasette and its dependencies using ``pip``::
pip install datasette
+The last version to support Python 3.5 was 0.30.2 - you can install that version like so::
+
+ pip install datasette==0.30.2
+
If you want to install Datasette in its own virtual environment, use this::
python -mvenv datasette-venv
diff --git a/setup.py b/setup.py
index e8229de1..7a4cdcb3 100644
--- a/setup.py
+++ b/setup.py
@@ -42,12 +42,12 @@ setup(
include_package_data=True,
install_requires=[
"click~=7.0",
- "click-default-group~=1.2.1",
- "Jinja2~=2.10.1",
- "hupper~=1.0",
+ "click-default-group~=1.2.2",
+ "Jinja2~=2.10.3",
+ "hupper~=1.9",
"pint~=0.9",
- "pluggy~=0.12.0",
- "uvicorn~=0.8.4",
+ "pluggy~=0.13.0",
+ "uvicorn~=0.10.4",
"aiofiles~=0.4.0",
],
entry_points="""
@@ -58,11 +58,11 @@ setup(
extras_require={
"docs": ["sphinx_rtd_theme", "sphinx-autobuild"],
"test": [
- "pytest~=5.0.0",
+ "pytest~=5.2.2",
"pytest-asyncio~=0.10.0",
- "aiohttp~=3.5.3",
- "beautifulsoup4~=4.6.1",
- "asgiref~=3.1.2",
+ "aiohttp~=3.6.2",
+ "beautifulsoup4~=4.8.1",
+ "asgiref~=3.2.3",
]
+ maybe_black,
},
@@ -74,8 +74,8 @@ setup(
"Intended Audience :: End Users/Desktop",
"Topic :: Database",
"License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.6",
- "Programming Language :: Python :: 3.5",
],
)
diff --git a/tests/test_black.py b/tests/test_black.py
index 68e2dcc0..b5bfcfd0 100644
--- a/tests/test_black.py
+++ b/tests/test_black.py
@@ -1,3 +1,4 @@
+import black
from click.testing import CliRunner
from pathlib import Path
import pytest
@@ -6,13 +7,7 @@ import sys
code_root = Path(__file__).parent.parent
-@pytest.mark.skipif(
- sys.version_info[:2] < (3, 6), reason="Black requires Python 3.6 or later"
-)
def test_black():
- # Do not import at top of module because Python 3.5 will not have it installed
- import black
-
runner = CliRunner()
result = runner.invoke(
black.main, [str(code_root / "tests"), str(code_root / "datasette"), "--check"]
From 76fc6a9c7317ce4fbf3cc3d327c849f7274d960a Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:17:59 -0800
Subject: [PATCH 0087/2124] Release notes for 0.31
---
docs/changelog.rst | 22 ++++++++++++++++++++++
1 file changed, 22 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index f4761efe..6e260be9 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,28 @@
Changelog
=========
+.. _v0_31:
+
+0.31 (2019-11-11)
+-----------------
+
+This version adds compatibility with Python 3.8 and breaks compatibility with Python 3.5.
+
+If you are still running Python 3.5 you should stick with ``0.30.2``, which you can install like this::
+
+ pip install datasette==0.30.2
+
+- Format SQL button now works with read-only SQL queries - thanks, Tobias Kunze (`#602 <https://github.com/simonw/datasette/issues/602>`__)
+- New ``?column__notin=x,y,z`` filter for table views (`#614 <https://github.com/simonw/datasette/issues/614>`__)
+- Table view now uses ``select col1, col2, col3`` instead of ``select *``
+- Database filenames can now contain spaces - thanks, Tobias Kunze (`#590 <https://github.com/simonw/datasette/issues/590>`__)
+- Removed obsolete ``?_group_count=col`` feature (`#504 <https://github.com/simonw/datasette/issues/504>`__)
+- Improved user interface and documentation for ``datasette publish cloudrun`` (`#608 <https://github.com/simonw/datasette/issues/608>`__)
+- Tables with indexes now show the `` CREATE INDEX`` statements on the table page (`#618 <https://github.com/simonw/datasette/issues/618>`__)
+- Current version of `uvicorn <https://www.uvicorn.org/>`__ is now shown on ``/-/versions``
+- Python 3.8 is now supported! (`#622 <https://github.com/simonw/datasette/issues/622>`__)
+- Python 3.5 is no longer supported.
+
.. _v0_30_2:
0.30.2 (2019-11-02)
From c633c035dc8d4c60f1d13cb074918406bbdb3734 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:26:56 -0800
Subject: [PATCH 0088/2124] Datasette 0.31 in news section
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 14c9cfd6..05995a74 100644
--- a/README.md
+++ b/README.md
@@ -21,6 +21,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+ * 11th November 2019: [Datasette 0.31](https://datasette.readthedocs.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5.
* 18th October 2019: [Datasette 0.30](https://datasette.readthedocs.io/en/stable/changelog.html#v0-30)
* 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail.
* 7th July 2019: [Datasette 0.29](https://datasette.readthedocs.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more...
From 7f89928062b1a1fdb2625a946f7cd5161e597401 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:33:51 -0800
Subject: [PATCH 0089/2124] Removed code that conditionally installs black
Since we no longer support Python 3.5 we don't need this any more.
---
setup.py | 9 ++-------
1 file changed, 2 insertions(+), 7 deletions(-)
diff --git a/setup.py b/setup.py
index 7a4cdcb3..15284779 100644
--- a/setup.py
+++ b/setup.py
@@ -22,11 +22,6 @@ def get_version():
return g["__version__"]
-# Only install black on Python 3.6 or higher
-maybe_black = []
-if sys.version_info > (3, 6):
- maybe_black = ["black~=19.10b0"]
-
setup(
name="datasette",
version=versioneer.get_version(),
@@ -63,8 +58,8 @@ setup(
"aiohttp~=3.6.2",
"beautifulsoup4~=4.8.1",
"asgiref~=3.2.3",
- ]
- + maybe_black,
+ "black~=19.10b0",
+ ],
},
tests_require=["datasette[test]"],
classifiers=[
From 1c518680e9692a9a77022af54f3de3e77fb1aaf4 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:57:48 -0800
Subject: [PATCH 0090/2124] Final steps: build stable branch of Read The Docs
---
docs/contributing.rst | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 078fd841..48930332 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -150,4 +150,7 @@ Wait long enough for Travis to build and deploy the demo version of that commit
git tag 0.25.2
git push --tags
-Once the release is out, you can manually update https://github.com/simonw/datasette/releases
+Final steps once the release has deployed to https://pypi.org/project/datasette/
+
+* Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases
+* Manually kick off a build of the `stable` branch on Read The Docs: https://readthedocs.org/projects/datasette/builds/
From f554be39fc14ddc18921ca29d3920d55aad03d46 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 22:00:13 -0800
Subject: [PATCH 0091/2124] ReST fix
---
docs/changelog.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 6e260be9..763b178e 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -21,7 +21,7 @@ If you are still running Python 3.5 you should stick with ``0.30.2``, which you
- Database filenames can now contain spaces - thanks, Tobias Kunze (`#590 <https://github.com/simonw/datasette/issues/590>`__)
- Removed obsolete ``?_group_count=col`` feature (`#504 <https://github.com/simonw/datasette/issues/504>`__)
- Improved user interface and documentation for ``datasette publish cloudrun`` (`#608 <https://github.com/simonw/datasette/issues/608>`__)
-- Tables with indexes now show the `` CREATE INDEX`` statements on the table page (`#618 <https://github.com/simonw/datasette/issues/618>`__)
+- Tables with indexes now show the ``CREATE INDEX`` statements on the table page (`#618 <https://github.com/simonw/datasette/issues/618>`__)
- Current version of `uvicorn <https://www.uvicorn.org/>`__ is now shown on ``/-/versions``
- Python 3.8 is now supported! (`#622 <https://github.com/simonw/datasette/issues/622>`__)
- Python 3.5 is no longer supported.
From d977fbadf70a96bf2eea1407d01f99d98e092dec Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 22:03:09 -0800
Subject: [PATCH 0092/2124] datasette publish uses python:3.8 base Docker
image, closes #629
---
datasette/utils/__init__.py | 2 +-
tests/test_publish_cloudrun.py | 2 +-
tests/test_publish_now.py | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 3d28a36b..b8df48cf 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -306,7 +306,7 @@ def make_dockerfile(
install = ["datasette"] + list(install)
return """
-FROM python:3.6
+FROM python:3.8
COPY . /app
WORKDIR /app
{spatialite_extras}
diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py
index a038b60e..c5b18cdf 100644
--- a/tests/test_publish_cloudrun.py
+++ b/tests/test_publish_cloudrun.py
@@ -128,7 +128,7 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which):
.split("\n====================\n")[0]
.strip()
)
- expected = """FROM python:3.6
+ expected = """FROM python:3.8
COPY . /app
WORKDIR /app
diff --git a/tests/test_publish_now.py b/tests/test_publish_now.py
index 72aa71db..27fd1245 100644
--- a/tests/test_publish_now.py
+++ b/tests/test_publish_now.py
@@ -138,7 +138,7 @@ def test_publish_now_plugin_secrets(mock_run, mock_which):
.split("\n====================\n")[0]
.strip()
)
- expected = """FROM python:3.6
+ expected = """FROM python:3.8
COPY . /app
WORKDIR /app
From 16265f6a1a7c547e3925e0fc2d6b88754afb0435 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 12 Nov 2019 18:18:04 -0800
Subject: [PATCH 0093/2124] Release notes for 0.31.1
---
docs/changelog.rst | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 763b178e..746f5b42 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,13 @@
Changelog
=========
+.. _v0_31_1:
+
+0.31.1 (2019-11-12)
+-------------------
+
+- Deploymens created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 <https://github.com/simonw/datasette/issues/629>`__)
+
.. _v0_31:
0.31 (2019-11-11)
From a22c7761b61baa61b8e3da7d30887468d61d6b83 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 12 Nov 2019 18:18:39 -0800
Subject: [PATCH 0094/2124] Fixed typo in release notes
---
docs/changelog.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 746f5b42..e527518e 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -9,7 +9,7 @@ Changelog
0.31.1 (2019-11-12)
-------------------
-- Deploymens created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 <https://github.com/simonw/datasette/issues/629>`__)
+- Deployments created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 <https://github.com/simonw/datasette/issues/629>`__)
.. _v0_31:
From bbd00e903cdd49067ecdbdb60a4d225833a44b05 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 12 Nov 2019 18:38:13 -0800
Subject: [PATCH 0095/2124] Badge linking to datasette on hub.docker.com
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 05995a74..9a22c2b2 100644
--- a/README.md
+++ b/README.md
@@ -6,6 +6,7 @@
[](http://datasette.readthedocs.io/en/latest/?badge=latest)
[](https://github.com/simonw/datasette/blob/master/LICENSE)
[](https://black.readthedocs.io/en/stable/)
+[](https://hub.docker.com/r/datasetteproject/datasette)
*A tool for exploring and publishing data*
From 848dec4deb0d3c140a4e0394cac45fbb2593349b Mon Sep 17 00:00:00 2001
From: Stanley Zheng
Date: Tue, 12 Nov 2019 23:28:42 -0500
Subject: [PATCH 0096/2124] Fix for datasette publish with just --source_url
(#631)
Closes #572
---
datasette/templates/_description_source_license.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/datasette/templates/_description_source_license.html b/datasette/templates/_description_source_license.html
index 3327706e..a2bc18f2 100644
--- a/datasette/templates/_description_source_license.html
+++ b/datasette/templates/_description_source_license.html
@@ -21,7 +21,7 @@
{% endif %}{{ metadata.source or metadata.source_url }}{% if metadata.source_url %}{% endif %}
{% endif %}
- {% if metadata.about or metadata.about_url %}{% if metadata.license or metadata.license_url or metadata.source or metadat.source_url %}·{% endif %}
+ {% if metadata.about or metadata.about_url %}{% if metadata.license or metadata.license_url or metadata.source or metadata.source_url %}·{% endif %}
About: {% if metadata.about_url %}
{% endif %}{{ metadata.about or metadata.about_url }}{% if metadata.about_url %}{% endif %}
From f52451023025579ae9a13de4a7f00d69200184cd Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 13 Nov 2019 08:42:47 -0800
Subject: [PATCH 0097/2124] Fix "publish heroku" + upgrade to use Python 3.8.0
Closes #633. Closes #632.
---
datasette/publish/heroku.py | 7 +++++--
tests/test_publish_heroku.py | 9 +++++++--
2 files changed, 12 insertions(+), 4 deletions(-)
diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py
index 34d1f773..e75f76df 100644
--- a/datasette/publish/heroku.py
+++ b/datasette/publish/heroku.py
@@ -72,7 +72,10 @@ def publish_subcommand(publish):
"about_url": about_url,
}
- environment_variables = {}
+ environment_variables = {
+ # Avoid uvicorn error: https://github.com/simonw/datasette/issues/633
+ "WEB_CONCURRENCY": "1"
+ }
if plugin_secret:
extra_metadata["plugins"] = {}
for plugin_name, plugin_setting, setting_value in plugin_secret:
@@ -164,7 +167,7 @@ def temporary_heroku_directory(
if metadata_content:
open("metadata.json", "w").write(json.dumps(metadata_content, indent=2))
- open("runtime.txt", "w").write("python-3.6.8")
+ open("runtime.txt", "w").write("python-3.8.0")
if branch:
install = [
diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py
index 4cd66219..87386e93 100644
--- a/tests/test_publish_heroku.py
+++ b/tests/test_publish_heroku.py
@@ -57,8 +57,13 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which):
open("test.db", "w").write("data")
result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"])
assert 0 == result.exit_code, result.output
- mock_call.assert_called_once_with(
- ["heroku", "builds:create", "-a", "f", "--include-vcs-ignore"]
+ mock_call.assert_has_calls(
+ [
+ mock.call(["heroku", "config:set", "-a", "f", "WEB_CONCURRENCY=1",]),
+ mock.call(
+ ["heroku", "builds:create", "-a", "f", "--include-vcs-ignore"]
+ ),
+ ]
)
From b51f258d00bb3c3b401f15d46a1fbd50394dbe1c Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 13 Nov 2019 08:48:36 -0800
Subject: [PATCH 0098/2124] Release notes for 0.31.2
---
docs/changelog.rst | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index e527518e..f4958399 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,15 @@
Changelog
=========
+.. _v0_31_2:
+
+0.31.2 (2019-11-13)
+-------------------
+
+- Fixed a bug where ``datasette publish heroku`` applications failed to start (`#633 <https://github.com/simonw/datasette/issues/633>`__)
+- Fix for ``datasette publish`` with just ``--source_url`` - thanks, Stanley Zheng (`#572 <https://github.com/simonw/datasette/issues/572>`__)
+- Deployments to Heroku now use Python 3.8.0 (`#632 <https://github.com/simonw/datasette/issues/632>`__)
+
.. _v0_31_1:
0.31.1 (2019-11-12)
From 8c642f04e0608bf537fdd1f76d64c2367fb04d57 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Nov 2019 15:14:22 -0800
Subject: [PATCH 0099/2124] Render templates using Jinja async mode
Closes #628
---
datasette/app.py | 6 ++++--
datasette/views/base.py | 2 +-
docs/plugins.rst | 23 ++++++++++++-----------
tests/fixtures.py | 8 +++++++-
tests/test_plugins.py | 18 ++++++++++++++++++
tests/test_templates/show_json.html | 1 +
6 files changed, 43 insertions(+), 15 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 4ba4adfb..02fcf303 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -583,7 +583,9 @@ class Datasette:
),
]
)
- self.jinja_env = Environment(loader=template_loader, autoescape=True)
+ self.jinja_env = Environment(
+ loader=template_loader, autoescape=True, enable_async=True
+ )
self.jinja_env.filters["escape_css_string"] = escape_css_string
self.jinja_env.filters["quote_plus"] = lambda u: urllib.parse.quote_plus(u)
self.jinja_env.filters["escape_sqlite"] = escape_sqlite
@@ -730,5 +732,5 @@ class DatasetteRouter(AsgiRouter):
else:
template = self.ds.jinja_env.select_template(templates)
await asgi_send_html(
- send, template.render(info), status=status, headers=headers
+ send, await template.render_async(info), status=status, headers=headers
)
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 062c6956..5182479c 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -139,7 +139,7 @@ class BaseView(AsgiView):
extra_template_vars.update(extra_vars)
return Response.html(
- template.render(
+ await template.render_async(
{
**context,
**{
diff --git a/docs/plugins.rst b/docs/plugins.rst
index 6df7ff6a..e5a3d7dd 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -629,7 +629,9 @@ Function that returns a dictionary
If you return a function it will be executed. If it returns a dictionary those values will be merged into the template context.
Function that returns an awaitable function that returns a dictionary
- You can also return a function which returns an awaitable function which returns a dictionary. This means you can execute additional SQL queries using ``datasette.execute()``.
+ You can also return a function which returns an awaitable function which returns a dictionary.
+
+Datasette runs Jinja2 in `async mode <https://jinja.palletsprojects.com/en/2.10.x/api/#async-support>`__, which means you can add awaitable functions to the template scope and they will be automatically awaited when they are rendered by the template.
Here's an example plugin that returns an authentication object from the ASGI scope:
@@ -641,20 +643,19 @@ Here's an example plugin that returns an authentication object from the ASGI sco
"auth": request.scope.get("auth")
}
-And here's an example which returns the current version of SQLite:
+And here's an example which adds a ``sql_first(sql_query)`` function which executes a SQL statement and returns the first column of the first row of results:
.. code-block:: python
@hookimpl
- def extra_template_vars(datasette):
- async def inner():
- first_db = list(datasette.databases.keys())[0]
- return {
- "sqlite_version": (
- await datasette.execute(first_db, "select sqlite_version()")
- ).rows[0][0]
- }
- return inner
+ def extra_template_vars(datasette, database):
+ async def sql_first(sql, dbname=None):
+ dbname = dbname or database or next(iter(datasette.databases.keys()))
+ return (await datasette.execute(dbname, sql)).rows[0][0]
+
+        return {"sql_first": sql_first}
+
+You can then use the new function in a template like so::
+
+ SQLite version: {{ sql_first("select sqlite_version()") }}
.. _plugin_register_output_renderer:
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 87e66f99..3e4203f7 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -446,13 +446,19 @@ def render_cell(value, database):
@hookimpl
def extra_template_vars(template, database, table, view_name, request, datasette):
+ async def query_database(sql):
+ first_db = list(datasette.databases.keys())[0]
+ return (
+ await datasette.execute(first_db, sql)
+ ).rows[0][0]
async def inner():
return {
"extra_template_vars_from_awaitable": json.dumps({
"template": template,
"scope_path": request.scope["path"],
"awaitable": True,
- }, default=lambda b: b.decode("utf8"))
+ }, default=lambda b: b.decode("utf8")),
+ "query_database": query_database,
}
return inner
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index b1c7fd9a..42d063f4 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -1,5 +1,6 @@
from bs4 import BeautifulSoup as Soup
from .fixtures import app_client, make_app_client, TEMP_PLUGIN_SECRET_FILE # noqa
+from datasette.utils import sqlite3
import base64
import json
import os
@@ -214,3 +215,20 @@ def test_plugins_extra_template_vars(restore_working_directory):
"awaitable": True,
"scope_path": "/-/metadata",
} == extra_template_vars_from_awaitable
+
+
+def test_plugins_async_template_function(restore_working_directory):
+ for client in make_app_client(
+ template_dir=str(pathlib.Path(__file__).parent / "test_templates")
+ ):
+ response = client.get("/-/metadata")
+ assert response.status == 200
+ extra_from_awaitable_function = (
+ Soup(response.body, "html.parser")
+ .select("pre.extra_from_awaitable_function")[0]
+ .text
+ )
+ expected = (
+ sqlite3.connect(":memory:").execute("select sqlite_version()").fetchone()[0]
+ )
+ assert expected == extra_from_awaitable_function
diff --git a/tests/test_templates/show_json.html b/tests/test_templates/show_json.html
index bbf1bc06..cff04fb4 100644
--- a/tests/test_templates/show_json.html
+++ b/tests/test_templates/show_json.html
@@ -5,4 +5,5 @@
Test data for extra_template_vars:
{{ extra_template_vars|safe }}
{{ extra_template_vars_from_awaitable|safe }}
+
{{ query_database("select sqlite_version();") }}
{% endblock %}
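
The behaviour those templates rely on is easy to demonstrate in isolation: with ``enable_async=True``, Jinja automatically awaits coroutine functions called from a template. A minimal sketch independent of Datasette, using a stand-in for the awaitable database call::

    import asyncio
    from jinja2 import Environment

    env = Environment(enable_async=True)

    async def sqlite_version():
        # Stand-in for an awaitable call such as datasette.execute()
        return "3.30.1"  # illustrative value

    template = env.from_string("SQLite version: {{ sqlite_version() }}")
    print(asyncio.run(template.render_async(sqlite_version=sqlite_version)))
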
From a95bedb9c423fa6d772c93ef47bc40f13a5bea50 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Nov 2019 15:18:53 -0800
Subject: [PATCH 0100/2124] Release notes for 0.32
---
docs/changelog.rst | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index f4958399..2f909364 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,13 @@
Changelog
=========
+.. _v0_32:
+
+0.32 (2019-11-14)
+-----------------
+
+Datasette now renders templates using `Jinja async mode <https://jinja.palletsprojects.com/en/2.10.x/api/#async-support>`__. This makes it easy for plugins to provide custom template functions that perform asynchronous actions, for example the new `datasette-template-sql <https://github.com/simonw/datasette-template-sql>`__ plugin which allows custom templates to directly execute SQL queries and render their results. (`#628 <https://github.com/simonw/datasette/issues/628>`__)
+
.. _v0_31_2:
0.31.2 (2019-11-13)
From 8fc9a5d877d26dbf2654e125f407ddd2fd767335 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Nov 2019 15:46:37 -0800
Subject: [PATCH 0101/2124] Datasette 0.32 and datasette-template-sql in news
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 9a22c2b2..030c507f 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+ * 14th November 2019: [Datasette 0.32](https://datasette.readthedocs.io/en/stable/changelog.html#v0-32) now uses asynchronous rendering in Jinja templates, which means template functions can perform asynchronous operations such as executing SQL queries. [datasette-template-sql](https://github.com/simonw/datasette-template-sql) is a new plugin that uses this capability to add a new custom `sql(sql_query)` template function.
* 11th November 2019: [Datasette 0.31](https://datasette.readthedocs.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5.
* 18th October 2019: [Datasette 0.30](https://datasette.readthedocs.io/en/stable/changelog.html#v0-30)
* 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail.
From a9909c29ccac771c23c2ef22b89d10697b5256b9 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 15 Nov 2019 14:49:45 -0800
Subject: [PATCH 0102/2124] Move .execute() from Datasette to Database
Refs #569 - I split this change out from #579
---
datasette/app.py | 90 ++++++---------------------
datasette/database.py | 137 +++++++++++++++++++++++++++++++-----------
2 files changed, 121 insertions(+), 106 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 02fcf303..119d0e19 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -24,13 +24,11 @@ from .database import Database
from .utils import (
QueryInterrupted,
- Results,
escape_css_string,
escape_sqlite,
get_plugins,
module_from_path,
sqlite3,
- sqlite_timelimit,
to_css_class,
)
from .utils.asgi import (
@@ -42,13 +40,12 @@ from .utils.asgi import (
asgi_send_json,
asgi_send_redirect,
)
-from .tracer import trace, AsgiTracer
+from .tracer import AsgiTracer
from .plugins import pm, DEFAULT_PLUGINS
from .version import __version__
app_root = Path(__file__).parent.parent
-connections = threading.local()
MEMORY = object()
ConfigOption = collections.namedtuple("ConfigOption", ("name", "default", "help"))
@@ -336,6 +333,25 @@ class Datasette:
# pylint: disable=no-member
pm.hook.prepare_connection(conn=conn)
+ async def execute(
+ self,
+ db_name,
+ sql,
+ params=None,
+ truncate=False,
+ custom_time_limit=None,
+ page_size=None,
+ log_sql_errors=True,
+ ):
+ return await self.databases[db_name].execute(
+ sql,
+ params=params,
+ truncate=truncate,
+ custom_time_limit=custom_time_limit,
+ page_size=page_size,
+ log_sql_errors=log_sql_errors,
+ )
+
async def expand_foreign_keys(self, database, table, column, values):
"Returns dict mapping (column, value) -> label"
labeled_fks = {}
@@ -477,72 +493,6 @@ class Datasette:
.get(table, {})
)
- async def execute_against_connection_in_thread(self, db_name, fn):
- def in_thread():
- conn = getattr(connections, db_name, None)
- if not conn:
- conn = self.databases[db_name].connect()
- self.prepare_connection(conn)
- setattr(connections, db_name, conn)
- return fn(conn)
-
- return await asyncio.get_event_loop().run_in_executor(self.executor, in_thread)
-
- async def execute(
- self,
- db_name,
- sql,
- params=None,
- truncate=False,
- custom_time_limit=None,
- page_size=None,
- log_sql_errors=True,
- ):
- """Executes sql against db_name in a thread"""
- page_size = page_size or self.page_size
-
- def sql_operation_in_thread(conn):
- time_limit_ms = self.sql_time_limit_ms
- if custom_time_limit and custom_time_limit < time_limit_ms:
- time_limit_ms = custom_time_limit
-
- with sqlite_timelimit(conn, time_limit_ms):
- try:
- cursor = conn.cursor()
- cursor.execute(sql, params or {})
- max_returned_rows = self.max_returned_rows
- if max_returned_rows == page_size:
- max_returned_rows += 1
- if max_returned_rows and truncate:
- rows = cursor.fetchmany(max_returned_rows + 1)
- truncated = len(rows) > max_returned_rows
- rows = rows[:max_returned_rows]
- else:
- rows = cursor.fetchall()
- truncated = False
- except sqlite3.OperationalError as e:
- if e.args == ("interrupted",):
- raise QueryInterrupted(e, sql, params)
- if log_sql_errors:
- print(
- "ERROR: conn={}, sql = {}, params = {}: {}".format(
- conn, repr(sql), params, e
- )
- )
- raise
-
- if truncate:
- return Results(rows, truncated, cursor.description)
-
- else:
- return Results(rows, False, cursor.description)
-
- with trace("sql", database=db_name, sql=sql.strip(), params=params):
- results = await self.execute_against_connection_in_thread(
- db_name, sql_operation_in_thread
- )
- return results
-
def register_renderers(self):
""" Register output renderers which output data in custom formats. """
# Built-in renderers
diff --git a/datasette/database.py b/datasette/database.py
index 3a1cea94..9a8ae4d4 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -1,17 +1,25 @@
+import asyncio
+import contextlib
from pathlib import Path
+import threading
+from .tracer import trace
from .utils import (
QueryInterrupted,
+ Results,
detect_fts,
detect_primary_keys,
detect_spatialite,
get_all_foreign_keys,
get_outbound_foreign_keys,
+ sqlite_timelimit,
sqlite3,
table_columns,
)
from .inspect import inspect_hash
+connections = threading.local()
+
class Database:
def __init__(self, ds, path=None, is_mutable=False, is_memory=False):
@@ -45,6 +53,73 @@ class Database:
"file:{}?{}".format(self.path, qs), uri=True, check_same_thread=False
)
+ async def execute_against_connection_in_thread(self, fn):
+ def in_thread():
+ conn = getattr(connections, self.name, None)
+ if not conn:
+ conn = self.connect()
+ self.ds.prepare_connection(conn)
+ setattr(connections, self.name, conn)
+ return fn(conn)
+
+ return await asyncio.get_event_loop().run_in_executor(
+ self.ds.executor, in_thread
+ )
+
+ async def execute(
+ self,
+ sql,
+ params=None,
+ truncate=False,
+ custom_time_limit=None,
+ page_size=None,
+ log_sql_errors=True,
+ ):
+ """Executes sql against db_name in a thread"""
+ page_size = page_size or self.ds.page_size
+
+ def sql_operation_in_thread(conn):
+ time_limit_ms = self.ds.sql_time_limit_ms
+ if custom_time_limit and custom_time_limit < time_limit_ms:
+ time_limit_ms = custom_time_limit
+
+ with sqlite_timelimit(conn, time_limit_ms):
+ try:
+ cursor = conn.cursor()
+ cursor.execute(sql, params or {})
+ max_returned_rows = self.ds.max_returned_rows
+ if max_returned_rows == page_size:
+ max_returned_rows += 1
+ if max_returned_rows and truncate:
+ rows = cursor.fetchmany(max_returned_rows + 1)
+ truncated = len(rows) > max_returned_rows
+ rows = rows[:max_returned_rows]
+ else:
+ rows = cursor.fetchall()
+ truncated = False
+ except sqlite3.OperationalError as e:
+ if e.args == ("interrupted",):
+ raise QueryInterrupted(e, sql, params)
+ if log_sql_errors:
+ print(
+ "ERROR: conn={}, sql = {}, params = {}: {}".format(
+ conn, repr(sql), params, e
+ )
+ )
+ raise
+
+ if truncate:
+ return Results(rows, truncated, cursor.description)
+
+ else:
+ return Results(rows, False, cursor.description)
+
+ with trace("sql", database=self.name, sql=sql.strip(), params=params):
+ results = await self.execute_against_connection_in_thread(
+ sql_operation_in_thread
+ )
+ return results
+
@property
def size(self):
if self.is_memory:
@@ -62,8 +137,7 @@ class Database:
for table in await self.table_names():
try:
table_count = (
- await self.ds.execute(
- self.name,
+ await self.execute(
"select count(*) from [{}]".format(table),
custom_time_limit=limit,
)
@@ -89,32 +163,30 @@ class Database:
return Path(self.path).stem
async def table_exists(self, table):
- results = await self.ds.execute(
- self.name,
- "select 1 from sqlite_master where type='table' and name=?",
- params=(table,),
+ results = await self.execute(
+ "select 1 from sqlite_master where type='table' and name=?", params=(table,)
)
return bool(results.rows)
async def table_names(self):
- results = await self.ds.execute(
- self.name, "select name from sqlite_master where type='table'"
+ results = await self.execute(
+ "select name from sqlite_master where type='table'"
)
return [r[0] for r in results.rows]
async def table_columns(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: table_columns(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: table_columns(conn, table)
)
async def primary_keys(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: detect_primary_keys(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: detect_primary_keys(conn, table)
)
async def fts_table(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: detect_fts(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: detect_fts(conn, table)
)
async def label_column_for_table(self, table):
@@ -124,8 +196,8 @@ class Database:
if explicit_label_column:
return explicit_label_column
# If a table has two columns, one of which is ID, then label_column is the other one
- column_names = await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: table_columns(conn, table)
+ column_names = await self.execute_against_connection_in_thread(
+ lambda conn: table_columns(conn, table)
)
# Is there a name or title column?
name_or_title = [c for c in column_names if c in ("name", "title")]
@@ -141,8 +213,8 @@ class Database:
return None
async def foreign_keys_for_table(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: get_outbound_foreign_keys(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: get_outbound_foreign_keys(conn, table)
)
async def hidden_table_names(self):
@@ -150,18 +222,17 @@ class Database:
hidden_tables = [
r[0]
for r in (
- await self.ds.execute(
- self.name,
+ await self.execute(
"""
select name from sqlite_master
where rootpage = 0
and sql like '%VIRTUAL TABLE%USING FTS%'
- """,
+ """
)
).rows
]
- has_spatialite = await self.ds.execute_against_connection_in_thread(
- self.name, detect_spatialite
+ has_spatialite = await self.execute_against_connection_in_thread(
+ detect_spatialite
)
if has_spatialite:
# Also hide Spatialite internal tables
@@ -178,13 +249,12 @@ class Database:
] + [
r[0]
for r in (
- await self.ds.execute(
- self.name,
+ await self.execute(
"""
select name from sqlite_master
where name like "idx_%"
and type = "table"
- """,
+ """
)
).rows
]
@@ -207,25 +277,20 @@ class Database:
return hidden_tables
async def view_names(self):
- results = await self.ds.execute(
- self.name, "select name from sqlite_master where type='view'"
- )
+ results = await self.execute("select name from sqlite_master where type='view'")
return [r[0] for r in results.rows]
async def get_all_foreign_keys(self):
- return await self.ds.execute_against_connection_in_thread(
- self.name, get_all_foreign_keys
- )
+ return await self.execute_against_connection_in_thread(get_all_foreign_keys)
async def get_outbound_foreign_keys(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: get_outbound_foreign_keys(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: get_outbound_foreign_keys(conn, table)
)
async def get_table_definition(self, table, type_="table"):
table_definition_rows = list(
- await self.ds.execute(
- self.name,
+ await self.execute(
"select sql from sqlite_master where name = :n and type=:t",
{"n": table, "t": type_},
)
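
The moved ``execute()`` keeps one SQLite connection per thread and database in a ``threading.local()`` and runs each query on an executor thread. A stripped-down sketch of that pattern outside Datasette::

    import asyncio
    import sqlite3
    import threading
    from concurrent.futures import ThreadPoolExecutor

    connections = threading.local()
    executor = ThreadPoolExecutor(max_workers=3)

    async def execute(name, sql):
        def in_thread():
            # Reuse this thread's connection for `name`, creating it lazily
            conn = getattr(connections, name, None)
            if conn is None:
                conn = sqlite3.connect(":memory:", check_same_thread=False)
                setattr(connections, name, conn)
            return conn.execute(sql).fetchall()

        return await asyncio.get_event_loop().run_in_executor(executor, in_thread)

    print(asyncio.run(execute("fixtures", "select 1 + 1")))  # [(2,)]
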
From 440a70428c624f6e27b630026acdba2032acc9a7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 19 Nov 2019 15:01:10 -0800
Subject: [PATCH 0103/2124] Include rowid in filter select, closes #636
---
datasette/views/table.py | 6 +-----
tests/test_html.py | 24 ++++++++++++++++++++++++
2 files changed, 25 insertions(+), 5 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index a60a3941..516b474d 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -587,10 +587,6 @@ class TableView(RowTableShared):
columns = [r[0] for r in results.description]
rows = list(results.rows)
- filter_columns = columns[:]
- if use_rowid and filter_columns[0] == "rowid":
- filter_columns = filter_columns[1:]
-
# Expand labeled columns if requested
expanded_columns = []
expandable_columns = await self.expandable_columns(database, table)
@@ -720,7 +716,7 @@ class TableView(RowTableShared):
"use_rowid": use_rowid,
"filters": filters,
"display_columns": display_columns,
- "filter_columns": filter_columns,
+ "filter_columns": columns,
"display_rows": display_rows,
"facets_timed_out": facets_timed_out,
"sorted_facet_results": sorted(
diff --git a/tests/test_html.py b/tests/test_html.py
index 44627cdc..3b331f38 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -679,6 +679,30 @@ def test_table_html_foreign_key_custom_label_column(app_client):
]
+@pytest.mark.parametrize(
+ "path,expected_column_options",
+ [
+ ("/fixtures/infinity", ["- column -", "rowid", "value"]),
+ (
+ "/fixtures/primary_key_multiple_columns",
+ ["- column -", "id", "content", "content2"],
+ ),
+ ("/fixtures/compound_primary_key", ["- column -", "pk1", "pk2", "content"]),
+ ],
+)
+def test_table_html_filter_form_column_options(
+ path, expected_column_options, app_client
+):
+ response = app_client.get(path)
+ assert response.status == 200
+ form = Soup(response.body, "html.parser").find("form")
+ column_options = [
+ o.attrs.get("value") or o.string
+ for o in form.select("select[name=_filter_column] option")
+ ]
+ assert expected_column_options == column_options
+
+
def test_row_html_compound_primary_key(app_client):
response = app_client.get("/fixtures/compound_primary_key/a,b")
assert response.status == 200
From c16be14517414a94e1fdbd888e8a3ad0669e3bca Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 20 Nov 2019 10:02:07 -0800
Subject: [PATCH 0104/2124] How to upgrade using Docker
---
docs/installation.rst | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/docs/installation.rst b/docs/installation.rst
index 9ee7eb4e..c547f9e4 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -33,6 +33,10 @@ Now visit http://127.0.0.1:8001/ to access Datasette.
(You can download a copy of ``fixtures.db`` from
https://latest.datasette.io/fixtures.db )
+To upgrade to the most recent release of Datasette, run the following::
+
+ docker pull datasetteproject/datasette
+
Loading Spatialite
~~~~~~~~~~~~~~~~~~
From fd137da7f83c117b18e189707a1039e319dd5c91 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 21 Nov 2019 16:56:55 -0800
Subject: [PATCH 0105/2124] Suggest column facet only if at least one count > 1
Fixes #638
---
datasette/facets.py | 5 ++++-
tests/fixtures.py | 33 +++++++++++++++++----------------
tests/test_api.py | 30 ++++++++++++++++++++++++++++--
tests/test_csv.py | 32 ++++++++++++++++----------------
4 files changed, 65 insertions(+), 35 deletions(-)
diff --git a/datasette/facets.py b/datasette/facets.py
index 0c6459d6..a314faaf 100644
--- a/datasette/facets.py
+++ b/datasette/facets.py
@@ -143,9 +143,10 @@ class ColumnFacet(Facet):
if column in already_enabled:
continue
suggested_facet_sql = """
- select distinct {column} from (
+ select {column}, count(*) as n from (
{sql}
) where {column} is not null
+ group by {column}
limit {limit}
""".format(
column=escape_sqlite(column), sql=self.sql, limit=facet_size + 1
@@ -165,6 +166,8 @@ class ColumnFacet(Facet):
and num_distinct_values > 1
and num_distinct_values <= facet_size
and num_distinct_values < row_count
+ # And at least one has n > 1
+ and any(r["n"] > 1 for r in distinct_values)
):
suggested_facets.append(
{
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 3e4203f7..bb01d171 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -669,26 +669,27 @@ CREATE TABLE facetable (
neighborhood text,
tags text,
complex_array text,
+ distinct_some_null,
FOREIGN KEY ("city_id") REFERENCES [facet_cities](id)
);
INSERT INTO facetable
- (created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array)
+ (created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array, distinct_some_null)
VALUES
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]'),
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]'),
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]', '[]'),
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Hollywood', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]', '[]'),
- ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]'),
- ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]', '[]'),
- ("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]', '[]'),
- ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]', '[]'),
- ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]', '[]'),
- ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]'),
- ("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]')
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]', 'one'),
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]', 'two'),
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]', '[]', null),
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Hollywood', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]', '[]', null),
+ ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]', null),
+ ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]', '[]', null),
+ ("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]', null)
;
CREATE TABLE binary_data (
diff --git a/tests/test_api.py b/tests/test_api.py
index 1fa8642f..34eef4ce 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -197,6 +197,7 @@ def test_database_page(app_client):
"neighborhood",
"tags",
"complex_array",
+ "distinct_some_null",
],
"primary_keys": ["pk"],
"count": 15,
@@ -1042,15 +1043,38 @@ def test_table_filter_json_arraycontains(app_client):
"Mission",
'["tag1", "tag2"]',
'[{"foo": "bar"}]',
+ "one",
+ ],
+ [
+ 2,
+ "2019-01-14 08:00:00",
+ 1,
+ 1,
+ "CA",
+ 1,
+ "Dogpatch",
+ '["tag1", "tag3"]',
+ "[]",
+ "two",
],
- [2, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Dogpatch", '["tag1", "tag3"]', "[]"],
] == response.json["rows"]
def test_table_filter_extra_where(app_client):
response = app_client.get("/fixtures/facetable.json?_where=neighborhood='Dogpatch'")
assert [
- [2, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Dogpatch", '["tag1", "tag3"]', "[]"]
+ [
+ 2,
+ "2019-01-14 08:00:00",
+ 1,
+ 1,
+ "CA",
+ 1,
+ "Dogpatch",
+ '["tag1", "tag3"]',
+ "[]",
+ "two",
+ ]
] == response.json["rows"]
@@ -1503,6 +1527,7 @@ def test_expand_labels(app_client):
"neighborhood": "Dogpatch",
"tags": '["tag1", "tag3"]',
"complex_array": "[]",
+ "distinct_some_null": "two",
},
"13": {
"pk": 13,
@@ -1514,6 +1539,7 @@ def test_expand_labels(app_client):
"neighborhood": "Corktown",
"tags": "[]",
"complex_array": "[]",
+ "distinct_some_null": None,
},
} == response.json
diff --git a/tests/test_csv.py b/tests/test_csv.py
index 13aca489..1030c2bb 100644
--- a/tests/test_csv.py
+++ b/tests/test_csv.py
@@ -21,22 +21,22 @@ world
)
EXPECTED_TABLE_WITH_LABELS_CSV = """
-pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,complex_array
-1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]"
-2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]",[]
-3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[]
-4,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Tenderloin,[],[]
-5,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Bernal Heights,[],[]
-6,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Hayes Valley,[],[]
-7,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Hollywood,[],[]
-8,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Downtown,[],[]
-9,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Los Feliz,[],[]
-10,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Koreatown,[],[]
-11,2019-01-16 08:00:00,1,1,MI,3,Detroit,Downtown,[],[]
-12,2019-01-17 08:00:00,1,1,MI,3,Detroit,Greektown,[],[]
-13,2019-01-17 08:00:00,1,1,MI,3,Detroit,Corktown,[],[]
-14,2019-01-17 08:00:00,1,1,MI,3,Detroit,Mexicantown,[],[]
-15,2019-01-17 08:00:00,2,0,MC,4,Memnonia,Arcadia Planitia,[],[]
+pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,complex_array,distinct_some_null
+1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]",one
+2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]",[],two
+3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[],
+4,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Tenderloin,[],[],
+5,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Bernal Heights,[],[],
+6,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Hayes Valley,[],[],
+7,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Hollywood,[],[],
+8,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Downtown,[],[],
+9,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Los Feliz,[],[],
+10,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Koreatown,[],[],
+11,2019-01-16 08:00:00,1,1,MI,3,Detroit,Downtown,[],[],
+12,2019-01-17 08:00:00,1,1,MI,3,Detroit,Greektown,[],[],
+13,2019-01-17 08:00:00,1,1,MI,3,Detroit,Corktown,[],[],
+14,2019-01-17 08:00:00,1,1,MI,3,Detroit,Mexicantown,[],[],
+15,2019-01-17 08:00:00,2,0,MC,4,Memnonia,Arcadia Planitia,[],[],
""".lstrip().replace(
"\n", "\r\n"
)
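Pulling the pieces of this patch together: the suggestion query now returns ``(value, count)`` pairs, and a column is only suggested as a facet when at least one value occurs more than once. A rough standalone sketch of the combined check, assuming an unescaped ``column`` name for brevity::

    import sqlite3

    def should_suggest_facet(conn, sql, column, facet_size=30):
        # Count each distinct non-null value, fetching at most
        # facet_size + 1 groups so "too many values" is detectable.
        rows = conn.execute(
            "select {column}, count(*) as n from ({sql}) "
            "where {column} is not null group by {column} limit {limit}".format(
                column=column, sql=sql, limit=facet_size + 1
            )
        ).fetchall()
        row_count = conn.execute(
            "select count(*) from ({})".format(sql)
        ).fetchone()[0]
        num_distinct = len(rows)
        return (
            1 < num_distinct <= facet_size
            and num_distinct < row_count
            # The new condition from this patch: faceting is pointless
            # if every value occurs exactly once.
            and any(n > 1 for _value, n in rows)
        )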
From d3e1c3017ee2f606a731208d59fe48805cdc3259 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 22 Nov 2019 22:07:01 -0800
Subject: [PATCH 0106/2124] Display 0 results, closes #637
---
datasette/static/app.css | 7 +++++
datasette/templates/_table.html | 56 ++++++++++++++++++---------------
datasette/templates/query.html | 2 ++
tests/test_html.py | 14 +++++++++
4 files changed, 53 insertions(+), 26 deletions(-)
diff --git a/datasette/static/app.css b/datasette/static/app.css
index 34eb122c..d7cf6334 100644
--- a/datasette/static/app.css
+++ b/datasette/static/app.css
@@ -327,3 +327,10 @@ a.not-underlined {
pre.wrapped-sql {
white-space: pre-wrap;
}
+
+p.zero-results {
+ border: 2px solid #ccc;
+ background-color: #eee;
+ padding: 0.5em;
+ font-style: italic;
+}
diff --git a/datasette/templates/_table.html b/datasette/templates/_table.html
index c7a72253..42c37c55 100644
--- a/datasette/templates/_table.html
+++ b/datasette/templates/_table.html
@@ -1,28 +1,32 @@
-
-
-
- {% for column in display_columns %}
-
- {% if not column.sortable %}
- {{ column.name }}
- {% else %}
- {% if column.name == sort %}
- {{ column.name }} ▼
+{% if display_rows %}
+
".format(escape(repr(exception)))
- await send({"type": "http.response.body", "body": html.encode("latin-1")})
+ await send({"type": "http.response.body", "body": html.encode("utf-8")})
class AsgiLifespan:
@@ -259,7 +259,11 @@ async def asgi_send_json(send, info, status=200, headers=None):
async def asgi_send_html(send, html, status=200, headers=None):
headers = headers or {}
await asgi_send(
- send, html, status=status, headers=headers, content_type="text/html"
+ send,
+ html,
+ status=status,
+ headers=headers,
+ content_type="text/html; charset=utf-8",
)
@@ -269,13 +273,13 @@ async def asgi_send_redirect(send, location, status=302):
"",
status=status,
headers={"Location": location},
- content_type="text/html",
+ content_type="text/html; charset=utf-8",
)
async def asgi_send(send, content, status, headers=None, content_type="text/plain"):
await asgi_start(send, status, headers, content_type)
- await send({"type": "http.response.body", "body": content.encode("latin-1")})
+ await send({"type": "http.response.body", "body": content.encode("utf-8")})
async def asgi_start(send, status, headers=None, content_type="text/plain"):
diff --git a/docs/custom_templates.rst b/docs/custom_templates.rst
index 142ecc97..adbfbc25 100644
--- a/docs/custom_templates.rst
+++ b/docs/custom_templates.rst
@@ -284,7 +284,7 @@ You can nest directories within pages to create a nested structure. To create a
Custom headers and status codes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Custom pages default to being served with a content-type of ``text/html`` and a ``200`` status code. You can change these by calling a custom function from within your template.
+Custom pages default to being served with a content-type of ``text/html; charset=utf-8`` and a ``200`` status code. You can change these by calling a custom function from within your template.
For example, to serve a custom page with a ``418 I'm a teapot`` HTTP status code, create a file in ``pages/teapot.html`` containing the following::
@@ -314,7 +314,7 @@ You can verify this is working using ``curl`` like this::
date: Sun, 26 Apr 2020 18:38:30 GMT
server: uvicorn
x-teapot: I am
- content-type: text/html
+ content-type: text/html; charset=utf-8
Custom redirects
~~~~~~~~~~~~~~~~
diff --git a/tests/test_html.py b/tests/test_html.py
index b8dc543c..564365ce 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -963,6 +963,12 @@ def test_404_trailing_slash_redirect(app_client, path, expected_redirect):
assert expected_redirect == response.headers["Location"]
+def test_404_content_type(app_client):
+ response = app_client.get("/404")
+ assert 404 == response.status
+ assert "text/html; charset=utf-8" == response.headers["content-type"]
+
+
def test_canned_query_with_custom_metadata(app_client):
response = app_client.get("/fixtures/neighborhood_search?text=town")
assert response.status == 200
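The switch from ``latin-1`` to ``utf-8`` in the patch above matters because ``latin-1`` can only represent 256 characters; encoding a page containing anything outside that range raises an error. A quick illustration (``𝐜𝐢𝐭𝐢𝐞𝐬`` is a canned query name from the fixtures used elsewhere in this series)::

    page = "<h1>𝐜𝐢𝐭𝐢𝐞𝐬</h1>"

    try:
        page.encode("latin-1")
    except UnicodeEncodeError as e:
        print("latin-1 fails:", e)

    body = page.encode("utf-8")  # always succeeds
    # The charset in "text/html; charset=utf-8" then tells the
    # browser how to decode the body it receives.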
From cc872b1f50f1d2c0bc2d930c86a6644f154459dc Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 4 May 2020 11:42:01 -0700
Subject: [PATCH 0217/2124] Fixed rogue output in tests, closes #755
---
tests/test_database.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/test_database.py b/tests/test_database.py
index 35923c0b..a9728019 100644
--- a/tests/test_database.py
+++ b/tests/test_database.py
@@ -116,7 +116,7 @@ async def test_execute_write_fn_block_false(app_client):
def write_fn(conn):
with conn:
- conn.execute("delete from roadside_attractions where id = 1;")
+ conn.execute("delete from roadside_attractions where pk = 1;")
row = conn.execute("select count(*) from roadside_attractions").fetchone()
print("row = ", row)
return row[0]
From 7e2bb314649baa9e782ad22ff452d90d46aa840b Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 4 May 2020 12:10:31 -0700
Subject: [PATCH 0218/2124] Documented installation using pipx, closes #756
---
docs/installation.rst | 87 +++++++++++++++++++++++++++++++++++++++++++
1 file changed, 87 insertions(+)
diff --git a/docs/installation.rst b/docs/installation.rst
index c547f9e4..990d867b 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -97,3 +97,90 @@ You can now run Datasette like so::
datasette fixtures.db
If you want to start making contributions to the Datasette project by installing a copy that lets you directly modify the code, take a look at our guide to :ref:`devenvironment`.
+
+Install using pipx
+------------------
+
+`pipx <https://pipxproject.github.io/pipx/>`__ is a tool for installing Python software with all of its dependencies in an isolated environment, to ensure that they will not conflict with any other installed Python software.
+
+If you use `Homebrew <https://brew.sh/>`__ on macOS you can install pipx like this::
+
+ brew install pipx
+ pipx ensurepath
+
+Without Homebrew you can install it like so::
+
+ python3 -m pip install --user pipx
+ python3 -m pipx ensurepath
+
+The ``pipx ensurepath`` command configures your shell to ensure it can find commands that have been installed by pipx - generally by making sure ``~/.local/bin`` has been added to your ``PATH``.
+
+Once pipx is installed you can use it to install Datasette like this::
+
+ pipx install datasette
+
+Then run ``datasette --version`` to confirm that it has been successfully installed.
+
+Installing plugins using pipx
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Datasette plugins need to be installed into the same environment as Datasette itself. You can do this using ``pipx inject datasette name-of-plugin`` - and then confirm that the plugin has been installed using the ``datasette plugins`` command::
+
+ $ datasette plugins
+ []
+
+ $ pipx inject datasette datasette-json-html
+ injected package datasette-json-html into venv datasette
+ done! ✨ 🌟 ✨
+
+ $ datasette plugins
+ [
+ {
+ "name": "datasette-json-html",
+ "static": false,
+ "templates": false,
+ "version": "0.6"
+ }
+ ]
+
+Upgrading packages using pipx
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can upgrade your pipx installation to the latest release of Datasette using ``pipx upgrade datasette``::
+
+ $ pipx upgrade datasette
+ upgraded package datasette from 0.39 to 0.40 (location: /Users/simon/.local/pipx/venvs/datasette)
+
+To upgrade a plugin within the pipx environment use ``pipx runpip datasette install -U name-of-plugin`` - like this::
+
+ $ datasette plugins
+ [
+ {
+ "name": "datasette-vega",
+ "static": true,
+ "templates": false,
+ "version": "0.6"
+ }
+ ]
+
+ $ pipx runpip datasette install -U datasette-vega
+ Collecting datasette-vega
+ Downloading datasette_vega-0.6.2-py3-none-any.whl (1.8 MB)
+ |████████████████████████████████| 1.8 MB 2.0 MB/s
+ ...
+ Installing collected packages: datasette-vega
+ Attempting uninstall: datasette-vega
+ Found existing installation: datasette-vega 0.6
+ Uninstalling datasette-vega-0.6:
+ Successfully uninstalled datasette-vega-0.6
+ Successfully installed datasette-vega-0.6.2
+
+ $ datasette plugins
+ [
+ {
+ "name": "datasette-vega",
+ "static": true,
+ "templates": false,
+ "version": "0.6.2"
+ }
+ ]
From 0cdf111ae68d46eb2eb51d85e20e1447a42cbdcc Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 4 May 2020 12:31:13 -0700
Subject: [PATCH 0219/2124] Move pip/pipx to top of installation instructions
Less intimidating than Docker, hopefully.
---
docs/installation.rst | 128 ++++++++++++++++++++++--------------------
1 file changed, 67 insertions(+), 61 deletions(-)
diff --git a/docs/installation.rst b/docs/installation.rst
index 990d867b..c88950c2 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -11,67 +11,7 @@ on to your machine, or you can install it using Docker.
.. contents::
-Using Docker
-------------
-
-A Docker image containing the latest release of Datasette is published to Docker
-Hub here: https://hub.docker.com/r/datasetteproject/datasette/
-
-If you have Docker installed (for example with `Docker for Mac
-<https://www.docker.com/docker-mac>`_ on OS X) you can download and run this
-image like so::
-
- docker run -p 8001:8001 -v `pwd`:/mnt \
- datasetteproject/datasette \
- datasette -p 8001 -h 0.0.0.0 /mnt/fixtures.db
-
-This will start an instance of Datasette running on your machine's port 8001,
-serving the ``fixtures.db`` file in your current directory.
-
-Now visit http://127.0.0.1:8001/ to access Datasette.
-
-(You can download a copy of ``fixtures.db`` from
-https://latest.datasette.io/fixtures.db )
-
-To upgrade to the most recent release of Datasette, run the following::
-
- docker pull datasetteproject/datasette
-
-Loading Spatialite
-~~~~~~~~~~~~~~~~~~
-
-The ``datasetteproject/datasette`` image includes a recent version of the
-:ref:`SpatiaLite extension <spatialite>` for SQLite. To load and enable that
-module, use the following command::
-
- docker run -p 8001:8001 -v `pwd`:/mnt \
- datasetteproject/datasette \
- datasette -p 8001 -h 0.0.0.0 /mnt/fixtures.db \
- --load-extension=/usr/local/lib/mod_spatialite.so
-
-You can confirm that SpatiaLite is successfully loaded by visiting
-http://127.0.0.1:8001/-/versions
-
-Installing plugins
-~~~~~~~~~~~~~~~~~~
-
-If you want to install plugins into your local Datasette Docker image you can do
-so using the following recipe. This will install the plugins and then save a
-brand new local image called ``datasette-with-plugins``::
-
- docker run datasetteproject/datasette \
- pip install datasette-vega
-
- docker commit $(docker ps -lq) datasette-with-plugins
-
-You can now run the new custom image like so::
-
- docker run -p 8001:8001 -v `pwd`:/mnt \
- datasette-with-plugins \
- datasette -p 8001 -h 0.0.0.0 /mnt/fixtures.db
-
-You can confirm that the plugins are installed by visiting
-http://127.0.0.1:8001/-/plugins
+.. _installation_pip:
Install using pip
-----------------
@@ -98,6 +38,8 @@ You can now run Datasette like so::
If you want to start making contributions to the Datasette project by installing a copy that lets you directly modify the code, take a look at our guide to :ref:`devenvironment`.
+.. _installation_pipx:
+
Install using pipx
------------------
@@ -184,3 +126,67 @@ To upgrade a plugin within the pipx environment use ``pipx runpip datasette inst
"version": "0.6.2"
}
]
+
+.. _installation_docker:
+
+Using Docker
+------------
+
+A Docker image containing the latest release of Datasette is published to Docker
+Hub here: https://hub.docker.com/r/datasetteproject/datasette/
+
+If you have Docker installed (for example with `Docker for Mac
+<https://www.docker.com/docker-mac>`_ on OS X) you can download and run this
+image like so::
+
+ docker run -p 8001:8001 -v `pwd`:/mnt \
+ datasetteproject/datasette \
+ datasette -p 8001 -h 0.0.0.0 /mnt/fixtures.db
+
+This will start an instance of Datasette running on your machine's port 8001,
+serving the ``fixtures.db`` file in your current directory.
+
+Now visit http://127.0.0.1:8001/ to access Datasette.
+
+(You can download a copy of ``fixtures.db`` from
+https://latest.datasette.io/fixtures.db )
+
+To upgrade to the most recent release of Datasette, run the following::
+
+ docker pull datasetteproject/datasette
+
+Loading Spatialite
+~~~~~~~~~~~~~~~~~~
+
+The ``datasetteproject/datasette`` image includes a recent version of the
+:ref:`SpatiaLite extension <spatialite>` for SQLite. To load and enable that
+module, use the following command::
+
+ docker run -p 8001:8001 -v `pwd`:/mnt \
+ datasetteproject/datasette \
+ datasette -p 8001 -h 0.0.0.0 /mnt/fixtures.db \
+ --load-extension=/usr/local/lib/mod_spatialite.so
+
+You can confirm that SpatiaLite is successfully loaded by visiting
+http://127.0.0.1:8001/-/versions
+
+Installing plugins
+~~~~~~~~~~~~~~~~~~
+
+If you want to install plugins into your local Datasette Docker image you can do
+so using the following recipe. This will install the plugins and then save a
+brand new local image called ``datasette-with-plugins``::
+
+ docker run datasetteproject/datasette \
+ pip install datasette-vega
+
+ docker commit $(docker ps -lq) datasette-with-plugins
+
+You can now run the new custom image like so::
+
+ docker run -p 8001:8001 -v `pwd`:/mnt \
+ datasette-with-plugins \
+ datasette -p 8001 -h 0.0.0.0 /mnt/fixtures.db
+
+You can confirm that the plugins are installed by visiting
+http://127.0.0.1:8001/-/plugins
From 9212f0c9c3138f005ea8d57acacb8a2a80b252a6 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 4 May 2020 12:35:28 -0700
Subject: [PATCH 0220/2124] Removed note about virtual environments
Simplifies things now that we also talk about pipx.
---
docs/installation.rst | 10 ++--------
1 file changed, 2 insertions(+), 8 deletions(-)
diff --git a/docs/installation.rst b/docs/installation.rst
index c88950c2..cdf1467a 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -22,19 +22,13 @@ You can install Datasette and its dependencies using ``pip``::
pip install datasette
-The last version to support Python 3.5 was 0.30.2 - you can install that version like so::
+The last version to support Python 3.5 was 0.30.2. If you are running Python 3.5 (check using ``python3 --version``) you can install that version of Datasette like so::
pip install datasette==0.30.2
-If you want to install Datasette in its own virtual environment, use this::
-
- python -mvenv datasette-venv
- source datasette-venv/bin/activate
- pip install datasette
-
You can now run Datasette like so::
- datasette fixtures.db
+ datasette
If you want to start making contributions to the Datasette project by installing a copy that lets you directly modify the code, take a look at our guide to :ref:`devenvironment`.
From 0784f2ef9d3ff6dd9df05f54cb51de29a6d11764 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 6 May 2020 10:18:31 -0700
Subject: [PATCH 0221/2124] Allow specific pragma functions, closes #761
---
datasette/utils/__init__.py | 21 ++++++++++++++++++++-
tests/test_utils.py | 5 ++++-
2 files changed, 24 insertions(+), 2 deletions(-)
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 490b71c8..f1c24041 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -171,7 +171,26 @@ allowed_sql_res = [
re.compile(r"^explain with\b"),
re.compile(r"^explain query plan with\b"),
]
-disallawed_sql_res = [(re.compile("pragma"), "Statement may not contain PRAGMA")]
+allowed_pragmas = (
+ "database_list",
+ "foreign_key_list",
+ "function_list",
+ "index_info",
+ "index_list",
+ "index_xinfo",
+ "page_count",
+ "max_page_count",
+ "page_size",
+ "schema_version",
+ "table_info",
+ "table_xinfo",
+)
+disallawed_sql_res = [
+ (
+ re.compile("pragma(?!_({}))".format("|".join(allowed_pragmas))),
+ "Statement may not contain PRAGMA",
+ )
+]
def validate_sql_select(sql):
diff --git a/tests/test_utils.py b/tests/test_utils.py
index fe5d9a26..7e4f1a8e 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -140,7 +140,8 @@ def test_custom_json_encoder(obj, expected):
"update blah;",
"-- sql comment to skip\nupdate blah;",
"update blah set some_column='# Hello there\n\n* This is a list\n* of items\n--\n[And a link](https://github.com/simonw/datasette-render-markdown).'\nas demo_markdown",
- "PRAGMA case_sensitive_like = true" "SELECT * FROM pragma_index_info('idx52')",
+ "PRAGMA case_sensitive_like = true",
+ "SELECT * FROM pragma_not_on_allow_list('idx52')",
],
)
def test_validate_sql_select_bad(bad_sql):
@@ -162,6 +163,8 @@ def test_validate_sql_select_bad(bad_sql):
"WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;",
"explain WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;",
"explain query plan WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;",
+ "SELECT * FROM pragma_index_info('idx52')",
+ "select * from pragma_table_xinfo('table')",
],
)
def test_validate_sql_select_good(good_sql):
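The trick in this patch is the negative lookahead: bare ``PRAGMA`` statements still match the disallowed pattern, while the table-valued ``pragma_*`` functions on the allow-list slip past it. A condensed illustration (allow-list shortened; validation lowercases the SQL first, which is why the pattern is lowercase)::

    import re

    allowed = ("table_info", "index_list")  # shortened for the example
    disallowed = re.compile("pragma(?!_({}))".format("|".join(allowed)))

    # Rejected: "pragma" is not followed by an allow-listed suffix
    assert disallowed.search("pragma case_sensitive_like = true")

    # Permitted: the lookahead sees "_table_info" right after "pragma"
    assert disallowed.search("select * from pragma_table_info('t')") is None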
From 182e5c8745c94576718315f7596ccc81e5e2417b Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 6 May 2020 11:20:58 -0700
Subject: [PATCH 0222/2124] Release Datasette 0.41
Refs #648 #731 #750 #151 #761 #752 #719 #756 #748
---
README.md | 1 +
docs/changelog.rst | 29 +++++++++++++++++++++++++++++
2 files changed, 30 insertions(+)
diff --git a/README.md b/README.md
index 12a1ec39..f2a3d81d 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+ * 6th May 2020: [Datasette 0.41](http://datasette.readthedocs.io/en/latest/changelog.html#v0-41) - New mechanism for [creating custom pages](https://datasette.readthedocs.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://datasette.readthedocs.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements.
* 21st April 2020: [Datasette 0.40](http://datasette.readthedocs.io/en/latest/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. Various bug fixes.
* 24th March 2020: [Datasette 0.39](http://datasette.readthedocs.io/en/latest/changelog.html#v0-39) - New `base_url` configuration option for running Datasette under a different URL prefix, `"sort"` and `"sort_desc"` metadata options for setting a default sort order for a table.
 * 8th March 2020: [Datasette 0.38](http://datasette.readthedocs.io/en/latest/changelog.html#v0-38) - New `--memory` option for `datasette publish cloudrun`, [Docker image](https://hub.docker.com/r/datasetteproject/datasette) upgraded to SQLite 3.31.1.
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 7a58f58b..dc06e4ef 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,35 @@
Changelog
=========
+.. _v0_41:
+
+0.41 (2020-05-06)
+-----------------
+
+You can now create :ref:`custom pages <custom_pages>` within your Datasette instance using a custom template file. For example, adding a template file called ``templates/pages/about.html`` will result in a new page being served at ``/about`` on your instance. See the :ref:`custom pages documentation <custom_pages>` for full details, including how to return custom HTTP headers, redirects and status codes. (`#648 <https://github.com/simonw/datasette/issues/648>`__)
+
+:ref:`config_dir` (`#731 <https://github.com/simonw/datasette/issues/731>`__) allows you to define a custom Datasette instance as a directory. So instead of running the following::
+
+ $ datasette one.db two.db \
+ --metadata=metadata.json \
+ --template-dir=templates/ \
+ --plugins-dir=plugins \
+ --static css:css
+
+You can instead arrange your files in a single directory called ``my-project`` and run this::
+
+ $ datasette my-project/
+
+Also in this release:
+
+* New ``NOT LIKE`` table filter: ``?colname__notlike=expression``. (`#750 <https://github.com/simonw/datasette/issues/750>`__)
+* Datasette now has a *pattern portfolio* at ``/-/patterns`` - e.g. https://latest.datasette.io/-/patterns. This is a page that shows every Datasette user interface component in one place, to aid core development and people building custom CSS themes. (`#151 <https://github.com/simonw/datasette/issues/151>`__)
+* SQLite `PRAGMA functions <https://www.sqlite.org/pragma.html#pragfunc>`__ such as ``pragma_table_info(tablename)`` are now allowed in Datasette SQL queries. (`#761 <https://github.com/simonw/datasette/issues/761>`__)
+* Datasette pages now consistently return a ``content-type`` of ``text/html; charset=utf-8``. (`#752 <https://github.com/simonw/datasette/issues/752>`__)
+* Datasette now handles an ASGI ``raw_path`` value of ``None``, which should allow compatibility with the `Mangum <https://github.com/erm/mangum>`__ adapter for running ASGI apps on AWS Lambda. Thanks, Colin Dellow. (`#719 <https://github.com/simonw/datasette/issues/719>`__)
+* Installation documentation now covers how to :ref:`installation_pipx`. (`#756 <https://github.com/simonw/datasette/issues/756>`__)
+* Improved the documentation for :ref:`full_text_search`. (`#748 <https://github.com/simonw/datasette/issues/748>`__)
+
.. _v0_40:
0.40 (2020-04-21)
From 69e3a855dd7e5a77409d70b18c45ae3c1a145a75 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 May 2020 07:16:39 -0700
Subject: [PATCH 0223/2124] Rename execute_against_connection_in_thread() to
execute_fn(), refs #685
---
datasette/cli.py | 2 +-
datasette/database.py | 32 ++++++++++----------------------
2 files changed, 11 insertions(+), 23 deletions(-)
diff --git a/datasette/cli.py b/datasette/cli.py
index 919be065..c59fb6e0 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -388,7 +388,7 @@ async def check_databases(ds):
# to confirm they are all usable
for database in list(ds.databases.values()):
try:
- await database.execute_against_connection_in_thread(check_connection)
+ await database.execute_fn(check_connection)
except SpatialiteConnectionProblem:
raise click.UsageError(
"It looks like you're trying to load a SpatiaLite"
diff --git a/datasette/database.py b/datasette/database.py
index 48c367ef..0f540e01 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -101,7 +101,7 @@ class Database:
result = e
task.reply_queue.sync_q.put(result)
- async def execute_against_connection_in_thread(self, fn):
+ async def execute_fn(self, fn):
def in_thread():
conn = getattr(connections, self.name, None)
if not conn:
@@ -163,9 +163,7 @@ class Database:
return Results(rows, False, cursor.description)
with trace("sql", database=self.name, sql=sql.strip(), params=params):
- results = await self.execute_against_connection_in_thread(
- sql_operation_in_thread
- )
+ results = await self.execute_fn(sql_operation_in_thread)
return results
@property
@@ -223,19 +221,13 @@ class Database:
return [r[0] for r in results.rows]
async def table_columns(self, table):
- return await self.execute_against_connection_in_thread(
- lambda conn: table_columns(conn, table)
- )
+ return await self.execute_fn(lambda conn: table_columns(conn, table))
async def primary_keys(self, table):
- return await self.execute_against_connection_in_thread(
- lambda conn: detect_primary_keys(conn, table)
- )
+ return await self.execute_fn(lambda conn: detect_primary_keys(conn, table))
async def fts_table(self, table):
- return await self.execute_against_connection_in_thread(
- lambda conn: detect_fts(conn, table)
- )
+ return await self.execute_fn(lambda conn: detect_fts(conn, table))
async def label_column_for_table(self, table):
explicit_label_column = self.ds.table_metadata(self.name, table).get(
@@ -244,9 +236,7 @@ class Database:
if explicit_label_column:
return explicit_label_column
# If a table has two columns, one of which is ID, then label_column is the other one
- column_names = await self.execute_against_connection_in_thread(
- lambda conn: table_columns(conn, table)
- )
+ column_names = await self.execute_fn(lambda conn: table_columns(conn, table))
# Is there a name or title column?
name_or_title = [c for c in column_names if c in ("name", "title")]
if name_or_title:
@@ -261,7 +251,7 @@ class Database:
return None
async def foreign_keys_for_table(self, table):
- return await self.execute_against_connection_in_thread(
+ return await self.execute_fn(
lambda conn: get_outbound_foreign_keys(conn, table)
)
@@ -279,9 +269,7 @@ class Database:
)
).rows
]
- has_spatialite = await self.execute_against_connection_in_thread(
- detect_spatialite
- )
+ has_spatialite = await self.execute_fn(detect_spatialite)
if has_spatialite:
# Also hide Spatialite internal tables
hidden_tables += [
@@ -329,10 +317,10 @@ class Database:
return [r[0] for r in results.rows]
async def get_all_foreign_keys(self):
- return await self.execute_against_connection_in_thread(get_all_foreign_keys)
+ return await self.execute_fn(get_all_foreign_keys)
async def get_outbound_foreign_keys(self, table):
- return await self.execute_against_connection_in_thread(
+ return await self.execute_fn(
lambda conn: get_outbound_foreign_keys(conn, table)
)
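For readers new to this corner of the codebase: ``execute_fn()`` is the primitive that runs a synchronous callback against a SQLite connection in a thread, so the event loop is never blocked by database work. A minimal sketch of the pattern, not Datasette's actual implementation (which also reuses one connection per thread)::

    import asyncio
    import sqlite3
    from concurrent.futures import ThreadPoolExecutor

    executor = ThreadPoolExecutor(max_workers=3)

    async def execute_fn(path, fn):
        def in_thread():
            conn = sqlite3.connect(path)
            try:
                return fn(conn)   # runs in the executor thread
            finally:
                conn.close()

        loop = asyncio.get_event_loop()
        return await loop.run_in_executor(executor, in_thread)

The shorter name makes call sites such as ``db.execute_fn(lambda conn: table_columns(conn, table))`` considerably easier to read, which is the point of this rename.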
From 4433306c1855ad69840cc76cbd41086137572be2 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 May 2020 09:05:46 -0700
Subject: [PATCH 0224/2124] Improvements + docs for db.execute() and Results
class
* Including new results.first() and results.single_value() methods. Closes #685
---
datasette/app.py | 3 +-
datasette/database.py | 39 +++++++++++++++++++++--
datasette/facets.py | 2 +-
datasette/utils/__init__.py | 21 -------------
datasette/views/base.py | 2 +-
datasette/views/table.py | 2 +-
docs/internals.rst | 63 ++++++++++++++++++++++++++++++++++++-
tests/test_database.py | 38 ++++++++++++++++++++++
8 files changed, 141 insertions(+), 29 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 8a4b6011..f1fcc5eb 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -26,10 +26,9 @@ from .views.index import IndexView
from .views.special import JsonDataView, PatternPortfolioView
from .views.table import RowView, TableView
from .renderer import json_renderer
-from .database import Database
+from .database import Database, QueryInterrupted
from .utils import (
- QueryInterrupted,
escape_css_string,
escape_sqlite,
format_bytes,
diff --git a/datasette/database.py b/datasette/database.py
index 0f540e01..e6154caa 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -8,8 +8,6 @@ import uuid
from .tracer import trace
from .utils import (
- QueryInterrupted,
- Results,
detect_fts,
detect_primary_keys,
detect_spatialite,
@@ -371,3 +369,40 @@ class WriteTask:
self.fn = fn
self.task_id = task_id
self.reply_queue = reply_queue
+
+
+class QueryInterrupted(Exception):
+ pass
+
+
+class MultipleValues(Exception):
+ pass
+
+
+class Results:
+ def __init__(self, rows, truncated, description):
+ self.rows = rows
+ self.truncated = truncated
+ self.description = description
+
+ @property
+ def columns(self):
+ return [d[0] for d in self.description]
+
+ def first(self):
+ if self.rows:
+ return self.rows[0]
+ else:
+ return None
+
+ def single_value(self):
+ if self.rows and 1 == len(self.rows) and 1 == len(self.rows[0]):
+ return self.rows[0][0]
+ else:
+ raise MultipleValues
+
+ def __iter__(self):
+ return iter(self.rows)
+
+ def __len__(self):
+ return len(self.rows)
diff --git a/datasette/facets.py b/datasette/facets.py
index 18558754..1712db9b 100644
--- a/datasette/facets.py
+++ b/datasette/facets.py
@@ -2,12 +2,12 @@ import json
import urllib
import re
from datasette import hookimpl
+from datasette.database import QueryInterrupted
from datasette.utils import (
escape_sqlite,
path_with_added_args,
path_with_removed_args,
detect_json1,
- QueryInterrupted,
InvalidSql,
sqlite3,
)
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index f1c24041..26a778d3 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -47,27 +47,6 @@ ENV SQLITE_EXTENSIONS /usr/lib/x86_64-linux-gnu/mod_spatialite.so
"""
-class QueryInterrupted(Exception):
- pass
-
-
-class Results:
- def __init__(self, rows, truncated, description):
- self.rows = rows
- self.truncated = truncated
- self.description = description
-
- @property
- def columns(self):
- return [d[0] for d in self.description]
-
- def __iter__(self):
- return iter(self.rows)
-
- def __len__(self):
- return len(self.rows)
-
-
def urlsafe_components(token):
"Splits token on commas and URL decodes each component"
return [urllib.parse.unquote_plus(b) for b in token.split(",")]
diff --git a/datasette/views/base.py b/datasette/views/base.py
index e2bce2f9..f5eafe63 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -10,8 +10,8 @@ import pint
from datasette import __version__
from datasette.plugins import pm
+from datasette.database import QueryInterrupted
from datasette.utils import (
- QueryInterrupted,
InvalidSql,
LimitedWriter,
is_url,
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 10e86eeb..c07447d3 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -5,9 +5,9 @@ import json
import jinja2
from datasette.plugins import pm
+from datasette.database import QueryInterrupted
from datasette.utils import (
CustomRow,
- QueryInterrupted,
RequestParameters,
append_querystring,
compound_keys_after_sql,
diff --git a/docs/internals.rst b/docs/internals.rst
index d7b6e7cb..0020f96d 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -94,13 +94,74 @@ Database class
Instances of the ``Database`` class can be used to execute queries against attached SQLite databases, and to run introspection against their schemas.
-SQLite only allows one database connection to write at a time. Datasette handles this for you by maintaining a queue of writes to be executed against a given database. Plugins can submit write operations to this queue and they will be executed in the order in which they are received.
+.. _database_execute:
+
+await db.execute(sql, ...)
+--------------------------
+
+Executes a SQL query against the database and returns the resulting rows (see :ref:`database_results`).
+
+``sql`` - string (required)
+ The SQL query to execute. This can include ``?`` or ``:named`` parameters.
+
+``params`` - list or dict
+ A list or dictionary of values to use for the parameters. List for ``?``, dictionary for ``:named``.
+
+``truncate`` - boolean
+ Should the rows returned by the query be truncated at the maximum page size? Defaults to ``True``; set this to ``False`` to disable truncation.
+
+``custom_time_limit`` - integer ms
+ A custom time limit for this query. This can be set to a lower value than the Datasette configured default. If a query takes longer than this it will be terminated early and raise a ``datasette.database.QueryInterrupted`` exception.
+
+``page_size`` - integer
+ Set a custom page size for truncation, overriding the configured Datasette default.
+
+``log_sql_errors`` - boolean
+ Should any SQL errors be logged to the console in addition to being raised as an error? Defaults to ``True``.
+
+.. _database_results:
+
+Results
+-------
+
+The ``db.execute()`` method returns a single ``Results`` object. This can be used to access the rows returned by the query.
+
+Iterating over a ``Results`` object will yield SQLite `Row objects <https://docs.python.org/3/library/sqlite3.html#sqlite3.Row>`__. Each of these can be treated as a tuple or can be accessed using ``row["column"]`` syntax:
+
+.. code-block:: python
+
+ info = []
+ results = await db.execute("select name from sqlite_master")
+ for row in results:
+ info.append(row["name"])
+
+The ``Results`` object also has the following properties and methods:
+
+``.truncated`` - boolean
+ Indicates if this query was truncated - if it returned more results than the specified ``page_size``. If this is true then the results object will only provide access to the first ``page_size`` rows in the query result. You can disable truncation by passing ``truncate=False`` to the ``db.execute()`` method.
+
+``.columns`` - list of strings
+ A list of column names returned by the query.
+
+``.rows`` - list of sqlite3.Row
+ This property provides direct access to the list of rows returned by the database. You can access specific rows by index using ``results.rows[0]``.
+
+``.first()`` - row or None
+ Returns the first row in the results, or ``None`` if no rows were returned.
+
+``.single_value()``
+ Returns the value of the first column of the first row of results - but only if the query returned a single row with a single column. Raises a ``datasette.database.MultipleValues`` exception otherwise.
+
+``.__len__()``
+ Calling ``len(results)`` returns the (truncated) number of returned results.
.. _database_execute_write:
await db.execute_write(sql, params=None, block=False)
-----------------------------------------------------
+SQLite only allows one database connection to write at a time. Datasette handles this for you by maintaining a queue of writes to be executed against a given database. Plugins can submit write operations to this queue and they will be executed in the order in which they are received.
+
This method can be used to queue up a non-SELECT SQL query to be executed against a single write connection to the database.
You can pass additional SQL parameters as a tuple or dictionary.
diff --git a/tests/test_database.py b/tests/test_database.py
index a9728019..d4055776 100644
--- a/tests/test_database.py
+++ b/tests/test_database.py
@@ -1,9 +1,47 @@
+from datasette.database import Results, MultipleValues
+from datasette.utils import sqlite3
from .fixtures import app_client
import pytest
import time
import uuid
+@pytest.mark.asyncio
+async def test_execute1(app_client):
+ db = app_client.ds.databases["fixtures"]
+ results = await db.execute("select * from facetable")
+ assert isinstance(results, Results)
+ assert 15 == len(results)
+
+
+@pytest.mark.asyncio
+async def test_results_first(app_client):
+ db = app_client.ds.databases["fixtures"]
+ assert None is (await db.execute("select * from facetable where pk > 100")).first()
+ results = await db.execute("select * from facetable")
+ row = results.first()
+ assert isinstance(row, sqlite3.Row)
+
+
+@pytest.mark.parametrize(
+ "query,expected",
+ [
+ ("select 1", 1),
+ ("select 1, 2", None),
+ ("select 1 as num union select 2 as num", None),
+ ],
+)
+@pytest.mark.asyncio
+async def test_results_single_value(app_client, query, expected):
+ db = app_client.ds.databases["fixtures"]
+ results = await db.execute(query)
+ if expected:
+ assert expected == results.single_value()
+ else:
+ with pytest.raises(MultipleValues):
+ results.single_value()
+
+
@pytest.mark.parametrize(
"tables,exists",
(
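A short usage sketch of the new ``Results`` helpers as a plugin might use them; the ``db`` object and table name are assumptions borrowed from the tests above::

    from datasette.database import MultipleValues

    async def describe(db):
        # .first() returns a sqlite3.Row, or None when there are no rows
        row = (await db.execute(
            "select * from facetable where pk = ?", [1]
        )).first()

        # .single_value() unwraps a one-row, one-column result and
        # raises MultipleValues for anything else
        count = (await db.execute(
            "select count(*) from facetable"
        )).single_value()
        return row, count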
From ec9cdc3ffa7d9a9a214f71fa7864f0cbdf6ccb23 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 May 2020 09:52:53 -0700
Subject: [PATCH 0225/2124] Documentation for .execute_fn(), refs #685
---
docs/internals.rst | 20 ++++++++++++++++++++
1 file changed, 20 insertions(+)
diff --git a/docs/internals.rst b/docs/internals.rst
index 0020f96d..526c531c 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -155,6 +155,26 @@ The ``Results`` object also has the following properties and methods:
``.__len__()``
Calling ``len(results)`` returns the (truncated) number of returned results.
+.. _database_execute_fn:
+
+await db.execute_fn(fn)
+-----------------------
+
+Executes a given callback function against a read-only database connection running in a thread. The function will be passed a SQLite connection, and the return value from the function will be returned by the ``await``.
+
+Example usage:
+
+syntax:
+
+.. code-block:: python
+
+ def get_version(conn);
+ return conn.execute(
+ "select sqlite_version()"
+ ).fetchall()[0][0]
+
+ version = await db.execute_fn(get_version)
+
.. _database_execute_write:
await db.execute_write(sql, params=None, block=False)
From 545c71b6044bbf30caef04976cbd73d519d278a5 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 May 2020 09:57:01 -0700
Subject: [PATCH 0226/2124] Small cleanup
---
docs/internals.rst | 3 ---
1 file changed, 3 deletions(-)
diff --git a/docs/internals.rst b/docs/internals.rst
index 526c531c..aa1ff7e7 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -163,9 +163,6 @@ await db.execute_fn(fn)
Executes a given callback function against a read-only database connection running in a thread. The function will be passed a SQLite connection, and the return value from the function will be returned by the ``await``.
Example usage:
-
-syntax:
-
.. code-block:: python
def get_version(conn);
From 5ab848f0b87ad2030088a7259fc1802316b90200 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 May 2020 10:04:47 -0700
Subject: [PATCH 0227/2124] RST fix
---
docs/internals.rst | 1 +
1 file changed, 1 insertion(+)
diff --git a/docs/internals.rst b/docs/internals.rst
index aa1ff7e7..43944de9 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -163,6 +163,7 @@ await db.execute_fn(fn)
Executes a given callback function against a read-only database connection running in a thread. The function will be passed a SQLite connection, and the return value from the function will be returned by the ``await``.
Example usage:
+
.. code-block:: python
def get_version(conn);
From 2694ddcf14b88955e93a6cfb6c725500bb93e219 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 May 2020 10:29:17 -0700
Subject: [PATCH 0228/2124] Test for .execute_fn(), refs #685
---
docs/internals.rst | 2 +-
tests/test_database.py | 12 +++++++++++-
2 files changed, 12 insertions(+), 2 deletions(-)
diff --git a/docs/internals.rst b/docs/internals.rst
index 43944de9..7b4c1755 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -166,7 +166,7 @@ Example usage:
.. code-block:: python
- def get_version(conn);
+ def get_version(conn):
return conn.execute(
"select sqlite_version()"
).fetchall()[0][0]
diff --git a/tests/test_database.py b/tests/test_database.py
index d4055776..1f1a3a7e 100644
--- a/tests/test_database.py
+++ b/tests/test_database.py
@@ -7,7 +7,7 @@ import uuid
@pytest.mark.asyncio
-async def test_execute1(app_client):
+async def test_execute(app_client):
db = app_client.ds.databases["fixtures"]
results = await db.execute("select * from facetable")
assert isinstance(results, Results)
@@ -42,6 +42,16 @@ async def test_results_single_value(app_client, query, expected):
results.single_value()
+@pytest.mark.asyncio
+async def test_execute_fn(app_client):
+ db = app_client.ds.databases["fixtures"]
+
+ def get_1_plus_1(conn):
+ return conn.execute("select 1 + 1").fetchall()[0][0]
+
+ assert 2 == await db.execute_fn(get_1_plus_1)
+
+
@pytest.mark.parametrize(
"tables,exists",
(
From af6c6c5d6f929f951c0e63bfd1c82e37a071b50f Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 May 2020 10:38:27 -0700
Subject: [PATCH 0229/2124] Release 0.42, refs #685
---
README.md | 1 +
docs/changelog.rst | 11 +++++++++++
2 files changed, 12 insertions(+)
diff --git a/README.md b/README.md
index f2a3d81d..7351c5c0 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+ * 8th May 2020: [Datasette 0.42](http://datasette.readthedocs.io/en/latest/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database.
* 6th May 2020: [Datasette 0.41](http://datasette.readthedocs.io/en/latest/changelog.html#v0-41) - New mechanism for [creating custom pages](https://datasette.readthedocs.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://datasette.readthedocs.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements.
* 21st April 2020: [Datasette 0.40](http://datasette.readthedocs.io/en/latest/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. Various bug fixes.
* 24th March 2020: [Datasette 0.39](http://datasette.readthedocs.io/en/latest/changelog.html#v0-39) - New `base_url` configuration option for running Datasette under a different URL prefix, `"sort"` and `"sort_desc"` metadata options for setting a default sort order for a table.
diff --git a/docs/changelog.rst b/docs/changelog.rst
index dc06e4ef..48d3128b 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,17 @@
Changelog
=========
+.. _v0_42:
+
+0.42 (2020-05-08)
+-----------------
+
+A small release which provides improved internal methods for use in plugins, along with documentation. See `#685 <https://github.com/simonw/datasette/issues/685>`__.
+
+* Added documentation for ``db.execute()``, see :ref:`database_execute`.
+* Renamed ``db.execute_against_connection_in_thread()`` to ``db.execute_fn()`` and made it a documented method, see :ref:`database_execute_fn`.
+* New ``results.first()`` and ``results.single_value()`` methods, plus documentation for the ``Results`` class - see :ref:`database_results`.
+
.. _v0_41:
0.41 (2020-05-06)
From fc24edc153d76bcec917bb23d532981d9862e696 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 May 2020 11:28:53 -0700
Subject: [PATCH 0230/2124] Added project_urls, closes #764
---
setup.py | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/setup.py b/setup.py
index c4886a11..d9c70de5 100644
--- a/setup.py
+++ b/setup.py
@@ -32,6 +32,14 @@ setup(
author="Simon Willison",
license="Apache License, Version 2.0",
url="https://github.com/simonw/datasette",
+ project_urls={
+ "Documentation": "https://datasette.readthedocs.io/en/stable/",
+ "Changelog": "https://datasette.readthedocs.io/en/stable/changelog.html",
+ "Live demo": "https://latest.datasette.io/",
+ "Source code": "https://github.com/simonw/datasette",
+ "Issues": "https://github.com/simonw/datasette/issues",
+ "CI": "https://travis-ci.org/simonw/datasette",
+ },
packages=find_packages(exclude="tests"),
package_data={"datasette": ["templates/*.html"]},
include_package_data=True,
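These URLs appear in the sidebar of the project's PyPI page, and they can also be read back programmatically once the package is installed. A quick check, assuming Python 3.8+ for ``importlib.metadata``::

    from importlib.metadata import metadata

    meta = metadata("datasette")
    for entry in meta.get_all("Project-URL") or []:
        print(entry)
    # e.g. "Documentation, https://datasette.readthedocs.io/en/stable/"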
From 504196341c49840270bd75ea1a1871ef386ba7ea Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 May 2020 22:51:39 -0700
Subject: [PATCH 0231/2124] Visually distinguish float/int columns, closes #729
---
datasette/static/app.css | 5 ++
datasette/templates/_table.html | 2 +-
datasette/views/table.py | 12 ++-
tests/test_html.py | 133 ++++++++++++++++----------------
4 files changed, 85 insertions(+), 67 deletions(-)
diff --git a/datasette/static/app.css b/datasette/static/app.css
index bae091b8..cc33277a 100644
--- a/datasette/static/app.css
+++ b/datasette/static/app.css
@@ -345,3 +345,8 @@ p.zero-results {
padding: 0.5em;
font-style: italic;
}
+
+/* Value types */
+.type-float, .type-int {
+ color: #666;
+}
\ No newline at end of file
diff --git a/datasette/templates/_table.html b/datasette/templates/_table.html
index 42c37c55..8fee77b2 100644
--- a/datasette/templates/_table.html
+++ b/datasette/templates/_table.html
@@ -21,7 +21,7 @@
{% for row in display_rows %}
{% endif %}
diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst
index da10191e..c3efd930 100644
--- a/docs/sql_queries.rst
+++ b/docs/sql_queries.rst
@@ -72,7 +72,9 @@ Canned queries
--------------
As an alternative to adding views to your database, you can define canned
-queries inside your ``metadata.json`` file. Here's an example::
+queries inside your ``metadata.json`` file. Here's an example:
+
+.. code-block:: json
{
"databases": {
@@ -86,7 +88,7 @@ queries inside your ``metadata.json`` file. Here's an example::
}
}
-Then run datasette like this::
+Then run Datasette like this::
datasette sf-trees.db -m metadata.json
@@ -104,6 +106,11 @@ title and description on the canned query page. As with regular table metadata
you can alternatively specify ``"description_html"`` to have your description
rendered as HTML (rather than having HTML special characters escaped).
+.. _canned_queries_named_parameters:
+
+Named parameters
+~~~~~~~~~~~~~~~~
+
Canned queries support named parameters, so if you include those in the SQL you
will then be able to enter them using the form fields on the canned query page
or by adding them to the URL. This means canned queries can be used to create
@@ -117,7 +124,9 @@ Here's an example of a canned query with a named parameter:
from facetable join facet_cities on facetable.city_id = facet_cities.id
where neighborhood like '%' || :text || '%' order by neighborhood;
-In the canned query JSON it looks like this::
+In the canned query JSON it looks like this:
+
+.. code-block:: json
{
"databases": {
@@ -139,6 +148,31 @@ https://latest.datasette.io/fixtures/neighborhood_search?text=town
Note that we are using SQLite string concatenation here - the ``||`` operator -
to add wildcard ``%`` characters to the string provided by the user.
+.. _canned_queries_default_fragment:
+
+Setting a default fragment
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Some plugins, such as `datasette-vega <https://github.com/simonw/datasette-vega>`__, can be configured by including additional data in the fragment hash of the URL - the bit that comes after a ``#`` symbol.
+
+You can set a default fragment hash that will be included in the link to the canned query from the database index page using the ``"fragment"`` key:
+
+.. code-block:: json
+
+ {
+ "databases": {
+ "fixtures": {
+ "queries": {
+ "neighborhood_search": {
+ "sql": "select neighborhood, facet_cities.name, state\nfrom facetable join facet_cities on facetable.city_id = facet_cities.id\nwhere neighborhood like '%' || :text || '%' order by neighborhood;",
+ "fragment": "fragment-goes-here"
+ }
+ }
+ }
+ }
+
+`See here <https://latest.datasette.io/fixtures>`__ for a demo of this in action.
+
.. _pagination:
Pagination
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 0284ff9c..a3b75f9f 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -327,6 +327,7 @@ METADATA = {
),
"title": "Search neighborhoods",
"description_html": "Demonstrating simple like search",
+ "fragment": "fragment-goes-here",
},
},
}
diff --git a/tests/test_html.py b/tests/test_html.py
index 445f7b4c..5a07953e 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -85,6 +85,21 @@ def test_database_page_redirects_with_url_hash(app_client_with_hash):
assert "fixtures" in response.text
+def test_database_page(app_client):
+ response = app_client.get("/fixtures")
+ soup = Soup(response.body, "html.parser")
+ queries_ul = soup.find("h2", text="Queries").find_next_sibling("ul")
+ assert queries_ul is not None
+ assert [
+ (
+ "/fixtures/%F0%9D%90%9C%F0%9D%90%A2%F0%9D%90%AD%F0%9D%90%A2%F0%9D%90%9E%F0%9D%90%AC",
+ "𝐜𝐢𝐭𝐢𝐞𝐬",
+ ),
+ ("/fixtures/pragma_cache_size", "pragma_cache_size"),
+ ("/fixtures/neighborhood_search#fragment-goes-here", "Search neighborhoods"),
+ ] == [(a["href"], a.text) for a in queries_ul.find_all("a")]
+
+
def test_invalid_custom_sql(app_client):
response = app_client.get("/fixtures?sql=.schema")
assert response.status == 400
From 6d7cb02f00010d3cb4b4bac0460d41277652b80e Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 27 May 2020 15:17:53 -0700
Subject: [PATCH 0241/2124] Documentation for request object, refs #706
---
docs/internals.rst | 55 ++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 55 insertions(+)
diff --git a/docs/internals.rst b/docs/internals.rst
index 7b4c1755..5535ceb1 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -225,3 +225,58 @@ Here's an example of ``block=True`` in action:
num_rows_left = await database.execute_write_fn(my_action, block=True)
except Exception as e:
print("An error occurred:", e)
+
+.. _internals_request:
+
+Request object
+~~~~~~~~~~~~~~
+
+The request object is passed to various plugin hooks. It represents an incoming HTTP request. It has the following properties:
+
+``.scope`` - dictionary
+ The ASGI scope that was used to construct this request, described in the `ASGI HTTP connection scope <https://asgi.readthedocs.io/en/latest/specs/www.html#connection-scope>`__ specification.
+
+``.method`` - string
+ The HTTP method for this request, usually ``GET`` or ``POST``.
+
+``.url`` - string
+ The full URL for this request, e.g. ``https://latest.datasette.io/fixtures``.
+
+``.scheme`` - string
+ The request scheme - usually ``https`` or ``http``.
+
+``.headers`` - dictionary (str -> str)
+ A dictionary of incoming HTTP request headers.
+
+``.host`` - string
+ The host header from the incoming request, e.g. ``latest.datasette.io`` or ``localhost``.
+
+``.path`` - string
+ The path of the request, e.g. ``/fixtures``.
+
+``.query_string`` - string
+ The querystring component of the request, without the ``?`` - e.g. ``name__contains=sam&age__gt=10``.
+
+``.args`` - RequestParameters
+ An object representing the parsed querystring parameters, see below.
+
+``.raw_args`` - dictionary
+ A dictionary mapping querystring keys to values. If multiple keys of the same kind are provided, e.g. ``?foo=1&foo=2``, only the first value will be present in this dictionary.
+
+The object also has one awaitable method:
+
+``await request.post_vars()`` - dictionary
+ Returns a dictionary of form variables that were submitted in the request body via ``POST``.
+
+The RequestParameters class
+---------------------------
+
+This class, returned by ``request.args``, is a subclass of a Python dictionary that provides methods for working with keys that map to lists of values.
+
+Consider the querystring ``?foo=1&foo=2``. This will produce a ``request.args`` that looks like this::
+
+ RequestParameters({"foo": ["1", "2"]})
+
+Calling ``request.args.get("foo")`` will return the first value, ``"1"``. If that key is not present it will return ``None`` - or the second argument if you passed one, which will be used as the default.
+
+Calling ``request.args.getlist("foo")`` will return the full list, ``["1", "2"]``.
\ No newline at end of file
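The ``get()``/``getlist()`` split mirrors how the standard library parses querystrings into lists of values, which makes the documented behavior easy to model outside Datasette::

    from urllib.parse import parse_qs

    args = parse_qs("foo=1&foo=2&bar=3")
    # {'foo': ['1', '2'], 'bar': ['3']}

    assert args["foo"][0] == "1"             # request.args.get("foo")
    assert args["foo"] == ["1", "2"]         # request.args.getlist("foo")
    assert args.get("baz", ["x"])[0] == "x"  # get() with a default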
From 50652f474b94e83d49ee15f219820cdbfc450f11 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 27 May 2020 15:29:42 -0700
Subject: [PATCH 0242/2124] Stop using .raw_args, deprecate and undocument it -
refs #706
---
datasette/utils/asgi.py | 1 +
datasette/views/database.py | 4 ++--
datasette/views/table.py | 8 ++++----
docs/internals.rst | 3 ---
4 files changed, 7 insertions(+), 9 deletions(-)
diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py
index 20047bb5..62a2a0c8 100644
--- a/datasette/utils/asgi.py
+++ b/datasette/utils/asgi.py
@@ -65,6 +65,7 @@ class Request:
@property
def raw_args(self):
+ # Deprecated, undocumented - may be removed in Datasette 1.0
return {key: value[0] for key, value in self.args.items()}
async def post_vars(self):
diff --git a/datasette/views/database.py b/datasette/views/database.py
index 92e24f84..cd27dd5f 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -24,7 +24,7 @@ class DatabaseView(DataView):
if request.args.get("sql"):
if not self.ds.config("allow_sql"):
raise DatasetteError("sql= is not allowed", status=400)
- sql = request.raw_args.pop("sql")
+ sql = request.args.get("sql")
validate_sql_select(sql)
return await QueryView(self.ds).data(
request, database, hash, sql, _size=_size, metadata=metadata
@@ -107,7 +107,7 @@ class QueryView(DataView):
metadata=None,
_size=None,
):
- params = request.raw_args
+ params = {key: request.args.get(key) for key in request.args}
if "sql" in params:
params.pop("sql")
if "_shape" in params:
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 3289e58b..aab4bbe3 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -527,7 +527,7 @@ class TableView(RowTableShared):
extra_args = {}
# Handle ?_size=500
- page_size = _size or request.raw_args.get("_size")
+ page_size = _size or request.args.get("_size")
if page_size:
if page_size == "max":
page_size = self.ds.max_returned_rows
@@ -558,8 +558,8 @@ class TableView(RowTableShared):
sql_no_limit=sql_no_limit.rstrip(), limit=page_size + 1, offset=offset
)
- if request.raw_args.get("_timelimit"):
- extra_args["custom_time_limit"] = int(request.raw_args["_timelimit"])
+ if request.args.get("_timelimit"):
+ extra_args["custom_time_limit"] = int(request.args["_timelimit"])
results = await db.execute(sql, params, truncate=True, **extra_args)
@@ -890,7 +890,7 @@ class RowView(RowTableShared):
"units": self.ds.table_metadata(database, table).get("units", {}),
}
- if "foreign_key_tables" in (request.raw_args.get("_extras") or "").split(","):
+ if "foreign_key_tables" in (request.args.get("_extras") or "").split(","):
data["foreign_key_tables"] = await self.foreign_key_tables(
database, table, pk_values
)
diff --git a/docs/internals.rst b/docs/internals.rst
index 5535ceb1..5bcb9da9 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -260,9 +260,6 @@ The request object is passed to various plugin hooks. It represents an incoming
``.args`` - RequestParameters
An object representing the parsed querystring parameters, see below.
-``.raw_args`` - dictionary
- A dictionary mapping querystring keys to values. If multiple values are provided for the same key, e.g. ``?foo=1&foo=2``, only the first value will be present in this dictionary.
-
The object also has one awaitable method:
``await request.post_vars()`` - dictionary
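The pattern that replaces ``request.raw_args`` throughout this patch is worth spelling out. A one-line sketch of flattening ``request.args`` down to a plain dictionary that keeps only the first value for each key - exactly what the deprecated property returned:

.. code-block:: python

    # Equivalent of the deprecated request.raw_args property:
    # ?foo=1&foo=2 -> {"foo": "1"}
    params = {key: request.args.get(key) for key in request.args}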
From 4b96857f170e329a73186e703cc0d9ca4e8719cc Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 27 May 2020 15:35:25 -0700
Subject: [PATCH 0243/2124] Link to request object documentation, refs #706
---
docs/plugins.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/plugins.rst b/docs/plugins.rst
index 78ad0309..feb14593 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -669,7 +669,7 @@ Extra template variables that should be made available in the rendered template
The name of the view being displayed. (`index`, `database`, `table`, and `row` are the most important ones.)
``request`` - object
- The current HTTP request object. ``request.scope`` provides access to the ASGI scope.
+ The current HTTP :ref:`internals_request`.
``datasette`` - :ref:`internals_datasette`
You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``
From 446e5de65d1b9c6c877e38b0ef13bc9285c465a1 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 27 May 2020 17:57:25 -0700
Subject: [PATCH 0244/2124] Refactored test plugins into tests/plugins, closes
#775
---
tests/fixtures.py | 182 +----------------------------------
tests/plugins/my_plugin.py | 89 +++++++++++++++++
tests/plugins/my_plugin_2.py | 94 ++++++++++++++++++
tests/plugins/view_name.py | 9 ++
tests/test_api.py | 1 +
tests/test_custom_pages.py | 16 +--
6 files changed, 197 insertions(+), 194 deletions(-)
create mode 100644 tests/plugins/my_plugin.py
create mode 100644 tests/plugins/my_plugin_2.py
create mode 100644 tests/plugins/view_name.py
diff --git a/tests/fixtures.py b/tests/fixtures.py
index a3b75f9f..1eaa1dfe 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -19,6 +19,8 @@ from urllib.parse import unquote, quote
# This temp file is used by one of the plugin config tests
TEMP_PLUGIN_SECRET_FILE = os.path.join(tempfile.gettempdir(), "plugin-secret")
+PLUGINS_DIR = str(pathlib.Path(__file__).parent / "plugins")
+
class TestResponse:
def __init__(self, status, headers, body):
@@ -109,7 +111,6 @@ def make_app_client(
inspect_data=None,
static_mounts=None,
template_dir=None,
- extra_plugins=None,
):
with tempfile.TemporaryDirectory() as tmpdir:
filepath = os.path.join(tmpdir, filename)
@@ -130,12 +131,6 @@ def make_app_client(
sqlite3.connect(extra_filepath).executescript(extra_sql)
files.append(extra_filepath)
os.chdir(os.path.dirname(filepath))
- plugins_dir = os.path.join(tmpdir, "plugins")
- os.mkdir(plugins_dir)
- open(os.path.join(plugins_dir, "my_plugin.py"), "w").write(PLUGIN1)
- open(os.path.join(plugins_dir, "my_plugin_2.py"), "w").write(PLUGIN2)
- for filename, content in (extra_plugins or {}).items():
- open(os.path.join(plugins_dir, filename), "w").write(content)
config = config or {}
config.update(
{
@@ -150,7 +145,7 @@ def make_app_client(
memory=memory,
cors=cors,
metadata=METADATA,
- plugins_dir=plugins_dir,
+ plugins_dir=PLUGINS_DIR,
config=config,
inspect_data=inspect_data,
static_mounts=static_mounts,
@@ -334,177 +329,6 @@ METADATA = {
},
}
-PLUGIN1 = """
-from datasette import hookimpl
-import base64
-import pint
-import json
-
-ureg = pint.UnitRegistry()
-
-
-@hookimpl
-def prepare_connection(conn, database, datasette):
- def convert_units(amount, from_, to_):
- "select convert_units(100, 'm', 'ft');"
- return (amount * ureg(from_)).to(to_).to_tuple()[0]
- conn.create_function('convert_units', 3, convert_units)
- def prepare_connection_args():
- return 'database={}, datasette.plugin_config("name-of-plugin")={}'.format(
- database, datasette.plugin_config("name-of-plugin")
- )
- conn.create_function('prepare_connection_args', 0, prepare_connection_args)
-
-
-@hookimpl
-def extra_css_urls(template, database, table, datasette):
- return ['https://plugin-example.com/{}/extra-css-urls-demo.css'.format(
- base64.b64encode(json.dumps({
- "template": template,
- "database": database,
- "table": table,
- }).encode("utf8")).decode("utf8")
- )]
-
-
-@hookimpl
-def extra_js_urls():
- return [{
- 'url': 'https://plugin-example.com/jquery.js',
- 'sri': 'SRIHASH',
- }, 'https://plugin-example.com/plugin1.js']
-
-
-@hookimpl
-def extra_body_script(template, database, table, datasette):
- return 'var extra_body_script = {};'.format(
- json.dumps({
- "template": template,
- "database": database,
- "table": table,
- "config": datasette.plugin_config(
- "name-of-plugin",
- database=database,
- table=table,
- )
- })
- )
-
-
-@hookimpl
-def render_cell(value, column, table, database, datasette):
- # Render some debug output in cell with value RENDER_CELL_DEMO
- if value != "RENDER_CELL_DEMO":
- return None
- return json.dumps({
- "column": column,
- "table": table,
- "database": database,
- "config": datasette.plugin_config(
- "name-of-plugin",
- database=database,
- table=table,
- )
- })
-
-
-@hookimpl
-def extra_template_vars(template, database, table, view_name, request, datasette):
- return {
- "extra_template_vars": json.dumps({
- "template": template,
- "scope_path": request.scope["path"] if request else None
- }, default=lambda b: b.decode("utf8"))
- }
-"""
-
-PLUGIN2 = """
-from datasette import hookimpl
-from functools import wraps
-import jinja2
-import json
-
-
-@hookimpl
-def extra_js_urls():
- return [{
- 'url': 'https://plugin-example.com/jquery.js',
- 'sri': 'SRIHASH',
- }, 'https://plugin-example.com/plugin2.js']
-
-
-@hookimpl
-def render_cell(value, database):
- # Render {"href": "...", "label": "..."} as link
- if not isinstance(value, str):
- return None
- stripped = value.strip()
- if not stripped.startswith("{") and stripped.endswith("}"):
- return None
- try:
- data = json.loads(value)
- except ValueError:
- return None
- if not isinstance(data, dict):
- return None
- if set(data.keys()) != {"href", "label"}:
- return None
- href = data["href"]
- if not (
- href.startswith("/") or href.startswith("http://")
- or href.startswith("https://")
- ):
- return None
- return jinja2.Markup(
- '<a data-database="{database}" href="{href}">{label}</a>'.format(
- database=database,
- href=jinja2.escape(data["href"]),
- label=jinja2.escape(data["label"] or "") or "&nbsp;"
- )
- )
-
-
-@hookimpl
-def extra_template_vars(template, database, table, view_name, request, datasette):
- async def query_database(sql):
- first_db = list(datasette.databases.keys())[0]
- return (
- await datasette.execute(first_db, sql)
- ).rows[0][0]
- async def inner():
- return {
- "extra_template_vars_from_awaitable": json.dumps({
- "template": template,
- "scope_path": request.scope["path"] if request else None,
- "awaitable": True,
- }, default=lambda b: b.decode("utf8")),
- "query_database": query_database,
- }
- return inner
-
-
-@hookimpl
-def asgi_wrapper(datasette):
- def wrap_with_databases_header(app):
- @wraps(app)
- async def add_x_databases_header(scope, recieve, send):
- async def wrapped_send(event):
- if event["type"] == "http.response.start":
- original_headers = event.get("headers") or []
- event = {
- "type": event["type"],
- "status": event["status"],
- "headers": original_headers + [
- [b"x-databases",
- ", ".join(datasette.databases.keys()).encode("utf-8")]
- ],
- }
- await send(event)
- await app(scope, recieve, wrapped_send)
- return add_x_databases_header
- return wrap_with_databases_header
-"""
-
TABLES = (
"""
CREATE TABLE simple_primary_key (
diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py
new file mode 100644
index 00000000..e55a0a32
--- /dev/null
+++ b/tests/plugins/my_plugin.py
@@ -0,0 +1,89 @@
+from datasette import hookimpl
+import base64
+import pint
+import json
+
+ureg = pint.UnitRegistry()
+
+
+@hookimpl
+def prepare_connection(conn, database, datasette):
+ def convert_units(amount, from_, to_):
+ "select convert_units(100, 'm', 'ft');"
+ return (amount * ureg(from_)).to(to_).to_tuple()[0]
+
+ conn.create_function("convert_units", 3, convert_units)
+
+ def prepare_connection_args():
+ return 'database={}, datasette.plugin_config("name-of-plugin")={}'.format(
+ database, datasette.plugin_config("name-of-plugin")
+ )
+
+ conn.create_function("prepare_connection_args", 0, prepare_connection_args)
+
+
+@hookimpl
+def extra_css_urls(template, database, table, datasette):
+ return [
+ "https://plugin-example.com/{}/extra-css-urls-demo.css".format(
+ base64.b64encode(
+ json.dumps(
+ {"template": template, "database": database, "table": table,}
+ ).encode("utf8")
+ ).decode("utf8")
+ )
+ ]
+
+
+@hookimpl
+def extra_js_urls():
+ return [
+ {"url": "https://plugin-example.com/jquery.js", "sri": "SRIHASH",},
+ "https://plugin-example.com/plugin1.js",
+ ]
+
+
+@hookimpl
+def extra_body_script(template, database, table, datasette):
+ return "var extra_body_script = {};".format(
+ json.dumps(
+ {
+ "template": template,
+ "database": database,
+ "table": table,
+ "config": datasette.plugin_config(
+ "name-of-plugin", database=database, table=table,
+ ),
+ }
+ )
+ )
+
+
+@hookimpl
+def render_cell(value, column, table, database, datasette):
+ # Render some debug output in cell with value RENDER_CELL_DEMO
+ if value != "RENDER_CELL_DEMO":
+ return None
+ return json.dumps(
+ {
+ "column": column,
+ "table": table,
+ "database": database,
+ "config": datasette.plugin_config(
+ "name-of-plugin", database=database, table=table,
+ ),
+ }
+ )
+
+
+@hookimpl
+def extra_template_vars(template, database, table, view_name, request, datasette):
+ return {
+ "extra_template_vars": json.dumps(
+ {
+ "template": template,
+ "scope_path": request.scope["path"] if request else None,
+ },
+ default=lambda b: b.decode("utf8"),
+ )
+ }
diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py
new file mode 100644
index 00000000..fdc6956d
--- /dev/null
+++ b/tests/plugins/my_plugin_2.py
@@ -0,0 +1,94 @@
+from datasette import hookimpl
+from functools import wraps
+import jinja2
+import json
+
+
+@hookimpl
+def extra_js_urls():
+ return [
+ {"url": "https://plugin-example.com/jquery.js", "sri": "SRIHASH",},
+ "https://plugin-example.com/plugin2.js",
+ ]
+
+
+@hookimpl
+def render_cell(value, database):
+ # Render {"href": "...", "label": "..."} as link
+ if not isinstance(value, str):
+ return None
+ stripped = value.strip()
+ if not (stripped.startswith("{") and stripped.endswith("}")):
+ return None
+ try:
+ data = json.loads(value)
+ except ValueError:
+ return None
+ if not isinstance(data, dict):
+ return None
+ if set(data.keys()) != {"href", "label"}:
+ return None
+ href = data["href"]
+ if not (
+ href.startswith("/")
+ or href.startswith("http://")
+ or href.startswith("https://")
+ ):
+ return None
+ return jinja2.Markup(
+ '<a data-database="{database}" href="{href}">{label}</a>'.format(
+ database=database,
+ href=jinja2.escape(data["href"]),
+ label=jinja2.escape(data["label"] or "") or "&nbsp;",
+ )
+ )
+
+
+@hookimpl
+def extra_template_vars(template, database, table, view_name, request, datasette):
+ async def query_database(sql):
+ first_db = list(datasette.databases.keys())[0]
+ return (await datasette.execute(first_db, sql)).rows[0][0]
+
+ async def inner():
+ return {
+ "extra_template_vars_from_awaitable": json.dumps(
+ {
+ "template": template,
+ "scope_path": request.scope["path"] if request else None,
+ "awaitable": True,
+ },
+ default=lambda b: b.decode("utf8"),
+ ),
+ "query_database": query_database,
+ }
+
+ return inner
+
+
+@hookimpl
+def asgi_wrapper(datasette):
+ def wrap_with_databases_header(app):
+ @wraps(app)
+ async def add_x_databases_header(scope, receive, send):
+ async def wrapped_send(event):
+ if event["type"] == "http.response.start":
+ original_headers = event.get("headers") or []
+ event = {
+ "type": event["type"],
+ "status": event["status"],
+ "headers": original_headers
+ + [
+ [
+ b"x-databases",
+ ", ".join(datasette.databases.keys()).encode("utf-8"),
+ ]
+ ],
+ }
+ await send(event)
+
+ await app(scope, receive, wrapped_send)
+
+ return add_x_databases_header
+
+ return wrap_with_databases_header
diff --git a/tests/plugins/view_name.py b/tests/plugins/view_name.py
new file mode 100644
index 00000000..4d29ab67
--- /dev/null
+++ b/tests/plugins/view_name.py
@@ -0,0 +1,9 @@
+from datasette import hookimpl
+
+
+@hookimpl
+def extra_template_vars(view_name, request):
+ return {
+ "view_name": view_name,
+ "request": request,
+ }
diff --git a/tests/test_api.py b/tests/test_api.py
index 7edd7ee6..260d399b 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1267,6 +1267,7 @@ def test_plugins_json(app_client):
"templates": False,
"version": None,
},
+ {"name": "view_name.py", "static": False, "templates": False, "version": None},
] == sorted(response.json, key=lambda p: p["name"])
diff --git a/tests/test_custom_pages.py b/tests/test_custom_pages.py
index 8ac75ec8..c69facb5 100644
--- a/tests/test_custom_pages.py
+++ b/tests/test_custom_pages.py
@@ -1,22 +1,10 @@
import pytest
from .fixtures import make_app_client
-VIEW_NAME_PLUGIN = """
-from datasette import hookimpl
-
-@hookimpl
-def extra_template_vars(view_name, request):
- return {
- "view_name": view_name,
- "request": request,
- }
-"""
-
@pytest.fixture(scope="session")
def custom_pages_client(tmp_path_factory):
template_dir = tmp_path_factory.mktemp("page-templates")
- extra_plugins = {"view_name.py": VIEW_NAME_PLUGIN}
pages_dir = template_dir / "pages"
pages_dir.mkdir()
(pages_dir / "about.html").write_text("ABOUT! view_name:{{ view_name }}", "utf-8")
@@ -39,9 +27,7 @@ def custom_pages_client(tmp_path_factory):
nested_dir = pages_dir / "nested"
nested_dir.mkdir()
(nested_dir / "nest.html").write_text("Nest!", "utf-8")
- for client in make_app_client(
- template_dir=str(template_dir), extra_plugins=extra_plugins
- ):
+ for client in make_app_client(template_dir=str(template_dir)):
yield client
From 52c4387c7d37c867104e3728cc1f4c4d1e100642 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 27 May 2020 19:21:41 -0700
Subject: [PATCH 0245/2124] Redesigned register_output_renderer plugin hook,
closes #581
---
datasette/app.py | 6 +-
datasette/views/base.py | 18 +++++-
datasette/views/database.py | 1 +
docs/plugins.rst | 53 ++++++++++++---
tests/plugins/register_output_renderer.py | 42 ++++++++++++
tests/test_api.py | 20 +++---
tests/test_html.py | 4 ++
tests/test_plugins.py | 78 +++++++++++++++++++++++
8 files changed, 202 insertions(+), 20 deletions(-)
create mode 100644 tests/plugins/register_output_renderer.py
diff --git a/datasette/app.py b/datasette/app.py
index f1fcc5eb..941b2895 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -586,7 +586,11 @@ class Datasette:
hook_renderers.append(hook)
for renderer in hook_renderers:
- self.renderers[renderer["extension"]] = renderer["callback"]
+ self.renderers[renderer["extension"]] = (
+ # It used to be called "callback" - remove this in Datasette 1.0
+ renderer.get("render")
+ or renderer["callback"]
+ )
async def render_template(
self, templates, context=None, request=None, view_name=None
diff --git a/datasette/views/base.py b/datasette/views/base.py
index f5eafe63..5a5fe056 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -14,6 +14,7 @@ from datasette.database import QueryInterrupted
from datasette.utils import (
InvalidSql,
LimitedWriter,
+ call_with_supported_arguments,
is_url,
path_with_added_args,
path_with_removed_args,
@@ -387,7 +388,21 @@ class DataView(BaseView):
if _format in self.ds.renderers.keys():
# Dispatch request to the correct output format renderer
# (CSV is not handled here due to streaming)
- result = self.ds.renderers[_format](request.args, data, self.name)
+ result = call_with_supported_arguments(
+ self.ds.renderers[_format],
+ datasette=self.ds,
+ columns=data.get("columns") or [],
+ rows=data.get("rows") or [],
+ sql=data.get("query", {}).get("sql", None),
+ query_name=data.get("query_name"),
+ database=database,
+ table=data.get("table"),
+ request=request,
+ view_name=self.name,
+ # These will be deprecated in Datasette 1.0:
+ args=request.args,
+ data=data,
+ )
if result is None:
raise NotFound("No data")
@@ -395,6 +410,7 @@ class DataView(BaseView):
body=result.get("body"),
status=result.get("status_code", 200),
content_type=result.get("content_type", "text/plain"),
+ headers=result.get("headers"),
)
else:
extras = {}
diff --git a/datasette/views/database.py b/datasette/views/database.py
index cd27dd5f..15545fb8 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -188,6 +188,7 @@ class QueryView(DataView):
return (
{
"database": database,
+ "query_name": canned_query,
"rows": results.rows,
"truncated": results.truncated,
"columns": columns,
diff --git a/docs/plugins.rst b/docs/plugins.rst
index feb14593..27f00476 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -744,19 +744,37 @@ Allows the plugin to register a new output renderer, to output data in a custom
def register_output_renderer(datasette):
return {
"extension": "test",
- "callback": render_test
+ "render": render_test
}
-This will register ``render_test`` to be called when paths with the extension ``.test`` (for example ``/database.test``, ``/database/table.test``, or ``/database/table/row.test``) are requested. When a request is received, the callback function is called with three positional arguments:
+This will register ``render_test`` to be called when paths with the extension ``.test`` (for example ``/database.test``, ``/database/table.test``, or ``/database/table/row.test``) are requested. When a request is received, the callback function is called with zero or more of the following arguments. Datasette will inspect your callback function and pass arguments that match its function signature.
-``args`` - dictionary
- The GET parameters of the request
+``datasette`` - :ref:`internals_datasette`
+ For accessing plugin configuration and executing queries.
-``data`` - dictionary
- The data to be rendered
+``columns`` - list of strings
+ The names of the columns returned by this query.
+
+``rows`` - list of ``sqlite3.Row`` objects
+ The rows returned by the query.
+
+``sql`` - string
+ The SQL query that was executed.
+
+``query_name`` - string or None
+ If this was the execution of a :ref:`canned query <canned_queries>`, the name of that query.
+
+``database`` - string
+ The name of the database.
+
+``table`` - string or None
+ The table or view, if one is being rendered.
+
+``request`` - :ref:`internals_request`
+ The incoming HTTP request.
``view_name`` - string
- The name of the view where the renderer is being called. (``index``, ``database``, ``table``, and ``row`` are the most important ones.)
+ The name of the current view being called. ``index``, ``database``, ``table``, and ``row`` are the most important ones.
The callback function can return ``None``, if it is unable to render the data, or a dictionary with the following keys:
@@ -769,15 +787,34 @@ The callback function can return ``None``, if it is unable to render the data, o
``status_code`` - integer, optional
The HTTP status code, default 200
+``headers`` - dictionary, optional
+ Extra HTTP headers to be returned in the response.
+
A simple example of an output renderer callback function:
.. code-block:: python
- def render_test(args, data, view_name):
+ def render_test():
return {
"body": "Hello World"
}
+Here is a more complex example:
+
+.. code-block:: python
+
+ def render_test(columns, rows):
+ first_row = " | ".join(columns)
+ lines = [first_row]
+ lines.append("=" * len(first_row))
+ for row in rows:
+ lines.append(" | ".join(row))
+ return {
+ "body": "Hello World",
+ "content_type": "text/plain; charset=utf-8",
+ "headers": {"x-pipes": "yay-pipes"}
+ }
+
Examples: `datasette-atom <https://github.com/simonw/datasette-atom>`_, `datasette-ics <https://github.com/simonw/datasette-ics>`_
.. _plugin_register_facet_classes:
diff --git a/tests/plugins/register_output_renderer.py b/tests/plugins/register_output_renderer.py
new file mode 100644
index 00000000..2ea5660e
--- /dev/null
+++ b/tests/plugins/register_output_renderer.py
@@ -0,0 +1,42 @@
+from datasette import hookimpl
+import json
+
+
+def render_test_all_parameters(
+ datasette, columns, rows, sql, query_name, database, table, request, view_name, data
+):
+ headers = {}
+ for custom_header in request.args.getlist("header") or []:
+ key, value = custom_header.split(":")
+ headers[key] = value
+ return {
+ "body": json.dumps(
+ {
+ "datasette": datasette,
+ "columns": columns,
+ "rows": rows,
+ "sql": sql,
+ "query_name": query_name,
+ "database": database,
+ "table": table,
+ "request": request,
+ "view_name": view_name,
+ },
+ default=repr,
+ ),
+ "content_type": request.args.get("content_type", "text/plain"),
+ "status_code": int(request.args.get("status_code", 200)),
+ "headers": headers,
+ }
+
+
+def render_test_no_parameters():
+ return {"body": "Hello"}
+
+
+@hookimpl
+def register_output_renderer(datasette):
+ return [
+ {"extension": "testall", "render": render_test_all_parameters},
+ {"extension": "testnone", "callback": render_test_no_parameters},
+ ]
diff --git a/tests/test_api.py b/tests/test_api.py
index 260d399b..f92da45e 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1259,16 +1259,16 @@ def test_threads_json(app_client):
def test_plugins_json(app_client):
response = app_client.get("/-/plugins.json")
- assert [
- {"name": "my_plugin.py", "static": False, "templates": False, "version": None},
- {
- "name": "my_plugin_2.py",
- "static": False,
- "templates": False,
- "version": None,
- },
- {"name": "view_name.py", "static": False, "templates": False, "version": None},
- ] == sorted(response.json, key=lambda p: p["name"])
+ expected = [
+ {"name": name, "static": False, "templates": False, "version": None}
+ for name in (
+ "my_plugin.py",
+ "my_plugin_2.py",
+ "register_output_renderer.py",
+ "view_name.py",
+ )
+ ]
+ assert expected == sorted(response.json, key=lambda p: p["name"])
def test_versions_json(app_client):
diff --git a/tests/test_html.py b/tests/test_html.py
index 5a07953e..e602bf0e 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -546,6 +546,8 @@ def test_table_csv_json_export_interface(app_client):
actual = [l["href"].split("/")[-1] for l in links]
expected = [
"simple_primary_key.json?id__gt=2",
+ "simple_primary_key.testall?id__gt=2",
+ "simple_primary_key.testnone?id__gt=2",
"simple_primary_key.csv?id__gt=2&_size=max",
"#export",
]
@@ -582,6 +584,8 @@ def test_csv_json_export_links_include_labels_if_foreign_keys(app_client):
actual = [l["href"].split("/")[-1] for l in links]
expected = [
"facetable.json?_labels=on",
+ "facetable.testall?_labels=on",
+ "facetable.testnone?_labels=on",
"facetable.csv?_labels=on&_size=max",
"#export",
]
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 1546de92..0e4186d5 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -19,6 +19,8 @@ import textwrap
import pytest
import urllib
+at_memory_re = re.compile(r" at 0x\w+")
+
@pytest.mark.xfail
@pytest.mark.parametrize(
@@ -329,3 +331,79 @@ def test_view_names(view_names_client, path, view_name):
response = view_names_client.get(path)
assert response.status == 200
assert "view_name:{}".format(view_name) == response.body.decode("utf8")
+
+
+def test_register_output_renderer_no_parameters(app_client):
+ response = app_client.get("/fixtures/facetable.testnone")
+ assert 200 == response.status
+ assert b"Hello" == response.body
+
+
+def test_register_output_renderer_all_parameters(app_client):
+ response = app_client.get("/fixtures/facetable.testall")
+ assert 200 == response.status
+ # Lots of 'at 0x103a4a690' in here - replace those so we can do
+ # an easy comparison
+ body = response.body.decode("utf-8")
+ body = at_memory_re.sub(" at 0xXXX", body)
+ assert {
+ "datasette": "",
+ "columns": [
+ "pk",
+ "created",
+ "planet_int",
+ "on_earth",
+ "state",
+ "city_id",
+ "neighborhood",
+ "tags",
+ "complex_array",
+ "distinct_some_null",
+ ],
+ "rows": [
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ ],
+ "sql": "select pk, created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array, distinct_some_null from facetable order by pk limit 51",
+ "query_name": None,
+ "database": "fixtures",
+ "table": "facetable",
+ "request": "",
+ "view_name": "table",
+ } == json.loads(body)
+ # Test that query_name is set correctly
+ query_response = app_client.get("/fixtures/pragma_cache_size.testall")
+ assert "pragma_cache_size" == json.loads(query_response.body)["query_name"]
+
+
+def test_register_output_renderer_custom_status_code(app_client):
+ response = app_client.get("/fixtures/pragma_cache_size.testall?status_code=202")
+ assert 202 == response.status
+
+
+def test_register_output_renderer_custom_content_type(app_client):
+ response = app_client.get(
+ "/fixtures/pragma_cache_size.testall?content_type=text/blah"
+ )
+ assert "text/blah" == response.headers["content-type"]
+
+
+def test_register_output_renderer_custom_headers(app_client):
+ response = app_client.get(
+ "/fixtures/pragma_cache_size.testall?header=x-wow:1&header=x-gosh:2"
+ )
+ assert "1" == response.headers["x-wow"]
+ assert "2" == response.headers["x-gosh"]
From 57f48b8416f5e13df138d63db5bfffd0bb99a9b4 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 27 May 2020 19:43:30 -0700
Subject: [PATCH 0246/2124] Made register_output_renderer callback optionally
awaitable, closes #776
---
datasette/views/base.py | 2 ++
docs/plugins.rst | 16 +++++++++++-----
tests/plugins/register_output_renderer.py | 4 +++-
tests/test_plugins.py | 1 +
4 files changed, 17 insertions(+), 6 deletions(-)
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 5a5fe056..d56fd2f6 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -403,6 +403,8 @@ class DataView(BaseView):
args=request.args,
data=data,
)
+ if asyncio.iscoroutine(result):
+ result = await result
if result is None:
raise NotFound("No data")
diff --git a/docs/plugins.rst b/docs/plugins.rst
index 27f00476..ebf6adf6 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -736,7 +736,7 @@ register_output_renderer(datasette)
``datasette`` - :ref:`internals_datasette`
You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``
-Allows the plugin to register a new output renderer, to output data in a custom format. The hook function should return a dictionary, or a list of dictionaries, which contain the file extension you want to handle and a callback function:
+Registers a new output renderer, to output data in a custom format. The hook function should return a dictionary, or a list of dictionaries, of the following shape:
.. code-block:: python
@@ -747,7 +747,11 @@ Allows the plugin to register a new output renderer, to output data in a custom
"render": render_test
}
-This will register ``render_test`` to be called when paths with the extension ``.test`` (for example ``/database.test``, ``/database/table.test``, or ``/database/table/row.test``) are requested. When a request is received, the callback function is called with zero or more of the following arguments. Datasette will inspect your callback function and pass arguments that match its function signature.
+This will register ``render_test`` to be called when paths with the extension ``.test`` (for example ``/database.test``, ``/database/table.test``, or ``/database/table/row.test``) are requested.
+
+``render_test`` is a Python function. It can be a regular function or an ``async def render_test()`` awaitable function, depending on whether it needs to make any asynchronous calls.
+
+When a request is received, the callback function is called with zero or more of the following arguments. Datasette will inspect your callback function and pass arguments that match its function signature.
``datasette`` - :ref:`internals_datasette`
For accessing plugin configuration and executing queries.
@@ -803,16 +807,18 @@ Here is a more complex example:
.. code-block:: python
- def render_test(columns, rows):
+ async def render_test(datasette, columns, rows):
+ db = next(iter(datasette.databases.values()))
+ result = await db.execute("select sqlite_version()")
first_row = " | ".join(columns)
lines = [first_row]
lines.append("=" * len(first_row))
for row in rows:
lines.append(" | ".join(row))
return {
- "body": "Hello World",
+ "body": "\n".join(lines),
"content_type": "text/plain; charset=utf-8",
- "headers": {"x-pipes": "yay-pipes"}
+ "headers": {"x-sqlite-version": result.first()[0]},
}
Examples: `datasette-atom <https://github.com/simonw/datasette-atom>`_, `datasette-ics <https://github.com/simonw/datasette-ics>`_
diff --git a/tests/plugins/register_output_renderer.py b/tests/plugins/register_output_renderer.py
index 2ea5660e..d4c1228d 100644
--- a/tests/plugins/register_output_renderer.py
+++ b/tests/plugins/register_output_renderer.py
@@ -2,13 +2,14 @@ from datasette import hookimpl
import json
-def render_test_all_parameters(
+async def render_test_all_parameters(
datasette, columns, rows, sql, query_name, database, table, request, view_name, data
):
headers = {}
for custom_header in request.args.getlist("header") or []:
key, value = custom_header.split(":")
headers[key] = value
+ result = await datasette.databases["fixtures"].execute("select 1 + 1")
return {
"body": json.dumps(
{
@@ -21,6 +22,7 @@ def render_test_all_parameters(
"table": table,
"request": request,
"view_name": view_name,
+ "1+1": result.first()[0],
},
default=repr,
),
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 0e4186d5..94b69c1f 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -347,6 +347,7 @@ def test_register_output_renderer_all_parameters(app_client):
body = response.body.decode("utf-8")
body = at_memory_re.sub(" at 0xXXX", body)
assert {
+ "1+1": 2,
"datasette": "",
"columns": [
"pk",
From cbeea23d00b36f72386e68b67d76fdb8a151a486 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 27 May 2020 20:13:32 -0700
Subject: [PATCH 0247/2124] Test for prepare_jinja2_environment, refs #773
---
tests/plugins/my_plugin.py | 5 +++++
tests/test_plugins.py | 10 ++++++++++
2 files changed, 15 insertions(+)
diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py
index e55a0a32..434a1977 100644
--- a/tests/plugins/my_plugin.py
+++ b/tests/plugins/my_plugin.py
@@ -87,3 +87,8 @@ def extra_template_vars(template, database, table, view_name, request, datasette
default=lambda b: b.decode("utf8"),
)
}
+
+
+@hookimpl
+def prepare_jinja2_environment(env):
+ env.filters["format_numeric"] = lambda s: "{:,.0f}".format(float(s))
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 94b69c1f..1bfd9d3f 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -9,6 +9,7 @@ from .fixtures import (
from datasette.app import Datasette
from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm
from datasette.utils import sqlite3
+from jinja2.environment import Template
import base64
import json
import os
@@ -408,3 +409,12 @@ def test_register_output_renderer_custom_headers(app_client):
)
assert "1" == response.headers["x-wow"]
assert "2" == response.headers["x-gosh"]
+
+
+@pytest.mark.asyncio
+async def test_prepare_jinja2_environment(app_client):
+ template = app_client.ds.jinja_env.from_string(
+ "Hello there, {{ a|format_numeric }}", {"a": 3412341}
+ )
+ rendered = await app_client.ds.render_template(template)
+ assert "Hello there, 3,412,341" == rendered
From defead17a4c9d68670ba2d9aeec9c2a70b5b059e Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 27 May 2020 20:30:32 -0700
Subject: [PATCH 0248/2124] Test for publish_subcommand hook, refs #773
---
tests/test_plugins.py | 10 ++++++++++
1 file changed, 10 insertions(+)
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 1bfd9d3f..9ebf455a 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -7,6 +7,7 @@ from .fixtures import (
TestClient as _TestClient,
) # noqa
from datasette.app import Datasette
+from datasette import cli
from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm
from datasette.utils import sqlite3
from jinja2.environment import Template
@@ -418,3 +419,12 @@ async def test_prepare_jinja2_environment(app_client):
)
rendered = await app_client.ds.render_template(template)
assert "Hello there, 3,412,341" == rendered
+
+
+def test_publish_subcommand():
+ # This is hard to test properly, because publish subcommand plugins
+ # cannot be loaded using the --plugins-dir mechanism - they need
+ # to be installed using "pip install". So I'm cheating and taking
+ # advantage of the fact that cloudrun/heroku use the plugin hook
+ # to register themselves as default plugins.
+ assert ["cloudrun", "heroku"] == cli.publish.list_commands({})
From 6d95cb4f9146a5c4584a147bdf243c778a0f23f5 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 27 May 2020 21:09:16 -0700
Subject: [PATCH 0249/2124] Unit test for register_facet_classes plugin, closes
#773
I was a bit lazy with this one. I didn't hook up a test for the facet_results mechanism.
The custom facet hook isn't a great design so I will probably rethink it at some point
in the future anyway.
---
tests/plugins/my_plugin.py | 34 +++++++++++++++++++++++++++++++
tests/test_plugins.py | 41 ++++++++++++++++++++++++++++++++++++++
2 files changed, 75 insertions(+)
diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py
index 434a1977..10d7e7e6 100644
--- a/tests/plugins/my_plugin.py
+++ b/tests/plugins/my_plugin.py
@@ -1,4 +1,6 @@
from datasette import hookimpl
+from datasette.facets import Facet
+from datasette.utils import path_with_added_args
import base64
import pint
import json
@@ -92,3 +94,35 @@ def extra_template_vars(template, database, table, view_name, request, datasette
@hookimpl
def prepare_jinja2_environment(env):
env.filters["format_numeric"] = lambda s: "{:,.0f}".format(float(s))
+
+
+@hookimpl
+def register_facet_classes():
+ return [DummyFacet]
+
+
+class DummyFacet(Facet):
+ type = "dummy"
+
+ async def suggest(self):
+ columns = await self.get_columns(self.sql, self.params)
+ return (
+ [
+ {
+ "name": column,
+ "toggle_url": self.ds.absolute_url(
+ self.request,
+ path_with_added_args(self.request, {"_facet_dummy": column}),
+ ),
+ "type": "dummy",
+ }
+ for column in columns
+ ]
+ if self.request.args.get("_dummy_facet")
+ else []
+ )
+
+ async def facet_results(self):
+ facet_results = {}
+ facets_timed_out = []
+ return facet_results, facets_timed_out
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 9ebf455a..2aadb252 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -428,3 +428,44 @@ def test_publish_subcommand():
# advantage of the fact that cloudrun/heroku use the plugin hook
# to register themselves as default plugins.
assert ["cloudrun", "heroku"] == cli.publish.list_commands({})
+
+
+def test_register_facet_classes(app_client):
+ response = app_client.get(
+ "/fixtures/compound_three_primary_keys.json?_dummy_facet=1"
+ )
+ data = json.loads(response.body)
+ assert [
+ {
+ "name": "pk1",
+ "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet_dummy=pk1",
+ "type": "dummy",
+ },
+ {
+ "name": "pk2",
+ "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet_dummy=pk2",
+ "type": "dummy",
+ },
+ {
+ "name": "pk3",
+ "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet_dummy=pk3",
+ "type": "dummy",
+ },
+ {
+ "name": "content",
+ "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet_dummy=content",
+ "type": "dummy",
+ },
+ {
+ "name": "pk1",
+ "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet=pk1",
+ },
+ {
+ "name": "pk2",
+ "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet=pk2",
+ },
+ {
+ "name": "pk3",
+ "toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet=pk3",
+ },
+ ] == data["suggested_facets"]
From 510c1989d43cd9b7c9f116ad161b7380220ac5d5 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 27 May 2020 21:11:53 -0700
Subject: [PATCH 0250/2124] Removed xfail, refs #773
---
tests/test_plugins.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 2aadb252..e9556b31 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -24,7 +24,6 @@ import urllib
at_memory_re = re.compile(r" at 0x\w+")
-@pytest.mark.xfail
@pytest.mark.parametrize(
"plugin_hook", [name for name in dir(pm.hook) if not name.startswith("_")]
)
From 75cd432e5a96c5fe2577f839c3a059fd6bf41124 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 27 May 2020 22:00:04 -0700
Subject: [PATCH 0251/2124] Ability to set custom table/view page size in
metadata, closes #751
---
datasette/views/table.py | 2 +-
docs/metadata.rst | 23 +++++++++++++++++++++++
tests/fixtures.py | 1 +
tests/test_api.py | 4 ++--
4 files changed, 27 insertions(+), 3 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index aab4bbe3..d014db71 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -527,7 +527,7 @@ class TableView(RowTableShared):
extra_args = {}
# Handle ?_size=500
- page_size = _size or request.args.get("_size")
+ page_size = _size or request.args.get("_size") or table_metadata.get("size")
if page_size:
if page_size == "max":
page_size = self.ds.max_returned_rows
diff --git a/docs/metadata.rst b/docs/metadata.rst
index 3cb1f739..88ad5854 100644
--- a/docs/metadata.rst
+++ b/docs/metadata.rst
@@ -156,6 +156,29 @@ Or use ``"sort_desc"`` to sort in descending order:
}
}
+.. _metadata_page_size:
+
+Setting a custom page size
+--------------------------
+
+Datasette defaults to displaying 100 rows per page for both tables and views. You can change this default page size on a per-table or per-view basis using the ``"size"`` key in ``metadata.json``:
+
+.. code-block:: json
+
+ {
+ "databases": {
+ "mydatabase": {
+ "tables": {
+ "example_table": {
+ "size": 10
+ }
+ }
+ }
+ }
+ }
+
+This size can still be overridden by passing e.g. ``?_size=50`` in the querystring.
+
.. _metadata_sortable_columns:
Setting which columns can be used for sorting
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 1eaa1dfe..9479abf6 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -305,6 +305,7 @@ METADATA = {
},
"attraction_characteristic": {"sort_desc": "pk"},
"facet_cities": {"sort": "name"},
+ "paginated_view": {"size": 25},
},
"queries": {
"𝐜𝐢𝐭𝐢𝐞𝐬": "select id, name from facet_cities order by id limit 1;",
diff --git a/tests/test_api.py b/tests/test_api.py
index f92da45e..eb80f8e7 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -754,9 +754,9 @@ def test_table_with_reserved_word_name(app_client):
"path,expected_rows,expected_pages",
[
("/fixtures/no_primary_key.json", 201, 5),
- ("/fixtures/paginated_view.json", 201, 5),
+ ("/fixtures/paginated_view.json", 201, 9),
("/fixtures/no_primary_key.json?_size=25", 201, 9),
- ("/fixtures/paginated_view.json?_size=25", 201, 9),
+ ("/fixtures/paginated_view.json?_size=50", 201, 5),
("/fixtures/paginated_view.json?_size=max", 201, 3),
("/fixtures/123_starts_with_digits.json", 0, 1),
# Ensure faceting doesn't break pagination:
From 5ab411c733233435d613d04c610a5a41fd0b7735 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 27 May 2020 22:57:05 -0700
Subject: [PATCH 0252/2124] can_render mechanism for register_output_renderer,
closes #770
---
datasette/app.py | 8 ++---
datasette/utils/__init__.py | 6 +++-
datasette/views/base.py | 27 ++++++++++++++---
docs/plugins.rst | 22 ++++++++++----
tests/plugins/register_output_renderer.py | 26 +++++++++++++++-
tests/test_plugins.py | 37 ++++++++++++++++++++++-
6 files changed, 108 insertions(+), 18 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 941b2895..40d39ac9 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -228,7 +228,7 @@ class Datasette:
if config_dir and (config_dir / "config.json").exists() and not config:
config = json.load((config_dir / "config.json").open())
self._config = dict(DEFAULT_CONFIG, **(config or {}))
- self.renderers = {} # File extension -> renderer function
+ self.renderers = {} # File extension -> (renderer, can_render) functions
self.version_note = version_note
self.executor = futures.ThreadPoolExecutor(
max_workers=self.config("num_sql_threads")
@@ -574,7 +574,7 @@ class Datasette:
def register_renderers(self):
""" Register output renderers which output data in custom formats. """
# Built-in renderers
- self.renderers["json"] = json_renderer
+ self.renderers["json"] = (json_renderer, lambda: True)
# Hooks
hook_renderers = []
@@ -588,8 +588,8 @@ class Datasette:
for renderer in hook_renderers:
self.renderers[renderer["extension"]] = (
# It used to be called "callback" - remove this in Datasette 1.0
- renderer.get("render")
- or renderer["callback"]
+ renderer.get("render") or renderer["callback"],
+ renderer.get("can_render") or (lambda: True),
)
async def render_template(
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 03157072..2dab8e14 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -811,6 +811,10 @@ def call_with_supported_arguments(fn, **kwargs):
call_with = []
for parameter in parameters:
if parameter not in kwargs:
- raise TypeError("{} requires parameters {}".format(fn, tuple(parameters)))
+ raise TypeError(
+ "{} requires parameters {}, missing: {}".format(
+ fn, tuple(parameters), set(parameters) - set(kwargs.keys())
+ )
+ )
call_with.append(kwargs[parameter])
return fn(*call_with)
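``call_with_supported_arguments`` is the mechanism that lets renderer (and ``can_render``) callbacks declare only the arguments they care about. The full implementation lives in ``datasette/utils/__init__.py``; a self-contained sketch of the idea:

.. code-block:: python

    import inspect


    def call_with_supported_arguments(fn, **kwargs):
        # Read fn's signature and pass only the keyword arguments it
        # declares, raising if a declared parameter was not supplied
        parameters = inspect.signature(fn).parameters.keys()
        call_with = []
        for parameter in parameters:
            if parameter not in kwargs:
                raise TypeError(
                    "{} requires parameters {}, missing: {}".format(
                        fn, tuple(parameters), set(parameters) - set(kwargs.keys())
                    )
                )
            call_with.append(kwargs[parameter])
        return fn(*call_with)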
diff --git a/datasette/views/base.py b/datasette/views/base.py
index d56fd2f6..06b78d5f 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -389,7 +389,7 @@ class DataView(BaseView):
# Dispatch request to the correct output format renderer
# (CSV is not handled here due to streaming)
result = call_with_supported_arguments(
- self.ds.renderers[_format],
+ self.ds.renderers[_format][0],
datasette=self.ds,
columns=data.get("columns") or [],
rows=data.get("rows") or [],
@@ -426,10 +426,27 @@ class DataView(BaseView):
if data.get("expandable_columns"):
url_labels_extra = {"_labels": "on"}
- renderers = {
- key: path_with_format(request, key, {**url_labels_extra})
- for key in self.ds.renderers.keys()
- }
+ renderers = {}
+ for key, (_, can_render) in self.ds.renderers.items():
+ it_can_render = call_with_supported_arguments(
+ can_render,
+ datasette=self.ds,
+ columns=data.get("columns") or [],
+ rows=data.get("rows") or [],
+ sql=data.get("query", {}).get("sql", None),
+ query_name=data.get("query_name"),
+ database=database,
+ table=data.get("table"),
+ request=request,
+ view_name=self.name,
+ )
+ if asyncio.iscoroutine(it_can_render):
+ it_can_render = await it_can_render
+ if it_can_render:
+ renderers[key] = path_with_format(
+ request, key, {**url_labels_extra}
+ )
+
url_csv_args = {"_size": "max", **url_labels_extra}
url_csv = path_with_format(request, "csv", url_csv_args)
url_csv_path = url_csv.split("?")[0]
diff --git a/docs/plugins.rst b/docs/plugins.rst
index ebf6adf6..b27daf3f 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -744,14 +744,17 @@ Registers a new output renderer, to output data in a custom format. The hook fun
def register_output_renderer(datasette):
return {
"extension": "test",
- "render": render_test
+ "render": render_demo,
+ "can_render": can_render_demo, # Optional
}
-This will register ``render_test`` to be called when paths with the extension ``.test`` (for example ``/database.test``, ``/database/table.test``, or ``/database/table/row.test``) are requested.
+This will register ``render_demo`` to be called when paths with the extension ``.test`` (for example ``/database.test``, ``/database/table.test``, or ``/database/table/row.test``) are requested.
-``render_test`` is a Python function. It can be a regular function or an ``async def render_test()`` awaitable function, depending on whether it needs to make any asynchronous calls.
+``render_demo`` is a Python function. It can be a regular function or an ``async def render_demo()`` awaitable function, depending on whether it needs to make any asynchronous calls.
-When a request is received, the callback function is called with zero or more of the following arguments. Datasette will inspect your callback function and pass arguments that match its function signature.
+``can_render_demo`` is a Python function (or ``async def`` function) which accepts the same arguments as ``render_demo`` but just returns ``True`` or ``False``. It lets Datasette know whether the current SQL query can be represented by the plugin - and hence influences whether a link to this output format is displayed in the user interface. If you omit the ``"can_render"`` key from the dictionary, every query will be treated as being supported by the plugin.
+
+When a request is received, the ``"render"`` callback function is called with zero or more of the following arguments. Datasette will inspect your callback function and pass arguments that match its function signature.
``datasette`` - :ref:`internals_datasette`
For accessing plugin configuration and executing queries.
@@ -798,7 +801,7 @@ A simple example of an output renderer callback function:
.. code-block:: python
- def render_test():
+ def render_demo():
return {
"body": "Hello World"
}
@@ -807,7 +810,7 @@ Here is a more complex example:
.. code-block:: python
- async def render_test(datasette, columns, rows):
+ async def render_demo(datasette, columns, rows):
db = next(iter(datasette.databases.values()))
result = await db.execute("select sqlite_version()")
first_row = " | ".join(columns)
@@ -821,6 +824,13 @@ Here is a more complex example:
"headers": {"x-sqlite-version": result.first()[0]},
}
+And here is an example ``can_render`` function which returns ``True`` only if the query results contain the columns ``atom_id``, ``atom_title`` and ``atom_updated``:
+
+.. code-block:: python
+
+ def can_render_demo(columns):
+ return {"atom_id", "atom_title", "atom_updated"}.issubset(columns)
+
Examples: `datasette-atom <https://github.com/simonw/datasette-atom>`_, `datasette-ics <https://github.com/simonw/datasette-ics>`_
.. _plugin_register_facet_classes:
diff --git a/tests/plugins/register_output_renderer.py b/tests/plugins/register_output_renderer.py
index d4c1228d..a9f0f157 100644
--- a/tests/plugins/register_output_renderer.py
+++ b/tests/plugins/register_output_renderer.py
@@ -2,6 +2,26 @@ from datasette import hookimpl
import json
+async def can_render(
+ datasette, columns, rows, sql, query_name, database, table, request, view_name
+):
+ # We stash this on datasette so the calling unit test can see it
+ datasette._can_render_saw = {
+ "datasette": datasette,
+ "columns": columns,
+ "rows": rows,
+ "sql": sql,
+ "query_name": query_name,
+ "database": database,
+ "table": table,
+ "request": request,
+ "view_name": view_name,
+ }
+ if request.args.get("_no_can_render"):
+ return False
+ return True
+
+
async def render_test_all_parameters(
datasette, columns, rows, sql, query_name, database, table, request, view_name, data
):
@@ -39,6 +59,10 @@ def render_test_no_parameters():
@hookimpl
def register_output_renderer(datasette):
return [
- {"extension": "testall", "render": render_test_all_parameters},
+ {
+ "extension": "testall",
+ "render": render_test_all_parameters,
+ "can_render": can_render,
+ },
{"extension": "testnone", "callback": render_test_no_parameters},
]
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index e9556b31..a34328a9 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -9,7 +9,7 @@ from .fixtures import (
from datasette.app import Datasette
from datasette import cli
from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm
-from datasette.utils import sqlite3
+from datasette.utils import sqlite3, CustomRow
from jinja2.environment import Template
import base64
import json
@@ -411,6 +411,41 @@ def test_register_output_renderer_custom_headers(app_client):
assert "2" == response.headers["x-gosh"]
+def test_register_output_renderer_can_render(app_client):
+ response = app_client.get("/fixtures/facetable?_no_can_render=1")
+ assert response.status == 200
+ links = (
+ Soup(response.body, "html.parser")
+ .find("p", {"class": "export-links"})
+ .findAll("a")
+ )
+ actual = [l["href"].split("/")[-1] for l in links]
+ # Should not be present because we sent ?_no_can_render=1
+ assert "facetable.testall?_labels=on" not in actual
+ # Check that it was passed the values we expected
+ assert hasattr(app_client.ds, "_can_render_saw")
+ assert {
+ "datasette": app_client.ds,
+ "columns": [
+ "pk",
+ "created",
+ "planet_int",
+ "on_earth",
+ "state",
+ "city_id",
+ "neighborhood",
+ "tags",
+ "complex_array",
+ "distinct_some_null",
+ ],
+ "sql": "select pk, created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array, distinct_some_null from facetable order by pk limit 51",
+ "query_name": None,
+ "database": "fixtures",
+ "table": "facetable",
+ "view_name": "table",
+ }.items() <= app_client.ds._can_render_saw.items()
+
+
@pytest.mark.asyncio
async def test_prepare_jinja2_environment(app_client):
template = app_client.ds.jinja_env.from_string(
From d56f402822df102f9cf1a9a056449d01a15e3aae Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 28 May 2020 07:10:21 -0700
Subject: [PATCH 0253/2124] Release notes for 0.43
Refs #581, #770, #729, #706, #751, #706, #744, #771, #773
---
README.md | 1 +
docs/changelog.rst | 15 +++++++++++++++
2 files changed, 16 insertions(+)
diff --git a/README.md b/README.md
index 7351c5c0..90df75de 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+ * 28th May 2020: [Datasette 0.43](http://datasette.readthedocs.io/en/latest/changelog.html#v0-43) - Redesigned [register_output_renderer](https://datasette.readthedocs.io/en/latest/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes.
* 8th May 2020: [Datasette 0.42](http://datasette.readthedocs.io/en/latest/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database.
* 6th May 2020: [Datasette 0.41](http://datasette.readthedocs.io/en/latest/changelog.html#v0-41) - New mechanism for [creating custom pages](https://datasette.readthedocs.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://datasette.readthedocs.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements.
* 21st April 2020: [Datasette 0.40](http://datasette.readthedocs.io/en/latest/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. Various bug fixes.
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 48d3128b..8f375dd1 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,21 @@
Changelog
=========
+.. _v0_43:
+
+0.43 (2020-05-28)
+-----------------
+
+The main focus of this release is a major upgrade to the :ref:`plugin_register_output_renderer` plugin hook, which allows plugins to provide new output formats for Datasette such as `datasette-atom <https://github.com/simonw/datasette-atom>`__ and `datasette-ics <https://github.com/simonw/datasette-ics>`__.
+
+* Redesign of :ref:`plugin_register_output_renderer` to provide more context to the render callback and support an optional ``"can_render"`` callback that controls whether a suggested link to the output format is provided. (`#581 <https://github.com/simonw/datasette/issues/581>`__, `#770 <https://github.com/simonw/datasette/issues/770>`__)
+* Visually distinguish float and integer columns - useful for figuring out why order-by-column might be returning unexpected results. (`#729 <https://github.com/simonw/datasette/issues/729>`__)
+* The :ref:`internals_request`, which is passed to several plugin hooks, is now documented. (`#706 <https://github.com/simonw/datasette/issues/706>`__)
+* New ``metadata.json`` option for setting a custom default page size for specific tables and views, see :ref:`metadata_page_size`. (`#751 <https://github.com/simonw/datasette/issues/751>`__)
+* Canned queries can now be configured with a default URL fragment hash, useful when working with plugins such as `datasette-vega <https://github.com/simonw/datasette-vega>`__, see :ref:`canned_queries_default_fragment`. (`#706 <https://github.com/simonw/datasette/issues/706>`__)
+* Fixed a bug in ``datasette publish`` when running on operating systems where the ``/tmp`` directory lives in a different volume, using a backport of the Python 3.8 ``shutil.copytree()`` function. (`#744 <https://github.com/simonw/datasette/issues/744>`__)
+* Every plugin hook is now covered by the unit tests, and a new unit test checks that each plugin hook has at least one corresponding test. (`#771 <https://github.com/simonw/datasette/issues/771>`__, `#773 <https://github.com/simonw/datasette/issues/773>`__)
+
.. _v0_42:
0.42 (2020-05-08)
From 40885ef24e32d91502b6b8bbad1c7376f50f2830 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 28 May 2020 07:41:22 -0700
Subject: [PATCH 0254/2124] Noted tool for converting release notes to Markdown
---
docs/contributing.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 48930332..567c4f47 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -145,12 +145,12 @@ To release a new version, first create a commit that updates :ref:`the changelog
For non-bugfix releases you may want to update the news section of ``README.md`` as part of the same commit.
-Wait long enough for Travis to build and deploy the demo version of that commit (otherwise the tag deployment may fail to alias to it properly). Then run the following::
+To tag and push the release, run the following::
git tag 0.25.2
git push --tags
Final steps once the release has deployed to https://pypi.org/project/datasette/
-* Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases
+* Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases - you can convert the release notes to Markdown by copying and pasting the rendered HTML into this tool: https://euangoddard.github.io/clipboard2markdown/
* Manually kick off a build of the `stable` branch on Read The Docs: https://readthedocs.org/projects/datasette/builds/
From 7bb30c1f11f7246baf7bb6a229f6b93572c4cbe3 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 28 May 2020 10:09:32 -0700
Subject: [PATCH 0255/2124] request.url now respects force_https_urls, closes
#781
---
datasette/app.py | 7 +++++++
tests/plugins/my_plugin_2.py | 3 +++
tests/test_api.py | 4 ++++
3 files changed, 14 insertions(+)
diff --git a/datasette/app.py b/datasette/app.py
index 40d39ac9..07190c16 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -786,6 +786,13 @@ class DatasetteRouter(AsgiRouter):
base_url = self.ds.config("base_url")
if base_url != "/" and path.startswith(base_url):
path = "/" + path[len(base_url) :]
+ # Apply force_https_urls, if set
+ if (
+ self.ds.config("force_https_urls")
+ and scope["type"] == "http"
+ and scope.get("scheme") != "https"
+ ):
+ scope = dict(scope, scheme="https")
return await super().route_path(scope, receive, send, path)
async def handle_404(self, scope, receive, send, exception=None):
diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py
index fdc6956d..c9e7c78f 100644
--- a/tests/plugins/my_plugin_2.py
+++ b/tests/plugins/my_plugin_2.py
@@ -46,6 +46,9 @@ def render_cell(value, database):
@hookimpl
def extra_template_vars(template, database, table, view_name, request, datasette):
+ # This helps unit tests that want to run assertions against the request object:
+ datasette._last_request = request
+
async def query_database(sql):
first_db = list(datasette.databases.keys())[0]
return (await datasette.execute(first_db, sql)).rows[0][0]
diff --git a/tests/test_api.py b/tests/test_api.py
index eb80f8e7..d7e7c03f 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1676,6 +1676,10 @@ def test_config_force_https_urls():
"toggle_url"
].startswith("https://")
assert response.json["suggested_facets"][0]["toggle_url"].startswith("https://")
+ # Also confirm that request.url and request.scheme are set correctly
+ response = client.get("/")
+ assert client.ds._last_request.url.startswith("https://")
+ assert client.ds._last_request.scheme == "https"
def test_infinity_returned_as_null(app_client):
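The scope rewrite above is a general ASGI pattern: ``request.url`` and ``request.scheme`` are derived from the scope, so replacing the ``scheme`` key is enough to make every generated URL use ``https``. The same idea as a standalone middleware (hypothetical, not part of Datasette)::

    def force_https(app):
        async def wrapper(scope, receive, send):
            # Mirror the DatasetteRouter.route_path rewrite shown above.
            if scope["type"] == "http" and scope.get("scheme") != "https":
                scope = dict(scope, scheme="https")
            await app(scope, receive, send)

        return wrapper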
From 21a8ffc82dcf5e8e5f484ce39ee9713f959e0ad5 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 28 May 2020 10:49:58 -0700
Subject: [PATCH 0256/2124] Tip about referencing issues in release notes
commit
---
docs/contributing.rst | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 567c4f47..da4dc35a 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -137,12 +137,16 @@ We increment ``minor`` for new features.
We increment ``patch`` for bugfix releases.
-To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__::
+To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__::
# Update changelog
- git commit -m "Release 0.25.2" -a
+ git commit -m "Release notes for 0.43
+
+ Refs #581, #770, #729, #706, #751, #706, #744, #771, #773" -a
git push
+Referencing the issues that are part of the release in the commit message ensures the name of the release shows up on those issue pages, e.g. `here `__.
+
For non-bugfix releases you may want to update the news section of ``README.md`` as part of the same commit.
To tag and push the release, run the following::
From 3c1a60589e14849344acd8aa6da0a60b40fbfc60 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 28 May 2020 11:27:24 -0700
Subject: [PATCH 0257/2124] Consistent capitalization of SpatiaLite in the docs
---
docs/changelog.rst | 2 +-
docs/installation.rst | 2 +-
docs/metadata.rst | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 8f375dd1..8b6272cb 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -947,7 +947,7 @@ request all rows where that column is less than 50 meters or more than 20 feet f
404s for missing tables/databases closes `#184 <https://github.com/simonw/datasette/issues/184>`_
- long_description in markdown for the new PyPI
-- Hide Spatialite system tables. [Russ Garrett]
+- Hide SpatiaLite system tables. [Russ Garrett]
- Allow ``explain select`` / ``explain query plan select`` `#201 <https://github.com/simonw/datasette/issues/201>`_
- Datasette inspect now finds primary_keys `#195 <https://github.com/simonw/datasette/issues/195>`_
- Ability to sort using form fields (for mobile portrait mode) `#199 <https://github.com/simonw/datasette/issues/199>`_
diff --git a/docs/installation.rst b/docs/installation.rst
index cdf1467a..aacfed1d 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -149,7 +149,7 @@ To upgrade to the most recent release of Datasette, run the following::
docker pull datasetteproject/datasette
-Loading Spatialite
+Loading SpatiaLite
~~~~~~~~~~~~~~~~~~
The ``datasetteproject/datasette`` image includes a recent version of the
diff --git a/docs/metadata.rst b/docs/metadata.rst
index 88ad5854..18766bac 100644
--- a/docs/metadata.rst
+++ b/docs/metadata.rst
@@ -260,7 +260,7 @@ Hiding tables
-------------
You can hide tables from the database listing view (in the same way that FTS and
-Spatialite tables are automatically hidden) using ``"hidden": true``:
+SpatiaLite tables are automatically hidden) using ``"hidden": true``:
.. code-block:: json
From 3e8932bf6443bd5168f22d559597aed619205995 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 29 May 2020 15:12:10 -0700
Subject: [PATCH 0258/2124] Upgrade to actions/cache@v2
---
.github/workflows/deploy-latest.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml
index 33490972..fd53f754 100644
--- a/.github/workflows/deploy-latest.yml
+++ b/.github/workflows/deploy-latest.yml
@@ -15,7 +15,7 @@ jobs:
uses: actions/setup-python@v1
with:
python-version: 3.8
- - uses: actions/cache@v1
+ - uses: actions/cache@v2
name: Configure pip caching
with:
path: ~/.cache/pip
From 7ccd55a1638d7d2762f2789f192e5bb81fb0d0c7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 28 May 2020 11:54:57 -0700
Subject: [PATCH 0259/2124] Views do support sorting now, refs #508
---
docs/metadata.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/metadata.rst b/docs/metadata.rst
index 18766bac..024af01e 100644
--- a/docs/metadata.rst
+++ b/docs/metadata.rst
@@ -210,7 +210,7 @@ This will restrict sorting of ``example_table`` to just the ``height`` and
You can also disable sorting entirely by setting ``"sortable_columns": []``
-By default, database views in Datasette do not support sorting. You can use ``sortable_columns`` to enable specific sort orders for a view called ``name_of_view`` in the database ``my_database`` like so:
+You can use ``sortable_columns`` to enable specific sort orders for a view called ``name_of_view`` in the database ``my_database`` like so:
.. code-block:: json
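The JSON block itself falls outside this diff's context. Since views are configured under the same ``tables`` key as tables, the example presumably looks something like this (structure inferred from the metadata documentation; the exact block in ``metadata.rst`` may differ)::

    {
        "databases": {
            "my_database": {
                "tables": {
                    "name_of_view": {
                        "sortable_columns": ["clicks", "impressions"]
                    }
                }
            }
        }
    }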
From 84616a2364df56f966f579eecc0716b9877f0d70 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 29 May 2020 15:51:30 -0700
Subject: [PATCH 0260/2124] request.args.getlist() returns [] if missing, refs
#774
Also added some unit tests for request.args
---
datasette/utils/__init__.py | 4 ++--
docs/internals.rst | 2 +-
tests/plugins/register_output_renderer.py | 2 +-
tests/test_utils.py | 10 ++++++++++
4 files changed, 14 insertions(+), 4 deletions(-)
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 2dab8e14..9b4f21ba 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -761,9 +761,9 @@ class RequestParameters(dict):
except (KeyError, TypeError):
return default
- def getlist(self, name, default=None):
+ def getlist(self, name):
"Return full list"
- return super().get(name, default)
+ return super().get(name) or []
class ConnectionProblem(Exception):
diff --git a/docs/internals.rst b/docs/internals.rst
index 5bcb9da9..bbf10cae 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -276,4 +276,4 @@ Conider the querystring ``?foo=1&foo=2``. This will produce a ``request.args`` t
Calling ``request.args.get("foo")`` will return the first value, ``"1"``. If that key is not present it will return ``None`` - or the second argument if you passed one, which will be used as the default.
-Calling ``request.args.getlist("foo")`` will return the full list, ``["1", "2"]``.
\ No newline at end of file
+Calling ``request.args.getlist("foo")`` will return the full list, ``["1", "2"]``. If you call it on a missing key it will return ``[]``.
diff --git a/tests/plugins/register_output_renderer.py b/tests/plugins/register_output_renderer.py
index a9f0f157..82b60d01 100644
--- a/tests/plugins/register_output_renderer.py
+++ b/tests/plugins/register_output_renderer.py
@@ -26,7 +26,7 @@ async def render_test_all_parameters(
datasette, columns, rows, sql, query_name, database, table, request, view_name, data
):
headers = {}
- for custom_header in request.args.getlist("header") or []:
+ for custom_header in request.args.getlist("header"):
key, value = custom_header.split(":")
headers[key] = value
result = await datasette.databases["fixtures"].execute("select 1 + 1")
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 59b80a67..ffb66ca5 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -448,6 +448,16 @@ async def test_request_post_vars():
assert {"foo": "bar", "baz": "1"} == await request.post_vars()
+def test_request_args():
+ request = Request.fake("/foo?multi=1&multi=2&single=3")
+ assert "1" == request.args.get("multi")
+ assert "3" == request.args.get("single")
+ assert ["1", "2"] == request.args.getlist("multi")
+ assert [] == request.args.getlist("missing")
+ with pytest.raises(KeyError):
+ request.args["missing"]
+
+
def test_call_with_supported_arguments():
def foo(a, b):
return "{}+{}".format(a, b)
From f272cbc65fbf56368413320e21c87dc842e0a083 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 29 May 2020 15:57:46 -0700
Subject: [PATCH 0261/2124] Use request.args.getlist instead of
request.args[...], refs #774
---
datasette/views/table.py | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index d014db71..d1d92bb1 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -319,19 +319,19 @@ class TableView(RowTableShared):
if not self.ds.config("allow_sql"):
raise DatasetteError("_where= is not allowed", status=400)
else:
- where_clauses.extend(request.args["_where"])
+ where_clauses.extend(request.args.getlist("_where"))
extra_wheres_for_ui = [
{
"text": text,
"remove_url": path_with_removed_args(request, {"_where": text}),
}
- for text in request.args["_where"]
+ for text in request.args.getlist("_where")
]
# Support for ?_through={table, column, value}
extra_human_descriptions = []
if "_through" in request.args:
- for through in request.args["_through"]:
+ for through in request.args.getlist("_through"):
through_data = json.loads(through)
through_table = through_data["table"]
other_column = through_data["column"]
@@ -559,7 +559,7 @@ class TableView(RowTableShared):
)
if request.args.get("_timelimit"):
- extra_args["custom_time_limit"] = int(request.args["_timelimit"])
+ extra_args["custom_time_limit"] = int(request.args.get("_timelimit"))
results = await db.execute(sql, params, truncate=True, **extra_args)
@@ -633,7 +633,7 @@ class TableView(RowTableShared):
all_labels = default_labels
# Check for explicit _label=
if "_label" in request.args:
- columns_to_expand = request.args["_label"]
+ columns_to_expand = request.args.getlist("_label")
if columns_to_expand is None and all_labels:
# expand all columns with foreign keys
columns_to_expand = [fk["column"] for fk, _ in expandable_columns]
@@ -746,7 +746,7 @@ class TableView(RowTableShared):
if arg in special_args:
form_hidden_args.append((arg, special_args[arg]))
if request.args.get("_where"):
- for where_text in request.args["_where"]:
+ for where_text in request.args.getlist("_where"):
form_hidden_args.append(("_where", where_text))
# if no sort specified AND table has a single primary key,
From 81be31322a968d23cf57cee62b58df55433385e3 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 29 May 2020 16:18:01 -0700
Subject: [PATCH 0262/2124] New implementation for RequestParams
- no longer subclasses dict
- request.args[key] now returns first item, not all items
- removed request.raw_args entirely
Closes #774
---
datasette/renderer.py | 2 +-
datasette/utils/__init__.py | 30 +++++++++++++++++++++++++++---
datasette/utils/asgi.py | 5 -----
datasette/views/table.py | 6 +++---
docs/internals.rst | 12 ++++++++----
tests/test_utils.py | 10 ++++++++++
6 files changed, 49 insertions(+), 16 deletions(-)
diff --git a/datasette/renderer.py b/datasette/renderer.py
index 349c2922..3f921fe7 100644
--- a/datasette/renderer.py
+++ b/datasette/renderer.py
@@ -32,7 +32,7 @@ def json_renderer(args, data, view_name):
# Handle the _json= parameter which may modify data["rows"]
json_cols = []
if "_json" in args:
- json_cols = args["_json"]
+ json_cols = args.getlist("_json")
if json_cols and "rows" in data and "columns" in data:
data["rows"] = convert_specific_columns_to_json(
data["rows"], data["columns"], json_cols
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 9b4f21ba..bf965413 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -753,17 +753,41 @@ def escape_fts(query):
)
-class RequestParameters(dict):
+class RequestParameters:
+ def __init__(self, data):
+ # data is a dictionary of key => [list, of, values]
+ assert isinstance(data, dict), "data should be a dictionary of key => [list]"
+ for key in data:
+ assert isinstance(
+ data[key], list
+ ), "data should be a dictionary of key => [list]"
+ self._data = data
+
+ def __contains__(self, key):
+ return key in self._data
+
+ def __getitem__(self, key):
+ return self._data[key][0]
+
+ def keys(self):
+ return self._data.keys()
+
+ def __iter__(self):
+ yield from self._data.keys()
+
+ def __len__(self):
+ return len(self._data)
+
def get(self, name, default=None):
"Return first value in the list, if available"
try:
- return super().get(name)[0]
+ return self._data.get(name)[0]
except (KeyError, TypeError):
return default
def getlist(self, name):
"Return full list"
- return super().get(name) or []
+ return self._data.get(name) or []
class ConnectionProblem(Exception):
diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py
index 62a2a0c8..24398b77 100644
--- a/datasette/utils/asgi.py
+++ b/datasette/utils/asgi.py
@@ -63,11 +63,6 @@ class Request:
def args(self):
return RequestParameters(parse_qs(qs=self.query_string))
- @property
- def raw_args(self):
- # Deprecated, undocumented - may be removed in Datasette 1.0
- return {key: value[0] for key, value in self.args.items()}
-
async def post_vars(self):
body = []
body = b""
diff --git a/datasette/views/table.py b/datasette/views/table.py
index d1d92bb1..a629346f 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -277,11 +277,11 @@ class TableView(RowTableShared):
# it can still be queried using ?_col__exact=blah
special_args = {}
other_args = []
- for key, value in args.items():
+ for key in args:
if key.startswith("_") and "__" not in key:
- special_args[key] = value[0]
+ special_args[key] = args[key]
else:
- for v in value:
+ for v in args.getlist(key):
other_args.append((key, v))
# Handle ?_filter_column and redirect, if present
diff --git a/docs/internals.rst b/docs/internals.rst
index bbf10cae..ea015dbc 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -268,12 +268,16 @@ The object also has one awaitable method:
The RequestParameters class
---------------------------
-This class, returned by ``request.args``, is a subclass of a Python dictionary that provides methods for working with keys that map to lists of values.
+This class, returned by ``request.args``, is a dictionary-like object.
-Conider the querystring ``?foo=1&foo=2``. This will produce a ``request.args`` that looks like this::
+Consider the querystring ``?foo=1&foo=2``. This will produce a ``request.args`` that looks like this::
RequestParameters({"foo": ["1", "2"]})
-Calling ``request.args.get("foo")`` will return the first value, ``"1"``. If that key is not present it will return ``None`` - or the second argument if you passed one, which will be used as the default.
+``request.args["foo"]`` returns the first value, ``"1"`` - or raises ``KeyError`` if that key is missing.
-Calling ``request.args.getlist("foo")`` will return the full list, ``["1", "2"]``. If you call it on a missing key it will return ``[]``.
+``request.args.get("foo")`` returns ``"1"`` - or ``None`` if the key is missing. A second argument can be used to specify a different default value.
+
+``request.args.getlist("foo")`` returns the full list, ``["1", "2"]``. If you call it on a missing key it will return ``[]``.
+
+You can use ``if key in request.args`` to check if a key is present. ``for key in request.args`` will iterate through the keys, or you can use ``request.args.keys()`` to get all of the keys.
diff --git a/tests/test_utils.py b/tests/test_utils.py
index ffb66ca5..9d6f45b0 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -452,8 +452,18 @@ def test_request_args():
request = Request.fake("/foo?multi=1&multi=2&single=3")
assert "1" == request.args.get("multi")
assert "3" == request.args.get("single")
+ assert "1" == request.args["multi"]
+ assert "3" == request.args["single"]
assert ["1", "2"] == request.args.getlist("multi")
assert [] == request.args.getlist("missing")
+ assert "multi" in request.args
+ assert "single" in request.args
+ assert "missing" not in request.args
+ expected = ["multi", "single"]
+ assert expected == list(request.args.keys())
+ for i, key in enumerate(request.args):
+ assert expected[i] == key
+ assert 2 == len(request.args)
with pytest.raises(KeyError):
request.args["missing"]
From 31fb006a9b05067a8eb2f774ad3a3b15b4565924 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 07:28:29 -0700
Subject: [PATCH 0263/2124] Added datasette.get_database() method
Refs #576
---
datasette/app.py | 5 +++++
docs/internals.rst | 10 ++++++++++
docs/plugins.rst | 2 +-
tests/test_database.py | 3 +++
tests/test_internals_datasette.py | 23 +++++++++++++++++++++++
5 files changed, 42 insertions(+), 1 deletion(-)
create mode 100644 tests/test_internals_datasette.py
diff --git a/datasette/app.py b/datasette/app.py
index 07190c16..30eb3dba 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -281,6 +281,11 @@ class Datasette:
self.register_renderers()
+ def get_database(self, name=None):
+ if name is None:
+ return next(iter(self.databases.values()))
+ return self.databases[name]
+
def add_database(self, name, db):
self.databases[name] = db
diff --git a/docs/internals.rst b/docs/internals.rst
index ea015dbc..886cb7e7 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -44,6 +44,16 @@ This method lets you read plugin configuration values that were set in ``metadat
Renders a `Jinja template `__ using Datasette's preconfigured instance of Jinja and returns the resulting string. The template will have access to Datasette's default template functions and any functions that have been made available by other plugins.
+.. _datasette_get_database:
+
+.get_database(name)
+-------------------
+
+``name`` - string, optional
+ The name of the database - optional.
+
+Returns the specified database object. Raises a ``KeyError`` if the database does not exist. Call this method without an argument to return the first connected database.
+
.. _datasette_add_database:
.add_database(name, db)
diff --git a/docs/plugins.rst b/docs/plugins.rst
index b27daf3f..f08f1217 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -811,7 +811,7 @@ Here is a more complex example:
.. code-block:: python
async def render_demo(datasette, columns, rows):
- db = next(iter(datasette.databases.values()))
+ db = datasette.get_database()
result = await db.execute("select sqlite_version()")
first_row = " | ".join(columns)
lines = [first_row]
diff --git a/tests/test_database.py b/tests/test_database.py
index 1f1a3a7e..bd7e7666 100644
--- a/tests/test_database.py
+++ b/tests/test_database.py
@@ -1,3 +1,6 @@
+"""
+Tests for the datasette.database.Database class
+"""
from datasette.database import Results, MultipleValues
from datasette.utils import sqlite3
from .fixtures import app_client
diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py
new file mode 100644
index 00000000..4993250d
--- /dev/null
+++ b/tests/test_internals_datasette.py
@@ -0,0 +1,23 @@
+"""
+Tests for the datasette.app.Datasette class
+"""
+from .fixtures import app_client
+import pytest
+
+
+@pytest.fixture
+def datasette(app_client):
+ return app_client.ds
+
+
+def test_get_database(datasette):
+ db = datasette.get_database("fixtures")
+ assert "fixtures" == db.name
+ with pytest.raises(KeyError):
+ datasette.get_database("missing")
+
+
+def test_get_database_no_argument(datasette):
+ # Returns the first available database:
+ db = datasette.get_database()
+ assert "fixtures" == db.name
From ca56c226a9f1b02e871d7d7b392619a805b7f1ed Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 07:33:02 -0700
Subject: [PATCH 0264/2124] Renamed test_database.py to
test_internals_database.py
Also added a db fixture to remove some boilerplate.
---
...database.py => test_internals_database.py} | 45 +++++++------------
1 file changed, 17 insertions(+), 28 deletions(-)
rename tests/{test_database.py => test_internals_database.py} (80%)
diff --git a/tests/test_database.py b/tests/test_internals_database.py
similarity index 80%
rename from tests/test_database.py
rename to tests/test_internals_database.py
index bd7e7666..fde7ad2c 100644
--- a/tests/test_database.py
+++ b/tests/test_internals_database.py
@@ -9,17 +9,20 @@ import time
import uuid
+@pytest.fixture
+def db(app_client):
+ return app_client.ds.get_database("fixtures")
+
+
@pytest.mark.asyncio
-async def test_execute(app_client):
- db = app_client.ds.databases["fixtures"]
+async def test_execute(db):
results = await db.execute("select * from facetable")
assert isinstance(results, Results)
assert 15 == len(results)
@pytest.mark.asyncio
-async def test_results_first(app_client):
- db = app_client.ds.databases["fixtures"]
+async def test_results_first(db):
assert None is (await db.execute("select * from facetable where pk > 100")).first()
results = await db.execute("select * from facetable")
row = results.first()
@@ -35,8 +38,7 @@ async def test_results_first(app_client):
],
)
@pytest.mark.asyncio
-async def test_results_single_value(app_client, query, expected):
- db = app_client.ds.databases["fixtures"]
+async def test_results_single_value(db, query, expected):
results = await db.execute(query)
if expected:
assert expected == results.single_value()
@@ -46,9 +48,7 @@ async def test_results_single_value(app_client, query, expected):
@pytest.mark.asyncio
-async def test_execute_fn(app_client):
- db = app_client.ds.databases["fixtures"]
-
+async def test_execute_fn(db):
def get_1_plus_1(conn):
return conn.execute("select 1 + 1").fetchall()[0][0]
@@ -63,16 +63,14 @@ async def test_execute_fn(app_client):
),
)
@pytest.mark.asyncio
-async def test_table_exists(app_client, tables, exists):
- db = app_client.ds.databases["fixtures"]
+async def test_table_exists(db, tables, exists):
for table in tables:
actual = await db.table_exists(table)
assert exists == actual
@pytest.mark.asyncio
-async def test_get_all_foreign_keys(app_client):
- db = app_client.ds.databases["fixtures"]
+async def test_get_all_foreign_keys(db):
all_foreign_keys = await db.get_all_foreign_keys()
assert {
"incoming": [],
@@ -102,8 +100,7 @@ async def test_get_all_foreign_keys(app_client):
@pytest.mark.asyncio
-async def test_table_names(app_client):
- db = app_client.ds.databases["fixtures"]
+async def test_table_names(db):
table_names = await db.table_names()
assert [
"simple_primary_key",
@@ -139,8 +136,7 @@ async def test_table_names(app_client):
@pytest.mark.asyncio
-async def test_execute_write_block_true(app_client):
- db = app_client.ds.databases["fixtures"]
+async def test_execute_write_block_true(db):
await db.execute_write(
"update roadside_attractions set name = ? where pk = ?",
["Mystery!", 1],
@@ -151,8 +147,7 @@ async def test_execute_write_block_true(app_client):
@pytest.mark.asyncio
-async def test_execute_write_block_false(app_client):
- db = app_client.ds.databases["fixtures"]
+async def test_execute_write_block_false(db):
await db.execute_write(
"update roadside_attractions set name = ? where pk = ?", ["Mystery!", 1],
)
@@ -162,9 +157,7 @@ async def test_execute_write_block_false(app_client):
@pytest.mark.asyncio
-async def test_execute_write_fn_block_false(app_client):
- db = app_client.ds.databases["fixtures"]
-
+async def test_execute_write_fn_block_false(db):
def write_fn(conn):
with conn:
conn.execute("delete from roadside_attractions where pk = 1;")
@@ -177,9 +170,7 @@ async def test_execute_write_fn_block_false(app_client):
@pytest.mark.asyncio
-async def test_execute_write_fn_block_true(app_client):
- db = app_client.ds.databases["fixtures"]
-
+async def test_execute_write_fn_block_true(db):
def write_fn(conn):
with conn:
conn.execute("delete from roadside_attractions where pk = 1;")
@@ -191,9 +182,7 @@ async def test_execute_write_fn_block_true(app_client):
@pytest.mark.asyncio
-async def test_execute_write_fn_exception(app_client):
- db = app_client.ds.databases["fixtures"]
-
+async def test_execute_write_fn_exception(db):
def write_fn(conn):
assert False
From 012c76901af65442e90eac4b36db43455e3c922f Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 07:38:46 -0700
Subject: [PATCH 0265/2124] _ prefix for many private methods of Datasette,
refs #576
---
datasette/app.py | 28 ++++++++++++++--------------
datasette/database.py | 2 +-
2 files changed, 15 insertions(+), 15 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 30eb3dba..4b9807b0 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -279,7 +279,7 @@ class Datasette:
# pylint: disable=no-member
pm.hook.prepare_jinja2_environment(env=self.jinja_env)
- self.register_renderers()
+ self._register_renderers()
def get_database(self, name=None):
if name is None:
@@ -392,7 +392,7 @@ class Datasette:
}
)
- def prepare_connection(self, conn, database):
+ def _prepare_connection(self, conn, database):
conn.row_factory = sqlite3.Row
conn.text_factory = lambda x: str(x, "utf-8", "replace")
for name, num_args, func in self.sqlite_functions:
@@ -468,12 +468,12 @@ class Datasette:
url = "https://" + url[len("http://") :]
return url
- def register_custom_units(self):
+ def _register_custom_units(self):
"Register any custom units defined in the metadata.json with Pint"
for unit in self.metadata("custom_units") or []:
ureg.define(unit)
- def connected_databases(self):
+ def _connected_databases(self):
return [
{
"name": d.name,
@@ -486,9 +486,9 @@ class Datasette:
for d in sorted(self.databases.values(), key=lambda d: d.name)
]
- def versions(self):
+ def _versions(self):
conn = sqlite3.connect(":memory:")
- self.prepare_connection(conn, ":memory:")
+ self._prepare_connection(conn, ":memory:")
sqlite_version = conn.execute("select sqlite_version()").fetchone()[0]
sqlite_extensions = {}
for extension, testsql, hasversion in (
@@ -534,7 +534,7 @@ class Datasette:
},
}
- def plugins(self, show_all=False):
+ def _plugins(self, show_all=False):
ps = list(get_plugins())
if not show_all:
ps = [p for p in ps if p["name"] not in DEFAULT_PLUGINS]
@@ -548,7 +548,7 @@ class Datasette:
for p in ps
]
- def threads(self):
+ def _threads(self):
threads = list(threading.enumerate())
d = {
"num_threads": len(threads),
@@ -576,7 +576,7 @@ class Datasette:
.get(table, {})
)
- def register_renderers(self):
+ def _register_renderers(self):
""" Register output renderers which output data in custom formats. """
# Built-in renderers
self.renderers["json"] = (json_renderer, lambda: True)
@@ -724,11 +724,11 @@ class Datasette:
r"/-/metadata(?P(\.json)?)$",
)
add_route(
- JsonDataView.as_asgi(self, "versions.json", self.versions),
+ JsonDataView.as_asgi(self, "versions.json", self._versions),
r"/-/versions(?P(\.json)?)$",
)
add_route(
- JsonDataView.as_asgi(self, "plugins.json", self.plugins),
+ JsonDataView.as_asgi(self, "plugins.json", self._plugins),
r"/-/plugins(?P(\.json)?)$",
)
add_route(
@@ -736,11 +736,11 @@ class Datasette:
r"/-/config(?P(\.json)?)$",
)
add_route(
- JsonDataView.as_asgi(self, "threads.json", self.threads),
+ JsonDataView.as_asgi(self, "threads.json", self._threads),
r"/-/threads(?P(\.json)?)$",
)
add_route(
- JsonDataView.as_asgi(self, "databases.json", self.connected_databases),
+ JsonDataView.as_asgi(self, "databases.json", self._connected_databases),
r"/-/databases(?P(\.json)?)$",
)
add_route(
@@ -765,7 +765,7 @@ class Datasette:
+ renderer_regex
+ r")?$",
)
- self.register_custom_units()
+ self._register_custom_units()
async def setup_db():
# First time server starts up, calculate table counts for immutable databases
diff --git a/datasette/database.py b/datasette/database.py
index e6154caa..89bf47f4 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -104,7 +104,7 @@ class Database:
conn = getattr(connections, self.name, None)
if not conn:
conn = self.connect()
- self.ds.prepare_connection(conn, self.name)
+ self.ds._prepare_connection(conn, self.name)
setattr(connections, self.name, conn)
return fn(conn)
From de1cde65a67cf9acb227b4df67230b47fdfc9a0e Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 10:45:11 -0700
Subject: [PATCH 0266/2124] Moved request tests to test_internals_request.py
---
tests/test_internals_request.py | 42 +++++++++++++++++++++++++++++++++
tests/test_utils.py | 40 -------------------------------
2 files changed, 42 insertions(+), 40 deletions(-)
create mode 100644 tests/test_internals_request.py
diff --git a/tests/test_internals_request.py b/tests/test_internals_request.py
new file mode 100644
index 00000000..5c9b254b
--- /dev/null
+++ b/tests/test_internals_request.py
@@ -0,0 +1,42 @@
+from datasette.utils.asgi import Request
+import pytest
+
+
+@pytest.mark.asyncio
+async def test_request_post_vars():
+ scope = {
+ "http_version": "1.1",
+ "method": "POST",
+ "path": "/",
+ "raw_path": b"/",
+ "query_string": b"",
+ "scheme": "http",
+ "type": "http",
+ "headers": [[b"content-type", b"application/x-www-form-urlencoded"]],
+ }
+
+ async def receive():
+ return {"type": "http.request", "body": b"foo=bar&baz=1", "more_body": False}
+
+ request = Request(scope, receive)
+ assert {"foo": "bar", "baz": "1"} == await request.post_vars()
+
+
+def test_request_args():
+ request = Request.fake("/foo?multi=1&multi=2&single=3")
+ assert "1" == request.args.get("multi")
+ assert "3" == request.args.get("single")
+ assert "1" == request.args["multi"]
+ assert "3" == request.args["single"]
+ assert ["1", "2"] == request.args.getlist("multi")
+ assert [] == request.args.getlist("missing")
+ assert "multi" in request.args
+ assert "single" in request.args
+ assert "missing" not in request.args
+ expected = ["multi", "single"]
+ assert expected == list(request.args.keys())
+ for i, key in enumerate(request.args):
+ assert expected[i] == key
+ assert 2 == len(request.args)
+ with pytest.raises(KeyError):
+ request.args["missing"]
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 9d6f45b0..01a10468 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -428,46 +428,6 @@ def test_check_connection_passes():
utils.check_connection(conn)
-@pytest.mark.asyncio
-async def test_request_post_vars():
- scope = {
- "http_version": "1.1",
- "method": "POST",
- "path": "/",
- "raw_path": b"/",
- "query_string": b"",
- "scheme": "http",
- "type": "http",
- "headers": [[b"content-type", b"application/x-www-form-urlencoded"]],
- }
-
- async def receive():
- return {"type": "http.request", "body": b"foo=bar&baz=1", "more_body": False}
-
- request = Request(scope, receive)
- assert {"foo": "bar", "baz": "1"} == await request.post_vars()
-
-
-def test_request_args():
- request = Request.fake("/foo?multi=1&multi=2&single=3")
- assert "1" == request.args.get("multi")
- assert "3" == request.args.get("single")
- assert "1" == request.args["multi"]
- assert "3" == request.args["single"]
- assert ["1", "2"] == request.args.getlist("multi")
- assert [] == request.args.getlist("missing")
- assert "multi" in request.args
- assert "single" in request.args
- assert "missing" not in request.args
- expected = ["multi", "single"]
- assert expected == list(request.args.keys())
- for i, key in enumerate(request.args):
- assert expected[i] == key
- assert 2 == len(request.args)
- with pytest.raises(KeyError):
- request.args["missing"]
-
-
def test_call_with_supported_arguments():
def foo(a, b):
return "{}+{}".format(a, b)
From 5ae14c9f20e0dc59c588f0e93eedfefe0f0f3e8e Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 10:54:22 -0700
Subject: [PATCH 0267/2124] Improved documentation for RequestParameters class
---
docs/internals.rst | 25 ++++++++++++++++++-------
1 file changed, 18 insertions(+), 7 deletions(-)
diff --git a/docs/internals.rst b/docs/internals.rst
index 886cb7e7..ca725cc4 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -278,16 +278,27 @@ The object also has one awaitable method:
The RequestParameters class
---------------------------
-This class, returned by ``request.args``, is a dictionary-like object.
+``request.args`` is a ``RequestParameters`` object - a dictionary-like object which provides access to querystring parameters that may have multiple values.
-Consider the querystring ``?foo=1&foo=2``. This will produce a ``request.args`` that looks like this::
+Consider the querystring ``?foo=1&foo=2&bar=3`` - with two values for ``foo`` and one value for ``bar``.
- RequestParameters({"foo": ["1", "2"]})
+``request.args[key]`` - string
+ Returns the first value for that key, or raises a ``KeyError`` if the key is missing. For the above example ``request.args["foo"]`` would return ``"1"``.
-``request.args["foo"]`` returns the first value, ``"1"`` - or raises ``KeyError`` if that key is missing.
+``request.args.get(key)`` - string or None
+ Returns the first value for that key, or ``None`` if the key is missing. Pass a second argument to specify a different default, e.g. ``q = request.args.get("q", "")``.
-``request.args.get("foo")`` returns ``"1"`` - or ``None`` if the key is missing. A second argument can be used to specify a different default value.
+``request.args.getlist(key)`` - list of strings
+ Returns the list of strings for that key. ``request.args.getlist("foo")`` would return ``["1", "2"]`` in the above example. ``request.args.getlist("bar")`` would return ``["3"]``. If the key is missing an empty list will be returned.
-``request.args.getlist("foo")`` returns the full list, ``["1", "2"]``. If you call it on a missing key it will return ``[]``.
+``request.args.keys()`` - list of strings
+ Returns the list of available keys - for the example this would be ``["foo", "bar"]``.
-You can use ``if key in request.args`` to check if a key is present. ``for key in request.args`` will iterate through the keys, or you can use ``request.args.keys()`` to get all of the keys.
+``key in request.args`` - True or False
+ You can use ``if key in request.args`` to check if a key is present.
+
+``for key in request.args`` - iterator
+ This lets you loop through every available key.
+
+``len(request.args)`` - integer
+ Returns the number of keys.
From 3c5afaeb231c94a55309f1c0187ff6dedd5b5fb8 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 11:06:13 -0700
Subject: [PATCH 0268/2124] Re-arranged internals documentation
Request is more useful to most people than Database.
---
docs/internals.rst | 136 ++++++++++++++++++++++-----------------------
1 file changed, 68 insertions(+), 68 deletions(-)
diff --git a/docs/internals.rst b/docs/internals.rst
index ca725cc4..4db710c0 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -3,7 +3,74 @@
Internals for plugins
=====================
-Many :ref:`plugin_hooks` are passed objects that provide access to internal Datasette functionality. The interface to these objects should not be considered stable (at least until Datasette 1.0) with the exception of methods that are documented on this page.
+Many :ref:`plugin_hooks` are passed objects that provide access to internal Datasette functionality. The interface to these objects should not be considered stable with the exception of methods that are documented here.
+
+.. _internals_request:
+
+Request object
+~~~~~~~~~~~~~~
+
+The request object is passed to various plugin hooks. It represents an incoming HTTP request. It has the following properties:
+
+``.scope`` - dictionary
+ The ASGI scope that was used to construct this request, described in the `ASGI HTTP connection scope <https://asgi.readthedocs.io/en/latest/specs/www.html#connection-scope>`__ specification.
+
+``.method`` - string
+ The HTTP method for this request, usually ``GET`` or ``POST``.
+
+``.url`` - string
+ The full URL for this request, e.g. ``https://latest.datasette.io/fixtures``.
+
+``.scheme`` - string
+ The request scheme - usually ``https`` or ``http``.
+
+``.headers`` - dictionary (str -> str)
+ A dictionary of incoming HTTP request headers.
+
+``.host`` - string
+ The host header from the incoming request, e.g. ``latest.datasette.io`` or ``localhost``.
+
+``.path`` - string
+ The path of the request, e.g. ``/fixtures``.
+
+``.query_string`` - string
+ The querystring component of the request, without the ``?`` - e.g. ``name__contains=sam&age__gt=10``.
+
+``.args`` - RequestParameters
+ An object representing the parsed querystring parameters, see below.
+
+The object also has one awaitable method:
+
+``await request.post_vars()`` - dictionary
+ Returns a dictionary of form variables that were submitted in the request body via ``POST``.
+
+The RequestParameters class
+---------------------------
+
+``request.args`` is a ``RequestParameters`` object - a dictionary-like object which provides access to querystring parameters that may have multiple values.
+
+Consider the querystring ``?foo=1&foo=2&bar=3`` - with two values for ``foo`` and one value for ``bar``.
+
+``request.args[key]`` - string
+ Returns the first value for that key, or raises a ``KeyError`` if the key is missing. For the above example ``request.args["foo"]`` would return ``"1"``.
+
+``request.args.get(key)`` - string or None
+ Returns the first value for that key, or ``None`` if the key is missing. Pass a second argument to specify a different default, e.g. ``q = request.args.get("q", "")``.
+
+``request.args.getlist(key)`` - list of strings
+ Returns the list of strings for that key. ``request.args.getlist("foo")`` would return ``["1", "2"]`` in the above example. ``request.args.getlist("bar")`` would return ``["3"]``. If the key is missing an empty list will be returned.
+
+``request.args.keys()`` - list of strings
+ Returns the list of available keys - for the example this would be ``["foo", "bar"]``.
+
+``key in request.args`` - True or False
+ You can use ``if key in request.args`` to check if a key is present.
+
+``for key in request.args`` - iterator
+ This lets you loop through every available key.
+
+``len(request.args)`` - integer
+ Returns the number of keys.
.. _internals_datasette:
@@ -235,70 +302,3 @@ Here's an example of ``block=True`` in action:
num_rows_left = await database.execute_write_fn(my_action, block=True)
except Exception as e:
print("An error occurred:", e)
-
-.. _internals_request:
-
-Request object
-~~~~~~~~~~~~~~
-
-The request object is passed to various plugin hooks. It represents an incoming HTTP request. It has the following properties:
-
-``.scope`` - dictionary
- The ASGI scope that was used to construct this request, described in the `ASGI HTTP connection scope <https://asgi.readthedocs.io/en/latest/specs/www.html#connection-scope>`__ specification.
-
-``.method`` - string
- The HTTP method for this request, usually ``GET`` or ``POST``.
-
-``.url`` - string
- The full URL for this request, e.g. ``https://latest.datasette.io/fixtures``.
-
-``.scheme`` - string
- The request scheme - usually ``https`` or ``http``.
-
-``.headers`` - dictionary (str -> str)
- A dictionary of incoming HTTP request headers.
-
-``.host`` - string
- The host header from the incoming request, e.g. ``latest.datasette.io`` or ``localhost``.
-
-``.path`` - string
- The path of the request, e.g. ``/fixtures``.
-
-``.query_string`` - string
- The querystring component of the request, without the ``?`` - e.g. ``name__contains=sam&age__gt=10``.
-
-``.args`` - RequestParameters
- An object representing the parsed querystring parameters, see below.
-
-The object also has one awaitable method:
-
-``await request.post_vars()`` - dictionary
- Returns a dictionary of form variables that were submitted in the request body via ``POST``.
-
-The RequestParameters class
----------------------------
-
-``request.args`` is a ``RequestParameters`` object - a dictionary-like object which provides access to querystring parameters that may have multiple values.
-
-Consider the querystring ``?foo=1&foo=2&bar=3`` - with two values for ``foo`` and one value for ``bar``.
-
-``request.args[key]`` - string
- Returns the first value for that key, or raises a ``KeyError`` if the key is missing. For the above example ``request.args["foo"]`` would return ``"1"``.
-
-``request.args.get(key)`` - string or None
- Returns the first value for that key, or ``None`` if the key is missing. Pass a second argument to specify a different default, e.g. ``q = request.args.get("q", "")``.
-
-``request.args.getlist(key)`` - list of strings
- Returns the list of strings for that key. ``request.args.getlist("foo")`` would return ``["1", "2"]`` in the above example. ``request.args.getlist("bar")`` would return ``["3"]``. If the key is missing an empty list will be returned.
-
-``request.args.keys()`` - list of strings
- Returns the list of available keys - for the example this would be ``["foo", "bar"]``.
-
-``key in request.args`` - True or False
- You can use ``if key in request.args`` to check if a key is present.
-
-``for key in request.args`` - iterator
- This lets you loop through every available key.
-
-``len(request.args)`` - integer
- Returns the number of keys.
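As a worked example, a hypothetical plugin hook could combine several of the documented request properties::

    from datasette import hookimpl


    @hookimpl
    def extra_template_vars(request):
        # request can be None for some hook invocations, so guard first.
        if request is None:
            return {}
        return {
            "current_path": request.path,
            "full_url": request.url,
            "search_term": request.args.get("q", ""),
        }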
From 4d798ca0e3df246bd47f0600cc7b5118ba33ac16 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 11:17:20 -0700
Subject: [PATCH 0269/2124] Added test for db.mtime_ns
---
datasette/database.py | 4 +++-
tests/test_internals_database.py | 12 +++++++++++-
2 files changed, 14 insertions(+), 2 deletions(-)
diff --git a/datasette/database.py b/datasette/database.py
index 89bf47f4..ed119542 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -33,7 +33,7 @@ class Database:
self.cached_table_counts = None
self._write_thread = None
self._write_queue = None
- if not self.is_mutable:
+ if not self.is_mutable and not self.is_memory:
p = Path(path)
self.hash = inspect_hash(p)
self.cached_size = p.stat().st_size
@@ -197,6 +197,8 @@ class Database:
@property
def mtime_ns(self):
+ if self.is_memory:
+ return None
return Path(self.path).stat().st_mtime_ns
@property
diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py
index fde7ad2c..5d5520dd 100644
--- a/tests/test_internals_database.py
+++ b/tests/test_internals_database.py
@@ -1,7 +1,7 @@
"""
Tests for the datasette.database.Database class
"""
-from datasette.database import Results, MultipleValues
+from datasette.database import Database, Results, MultipleValues
from datasette.utils import sqlite3
from .fixtures import app_client
import pytest
@@ -188,3 +188,13 @@ async def test_execute_write_fn_exception(db):
with pytest.raises(AssertionError):
await db.execute_write_fn(write_fn, block=True)
+
+
+@pytest.mark.asyncio
+async def test_mtime_ns(db):
+ assert isinstance(db.mtime_ns, int)
+
+
+def test_mtime_ns_is_none_for_memory(app_client):
+ memory_db = Database(app_client.ds, is_memory=True)
+ assert None is memory_db.mtime_ns
From 124acf34a678f0af438dc31a2dceebf28612f249 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 11:39:46 -0700
Subject: [PATCH 0270/2124] Removed db.get_outbound_foreign_keys method
It duplicated the functionality of db.foreign_keys_for_table.
---
datasette/database.py | 5 -----
datasette/utils/__init__.py | 2 +-
datasette/views/table.py | 4 +---
3 files changed, 2 insertions(+), 9 deletions(-)
diff --git a/datasette/database.py b/datasette/database.py
index ed119542..ab3c82c9 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -319,11 +319,6 @@ class Database:
async def get_all_foreign_keys(self):
return await self.execute_fn(get_all_foreign_keys)
- async def get_outbound_foreign_keys(self, table):
- return await self.execute_fn(
- lambda conn: get_outbound_foreign_keys(conn, table)
- )
-
async def get_table_definition(self, table, type_="table"):
table_definition_rows = list(
await self.execute(
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index bf965413..2eb31502 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -428,7 +428,7 @@ def get_outbound_foreign_keys(conn, table):
if info is not None:
id, seq, table_name, from_, to_, on_update, on_delete, match = info
fks.append(
- {"other_table": table_name, "column": from_, "other_column": to_}
+ {"column": from_, "other_table": table_name, "other_column": to_}
)
return fks
diff --git a/datasette/views/table.py b/datasette/views/table.py
index a629346f..2e9515c3 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -336,9 +336,7 @@ class TableView(RowTableShared):
through_table = through_data["table"]
other_column = through_data["column"]
value = through_data["value"]
- outgoing_foreign_keys = await db.get_outbound_foreign_keys(
- through_table
- )
+ outgoing_foreign_keys = await db.foreign_keys_for_table(through_table)
try:
fk_to_us = [
fk for fk in outgoing_foreign_keys if fk["other_table"] == table
From c4fbe50676929b512940aab90de590a78ac5d7fc Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 11:40:30 -0700
Subject: [PATCH 0271/2124] Documentation for Database introspection methods,
closes #684
Refs #576
---
docs/internals.rst | 68 ++++++++++++++++++++++++++++++++++++++++++++++
docs/metadata.rst | 2 ++
2 files changed, 70 insertions(+)
diff --git a/docs/internals.rst b/docs/internals.rst
index 4db710c0..e9ba9567 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -302,3 +302,71 @@ Here's an example of ``block=True`` in action:
num_rows_left = await database.execute_write_fn(my_action, block=True)
except Exception as e:
print("An error occurred:", e)
+
+Database introspection
+----------------------
+
+The ``Database`` class also provides properties and methods for introspecting the database.
+
+``db.name`` - string
+ The name of the database - usually the filename without the ``.db`` extension.
+
+``db.size`` - integer
+ The size of the database file in bytes. 0 for ``:memory:`` databases.
+
+``db.mtime_ns`` - integer or None
+ The last modification time of the database file in nanoseconds since the epoch. ``None`` for ``:memory:`` databases.
+
+``await db.table_exists(table)`` - boolean
+ Check if a table called ``table`` exists.
+
+``await db.table_names()`` - list of strings
+ List of names of tables in the database.
+
+``await db.view_names()`` - list of strings
+ List of names of views in the database.
+
+``await db.table_columns(table)`` - list of strings
+ Names of columns in a specific table.
+
+``await db.primary_keys(table)`` - list of strings
+ Names of the columns that are part of the primary key for this table.
+
+``await db.fts_table(table)`` - string or None
+ The name of the FTS table associated with this table, if one exists.
+
+``await db.label_column_for_table(table)`` - string or None
+ The label column that is associated with this table - either automatically detected or using the ``"label_column"`` key from :ref:`metadata`, see :ref:`label_columns`.
+
+``await db.foreign_keys_for_table(table)`` - list of dictionaries
+ Details of columns in this table which are foreign keys to other tables. A list of dictionaries where each dictionary is shaped like this: ``{"column": string, "other_table": string, "other_column": string}``.
+
+``await db.hidden_table_names()`` - list of strings
+ List of tables which Datasette "hides" by default - usually these are tables associated with SQLite's full-text search feature, the SpatiaLite extension or tables hidden using the :ref:`metadata_hiding_tables` feature.
+
+``await db.get_table_definition(table)`` - string
+ Returns the SQL definition for the table - the ``CREATE TABLE`` statement and any associated ``CREATE INDEX`` statements.
+
+``await db.get_view_definition(view)`` - string
+ Returns the SQL definition of the named view.
+
+``await db.get_all_foreign_keys()`` - dictionary
+ Dictionary representing both incoming and outgoing foreign keys for this table. It has two keys, ``"incoming"`` and ``"outgoing"``, each of which is a list of dictionaries with keys ``"column"``, ``"other_table"`` and ``"other_column"``. For example:
+
+ .. code-block:: json
+
+ {
+ "incoming": [],
+ "outgoing": [
+ {
+ "other_table": "attraction_characteristic",
+ "column": "characteristic_id",
+ "other_column": "pk",
+ },
+ {
+ "other_table": "roadside_attractions",
+ "column": "attraction_id",
+ "other_column": "pk",
+ }
+ ]
+ }
diff --git a/docs/metadata.rst b/docs/metadata.rst
index 024af01e..471a52e3 100644
--- a/docs/metadata.rst
+++ b/docs/metadata.rst
@@ -256,6 +256,8 @@ used for the link label with the ``label_column`` property:
}
}
+.. _metadata_hiding_tables:
+
Hiding tables
-------------
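A short sketch exercising several of the introspection methods documented above, given a ``Database`` object (for example from ``datasette.get_database()``)::

    async def describe(db):
        print(db.name, db.size, db.mtime_ns)
        for table in await db.table_names():
            print(table, await db.table_columns(table))
            print("  primary keys:", await db.primary_keys(table))
            print("  foreign keys:", await db.foreign_keys_for_table(table))
        print("hidden:", await db.hidden_table_names())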
From 060a56735c1d3bde0a4c7674e82b5f45bef34dee Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 13:24:00 -0700
Subject: [PATCH 0272/2124] actor_from_request and permission_allowed
hookspecs, refs #699
---
datasette/hookspecs.py | 10 ++++++++++
docs/plugins.rst | 37 +++++++++++++++++++++++++++++++++++++
2 files changed, 47 insertions(+)
diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py
index c2fc0126..65c1c859 100644
--- a/datasette/hookspecs.py
+++ b/datasette/hookspecs.py
@@ -58,3 +58,13 @@ def register_output_renderer(datasette):
@hookspec
def register_facet_classes():
"Register Facet subclasses"
+
+
+@hookspec
+def actor_from_request(datasette, request):
+ "Return an actor dictionary based on the incoming request"
+
+
+@hookspec
+def permission_allowed(actor, action, resource_type, resource_identifier):
+ "Check if actor is allowed to perfom this action - return True, False or None"
diff --git a/docs/plugins.rst b/docs/plugins.rst
index f08f1217..09e8f5e3 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -941,3 +941,40 @@ This example plugin adds a ``x-databases`` HTTP header listing the currently att
return wrap_with_databases_header
Examples: `datasette-auth-github <https://github.com/simonw/datasette-auth-github>`_, `datasette-search-all <https://github.com/simonw/datasette-search-all>`_, `datasette-media <https://github.com/simonw/datasette-media>`_
+
+.. _plugin_actor_from_request:
+
+actor_from_request(datasette, request)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``datasette`` - :ref:`internals_datasette`
+ You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.
+
+``request`` - object
+ The current HTTP :ref:`internals_request`.
+
+This is part of Datasette's authentication and permissions system. The function should attempt to authenticate an actor (either a user or an API actor of some sort) based on information in the request.
+
+If it cannot authenticate an actor, it should return ``None``. Otherwise it should return a dictionary representing that actor.
+
+.. _plugin_permission_allowed:
+
+permission_allowed(datasette, actor, action, resource_type, resource_identifier)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``datasette`` - :ref:`internals_datasette`
+ You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.
+
+``actor`` - dictionary
+ The current actor, as decided by :ref:`plugin_actor_from_request`.
+
+``action`` - string
+ The action to be performed, e.g. ``"edit-table"``.
+
+``resource_type`` - string
+ The type of resource being acted on, e.g. ``"table"``.
+
+``resource_identifier`` - string
+ An identifier for the individual resource, e.g. the name of the table.
+
+Called to check that an actor has permission to perform an action on a resource. Can return ``True`` if the action is allowed, ``False`` if the action is not allowed or ``None`` if the plugin does not have an opinion one way or the other.
From 461c82838d65dd9f61c5be725343a82c61b5c3f3 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 15:06:33 -0700
Subject: [PATCH 0273/2124] Implemented actor_from_request with tests, refs
#699
Also added datasette argument to permission_allowed hook
---
datasette/app.py | 13 ++++++++++++-
datasette/hookspecs.py | 2 +-
docs/plugins.rst | 23 +++++++++++++++++++++++
tests/plugins/my_plugin.py | 8 ++++++++
tests/plugins/my_plugin_2.py | 12 ++++++++++++
tests/test_plugins.py | 24 ++++++++++++++++++++++++
6 files changed, 80 insertions(+), 2 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 4b9807b0..3f2876ec 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -798,7 +798,18 @@ class DatasetteRouter(AsgiRouter):
and scope.get("scheme") != "https"
):
scope = dict(scope, scheme="https")
- return await super().route_path(scope, receive, send, path)
+ # Handle authentication
+ actor = None
+ for actor in pm.hook.actor_from_request(
+ datasette=self.ds, request=Request(scope, receive)
+ ):
+ if callable(actor):
+ actor = actor()
+ if asyncio.iscoroutine(actor):
+ actor = await actor
+ if actor:
+ break
+ return await super().route_path(dict(scope, actor=actor), receive, send, path)
async def handle_404(self, scope, receive, send, exception=None):
# If URL has a trailing slash, redirect to URL without it
diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py
index 65c1c859..71d06661 100644
--- a/datasette/hookspecs.py
+++ b/datasette/hookspecs.py
@@ -66,5 +66,5 @@ def actor_from_request(datasette, request):
@hookspec
-def permission_allowed(actor, action, resource_type, resource_identifier):
+def permission_allowed(datasette, actor, action, resource_type, resource_identifier):
"Check if actor is allowed to perfom this action - return True, False or None"
diff --git a/docs/plugins.rst b/docs/plugins.rst
index 09e8f5e3..fb2843f4 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -957,6 +957,29 @@ This is part of Datasette's authentication and permissions system. The function
If it cannot authenticate an actor, it should return ``None``. Otherwise it should return a dictionary representing that actor.
+Instead of returning a dictionary, this function can return an awaitable function which itself returns either ``None`` or a dictionary. This is useful for authentication functions that need to make a database query - for example:
+
+.. code-block:: python
+
+ from datasette import hookimpl
+
+ @hookimpl
+ def actor_from_request(datasette, request):
+ async def inner():
+ token = request.args.get("_token")
+ if not token:
+ return None
+ # Look up ?_token=xxx in sessions table
+ result = await datasette.get_database().execute(
+ "select count(*) from sessions where token = ?", [token]
+ )
+ if result.first()[0]:
+ return {"token": token}
+ else:
+ return None
+
+ return inner
+
.. _plugin_permission_allowed:
permission_allowed(datasette, actor, action, resource_type, resource_identifier)
diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py
index 10d7e7e6..305cb3b7 100644
--- a/tests/plugins/my_plugin.py
+++ b/tests/plugins/my_plugin.py
@@ -126,3 +126,11 @@ class DummyFacet(Facet):
facet_results = {}
facets_timed_out = []
return facet_results, facets_timed_out
+
+
+@hookimpl
+def actor_from_request(datasette, request):
+ if request.args.get("_bot"):
+ return {"id": "bot"}
+ else:
+ return None
diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py
index c9e7c78f..0a5cbba5 100644
--- a/tests/plugins/my_plugin_2.py
+++ b/tests/plugins/my_plugin_2.py
@@ -95,3 +95,15 @@ def asgi_wrapper(datasette):
return add_x_databases_header
return wrap_with_databases_header
+
+
+@hookimpl
+def actor_from_request(datasette, request):
+ async def inner():
+ if request.args.get("_bot2"):
+ result = await datasette.get_database().execute("select 1 + 1")
+ return {"id": "bot2", "1+1": result.first()[0]}
+ else:
+ return None
+
+ return inner
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index a34328a9..3ad26986 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -503,3 +503,27 @@ def test_register_facet_classes(app_client):
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet=pk3",
},
] == data["suggested_facets"]
+
+
+def test_actor_from_request(app_client):
+ app_client.get("/")
+ # Should have no actor
+ assert None == app_client.ds._last_request.scope["actor"]
+ app_client.get("/?_bot=1")
+ # Should have bot actor
+ assert {"id": "bot"} == app_client.ds._last_request.scope["actor"]
+
+
+def test_actor_from_request_async(app_client):
+ app_client.get("/")
+ # Should have no actor
+ assert None == app_client.ds._last_request.scope["actor"]
+ app_client.get("/?_bot2=1")
+ # Should have bot2 actor
+ assert {"id": "bot2", "1+1": 2} == app_client.ds._last_request.scope["actor"]
+
+
+@pytest.mark.xfail
+def test_permission_allowed(app_client):
+ # TODO
+ assert False
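
The ``route_path()`` change at the top of this patch has to cope with hook results that may be a plain value, a callable, or a callable returning a coroutine. A standalone sketch of that resolution pattern - the helper and demo names are illustrative, not part of the patch:

.. code-block:: python

    import asyncio

    async def resolve_hook_result(result):
        # Hook implementations may return a value directly, or a callable
        # which itself may return a coroutine - unwrap both cases
        if callable(result):
            result = result()
        if asyncio.iscoroutine(result):
            result = await result
        return result

    async def demo():
        # A plain value passes straight through:
        assert await resolve_hook_result({"id": "bot"}) == {"id": "bot"}

        # An async inner function is called, then awaited:
        async def inner():
            return {"id": "bot2"}

        assert await resolve_hook_result(inner) == {"id": "bot2"}

    asyncio.run(demo())
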
From 9315bacf6f63e20781d21d170e55a55b2c54fcdd Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 15:24:43 -0700
Subject: [PATCH 0274/2124] Implemented datasette.permission_allowed(), refs
#699
---
datasette/app.py | 19 +++++++++++++++++++
docs/internals.rst | 19 +++++++++++++++++++
tests/plugins/my_plugin.py | 8 ++++++++
tests/plugins/my_plugin_2.py | 13 +++++++++++++
tests/test_plugins.py | 20 ++++++++++++++++----
5 files changed, 75 insertions(+), 4 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 3f2876ec..773dee31 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -406,6 +406,25 @@ class Datasette:
# pylint: disable=no-member
pm.hook.prepare_connection(conn=conn, database=database, datasette=self)
+ async def permission_allowed(
+ self, actor, action, resource_type=None, resource_identifier=None, default=False
+ ):
+ "Check permissions using the permissions_allowed plugin hook"
+ for check in pm.hook.permission_allowed(
+ datasette=self,
+ actor=actor,
+ action=action,
+ resource_type=resource_type,
+ resource_identifier=resource_identifier,
+ ):
+ if callable(check):
+ check = check()
+ if asyncio.iscoroutine(check):
+ check = await check
+ if check is not None:
+ return check
+ return default
+
async def execute(
self,
db_name,
diff --git a/docs/internals.rst b/docs/internals.rst
index e9ba9567..2ba70722 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -111,6 +111,25 @@ This method lets you read plugin configuration values that were set in ``metadat
Renders a `Jinja template `__ using Datasette's preconfigured instance of Jinja and returns the resulting string. The template will have access to Datasette's default template functions and any functions that have been made available by other plugins.
+await .permission_allowed(actor, action, resource_type=None, resource_identifier=None, default=False)
+-----------------------------------------------------------------------------------------------------
+
+``actor`` - dictionary
+ The authenticated actor. This is usually ``request.scope.get("actor")``.
+
+``action`` - string
+ The name of the action that is being permission checked.
+
+``resource_type`` - string, optional
+ The type of resource being checked, e.g. ``"table"``.
+
+``resource_identifier`` - string, optional
+ The resource identifier, e.g. the name of the table.
+
+Check if the given actor has permission to perform the given action on the given resource. This uses plugins that implement the :ref:`plugin_permission_allowed` plugin hook to decide if the action is allowed or not.
+
+If none of the plugins express an opinion, the return value will be the ``default`` argument. This defaults to deny, but you can pass ``default=True`` to default to allow instead.
+
.. _datasette_get_database:
.get_database(name)
diff --git a/tests/plugins/my_plugin.py b/tests/plugins/my_plugin.py
index 305cb3b7..46893710 100644
--- a/tests/plugins/my_plugin.py
+++ b/tests/plugins/my_plugin.py
@@ -134,3 +134,11 @@ def actor_from_request(datasette, request):
return {"id": "bot"}
else:
return None
+
+
+@hookimpl
+def permission_allowed(actor, action):
+ if action == "this_is_allowed":
+ return True
+ elif action == "this_is_denied":
+ return False
diff --git a/tests/plugins/my_plugin_2.py b/tests/plugins/my_plugin_2.py
index 0a5cbba5..039112f4 100644
--- a/tests/plugins/my_plugin_2.py
+++ b/tests/plugins/my_plugin_2.py
@@ -107,3 +107,16 @@ def actor_from_request(datasette, request):
return None
return inner
+
+
+@hookimpl
+def permission_allowed(datasette, actor, action):
+ # Testing asyncio version of permission_allowed
+ async def inner():
+ assert 2 == (await datasette.get_database().execute("select 1 + 1")).first()[0]
+ if action == "this_is_allowed_async":
+ return True
+ elif action == "this_is_denied_async":
+ return False
+
+ return inner
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 3ad26986..e123b7a0 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -523,7 +523,19 @@ def test_actor_from_request_async(app_client):
assert {"id": "bot2", "1+1": 2} == app_client.ds._last_request.scope["actor"]
-@pytest.mark.xfail
-def test_permission_allowed(app_client):
- # TODO
- assert False
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "action,expected",
+ [
+ ("this_is_allowed", True),
+ ("this_is_denied", False),
+ ("this_is_allowed_async", True),
+ ("this_is_denied_async", False),
+ ("no_match", None),
+ ],
+)
+async def test_permission_allowed(app_client, action, expected):
+ actual = await app_client.ds.permission_allowed(
+ {"id": "actor"}, action, default=None
+ )
+ assert expected == actual
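
With the method in place, plugin or application code can ask the ``Datasette`` instance for a permission decision. A minimal sketch, assuming ``datasette`` and ``actor`` are already in scope (for example inside a plugin hook), reusing the action names from the test plugins above:

.. code-block:: python

    async def describe_permissions(datasette, actor):
        # default is False: deny unless some plugin says otherwise
        allowed = await datasette.permission_allowed(actor, "this_is_allowed")
        # default=None makes "no plugin had an opinion" detectable
        opinion = await datasette.permission_allowed(actor, "no_match", default=None)
        return allowed, opinion  # (True, None) with the test plugins above
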
From 1fc6ceefb9eddd29844e7bfe3e06a83df6ce3dc4 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 30 May 2020 18:51:00 -0700
Subject: [PATCH 0275/2124] Added /-/actor.json - refs #699
Also added JSON highlighting to introspection documentation.
---
datasette/app.py | 7 ++++++
datasette/views/special.py | 8 +++++--
docs/introspection.rst | 44 ++++++++++++++++++++++++++++++++------
tests/test_plugins.py | 7 ++++++
4 files changed, 57 insertions(+), 9 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 773dee31..37b4ed3d 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -586,6 +586,9 @@ class Datasette:
)
return d
+ def _actor(self, request):
+ return {"actor": request.scope.get("actor", None)}
+
def table_metadata(self, database, table):
"Fetch table-specific metadata."
return (
@@ -762,6 +765,10 @@ class Datasette:
JsonDataView.as_asgi(self, "databases.json", self._connected_databases),
r"/-/databases(?P(\.json)?)$",
)
+ add_route(
+ JsonDataView.as_asgi(self, "actor.json", self._actor, needs_request=True),
+ r"/-/actor(?P(\.json)?)$",
+ )
add_route(
PatternPortfolioView.as_asgi(self), r"/-/patterns$",
)
diff --git a/datasette/views/special.py b/datasette/views/special.py
index dfe5ea8c..840473a7 100644
--- a/datasette/views/special.py
+++ b/datasette/views/special.py
@@ -6,13 +6,17 @@ from .base import BaseView
class JsonDataView(BaseView):
name = "json_data"
- def __init__(self, datasette, filename, data_callback):
+ def __init__(self, datasette, filename, data_callback, needs_request=False):
self.ds = datasette
self.filename = filename
self.data_callback = data_callback
+ self.needs_request = needs_request
async def get(self, request, as_format):
- data = self.data_callback()
+ if self.needs_request:
+ data = self.data_callback(request)
+ else:
+ data = self.data_callback()
if as_format:
headers = {}
if self.ds.cors:
diff --git a/docs/introspection.rst b/docs/introspection.rst
index 3cd4a40f..e5d08dbc 100644
--- a/docs/introspection.rst
+++ b/docs/introspection.rst
@@ -10,7 +10,9 @@ Each of these pages can be viewed in your browser. Add ``.json`` to the URL to g
/-/metadata
-----------
-Shows the contents of the ``metadata.json`` file that was passed to ``datasette serve``, if any. `Metadata example `_::
+Shows the contents of the ``metadata.json`` file that was passed to ``datasette serve``, if any. `Metadata example `_:
+
+.. code-block:: json
{
"license": "CC Attribution 4.0 License",
@@ -18,7 +20,9 @@ Shows the contents of the ``metadata.json`` file that was passed to ``datasette
"source": "fivethirtyeight/data on GitHub",
"source_url": "https://github.com/fivethirtyeight/data",
"title": "Five Thirty Eight",
- "databases": {...}
+ "databases": {
+
+ }
}
.. _JsonDataView_versions:
@@ -26,7 +30,9 @@ Shows the contents of the ``metadata.json`` file that was passed to ``datasette
/-/versions
-----------
-Shows the version of Datasette, Python and SQLite. `Versions example `_::
+Shows the version of Datasette, Python and SQLite. `Versions example `_:
+
+.. code-block:: json
{
"datasette": {
@@ -63,7 +69,9 @@ Shows the version of Datasette, Python and SQLite. `Versions example `_::
+Shows a list of currently installed plugins and their versions. `Plugins example `_:
+
+.. code-block:: json
[
{
@@ -79,7 +87,9 @@ Shows a list of currently installed plugins and their versions. `Plugins example
/-/config
---------
-Shows the :ref:`config` options for this instance of Datasette. `Config example `_::
+Shows the :ref:`config` options for this instance of Datasette. `Config example `_:
+
+.. code-block:: json
{
"default_facet_size": 30,
@@ -95,7 +105,9 @@ Shows the :ref:`config` options for this instance of Datasette. `Config example
/-/databases
------------
-Shows currently attached databases. `Databases example `_::
+Shows currently attached databases. `Databases example `_:
+
+.. code-block:: json
[
{
@@ -113,7 +125,9 @@ Shows currently attached databases. `Databases example `_::
+Shows details of threads and ``asyncio`` tasks. `Threads example `_:
+
+.. code-block:: json
{
"num_threads": 2,
@@ -136,3 +150,19 @@ Shows details of threads and ``asyncio`` tasks. `Threads example wait_for=()]>>"
]
}
+
+.. _JsonDataView_actor:
+
+/-/actor
+--------
+
+Shows the currently authenticated actor. Useful for debugging Datasette authentication plugins.
+
+.. code-block:: json
+
+ {
+ "actor": {
+ "id": 1,
+ "username": "some-user"
+ }
+ }
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index e123b7a0..7a3fb49a 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -539,3 +539,10 @@ async def test_permission_allowed(app_client, action, expected):
{"id": "actor"}, action, default=None
)
assert expected == actual
+
+
+def test_actor_json(app_client):
+ assert {"actor": None} == app_client.get("/-/actor.json").json
+ assert {"actor": {"id": "bot2", "1+1": 2}} == app_client.get(
+ "/-/actor.json/?_bot2=1"
+ ).json
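
The new ``needs_request=True`` flag lets a ``JsonDataView`` data callback vary its output per request, which is how ``/-/actor`` works. A sketch of the two callback shapes the view now supports - the function names here are illustrative:

.. code-block:: python

    def static_data():
        # needs_request=False (the default): called with no arguments,
        # so the output is the same for every request
        return {"example": "static"}

    def per_request_data(request):
        # needs_request=True: the current request is passed in, so the
        # output can depend on request.scope - here the authenticated actor
        return {"actor": request.scope.get("actor", None)}
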
From fa27e44fe09f57dcb87157be97f15b6add7f14ad Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 31 May 2020 15:42:08 -0700
Subject: [PATCH 0276/2124] datasette.sign() and datasette.unsign() methods,
refs #785
---
datasette/app.py | 9 +++++++++
datasette/cli.py | 7 +++++++
docs/datasette-serve-help.txt | 3 +++
docs/internals.rst | 28 ++++++++++++++++++++++++++++
setup.py | 1 +
tests/test_cli.py | 1 +
tests/test_internals_datasette.py | 12 ++++++++++++
7 files changed, 61 insertions(+)
diff --git a/datasette/app.py b/datasette/app.py
index 37b4ed3d..5e3d3af5 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -14,6 +14,7 @@ from pathlib import Path
import click
from markupsafe import Markup
+from itsdangerous import URLSafeSerializer
import jinja2
from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader, escape
from jinja2.environment import Template
@@ -163,12 +164,14 @@ class Datasette:
static_mounts=None,
memory=False,
config=None,
+ secret=None,
version_note=None,
config_dir=None,
):
assert config_dir is None or isinstance(
config_dir, Path
), "config_dir= should be a pathlib.Path"
+ self._secret = secret or os.urandom(32).hex()
self.files = tuple(files) + tuple(immutables or [])
if config_dir:
self.files += tuple([str(p) for p in config_dir.glob("*.db")])
@@ -281,6 +284,12 @@ class Datasette:
self._register_renderers()
+ def sign(self, value, namespace="default"):
+ return URLSafeSerializer(self._secret, namespace).dumps(value)
+
+ def unsign(self, signed, namespace="default"):
+ return URLSafeSerializer(self._secret, namespace).loads(signed)
+
def get_database(self, name=None):
if name is None:
return next(iter(self.databases.values()))
diff --git a/datasette/cli.py b/datasette/cli.py
index c59fb6e0..dba3a612 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -299,6 +299,11 @@ def package(
help="Set config option using configname:value datasette.readthedocs.io/en/latest/config.html",
multiple=True,
)
+@click.option(
+ "--secret",
+ help="Secret used for signing secure values, such as signed cookies",
+ envvar="DATASETTE_SECRET",
+)
@click.option("--version-note", help="Additional note to show on /-/versions")
@click.option("--help-config", is_flag=True, help="Show available config options")
def serve(
@@ -317,6 +322,7 @@ def serve(
static,
memory,
config,
+ secret,
version_note,
help_config,
return_instance=False,
@@ -362,6 +368,7 @@ def serve(
static_mounts=static,
config=dict(config),
memory=memory,
+ secret=secret,
version_note=version_note,
)
diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt
index 5265c294..ab27714a 100644
--- a/docs/datasette-serve-help.txt
+++ b/docs/datasette-serve-help.txt
@@ -29,6 +29,9 @@ Options:
--config CONFIG Set config option using configname:value
datasette.readthedocs.io/en/latest/config.html
+ --secret TEXT Secret used for signing secure values, such as signed
+ cookies
+
--version-note TEXT Additional note to show on /-/versions
--help-config Show available config options
--help Show this message and exit.
diff --git a/docs/internals.rst b/docs/internals.rst
index 2ba70722..68a35312 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -183,6 +183,34 @@ Use ``is_memory`` if the connection is to an in-memory SQLite database.
This removes a database that has been previously added. ``name=`` is the unique name of that database, also used in the URL for it.
+.. _datasette_sign:
+
+.sign(value, namespace="default")
+---------------------------------
+
+``value`` - any serializable type
+ The value to be signed.
+
+``namespace`` - string, optional
+ An alternative namespace, see the `itsdangerous salt documentation `__.
+
+Utility method for signing values, such that you can safely pass data to and from an untrusted environment. This is a wrapper around the `itsdangerous `__ library.
+
+This method returns a signed string, which can be decoded and verified using :ref:`datasette_unsign`.
+
+.. _datasette_unsign:
+
+.unsign(signed, namespace="default")
+------------------------------------
+
+``signed`` - string
+ The signed string that was created using :ref:`datasette_sign`.
+
+``namespace`` - string, optional
+ The alternative namespace, if one was used.
+
+Returns the original, decoded object that was passed to :ref:`datasette_sign`. If the signature is not valid this raises an ``itsdangerous.BadSignature`` exception.
+
.. _internals_database:
Database class
diff --git a/setup.py b/setup.py
index d9c70de5..93628266 100644
--- a/setup.py
+++ b/setup.py
@@ -55,6 +55,7 @@ setup(
"janus>=0.4,<0.6",
"PyYAML~=5.3",
"mergedeep>=1.1.1,<1.4.0",
+ "itsdangerous~=1.1",
],
entry_points="""
[console_scripts]
diff --git a/tests/test_cli.py b/tests/test_cli.py
index ac5746c6..f52f17b4 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -75,6 +75,7 @@ def test_metadata_yaml():
static=[],
memory=False,
config=[],
+ secret=None,
version_note=None,
help_config=False,
return_instance=True,
diff --git a/tests/test_internals_datasette.py b/tests/test_internals_datasette.py
index 4993250d..0be0b932 100644
--- a/tests/test_internals_datasette.py
+++ b/tests/test_internals_datasette.py
@@ -1,6 +1,7 @@
"""
Tests for the datasette.app.Datasette class
"""
+from itsdangerous import BadSignature
from .fixtures import app_client
import pytest
@@ -21,3 +22,14 @@ def test_get_database_no_argument(datasette):
# Returns the first available database:
db = datasette.get_database()
assert "fixtures" == db.name
+
+
+@pytest.mark.parametrize("value", ["hello", 123, {"key": "value"}])
+@pytest.mark.parametrize("namespace", [None, "two"])
+def test_sign_unsign(datasette, value, namespace):
+ extra_args = [namespace] if namespace else []
+ signed = datasette.sign(value, *extra_args)
+ assert value != signed
+ assert value == datasette.unsign(signed, *extra_args)
+ with pytest.raises(BadSignature):
+ datasette.unsign(signed[:-1] + ("!" if signed[-1] != "!" else ":"))
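
Since ``sign()`` and ``unsign()`` are thin wrappers around ``itsdangerous.URLSafeSerializer``, their behaviour can be sketched without a ``Datasette`` instance at all. A minimal sketch against the itsdangerous 1.x API pinned in ``setup.py`` above; the secret is deliberately fake:

.. code-block:: python

    from itsdangerous import BadSignature, URLSafeSerializer

    SECRET = "not-a-real-secret"

    def sign(value, namespace="default"):
        return URLSafeSerializer(SECRET, namespace).dumps(value)

    def unsign(signed, namespace="default"):
        return URLSafeSerializer(SECRET, namespace).loads(signed)

    signed = sign({"id": "root"}, "actor")
    assert unsign(signed, "actor") == {"id": "root"}

    # The namespace is used as the itsdangerous salt - a mismatch fails:
    try:
        unsign(signed, "other")
    except BadSignature:
        print("signature rejected, as expected")
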
From 7690d5ba40fda37ba4ba38ad56fe06c3aed071de Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 31 May 2020 17:18:06 -0700
Subject: [PATCH 0277/2124] Docs for --secret/DATASETTE_SECRET - closes #785
---
docs/config.rst | 27 +++++++++++++++++++++++++++
1 file changed, 27 insertions(+)
diff --git a/docs/config.rst b/docs/config.rst
index d8c2f550..da93e40a 100644
--- a/docs/config.rst
+++ b/docs/config.rst
@@ -288,3 +288,30 @@ For example, if you are sending traffic from ``https://www.example.com/tools/dat
You can do that like so::
datasette mydatabase.db --config base_url:/tools/datasette/
+
+.. _config_secret:
+
+Configuring the secret
+----------------------
+
+Datasette uses a secret string to sign secure values such as cookies.
+
+If you do not provide a secret, Datasette will create one when it starts up. However, this secret will change each time the Datasette server restarts, so things like authentication cookies will not stay valid between restarts.
+
+You can pass a secret to Datasette in two ways: with the ``--secret`` command-line option or by setting a ``DATASETTE_SECRET`` environment variable.
+
+::
+
+ $ datasette mydb.db --secret=SECRET_VALUE_HERE
+
+Or::
+
+ $ export DATASETTE_SECRET=SECRET_VALUE_HERE
+ $ datasette mydb.db
+
+One way to generate a secure random secret is to use Python like this::
+
+ $ python3 -c 'import os; print(os.urandom(32).hex())'
+ cdb19e94283a20f9d42cca50c5a4871c0aa07392db308755d60a1a5b9bb0fa52
+
+Plugin authors can make use of this signing mechanism in their plugins via :ref:`datasette_sign` and :ref:`datasette_unsign`.
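
When embedding Datasette in tests or other Python code, the same secret can be supplied through the ``secret=`` constructor argument added in the previous patch. A small sketch, assuming a ``mydb.db`` file exists alongside the script:

.. code-block:: python

    from datasette.app import Datasette

    ds = Datasette(["mydb.db"], secret="fixed-secret-for-tests")
    signed = ds.sign({"id": "root"}, "actor")

    # A second instance created with the same secret can verify the value -
    # this is what keeps cookies valid across restarts:
    ds2 = Datasette(["mydb.db"], secret="fixed-secret-for-tests")
    assert ds2.unsign(signed, "actor") == {"id": "root"}
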
From 9f3d4aba31baf1e2de1910a40bc9663ef53b94e9 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 31 May 2020 18:03:17 -0700
Subject: [PATCH 0278/2124] --root option and /-/auth-token view, refs #784
---
datasette/app.py | 6 +++++-
datasette/cli.py | 8 ++++++++
datasette/plugins.py | 1 +
datasette/views/special.py | 32 +++++++++++++++++++++++++++++++-
docs/datasette-serve-help.txt | 1 +
tests/fixtures.py | 19 +++++++++++++++----
tests/test_auth.py | 25 +++++++++++++++++++++++++
tests/test_cli.py | 1 +
tests/test_docs.py | 4 ++--
9 files changed, 89 insertions(+), 8 deletions(-)
create mode 100644 tests/test_auth.py
diff --git a/datasette/app.py b/datasette/app.py
index 5e3d3af5..6b39ce12 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -24,7 +24,7 @@ import uvicorn
from .views.base import DatasetteError, ureg, AsgiRouter
from .views.database import DatabaseDownload, DatabaseView
from .views.index import IndexView
-from .views.special import JsonDataView, PatternPortfolioView
+from .views.special import JsonDataView, PatternPortfolioView, AuthTokenView
from .views.table import RowView, TableView
from .renderer import json_renderer
from .database import Database, QueryInterrupted
@@ -283,6 +283,7 @@ class Datasette:
pm.hook.prepare_jinja2_environment(env=self.jinja_env)
self._register_renderers()
+ self._root_token = os.urandom(32).hex()
def sign(self, value, namespace="default"):
return URLSafeSerializer(self._secret, namespace).dumps(value)
@@ -778,6 +779,9 @@ class Datasette:
JsonDataView.as_asgi(self, "actor.json", self._actor, needs_request=True),
r"/-/actor(?P(\.json)?)$",
)
+ add_route(
+ AuthTokenView.as_asgi(self), r"/-/auth-token$",
+ )
add_route(
PatternPortfolioView.as_asgi(self), r"/-/patterns$",
)
diff --git a/datasette/cli.py b/datasette/cli.py
index dba3a612..23f9e36b 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -304,6 +304,11 @@ def package(
help="Secret used for signing secure values, such as signed cookies",
envvar="DATASETTE_SECRET",
)
+@click.option(
+ "--root",
+ help="Output URL that sets a cookie authenticating the root user",
+ is_flag=True,
+)
@click.option("--version-note", help="Additional note to show on /-/versions")
@click.option("--help-config", is_flag=True, help="Show available config options")
def serve(
@@ -323,6 +328,7 @@ def serve(
memory,
config,
secret,
+ root,
version_note,
help_config,
return_instance=False,
@@ -387,6 +393,8 @@ def serve(
asyncio.get_event_loop().run_until_complete(check_databases(ds))
# Start the server
+ if root:
+ print("http://{}:{}/-/auth-token?token={}".format(host, port, ds._root_token))
uvicorn.run(ds.app(), host=host, port=port, log_level="info")
diff --git a/datasette/plugins.py b/datasette/plugins.py
index 6c9677d0..487fce4d 100644
--- a/datasette/plugins.py
+++ b/datasette/plugins.py
@@ -9,6 +9,7 @@ DEFAULT_PLUGINS = (
"datasette.publish.cloudrun",
"datasette.facets",
"datasette.sql_functions",
+ "datasette.actor_auth_cookie",
)
pm = pluggy.PluginManager("datasette")
diff --git a/datasette/views/special.py b/datasette/views/special.py
index 840473a7..910193e8 100644
--- a/datasette/views/special.py
+++ b/datasette/views/special.py
@@ -1,6 +1,8 @@
import json
from datasette.utils.asgi import Response
from .base import BaseView
+from http.cookies import SimpleCookie
+import secrets
class JsonDataView(BaseView):
@@ -45,4 +47,32 @@ class PatternPortfolioView(BaseView):
self.ds = datasette
async def get(self, request):
- return await self.render(["patterns.html"], request=request,)
+ return await self.render(["patterns.html"], request=request)
+
+
+class AuthTokenView(BaseView):
+ name = "auth_token"
+
+ def __init__(self, datasette):
+ self.ds = datasette
+
+ async def get(self, request):
+ token = request.args.get("token") or ""
+ if not self.ds._root_token:
+ return Response("Root token has already been used", status=403)
+ if secrets.compare_digest(token, self.ds._root_token):
+ self.ds._root_token = None
+ cookie = SimpleCookie()
+ cookie["ds_actor"] = self.ds.sign({"id": "root"}, "actor")
+ cookie["ds_actor"]["path"] = "/"
+ response = Response(
+ body="",
+ status=302,
+ headers={
+ "Location": "/",
+ "set-cookie": cookie.output(header="").lstrip(),
+ },
+ )
+ return response
+ else:
+ return Response("Invalid token", status=403)
diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt
index ab27714a..183ecc14 100644
--- a/docs/datasette-serve-help.txt
+++ b/docs/datasette-serve-help.txt
@@ -32,6 +32,7 @@ Options:
--secret TEXT Secret used for signing secure values, such as signed
cookies
+ --root Output URL that sets a cookie authenticating the root user
--version-note TEXT Additional note to show on /-/versions
--help-config Show available config options
--help Show this message and exit.
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 9479abf6..b2cfd3d6 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -2,6 +2,7 @@ from datasette.app import Datasette
from datasette.utils import sqlite3
from asgiref.testing import ApplicationCommunicator
from asgiref.sync import async_to_sync
+from http.cookies import SimpleCookie
import itertools
import json
import os
@@ -44,10 +45,14 @@ class TestClient:
self.asgi_app = asgi_app
@async_to_sync
- async def get(self, path, allow_redirects=True, redirect_count=0, method="GET"):
- return await self._get(path, allow_redirects, redirect_count, method)
+ async def get(
+ self, path, allow_redirects=True, redirect_count=0, method="GET", cookies=None
+ ):
+ return await self._get(path, allow_redirects, redirect_count, method, cookies)
- async def _get(self, path, allow_redirects=True, redirect_count=0, method="GET"):
+ async def _get(
+ self, path, allow_redirects=True, redirect_count=0, method="GET", cookies=None
+ ):
query_string = b""
if "?" in path:
path, _, query_string = path.partition("?")
@@ -56,6 +61,12 @@ class TestClient:
raw_path = path.encode("latin-1")
else:
raw_path = quote(path, safe="/:,").encode("latin-1")
+ headers = [[b"host", b"localhost"]]
+ if cookies:
+ sc = SimpleCookie()
+ for key, value in cookies.items():
+ sc[key] = value
+ headers.append([b"cookie", sc.output(header="").encode("utf-8")])
scope = {
"type": "http",
"http_version": "1.0",
@@ -63,7 +74,7 @@ class TestClient:
"path": unquote(path),
"raw_path": raw_path,
"query_string": query_string,
- "headers": [[b"host", b"localhost"]],
+ "headers": headers,
}
instance = ApplicationCommunicator(self.asgi_app, scope)
await instance.send_input({"type": "http.request"})
diff --git a/tests/test_auth.py b/tests/test_auth.py
new file mode 100644
index 00000000..6b69ab93
--- /dev/null
+++ b/tests/test_auth.py
@@ -0,0 +1,25 @@
+from .fixtures import app_client
+
+
+def test_auth_token(app_client):
+ "The /-/auth-token endpoint sets the correct cookie"
+ assert app_client.ds._root_token is not None
+ path = "/-/auth-token?token={}".format(app_client.ds._root_token)
+ response = app_client.get(path, allow_redirects=False,)
+ assert 302 == response.status
+ assert "/" == response.headers["Location"]
+ set_cookie = response.headers["set-cookie"]
+ assert set_cookie.endswith("; Path=/")
+ assert set_cookie.startswith("ds_actor=")
+ cookie_value = set_cookie.split("ds_actor=")[1].split("; Path=/")[0]
+ assert {"id": "root"} == app_client.ds.unsign(cookie_value, "actor")
+ # Check that a second request with the same token fails
+ assert app_client.ds._root_token is None
+ assert 403 == app_client.get(path, allow_redirects=False,).status
+
+
+def test_actor_cookie(app_client):
+ "A valid actor cookie sets request.scope['actor']"
+ cookie = app_client.ds.sign({"id": "test"}, "actor")
+ response = app_client.get("/", cookies={"ds_actor": cookie})
+ assert {"id": "test"} == app_client.ds._last_request.scope["actor"]
diff --git a/tests/test_cli.py b/tests/test_cli.py
index f52f17b4..529661ce 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -76,6 +76,7 @@ def test_metadata_yaml():
memory=False,
config=[],
secret=None,
+ root=False,
version_note=None,
help_config=False,
return_instance=True,
diff --git a/tests/test_docs.py b/tests/test_docs.py
index 77c2a611..09c00ddf 100644
--- a/tests/test_docs.py
+++ b/tests/test_docs.py
@@ -65,8 +65,8 @@ def documented_views():
first_word = label.split("_")[0]
if first_word.endswith("View"):
view_labels.add(first_word)
- # We deliberately don't document this one:
- view_labels.add("PatternPortfolioView")
+ # We deliberately don't document these:
+ view_labels.update(("PatternPortfolioView", "AuthTokenView"))
return view_labels
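
``AuthTokenView`` combines two small patterns: constant-time comparison of a single-use token, and issuing a signed ``ds_actor`` cookie. A standalone sketch of just those mechanics - the class and serializer here are illustrative, not part of the patch:

.. code-block:: python

    import os
    import secrets
    from http.cookies import SimpleCookie

    from itsdangerous import URLSafeSerializer

    class RootTokenGate:
        def __init__(self):
            self.token = os.urandom(32).hex()
            self.serializer = URLSafeSerializer("demo-secret", "actor")

        def redeem(self, token):
            # compare_digest avoids timing attacks; a spent gate rejects all
            if self.token is None or not secrets.compare_digest(token, self.token):
                return None
            self.token = None  # single use
            cookie = SimpleCookie()
            cookie["ds_actor"] = self.serializer.dumps({"id": "root"})
            cookie["ds_actor"]["path"] = "/"
            return cookie.output(header="").lstrip()

    gate = RootTokenGate()
    set_cookie = gate.redeem(gate.token)
    assert set_cookie.startswith("ds_actor=")
    assert gate.redeem("anything") is None  # the token only works once
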
From 57cf5139c552cb7feab9947daa949ca434cc0a66 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 31 May 2020 18:06:16 -0700
Subject: [PATCH 0279/2124] Default actor_from_request hook supporting ds_actor
signed cookie
Refs #784, refs #699
---
datasette/actor_auth_cookie.py | 18 ++++++++++++++++++
1 file changed, 18 insertions(+)
create mode 100644 datasette/actor_auth_cookie.py
diff --git a/datasette/actor_auth_cookie.py b/datasette/actor_auth_cookie.py
new file mode 100644
index 00000000..41f33fe9
--- /dev/null
+++ b/datasette/actor_auth_cookie.py
@@ -0,0 +1,18 @@
+from datasette import hookimpl
+from itsdangerous import BadSignature
+from http.cookies import SimpleCookie
+
+
+@hookimpl
+def actor_from_request(datasette, request):
+ cookies = SimpleCookie()
+ cookies.load(
+ dict(request.scope.get("headers") or []).get(b"cookie", b"").decode("utf-8")
+ )
+ if "ds_actor" not in cookies:
+ return None
+ ds_actor = cookies["ds_actor"].value
+ try:
+ return datasette.unsign(ds_actor, "actor")
+ except BadSignature:
+ return None
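
The verifying side of the round trip can be exercised in isolation too. A sketch mirroring the hook above, with an illustrative serializer standing in for ``datasette.unsign()``:

.. code-block:: python

    from http.cookies import SimpleCookie

    from itsdangerous import BadSignature, URLSafeSerializer

    serializer = URLSafeSerializer("demo-secret", "actor")

    def actor_from_cookie_header(raw_header):
        # Parse the Cookie header, then verify the ds_actor value;
        # a missing cookie and a bad signature both mean "no actor"
        cookies = SimpleCookie()
        cookies.load(raw_header)
        if "ds_actor" not in cookies:
            return None
        try:
            return serializer.loads(cookies["ds_actor"].value)
        except BadSignature:
            return None

    good = "ds_actor={}".format(serializer.dumps({"id": "test"}))
    assert actor_from_cookie_header(good) == {"id": "test"}
    assert actor_from_cookie_header(good + "tampered") is None
    assert actor_from_cookie_header("other=value") is None
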
From dfdbdf378aba9afb66666f66b78df2f2069d2595 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 31 May 2020 22:00:36 -0700
Subject: [PATCH 0280/2124] Added /-/permissions debug tool, closes #788
Also started the authentication.rst docs page, refs #786.
Part of authentication work, refs #699.
---
datasette/app.py | 32 +++++++++++--
datasette/default_permissions.py | 7 +++
datasette/plugins.py | 1 +
datasette/templates/permissions_debug.html | 55 ++++++++++++++++++++++
datasette/views/special.py | 18 +++++++
docs/authentication.rst | 18 +++++++
docs/index.rst | 1 +
tests/test_auth.py | 23 +++++++++
8 files changed, 152 insertions(+), 3 deletions(-)
create mode 100644 datasette/default_permissions.py
create mode 100644 datasette/templates/permissions_debug.html
create mode 100644 docs/authentication.rst
diff --git a/datasette/app.py b/datasette/app.py
index 6b39ce12..b8a5e23d 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -1,5 +1,6 @@
import asyncio
import collections
+import datetime
import hashlib
import itertools
import json
@@ -24,7 +25,12 @@ import uvicorn
from .views.base import DatasetteError, ureg, AsgiRouter
from .views.database import DatabaseDownload, DatabaseView
from .views.index import IndexView
-from .views.special import JsonDataView, PatternPortfolioView, AuthTokenView
+from .views.special import (
+ JsonDataView,
+ PatternPortfolioView,
+ AuthTokenView,
+ PermissionsDebugView,
+)
from .views.table import RowView, TableView
from .renderer import json_renderer
from .database import Database, QueryInterrupted
@@ -283,6 +289,7 @@ class Datasette:
pm.hook.prepare_jinja2_environment(env=self.jinja_env)
self._register_renderers()
+ self.permission_checks = collections.deque(maxlen=30)
self._root_token = os.urandom(32).hex()
def sign(self, value, namespace="default"):
@@ -420,6 +427,7 @@ class Datasette:
self, actor, action, resource_type=None, resource_identifier=None, default=False
):
"Check permissions using the permissions_allowed plugin hook"
+ result = None
for check in pm.hook.permission_allowed(
datasette=self,
actor=actor,
@@ -432,8 +440,23 @@ class Datasette:
if asyncio.iscoroutine(check):
check = await check
if check is not None:
- return check
- return default
+ result = check
+ used_default = False
+ if result is None:
+ result = default
+ used_default = True
+ self.permission_checks.append(
+ {
+ "when": datetime.datetime.utcnow().isoformat(),
+ "actor": actor,
+ "action": action,
+ "resource_type": resource_type,
+ "resource_identifier": resource_identifier,
+ "used_default": used_default,
+ "result": result,
+ }
+ )
+ return result
async def execute(
self,
@@ -782,6 +805,9 @@ class Datasette:
add_route(
AuthTokenView.as_asgi(self), r"/-/auth-token$",
)
+ add_route(
+ PermissionsDebugView.as_asgi(self), r"/-/permissions$",
+ )
add_route(
PatternPortfolioView.as_asgi(self), r"/-/patterns$",
)
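
Every ``permission_allowed()`` call is now recorded in a bounded ``collections.deque``, which is what the new ``/-/permissions`` page renders. The recording pattern in isolation - field names copied from the patch, values illustrative:

.. code-block:: python

    import collections
    import datetime

    permission_checks = collections.deque(maxlen=30)

    def record_check(actor, action, resource_type, resource_identifier, result, used_default):
        # maxlen=30 keeps only the most recent thirty checks -
        # older entries fall off the left end automatically
        permission_checks.append(
            {
                "when": datetime.datetime.utcnow().isoformat(),
                "actor": actor,
                "action": action,
                "resource_type": resource_type,
                "resource_identifier": resource_identifier,
                "used_default": used_default,
                "result": result,
            }
        )

    for _ in range(40):
        record_check({"id": "root"}, "view-instance", None, None, True, False)

    assert len(permission_checks) == 30  # bounded by maxlen
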
diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py
new file mode 100644
index 00000000..0b0d17f9
--- /dev/null
+++ b/datasette/default_permissions.py
@@ -0,0 +1,7 @@
+from datasette import hookimpl
+
+
+@hookimpl
+def permission_allowed(actor, action, resource_type, resource_identifier):
+ if actor and actor.get("id") == "root" and action == "permissions-debug":
+ return True
diff --git a/datasette/plugins.py b/datasette/plugins.py
index 487fce4d..26d4fd63 100644
--- a/datasette/plugins.py
+++ b/datasette/plugins.py
@@ -10,6 +10,7 @@ DEFAULT_PLUGINS = (
"datasette.facets",
"datasette.sql_functions",
"datasette.actor_auth_cookie",
+ "datasette.default_permissions",
)
pm = pluggy.PluginManager("datasette")
diff --git a/datasette/templates/permissions_debug.html b/datasette/templates/permissions_debug.html
new file mode 100644
index 00000000..fb098c5c
--- /dev/null
+++ b/datasette/templates/permissions_debug.html
@@ -0,0 +1,55 @@
+{% extends "base.html" %}
+
+{% block title %}Debug permissions{% endblock %}
+
+{% block extra_head %}
+
+{% endblock %}
+
+{% block nav %}
+
{% endif %}
diff --git a/datasette/views/database.py b/datasette/views/database.py
index 30817106..824cb632 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -37,10 +37,19 @@ class DatabaseView(DataView):
db = self.ds.databases[database]
table_counts = await db.table_counts(5)
- views = await db.view_names()
hidden_table_names = set(await db.hidden_table_names())
all_foreign_keys = await db.get_all_foreign_keys()
+ views = []
+ for view_name in await db.view_names():
+ visible, private = await check_visibility(
+ self.ds, request.actor, "view-table", "table", (database, view_name),
+ )
+ if visible:
+ views.append(
+ {"name": view_name, "private": private,}
+ )
+
tables = []
for table in table_counts:
visible, private = await check_visibility(
diff --git a/tests/test_permissions.py b/tests/test_permissions.py
index 55b2d673..5c338e04 100644
--- a/tests/test_permissions.py
+++ b/tests/test_permissions.py
@@ -107,19 +107,27 @@ def test_table_list_respects_view_table():
metadata={
"databases": {
"fixtures": {
- "tables": {"compound_three_primary_keys": {"allow": {"id": "root"}}}
+ "tables": {
+ "compound_three_primary_keys": {"allow": {"id": "root"}},
+ # And a SQL view too:
+ "paginated_view": {"allow": {"id": "root"}},
+ }
}
}
}
) as client:
- html_fragment = 'compound_three_primary_keys 🔒'
+ html_fragments = [
+ ">compound_three_primary_keys 🔒",
+ ">paginated_view 🔒",
+ ]
anon_response = client.get("/fixtures")
- assert html_fragment not in anon_response.text
- assert '"/fixtures/compound_three_primary_keys"' not in anon_response.text
+ for html_fragment in html_fragments:
+ assert html_fragment not in anon_response.text
auth_response = client.get(
"/fixtures", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")}
)
- assert html_fragment in auth_response.text
+ for html_fragment in html_fragments:
+ assert html_fragment in auth_response.text
@pytest.mark.parametrize(
From 5598c5de011db95396b65b5c8c251cbe6884d6ae Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 8 Jun 2020 11:34:14 -0700
Subject: [PATCH 0334/2124] Database list on index page respects table/view
permissions, refs #811
---
datasette/templates/index.html | 2 +-
datasette/views/index.py | 25 ++++++++++++++++++++-----
tests/test_permissions.py | 31 +++++++++++++++++++++++++++++++
3 files changed, 52 insertions(+), 6 deletions(-)
diff --git a/datasette/templates/index.html b/datasette/templates/index.html
index 5a8dccae..c1adfc59 100644
--- a/datasette/templates/index.html
+++ b/datasette/templates/index.html
@@ -22,7 +22,7 @@
{% endif %}
{% for table in database.tables_and_views_truncated %}{{ table.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}
+ }}"{% if table.count %} title="{{ table.count }} rows"{% endif %}>{{ table.name }}{% if table.private %} 🔒{% endif %}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}
{% endfor %}
{% endblock %}
diff --git a/datasette/views/index.py b/datasette/views/index.py
index 59d3e042..a3e8388c 100644
--- a/datasette/views/index.py
+++ b/datasette/views/index.py
@@ -25,14 +25,22 @@ class IndexView(BaseView):
await self.check_permission(request, "view-instance")
databases = []
for name, db in self.ds.databases.items():
- visible, private = await check_visibility(
+ visible, database_private = await check_visibility(
self.ds, request.actor, "view-database", "database", name,
)
if not visible:
continue
table_names = await db.table_names()
hidden_table_names = set(await db.hidden_table_names())
- views = await db.view_names()
+
+ views = []
+ for view_name in await db.view_names():
+ visible, private = await check_visibility(
+ self.ds, request.actor, "view-table", "table", (name, view_name),
+ )
+ if visible:
+ views.append({"name": view_name, "private": private})
+
# Perform counts only for immutable or DBS with <= COUNT_TABLE_LIMIT tables
table_counts = {}
if not db.is_mutable or db.size < COUNT_DB_SIZE_LIMIT:
@@ -40,8 +48,14 @@ class IndexView(BaseView):
# If any of these are None it means at least one timed out - ignore them all
if any(v is None for v in table_counts.values()):
table_counts = {}
+
tables = {}
for table in table_names:
+ visible, private = await check_visibility(
+ self.ds, request.actor, "view-table", "table", (name, table),
+ )
+ if not visible:
+ continue
table_columns = await db.table_columns(table)
tables[table] = {
"name": table,
@@ -51,6 +65,7 @@ class IndexView(BaseView):
"hidden": table in hidden_table_names,
"fts_table": await db.fts_table(table),
"num_relationships_for_sorting": 0,
+ "private": private,
}
if request.args.get("_sort") == "relationships" or not table_counts:
@@ -78,8 +93,8 @@ class IndexView(BaseView):
# Only add views if this is less than TRUNCATE_AT
if len(tables_and_views_truncated) < TRUNCATE_AT:
num_views_to_add = TRUNCATE_AT - len(tables_and_views_truncated)
- for view_name in views[:num_views_to_add]:
- tables_and_views_truncated.append({"name": view_name})
+ for view in views[:num_views_to_add]:
+ tables_and_views_truncated.append(view)
databases.append(
{
@@ -100,7 +115,7 @@ class IndexView(BaseView):
),
"hidden_tables_count": len(hidden_tables),
"views_count": len(views),
- "private": private,
+ "private": database_private,
}
)
diff --git a/tests/test_permissions.py b/tests/test_permissions.py
index 5c338e04..475f93dd 100644
--- a/tests/test_permissions.py
+++ b/tests/test_permissions.py
@@ -74,6 +74,37 @@ def test_database_list_respects_view_database():
assert 'fixtures 🔒' in auth_response.text
+def test_database_list_respects_view_table():
+ with make_app_client(
+ metadata={
+ "databases": {
+ "data": {
+ "tables": {
+ "names": {"allow": {"id": "root"}},
+ "v": {"allow": {"id": "root"}},
+ }
+ }
+ }
+ },
+ extra_databases={
+ "data.db": "create table names (name text); create view v as select * from names"
+ },
+ ) as client:
+ html_fragments = [
+ ">names 🔒",
+ ">v 🔒",
+ ]
+ anon_response_text = client.get("/").text
+ assert "0 rows in 0 tables" in anon_response_text
+ for html_fragment in html_fragments:
+ assert html_fragment not in anon_response_text
+ auth_response_text = client.get(
+ "/", cookies={"ds_actor": client.ds.sign({"id": "root"}, "actor")},
+ ).text
+ for html_fragment in html_fragments:
+ assert html_fragment in auth_response_text
+
+
@pytest.mark.parametrize(
"allow,expected_anon,expected_auth",
[(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),],
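
This patch and the previous one both lean on ``check_visibility()``, which returns a ``(visible, private)`` pair: ``visible`` means the current actor can see the item, ``private`` means anonymous users cannot. A sketch of consuming it the way the index view does - ``datasette``, ``db``, ``database_name`` and ``actor`` are assumed inputs:

.. code-block:: python

    from datasette.utils import check_visibility

    async def visible_views(datasette, db, database_name, actor):
        views = []
        for view_name in await db.view_names():
            visible, private = await check_visibility(
                datasette, actor, "view-table", "table", (database_name, view_name)
            )
            if visible:
                # private=True is what renders the padlock next to the name
                views.append({"name": view_name, "private": private})
        return views
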
From c9f1ec616e5a8c83f554baaedd38663569fb9b91 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 8 Jun 2020 11:51:03 -0700
Subject: [PATCH 0335/2124] Removed resource_type from permissions system,
closes #817
Refs #811, #699
---
datasette/app.py | 4 +---
datasette/default_permissions.py | 5 +---
datasette/hookspecs.py | 2 +-
datasette/templates/permissions_debug.html | 4 ++--
datasette/utils/__init__.py | 16 +++----------
datasette/views/base.py | 5 +---
datasette/views/database.py | 28 ++++++++--------------
datasette/views/index.py | 6 ++---
datasette/views/table.py | 10 ++++----
docs/authentication.rst | 19 ++-------------
docs/internals.rst | 7 ++----
docs/plugins.rst | 9 +++----
tests/conftest.py | 4 ++--
tests/fixtures.py | 9 +++----
14 files changed, 39 insertions(+), 89 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 87e542c1..c12e0af0 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -465,7 +465,7 @@ class Datasette:
return []
async def permission_allowed(
- self, actor, action, resource_type=None, resource_identifier=None, default=False
+ self, actor, action, resource_identifier=None, default=False
):
"Check permissions using the permissions_allowed plugin hook"
result = None
@@ -473,7 +473,6 @@ class Datasette:
datasette=self,
actor=actor,
action=action,
- resource_type=resource_type,
resource_identifier=resource_identifier,
):
if callable(check):
@@ -491,7 +490,6 @@ class Datasette:
"when": datetime.datetime.utcnow().isoformat(),
"actor": actor,
"action": action,
- "resource_type": resource_type,
"resource_identifier": resource_identifier,
"used_default": used_default,
"result": result,
diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py
index dd1770a3..d27704aa 100644
--- a/datasette/default_permissions.py
+++ b/datasette/default_permissions.py
@@ -3,7 +3,7 @@ from datasette.utils import actor_matches_allow
@hookimpl
-def permission_allowed(datasette, actor, action, resource_type, resource_identifier):
+def permission_allowed(datasette, actor, action, resource_identifier):
if action == "permissions-debug":
if actor and actor.get("id") == "root":
return True
@@ -12,13 +12,11 @@ def permission_allowed(datasette, actor, action, resource_type, resource_identif
if allow is not None:
return actor_matches_allow(actor, allow)
elif action == "view-database":
- assert resource_type == "database"
database_allow = datasette.metadata("allow", database=resource_identifier)
if database_allow is None:
return True
return actor_matches_allow(actor, database_allow)
elif action == "view-table":
- assert resource_type == "table"
database, table = resource_identifier
tables = datasette.metadata("tables", database=database) or {}
table_allow = (tables.get(table) or {}).get("allow")
@@ -27,7 +25,6 @@ def permission_allowed(datasette, actor, action, resource_type, resource_identif
return actor_matches_allow(actor, table_allow)
elif action == "view-query":
# Check if this query has a "allow" block in metadata
- assert resource_type == "query"
database, query_name = resource_identifier
queries_metadata = datasette.metadata("queries", database=database)
assert query_name in queries_metadata
diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py
index 71d06661..3c202553 100644
--- a/datasette/hookspecs.py
+++ b/datasette/hookspecs.py
@@ -66,5 +66,5 @@ def actor_from_request(datasette, request):
@hookspec
-def permission_allowed(datasette, actor, action, resource_type, resource_identifier):
+def permission_allowed(datasette, actor, action, resource_identifier):
"Check if actor is allowed to perfom this action - return True, False or None"
diff --git a/datasette/templates/permissions_debug.html b/datasette/templates/permissions_debug.html
index dda57dfa..7d3ee712 100644
--- a/datasette/templates/permissions_debug.html
+++ b/datasette/templates/permissions_debug.html
@@ -46,8 +46,8 @@
{% endif %}
{% endif %}
{% endfor %}
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 3d964049..257d1285 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -876,24 +876,14 @@ def actor_matches_allow(actor, allow):
return False
-async def check_visibility(
- datasette, actor, action, resource_type, resource_identifier, default=True
-):
+async def check_visibility(datasette, actor, action, resource_identifier, default=True):
"Returns (visible, private) - visible = can you see it, private = can others see it too"
visible = await datasette.permission_allowed(
- actor,
- action,
- resource_type=resource_type,
- resource_identifier=resource_identifier,
- default=default,
+ actor, action, resource_identifier=resource_identifier, default=default,
)
if not visible:
return (False, False)
private = not await datasette.permission_allowed(
- None,
- action,
- resource_type=resource_type,
- resource_identifier=resource_identifier,
- default=default,
+ None, action, resource_identifier=resource_identifier, default=default,
)
return visible, private
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 000d354b..2ca5e86a 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -64,13 +64,10 @@ class BaseView(AsgiView):
response.body = b""
return response
- async def check_permission(
- self, request, action, resource_type=None, resource_identifier=None
- ):
+ async def check_permission(self, request, action, resource_identifier=None):
ok = await self.ds.permission_allowed(
request.actor,
action,
- resource_type=resource_type,
resource_identifier=resource_identifier,
default=True,
)
diff --git a/datasette/views/database.py b/datasette/views/database.py
index 824cb632..d562ecb1 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -21,7 +21,7 @@ class DatabaseView(DataView):
async def data(self, request, database, hash, default_labels=False, _size=None):
await self.check_permission(request, "view-instance")
- await self.check_permission(request, "view-database", "database", database)
+ await self.check_permission(request, "view-database", database)
metadata = (self.ds.metadata("databases") or {}).get(database, {})
self.ds.update_with_inherited_metadata(metadata)
@@ -43,7 +43,7 @@ class DatabaseView(DataView):
views = []
for view_name in await db.view_names():
visible, private = await check_visibility(
- self.ds, request.actor, "view-table", "table", (database, view_name),
+ self.ds, request.actor, "view-table", (database, view_name),
)
if visible:
views.append(
@@ -53,7 +53,7 @@ class DatabaseView(DataView):
tables = []
for table in table_counts:
visible, private = await check_visibility(
- self.ds, request.actor, "view-table", "table", (database, table),
+ self.ds, request.actor, "view-table", (database, table),
)
if not visible:
continue
@@ -75,11 +75,7 @@ class DatabaseView(DataView):
canned_queries = []
for query in self.ds.get_canned_queries(database):
visible, private = await check_visibility(
- self.ds,
- request.actor,
- "view-query",
- "query",
- (database, query["name"]),
+ self.ds, request.actor, "view-query", (database, query["name"]),
)
if visible:
canned_queries.append(dict(query, private=private))
@@ -112,10 +108,8 @@ class DatabaseDownload(DataView):
async def view_get(self, request, database, hash, correct_hash_present, **kwargs):
await self.check_permission(request, "view-instance")
- await self.check_permission(request, "view-database", "database", database)
- await self.check_permission(
- request, "view-database-download", "database", database
- )
+ await self.check_permission(request, "view-database", database)
+ await self.check_permission(request, "view-database-download", database)
if database not in self.ds.databases:
raise DatasetteError("Invalid database", status=404)
db = self.ds.databases[database]
@@ -155,17 +149,15 @@ class QueryView(DataView):
# Respect canned query permissions
await self.check_permission(request, "view-instance")
- await self.check_permission(request, "view-database", "database", database)
+ await self.check_permission(request, "view-database", database)
private = False
if canned_query:
- await self.check_permission(
- request, "view-query", "query", (database, canned_query)
- )
+ await self.check_permission(request, "view-query", (database, canned_query))
private = not await self.ds.permission_allowed(
- None, "view-query", "query", (database, canned_query), default=True
+ None, "view-query", (database, canned_query), default=True
)
else:
- await self.check_permission(request, "execute-sql", "database", database)
+ await self.check_permission(request, "execute-sql", database)
# Extract any :named parameters
named_parameters = named_parameters or self.re_named_parameter.findall(sql)
named_parameter_values = {
diff --git a/datasette/views/index.py b/datasette/views/index.py
index a3e8388c..b2706251 100644
--- a/datasette/views/index.py
+++ b/datasette/views/index.py
@@ -26,7 +26,7 @@ class IndexView(BaseView):
databases = []
for name, db in self.ds.databases.items():
visible, database_private = await check_visibility(
- self.ds, request.actor, "view-database", "database", name,
+ self.ds, request.actor, "view-database", name,
)
if not visible:
continue
@@ -36,7 +36,7 @@ class IndexView(BaseView):
views = []
for view_name in await db.view_names():
visible, private = await check_visibility(
- self.ds, request.actor, "view-table", "table", (name, view_name),
+ self.ds, request.actor, "view-table", (name, view_name),
)
if visible:
views.append({"name": view_name, "private": private})
@@ -52,7 +52,7 @@ class IndexView(BaseView):
tables = {}
for table in table_names:
visible, private = await check_visibility(
- self.ds, request.actor, "view-table", "table", (name, table),
+ self.ds, request.actor, "view-table", (name, table),
)
if not visible:
continue
diff --git a/datasette/views/table.py b/datasette/views/table.py
index cd952568..4cec0cda 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -268,11 +268,11 @@ class TableView(RowTableShared):
raise NotFound("Table not found: {}".format(table))
await self.check_permission(request, "view-instance")
- await self.check_permission(request, "view-database", "database", database)
- await self.check_permission(request, "view-table", "table", (database, table))
+ await self.check_permission(request, "view-database", database)
+ await self.check_permission(request, "view-table", (database, table))
private = not await self.ds.permission_allowed(
- None, "view-table", "table", (database, table), default=True
+ None, "view-table", (database, table), default=True
)
pks = await db.primary_keys(table)
@@ -854,8 +854,8 @@ class RowView(RowTableShared):
async def data(self, request, database, hash, table, pk_path, default_labels=False):
pk_values = urlsafe_components(pk_path)
await self.check_permission(request, "view-instance")
- await self.check_permission(request, "view-database", "database", database)
- await self.check_permission(request, "view-table", "table", (database, table))
+ await self.check_permission(request, "view-database", database)
+ await self.check_permission(request, "view-table", (database, table))
db = self.ds.databases[database]
pks = await db.primary_keys(table)
use_rowid = not pks
diff --git a/docs/authentication.rst b/docs/authentication.rst
index bda6a0b7..67112969 100644
--- a/docs/authentication.rst
+++ b/docs/authentication.rst
@@ -52,7 +52,7 @@ The URL on the first line includes a one-use token which can be used to sign in
Permissions
===========
-Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. This method is also used by Datasette core code itself, which allows plugins to help make decisions on which actions are allowed by implementing the :ref:`permission_allowed(...) ` plugin hook.
+Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. This method is also used by Datasette core code itself, which allows plugins to help make decisions on which actions are allowed by implementing the :ref:`plugin_permission_allowed` plugin hook.
.. _authentication_permissions_canned_queries:
@@ -159,7 +159,7 @@ This is designed to help administrators and plugin authors understand exactly ho
Permissions
===========
-This section lists all of the permission checks that are carried out by Datasette core, along with their ``resource_type`` and ``resource_identifier`` if those are passed.
+This section lists all of the permission checks that are carried out by Datasette core, along with the ``resource_identifier`` if it was passed.
.. _permissions_view_instance:
@@ -176,9 +176,6 @@ view-database
Actor is allowed to view a database page, e.g. https://latest.datasette.io/fixtures
-``resource_type`` - string
- "database"
-
``resource_identifier`` - string
The name of the database
@@ -189,9 +186,6 @@ view-database-download
Actor is allowed to download a database, e.g. https://latest.datasette.io/fixtures.db
-``resource_type`` - string
- "database"
-
``resource_identifier`` - string
The name of the database
@@ -202,9 +196,6 @@ view-table
Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.io/fixtures/complex_foreign_keys
-``resource_type`` - string
- "table" - even if this is actually a SQL view
-
``resource_identifier`` - tuple: (string, string)
The name of the database, then the name of the table
@@ -215,9 +206,6 @@ view-query
Actor is allowed to view a :ref:`canned query ` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size
-``resource_type`` - string
- "query"
-
``resource_identifier`` - string
The name of the canned query
@@ -228,9 +216,6 @@ execute-sql
Actor is allowed to run arbitrary SQL queries against a specific database, e.g. https://latest.datasette.io/fixtures?sql=select+100
-``resource_type`` - string
- "database"
-
``resource_identifier`` - string
The name of the database
diff --git a/docs/internals.rst b/docs/internals.rst
index 7498f017..1d61b6cb 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -121,8 +121,8 @@ Renders a `Jinja template `__ usin
.. _datasette_permission_allowed:
-await .permission_allowed(actor, action, resource_type=None, resource_identifier=None, default=False)
------------------------------------------------------------------------------------------------------
+await .permission_allowed(actor, action, resource_identifier=None, default=False)
+---------------------------------------------------------------------------------
``actor`` - dictionary
The authenticated actor. This is usually ``request.actor``.
@@ -130,9 +130,6 @@ await .permission_allowed(actor, action, resource_type=None, resource_identifier
``action`` - string
The name of the action that is being permission checked.
-``resource_type`` - string, optional
- The type of resource being checked, e.g. ``"table"``.
-
``resource_identifier`` - string, optional
The resource identifier, e.g. the name of the table.
diff --git a/docs/plugins.rst b/docs/plugins.rst
index ecc7cbf1..118fab84 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -1005,8 +1005,8 @@ Instead of returning a dictionary, this function can return an awaitable functio
.. _plugin_permission_allowed:
-permission_allowed(datasette, actor, action, resource_type, resource_identifier)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+permission_allowed(datasette, actor, action, resource_identifier)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
``datasette`` - :ref:`internals_datasette`
You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.
@@ -1017,10 +1017,7 @@ permission_allowed(datasette, actor, action, resource_type, resource_identifier)
``action`` - string
The action to be performed, e.g. ``"edit-table"``.
-``resource_type`` - string
- The type of resource being acted on, e.g. ``"table"``.
-
-``resource`` - string
+``resource_identifier`` - string
An identifier for the individual resource, e.g. the name of the table.
Called to check that an actor has permission to perform an action on a resource. Can return ``True`` if the action is allowed, ``False`` if the action is not allowed or ``None`` if the plugin does not have an opinion one way or the other.
diff --git a/tests/conftest.py b/tests/conftest.py
index 1921ae3a..7f1e9387 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -70,8 +70,8 @@ def check_permission_actions_are_documented():
action = kwargs.get("action").replace("-", "_")
assert (
action in documented_permission_actions
- ), "Undocumented permission action: {}, resource_type: {}, resource_identifier: {}".format(
- action, kwargs["resource_type"], kwargs["resource_identifier"]
+ ), "Undocumented permission action: {}, resource_identifier: {}".format(
+ action, kwargs["resource_identifier"]
)
pm.add_hookcall_monitoring(
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 2ac73fb1..8210d34f 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -857,24 +857,21 @@ if __name__ == "__main__":
def assert_permissions_checked(datasette, actions):
- # actions is a list of "action" or (action, resource_type, resource_identifier) tuples
+ # actions is a list of "action" or (action, resource_identifier) tuples
for action in actions:
if isinstance(action, str):
- resource_type = None
resource_identifier = None
else:
- action, resource_type, resource_identifier = action
+ action, resource_identifier = action
assert [
pc
for pc in datasette._permission_checks
if pc["action"] == action
- and pc["resource_type"] == resource_type
and pc["resource_identifier"] == resource_identifier
- ], """Missing expected permission check: action={}, resource_type={}, resource_identifier={}
+ ], """Missing expected permission check: action={}, resource_identifier={}
Permission checks seen: {}
""".format(
action,
- resource_type,
resource_identifier,
json.dumps(list(datasette._permission_checks), indent=4),
)
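
After this change a hook implementation receives just the action plus a single identifier. A sketch of a plugin written against the simplified signature - the table name is invented for illustration:

.. code-block:: python

    from datasette import hookimpl

    @hookimpl
    def permission_allowed(actor, action, resource_identifier):
        # view-table receives a (database, table) tuple as its identifier
        if action == "view-table":
            database, table = resource_identifier
            if table == "secret_table":
                return bool(actor and actor.get("id") == "root")
        return None  # no opinion on anything else
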
From 799c5d53570d773203527f19530cf772dc2eeb24 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 8 Jun 2020 11:59:11 -0700
Subject: [PATCH 0336/2124] Renamed resource_identifier to resource, refs #817
---
datasette/app.py | 11 +++--------
datasette/default_permissions.py | 8 ++++----
datasette/hookspecs.py | 2 +-
datasette/templates/permissions_debug.html | 4 ++--
datasette/utils/__init__.py | 6 +++---
datasette/views/base.py | 7 ++-----
datasette/views/database.py | 2 +-
docs/authentication.rst | 12 ++++++------
docs/internals.rst | 10 ++++++----
docs/plugins.rst | 6 ++++--
tests/conftest.py | 4 ++--
tests/fixtures.py | 15 ++++++---------
12 files changed, 40 insertions(+), 47 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index c12e0af0..2f89d17c 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -464,16 +464,11 @@ class Datasette:
else:
return []
- async def permission_allowed(
- self, actor, action, resource_identifier=None, default=False
- ):
+ async def permission_allowed(self, actor, action, resource=None, default=False):
"Check permissions using the permissions_allowed plugin hook"
result = None
for check in pm.hook.permission_allowed(
- datasette=self,
- actor=actor,
- action=action,
- resource_identifier=resource_identifier,
+ datasette=self, actor=actor, action=action, resource=resource,
):
if callable(check):
check = check()
@@ -490,7 +485,7 @@ class Datasette:
"when": datetime.datetime.utcnow().isoformat(),
"actor": actor,
"action": action,
- "resource_identifier": resource_identifier,
+ "resource": resource,
"used_default": used_default,
"result": result,
}
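Each check recorded here lands in ``datasette._permission_checks``, so after this rename a single logged entry would presumably look something like the following (illustrative values only):
.. code-block:: python

    {
        "when": "2020-06-08T19:00:00",
        "actor": {"id": "root"},
        "action": "view-database",
        "resource": "fixtures",
        "used_default": True,
        "result": True,
    }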
diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py
index d27704aa..e989c0fa 100644
--- a/datasette/default_permissions.py
+++ b/datasette/default_permissions.py
@@ -3,7 +3,7 @@ from datasette.utils import actor_matches_allow
@hookimpl
-def permission_allowed(datasette, actor, action, resource_identifier):
+def permission_allowed(datasette, actor, action, resource):
if action == "permissions-debug":
if actor and actor.get("id") == "root":
return True
@@ -12,12 +12,12 @@ def permission_allowed(datasette, actor, action, resource_identifier):
if allow is not None:
return actor_matches_allow(actor, allow)
elif action == "view-database":
- database_allow = datasette.metadata("allow", database=resource_identifier)
+ database_allow = datasette.metadata("allow", database=resource)
if database_allow is None:
return True
return actor_matches_allow(actor, database_allow)
elif action == "view-table":
- database, table = resource_identifier
+ database, table = resource
tables = datasette.metadata("tables", database=database) or {}
table_allow = (tables.get(table) or {}).get("allow")
if table_allow is None:
@@ -25,7 +25,7 @@ def permission_allowed(datasette, actor, action, resource_identifier):
return actor_matches_allow(actor, table_allow)
elif action == "view-query":
# Check if this query has a "allow" block in metadata
- database, query_name = resource_identifier
+ database, query_name = resource
queries_metadata = datasette.metadata("queries", database=database)
assert query_name in queries_metadata
if isinstance(queries_metadata[query_name], str):
diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py
index 3c202553..d5fd232f 100644
--- a/datasette/hookspecs.py
+++ b/datasette/hookspecs.py
@@ -66,5 +66,5 @@ def actor_from_request(datasette, request):
@hookspec
-def permission_allowed(datasette, actor, action, resource_identifier):
+def permission_allowed(datasette, actor, action, resource):
"Check if actor is allowed to perfom this action - return True, False or None"
diff --git a/datasette/templates/permissions_debug.html b/datasette/templates/permissions_debug.html
index 7d3ee712..d898ea8c 100644
--- a/datasette/templates/permissions_debug.html
+++ b/datasette/templates/permissions_debug.html
@@ -46,8 +46,8 @@
{% endif %}
Actor: {{ check.actor|tojson }}
- {% if check.resource_identifier %}
- Resource: {{ check.resource_identifier }}
+ {% if check.resource %}
+ Resource: {{ check.resource }}
{% endif %}
{% endfor %}
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 257d1285..7c1f34e0 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -876,14 +876,14 @@ def actor_matches_allow(actor, allow):
return False
-async def check_visibility(datasette, actor, action, resource_identifier, default=True):
+async def check_visibility(datasette, actor, action, resource, default=True):
"Returns (visible, private) - visible = can you see it, private = can others see it too"
visible = await datasette.permission_allowed(
- actor, action, resource_identifier=resource_identifier, default=default,
+ actor, action, resource=resource, default=default,
)
if not visible:
return (False, False)
private = not await datasette.permission_allowed(
- None, action, resource_identifier=resource_identifier, default=default,
+ None, action, resource=resource, default=default,
)
return visible, private
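A sketch of a ``check_visibility()`` call site: a page is "private" when the current actor can see it but an anonymous (``None``) actor cannot. The database and table names here are assumptions:
.. code-block:: python

    visible, private = await check_visibility(
        datasette, request.actor, "view-table", ("fixtures", "facetable")
    )
    if not visible:
        # Forbidden as raised elsewhere in datasette.views.base
        raise Forbidden("view-table")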
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 2ca5e86a..f327c6cd 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -64,12 +64,9 @@ class BaseView(AsgiView):
response.body = b""
return response
- async def check_permission(self, request, action, resource_identifier=None):
+ async def check_permission(self, request, action, resource=None):
ok = await self.ds.permission_allowed(
- request.actor,
- action,
- resource_identifier=resource_identifier,
- default=True,
+ request.actor, action, resource=resource, default=True,
)
if not ok:
raise Forbidden(action)
diff --git a/datasette/views/database.py b/datasette/views/database.py
index d562ecb1..e1b29c27 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -88,7 +88,7 @@ class DatabaseView(DataView):
"views": views,
"queries": canned_queries,
"private": not await self.ds.permission_allowed(
- None, "view-database", "database", database
+ None, "view-database", database
),
},
{
diff --git a/docs/authentication.rst b/docs/authentication.rst
index 67112969..f5209dfc 100644
--- a/docs/authentication.rst
+++ b/docs/authentication.rst
@@ -159,7 +159,7 @@ This is designed to help administrators and plugin authors understand exactly ho
Permissions
===========
-This section lists all of the permission checks that are carried out by Datasette core, along with the ``resource_identifier`` if it was passed.
+This section lists all of the permission checks that are carried out by Datasette core, along with the ``resource`` if it was passed.
.. _permissions_view_instance:
@@ -176,7 +176,7 @@ view-database
Actor is allowed to view a database page, e.g. https://latest.datasette.io/fixtures
-``resource_identifier`` - string
+``resource`` - string
The name of the database
.. _permissions_view_database_download:
@@ -186,7 +186,7 @@ view-database-download
Actor is allowed to download a database, e.g. https://latest.datasette.io/fixtures.db
-``resource_identifier`` - string
+``resource`` - string
The name of the database
.. _permissions_view_table:
@@ -196,7 +196,7 @@ view-table
Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.io/fixtures/complex_foreign_keys
-``resource_identifier`` - tuple: (string, string)
+``resource`` - tuple: (string, string)
The name of the database, then the name of the table
.. _permissions_view_query:
@@ -206,7 +206,7 @@ view-query
Actor is allowed to view a :ref:`canned query ` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size
-``resource_identifier`` - string
+``resource`` - string
The name of the canned query
.. _permissions_execute_sql:
@@ -216,7 +216,7 @@ execute-sql
Actor is allowed to run arbitrary SQL queries against a specific database, e.g. https://latest.datasette.io/fixtures?sql=select+100
-``resource_identifier`` - string
+``resource`` - string
The name of the database
.. _permissions_permissions_debug:
diff --git a/docs/internals.rst b/docs/internals.rst
index 1d61b6cb..83dbd897 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -121,8 +121,8 @@ Renders a `Jinja template `__ usin
.. _datasette_permission_allowed:
-await .permission_allowed(actor, action, resource_identifier=None, default=False)
----------------------------------------------------------------------------------
+await .permission_allowed(actor, action, resource=None, default=False)
+----------------------------------------------------------------------
``actor`` - dictionary
The authenticated actor. This is usually ``request.actor``.
@@ -130,13 +130,15 @@ await .permission_allowed(actor, action, resource_identifier=None, default=False
``action`` - string
The name of the action that is being permission checked.
-``resource_identifier`` - string, optional
- The resource identifier, e.g. the name of the table.
+``resource`` - string, optional
+ The resource, e.g. the name of the table. Only some permissions apply to a resource.
Check if the given actor has permission to perform the given action on the given resource. This uses plugins that implement the :ref:`plugin_permission_allowed` plugin hook to decide if the action is allowed or not.
If none of the plugins express an opinion, the return value will be the ``default`` argument. This defaults to deny, but you can pass ``default=True`` to default to allow instead.
+See :ref:`permissions` for a full list of permissions included in Datasette core.
+
.. _datasette_get_database:
.get_database(name)
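Stepping back from the hunk: a hedged sketch of calling the renamed ``permission_allowed()`` method from plugin code; the actor, database and table names are assumptions:
.. code-block:: python

    allowed = await datasette.permission_allowed(
        request.actor,
        "view-table",
        resource=("fixtures", "facetable"),
        default=True,  # allow if no plugin expresses an opinion
    )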
diff --git a/docs/plugins.rst b/docs/plugins.rst
index 118fab84..56041d0c 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -1005,7 +1005,7 @@ Instead of returning a dictionary, this function can return an awaitable functio
.. _plugin_permission_allowed:
-permission_allowed(datasette, actor, action, resource_identifier)
+permission_allowed(datasette, actor, action, resource)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
``datasette`` - :ref:`internals_datasette`
@@ -1017,7 +1017,9 @@ permission_allowed(datasette, actor, action, resource_identifier)
``action`` - string
The action to be performed, e.g. ``"edit-table"``.
-``resource_identifier`` - string
+``resource`` - string or None
An identifier for the individual resource, e.g. the name of the table.
Called to check that an actor has permission to perform an action on a resource. Can return ``True`` if the action is allowed, ``False`` if the action is not allowed or ``None`` if the plugin does not have an opinion one way or the other.
+
+See :ref:`permissions` for a full list of permissions included in Datasette core.
diff --git a/tests/conftest.py b/tests/conftest.py
index 7f1e9387..320aa45b 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -70,8 +70,8 @@ def check_permission_actions_are_documented():
action = kwargs.get("action").replace("-", "_")
assert (
action in documented_permission_actions
- ), "Undocumented permission action: {}, resource_identifier: {}".format(
- action, kwargs["resource_identifier"]
+ ), "Undocumented permission action: {}, resource: {}".format(
+ action, kwargs["resource"]
)
pm.add_hookcall_monitoring(
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 8210d34f..e9175b57 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -857,21 +857,18 @@ if __name__ == "__main__":
def assert_permissions_checked(datasette, actions):
- # actions is a list of "action" or (action, resource_identifier) tuples
+ # actions is a list of "action" or (action, resource) tuples
for action in actions:
if isinstance(action, str):
- resource_identifier = None
+ resource = None
else:
- action, resource_identifier = action
+ action, resource = action
assert [
pc
for pc in datasette._permission_checks
- if pc["action"] == action
- and pc["resource_identifier"] == resource_identifier
- ], """Missing expected permission check: action={}, resource_identifier={}
+ if pc["action"] == action and pc["resource"] == resource
+ ], """Missing expected permission check: action={}, resource={}
Permission checks seen: {}
""".format(
- action,
- resource_identifier,
- json.dumps(list(datasette._permission_checks), indent=4),
+ action, resource, json.dumps(list(datasette._permission_checks), indent=4),
)
From 040fc0546f1ad602125ecdc27d9d013d830aa808 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 8 Jun 2020 12:02:56 -0700
Subject: [PATCH 0337/2124] Updated tests, refs #817
---
tests/test_permissions.py | 20 ++++++++++----------
1 file changed, 10 insertions(+), 10 deletions(-)
diff --git a/tests/test_permissions.py b/tests/test_permissions.py
index 475f93dd..90ba1494 100644
--- a/tests/test_permissions.py
+++ b/tests/test_permissions.py
@@ -210,41 +210,41 @@ def test_query_list_respects_view_query():
"path,permissions",
[
("/", ["view-instance"]),
- ("/fixtures", ["view-instance", ("view-database", "database", "fixtures")]),
+ ("/fixtures", ["view-instance", ("view-database", "fixtures")]),
(
"/fixtures/facetable/1",
- ["view-instance", ("view-table", "table", ("fixtures", "facetable"))],
+ ["view-instance", ("view-table", ("fixtures", "facetable"))],
),
(
"/fixtures/simple_primary_key",
[
"view-instance",
- ("view-database", "database", "fixtures"),
- ("view-table", "table", ("fixtures", "simple_primary_key")),
+ ("view-database", "fixtures"),
+ ("view-table", ("fixtures", "simple_primary_key")),
],
),
(
"/fixtures?sql=select+1",
[
"view-instance",
- ("view-database", "database", "fixtures"),
- ("execute-sql", "database", "fixtures"),
+ ("view-database", "fixtures"),
+ ("execute-sql", "fixtures"),
],
),
(
"/fixtures.db",
[
"view-instance",
- ("view-database", "database", "fixtures"),
- ("view-database-download", "database", "fixtures"),
+ ("view-database", "fixtures"),
+ ("view-database-download", "fixtures"),
],
),
(
"/fixtures/neighborhood_search",
[
"view-instance",
- ("view-database", "database", "fixtures"),
- ("view-query", "query", ("fixtures", "neighborhood_search")),
+ ("view-database", "fixtures"),
+ ("view-query", ("fixtures", "neighborhood_search")),
],
),
],
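The test body these ``(path, permissions)`` pairs feed is outside this hunk; a sketch of its likely shape, with ``app_client`` as an assumed fixture exposing the Datasette instance as ``.ds``:
.. code-block:: python

    import pytest

    @pytest.mark.parametrize("path,permissions", [...])  # the pairs shown above
    def test_permissions_checked(app_client, path, permissions):
        app_client.get(path)
        assert_permissions_checked(app_client.ds, permissions)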
From c7d145e016522dd6ee229d4d0b3ba79a7a8877c1 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 8 Jun 2020 12:06:05 -0700
Subject: [PATCH 0338/2124] Updated example for extra_template_vars hook,
closes #816
---
docs/plugins.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/docs/plugins.rst b/docs/plugins.rst
index 56041d0c..6b1e60f2 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -689,14 +689,14 @@ Function that returns an awaitable function that returns a dictionary
Datasette runs Jinja2 in `async mode `__, which means you can add awaitable functions to the template scope and they will be automatically awaited when they are rendered by the template.
-Here's an example plugin that returns an authentication object from the ASGI scope:
+Here's an example plugin that adds a ``"user_agent"`` variable to the template context containing the current request's User-Agent header:
.. code-block:: python
@hookimpl
def extra_template_vars(request):
return {
- "auth": request.scope.get("auth")
+ "user_agent": request.headers.get("user-agent")
}
This example returns an awaitable function which adds a list of ``hidden_table_names`` to the context:
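That second example sits outside this hunk; a hedged sketch of what such an awaitable variant could look like, assuming ``Database.hidden_table_names()`` is an async method and ``database`` is the current database name:
.. code-block:: python

    @hookimpl
    def extra_template_vars(datasette, database):
        async def inner():
            db = datasette.get_database(database)
            return {"hidden_table_names": await db.hidden_table_names()}

        return inner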
From 54370853828bdf87ca844fd0fc00900e0e2e659d Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 8 Jun 2020 12:32:27 -0700
Subject: [PATCH 0339/2124] Documentation for allow blocks on more stuff,
closes #811
---
docs/authentication.rst | 121 ++++++++++++++++++++++++++++++++--------
docs/sql_queries.rst | 2 +-
2 files changed, 100 insertions(+), 23 deletions(-)
diff --git a/docs/authentication.rst b/docs/authentication.rst
index f5209dfc..a6c4ee79 100644
--- a/docs/authentication.rst
+++ b/docs/authentication.rst
@@ -15,7 +15,7 @@ Actors
Through plugins, Datasette can support both authenticated users (with cookies) and authenticated API agents (via authentication tokens). The word "actor" is used to cover both of these cases.
-Every request to Datasette has an associated actor value. This can be ``None`` for unauthenticated requests, or a JSON compatible Python dictionary for authenticated users or API agents.
+Every request to Datasette has an associated actor value, available in the code as ``request.actor``. This can be ``None`` for unauthenticated requests, or a JSON compatible Python dictionary for authenticated users or API agents.
The only required field in an actor is ``"id"``, which must be a string. Plugins may decide to add any other fields to the actor dictionary.
@@ -24,7 +24,7 @@ Plugins can use the :ref:`plugin_actor_from_request` hook to implement custom lo
.. _authentication_root:
Using the "root" actor
-======================
+----------------------
Datasette currently leaves almost all forms of authentication to plugins - `datasette-auth-github `__ for example.
@@ -49,37 +49,40 @@ The URL on the first line includes a one-use token which can be used to sign in
.. _authentication_permissions:
-Permissions
-===========
+Checking permission
+===================
Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)` method. This method is also used by Datasette core code itself, which allows plugins to help make decisions on which actions are allowed by implementing the :ref:`plugin_permission_allowed` plugin hook.
-.. _authentication_permissions_canned_queries:
+.. _authentication_permissions_metadata:
-Permissions for canned queries
-==============================
+Configuring permissions in metadata.json
+========================================
-Datasette's :ref:`canned queries ` default to allowing any user to execute them.
+You can limit who is allowed to view different parts of your Datasette instance using ``"allow"`` keys in your :ref:`metadata` configuration.
-You can limit who is allowed to execute a specific query with the ``"allow"`` key in the :ref:`metadata` configuration for that query.
+You can control the following:
-Here's how to restrict access to a write query to just the "root" user:
+* Access to the entire Datasette instance
+* Access to specific databases
+* Access to specific tables and views
+* Access to specific :ref:`canned_queries`
+
+If a user cannot access a specific database, they will not be able to access tables, views or queries within that database. If a user cannot access the instance, they will not be able to access any of the databases, tables, views or queries.
+
+.. _authentication_permissions_instance:
+
+Controlling access to an instance
+---------------------------------
+
+Here's how to restrict access to your entire Datasette instance to just the ``"id": "root"`` user:
.. code-block:: json
{
- "databases": {
- "mydatabase": {
- "queries": {
- "add_name": {
- "sql": "INSERT INTO names (name) VALUES (:name)",
- "write": true,
- "allow": {
- "id": ["root"]
- }
- }
- }
- }
+ "title": "My private Datasette instance",
+ "allow": {
+ "id": "root"
}
}
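These ``allow`` blocks are evaluated by the ``actor_matches_allow()`` helper seen in the diffs above; a quick illustration of its documented behaviour (a sketch, not exhaustive):
.. code-block:: python

    from datasette.utils import actor_matches_allow

    actor_matches_allow({"id": "root"}, {"id": "root"})   # True
    actor_matches_allow({"id": "simon"}, {"id": "root"})  # False
    actor_matches_allow({"id": "simon"}, {"id": "*"})     # True - wildcard
    actor_matches_allow(None, {"id": "root"})             # False - unauthenticated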
@@ -126,6 +129,80 @@ If you want to provide access to any actor with a value for a specific key, use
These keys act as an "or" mechanism. An actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block.
+.. _authentication_permissions_database:
+
+Controlling access to specific databases
+----------------------------------------
+
+To limit access to a specific ``private.db`` database to just authenticated users, use the ``"allow"`` block like this:
+
+.. code-block:: json
+
+ {
+ "databases": {
+ "private": {
+ "allow": {
+ "id": "*"
+ }
+ }
+ }
+ }
+
+.. _authentication_permissions_table:
+
+Controlling access to specific tables and views
+-----------------------------------------------
+
+To limit access to the ``users`` table in your ``bakery.db`` database:
+
+.. code-block:: json
+
+ {
+ "databases": {
+ "bakery": {
+ "tables": {
+ "users": {
+ "allow": {
+ "id": "*"
+ }
+ }
+ }
+ }
+ }
+ }
+
+This works for SQL views as well - you can treat them as if they are tables.
+
+.. warning::
+ Restricting access to tables and views in this way will NOT prevent users from querying them using arbitrary SQL queries.
+
+ If you are restricting access to specific tables, you should also use the ``"allow_sql"`` block to prevent users from accessing them with arbitrary SQL queries.
+
+.. _authentication_permissions_table:
+
+Controlling access to specific canned queries
+---------------------------------------------
+
+To limit access to the ``add_name`` canned query in your ``dogs.db`` database to just the :ref:`root user`:
+
+.. code-block:: json
+
+ {
+ "databases": {
+ "dogs": {
+ "queries": {
+ "add_name": {
+ "sql": "INSERT INTO names (name) VALUES (:name)",
+ "write": true,
+ "allow": {
+ "id": ["root"]
+ }
+ }
+ }
+ }
+ }
+ }
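Once the ``allow`` block passes, executing this writable canned query means POSTing to its page; a hedged sketch with ``requests``, where the host and the ``ds_actor`` cookie are assumptions that depend on the deployment and the authentication plugin in use:
.. code-block:: python

    import requests

    requests.post(
        "https://example.com/dogs/add_name",
        data={"name": "Pancakes"},
        cookies={"ds_actor": "..."},  # assumed signed actor cookie
    )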
+
.. _authentication_actor_matches_allow:
actor_matches_allow()
diff --git a/docs/sql_queries.rst b/docs/sql_queries.rst
index 5df8bdb0..5295a2e0 100644
--- a/docs/sql_queries.rst
+++ b/docs/sql_queries.rst
@@ -217,7 +217,7 @@ Writable canned queries
Canned queries by default are read-only. You can use the ``"write": true`` key to indicate that a canned query can write to the database.
-See :ref:`authentication_permissions_canned_queries` for details on how to add permission checks to canned queries, using the ``"allow"`` key.
+See :ref:`authentication_permissions_metadata` for details on how to add permission checks to canned queries, using the ``"allow"`` key.
.. code-block:: json
From 8205d58316ced1d5ae589b29a5a1b5ecb6257ab0 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 8 Jun 2020 13:10:40 -0700
Subject: [PATCH 0340/2124] Corrected documentation for resource in view-query
---
docs/authentication.rst | 5 ++---
1 file changed, 2 insertions(+), 3 deletions(-)
diff --git a/docs/authentication.rst b/docs/authentication.rst
index a6c4ee79..88808428 100644
--- a/docs/authentication.rst
+++ b/docs/authentication.rst
@@ -245,7 +245,6 @@ view-instance
Top level permission - Actor is allowed to view any pages within this instance, starting at https://latest.datasette.io/
-
.. _permissions_view_database:
view-database
@@ -283,8 +282,8 @@ view-query
Actor is allowed to view a :ref:`canned query ` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size
-``resource`` - string
- The name of the canned query
+``resource`` - tuple: (string, string)
+ The name of the database, then the name of the canned query
.. _permissions_execute_sql:
From e0a4664fbab5556454dac7f3c798253a34db2928 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 8 Jun 2020 15:09:57 -0700
Subject: [PATCH 0341/2124] Better example plugin for permission_allowed
Also fixed it so default permission checks run after plugin permission checks, refs #818
---
datasette/default_permissions.py | 2 +-
docs/authentication.rst | 4 ++--
docs/plugins.rst | 40 ++++++++++++++++++++++++++++++--
3 files changed, 41 insertions(+), 5 deletions(-)
diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py
index e989c0fa..a2f4a315 100644
--- a/datasette/default_permissions.py
+++ b/datasette/default_permissions.py
@@ -2,7 +2,7 @@ from datasette import hookimpl
from datasette.utils import actor_matches_allow
-@hookimpl
+@hookimpl(tryfirst=True)
def permission_allowed(datasette, actor, action, resource):
if action == "permissions-debug":
if actor and actor.get("id") == "root":
diff --git a/docs/authentication.rst b/docs/authentication.rst
index 88808428..34d46511 100644
--- a/docs/authentication.rst
+++ b/docs/authentication.rst
@@ -174,11 +174,11 @@ To limit access to the ``users`` table in your ``bakery.db`` database:
This works for SQL views as well - you can treat them as if they are tables.
.. warning::
- Restricting access to tables and views in this way will NOT prevent users from querying them using arbitrary SQL queries.
+ Restricting access to tables and views in this way will NOT prevent users from querying them using arbitrary SQL queries, `like this `__ for example.
If you are restricting access to specific tables, you should also use the ``"allow_sql"`` block to prevent users from accessing them with arbitrary SQL queries.
-.. _authentication_permissions_table:
+.. _authentication_permissions_query:
Controlling access to specific canned queries
---------------------------------------------
diff --git a/docs/plugins.rst b/docs/plugins.rst
index 6b1e60f2..73d2eabd 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -1006,7 +1006,7 @@ Instead of returning a dictionary, this function can return an awaitable functio
.. _plugin_permission_allowed:
permission_allowed(datasette, actor, action, resource)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
``datasette`` - :ref:`internals_datasette`
You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.
@@ -1022,4 +1022,40 @@ permission_allowed(datasette, actor, action, resource)
Called to check that an actor has permission to perform an action on a resource. Can return ``True`` if the action is allowed, ``False`` if the action is not allowed or ``None`` if the plugin does not have an opinion one way or the other.
-See :ref:`permissions` for a full list of permissions included in Datasette core.
+Here's an example plugin which randomly decides whether a permission check should be allowed or denied, except for ``view-instance``, which always uses the default permission scheme instead.
+
+.. code-block:: python
+
+ from datasette import hookimpl
+ import random
+
+ @hookimpl
+ def permission_allowed(action):
+ if action != "view-instance":
+ # Return True or False at random
+ return random.random() > 0.5
+ # Returning None falls back to default permissions
+
+This function can alternatively return an awaitable function which itself returns ``True``, ``False`` or ``None``. You can use this option if you need to execute additional database queries, for example using ``await datasette.get_database(...).execute(...)``.
+
+Here's an example that allows users to view the ``admin_log`` table only if their actor ``id`` is present in the ``admin_users`` table. It also disallows arbitrary SQL queries against the ``staff.db`` database for all users.
+
+.. code-block:: python
+
+ @hookimpl
+ def permission_allowed(datasette, actor, action, resource):
+ async def inner():
+ if action == "execute-sql" and resource == "staff":
+ return False
+ if action == "view-table" and resource == ("staff", "admin_log"):
+ if not actor:
+ return False
+ user_id = actor["id"]
+ result = await datasette.get_database("staff").execute(
+ "select count(*) from admin_users where user_id = :user_id",
+ {"user_id": user_id},
+ )
+ # Return a boolean - the raw Results object would always be truthy
+ return result.first()[0] > 0
+
+ return inner
+
+See :ref:`permissions` for a full list of permissions that are included in Datasette core.
From 49d6d2f7b0f6cb02e25022e1c9403811f1fa0a7c Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 8 Jun 2020 17:05:44 -0700
Subject: [PATCH 0342/2124] allow_sql block to control execute-sql permission
in metadata.json, closes #813
Also removed the --config allow_sql:0 mechanism in favour of the new allow_sql block.
---
datasette/app.py | 1 -
datasette/default_permissions.py | 8 ++++++++
datasette/templates/database.html | 2 +-
datasette/templates/query.html | 2 +-
datasette/templates/table.html | 2 +-
datasette/views/database.py | 8 ++++++--
datasette/views/table.py | 9 +++++++--
docs/authentication.rst | 33 ++++++++++++++++++++++++++++++-
docs/config.rst | 9 ---------
docs/json_api.rst | 2 +-
docs/pages.rst | 2 +-
docs/sql_queries.rst | 4 ++--
tests/test_api.py | 12 ++---------
tests/test_config_dir.py | 3 ---
tests/test_html.py | 10 +---------
tests/test_permissions.py | 29 +++++++++++++++++++++++++++
16 files changed, 92 insertions(+), 44 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 2f89d17c..a7c3c66a 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -110,7 +110,6 @@ CONFIG_OPTIONS = (
"Allow users to download the original SQLite database files",
),
ConfigOption("suggest_facets", True, "Calculate and display suggested facets"),
- ConfigOption("allow_sql", True, "Allow arbitrary SQL queries via ?sql= parameter"),
ConfigOption(
"default_cache_ttl",
5,
diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py
index a2f4a315..e750acbf 100644
--- a/datasette/default_permissions.py
+++ b/datasette/default_permissions.py
@@ -34,3 +34,11 @@ def permission_allowed(datasette, actor, action, resource):
if allow is None:
return True
return actor_matches_allow(actor, allow)
+ elif action == "execute-sql":
+ # Use allow_sql block from database block, or from top-level
+ database_allow_sql = datasette.metadata("allow_sql", database=resource)
+ if database_allow_sql is None:
+ database_allow_sql = datasette.metadata("allow_sql")
+ if database_allow_sql is None:
+ return True
+ return actor_matches_allow(actor, database_allow_sql)
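Given that lookup order (database-level ``allow_sql`` first, then top-level), a metadata.json like this sketch would restrict arbitrary SQL across every database to the root actor:
.. code-block:: json

    {
        "allow_sql": {
            "id": "root"
        }
    }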
diff --git a/datasette/templates/database.html b/datasette/templates/database.html
index 100faee4..5ae51ef7 100644
--- a/datasette/templates/database.html
+++ b/datasette/templates/database.html
@@ -22,7 +22,7 @@
{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}
-{% if config.allow_sql %}
+{% if allow_execute_sql %}