{{ "{:,}".format(database.table_rows_sum) }} rows in {{ database.tables_count }} table{% if database.tables_count != 1 %}s{% endif %}{% if database.tables_count and database.hidden_tables_count %}, {% endif %}
{% if database.hidden_tables_count %}
@@ -21,7 +21,7 @@
{{ "{:,}".format(database.views_count) }} view{% if database.views_count != 1 %}s{% endif %}
{% endif %}
-
{% for table in database.tables_truncated %}{{ table.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_more %}, ...{% endif %}
+
{% for table in database.tables_truncated %}{{ table.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_more %}, ...{% endif %}
{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}
-
-
{% for table in database.tables_and_views_truncated %}{{ table.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}
+
{% for table in database.tables_and_views_truncated %}{{ table.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}
',
+ ],
]
assert expected == [
[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")
@@ -611,7 +616,7 @@ def test_table_html_foreign_key_links(app_client):
def test_table_html_disable_foreign_key_links_with_labels(app_client):
- response = app_client.get("/fixtures/foreign_key_references?_labels=off")
+ response = app_client.get("/fixtures/foreign_key_references?_labels=off&_size=1")
assert response.status == 200
table = Soup(response.body, "html.parser").find("table")
expected = [
From c3181d9a840dff7be8c990b21f5749db393a4ea0 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 2 Nov 2019 15:47:20 -0700
Subject: [PATCH 0250/2303] Release notes for 0.30.2
---
docs/changelog.rst | 13 ++++++++++++-
1 file changed, 12 insertions(+), 1 deletion(-)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 8ac32c45..f4761efe 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,16 @@
Changelog
=========
+.. _v0_30_2:
+
+0.30.2 (2019-11-02)
+-------------------
+
+- ``/-/plugins`` page now uses distribution name e.g. ``datasette-cluster-map`` instead of the name of the underlying Python package (``datasette_cluster_map``) (`#606 <https://github.com/simonw/datasette/issues/606>`__)
+- Array faceting is now only suggested for columns that contain arrays of strings (`#562 <https://github.com/simonw/datasette/issues/562>`__)
+- Better documentation for the ``--host`` argument (`#574 <https://github.com/simonw/datasette/issues/574>`__)
+- Don't show ``None`` with a broken link for the label on a nullable foreign key (`#406 <https://github.com/simonw/datasette/issues/406>`__)
+
.. _v0_30_1:
0.30.1 (2019-10-30)
@@ -14,6 +24,7 @@ Changelog
.. _v0_30:
+
0.30 (2019-10-18)
-----------------
@@ -82,7 +93,7 @@ Two new plugins take advantage of this hook:
New plugin hook: extra_template_vars
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The :ref:`plugin_extra_template_vars` plugin hook allows plugins to inject their own additional variables into the Datasette template context. This can be used in conjunction with custom templates to customize the Datasette interface. `datasette-auth-github <https://github.com/simonw/datasette-auth-github>`__ uses this hook to add custom HTML to the new top navigation bar (which is designed to be modified by plugins, see `#540 <https://github.com/simonw/datasette/issues/540>`__).
+The :ref:`plugin_hook_extra_template_vars` plugin hook allows plugins to inject their own additional variables into the Datasette template context. This can be used in conjunction with custom templates to customize the Datasette interface. `datasette-auth-github <https://github.com/simonw/datasette-auth-github>`__ uses this hook to add custom HTML to the new top navigation bar (which is designed to be modified by plugins, see `#540 <https://github.com/simonw/datasette/issues/540>`__).
Secret plugin configuration options
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
From 2bf7ce5f517d772a16d7855a35a8a75d4456aad7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 2 Nov 2019 16:12:46 -0700
Subject: [PATCH 0251/2303] Fix CSV export for nullable foreign keys, closes
#612
---
datasette/views/base.py | 12 ++++++++----
tests/test_csv.py | 15 +++++++++++++++
2 files changed, 23 insertions(+), 4 deletions(-)
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 1568b084..94945304 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -330,10 +330,14 @@ class DataView(BaseView):
else:
# Look for {"value": ..., "label": ...} dicts and expand
new_row = []
- for cell in row:
- if isinstance(cell, dict):
- new_row.append(cell["value"])
- new_row.append(cell["label"])
+ for heading, cell in zip(data["columns"], row):
+ if heading in expanded_columns:
+ if cell is None:
+ new_row.extend(("", ""))
+ else:
+ assert isinstance(cell, dict)
+ new_row.append(cell["value"])
+ new_row.append(cell["label"])
else:
new_row.append(cell)
await writer.writerow(new_row)
diff --git a/tests/test_csv.py b/tests/test_csv.py
index b148b6db..13aca489 100644
--- a/tests/test_csv.py
+++ b/tests/test_csv.py
@@ -41,6 +41,14 @@ pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,com
"\n", "\r\n"
)
+EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV = """
+pk,foreign_key_with_label,foreign_key_with_label_label,foreign_key_with_no_label,foreign_key_with_no_label_label
+1,1,hello,1,1
+2,,,,
+""".lstrip().replace(
+ "\n", "\r\n"
+)
+
def test_table_csv(app_client):
response = app_client.get("/fixtures/simple_primary_key.csv")
@@ -63,6 +71,13 @@ def test_table_csv_with_labels(app_client):
assert EXPECTED_TABLE_WITH_LABELS_CSV == response.text
+def test_table_csv_with_nullable_labels(app_client):
+ response = app_client.get("/fixtures/foreign_key_references.csv?_labels=1")
+ assert response.status == 200
+ assert "text/plain; charset=utf-8" == response.headers["content-type"]
+ assert EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV == response.text
+
+
def test_custom_sql_csv(app_client):
response = app_client.get(
"/fixtures.csv?sql=select+content+from+simple_primary_key+limit+2"
From ee330222f4c3ee66c2fe41ebc76fed56b9cb9a00 Mon Sep 17 00:00:00 2001
From: Tobias Kunze
Date: Mon, 4 Nov 2019 03:39:55 +0100
Subject: [PATCH 0252/2303] Offer to format readonly SQL (#602)
Following discussion in #601, this PR adds a "Format SQL" button to
read-only SQL (if the SQL actually differs from the formatting result).
It also removes a console error on readonly SQL queries.
Thanks, @rixx!
---
datasette/templates/_codemirror_foot.html | 41 ++++++++++++++---------
1 file changed, 26 insertions(+), 15 deletions(-)
diff --git a/datasette/templates/_codemirror_foot.html b/datasette/templates/_codemirror_foot.html
index 9aba61ab..4019d448 100644
--- a/datasette/templates/_codemirror_foot.html
+++ b/datasette/templates/_codemirror_foot.html
@@ -6,21 +6,32 @@ window.onload = () => {
if (sqlFormat && !readOnly) {
sqlFormat.hidden = false;
}
- var editor = CodeMirror.fromTextArea(sqlInput, {
- lineNumbers: true,
- mode: "text/x-sql",
- lineWrapping: true,
- });
- editor.setOption("extraKeys", {
- "Shift-Enter": function() {
- document.getElementsByClassName("sql")[0].submit();
- },
- Tab: false
- });
- if (sqlInput && sqlFormat) {
- sqlFormat.addEventListener("click", ev => {
- editor.setValue(sqlFormatter.format(editor.getValue()));
- })
+ if (sqlInput) {
+ var editor = CodeMirror.fromTextArea(sqlInput, {
+ lineNumbers: true,
+ mode: "text/x-sql",
+ lineWrapping: true,
+ });
+ editor.setOption("extraKeys", {
+ "Shift-Enter": function() {
+ document.getElementsByClassName("sql")[0].submit();
+ },
+ Tab: false
+ });
+ if (sqlFormat) {
+ sqlFormat.addEventListener("click", ev => {
+ editor.setValue(sqlFormatter.format(editor.getValue()));
+ })
+ }
+ }
+ if (sqlFormat && readOnly) {
+ const formatted = sqlFormatter.format(readOnly.innerHTML);
+ if (formatted != readOnly.innerHTML) {
+ sqlFormat.hidden = false;
+ sqlFormat.addEventListener("click", ev => {
+ readOnly.innerHTML = formatted;
+ })
+ }
}
}
From 9db22cdf1809fb78a7b183cd2f617cd5e26efc68 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 3 Nov 2019 20:11:55 -0800
Subject: [PATCH 0253/2303] pk__notin= filter, closes #614
---
datasette/filters.py | 15 +++++++++++++++
docs/json_api.rst | 3 +++
tests/test_filters.py | 3 +++
3 files changed, 21 insertions(+)
diff --git a/datasette/filters.py b/datasette/filters.py
index efe014ae..5897a3ed 100644
--- a/datasette/filters.py
+++ b/datasette/filters.py
@@ -77,6 +77,20 @@ class InFilter(Filter):
return "{} in {}".format(column, json.dumps(self.split_value(value)))
+class NotInFilter(InFilter):
+ key = "notin"
+ display = "not in"
+
+ def where_clause(self, table, column, value, param_counter):
+ values = self.split_value(value)
+ params = [":p{}".format(param_counter + i) for i in range(len(values))]
+ sql = "{} not in ({})".format(escape_sqlite(column), ", ".join(params))
+ return sql, values
+
+ def human_clause(self, column, value):
+ return "{} not in {}".format(column, json.dumps(self.split_value(value)))
+
+
class Filters:
_filters = (
[
@@ -125,6 +139,7 @@ class Filters:
TemplatedFilter("like", "like", '"{c}" like :{p}', '{c} like "{v}"'),
TemplatedFilter("glob", "glob", '"{c}" glob :{p}', '{c} glob "{v}"'),
InFilter(),
+ NotInFilter(),
]
+ (
[
diff --git a/docs/json_api.rst b/docs/json_api.rst
index 4b365e14..de70362c 100644
--- a/docs/json_api.rst
+++ b/docs/json_api.rst
@@ -228,6 +228,9 @@ You can filter the data returned by the table based on column values using a que
``?column__in=["value","value,with,commas"]``
+``?column__notin=value1,value2,value3``
+ Rows where column does not match any of the provided values. The inverse of ``__in=``. Also supports JSON arrays.
+
``?column__arraycontains=value``
Works against columns that contain JSON arrays - matches if any of the values in that array match.
diff --git a/tests/test_filters.py b/tests/test_filters.py
index fd682cd9..8598087f 100644
--- a/tests/test_filters.py
+++ b/tests/test_filters.py
@@ -47,6 +47,9 @@ import pytest
["foo in (:p0, :p1)"],
["dog,cat", "cat[dog]"],
),
+ # Not in, and JSON array not in
+ ((("foo__notin", "1,2,3"),), ["foo not in (:p0, :p1, :p2)"], ["1", "2", "3"]),
+ ((("foo__notin", "[1,2,3]"),), ["foo not in (:p0, :p1, :p2)"], [1, 2, 3]),
],
)
def test_build_where(args, expected_where, expected_params):
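
The new clause can be exercised in isolation. A rough sketch of what ``?foo__notin=1,2,3`` compiles to (``notin_where_clause`` is a hypothetical standalone function; it only handles comma splitting, whereas the real ``InFilter.split_value`` also accepts JSON arrays, and column names go through ``escape_sqlite``):

.. code-block:: python

    def notin_where_clause(column, value, param_counter=0):
        # Build a "col not in (...)" clause plus its parameter values
        values = value.split(",")
        params = [":p{}".format(param_counter + i) for i in range(len(values))]
        sql = "{} not in ({})".format(column, ", ".join(params))
        return sql, values

    sql, params = notin_where_clause("foo", "1,2,3")
    print(sql)     # foo not in (:p0, :p1, :p2)
    print(params)  # ['1', '2', '3']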
From 52fa79c6075f0830ff635b81d957c64d877a05aa Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 4 Nov 2019 15:03:48 -0800
Subject: [PATCH 0254/2303] Use select colnames, not select * for table view -
refs #615
---
datasette/views/table.py | 8 ++++++--
tests/test_api.py | 3 ++-
2 files changed, 8 insertions(+), 3 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 326c11ae..139ff80b 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -235,13 +235,17 @@ class TableView(RowTableShared):
raise NotFound("Table not found: {}".format(table))
pks = await db.primary_keys(table)
+ table_columns = await db.table_columns(table)
+
+ select_columns = ", ".join(escape_sqlite(t) for t in table_columns)
+
use_rowid = not pks and not is_view
if use_rowid:
- select = "rowid, *"
+ select = "rowid, {}".format(select_columns)
order_by = "rowid"
order_by_pks = "rowid"
else:
- select = "*"
+ select = select_columns
order_by_pks = ", ".join([escape_sqlite(pk) for pk in pks])
order_by = order_by_pks
diff --git a/tests/test_api.py b/tests/test_api.py
index c6acbab1..4a09b238 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -610,7 +610,8 @@ def test_table_json(app_client):
assert response.status == 200
data = response.json
assert (
- data["query"]["sql"] == "select * from simple_primary_key order by id limit 51"
+ data["query"]["sql"]
+ == "select id, content from simple_primary_key order by id limit 51"
)
assert data["query"]["params"] == {}
assert data["rows"] == [
From 931bfc66613aa3e22f8314df5c0d0758baf31f38 Mon Sep 17 00:00:00 2001
From: Tobias Kunze
Date: Tue, 5 Nov 2019 00:16:30 +0100
Subject: [PATCH 0255/2303] Handle spaces in DB names (#590)
Closes #503 - thanks, @rixx
---
datasette/views/base.py | 3 ++-
tests/fixtures.py | 4 ++--
tests/test_api.py | 19 ++++++++++++++++++-
tests/test_html.py | 8 ++++----
4 files changed, 26 insertions(+), 8 deletions(-)
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 94945304..062c6956 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -203,12 +203,13 @@ class DataView(BaseView):
hash = hash_bit
else:
name = db_name
- # Verify the hash
+ name = urllib.parse.unquote_plus(name)
try:
db = self.ds.databases[name]
except KeyError:
raise NotFound("Database not found: {}".format(name))
+ # Verify the hash
expected = "000"
if db.hash is not None:
expected = db.hash[:HASH_LENGTH]
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 8aa44687..dcc414bf 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -174,7 +174,7 @@ def app_client_no_files():
@pytest.fixture(scope="session")
def app_client_two_attached_databases():
yield from make_app_client(
- extra_databases={"extra_database.db": EXTRA_DATABASE_SQL}
+ extra_databases={"extra database.db": EXTRA_DATABASE_SQL}
)
@@ -188,7 +188,7 @@ def app_client_conflicting_database_names():
@pytest.fixture(scope="session")
def app_client_two_attached_databases_one_immutable():
yield from make_app_client(
- is_immutable=True, extra_databases={"extra_database.db": EXTRA_DATABASE_SQL}
+ is_immutable=True, extra_databases={"extra database.db": EXTRA_DATABASE_SQL}
)
diff --git a/tests/test_api.py b/tests/test_api.py
index 4a09b238..1fa8642f 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -6,6 +6,7 @@ from .fixtures import ( # noqa
app_client_shorter_time_limit,
app_client_larger_cache_size,
app_client_returned_rows_matches_page_size,
+ app_client_two_attached_databases,
app_client_two_attached_databases_one_immutable,
app_client_conflicting_database_names,
app_client_with_cors,
@@ -1188,7 +1189,7 @@ def test_databases_json(app_client_two_attached_databases_one_immutable):
databases = response.json
assert 2 == len(databases)
extra_database, fixtures_database = databases
- assert "extra_database" == extra_database["name"]
+ assert "extra database" == extra_database["name"]
assert None == extra_database["hash"]
assert True == extra_database["is_mutable"]
assert False == extra_database["is_memory"]
@@ -1679,6 +1680,22 @@ def test_cors(app_client_with_cors, path, status_code):
assert "*" == response.headers["Access-Control-Allow-Origin"]
+@pytest.mark.parametrize(
+ "path",
+ (
+ "/",
+ ".json",
+ "/searchable",
+ "/searchable.json",
+ "/searchable_view",
+ "/searchable_view.json",
+ ),
+)
+def test_database_with_space_in_name(app_client_two_attached_databases, path):
+ response = app_client_two_attached_databases.get("/extra database" + path)
+ assert response.status == 200
+
+
def test_common_prefix_database_names(app_client_conflicting_database_names):
# https://github.com/simonw/datasette/issues/597
assert ["fixtures", "foo", "foo-bar"] == [
diff --git a/tests/test_html.py b/tests/test_html.py
index f63e595b..7f1af86e 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -27,11 +27,11 @@ def test_homepage(app_client_two_attached_databases):
# Should be two attached databases
assert [
{"href": "/fixtures", "text": "fixtures"},
- {"href": "/extra_database", "text": "extra_database"},
+ {"href": "/extra database", "text": "extra database"},
] == [{"href": a["href"], "text": a.text.strip()} for a in soup.select("h2 a")]
# The first attached database should show count text and attached tables
h2 = soup.select("h2")[1]
- assert "extra_database" == h2.text.strip()
+ assert "extra database" == h2.text.strip()
counts_p, links_p = h2.find_all_next("p")[:2]
assert (
"2 rows in 1 table, 5 rows in 4 hidden tables, 1 view" == counts_p.text.strip()
@@ -41,8 +41,8 @@ def test_homepage(app_client_two_attached_databases):
{"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a")
]
assert [
- {"href": "/extra_database/searchable", "text": "searchable"},
- {"href": "/extra_database/searchable_view", "text": "searchable_view"},
+ {"href": "/extra database/searchable", "text": "searchable"},
+ {"href": "/extra database/searchable_view", "text": "searchable_view"},
] == table_links
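
The decisive line is ``urllib.parse.unquote_plus(name)``: both percent-encoded and plus-encoded spaces in the URL now resolve to the on-disk database name. For illustration:

.. code-block:: python

    from urllib.parse import unquote_plus

    for raw in ("extra+database", "extra%20database", "fixtures"):
        print(unquote_plus(raw))
    # extra database
    # extra database
    # fixtures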
From c30f07c58e410ee296b28aeabe4dc461dd40b435 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 5 Nov 2019 21:12:55 -0800
Subject: [PATCH 0256/2303] Removed _group_count=col feature, closes #504
---
datasette/views/table.py | 12 ------------
docs/json_api.rst | 9 ---------
2 files changed, 21 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 139ff80b..920693d7 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -499,18 +499,6 @@ class TableView(RowTableShared):
if order_by:
order_by = "order by {} ".format(order_by)
- # _group_count=col1&_group_count=col2
- group_count = special_args_lists.get("_group_count") or []
- if group_count:
- sql = 'select {group_cols}, count(*) as "count" from {table_name} {where} group by {group_cols} order by "count" desc limit 100'.format(
- group_cols=", ".join(
- '"{}"'.format(group_count_col) for group_count_col in group_count
- ),
- table_name=escape_sqlite(table),
- where=where_clause,
- )
- return await self.custom_sql(request, database, hash, sql, editable=True)
-
extra_args = {}
# Handle ?_size=500
page_size = _size or request.raw_args.get("_size")
diff --git a/docs/json_api.rst b/docs/json_api.rst
index de70362c..e369bee7 100644
--- a/docs/json_api.rst
+++ b/docs/json_api.rst
@@ -321,15 +321,6 @@ Special table arguments
Here's `an example `__.
-
-``?_group_count=COLUMN``
- Executes a SQL query that returns a count of the number of rows matching
- each unique value in that column, with the most common ordered first.
-
-``?_group_count=COLUMN1&_group_count=column2``
- You can pass multiple ``_group_count`` columns to return counts against
- unique combinations of those columns.
-
``?_next=TOKEN``
Pagination by continuation token - pass the token that was returned in the
``"next"`` property by the previous page.
From f9c146b893856a48afa810ebcce1714f30d0d3a2 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 6 Nov 2019 16:55:44 -0800
Subject: [PATCH 0257/2303] Removed unused special_args_lists variable
---
datasette/views/table.py | 2 --
1 file changed, 2 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 920693d7..a60a3941 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -261,12 +261,10 @@ class TableView(RowTableShared):
# That's so if there is a column that starts with _
# it can still be queried using ?_col__exact=blah
special_args = {}
- special_args_lists = {}
other_args = []
for key, value in args.items():
if key.startswith("_") and "__" not in key:
special_args[key] = value[0]
- special_args_lists[key] = value
else:
for v in value:
other_args.append((key, v))
From 83fc5165ac724f69cd57d8f15cd3038e7b30f878 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 7 Nov 2019 18:48:39 -0800
Subject: [PATCH 0258/2303] Improved UI for publish cloudrun, closes #608
---
datasette/publish/cloudrun.py | 39 ++++++++++++++++++++++--
tests/test_publish_cloudrun.py | 55 ++++++++++++++++++++++++++++++++--
2 files changed, 90 insertions(+), 4 deletions(-)
diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py
index c2d77746..a833a32b 100644
--- a/datasette/publish/cloudrun.py
+++ b/datasette/publish/cloudrun.py
@@ -60,6 +60,23 @@ def publish_subcommand(publish):
"gcloud config get-value project", shell=True, universal_newlines=True
).strip()
+ if not service:
+ # Show the user their current services, then prompt for one
+ click.echo("Please provide a service name for this deployment\n")
+ click.echo("Using an existing service name will over-write it")
+ click.echo("")
+ existing_services = get_existing_services()
+ if existing_services:
+ click.echo("Your existing services:\n")
+ for existing_service in existing_services:
+ click.echo(
+ " {name} - created {created} - {url}".format(
+ **existing_service
+ )
+ )
+ click.echo("")
+ service = click.prompt("Service name", type=str)
+
extra_metadata = {
"title": title,
"license": license,
@@ -110,8 +127,26 @@ def publish_subcommand(publish):
image_id = "gcr.io/{project}/{name}".format(project=project, name=name)
check_call("gcloud builds submit --tag {}".format(image_id), shell=True)
check_call(
- "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {}{}".format(
- image_id, " {}".format(service) if service else ""
+ "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} {}".format(
+ image_id, service,
),
shell=True,
)
+
+
+def get_existing_services():
+ services = json.loads(
+ check_output(
+ "gcloud beta run services list --platform=managed --format json",
+ shell=True,
+ universal_newlines=True,
+ )
+ )
+ return [
+ {
+ "name": service["metadata"]["name"],
+ "created": service["metadata"]["creationTimestamp"],
+ "url": service["status"]["address"]["url"],
+ }
+ for service in services
+ ]
diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py
index 481ac04d..a038b60e 100644
--- a/tests/test_publish_cloudrun.py
+++ b/tests/test_publish_cloudrun.py
@@ -24,6 +24,53 @@ def test_publish_cloudrun_invalid_database(mock_which):
assert 'Path "woop.db" does not exist' in result.output
+@mock.patch("shutil.which")
+@mock.patch("datasette.publish.cloudrun.check_output")
+@mock.patch("datasette.publish.cloudrun.check_call")
+@mock.patch("datasette.publish.cloudrun.get_existing_services")
+def test_publish_cloudrun_prompts_for_service(
+ mock_get_existing_services, mock_call, mock_output, mock_which
+):
+ mock_get_existing_services.return_value = [
+ {"name": "existing", "created": "2019-01-01", "url": "http://www.example.com/"}
+ ]
+ mock_output.return_value = "myproject"
+ mock_which.return_value = True
+ runner = CliRunner()
+ with runner.isolated_filesystem():
+ open("test.db", "w").write("data")
+ result = runner.invoke(
+ cli.cli, ["publish", "cloudrun", "test.db"], input="input-service"
+ )
+ assert (
+ """
+Please provide a service name for this deployment
+
+Using an existing service name will overwrite it
+
+Your existing services:
+
+ existing - created 2019-01-01 - http://www.example.com/
+
+Service name: input-service
+""".strip()
+ == result.output.strip()
+ )
+ assert 0 == result.exit_code
+ tag = "gcr.io/myproject/datasette"
+ mock_call.assert_has_calls(
+ [
+ mock.call("gcloud builds submit --tag {}".format(tag), shell=True),
+ mock.call(
+ "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} input-service".format(
+ tag
+ ),
+ shell=True,
+ ),
+ ]
+ )
+
+
@mock.patch("shutil.which")
@mock.patch("datasette.publish.cloudrun.check_output")
@mock.patch("datasette.publish.cloudrun.check_call")
@@ -33,14 +80,16 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which):
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
- result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"])
+ result = runner.invoke(
+ cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"]
+ )
assert 0 == result.exit_code
tag = "gcr.io/{}/datasette".format(mock_output.return_value)
mock_call.assert_has_calls(
[
mock.call("gcloud builds submit --tag {}".format(tag), shell=True),
mock.call(
- "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {}".format(
+ "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} test".format(
tag
),
shell=True,
@@ -65,6 +114,8 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which):
"publish",
"cloudrun",
"test.db",
+ "--service",
+ "datasette",
"--plugin-secret",
"datasette-auth-github",
"client_id",
From 9f5d19c254d1bfbd99f576dff47a6e32e01c76ed Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 Nov 2019 18:12:20 -0800
Subject: [PATCH 0259/2303] Improved documentation for "publish cloudrun"
---
docs/publish.rst | 18 ++++++++++--------
1 file changed, 10 insertions(+), 8 deletions(-)
diff --git a/docs/publish.rst b/docs/publish.rst
index 304be8ef..89d33085 100644
--- a/docs/publish.rst
+++ b/docs/publish.rst
@@ -43,14 +43,16 @@ You will first need to install and configure the Google Cloud CLI tools by follo
You can then publish a database to Google Cloud Run using the following command::
- datasette publish cloudrun mydatabase.db
+ datasette publish cloudrun mydatabase.db --service=my-database
+
+A Cloud Run **service** is a single hosted application. The service name you specify will be used as part of the Cloud Run URL. If you deploy to a service name that you have used in the past, your new deployment will replace the previous one.
+
+If you omit the ``--service`` option you will be asked to pick a service name interactively during the deploy.
You may need to interact with prompts from the tool. Once it has finished it will output a URL like this one::
- Service [datasette] revision [datasette-00001] has been deployed
- and is serving traffic at https://datasette-j7hipcg4aq-uc.a.run.app
-
-During the deployment the tool will prompt you for the name of your service. You can reuse an existing name to replace your previous deployment with your new version, or pick a new name to deploy to a new URL.
+ Service [my-service] revision [my-service-00001] has been deployed
+ and is serving traffic at https://my-service-j7hipcg4aq-uc.a.run.app
.. literalinclude:: datasette-publish-cloudrun-help.txt
@@ -90,18 +92,18 @@ Custom metadata and plugins
You can define your own :ref:`metadata` and deploy that with your instance like so::
- datasette publish nowv1 mydatabase.db -m metadata.json
+ datasette publish cloudrun --service=my-service mydatabase.db -m metadata.json
If you just want to set the title, license or source information you can do that directly using extra options to ``datasette publish``::
- datasette publish nowv1 mydatabase.db \
+ datasette publish cloudrun mydatabase.db --service=my-service \
--title="Title of my database" \
--source="Where the data originated" \
--source_url="http://www.example.com/"
You can also specify plugins you would like to install. For example, if you want to include the `datasette-vega <https://github.com/simonw/datasette-vega>`_ visualization plugin you can use the following::
- datasette publish nowv1 mydatabase.db --install=datasette-vega
+ datasette publish cloudrun mydatabase.db --service=my-service --install=datasette-vega
If a plugin has any :ref:`plugins_configuration_secret` you can use the ``--plugin-secret`` option to set those secrets at publish time. For example, using Heroku with `datasette-auth-github <https://github.com/simonw/datasette-auth-github>`__ you might run the following command::
From 10b9d85edaaf198879344aa1c498000cfb27dff8 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 Nov 2019 18:15:13 -0800
Subject: [PATCH 0260/2303] datasette-csvs on Glitch now uses sqlite-utils
It previously used csvs-to-sqlite but that had heavy dependencies.
See https://support.glitch.com/t/can-you-upgrade-python-to-latest-version/7980/33
---
docs/getting_started.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/getting_started.rst b/docs/getting_started.rst
index d0c22583..fdf7d23c 100644
--- a/docs/getting_started.rst
+++ b/docs/getting_started.rst
@@ -25,7 +25,7 @@ Glitch allows you to "remix" any project to create your own copy and start editi
.. image:: https://cdn.glitch.com/2703baf2-b643-4da7-ab91-7ee2a2d00b5b%2Fremix-button.svg
:target: https://glitch.com/edit/#!/remix/datasette-csvs
-Find a CSV file and drag it onto the Glitch file explorer panel - ``datasette-csvs`` will automatically convert it to a SQLite database (using `csvs-to-sqlite <https://github.com/simonw/csvs-to-sqlite>`__) and allow you to start exploring it using Datasette.
+Find a CSV file and drag it onto the Glitch file explorer panel - ``datasette-csvs`` will automatically convert it to a SQLite database (using `sqlite-utils <https://github.com/simonw/sqlite-utils>`__) and allow you to start exploring it using Datasette.
If your CSV file has a ``latitude`` and ``longitude`` column you can visualize it on a map by uncommenting the ``datasette-cluster-map`` line in the ``requirements.txt`` file using the Glitch file editor.
From 28c4a6db5b5e512db630d7ba6127196185de67c7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 9 Nov 2019 17:29:36 -0800
Subject: [PATCH 0261/2303] CREATE INDEX statements on table page, closes #618
---
datasette/database.py | 13 ++++++++++++-
tests/fixtures.py | 1 +
tests/test_html.py | 33 +++++++++++++++++++++++++++++++++
3 files changed, 46 insertions(+), 1 deletion(-)
diff --git a/datasette/database.py b/datasette/database.py
index 7e6f7245..3a1cea94 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -232,7 +232,18 @@ class Database:
)
if not table_definition_rows:
return None
- return table_definition_rows[0][0]
+ bits = [table_definition_rows[0][0] + ";"]
+ # Add on any indexes
+ index_rows = list(
+ await self.ds.execute(
+ self.name,
+ "select sql from sqlite_master where tbl_name = :n and type='index' and sql is not null",
+ {"n": table},
+ )
+ )
+ for index_row in index_rows:
+ bits.append(index_row[0] + ";")
+ return "\n".join(bits)
async def get_view_definition(self, view):
return await self.get_table_definition(view, "view")
diff --git a/tests/fixtures.py b/tests/fixtures.py
index dcc414bf..87e66f99 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -514,6 +514,7 @@ CREATE TABLE compound_three_primary_keys (
content text,
PRIMARY KEY (pk1, pk2, pk3)
);
+CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_keys(content);
CREATE TABLE foreign_key_references (
pk varchar(30) primary key,
diff --git a/tests/test_html.py b/tests/test_html.py
index 7f1af86e..44627cdc 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -119,6 +119,39 @@ def test_row_strange_table_name_with_url_hash(app_client_with_hash):
assert response.status == 200
+@pytest.mark.parametrize(
+ "path,expected_definition_sql",
+ [
+ (
+ "/fixtures/facet_cities",
+ """
+CREATE TABLE facet_cities (
+ id integer primary key,
+ name text
+);
+ """.strip(),
+ ),
+ (
+ "/fixtures/compound_three_primary_keys",
+ """
+CREATE TABLE compound_three_primary_keys (
+ pk1 varchar(30),
+ pk2 varchar(30),
+ pk3 varchar(30),
+ content text,
+ PRIMARY KEY (pk1, pk2, pk3)
+);
+CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_keys(content);
+ """.strip(),
+ ),
+ ],
+)
+def test_definition_sql(path, expected_definition_sql, app_client):
+ response = app_client.get(path)
+ pre = Soup(response.body, "html.parser").select_one("pre.wrapped-sql")
+ assert expected_definition_sql == pre.string
+
+
def test_table_cell_truncation():
for client in make_app_client(config={"truncate_cells_html": 5}):
response = client.get("/fixtures/facetable")
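
The definition shown on the table page is now the ``CREATE TABLE`` statement plus any named indexes, both read from ``sqlite_master`` (auto-created indexes have a null ``sql`` column, hence the filter). A standalone sketch of that logic, with ``table_definition`` as a hypothetical stand-in for the ``Database`` method:

.. code-block:: python

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript(
        """
        create table facet_cities (id integer primary key, name text);
        create index idx_facet_cities_name on facet_cities(name);
        """
    )

    def table_definition(conn, table):
        rows = conn.execute(
            "select sql from sqlite_master where name = :n and type = 'table'",
            {"n": table},
        ).fetchall()
        if not rows:
            return None
        bits = [rows[0][0] + ";"]
        # Append any named indexes; autoindexes have sql = null
        index_rows = conn.execute(
            "select sql from sqlite_master where tbl_name = :n "
            "and type = 'index' and sql is not null",
            {"n": table},
        )
        for (index_sql,) in index_rows:
            bits.append(index_sql + ";")
        return "\n".join(bits)

    print(table_definition(conn, "facet_cities"))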
From 1c063fae9dba70f70244db010d55a18846640f07 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 10 Nov 2019 19:45:34 -0800
Subject: [PATCH 0262/2303] Test against Python 3.8 in Travis (#623)
* Test against Python 3.8 in Travis
* Avoid current_task warnings in Python 3.8
---
.travis.yml | 1 +
datasette/tracer.py | 9 ++++++++-
2 files changed, 9 insertions(+), 1 deletion(-)
diff --git a/.travis.yml b/.travis.yml
index 29388bc1..a6b15b7e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -5,6 +5,7 @@ dist: xenial
python:
- "3.6"
- "3.7"
+ - "3.8"
- "3.5"
# Executed for 3.5 AND 3.5 as the first "test" stage:
diff --git a/datasette/tracer.py b/datasette/tracer.py
index e46a6fda..a638b140 100644
--- a/datasette/tracer.py
+++ b/datasette/tracer.py
@@ -9,12 +9,19 @@ tracers = {}
TRACE_RESERVED_KEYS = {"type", "start", "end", "duration_ms", "traceback"}
+# asyncio.current_task was introduced in Python 3.7:
+for obj in (asyncio, asyncio.Task):
+ current_task = getattr(obj, "current_task", None)
+ if current_task is not None:
+ break
+
+
def get_task_id():
try:
loop = asyncio.get_event_loop()
except RuntimeError:
return None
- return id(asyncio.Task.current_task(loop=loop))
+ return id(current_task(loop=loop))
@contextmanager
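
The shim binds whichever ``current_task`` the interpreter provides: ``asyncio.current_task`` on Python 3.7+, else the older ``asyncio.Task.current_task``. A quick demonstration (``asyncio.run`` here needs 3.7+; the shim itself is what keeps 3.6 working):

.. code-block:: python

    import asyncio

    # Prefer asyncio.current_task (added in 3.7); fall back to Task.current_task
    for obj in (asyncio, asyncio.Task):
        current_task = getattr(obj, "current_task", None)
        if current_task is not None:
            break

    async def main():
        print(id(current_task()))  # unique id for the currently running task

    asyncio.run(main())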
From 42ee3e16a9ba7cc513b8da944cc1609a5407cf42 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 10 Nov 2019 20:19:01 -0800
Subject: [PATCH 0263/2303] Bump pint to 0.9 (#624)
This fixes 2 deprecation warnings in Python 3.8 - refs #623 #622
---
setup.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.py b/setup.py
index 9ae56306..e8229de1 100644
--- a/setup.py
+++ b/setup.py
@@ -45,7 +45,7 @@ setup(
"click-default-group~=1.2.1",
"Jinja2~=2.10.1",
"hupper~=1.0",
- "pint~=0.8.1",
+ "pint~=0.9",
"pluggy~=0.12.0",
"uvicorn~=0.8.4",
"aiofiles~=0.4.0",
From 5bc2570121aea8141ff88790e214765472882b08 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 20:45:12 -0800
Subject: [PATCH 0264/2303] Include uvicorn version in /-/versions, refs #622
---
datasette/app.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/datasette/app.py b/datasette/app.py
index 203e0991..4ba4adfb 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -12,6 +12,7 @@ from pathlib import Path
import click
from markupsafe import Markup
from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader
+import uvicorn
from .views.base import DatasetteError, ureg, AsgiRouter
from .views.database import DatabaseDownload, DatabaseView
@@ -433,6 +434,7 @@ class Datasette:
},
"datasette": datasette_version,
"asgi": "3.0",
+ "uvicorn": uvicorn.__version__,
"sqlite": {
"version": sqlite_version,
"fts_versions": fts_versions,
From cf7776d36fbacefa874cbd6e5fcdc9fff7661203 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:09:11 -0800
Subject: [PATCH 0265/2303] Support Python 3.8, stop supporting Python 3.5
(#627)
* Upgrade to uvicorn 0.10.4
* Drop support for Python 3.5
* Bump all dependencies to latest releases
* Update docs to reflect we no longer support 3.5
* Removed code that skipped black unit test on 3.5
Closes #622
---
.travis.yml | 1 -
README.md | 2 +-
docs/contributing.rst | 2 +-
docs/installation.rst | 7 +++++--
setup.py | 20 ++++++++++----------
tests/test_black.py | 7 +------
6 files changed, 18 insertions(+), 21 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index a6b15b7e..0fc87d93 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -6,7 +6,6 @@ python:
- "3.6"
- "3.7"
- "3.8"
- - "3.5"
# Executed for 3.5 AND 3.5 as the first "test" stage:
script:
diff --git a/README.md b/README.md
index 9f85f1ba..14c9cfd6 100644
--- a/README.md
+++ b/README.md
@@ -69,7 +69,7 @@ sqlite-utils: a Python library and CLI tool for building SQLite databases](https
pip3 install datasette
-Datasette requires Python 3.5 or higher. We also have [detailed installation instructions](https://datasette.readthedocs.io/en/stable/installation.html) covering other options such as Docker.
+Datasette requires Python 3.6 or higher. We also have [detailed installation instructions](https://datasette.readthedocs.io/en/stable/installation.html) covering other options such as Docker.
## Basic usage
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 43834edc..078fd841 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -18,7 +18,7 @@ General guidelines
Setting up a development environment
------------------------------------
-If you have Python 3.5 or higher installed on your computer (on OS X the easiest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps.
+If you have Python 3.6 or higher installed on your computer (on OS X the easiest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps.
If you want to use GitHub to publish your changes, first `create a fork of datasette `__ under your own GitHub account.
diff --git a/docs/installation.rst b/docs/installation.rst
index e65d8ee3..9ee7eb4e 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -69,16 +69,19 @@ You can now run the new custom image like so::
You can confirm that the plugins are installed by visiting
http://127.0.0.1:8001/-/plugins
-
Install using pip
-----------------
-To run Datasette without Docker you will need Python 3.5 or higher.
+To run Datasette without Docker you will need Python 3.6 or higher.
You can install Datasette and its dependencies using ``pip``::
pip install datasette
+The last version to support Python 3.5 was 0.30.2 - you can install that version like so::
+
+ pip install datasette==0.30.2
+
If you want to install Datasette in its own virtual environment, use this::
python -mvenv datasette-venv
diff --git a/setup.py b/setup.py
index e8229de1..7a4cdcb3 100644
--- a/setup.py
+++ b/setup.py
@@ -42,12 +42,12 @@ setup(
include_package_data=True,
install_requires=[
"click~=7.0",
- "click-default-group~=1.2.1",
- "Jinja2~=2.10.1",
- "hupper~=1.0",
+ "click-default-group~=1.2.2",
+ "Jinja2~=2.10.3",
+ "hupper~=1.9",
"pint~=0.9",
- "pluggy~=0.12.0",
- "uvicorn~=0.8.4",
+ "pluggy~=0.13.0",
+ "uvicorn~=0.10.4",
"aiofiles~=0.4.0",
],
entry_points="""
@@ -58,11 +58,11 @@ setup(
extras_require={
"docs": ["sphinx_rtd_theme", "sphinx-autobuild"],
"test": [
- "pytest~=5.0.0",
+ "pytest~=5.2.2",
"pytest-asyncio~=0.10.0",
- "aiohttp~=3.5.3",
- "beautifulsoup4~=4.6.1",
- "asgiref~=3.1.2",
+ "aiohttp~=3.6.2",
+ "beautifulsoup4~=4.8.1",
+ "asgiref~=3.2.3",
]
+ maybe_black,
},
@@ -74,8 +74,8 @@ setup(
"Intended Audience :: End Users/Desktop",
"Topic :: Database",
"License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.6",
- "Programming Language :: Python :: 3.5",
],
)
diff --git a/tests/test_black.py b/tests/test_black.py
index 68e2dcc0..b5bfcfd0 100644
--- a/tests/test_black.py
+++ b/tests/test_black.py
@@ -1,3 +1,4 @@
+import black
from click.testing import CliRunner
from pathlib import Path
import pytest
@@ -6,13 +7,7 @@ import sys
code_root = Path(__file__).parent.parent
-@pytest.mark.skipif(
- sys.version_info[:2] < (3, 6), reason="Black requires Python 3.6 or later"
-)
def test_black():
- # Do not import at top of module because Python 3.5 will not have it installed
- import black
-
runner = CliRunner()
result = runner.invoke(
black.main, [str(code_root / "tests"), str(code_root / "datasette"), "--check"]
From 76fc6a9c7317ce4fbf3cc3d327c849f7274d960a Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:17:59 -0800
Subject: [PATCH 0266/2303] Release notes for 0.31
---
docs/changelog.rst | 22 ++++++++++++++++++++++
1 file changed, 22 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index f4761efe..6e260be9 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,28 @@
Changelog
=========
+.. _v0_31:
+
+0.31 (2019-11-11)
+-----------------
+
+This version adds compatibility with Python 3.8 and breaks compatibility with Python 3.5.
+
+If you are still running Python 3.5 you should stick with ``0.30.2``, which you can install like this::
+
+ pip install datasette==0.30.2
+
+- Format SQL button now works with read-only SQL queries - thanks, Tobias Kunze (`#602 <https://github.com/simonw/datasette/issues/602>`__)
+- New ``?column__notin=x,y,z`` filter for table views (`#614 <https://github.com/simonw/datasette/issues/614>`__)
+- Table view now uses ``select col1, col2, col3`` instead of ``select *``
+- Database filenames can now contain spaces - thanks, Tobias Kunze (`#590 <https://github.com/simonw/datasette/issues/590>`__)
+- Removed obsolete ``?_group_count=col`` feature (`#504 <https://github.com/simonw/datasette/issues/504>`__)
+- Improved user interface and documentation for ``datasette publish cloudrun`` (`#608 <https://github.com/simonw/datasette/issues/608>`__)
+- Tables with indexes now show the `` CREATE INDEX`` statements on the table page (`#618 <https://github.com/simonw/datasette/issues/618>`__)
+- Current version of `uvicorn <https://www.uvicorn.org/>`__ is now shown on ``/-/versions``
+- Python 3.8 is now supported! (`#622 <https://github.com/simonw/datasette/issues/622>`__)
+- Python 3.5 is no longer supported.
+
.. _v0_30_2:
0.30.2 (2019-11-02)
From c633c035dc8d4c60f1d13cb074918406bbdb3734 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:26:56 -0800
Subject: [PATCH 0267/2303] Datasette 0.31 in news section
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 14c9cfd6..05995a74 100644
--- a/README.md
+++ b/README.md
@@ -21,6 +21,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+ * 11th November 2019: [Datasette 0.31](https://datasette.readthedocs.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5.
* 18th October 2019: [Datasette 0.30](https://datasette.readthedocs.io/en/stable/changelog.html#v0-30)
* 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail.
* 7th July 2019: [Datasette 0.29](https://datasette.readthedocs.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more...
From 7f89928062b1a1fdb2625a946f7cd5161e597401 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:33:51 -0800
Subject: [PATCH 0268/2303] Removed code that conditionally installs black
Since we no longer support Python 3.5 we don't need this any more.
---
setup.py | 9 ++-------
1 file changed, 2 insertions(+), 7 deletions(-)
diff --git a/setup.py b/setup.py
index 7a4cdcb3..15284779 100644
--- a/setup.py
+++ b/setup.py
@@ -22,11 +22,6 @@ def get_version():
return g["__version__"]
-# Only install black on Python 3.6 or higher
-maybe_black = []
-if sys.version_info > (3, 6):
- maybe_black = ["black~=19.10b0"]
-
setup(
name="datasette",
version=versioneer.get_version(),
@@ -63,8 +58,8 @@ setup(
"aiohttp~=3.6.2",
"beautifulsoup4~=4.8.1",
"asgiref~=3.2.3",
- ]
- + maybe_black,
+ "black~=19.10b0",
+ ],
},
tests_require=["datasette[test]"],
classifiers=[
From 1c518680e9692a9a77022af54f3de3e77fb1aaf4 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:57:48 -0800
Subject: [PATCH 0269/2303] Final steps: build stable branch of Read The Docs
---
docs/contributing.rst | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 078fd841..48930332 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -150,4 +150,7 @@ Wait long enough for Travis to build and deploy the demo version of that commit
git tag 0.25.2
git push --tags
-Once the release is out, you can manually update https://github.com/simonw/datasette/releases
+Final steps once the release has deployed to https://pypi.org/project/datasette/
+
+* Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases
+* Manually kick off a build of the `stable` branch on Read The Docs: https://readthedocs.org/projects/datasette/builds/
From f554be39fc14ddc18921ca29d3920d55aad03d46 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 22:00:13 -0800
Subject: [PATCH 0270/2303] ReST fix
---
docs/changelog.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 6e260be9..763b178e 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -21,7 +21,7 @@ If you are still running Python 3.5 you should stick with ``0.30.2``, which you
- Database filenames can now contain spaces - thanks, Tobias Kunze (`#590 <https://github.com/simonw/datasette/issues/590>`__)
- Removed obsolete ``?_group_count=col`` feature (`#504 <https://github.com/simonw/datasette/issues/504>`__)
- Improved user interface and documentation for ``datasette publish cloudrun`` (`#608 <https://github.com/simonw/datasette/issues/608>`__)
-- Tables with indexes now show the `` CREATE INDEX`` statements on the table page (`#618 <https://github.com/simonw/datasette/issues/618>`__)
+- Tables with indexes now show the ``CREATE INDEX`` statements on the table page (`#618 <https://github.com/simonw/datasette/issues/618>`__)
- Current version of `uvicorn <https://www.uvicorn.org/>`__ is now shown on ``/-/versions``
- Python 3.8 is now supported! (`#622 <https://github.com/simonw/datasette/issues/622>`__)
- Python 3.5 is no longer supported.
From d977fbadf70a96bf2eea1407d01f99d98e092dec Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 22:03:09 -0800
Subject: [PATCH 0271/2303] datasette publish uses python:3.8 base Docker
image, closes #629
---
datasette/utils/__init__.py | 2 +-
tests/test_publish_cloudrun.py | 2 +-
tests/test_publish_now.py | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 3d28a36b..b8df48cf 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -306,7 +306,7 @@ def make_dockerfile(
install = ["datasette"] + list(install)
return """
-FROM python:3.6
+FROM python:3.8
COPY . /app
WORKDIR /app
{spatialite_extras}
diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py
index a038b60e..c5b18cdf 100644
--- a/tests/test_publish_cloudrun.py
+++ b/tests/test_publish_cloudrun.py
@@ -128,7 +128,7 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which):
.split("\n====================\n")[0]
.strip()
)
- expected = """FROM python:3.6
+ expected = """FROM python:3.8
COPY . /app
WORKDIR /app
diff --git a/tests/test_publish_now.py b/tests/test_publish_now.py
index 72aa71db..27fd1245 100644
--- a/tests/test_publish_now.py
+++ b/tests/test_publish_now.py
@@ -138,7 +138,7 @@ def test_publish_now_plugin_secrets(mock_run, mock_which):
.split("\n====================\n")[0]
.strip()
)
- expected = """FROM python:3.6
+ expected = """FROM python:3.8
COPY . /app
WORKDIR /app
From 16265f6a1a7c547e3925e0fc2d6b88754afb0435 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 12 Nov 2019 18:18:04 -0800
Subject: [PATCH 0272/2303] Release notes for 0.31.1
---
docs/changelog.rst | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 763b178e..746f5b42 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,13 @@
Changelog
=========
+.. _v0_31_1:
+
+0.31.1 (2019-11-12)
+-------------------
+
+- Deploymens created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 <https://github.com/simonw/datasette/issues/629>`__)
+
.. _v0_31:
0.31 (2019-11-11)
From a22c7761b61baa61b8e3da7d30887468d61d6b83 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 12 Nov 2019 18:18:39 -0800
Subject: [PATCH 0273/2303] Fixed typo in release notes
---
docs/changelog.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 746f5b42..e527518e 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -9,7 +9,7 @@ Changelog
0.31.1 (2019-11-12)
-------------------
-- Deploymens created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 <https://github.com/simonw/datasette/issues/629>`__)
+- Deployments created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 <https://github.com/simonw/datasette/issues/629>`__)
.. _v0_31:
From bbd00e903cdd49067ecdbdb60a4d225833a44b05 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 12 Nov 2019 18:38:13 -0800
Subject: [PATCH 0274/2303] Badge linking to datasette on hub.docker.com
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 05995a74..9a22c2b2 100644
--- a/README.md
+++ b/README.md
@@ -6,6 +6,7 @@
[](http://datasette.readthedocs.io/en/latest/?badge=latest)
[](https://github.com/simonw/datasette/blob/master/LICENSE)
[](https://black.readthedocs.io/en/stable/)
+[](https://hub.docker.com/r/datasetteproject/datasette)
*A tool for exploring and publishing data*
From 848dec4deb0d3c140a4e0394cac45fbb2593349b Mon Sep 17 00:00:00 2001
From: Stanley Zheng
Date: Tue, 12 Nov 2019 23:28:42 -0500
Subject: [PATCH 0275/2303] Fix for datasette publish with just --source_url
(#631)
Closes #572
---
datasette/templates/_description_source_license.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/datasette/templates/_description_source_license.html b/datasette/templates/_description_source_license.html
index 3327706e..a2bc18f2 100644
--- a/datasette/templates/_description_source_license.html
+++ b/datasette/templates/_description_source_license.html
@@ -21,7 +21,7 @@
{% endif %}{{ metadata.source or metadata.source_url }}{% if metadata.source_url %}{% endif %}
{% endif %}
- {% if metadata.about or metadata.about_url %}{% if metadata.license or metadata.license_url or metadata.source or metadat.source_url %}·{% endif %}
+ {% if metadata.about or metadata.about_url %}{% if metadata.license or metadata.license_url or metadata.source or metadata.source_url %}·{% endif %}
About: {% if metadata.about_url %}
{% endif %}{{ metadata.about or metadata.about_url }}{% if metadata.about_url %}{% endif %}
From f52451023025579ae9a13de4a7f00d69200184cd Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 13 Nov 2019 08:42:47 -0800
Subject: [PATCH 0276/2303] Fix "publish heroku" + upgrade to use Python 3.8.0
Closes #633. Closes #632.
---
datasette/publish/heroku.py | 7 +++++--
tests/test_publish_heroku.py | 9 +++++++--
2 files changed, 12 insertions(+), 4 deletions(-)
diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py
index 34d1f773..e75f76df 100644
--- a/datasette/publish/heroku.py
+++ b/datasette/publish/heroku.py
@@ -72,7 +72,10 @@ def publish_subcommand(publish):
"about_url": about_url,
}
- environment_variables = {}
+ environment_variables = {
+ # Avoid uvicorn error: https://github.com/simonw/datasette/issues/633
+ "WEB_CONCURRENCY": "1"
+ }
if plugin_secret:
extra_metadata["plugins"] = {}
for plugin_name, plugin_setting, setting_value in plugin_secret:
@@ -164,7 +167,7 @@ def temporary_heroku_directory(
if metadata_content:
open("metadata.json", "w").write(json.dumps(metadata_content, indent=2))
- open("runtime.txt", "w").write("python-3.6.8")
+ open("runtime.txt", "w").write("python-3.8.0")
if branch:
install = [
diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py
index 4cd66219..87386e93 100644
--- a/tests/test_publish_heroku.py
+++ b/tests/test_publish_heroku.py
@@ -57,8 +57,13 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which):
open("test.db", "w").write("data")
result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"])
assert 0 == result.exit_code, result.output
- mock_call.assert_called_once_with(
- ["heroku", "builds:create", "-a", "f", "--include-vcs-ignore"]
+ mock_call.assert_has_calls(
+ [
+ mock.call(["heroku", "config:set", "-a", "f", "WEB_CONCURRENCY=1",]),
+ mock.call(
+ ["heroku", "builds:create", "-a", "f", "--include-vcs-ignore"]
+ ),
+ ]
)
From b51f258d00bb3c3b401f15d46a1fbd50394dbe1c Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 13 Nov 2019 08:48:36 -0800
Subject: [PATCH 0277/2303] Release notes for 0.31.2
---
docs/changelog.rst | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index e527518e..f4958399 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,15 @@
Changelog
=========
+.. _v0_31_2:
+
+0.31.2 (2019-11-13)
+-------------------
+
+- Fixed a bug where ``datasette publish heroku`` applications failed to start (`#633 <https://github.com/simonw/datasette/issues/633>`__)
+- Fix for ``datasette publish`` with just ``--source_url`` - thanks, Stanley Zheng (`#572 <https://github.com/simonw/datasette/issues/572>`__)
+- Deployments to Heroku now use Python 3.8.0 (`#632 <https://github.com/simonw/datasette/issues/632>`__)
+
.. _v0_31_1:
0.31.1 (2019-11-12)
From 8c642f04e0608bf537fdd1f76d64c2367fb04d57 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Nov 2019 15:14:22 -0800
Subject: [PATCH 0278/2303] Render templates using Jinja async mode
Closes #628
---
datasette/app.py | 6 ++++--
datasette/views/base.py | 2 +-
docs/plugins.rst | 23 ++++++++++++-----------
tests/fixtures.py | 8 +++++++-
tests/test_plugins.py | 18 ++++++++++++++++++
tests/test_templates/show_json.html | 1 +
6 files changed, 43 insertions(+), 15 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 4ba4adfb..02fcf303 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -583,7 +583,9 @@ class Datasette:
),
]
)
- self.jinja_env = Environment(loader=template_loader, autoescape=True)
+ self.jinja_env = Environment(
+ loader=template_loader, autoescape=True, enable_async=True
+ )
self.jinja_env.filters["escape_css_string"] = escape_css_string
self.jinja_env.filters["quote_plus"] = lambda u: urllib.parse.quote_plus(u)
self.jinja_env.filters["escape_sqlite"] = escape_sqlite
@@ -730,5 +732,5 @@ class DatasetteRouter(AsgiRouter):
else:
template = self.ds.jinja_env.select_template(templates)
await asgi_send_html(
- send, template.render(info), status=status, headers=headers
+ send, await template.render_async(info), status=status, headers=headers
)
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 062c6956..5182479c 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -139,7 +139,7 @@ class BaseView(AsgiView):
extra_template_vars.update(extra_vars)
return Response.html(
- template.render(
+ await template.render_async(
{
**context,
**{
diff --git a/docs/plugins.rst b/docs/plugins.rst
index 6df7ff6a..e5a3d7dd 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -629,7 +629,9 @@ Function that returns a dictionary
If you return a function it will be executed. If it returns a dictionary those values will be merged into the template context.
Function that returns an awaitable function that returns a dictionary
- You can also return a function which returns an awaitable function which returns a dictionary. This means you can execute additional SQL queries using ``datasette.execute()``.
+ You can also return a function which returns an awaitable function which returns a dictionary.
+
+Datasette runs Jinja2 in `async mode <https://jinja.palletsprojects.com/en/2.10.x/api/#async-support>`__, which means you can add awaitable functions to the template scope and they will be automatically awaited when they are rendered by the template.
Here's an example plugin that returns an authentication object from the ASGI scope:
@@ -641,20 +643,19 @@ Here's an example plugin that returns an authentication object from the ASGI sco
"auth": request.scope.get("auth")
}
-And here's an example which returns the current version of SQLite:
+And here's an example which adds a ``sql_first(sql_query)`` function which executes a SQL statement and returns the first column of the first row of results:
.. code-block:: python
@hookimpl
- def extra_template_vars(datasette):
- async def inner():
- first_db = list(datasette.databases.keys())[0]
- return {
- "sqlite_version": (
- await datasette.execute(first_db, "select sqlite_version()")
- ).rows[0][0]
- }
- return inner
+ def extra_template_vars(datasette, database):
+ async def sql_first(sql, dbname=None):
+ dbname = dbname or database or next(iter(datasette.databases.keys()))
+ return (await datasette.execute(dbname, sql)).rows[0][0]
+        return {"sql_first": sql_first}
+
+You can then use the new function in a template like so::
+
+ SQLite version: {{ sql_first("select sqlite_version()") }}
.. _plugin_register_output_renderer:
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 87e66f99..3e4203f7 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -446,13 +446,19 @@ def render_cell(value, database):
@hookimpl
def extra_template_vars(template, database, table, view_name, request, datasette):
+ async def query_database(sql):
+ first_db = list(datasette.databases.keys())[0]
+ return (
+ await datasette.execute(first_db, sql)
+ ).rows[0][0]
async def inner():
return {
"extra_template_vars_from_awaitable": json.dumps({
"template": template,
"scope_path": request.scope["path"],
"awaitable": True,
- }, default=lambda b: b.decode("utf8"))
+ }, default=lambda b: b.decode("utf8")),
+ "query_database": query_database,
}
return inner
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index b1c7fd9a..42d063f4 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -1,5 +1,6 @@
from bs4 import BeautifulSoup as Soup
from .fixtures import app_client, make_app_client, TEMP_PLUGIN_SECRET_FILE # noqa
+from datasette.utils import sqlite3
import base64
import json
import os
@@ -214,3 +215,20 @@ def test_plugins_extra_template_vars(restore_working_directory):
"awaitable": True,
"scope_path": "/-/metadata",
} == extra_template_vars_from_awaitable
+
+
+def test_plugins_async_template_function(restore_working_directory):
+ for client in make_app_client(
+ template_dir=str(pathlib.Path(__file__).parent / "test_templates")
+ ):
+ response = client.get("/-/metadata")
+ assert response.status == 200
+ extra_from_awaitable_function = (
+ Soup(response.body, "html.parser")
+ .select("pre.extra_from_awaitable_function")[0]
+ .text
+ )
+ expected = (
+ sqlite3.connect(":memory:").execute("select sqlite_version()").fetchone()[0]
+ )
+ assert expected == extra_from_awaitable_function
diff --git a/tests/test_templates/show_json.html b/tests/test_templates/show_json.html
index bbf1bc06..cff04fb4 100644
--- a/tests/test_templates/show_json.html
+++ b/tests/test_templates/show_json.html
@@ -5,4 +5,5 @@
Test data for extra_template_vars:
{{ extra_template_vars|safe }}
{{ extra_template_vars_from_awaitable|safe }}
+<pre class="extra_from_awaitable_function">{{ query_database("select sqlite_version();") }}</pre>
{% endblock %}
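
Jinja's async mode is what makes template functions like ``query_database()`` above work: with ``enable_async=True``, any coroutine called from a template is awaited automatically during rendering. A self-contained sketch (``sql_first`` mirrors the docs example; its body is stand-in code, not Datasette's ``execute()``):

.. code-block:: python

    import asyncio
    import sqlite3

    from jinja2 import Environment

    env = Environment(enable_async=True)
    template = env.from_string(
        "SQLite version: {{ sql_first('select sqlite_version()') }}"
    )

    async def sql_first(sql):
        # Stand-in for (await datasette.execute(db_name, sql)).rows[0][0]
        return sqlite3.connect(":memory:").execute(sql).fetchone()[0]

    print(asyncio.run(template.render_async(sql_first=sql_first)))
    # SQLite version: 3.x.y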
From a95bedb9c423fa6d772c93ef47bc40f13a5bea50 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Nov 2019 15:18:53 -0800
Subject: [PATCH 0279/2303] Release notes for 0.32
---
docs/changelog.rst | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index f4958399..2f909364 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,13 @@
Changelog
=========
+.. _v0_32:
+
+0.32 (2019-11-14)
+-----------------
+
+Datasette now renders templates using `Jinja async mode `__. This makes it easy for plugins to provide custom template functions that perform asynchronous actions, for example the new `datasette-template-sql `__ plugin which allows custom templates to directly execute SQL queries and render their results. (`#628 `__)
+
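A plugin like the one mentioned above builds on the ``extra_template_vars`` hook. A rough sketch of how such a plugin could expose an async ``sql()`` template function — illustrative only, not the plugin's actual source — might look like this:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def extra_template_vars(datasette, database):
        async def sql(sql_query, dbname=None):
            # Fall back to the database for the current page, then to
            # the first attached database
            dbname = dbname or database or next(iter(datasette.databases.keys()))
            return (await datasette.execute(dbname, sql_query)).rows

        return {"sql": sql}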
.. _v0_31_2:
0.31.2 (2019-11-13)
From 8fc9a5d877d26dbf2654e125f407ddd2fd767335 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Nov 2019 15:46:37 -0800
Subject: [PATCH 0280/2303] Datasette 0.32 and datasette-template-sql in news
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 9a22c2b2..030c507f 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+ * 14th November 2019: [Datasette 0.32](https://datasette.readthedocs.io/en/stable/changelog.html#v0-32) now uses asynchronous rendering in Jinja templates, which means template functions can perform asynchronous operations such as executing SQL queries. [datasette-template-sql](https://github.com/simonw/datasette-template-sql) is a new plugin that uses this capability to add a new custom `sql(sql_query)` template function.
* 11th November 2019: [Datasette 0.31](https://datasette.readthedocs.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5.
* 18th October 2019: [Datasette 0.30](https://datasette.readthedocs.io/en/stable/changelog.html#v0-30)
* 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail.
From a9909c29ccac771c23c2ef22b89d10697b5256b9 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 15 Nov 2019 14:49:45 -0800
Subject: [PATCH 0281/2303] Move .execute() from Datasette to Database
Refs #569 - I split this change out from #579
---
datasette/app.py | 90 ++++++---------------------
datasette/database.py | 137 +++++++++++++++++++++++++++++++-----------
2 files changed, 121 insertions(+), 106 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 02fcf303..119d0e19 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -24,13 +24,11 @@ from .database import Database
from .utils import (
QueryInterrupted,
- Results,
escape_css_string,
escape_sqlite,
get_plugins,
module_from_path,
sqlite3,
- sqlite_timelimit,
to_css_class,
)
from .utils.asgi import (
@@ -42,13 +40,12 @@ from .utils.asgi import (
asgi_send_json,
asgi_send_redirect,
)
-from .tracer import trace, AsgiTracer
+from .tracer import AsgiTracer
from .plugins import pm, DEFAULT_PLUGINS
from .version import __version__
app_root = Path(__file__).parent.parent
-connections = threading.local()
MEMORY = object()
ConfigOption = collections.namedtuple("ConfigOption", ("name", "default", "help"))
@@ -336,6 +333,25 @@ class Datasette:
# pylint: disable=no-member
pm.hook.prepare_connection(conn=conn)
+ async def execute(
+ self,
+ db_name,
+ sql,
+ params=None,
+ truncate=False,
+ custom_time_limit=None,
+ page_size=None,
+ log_sql_errors=True,
+ ):
+ return await self.databases[db_name].execute(
+ sql,
+ params=params,
+ truncate=truncate,
+ custom_time_limit=custom_time_limit,
+ page_size=page_size,
+ log_sql_errors=log_sql_errors,
+ )
+
async def expand_foreign_keys(self, database, table, column, values):
"Returns dict mapping (column, value) -> label"
labeled_fks = {}
@@ -477,72 +493,6 @@ class Datasette:
.get(table, {})
)
- async def execute_against_connection_in_thread(self, db_name, fn):
- def in_thread():
- conn = getattr(connections, db_name, None)
- if not conn:
- conn = self.databases[db_name].connect()
- self.prepare_connection(conn)
- setattr(connections, db_name, conn)
- return fn(conn)
-
- return await asyncio.get_event_loop().run_in_executor(self.executor, in_thread)
-
- async def execute(
- self,
- db_name,
- sql,
- params=None,
- truncate=False,
- custom_time_limit=None,
- page_size=None,
- log_sql_errors=True,
- ):
- """Executes sql against db_name in a thread"""
- page_size = page_size or self.page_size
-
- def sql_operation_in_thread(conn):
- time_limit_ms = self.sql_time_limit_ms
- if custom_time_limit and custom_time_limit < time_limit_ms:
- time_limit_ms = custom_time_limit
-
- with sqlite_timelimit(conn, time_limit_ms):
- try:
- cursor = conn.cursor()
- cursor.execute(sql, params or {})
- max_returned_rows = self.max_returned_rows
- if max_returned_rows == page_size:
- max_returned_rows += 1
- if max_returned_rows and truncate:
- rows = cursor.fetchmany(max_returned_rows + 1)
- truncated = len(rows) > max_returned_rows
- rows = rows[:max_returned_rows]
- else:
- rows = cursor.fetchall()
- truncated = False
- except sqlite3.OperationalError as e:
- if e.args == ("interrupted",):
- raise QueryInterrupted(e, sql, params)
- if log_sql_errors:
- print(
- "ERROR: conn={}, sql = {}, params = {}: {}".format(
- conn, repr(sql), params, e
- )
- )
- raise
-
- if truncate:
- return Results(rows, truncated, cursor.description)
-
- else:
- return Results(rows, False, cursor.description)
-
- with trace("sql", database=db_name, sql=sql.strip(), params=params):
- results = await self.execute_against_connection_in_thread(
- db_name, sql_operation_in_thread
- )
- return results
-
def register_renderers(self):
""" Register output renderers which output data in custom formats. """
# Built-in renderers
diff --git a/datasette/database.py b/datasette/database.py
index 3a1cea94..9a8ae4d4 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -1,17 +1,25 @@
+import asyncio
+import contextlib
from pathlib import Path
+import threading
+from .tracer import trace
from .utils import (
QueryInterrupted,
+ Results,
detect_fts,
detect_primary_keys,
detect_spatialite,
get_all_foreign_keys,
get_outbound_foreign_keys,
+ sqlite_timelimit,
sqlite3,
table_columns,
)
from .inspect import inspect_hash
+connections = threading.local()
+
class Database:
def __init__(self, ds, path=None, is_mutable=False, is_memory=False):
@@ -45,6 +53,73 @@ class Database:
"file:{}?{}".format(self.path, qs), uri=True, check_same_thread=False
)
+ async def execute_against_connection_in_thread(self, fn):
+ def in_thread():
+ conn = getattr(connections, self.name, None)
+ if not conn:
+ conn = self.connect()
+ self.ds.prepare_connection(conn)
+ setattr(connections, self.name, conn)
+ return fn(conn)
+
+ return await asyncio.get_event_loop().run_in_executor(
+ self.ds.executor, in_thread
+ )
+
+ async def execute(
+ self,
+ sql,
+ params=None,
+ truncate=False,
+ custom_time_limit=None,
+ page_size=None,
+ log_sql_errors=True,
+ ):
+ """Executes sql against db_name in a thread"""
+ page_size = page_size or self.ds.page_size
+
+ def sql_operation_in_thread(conn):
+ time_limit_ms = self.ds.sql_time_limit_ms
+ if custom_time_limit and custom_time_limit < time_limit_ms:
+ time_limit_ms = custom_time_limit
+
+ with sqlite_timelimit(conn, time_limit_ms):
+ try:
+ cursor = conn.cursor()
+ cursor.execute(sql, params or {})
+ max_returned_rows = self.ds.max_returned_rows
+ if max_returned_rows == page_size:
+ max_returned_rows += 1
+ if max_returned_rows and truncate:
+ rows = cursor.fetchmany(max_returned_rows + 1)
+ truncated = len(rows) > max_returned_rows
+ rows = rows[:max_returned_rows]
+ else:
+ rows = cursor.fetchall()
+ truncated = False
+ except sqlite3.OperationalError as e:
+ if e.args == ("interrupted",):
+ raise QueryInterrupted(e, sql, params)
+ if log_sql_errors:
+ print(
+ "ERROR: conn={}, sql = {}, params = {}: {}".format(
+ conn, repr(sql), params, e
+ )
+ )
+ raise
+
+ if truncate:
+ return Results(rows, truncated, cursor.description)
+
+ else:
+ return Results(rows, False, cursor.description)
+
+ with trace("sql", database=self.name, sql=sql.strip(), params=params):
+ results = await self.execute_against_connection_in_thread(
+ sql_operation_in_thread
+ )
+ return results
+
@property
def size(self):
if self.is_memory:
@@ -62,8 +137,7 @@ class Database:
for table in await self.table_names():
try:
table_count = (
- await self.ds.execute(
- self.name,
+ await self.execute(
"select count(*) from [{}]".format(table),
custom_time_limit=limit,
)
@@ -89,32 +163,30 @@ class Database:
return Path(self.path).stem
async def table_exists(self, table):
- results = await self.ds.execute(
- self.name,
- "select 1 from sqlite_master where type='table' and name=?",
- params=(table,),
+ results = await self.execute(
+ "select 1 from sqlite_master where type='table' and name=?", params=(table,)
)
return bool(results.rows)
async def table_names(self):
- results = await self.ds.execute(
- self.name, "select name from sqlite_master where type='table'"
+ results = await self.execute(
+ "select name from sqlite_master where type='table'"
)
return [r[0] for r in results.rows]
async def table_columns(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: table_columns(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: table_columns(conn, table)
)
async def primary_keys(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: detect_primary_keys(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: detect_primary_keys(conn, table)
)
async def fts_table(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: detect_fts(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: detect_fts(conn, table)
)
async def label_column_for_table(self, table):
@@ -124,8 +196,8 @@ class Database:
if explicit_label_column:
return explicit_label_column
# If a table has two columns, one of which is ID, then label_column is the other one
- column_names = await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: table_columns(conn, table)
+ column_names = await self.execute_against_connection_in_thread(
+ lambda conn: table_columns(conn, table)
)
# Is there a name or title column?
name_or_title = [c for c in column_names if c in ("name", "title")]
@@ -141,8 +213,8 @@ class Database:
return None
async def foreign_keys_for_table(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: get_outbound_foreign_keys(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: get_outbound_foreign_keys(conn, table)
)
async def hidden_table_names(self):
@@ -150,18 +222,17 @@ class Database:
hidden_tables = [
r[0]
for r in (
- await self.ds.execute(
- self.name,
+ await self.execute(
"""
select name from sqlite_master
where rootpage = 0
and sql like '%VIRTUAL TABLE%USING FTS%'
- """,
+ """
)
).rows
]
- has_spatialite = await self.ds.execute_against_connection_in_thread(
- self.name, detect_spatialite
+ has_spatialite = await self.execute_against_connection_in_thread(
+ detect_spatialite
)
if has_spatialite:
# Also hide Spatialite internal tables
@@ -178,13 +249,12 @@ class Database:
] + [
r[0]
for r in (
- await self.ds.execute(
- self.name,
+ await self.execute(
"""
select name from sqlite_master
where name like "idx_%"
and type = "table"
- """,
+ """
)
).rows
]
@@ -207,25 +277,20 @@ class Database:
return hidden_tables
async def view_names(self):
- results = await self.ds.execute(
- self.name, "select name from sqlite_master where type='view'"
- )
+ results = await self.execute("select name from sqlite_master where type='view'")
return [r[0] for r in results.rows]
async def get_all_foreign_keys(self):
- return await self.ds.execute_against_connection_in_thread(
- self.name, get_all_foreign_keys
- )
+ return await self.execute_against_connection_in_thread(get_all_foreign_keys)
async def get_outbound_foreign_keys(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: get_outbound_foreign_keys(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: get_outbound_foreign_keys(conn, table)
)
async def get_table_definition(self, table, type_="table"):
table_definition_rows = list(
- await self.ds.execute(
- self.name,
+ await self.execute(
"select sql from sqlite_master where name = :n and type=:t",
{"n": table, "t": type_},
)
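The refactored ``Database.execute()`` keeps the same underlying concurrency pattern as before: one SQLite connection per thread per database, with blocking queries dispatched to a thread pool via ``run_in_executor``. A stripped-down sketch of that pattern, with illustrative names and without the time-limit and truncation handling:

.. code-block:: python

    import asyncio
    import sqlite3
    import threading
    from concurrent.futures import ThreadPoolExecutor

    connections = threading.local()  # one connection per (thread, database)
    executor = ThreadPoolExecutor(max_workers=3)


    async def execute(db_path, sql, params=None):
        def in_thread():
            # Lazily create and cache a connection for this thread
            conn = getattr(connections, db_path, None)
            if conn is None:
                conn = sqlite3.connect(db_path, check_same_thread=False)
                setattr(connections, db_path, conn)
            return conn.execute(sql, params or {}).fetchall()

        # Run the blocking SQLite work off the event loop
        return await asyncio.get_event_loop().run_in_executor(executor, in_thread)


    async def main():
        rows = await execute(":memory:", "select sqlite_version()")
        print(rows[0][0])


    asyncio.run(main())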
From 440a70428c624f6e27b630026acdba2032acc9a7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 19 Nov 2019 15:01:10 -0800
Subject: [PATCH 0282/2303] Include rowid in filter select, closes #636
---
datasette/views/table.py | 6 +-----
tests/test_html.py | 24 ++++++++++++++++++++++++
2 files changed, 25 insertions(+), 5 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index a60a3941..516b474d 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -587,10 +587,6 @@ class TableView(RowTableShared):
columns = [r[0] for r in results.description]
rows = list(results.rows)
- filter_columns = columns[:]
- if use_rowid and filter_columns[0] == "rowid":
- filter_columns = filter_columns[1:]
-
# Expand labeled columns if requested
expanded_columns = []
expandable_columns = await self.expandable_columns(database, table)
@@ -720,7 +716,7 @@ class TableView(RowTableShared):
"use_rowid": use_rowid,
"filters": filters,
"display_columns": display_columns,
- "filter_columns": filter_columns,
+ "filter_columns": columns,
"display_rows": display_rows,
"facets_timed_out": facets_timed_out,
"sorted_facet_results": sorted(
diff --git a/tests/test_html.py b/tests/test_html.py
index 44627cdc..3b331f38 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -679,6 +679,30 @@ def test_table_html_foreign_key_custom_label_column(app_client):
]
+@pytest.mark.parametrize(
+ "path,expected_column_options",
+ [
+ ("/fixtures/infinity", ["- column -", "rowid", "value"]),
+ (
+ "/fixtures/primary_key_multiple_columns",
+ ["- column -", "id", "content", "content2"],
+ ),
+ ("/fixtures/compound_primary_key", ["- column -", "pk1", "pk2", "content"]),
+ ],
+)
+def test_table_html_filter_form_column_options(
+ path, expected_column_options, app_client
+):
+ response = app_client.get(path)
+ assert response.status == 200
+ form = Soup(response.body, "html.parser").find("form")
+ column_options = [
+ o.attrs.get("value") or o.string
+ for o in form.select("select[name=_filter_column] option")
+ ]
+ assert expected_column_options == column_options
+
+
def test_row_html_compound_primary_key(app_client):
response = app_client.get("/fixtures/compound_primary_key/a,b")
assert response.status == 200
From c16be14517414a94e1fdbd888e8a3ad0669e3bca Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 20 Nov 2019 10:02:07 -0800
Subject: [PATCH 0283/2303] How to upgrade using Docker
---
docs/installation.rst | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/docs/installation.rst b/docs/installation.rst
index 9ee7eb4e..c547f9e4 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -33,6 +33,10 @@ Now visit http://127.0.0.1:8001/ to access Datasette.
(You can download a copy of ``fixtures.db`` from
https://latest.datasette.io/fixtures.db )
+To upgrade to the most recent release of Datasette, run the following::
+
+ docker pull datasetteproject/datasette
+
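Note that pulling a newer image does not affect a container that is already running: stop it, then start a new one with the same ``docker run`` command used above — shown here for the ``fixtures.db`` example; adjust the paths and ports to match your own setup::

    docker run -p 8001:8001 -v `pwd`:/mnt \
        datasetteproject/datasette \
        datasette -p 8001 -h 0.0.0.0 /mnt/fixtures.db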
Loading Spatialite
~~~~~~~~~~~~~~~~~~
From fd137da7f83c117b18e189707a1039e319dd5c91 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 21 Nov 2019 16:56:55 -0800
Subject: [PATCH 0284/2303] Suggest column facet only if at least one count > 1
Fixes #638
---
datasette/facets.py | 5 ++++-
tests/fixtures.py | 33 +++++++++++++++++----------------
tests/test_api.py | 30 ++++++++++++++++++++++++++++--
tests/test_csv.py | 32 ++++++++++++++++----------------
4 files changed, 65 insertions(+), 35 deletions(-)
diff --git a/datasette/facets.py b/datasette/facets.py
index 0c6459d6..a314faaf 100644
--- a/datasette/facets.py
+++ b/datasette/facets.py
@@ -143,9 +143,10 @@ class ColumnFacet(Facet):
if column in already_enabled:
continue
suggested_facet_sql = """
- select distinct {column} from (
+ select {column}, count(*) as n from (
{sql}
) where {column} is not null
+ group by {column}
limit {limit}
""".format(
column=escape_sqlite(column), sql=self.sql, limit=facet_size + 1
@@ -165,6 +166,8 @@ class ColumnFacet(Facet):
and num_distinct_values > 1
and num_distinct_values <= facet_size
and num_distinct_values < row_count
+ # And at least one has n > 1
+ and any(r["n"] > 1 for r in distinct_values)
):
suggested_facets.append(
{
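The new suggestion rule can be checked in isolation. In this sketch, ``distinct_values`` stands in for the rows returned by the ``group by`` query above, with illustrative counts:

.. code-block:: python

    facet_size = 30
    row_count = 15
    # Rows from the suggestion query: one dict per distinct value,
    # where "n" is how many times that value appears
    distinct_values = [{"n": 10}, {"n": 4}, {"n": 1}]

    num_distinct_values = len(distinct_values)
    suggest = (
        num_distinct_values > 1
        and num_distinct_values <= facet_size
        and num_distinct_values < row_count
        # New condition: at least one value must occur more than once,
        # otherwise every facet bucket would have a count of exactly 1
        and any(r["n"] > 1 for r in distinct_values)
    )
    print(suggest)  # True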
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 3e4203f7..bb01d171 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -669,26 +669,27 @@ CREATE TABLE facetable (
neighborhood text,
tags text,
complex_array text,
+ distinct_some_null,
FOREIGN KEY ("city_id") REFERENCES [facet_cities](id)
);
INSERT INTO facetable
- (created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array)
+ (created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array, distinct_some_null)
VALUES
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]'),
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]'),
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]', '[]'),
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Hollywood', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]', '[]'),
- ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]'),
- ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]', '[]'),
- ("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]', '[]'),
- ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]', '[]'),
- ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]', '[]'),
- ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]'),
- ("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]')
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]', 'one'),
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]', 'two'),
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]', '[]', null),
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Hollywood', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]', '[]', null),
+ ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]', null),
+ ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]', '[]', null),
+ ("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]', null)
;
CREATE TABLE binary_data (
diff --git a/tests/test_api.py b/tests/test_api.py
index 1fa8642f..34eef4ce 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -197,6 +197,7 @@ def test_database_page(app_client):
"neighborhood",
"tags",
"complex_array",
+ "distinct_some_null",
],
"primary_keys": ["pk"],
"count": 15,
@@ -1042,15 +1043,38 @@ def test_table_filter_json_arraycontains(app_client):
"Mission",
'["tag1", "tag2"]',
'[{"foo": "bar"}]',
+ "one",
+ ],
+ [
+ 2,
+ "2019-01-14 08:00:00",
+ 1,
+ 1,
+ "CA",
+ 1,
+ "Dogpatch",
+ '["tag1", "tag3"]',
+ "[]",
+ "two",
],
- [2, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Dogpatch", '["tag1", "tag3"]', "[]"],
] == response.json["rows"]
def test_table_filter_extra_where(app_client):
response = app_client.get("/fixtures/facetable.json?_where=neighborhood='Dogpatch'")
assert [
- [2, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Dogpatch", '["tag1", "tag3"]', "[]"]
+ [
+ 2,
+ "2019-01-14 08:00:00",
+ 1,
+ 1,
+ "CA",
+ 1,
+ "Dogpatch",
+ '["tag1", "tag3"]',
+ "[]",
+ "two",
+ ]
] == response.json["rows"]
@@ -1503,6 +1527,7 @@ def test_expand_labels(app_client):
"neighborhood": "Dogpatch",
"tags": '["tag1", "tag3"]',
"complex_array": "[]",
+ "distinct_some_null": "two",
},
"13": {
"pk": 13,
@@ -1514,6 +1539,7 @@ def test_expand_labels(app_client):
"neighborhood": "Corktown",
"tags": "[]",
"complex_array": "[]",
+ "distinct_some_null": None,
},
} == response.json
diff --git a/tests/test_csv.py b/tests/test_csv.py
index 13aca489..1030c2bb 100644
--- a/tests/test_csv.py
+++ b/tests/test_csv.py
@@ -21,22 +21,22 @@ world
)
EXPECTED_TABLE_WITH_LABELS_CSV = """
-pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,complex_array
-1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]"
-2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]",[]
-3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[]
-4,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Tenderloin,[],[]
-5,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Bernal Heights,[],[]
-6,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Hayes Valley,[],[]
-7,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Hollywood,[],[]
-8,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Downtown,[],[]
-9,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Los Feliz,[],[]
-10,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Koreatown,[],[]
-11,2019-01-16 08:00:00,1,1,MI,3,Detroit,Downtown,[],[]
-12,2019-01-17 08:00:00,1,1,MI,3,Detroit,Greektown,[],[]
-13,2019-01-17 08:00:00,1,1,MI,3,Detroit,Corktown,[],[]
-14,2019-01-17 08:00:00,1,1,MI,3,Detroit,Mexicantown,[],[]
-15,2019-01-17 08:00:00,2,0,MC,4,Memnonia,Arcadia Planitia,[],[]
+pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,complex_array,distinct_some_null
+1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]",one
+2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]",[],two
+3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[],
+4,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Tenderloin,[],[],
+5,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Bernal Heights,[],[],
+6,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Hayes Valley,[],[],
+7,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Hollywood,[],[],
+8,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Downtown,[],[],
+9,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Los Feliz,[],[],
+10,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Koreatown,[],[],
+11,2019-01-16 08:00:00,1,1,MI,3,Detroit,Downtown,[],[],
+12,2019-01-17 08:00:00,1,1,MI,3,Detroit,Greektown,[],[],
+13,2019-01-17 08:00:00,1,1,MI,3,Detroit,Corktown,[],[],
+14,2019-01-17 08:00:00,1,1,MI,3,Detroit,Mexicantown,[],[],
+15,2019-01-17 08:00:00,2,0,MC,4,Memnonia,Arcadia Planitia,[],[],
""".lstrip().replace(
"\n", "\r\n"
)
From d3e1c3017ee2f606a731208d59fe48805cdc3259 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 22 Nov 2019 22:07:01 -0800
Subject: [PATCH 0285/2303] Display 0 results, closes #637
---
datasette/static/app.css | 7 +++++
datasette/templates/_table.html | 56 ++++++++++++++++++---------------
datasette/templates/query.html | 2 ++
tests/test_html.py | 14 +++++++++
4 files changed, 53 insertions(+), 26 deletions(-)
diff --git a/datasette/static/app.css b/datasette/static/app.css
index 34eb122c..d7cf6334 100644
--- a/datasette/static/app.css
+++ b/datasette/static/app.css
@@ -327,3 +327,10 @@ a.not-underlined {
pre.wrapped-sql {
white-space: pre-wrap;
}
+
+p.zero-results {
+ border: 2px solid #ccc;
+ background-color: #eee;
+ padding: 0.5em;
+ font-style: italic;
+}
diff --git a/datasette/templates/_table.html b/datasette/templates/_table.html
index c7a72253..42c37c55 100644
--- a/datasette/templates/_table.html
+++ b/datasette/templates/_table.html
@@ -1,28 +1,32 @@
-
-
-
- {% for column in display_columns %}
-
- {% if not column.sortable %}
- {{ column.name }}
- {% else %}
- {% if column.name == sort %}
- {{ column.name }} ▼
+{% if display_rows %}
+