{% endif %}
{% include "_codemirror_foot.html" %}
diff --git a/datasette/utils.py b/datasette/utils.py
index b0f74f0d..30fc4231 100644
--- a/datasette/utils.py
+++ b/datasette/utils.py
@@ -901,3 +901,15 @@ class StaticMount(click.ParamType):
if not os.path.exists(dirpath) or not os.path.isdir(dirpath):
self.fail("%s is not a valid directory path" % value, param, ctx)
return path, dirpath
+
+
+def format_bytes(bytes):
+ current = float(bytes)
+ for unit in ("bytes", "KB", "MB", "GB", "TB"):
+ if current < 1024:
+ break
+ current = current / 1024
+ if unit == "bytes":
+ return "{} {}".format(int(current), unit)
+ else:
+ return "{:.1f} {}".format(current, unit)
diff --git a/datasette/views/base.py b/datasette/views/base.py
index f4a8afaf..e98762b7 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -19,6 +19,7 @@ from datasette.utils import (
InterruptedError,
InvalidSql,
LimitedWriter,
+ format_bytes,
is_url,
path_from_row_pks,
path_with_added_args,
@@ -102,6 +103,7 @@ class RenderMixin(HTTPMethodView):
"extra_js_urls": self._asset_urls(
"extra_js_urls", template, context
),
+ "format_bytes": format_bytes,
}
}
)
diff --git a/datasette/views/database.py b/datasette/views/database.py
index fb6b242b..9c44a800 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -24,6 +24,7 @@ class DatabaseView(BaseView):
tables.sort(key=lambda t: (t["hidden"], t["name"]))
return {
"database": database,
+ "size": info["size"],
"tables": tables,
"hidden_count": len([t for t in tables if t["hidden"]]),
"views": info["views"],
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 43e2f44b..b406d70b 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -374,3 +374,18 @@ def test_path_with_format(path, format, extra_qs, expected):
)
actual = utils.path_with_format(request, format, extra_qs)
assert expected == actual
+
+
+@pytest.mark.parametrize(
+ "bytes,expected",
+ [
+ (120, '120 bytes'),
+ (1024, '1.0 KB'),
+ (1024 * 1024, '1.0 MB'),
+ (1024 * 1024 * 1024, '1.0 GB'),
+ (1024 * 1024 * 1024 * 1.3, '1.3 GB'),
+ (1024 * 1024 * 1024 * 1024, '1.0 TB'),
+ ]
+)
+def test_format_bytes(bytes, expected):
+ assert expected == utils.format_bytes(bytes)
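A quick usage sketch (not part of the patch) of the new helper, mirroring the parametrized cases above:

    from datasette import utils

    utils.format_bytes(120)              # '120 bytes'
    utils.format_bytes(1024 * 1024)      # '1.0 MB'
    utils.format_bytes(1.3 * 1024 ** 3)  # '1.3 GB'

Because it is also injected into the template context as format_bytes, templates can call it directly, for example to render the database size exposed in datasette/views/database.py.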
From d5d39da12b26c06a426e97872bce4e94248db42d Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 7 Feb 2019 22:23:27 -0800
Subject: [PATCH 0003/2288] Expanded section on db-to-sqlite
---
docs/ecosystem.rst | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst
index d235ab7b..401cd22a 100644
--- a/docs/ecosystem.rst
+++ b/docs/ecosystem.rst
@@ -28,6 +28,10 @@ db-to-sqlite
`db-to-sqlite `__ is a CLI tool that builds on top of `SQLAlchemy `__ and allows you to connect to any database supported by that library (including MySQL, Oracle and PostgreSQL), run a SQL query and save the results to a new table in a SQLite database.
+You can mirror an entire database (including copying foreign key relationships) with the ``--all`` option::
+
+ $ db-to-sqlite --connection="postgresql://simonw@localhost/myblog" --all blog.db
+
dbf-to-sqlite
-------------
@@ -73,4 +77,4 @@ datasette-pretty-json
datasette-sqlite-fts4
---------------------
-`datasette-sqlite-fts4 `__ provides search relevance ranking algorithms that can be used with SQLite's FTS4 search module. You can read more about it in `Exploring search relevance algorithms with SQLite `__.
\ No newline at end of file
+`datasette-sqlite-fts4 `__ provides search relevance ranking algorithms that can be used with SQLite's FTS4 search module. You can read more about it in `Exploring search relevance algorithms with SQLite `__.
From 1f91065b20cbc691f464bccfd8eef7d1ce4b14a8 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 Feb 2019 08:06:10 -0800
Subject: [PATCH 0004/2288] Added socrata2sql to the ecosystem page
A fantastic new tool created by @achavez at the Dallas Morning News.
---
docs/ecosystem.rst | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst
index 401cd22a..18390122 100644
--- a/docs/ecosystem.rst
+++ b/docs/ecosystem.rst
@@ -42,6 +42,15 @@ markdown-to-sqlite
`markdown-to-sqlite `__ reads Markdown files with embedded YAML metadata (e.g. for `Jekyll Front Matter `__) and creates a SQLite table with a schema matching the metadata. This is useful if you want to keep structured data in text form in a GitHub repository and use that to build a SQLite database.
+socrata2sql
+-----------
+
+`socrata2sql `__ is a tool by Andrew Chavez at the Dallas Morning News. It works with Socrata, a widely used platform for local and national government open data portals. It uses the Socrata API to pull down government datasets and store them in a local SQLite database (it can also export data to PostgreSQL, MySQL and other SQLAlchemy-supported databases).
+
+For example, to create a SQLite database of the `City of Dallas Payment Register `__ you would run the following command::
+
+ $ socrata2sql insert www.dallasopendata.com 64pp-jeba
+
Datasette Plugins
=================
From 5b3af3d015a6048a3977234bf46d20905ce720c0 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 24 Feb 2019 19:47:11 -0800
Subject: [PATCH 0005/2288] Added sqlite-utils blog entry to news section
---
README.md | 2 ++
1 file changed, 2 insertions(+)
diff --git a/README.md b/README.md
index 6ad43ca6..7fe15b0e 100644
--- a/README.md
+++ b/README.md
@@ -19,6 +19,8 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+ * 24th February 2019: [
+sqlite-utils: a Python library and CLI tool for building SQLite databases](https://simonwillison.net/2019/Feb/25/sqlite-utils/) - a partner tool for easily creating SQLite databases for use with Datasette.
 * 31st January 2019: [Datasette 0.27](https://datasette.readthedocs.io/en/latest/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://datasette.readthedocs.io/en/latest/ecosystem.html).
* 10th January 2019: [Datasette 0.26.1](http://datasette.readthedocs.io/en/latest/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options.
* 2nd January 2019: [Datasette 0.26](http://datasette.readthedocs.io/en/latest/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument.
From 4147c388cbb9b14d137dd5229ae3f574ee6a25fe Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 7 Mar 2019 20:30:21 -0800
Subject: [PATCH 0006/2288] Link to sqlite-utils blog entry
---
docs/ecosystem.rst | 2 ++
1 file changed, 2 insertions(+)
diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst
index 18390122..29b80707 100644
--- a/docs/ecosystem.rst
+++ b/docs/ecosystem.rst
@@ -23,6 +23,8 @@ sqlite-utils
The CLI tool can consume JSON streams directly and use them to create tables. It can also be used to query SQLite databases and output the results as CSV or JSON.
+See `sqlite-utils: a Python library and CLI tool for building SQLite databases `__ for more.
+
db-to-sqlite
------------
From 41744d76908bbef0edc038a5050a709cdd26e529 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 9 Mar 2019 10:43:12 -0800
Subject: [PATCH 0007/2288] Added datasette-jellyfish
---
docs/ecosystem.rst | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/docs/ecosystem.rst b/docs/ecosystem.rst
index 29b80707..69f935d0 100644
--- a/docs/ecosystem.rst
+++ b/docs/ecosystem.rst
@@ -75,6 +75,11 @@ datasette-json-html
`datasette-json-html `__ renders HTML in Datasette's table view driven by JSON returned from your SQL queries. This provides a way to embed images, links and lists of links directly in Datasette's main interface, defined using custom SQL statements.
+datasette-jellyfish
+-------------------
+
+`datasette-jellyfish `__ exposes custom SQL functions for a range of common fuzzy string matching operations, including Soundex, Porter stemming and Levenshtein distance. It builds on top of the `Jellyfish Python library `__.
+
datasette-render-images
-----------------------
From bf6b0f918de4aeee7c1036ac975ce2fb23237da7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 10 Mar 2019 14:37:11 -0700
Subject: [PATCH 0008/2288] about and about_url metadata options
---
datasette/app.py | 3 +++
datasette/cli.py | 2 ++
datasette/publish/common.py | 2 ++
datasette/publish/heroku.py | 4 ++++
datasette/publish/now.py | 4 ++++
datasette/templates/_description_source_license.html | 5 +++++
datasette/templates/base.html | 5 +++++
docs/datasette-package-help.txt | 2 ++
docs/datasette-publish-heroku-help.txt | 2 ++
docs/datasette-publish-now-help.txt | 2 ++
tests/fixtures.py | 2 ++
tests/test_html.py | 6 ++++--
12 files changed, 37 insertions(+), 2 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 3a74a5e5..453ce5c0 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -267,6 +267,9 @@ class Datasette:
"license": metadata.get("license") or self.metadata("license"),
"license_url": metadata.get("license_url")
or self.metadata("license_url"),
+ "about": metadata.get("about") or self.metadata("about"),
+ "about_url": metadata.get("about_url")
+ or self.metadata("about_url"),
}
)
diff --git a/datasette/cli.py b/datasette/cli.py
index 34ed0020..446456f4 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -217,6 +217,8 @@ def plugins(all, plugins_dir):
@click.option("--license_url", help="License URL for metadata")
@click.option("--source", help="Source label for metadata")
@click.option("--source_url", help="Source URL for metadata")
+@click.option("--about", help="About label for metadata")
+@click.option("--about_url", help="About URL for metadata")
def package(
files,
tag,
diff --git a/datasette/publish/common.py b/datasette/publish/common.py
index 9dd2ae9e..4cc69b4a 100644
--- a/datasette/publish/common.py
+++ b/datasette/publish/common.py
@@ -42,6 +42,8 @@ def add_common_publish_arguments_and_options(subcommand):
click.option("--license_url", help="License URL for metadata"),
click.option("--source", help="Source label for metadata"),
click.option("--source_url", help="Source URL for metadata"),
+ click.option("--about", help="About label for metadata"),
+ click.option("--about_url", help="About URL for metadata"),
)):
subcommand = decorator(subcommand)
return subcommand
diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py
index d42f4e83..32c861cd 100644
--- a/datasette/publish/heroku.py
+++ b/datasette/publish/heroku.py
@@ -35,6 +35,8 @@ def publish_subcommand(publish):
license_url,
source,
source_url,
+ about,
+ about_url,
name,
):
fail_if_publish_binary_not_installed(
@@ -72,6 +74,8 @@ def publish_subcommand(publish):
"license_url": license_url,
"source": source,
"source_url": source_url,
+ "about": about,
+ "about_url": about_url,
},
):
app_name = None
diff --git a/datasette/publish/now.py b/datasette/publish/now.py
index bd2b051b..64a73279 100644
--- a/datasette/publish/now.py
+++ b/datasette/publish/now.py
@@ -39,6 +39,8 @@ def publish_subcommand(publish):
license_url,
source,
source_url,
+ about,
+ about_url,
name,
force,
token,
@@ -70,6 +72,8 @@ def publish_subcommand(publish):
"license_url": license_url,
"source": source,
"source_url": source_url,
+ "about": about,
+ "about_url": about_url,
},
):
now_json = {
diff --git a/datasette/templates/_description_source_license.html b/datasette/templates/_description_source_license.html
index eba4eb1a..3327706e 100644
--- a/datasette/templates/_description_source_license.html
+++ b/datasette/templates/_description_source_license.html
@@ -21,5 +21,10 @@
{% endif %}{{ metadata.source or metadata.source_url }}{% if metadata.source_url %}{% endif %}
{% endif %}
+    {% if metadata.about or metadata.about_url %}{% if metadata.license or metadata.license_url or metadata.source or metadata.source_url %}·{% endif %}
+ About: {% if metadata.about_url %}
+
+ {% endif %}{{ metadata.about or metadata.about_url }}{% if metadata.about_url %}{% endif %}
+ {% endif %}
{% endif %}
diff --git a/datasette/templates/base.html b/datasette/templates/base.html
index c766a44e..0ea41d7e 100644
--- a/datasette/templates/base.html
+++ b/datasette/templates/base.html
@@ -33,6 +33,11 @@
{% endif %}{{ metadata.source or metadata.source_url }}{% if metadata.source_url %}{% endif %}
{% endif %}
+ {% if metadata.about or metadata.about_url %}·
+ About: {% if metadata.about_url %}
+
+ {% endif %}{{ metadata.about or metadata.about_url }}{% if metadata.about_url %}{% endif %}
+ {% endif %}
{% endif %}
diff --git a/docs/datasette-package-help.txt b/docs/datasette-package-help.txt
index d0ad2d3e..39be04e9 100644
--- a/docs/datasette-package-help.txt
+++ b/docs/datasette-package-help.txt
@@ -21,4 +21,6 @@ Options:
--license_url TEXT License URL for metadata
--source TEXT Source label for metadata
--source_url TEXT Source URL for metadata
+ --about TEXT About label for metadata
+ --about_url TEXT About URL for metadata
--help Show this message and exit.
diff --git a/docs/datasette-publish-heroku-help.txt b/docs/datasette-publish-heroku-help.txt
index f82eaf3e..cd9af09b 100644
--- a/docs/datasette-publish-heroku-help.txt
+++ b/docs/datasette-publish-heroku-help.txt
@@ -16,5 +16,7 @@ Options:
--license_url TEXT License URL for metadata
--source TEXT Source label for metadata
--source_url TEXT Source URL for metadata
+ --about TEXT About label for metadata
+ --about_url TEXT About URL for metadata
-n, --name TEXT Application name to use when deploying
--help Show this message and exit.
diff --git a/docs/datasette-publish-now-help.txt b/docs/datasette-publish-now-help.txt
index a9c01f39..445c1bc7 100644
--- a/docs/datasette-publish-now-help.txt
+++ b/docs/datasette-publish-now-help.txt
@@ -16,6 +16,8 @@ Options:
--license_url TEXT License URL for metadata
--source TEXT Source label for metadata
--source_url TEXT Source URL for metadata
+ --about TEXT About label for metadata
+ --about_url TEXT About URL for metadata
-n, --name TEXT Application name to use when deploying
--force Pass --force option to now
--token TEXT Auth token to use for deploy
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 16395553..a77a3f4a 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -133,6 +133,8 @@ METADATA = {
'license_url': 'https://github.com/simonw/datasette/blob/master/LICENSE',
'source': 'tests/fixtures.py',
'source_url': 'https://github.com/simonw/datasette/blob/master/tests/fixtures.py',
+ 'about': 'About Datasette',
+ 'about_url': 'https://github.com/simonw/datasette',
"plugins": {
"name-of-plugin": {
"depth": "root"
diff --git a/tests/test_html.py b/tests/test_html.py
index 9adfe8f2..ca6d62aa 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -744,14 +744,16 @@ def assert_querystring_equal(expected, actual):
def assert_footer_links(soup):
footer_links = soup.find('div', {'class': 'ft'}).findAll('a')
- assert 3 == len(footer_links)
- datasette_link, license_link, source_link = footer_links
+ assert 4 == len(footer_links)
+ datasette_link, license_link, source_link, about_link = footer_links
assert 'Datasette' == datasette_link.text.strip()
assert 'tests/fixtures.py' == source_link.text.strip()
assert 'Apache License 2.0' == license_link.text.strip()
+ assert 'About Datasette' == about_link.text.strip()
assert 'https://github.com/simonw/datasette' == datasette_link['href']
assert 'https://github.com/simonw/datasette/blob/master/tests/fixtures.py' == source_link['href']
assert 'https://github.com/simonw/datasette/blob/master/LICENSE' == license_link['href']
+ assert 'https://github.com/simonw/datasette' == about_link['href']
def inner_html(soup):
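For reference, an illustrative metadata sketch (not part of the patch) showing where the new about / about_url keys sit, using the values added to tests/fixtures.py above:

    METADATA = {
        "license": "Apache License 2.0",
        "license_url": "https://github.com/simonw/datasette/blob/master/LICENSE",
        "source": "tests/fixtures.py",
        "source_url": "https://github.com/simonw/datasette/blob/master/tests/fixtures.py",
        "about": "About Datasette",
        "about_url": "https://github.com/simonw/datasette",
    }

The same pair can also be supplied on the command line via the new --about and --about_url options to datasette package and datasette publish.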
From 9743e1d91b5f0a2b3c1c0bd6ffce8739341f43c4 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Mar 2019 16:42:38 -0700
Subject: [PATCH 0009/2288] Support for :memory: databases
If you start Datasette with no files, it will connect to :memory: instead.
When starting it with files you can add --memory to also get a :memory: database.
---
datasette/app.py | 79 +++++++++++++++++++------------
datasette/cli.py | 5 ++
datasette/templates/database.html | 4 +-
docs/datasette-serve-help.txt | 1 +
tests/fixtures.py | 8 ++++
tests/test_api.py | 26 ++++++++++
6 files changed, 90 insertions(+), 33 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 453ce5c0..37b199a4 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -42,7 +42,7 @@ from .version import __version__
app_root = Path(__file__).parent.parent
connections = threading.local()
-
+MEMORY = object()
ConfigOption = collections.namedtuple(
"ConfigOption", ("name", "default", "help")
@@ -123,10 +123,15 @@ class Datasette:
template_dir=None,
plugins_dir=None,
static_mounts=None,
+ memory=False,
config=None,
version_note=None,
):
self.files = files
+ if not self.files:
+ self.files = [MEMORY]
+ elif memory:
+ self.files = (MEMORY,) + self.files
self.cache_headers = cache_headers
self.cors = cors
self._inspect = inspect_data
@@ -296,31 +301,40 @@ class Datasette:
self._inspect = {}
for filename in self.files:
- path = Path(filename)
- name = path.stem
- if name in self._inspect:
- raise Exception("Multiple files with same stem %s" % name)
- try:
- with sqlite3.connect(
- "file:{}?immutable=1".format(path), uri=True
- ) as conn:
- self.prepare_connection(conn)
- self._inspect[name] = {
- "hash": inspect_hash(path),
- "file": str(path),
- "size": path.stat().st_size,
- "views": inspect_views(conn),
- "tables": inspect_tables(conn, (self.metadata("databases") or {}).get(name, {}))
- }
- except sqlite3.OperationalError as e:
- if (e.args[0] == 'no such module: VirtualSpatialIndex'):
- raise click.UsageError(
- "It looks like you're trying to load a SpatiaLite"
- " database without first loading the SpatiaLite module."
- "\n\nRead more: https://datasette.readthedocs.io/en/latest/spatialite.html"
- )
- else:
- raise
+ if filename is MEMORY:
+ self._inspect[":memory:"] = {
+ "hash": "000",
+ "file": ":memory:",
+ "size": 0,
+ "views": {},
+ "tables": {},
+ }
+ else:
+ path = Path(filename)
+ name = path.stem
+ if name in self._inspect:
+ raise Exception("Multiple files with same stem %s" % name)
+ try:
+ with sqlite3.connect(
+ "file:{}?immutable=1".format(path), uri=True
+ ) as conn:
+ self.prepare_connection(conn)
+ self._inspect[name] = {
+ "hash": inspect_hash(path),
+ "file": str(path),
+ "size": path.stat().st_size,
+ "views": inspect_views(conn),
+ "tables": inspect_tables(conn, (self.metadata("databases") or {}).get(name, {}))
+ }
+ except sqlite3.OperationalError as e:
+ if (e.args[0] == 'no such module: VirtualSpatialIndex'):
+ raise click.UsageError(
+ "It looks like you're trying to load a SpatiaLite"
+ " database without first loading the SpatiaLite module."
+ "\n\nRead more: https://datasette.readthedocs.io/en/latest/spatialite.html"
+ )
+ else:
+ raise
return self._inspect
def register_custom_units(self):
@@ -403,11 +417,14 @@ class Datasette:
conn = getattr(connections, db_name, None)
if not conn:
info = self.inspect()[db_name]
- conn = sqlite3.connect(
- "file:{}?immutable=1".format(info["file"]),
- uri=True,
- check_same_thread=False,
- )
+ if info["file"] == ":memory:":
+ conn = sqlite3.connect(":memory:")
+ else:
+ conn = sqlite3.connect(
+ "file:{}?immutable=1".format(info["file"]),
+ uri=True,
+ check_same_thread=False,
+ )
self.prepare_connection(conn)
setattr(connections, db_name, conn)
diff --git a/datasette/cli.py b/datasette/cli.py
index 446456f4..6fbc9908 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -315,6 +315,9 @@ def package(
help="mountpoint:path-to-directory for serving static files",
multiple=True,
)
+@click.option(
+ "--memory", is_flag=True, help="Make :memory: database available"
+)
@click.option(
"--config",
type=Config(),
@@ -340,6 +343,7 @@ def serve(
template_dir,
plugins_dir,
static,
+ memory,
config,
version_note,
help_config,
@@ -384,6 +388,7 @@ def serve(
plugins_dir=plugins_dir,
static_mounts=static,
config=dict(config),
+ memory=memory,
version_note=version_note,
)
# Force initial hashing/table counting
diff --git a/datasette/templates/database.html b/datasette/templates/database.html
index f64d5c90..f827e584 100644
--- a/datasette/templates/database.html
+++ b/datasette/templates/database.html
@@ -19,7 +19,7 @@
{% if config.allow_sql %}
{% endif %}
@@ -56,7 +56,7 @@
{% endif %}
-{% if config.allow_download %}
+{% if config.allow_download and database != ":memory:" %}
{% endif %}
diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt
index caa00e33..65b9aceb 100644
--- a/docs/datasette-serve-help.txt
+++ b/docs/datasette-serve-help.txt
@@ -17,6 +17,7 @@ Options:
--template-dir DIRECTORY Path to directory containing custom templates
--plugins-dir DIRECTORY Path to directory containing custom plugins
--static STATIC MOUNT mountpoint:path-to-directory for serving static files
+ --memory Make :memory: database available
--config CONFIG Set config option using configname:value
datasette.readthedocs.io/en/latest/config.html
--version-note TEXT Additional note to show on /-/versions
diff --git a/tests/fixtures.py b/tests/fixtures.py
index a77a3f4a..efd85fab 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -65,6 +65,14 @@ def app_client():
yield from make_app_client()
+@pytest.fixture(scope="session")
+def app_client_no_files():
+ ds = Datasette([])
+ client = TestClient(ds.app().test_client)
+ client.ds = ds
+ yield client
+
+
@pytest.fixture(scope='session')
def app_client_shorter_time_limit():
yield from make_app_client(20)
diff --git a/tests/test_api.py b/tests/test_api.py
index 8cd1e94e..a6ba3f37 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1,5 +1,6 @@
from .fixtures import ( # noqa
app_client,
+ app_client_no_files,
app_client_shorter_time_limit,
app_client_larger_cache_size,
app_client_returned_rows_matches_page_size,
@@ -368,6 +369,31 @@ def test_database_page(app_client):
}] == data['tables']
+def test_no_files_uses_memory_database(app_client_no_files):
+ response = app_client_no_files.get("/.json")
+ assert response.status == 200
+ assert {
+ ":memory:": {
+ "hash": "000",
+ "hidden_table_rows_sum": 0,
+ "hidden_tables_count": 0,
+ "name": ":memory:",
+ "path": ":memory:-000",
+ "table_rows_sum": 0,
+ "tables_count": 0,
+ "tables_more": False,
+ "tables_truncated": [],
+ "views_count": 0,
+ }
+ } == response.json
+ # Try that SQL query
+ response = app_client_no_files.get(
+ "/:memory:-0.json?sql=select+sqlite_version()&_shape=array"
+ )
+ assert 1 == len(response.json)
+ assert ["sqlite_version()"] == list(response.json[0].keys())
+
+
def test_database_page_for_database_with_dot_in_name(app_client_with_dot):
response = app_client_with_dot.get("/fixtures.dot.json")
assert 200 == response.status
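A minimal sketch (assumptions: constructing Datasette directly, as the new app_client_no_files fixture does) of how the :memory: support behaves:

    from datasette.app import Datasette

    # No files at all: a single :memory: database is served instead
    ds = Datasette([])

    # With files, memory=True (the --memory CLI flag) adds :memory: alongside them
    ds_with_memory = Datasette(["fixtures.db"], memory=True)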
From b3e739332624c2d4f2668a105afd727af774100b Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Mar 2019 21:41:43 -0700
Subject: [PATCH 0010/2288] Allow more recent versions of Click
Closes #414
---
setup.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.py b/setup.py
index 92b92c3f..fb00f2d0 100644
--- a/setup.py
+++ b/setup.py
@@ -34,7 +34,7 @@ setup(
package_data={'datasette': ['templates/*.html']},
include_package_data=True,
install_requires=[
- 'click==6.7',
+ 'click>=6.7',
'click-default-group==1.2',
'Sanic==0.7.0',
'Jinja2==2.10',
From 285566790879b31d2fdd2a8c6f56825162eb71b9 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Mar 2019 22:00:13 -0700
Subject: [PATCH 0011/2288] Fix for test failure with Click 7.0
---
tests/test_publish_heroku.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py
index 852403ca..da4e213a 100644
--- a/tests/test_publish_heroku.py
+++ b/tests/test_publish_heroku.py
@@ -24,7 +24,7 @@ def test_publish_heroku_installs_plugin(mock_call, mock_check_output, mock_which
with runner.isolated_filesystem():
open("t.db", "w").write("data")
result = runner.invoke(cli.cli, ["publish", "heroku", "t.db"], input="y\n")
- assert -1 == result.exit_code
+ assert 0 != result.exit_code
mock_check_output.assert_has_calls(
[mock.call(["heroku", "plugins"]), mock.call(["heroku", "apps:list", "--json"])]
)
From 9e8c36793bfbb17c2f67371cc7f9aa8b9202fdc4 Mon Sep 17 00:00:00 2001
From: joelondon
Date: Fri, 15 Mar 2019 05:06:45 +0000
Subject: [PATCH 0012/2288] Update spatialite.rst (#413)
A line of SQL added to create the idx_ spatial index in the Python recipe
---
docs/spatialite.rst | 2 ++
1 file changed, 2 insertions(+)
diff --git a/docs/spatialite.rst b/docs/spatialite.rst
index 5a8a31b1..58179e70 100644
--- a/docs/spatialite.rst
+++ b/docs/spatialite.rst
@@ -68,6 +68,8 @@ Here's a recipe for taking a table with existing latitude and longitude columns,
UPDATE events SET
point_geom = GeomFromText('POINT('||"longitude"||' '||"latitude"||')',4326);
''')
+ # Now add a spatial index to that column
+ conn.execute('select CreateSpatialIndex("museums", "point_geom");')
# If you don't commit your changes will not be persisted:
conn.commit()
conn.close()
From afe9aa3ae03c485c5d6652741438d09445a486c1 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Mar 2019 22:22:24 -0700
Subject: [PATCH 0013/2288] show/hide link for SQL on custom query page
Closes #415
---
datasette/templates/query.html | 12 +++++++-----
datasette/views/base.py | 5 +++++
tests/test_html.py | 19 +++++++++++++++++++
3 files changed, 31 insertions(+), 5 deletions(-)
diff --git a/datasette/templates/query.html b/datasette/templates/query.html
index b23c67d8..06651689 100644
--- a/datasette/templates/query.html
+++ b/datasette/templates/query.html
@@ -26,11 +26,13 @@
{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 2727565b..b7c9a4b0 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -296,10 +296,12 @@ class TableView(RowTableShared):
where_clauses, params = filters.build_where_clauses(table)
# _search support:
- fts_table = await self.ds.execute_against_connection_in_thread(
+ fts_table = special_args.get("_fts_table")
+ fts_table = fts_table or table_metadata.get("fts_table")
+ fts_table = fts_table or await self.ds.execute_against_connection_in_thread(
database, lambda conn: detect_fts(conn, table)
)
- fts_pk = table_metadata.get("fts_pk", "rowid")
+ fts_pk = special_args.get("_fts_pk", table_metadata.get("fts_pk", "rowid"))
search_args = dict(
pair for pair in special_args.items() if pair[0].startswith("_search")
)
@@ -731,6 +733,10 @@ class TableView(RowTableShared):
table, {}
)
self.ds.update_with_inherited_metadata(metadata)
+ form_hidden_args = []
+ for arg in ("_fts_table", "_fts_pk"):
+ if arg in special_args:
+ form_hidden_args.append((arg, special_args[arg]))
return {
"supports_search": bool(fts_table),
"search": search or "",
@@ -745,6 +751,7 @@ class TableView(RowTableShared):
key=lambda f: (len(f["results"]), f["name"]),
reverse=True
),
+ "form_hidden_args": form_hidden_args,
"facet_hideable": lambda facet: facet not in metadata_facets,
"is_sortable": any(c["sortable"] for c in display_columns),
"path_with_replaced_args": path_with_replaced_args,
diff --git a/docs/full_text_search.rst b/docs/full_text_search.rst
index 987e2272..08e85c90 100644
--- a/docs/full_text_search.rst
+++ b/docs/full_text_search.rst
@@ -78,9 +78,13 @@ Configuring full-text search for a table or view
If a table has a corresponding FTS table set up using the ``content=`` argument to ``CREATE VIRTUAL TABLE`` shown above, Datasette will detect it automatically and add a search interface to the table page for that table.
-You can also manually configure which table should be used for full-text search using :ref:`metadata`. You can set the associated FTS table for a specific table and you can also set one for a view - if you do that, the page for that SQL view will offer a search option.
+You can also manually configure which table should be used for full-text search using querystring parameters or :ref:`metadata`. You can set the associated FTS table for a specific table and you can also set one for a view - if you do that, the page for that SQL view will offer a search option.
-The ``fts_table`` property can be used to specify an associated FTS table. If the primary key column in your table which was used to populate the FTS table is something other than ``rowid``, you can specify the column to use with the ``fts_pk`` property.
+Use ``?_fts_table=x`` to override the FTS table for a specific page. If the primary key is something other than ``rowid`` you can use ``?_fts_pk=col`` to set that as well. This is particularly useful for views, for example:
+
+https://latest.datasette.io/fixtures/searchable_view?_fts_table=searchable_fts&_fts_pk=pk
+
+The ``fts_table`` metadata property can be used to specify an associated FTS table. If the primary key column in your table which was used to populate the FTS table is something other than ``rowid``, you can specify the column to use with the ``fts_pk`` property.
Here is an example which enables full-text search for a ``display_ads`` view which is defined against the ``ads`` table and hence needs to run FTS against the ``ads_fts`` table, using the ``id`` as the primary key::
diff --git a/tests/fixtures.py b/tests/fixtures.py
index b3b38c95..cb6f7a39 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -209,6 +209,10 @@ METADATA = {
},
'simple_view': {
'sortable_columns': ['content'],
+ },
+ 'searchable_view_configured_by_metadata': {
+ 'fts_table': 'searchable_fts',
+ 'fts_pk': 'pk'
}
},
'queries': {
@@ -564,6 +568,12 @@ INSERT INTO [table/with/slashes.csv] VALUES (3, 'hey');
CREATE VIEW simple_view AS
SELECT content, upper(content) AS upper_content FROM simple_primary_key;
+CREATE VIEW searchable_view AS
+ SELECT * from searchable;
+
+CREATE VIEW searchable_view_configured_by_metadata AS
+ SELECT * from searchable;
+
''' + '\n'.join([
'INSERT INTO no_primary_key VALUES ({i}, "a{i}", "b{i}", "c{i}");'.format(i=i + 1)
for i in range(201)
diff --git a/tests/test_api.py b/tests/test_api.py
index 188a60e8..b822d23f 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -847,6 +847,24 @@ def test_searchable(app_client, path, expected_rows):
assert expected_rows == response.json['rows']
+@pytest.mark.parametrize('path,expected_rows', [
+ ('/fixtures/searchable_view_configured_by_metadata.json?_search=weasel', [
+ [2, 'terry dog', 'sara weasel', 'puma'],
+ ]),
+ # This should return all results because search is not configured:
+ ('/fixtures/searchable_view.json?_search=weasel', [
+ [1, 'barry cat', 'terry dog', 'panther'],
+ [2, 'terry dog', 'sara weasel', 'puma'],
+ ]),
+ ('/fixtures/searchable_view.json?_search=weasel&_fts_table=searchable_fts&_fts_pk=pk', [
+ [2, 'terry dog', 'sara weasel', 'puma'],
+ ]),
+])
+def test_searchable_views(app_client, path, expected_rows):
+ response = app_client.get(path)
+ assert expected_rows == response.json['rows']
+
+
def test_searchable_invalid_column(app_client):
response = app_client.get(
'/fixtures/searchable.json?_search_invalid=x'
diff --git a/tests/test_html.py b/tests/test_html.py
index 1babaa60..3e2ea845 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -185,6 +185,20 @@ def test_empty_search_parameter_gets_removed(app_client):
)
+def test_searchable_view_persists_fts_table(app_client):
+ # The search form should persist ?_fts_table as a hidden field
+ response = app_client.get(
+ "/fixtures/searchable_view?_fts_table=searchable_fts&_fts_pk=pk"
+ )
+ inputs = Soup(response.body, "html.parser").find("form").findAll("input")
+ hiddens = [i for i in inputs if i["type"] == "hidden"]
+ assert [
+ ('_fts_table', 'searchable_fts'), ('_fts_pk', 'pk')
+ ] == [
+ (hidden['name'], hidden['value']) for hidden in hiddens
+ ]
+
+
def test_sort_by_desc_redirects(app_client):
path_base = '/fixtures/sortable'
path = path_base + '?' + urllib.parse.urlencode({
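As a sketch (not part of the patch), the metadata-based configuration exercised by the new searchable_view_configured_by_metadata fixture looks roughly like this:

    METADATA = {
        "databases": {
            "fixtures": {
                "tables": {
                    "searchable_view_configured_by_metadata": {
                        "fts_table": "searchable_fts",
                        "fts_pk": "pk",
                    }
                }
            }
        }
    }

The per-request equivalent is the querystring form documented above: ?_fts_table=searchable_fts&_fts_pk=pk.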
From e11cb4c66442abca2a6b6159521a6cf4da8739c1 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 11 Apr 2019 22:00:47 -0700
Subject: [PATCH 0032/2288] Persist show/hide state better, closes #425
---
datasette/templates/_codemirror_foot.html | 2 +-
datasette/templates/query.html | 5 ++++-
tests/test_html.py | 8 ++++++++
3 files changed, 13 insertions(+), 2 deletions(-)
diff --git a/datasette/templates/_codemirror_foot.html b/datasette/templates/_codemirror_foot.html
index 1e07fc72..4b55bf8d 100644
--- a/datasette/templates/_codemirror_foot.html
+++ b/datasette/templates/_codemirror_foot.html
@@ -1,5 +1,5 @@
{% endfor %}
diff --git a/datasette/templates/database.html b/datasette/templates/database.html
index 9fb4d6eb..f168db97 100644
--- a/datasette/templates/database.html
+++ b/datasette/templates/database.html
@@ -9,8 +9,14 @@
{% block body_class %}db db-{{ database|to_css_class }}{% endblock %}
+{% block nav %}
+
{% for table in database.tables_and_views_truncated %}{{ table.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}
+
{% for table in database.tables_and_views_truncated %}{{ table.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% if database.tables_and_views_more %}, ...{% endif %}
',
+ ],
]
assert expected == [
[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")
@@ -611,7 +616,7 @@ def test_table_html_foreign_key_links(app_client):
def test_table_html_disable_foreign_key_links_with_labels(app_client):
- response = app_client.get("/fixtures/foreign_key_references?_labels=off")
+ response = app_client.get("/fixtures/foreign_key_references?_labels=off&_size=1")
assert response.status == 200
table = Soup(response.body, "html.parser").find("table")
expected = [
From c3181d9a840dff7be8c990b21f5749db393a4ea0 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 2 Nov 2019 15:47:20 -0700
Subject: [PATCH 0235/2288] Release notes for 0.30.2
---
docs/changelog.rst | 13 ++++++++++++-
1 file changed, 12 insertions(+), 1 deletion(-)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 8ac32c45..f4761efe 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,16 @@
Changelog
=========
+.. _v0_30_2:
+
+0.30.2 (2019-11-02)
+-------------------
+
+- ``/-/plugins`` page now uses distribution name e.g. ``datasette-cluster-map`` instead of the name of the underlying Python package (``datasette_cluster_map``) (`#606 `__)
+- Array faceting is now only suggested for columns that contain arrays of strings (`#562 `__)
+- Better documentation for the ``--host`` argument (`#574 `__)
+- Don't show ``None`` with a broken link for the label on a nullable foreign key (`#406 `__)
+
.. _v0_30_1:
0.30.1 (2019-10-30)
@@ -14,6 +24,7 @@ Changelog
.. _v0_30:
+
0.30 (2019-10-18)
-----------------
@@ -82,7 +93,7 @@ Two new plugins take advantage of this hook:
New plugin hook: extra_template_vars
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The :ref:`plugin_extra_template_vars` plugin hook allows plugins to inject their own additional variables into the Datasette template context. This can be used in conjunction with custom templates to customize the Datasette interface. `datasette-auth-github `__ uses this hook to add custom HTML to the new top navigation bar (which is designed to be modified by plugins, see `#540 `__).
+The :ref:`plugin_hook_extra_template_vars` plugin hook allows plugins to inject their own additional variables into the Datasette template context. This can be used in conjunction with custom templates to customize the Datasette interface. `datasette-auth-github `__ uses this hook to add custom HTML to the new top navigation bar (which is designed to be modified by plugins, see `#540 `__).
Secret plugin configuration options
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
From 2bf7ce5f517d772a16d7855a35a8a75d4456aad7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 2 Nov 2019 16:12:46 -0700
Subject: [PATCH 0236/2288] Fix CSV export for nullable foreign keys, closes
#612
---
datasette/views/base.py | 12 ++++++++----
tests/test_csv.py | 15 +++++++++++++++
2 files changed, 23 insertions(+), 4 deletions(-)
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 1568b084..94945304 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -330,10 +330,14 @@ class DataView(BaseView):
else:
# Look for {"value": "label": } dicts and expand
new_row = []
- for cell in row:
- if isinstance(cell, dict):
- new_row.append(cell["value"])
- new_row.append(cell["label"])
+ for heading, cell in zip(data["columns"], row):
+ if heading in expanded_columns:
+ if cell is None:
+ new_row.extend(("", ""))
+ else:
+ assert isinstance(cell, dict)
+ new_row.append(cell["value"])
+ new_row.append(cell["label"])
else:
new_row.append(cell)
await writer.writerow(new_row)
diff --git a/tests/test_csv.py b/tests/test_csv.py
index b148b6db..13aca489 100644
--- a/tests/test_csv.py
+++ b/tests/test_csv.py
@@ -41,6 +41,14 @@ pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,com
"\n", "\r\n"
)
+EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV = """
+pk,foreign_key_with_label,foreign_key_with_label_label,foreign_key_with_no_label,foreign_key_with_no_label_label
+1,1,hello,1,1
+2,,,,
+""".lstrip().replace(
+ "\n", "\r\n"
+)
+
def test_table_csv(app_client):
response = app_client.get("/fixtures/simple_primary_key.csv")
@@ -63,6 +71,13 @@ def test_table_csv_with_labels(app_client):
assert EXPECTED_TABLE_WITH_LABELS_CSV == response.text
+def test_table_csv_with_nullable_labels(app_client):
+ response = app_client.get("/fixtures/foreign_key_references.csv?_labels=1")
+ assert response.status == 200
+ assert "text/plain; charset=utf-8" == response.headers["content-type"]
+ assert EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV == response.text
+
+
def test_custom_sql_csv(app_client):
response = app_client.get(
"/fixtures.csv?sql=select+content+from+simple_primary_key+limit+2"
From ee330222f4c3ee66c2fe41ebc76fed56b9cb9a00 Mon Sep 17 00:00:00 2001
From: Tobias Kunze
Date: Mon, 4 Nov 2019 03:39:55 +0100
Subject: [PATCH 0237/2288] Offer to format readonly SQL (#602)
Following discussion in #601, this PR adds a "Format SQL" button to
read-only SQL (if the SQL actually differs from the formatting result).
It also removes a console error on readonly SQL queries.
Thanks, @rixx!
---
datasette/templates/_codemirror_foot.html | 41 ++++++++++++++---------
1 file changed, 26 insertions(+), 15 deletions(-)
diff --git a/datasette/templates/_codemirror_foot.html b/datasette/templates/_codemirror_foot.html
index 9aba61ab..4019d448 100644
--- a/datasette/templates/_codemirror_foot.html
+++ b/datasette/templates/_codemirror_foot.html
@@ -6,21 +6,32 @@ window.onload = () => {
if (sqlFormat && !readOnly) {
sqlFormat.hidden = false;
}
- var editor = CodeMirror.fromTextArea(sqlInput, {
- lineNumbers: true,
- mode: "text/x-sql",
- lineWrapping: true,
- });
- editor.setOption("extraKeys", {
- "Shift-Enter": function() {
- document.getElementsByClassName("sql")[0].submit();
- },
- Tab: false
- });
- if (sqlInput && sqlFormat) {
- sqlFormat.addEventListener("click", ev => {
- editor.setValue(sqlFormatter.format(editor.getValue()));
- })
+ if (sqlInput) {
+ var editor = CodeMirror.fromTextArea(sqlInput, {
+ lineNumbers: true,
+ mode: "text/x-sql",
+ lineWrapping: true,
+ });
+ editor.setOption("extraKeys", {
+ "Shift-Enter": function() {
+ document.getElementsByClassName("sql")[0].submit();
+ },
+ Tab: false
+ });
+ if (sqlFormat) {
+ sqlFormat.addEventListener("click", ev => {
+ editor.setValue(sqlFormatter.format(editor.getValue()));
+ })
+ }
+ }
+ if (sqlFormat && readOnly) {
+ const formatted = sqlFormatter.format(readOnly.innerHTML);
+ if (formatted != readOnly.innerHTML) {
+ sqlFormat.hidden = false;
+ sqlFormat.addEventListener("click", ev => {
+ readOnly.innerHTML = formatted;
+ })
+ }
}
}
From 9db22cdf1809fb78a7b183cd2f617cd5e26efc68 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 3 Nov 2019 20:11:55 -0800
Subject: [PATCH 0238/2288] pk__notin= filter, closes #614
---
datasette/filters.py | 15 +++++++++++++++
docs/json_api.rst | 3 +++
tests/test_filters.py | 3 +++
3 files changed, 21 insertions(+)
diff --git a/datasette/filters.py b/datasette/filters.py
index efe014ae..5897a3ed 100644
--- a/datasette/filters.py
+++ b/datasette/filters.py
@@ -77,6 +77,20 @@ class InFilter(Filter):
return "{} in {}".format(column, json.dumps(self.split_value(value)))
+class NotInFilter(InFilter):
+ key = "notin"
+ display = "not in"
+
+ def where_clause(self, table, column, value, param_counter):
+ values = self.split_value(value)
+ params = [":p{}".format(param_counter + i) for i in range(len(values))]
+ sql = "{} not in ({})".format(escape_sqlite(column), ", ".join(params))
+ return sql, values
+
+ def human_clause(self, column, value):
+ return "{} not in {}".format(column, json.dumps(self.split_value(value)))
+
+
class Filters:
_filters = (
[
@@ -125,6 +139,7 @@ class Filters:
TemplatedFilter("like", "like", '"{c}" like :{p}', '{c} like "{v}"'),
TemplatedFilter("glob", "glob", '"{c}" glob :{p}', '{c} glob "{v}"'),
InFilter(),
+ NotInFilter(),
]
+ (
[
diff --git a/docs/json_api.rst b/docs/json_api.rst
index 4b365e14..de70362c 100644
--- a/docs/json_api.rst
+++ b/docs/json_api.rst
@@ -228,6 +228,9 @@ You can filter the data returned by the table based on column values using a que
``?column__in=["value","value,with,commas"]``
+``?column__notin=value1,value2,value3``
+ Rows where column does not match any of the provided values. The inverse of ``__in=``. Also supports JSON arrays.
+
``?column__arraycontains=value``
Works against columns that contain JSON arrays - matches if any of the values in that array match.
diff --git a/tests/test_filters.py b/tests/test_filters.py
index fd682cd9..8598087f 100644
--- a/tests/test_filters.py
+++ b/tests/test_filters.py
@@ -47,6 +47,9 @@ import pytest
["foo in (:p0, :p1)"],
["dog,cat", "cat[dog]"],
),
+ # Not in, and JSON array not in
+ ((("foo__notin", "1,2,3"),), ["foo not in (:p0, :p1, :p2)"], ["1", "2", "3"]),
+ ((("foo__notin", "[1,2,3]"),), ["foo not in (:p0, :p1, :p2)"], [1, 2, 3]),
],
)
def test_build_where(args, expected_where, expected_params):
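A rough usage sketch for the new filter (assumptions: constructing Filters from querystring pairs as the test above does; exact return shapes may differ):

    from datasette.filters import Filters

    # ?foo__notin=1,2,3 is the inverse of __in= and becomes a NOT IN clause
    f = Filters([("foo__notin", "1,2,3")])
    sql_bits, params = f.build_where_clauses("mytable")
    # sql_bits -> ["foo not in (:p0, :p1, :p2)"], bound to the values "1", "2", "3"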
From 52fa79c6075f0830ff635b81d957c64d877a05aa Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 4 Nov 2019 15:03:48 -0800
Subject: [PATCH 0239/2288] Use select colnames, not select * for table view -
refs #615
---
datasette/views/table.py | 8 ++++++--
tests/test_api.py | 3 ++-
2 files changed, 8 insertions(+), 3 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 326c11ae..139ff80b 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -235,13 +235,17 @@ class TableView(RowTableShared):
raise NotFound("Table not found: {}".format(table))
pks = await db.primary_keys(table)
+ table_columns = await db.table_columns(table)
+
+ select_columns = ", ".join(escape_sqlite(t) for t in table_columns)
+
use_rowid = not pks and not is_view
if use_rowid:
- select = "rowid, *"
+ select = "rowid, {}".format(select_columns)
order_by = "rowid"
order_by_pks = "rowid"
else:
- select = "*"
+ select = select_columns
order_by_pks = ", ".join([escape_sqlite(pk) for pk in pks])
order_by = order_by_pks
diff --git a/tests/test_api.py b/tests/test_api.py
index c6acbab1..4a09b238 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -610,7 +610,8 @@ def test_table_json(app_client):
assert response.status == 200
data = response.json
assert (
- data["query"]["sql"] == "select * from simple_primary_key order by id limit 51"
+ data["query"]["sql"]
+ == "select id, content from simple_primary_key order by id limit 51"
)
assert data["query"]["params"] == {}
assert data["rows"] == [
From 931bfc66613aa3e22f8314df5c0d0758baf31f38 Mon Sep 17 00:00:00 2001
From: Tobias Kunze
Date: Tue, 5 Nov 2019 00:16:30 +0100
Subject: [PATCH 0240/2288] Handle spaces in DB names (#590)
Closes #503 - thanks, @rixx
---
datasette/views/base.py | 3 ++-
tests/fixtures.py | 4 ++--
tests/test_api.py | 19 ++++++++++++++++++-
tests/test_html.py | 8 ++++----
4 files changed, 26 insertions(+), 8 deletions(-)
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 94945304..062c6956 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -203,12 +203,13 @@ class DataView(BaseView):
hash = hash_bit
else:
name = db_name
- # Verify the hash
+ name = urllib.parse.unquote_plus(name)
try:
db = self.ds.databases[name]
except KeyError:
raise NotFound("Database not found: {}".format(name))
+ # Verify the hash
expected = "000"
if db.hash is not None:
expected = db.hash[:HASH_LENGTH]
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 8aa44687..dcc414bf 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -174,7 +174,7 @@ def app_client_no_files():
@pytest.fixture(scope="session")
def app_client_two_attached_databases():
yield from make_app_client(
- extra_databases={"extra_database.db": EXTRA_DATABASE_SQL}
+ extra_databases={"extra database.db": EXTRA_DATABASE_SQL}
)
@@ -188,7 +188,7 @@ def app_client_conflicting_database_names():
@pytest.fixture(scope="session")
def app_client_two_attached_databases_one_immutable():
yield from make_app_client(
- is_immutable=True, extra_databases={"extra_database.db": EXTRA_DATABASE_SQL}
+ is_immutable=True, extra_databases={"extra database.db": EXTRA_DATABASE_SQL}
)
diff --git a/tests/test_api.py b/tests/test_api.py
index 4a09b238..1fa8642f 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -6,6 +6,7 @@ from .fixtures import ( # noqa
app_client_shorter_time_limit,
app_client_larger_cache_size,
app_client_returned_rows_matches_page_size,
+ app_client_two_attached_databases,
app_client_two_attached_databases_one_immutable,
app_client_conflicting_database_names,
app_client_with_cors,
@@ -1188,7 +1189,7 @@ def test_databases_json(app_client_two_attached_databases_one_immutable):
databases = response.json
assert 2 == len(databases)
extra_database, fixtures_database = databases
- assert "extra_database" == extra_database["name"]
+ assert "extra database" == extra_database["name"]
assert None == extra_database["hash"]
assert True == extra_database["is_mutable"]
assert False == extra_database["is_memory"]
@@ -1679,6 +1680,22 @@ def test_cors(app_client_with_cors, path, status_code):
assert "*" == response.headers["Access-Control-Allow-Origin"]
+@pytest.mark.parametrize(
+ "path",
+ (
+ "/",
+ ".json",
+ "/searchable",
+ "/searchable.json",
+ "/searchable_view",
+ "/searchable_view.json",
+ ),
+)
+def test_database_with_space_in_name(app_client_two_attached_databases, path):
+ response = app_client_two_attached_databases.get("/extra database" + path)
+ assert response.status == 200
+
+
def test_common_prefix_database_names(app_client_conflicting_database_names):
# https://github.com/simonw/datasette/issues/597
assert ["fixtures", "foo", "foo-bar"] == [
diff --git a/tests/test_html.py b/tests/test_html.py
index f63e595b..7f1af86e 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -27,11 +27,11 @@ def test_homepage(app_client_two_attached_databases):
# Should be two attached databases
assert [
{"href": "/fixtures", "text": "fixtures"},
- {"href": "/extra_database", "text": "extra_database"},
+ {"href": "/extra database", "text": "extra database"},
] == [{"href": a["href"], "text": a.text.strip()} for a in soup.select("h2 a")]
# The first attached database should show count text and attached tables
h2 = soup.select("h2")[1]
- assert "extra_database" == h2.text.strip()
+ assert "extra database" == h2.text.strip()
counts_p, links_p = h2.find_all_next("p")[:2]
assert (
"2 rows in 1 table, 5 rows in 4 hidden tables, 1 view" == counts_p.text.strip()
@@ -41,8 +41,8 @@ def test_homepage(app_client_two_attached_databases):
{"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a")
]
assert [
- {"href": "/extra_database/searchable", "text": "searchable"},
- {"href": "/extra_database/searchable_view", "text": "searchable_view"},
+ {"href": "/extra database/searchable", "text": "searchable"},
+ {"href": "/extra database/searchable_view", "text": "searchable_view"},
] == table_links
From c30f07c58e410ee296b28aeabe4dc461dd40b435 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 5 Nov 2019 21:12:55 -0800
Subject: [PATCH 0241/2288] Removed _group_count=col feature, closes #504
---
datasette/views/table.py | 12 ------------
docs/json_api.rst | 9 ---------
2 files changed, 21 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 139ff80b..920693d7 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -499,18 +499,6 @@ class TableView(RowTableShared):
if order_by:
order_by = "order by {} ".format(order_by)
- # _group_count=col1&_group_count=col2
- group_count = special_args_lists.get("_group_count") or []
- if group_count:
- sql = 'select {group_cols}, count(*) as "count" from {table_name} {where} group by {group_cols} order by "count" desc limit 100'.format(
- group_cols=", ".join(
- '"{}"'.format(group_count_col) for group_count_col in group_count
- ),
- table_name=escape_sqlite(table),
- where=where_clause,
- )
- return await self.custom_sql(request, database, hash, sql, editable=True)
-
extra_args = {}
# Handle ?_size=500
page_size = _size or request.raw_args.get("_size")
diff --git a/docs/json_api.rst b/docs/json_api.rst
index de70362c..e369bee7 100644
--- a/docs/json_api.rst
+++ b/docs/json_api.rst
@@ -321,15 +321,6 @@ Special table arguments
Here's `an example `__.
-
-``?_group_count=COLUMN``
- Executes a SQL query that returns a count of the number of rows matching
- each unique value in that column, with the most common ordered first.
-
-``?_group_count=COLUMN1&_group_count=column2``
- You can pass multiple ``_group_count`` columns to return counts against
- unique combinations of those columns.
-
``?_next=TOKEN``
Pagination by continuation token - pass the token that was returned in the
``"next"`` property by the previous page.
From f9c146b893856a48afa810ebcce1714f30d0d3a2 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 6 Nov 2019 16:55:44 -0800
Subject: [PATCH 0242/2288] Removed unused special_args_lists variable
---
datasette/views/table.py | 2 --
1 file changed, 2 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 920693d7..a60a3941 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -261,12 +261,10 @@ class TableView(RowTableShared):
# That's so if there is a column that starts with _
# it can still be queried using ?_col__exact=blah
special_args = {}
- special_args_lists = {}
other_args = []
for key, value in args.items():
if key.startswith("_") and "__" not in key:
special_args[key] = value[0]
- special_args_lists[key] = value
else:
for v in value:
other_args.append((key, v))
From 83fc5165ac724f69cd57d8f15cd3038e7b30f878 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 7 Nov 2019 18:48:39 -0800
Subject: [PATCH 0243/2288] Improved UI for publish cloudrun, closes #608
---
datasette/publish/cloudrun.py | 39 ++++++++++++++++++++++--
tests/test_publish_cloudrun.py | 55 ++++++++++++++++++++++++++++++++--
2 files changed, 90 insertions(+), 4 deletions(-)
diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py
index c2d77746..a833a32b 100644
--- a/datasette/publish/cloudrun.py
+++ b/datasette/publish/cloudrun.py
@@ -60,6 +60,23 @@ def publish_subcommand(publish):
"gcloud config get-value project", shell=True, universal_newlines=True
).strip()
+ if not service:
+ # Show the user their current services, then prompt for one
+ click.echo("Please provide a service name for this deployment\n")
+ click.echo("Using an existing service name will over-write it")
+ click.echo("")
+ existing_services = get_existing_services()
+ if existing_services:
+ click.echo("Your existing services:\n")
+ for existing_service in existing_services:
+ click.echo(
+ " {name} - created {created} - {url}".format(
+ **existing_service
+ )
+ )
+ click.echo("")
+ service = click.prompt("Service name", type=str)
+
extra_metadata = {
"title": title,
"license": license,
@@ -110,8 +127,26 @@ def publish_subcommand(publish):
image_id = "gcr.io/{project}/{name}".format(project=project, name=name)
check_call("gcloud builds submit --tag {}".format(image_id), shell=True)
check_call(
- "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {}{}".format(
- image_id, " {}".format(service) if service else ""
+ "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} {}".format(
+ image_id, service,
),
shell=True,
)
+
+
+def get_existing_services():
+ services = json.loads(
+ check_output(
+ "gcloud beta run services list --platform=managed --format json",
+ shell=True,
+ universal_newlines=True,
+ )
+ )
+ return [
+ {
+ "name": service["metadata"]["name"],
+ "created": service["metadata"]["creationTimestamp"],
+ "url": service["status"]["address"]["url"],
+ }
+ for service in services
+ ]
diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py
index 481ac04d..a038b60e 100644
--- a/tests/test_publish_cloudrun.py
+++ b/tests/test_publish_cloudrun.py
@@ -24,6 +24,53 @@ def test_publish_cloudrun_invalid_database(mock_which):
assert 'Path "woop.db" does not exist' in result.output
+@mock.patch("shutil.which")
+@mock.patch("datasette.publish.cloudrun.check_output")
+@mock.patch("datasette.publish.cloudrun.check_call")
+@mock.patch("datasette.publish.cloudrun.get_existing_services")
+def test_publish_cloudrun_prompts_for_service(
+ mock_get_existing_services, mock_call, mock_output, mock_which
+):
+ mock_get_existing_services.return_value = [
+ {"name": "existing", "created": "2019-01-01", "url": "http://www.example.com/"}
+ ]
+ mock_output.return_value = "myproject"
+ mock_which.return_value = True
+ runner = CliRunner()
+ with runner.isolated_filesystem():
+ open("test.db", "w").write("data")
+ result = runner.invoke(
+ cli.cli, ["publish", "cloudrun", "test.db"], input="input-service"
+ )
+ assert (
+ """
+Please provide a service name for this deployment
+
+Using an existing service name will over-write it
+
+Your existing services:
+
+ existing - created 2019-01-01 - http://www.example.com/
+
+Service name: input-service
+""".strip()
+ == result.output.strip()
+ )
+ assert 0 == result.exit_code
+ tag = "gcr.io/myproject/datasette"
+ mock_call.assert_has_calls(
+ [
+ mock.call("gcloud builds submit --tag {}".format(tag), shell=True),
+ mock.call(
+ "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} input-service".format(
+ tag
+ ),
+ shell=True,
+ ),
+ ]
+ )
+
+
@mock.patch("shutil.which")
@mock.patch("datasette.publish.cloudrun.check_output")
@mock.patch("datasette.publish.cloudrun.check_call")
@@ -33,14 +80,16 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which):
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
- result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"])
+ result = runner.invoke(
+ cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"]
+ )
assert 0 == result.exit_code
tag = "gcr.io/{}/datasette".format(mock_output.return_value)
mock_call.assert_has_calls(
[
mock.call("gcloud builds submit --tag {}".format(tag), shell=True),
mock.call(
- "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {}".format(
+ "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} test".format(
tag
),
shell=True,
@@ -65,6 +114,8 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which):
"publish",
"cloudrun",
"test.db",
+ "--service",
+ "datasette",
"--plugin-secret",
"datasette-auth-github",
"client_id",
From 9f5d19c254d1bfbd99f576dff47a6e32e01c76ed Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 Nov 2019 18:12:20 -0800
Subject: [PATCH 0244/2288] Improved documentation for "publish cloudrun"
---
docs/publish.rst | 18 ++++++++++--------
1 file changed, 10 insertions(+), 8 deletions(-)
diff --git a/docs/publish.rst b/docs/publish.rst
index 304be8ef..89d33085 100644
--- a/docs/publish.rst
+++ b/docs/publish.rst
@@ -43,14 +43,16 @@ You will first need to install and configure the Google Cloud CLI tools by follo
You can then publish a database to Google Cloud Run using the following command::
- datasette publish cloudrun mydatabase.db
+ datasette publish cloudrun mydatabase.db --service=my-database
+
+A Cloud Run **service** is a single hosted application. The service name you specify will be used as part of the Cloud Run URL. If you deploy to a service name that you have used in the past your new deployment will replace the previous one.
+
+If you omit the ``--service`` option you will be asked to pick a service name interactively during the deploy.
You may need to interact with prompts from the tool. Once it has finished it will output a URL like this one::
- Service [datasette] revision [datasette-00001] has been deployed
- and is serving traffic at https://datasette-j7hipcg4aq-uc.a.run.app
-
-During the deployment the tool will prompt you for the name of your service. You can reuse an existing name to replace your previous deployment with your new version, or pick a new name to deploy to a new URL.
+ Service [my-service] revision [my-service-00001] has been deployed
+ and is serving traffic at https://my-service-j7hipcg4aq-uc.a.run.app
.. literalinclude:: datasette-publish-cloudrun-help.txt
@@ -90,18 +92,18 @@ Custom metadata and plugins
You can define your own :ref:`metadata` and deploy that with your instance like so::
- datasette publish nowv1 mydatabase.db -m metadata.json
+ datasette publish cloudrun --service=my-service mydatabase.db -m metadata.json
If you just want to set the title, license or source information you can do that directly using extra options to ``datasette publish``::
- datasette publish nowv1 mydatabase.db \
+ datasette publish cloudrun mydatabase.db --service=my-service \
--title="Title of my database" \
--source="Where the data originated" \
--source_url="http://www.example.com/"
You can also specify plugins you would like to install. For example, if you want to include the `datasette-vega `_ visualization plugin you can use the following::
- datasette publish nowv1 mydatabase.db --install=datasette-vega
+ datasette publish cloudrun mydatabase.db --service=my-service --install=datasette-vega
If a plugin has any :ref:`plugins_configuration_secret` you can use the ``--plugin-secret`` option to set those secrets at publish time. For example, using Heroku with `datasette-auth-github `__ you might run the following command::
From 10b9d85edaaf198879344aa1c498000cfb27dff8 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 Nov 2019 18:15:13 -0800
Subject: [PATCH 0245/2288] datasette-csvs on Glitch now uses sqlite-utils
It previously used csvs-to-sqlite but that had heavy dependencies.
See https://support.glitch.com/t/can-you-upgrade-python-to-latest-version/7980/33
---
docs/getting_started.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/getting_started.rst b/docs/getting_started.rst
index d0c22583..fdf7d23c 100644
--- a/docs/getting_started.rst
+++ b/docs/getting_started.rst
@@ -25,7 +25,7 @@ Glitch allows you to "remix" any project to create your own copy and start editi
.. image:: https://cdn.glitch.com/2703baf2-b643-4da7-ab91-7ee2a2d00b5b%2Fremix-button.svg
:target: https://glitch.com/edit/#!/remix/datasette-csvs
-Find a CSV file and drag it onto the Glitch file explorer panel - ``datasette-csvs`` will automatically convert it to a SQLite database (using `csvs-to-sqlite `__) and allow you to start exploring it using Datasette.
+Find a CSV file and drag it onto the Glitch file explorer panel - ``datasette-csvs`` will automatically convert it to a SQLite database (using `sqlite-utils `__) and allow you to start exploring it using Datasette.
If your CSV file has a ``latitude`` and ``longitude`` column you can visualize it on a map by uncommenting the ``datasette-cluster-map`` line in the ``requirements.txt`` file using the Glitch file editor.
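For background, the conversion that ``sqlite-utils`` performs here boils down to loading CSV rows into a table. A minimal sketch using its Python API (file and table names are illustrative, not what the Glitch project uses):

.. code-block:: python

    import csv
    import sqlite_utils

    # Illustrative: import a CSV file into a SQLite table with sqlite-utils.
    db = sqlite_utils.Database("data.db")
    with open("mydata.csv", newline="") as f:
        db["mydata"].insert_all(csv.DictReader(f))
    print(db["mydata"].count)  # number of imported rows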
From 28c4a6db5b5e512db630d7ba6127196185de67c7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 9 Nov 2019 17:29:36 -0800
Subject: [PATCH 0246/2288] CREATE INDEX statements on table page, closes #618
---
datasette/database.py | 13 ++++++++++++-
tests/fixtures.py | 1 +
tests/test_html.py | 33 +++++++++++++++++++++++++++++++++
3 files changed, 46 insertions(+), 1 deletion(-)
diff --git a/datasette/database.py b/datasette/database.py
index 7e6f7245..3a1cea94 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -232,7 +232,18 @@ class Database:
)
if not table_definition_rows:
return None
- return table_definition_rows[0][0]
+ bits = [table_definition_rows[0][0] + ";"]
+ # Add on any indexes
+ index_rows = list(
+ await self.ds.execute(
+ self.name,
+ "select sql from sqlite_master where tbl_name = :n and type='index' and sql is not null",
+ {"n": table},
+ )
+ )
+ for index_row in index_rows:
+ bits.append(index_row[0] + ";")
+ return "\n".join(bits)
async def get_view_definition(self, view):
return await self.get_table_definition(view, "view")
diff --git a/tests/fixtures.py b/tests/fixtures.py
index dcc414bf..87e66f99 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -514,6 +514,7 @@ CREATE TABLE compound_three_primary_keys (
content text,
PRIMARY KEY (pk1, pk2, pk3)
);
+CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_keys(content);
CREATE TABLE foreign_key_references (
pk varchar(30) primary key,
diff --git a/tests/test_html.py b/tests/test_html.py
index 7f1af86e..44627cdc 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -119,6 +119,39 @@ def test_row_strange_table_name_with_url_hash(app_client_with_hash):
assert response.status == 200
+@pytest.mark.parametrize(
+ "path,expected_definition_sql",
+ [
+ (
+ "/fixtures/facet_cities",
+ """
+CREATE TABLE facet_cities (
+ id integer primary key,
+ name text
+);
+ """.strip(),
+ ),
+ (
+ "/fixtures/compound_three_primary_keys",
+ """
+CREATE TABLE compound_three_primary_keys (
+ pk1 varchar(30),
+ pk2 varchar(30),
+ pk3 varchar(30),
+ content text,
+ PRIMARY KEY (pk1, pk2, pk3)
+);
+CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_keys(content);
+ """.strip(),
+ ),
+ ],
+)
+def test_definition_sql(path, expected_definition_sql, app_client):
+ response = app_client.get(path)
+ pre = Soup(response.body, "html.parser").select_one("pre.wrapped-sql")
+ assert expected_definition_sql == pre.string
+
+
def test_table_cell_truncation():
for client in make_app_client(config={"truncate_cells_html": 5}):
response = client.get("/fixtures/facetable")
From 1c063fae9dba70f70244db010d55a18846640f07 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 10 Nov 2019 19:45:34 -0800
Subject: [PATCH 0247/2288] Test against Python 3.8 in Travis (#623)
* Test against Python 3.8 in Travis
* Avoid current_task warnings in Python 3.8
---
.travis.yml | 1 +
datasette/tracer.py | 9 ++++++++-
2 files changed, 9 insertions(+), 1 deletion(-)
diff --git a/.travis.yml b/.travis.yml
index 29388bc1..a6b15b7e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -5,6 +5,7 @@ dist: xenial
python:
- "3.6"
- "3.7"
+ - "3.8"
- "3.5"
# Executed for 3.5 AND 3.5 as the first "test" stage:
diff --git a/datasette/tracer.py b/datasette/tracer.py
index e46a6fda..a638b140 100644
--- a/datasette/tracer.py
+++ b/datasette/tracer.py
@@ -9,12 +9,19 @@ tracers = {}
TRACE_RESERVED_KEYS = {"type", "start", "end", "duration_ms", "traceback"}
+# asyncio.current_task was introduced in Python 3.7:
+for obj in (asyncio, asyncio.Task):
+ current_task = getattr(obj, "current_task", None)
+ if current_task is not None:
+ break
+
+
def get_task_id():
try:
loop = asyncio.get_event_loop()
except RuntimeError:
return None
- return id(asyncio.Task.current_task(loop=loop))
+ return id(current_task(loop=loop))
@contextmanager
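The shim above simply picks whichever current-task helper the running Python provides. A small standalone sketch of the same lookup, for illustration only:

.. code-block:: python

    import asyncio

    # Use asyncio.current_task (Python 3.7+) or fall back to asyncio.Task.current_task.
    for obj in (asyncio, asyncio.Task):
        current_task = getattr(obj, "current_task", None)
        if current_task is not None:
            break

    async def main():
        # Mirrors get_task_id() above: identify the currently running task.
        print(id(current_task()))

    asyncio.run(main())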
From 42ee3e16a9ba7cc513b8da944cc1609a5407cf42 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 10 Nov 2019 20:19:01 -0800
Subject: [PATCH 0248/2288] Bump pint to 0.9 (#624)
This fixes 2 deprecation warnings in Python 3.8 - refs #623 #622
---
setup.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.py b/setup.py
index 9ae56306..e8229de1 100644
--- a/setup.py
+++ b/setup.py
@@ -45,7 +45,7 @@ setup(
"click-default-group~=1.2.1",
"Jinja2~=2.10.1",
"hupper~=1.0",
- "pint~=0.8.1",
+ "pint~=0.9",
"pluggy~=0.12.0",
"uvicorn~=0.8.4",
"aiofiles~=0.4.0",
From 5bc2570121aea8141ff88790e214765472882b08 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 20:45:12 -0800
Subject: [PATCH 0249/2288] Include uvicorn version in /-/versions, refs #622
---
datasette/app.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/datasette/app.py b/datasette/app.py
index 203e0991..4ba4adfb 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -12,6 +12,7 @@ from pathlib import Path
import click
from markupsafe import Markup
from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader
+import uvicorn
from .views.base import DatasetteError, ureg, AsgiRouter
from .views.database import DatabaseDownload, DatabaseView
@@ -433,6 +434,7 @@ class Datasette:
},
"datasette": datasette_version,
"asgi": "3.0",
+ "uvicorn": uvicorn.__version__,
"sqlite": {
"version": sqlite_version,
"fts_versions": fts_versions,
From cf7776d36fbacefa874cbd6e5fcdc9fff7661203 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:09:11 -0800
Subject: [PATCH 0250/2288] Support Python 3.8, stop supporting Python 3.5
(#627)
* Upgrade to uvicorn 0.10.4
* Drop support for Python 3.5
* Bump all dependencies to latest releases
* Update docs to reflect we no longer support 3.5
* Removed code that skipped black unit test on 3.5
Closes #622
---
.travis.yml | 1 -
README.md | 2 +-
docs/contributing.rst | 2 +-
docs/installation.rst | 7 +++++--
setup.py | 20 ++++++++++----------
tests/test_black.py | 7 +------
6 files changed, 18 insertions(+), 21 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index a6b15b7e..0fc87d93 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -6,7 +6,6 @@ python:
- "3.6"
- "3.7"
- "3.8"
- - "3.5"
# Executed for 3.5 AND 3.5 as the first "test" stage:
script:
diff --git a/README.md b/README.md
index 9f85f1ba..14c9cfd6 100644
--- a/README.md
+++ b/README.md
@@ -69,7 +69,7 @@ sqlite-utils: a Python library and CLI tool for building SQLite databases](https
pip3 install datasette
-Datasette requires Python 3.5 or higher. We also have [detailed installation instructions](https://datasette.readthedocs.io/en/stable/installation.html) covering other options such as Docker.
+Datasette requires Python 3.6 or higher. We also have [detailed installation instructions](https://datasette.readthedocs.io/en/stable/installation.html) covering other options such as Docker.
## Basic usage
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 43834edc..078fd841 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -18,7 +18,7 @@ General guidelines
Setting up a development environment
------------------------------------
-If you have Python 3.5 or higher installed on your computer (on OS X the easiest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps.
+If you have Python 3.6 or higher installed on your computer (on OS X the easiest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps.
If you want to use GitHub to publish your changes, first `create a fork of datasette `__ under your own GitHub account.
diff --git a/docs/installation.rst b/docs/installation.rst
index e65d8ee3..9ee7eb4e 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -69,16 +69,19 @@ You can now run the new custom image like so::
You can confirm that the plugins are installed by visiting
http://127.0.0.1:8001/-/plugins
-
Install using pip
-----------------
-To run Datasette without Docker you will need Python 3.5 or higher.
+To run Datasette without Docker you will need Python 3.6 or higher.
You can install Datasette and its dependencies using ``pip``::
pip install datasette
+The last version to support Python 3.5 was 0.30.2 - you can install that version like so::
+
+ pip install datasette==0.30.2
+
If you want to install Datasette in its own virtual environment, use this::
python -mvenv datasette-venv
diff --git a/setup.py b/setup.py
index e8229de1..7a4cdcb3 100644
--- a/setup.py
+++ b/setup.py
@@ -42,12 +42,12 @@ setup(
include_package_data=True,
install_requires=[
"click~=7.0",
- "click-default-group~=1.2.1",
- "Jinja2~=2.10.1",
- "hupper~=1.0",
+ "click-default-group~=1.2.2",
+ "Jinja2~=2.10.3",
+ "hupper~=1.9",
"pint~=0.9",
- "pluggy~=0.12.0",
- "uvicorn~=0.8.4",
+ "pluggy~=0.13.0",
+ "uvicorn~=0.10.4",
"aiofiles~=0.4.0",
],
entry_points="""
@@ -58,11 +58,11 @@ setup(
extras_require={
"docs": ["sphinx_rtd_theme", "sphinx-autobuild"],
"test": [
- "pytest~=5.0.0",
+ "pytest~=5.2.2",
"pytest-asyncio~=0.10.0",
- "aiohttp~=3.5.3",
- "beautifulsoup4~=4.6.1",
- "asgiref~=3.1.2",
+ "aiohttp~=3.6.2",
+ "beautifulsoup4~=4.8.1",
+ "asgiref~=3.2.3",
]
+ maybe_black,
},
@@ -74,8 +74,8 @@ setup(
"Intended Audience :: End Users/Desktop",
"Topic :: Database",
"License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.6",
- "Programming Language :: Python :: 3.5",
],
)
diff --git a/tests/test_black.py b/tests/test_black.py
index 68e2dcc0..b5bfcfd0 100644
--- a/tests/test_black.py
+++ b/tests/test_black.py
@@ -1,3 +1,4 @@
+import black
from click.testing import CliRunner
from pathlib import Path
import pytest
@@ -6,13 +7,7 @@ import sys
code_root = Path(__file__).parent.parent
-@pytest.mark.skipif(
- sys.version_info[:2] < (3, 6), reason="Black requires Python 3.6 or later"
-)
def test_black():
- # Do not import at top of module because Python 3.5 will not have it installed
- import black
-
runner = CliRunner()
result = runner.invoke(
black.main, [str(code_root / "tests"), str(code_root / "datasette"), "--check"]
From 76fc6a9c7317ce4fbf3cc3d327c849f7274d960a Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:17:59 -0800
Subject: [PATCH 0251/2288] Release notes for 0.31
---
docs/changelog.rst | 22 ++++++++++++++++++++++
1 file changed, 22 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index f4761efe..6e260be9 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,28 @@
Changelog
=========
+.. _v0_31:
+
+0.31 (2019-11-11)
+-----------------
+
+This version adds compatibility with Python 3.8 and breaks compatibility with Python 3.5.
+
+If you are still running Python 3.5 you should stick with ``0.30.2``, which you can install like this::
+
+ pip install datasette==0.30.2
+
+- Format SQL button now works with read-only SQL queries - thanks, Tobias Kunze (`#602 `__)
+- New ``?column__notin=x,y,z`` filter for table views (`#614 `__)
+- Table view now uses ``select col1, col2, col3`` instead of ``select *``
+- Database filenames can now contain spaces - thanks, Tobias Kunze (`#590 `__)
+- Removed obsolete ``?_group_count=col`` feature (`#504 `__)
+- Improved user interface and documentation for ``datasette publish cloudrun`` (`#608 `__)
+- Tables with indexes now show the `` CREATE INDEX`` statements on the table page (`#618 `__)
+- Current version of `uvicorn `__ is now shown on ``/-/versions``
+- Python 3.8 is now supported! (`#622 `__)
+- Python 3.5 is no longer supported.
+
.. _v0_30_2:
0.30.2 (2019-11-02)
From c633c035dc8d4c60f1d13cb074918406bbdb3734 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:26:56 -0800
Subject: [PATCH 0252/2288] Datasette 0.31 in news section
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 14c9cfd6..05995a74 100644
--- a/README.md
+++ b/README.md
@@ -21,6 +21,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+ * 11th November 2019: [Datasette 0.31](https://datasette.readthedocs.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5.
* 18th October 2019: [Datasette 0.30](https://datasette.readthedocs.io/en/stable/changelog.html#v0-30)
* 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail.
* 7th July 2019: [Datasette 0.29](https://datasette.readthedocs.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more...
From 7f89928062b1a1fdb2625a946f7cd5161e597401 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:33:51 -0800
Subject: [PATCH 0253/2288] Removed code that conditionally installs black
Since we no longer support Python 3.5 we don't need this any more.
---
setup.py | 9 ++-------
1 file changed, 2 insertions(+), 7 deletions(-)
diff --git a/setup.py b/setup.py
index 7a4cdcb3..15284779 100644
--- a/setup.py
+++ b/setup.py
@@ -22,11 +22,6 @@ def get_version():
return g["__version__"]
-# Only install black on Python 3.6 or higher
-maybe_black = []
-if sys.version_info > (3, 6):
- maybe_black = ["black~=19.10b0"]
-
setup(
name="datasette",
version=versioneer.get_version(),
@@ -63,8 +58,8 @@ setup(
"aiohttp~=3.6.2",
"beautifulsoup4~=4.8.1",
"asgiref~=3.2.3",
- ]
- + maybe_black,
+ "black~=19.10b0",
+ ],
},
tests_require=["datasette[test]"],
classifiers=[
From 1c518680e9692a9a77022af54f3de3e77fb1aaf4 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:57:48 -0800
Subject: [PATCH 0254/2288] Final steps: build stable branch of Read The Docs
---
docs/contributing.rst | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 078fd841..48930332 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -150,4 +150,7 @@ Wait long enough for Travis to build and deploy the demo version of that commit
git tag 0.25.2
git push --tags
-Once the release is out, you can manually update https://github.com/simonw/datasette/releases
+Final steps once the release has deployed to https://pypi.org/project/datasette/
+
+* Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases
+* Manually kick off a build of the `stable` branch on Read The Docs: https://readthedocs.org/projects/datasette/builds/
From f554be39fc14ddc18921ca29d3920d55aad03d46 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 22:00:13 -0800
Subject: [PATCH 0255/2288] ReST fix
---
docs/changelog.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 6e260be9..763b178e 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -21,7 +21,7 @@ If you are still running Python 3.5 you should stick with ``0.30.2``, which you
- Database filenames can now contain spaces - thanks, Tobias Kunze (`#590 `__)
- Removed obsolete ``?_group_count=col`` feature (`#504 `__)
- Improved user interface and documentation for ``datasette publish cloudrun`` (`#608 `__)
-- Tables with indexes now show the `` CREATE INDEX`` statements on the table page (`#618 `__)
+- Tables with indexes now show the ``CREATE INDEX`` statements on the table page (`#618 `__)
- Current version of `uvicorn `__ is now shown on ``/-/versions``
- Python 3.8 is now supported! (`#622 `__)
- Python 3.5 is no longer supported.
From d977fbadf70a96bf2eea1407d01f99d98e092dec Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 22:03:09 -0800
Subject: [PATCH 0256/2288] datasette publish uses python:3.8 base Docker
image, closes #629
---
datasette/utils/__init__.py | 2 +-
tests/test_publish_cloudrun.py | 2 +-
tests/test_publish_now.py | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 3d28a36b..b8df48cf 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -306,7 +306,7 @@ def make_dockerfile(
install = ["datasette"] + list(install)
return """
-FROM python:3.6
+FROM python:3.8
COPY . /app
WORKDIR /app
{spatialite_extras}
diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py
index a038b60e..c5b18cdf 100644
--- a/tests/test_publish_cloudrun.py
+++ b/tests/test_publish_cloudrun.py
@@ -128,7 +128,7 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which):
.split("\n====================\n")[0]
.strip()
)
- expected = """FROM python:3.6
+ expected = """FROM python:3.8
COPY . /app
WORKDIR /app
diff --git a/tests/test_publish_now.py b/tests/test_publish_now.py
index 72aa71db..27fd1245 100644
--- a/tests/test_publish_now.py
+++ b/tests/test_publish_now.py
@@ -138,7 +138,7 @@ def test_publish_now_plugin_secrets(mock_run, mock_which):
.split("\n====================\n")[0]
.strip()
)
- expected = """FROM python:3.6
+ expected = """FROM python:3.8
COPY . /app
WORKDIR /app
From 16265f6a1a7c547e3925e0fc2d6b88754afb0435 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 12 Nov 2019 18:18:04 -0800
Subject: [PATCH 0257/2288] Release notes for 0.31.1
---
docs/changelog.rst | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 763b178e..746f5b42 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,13 @@
Changelog
=========
+.. _v0_31_1:
+
+0.31.1 (2019-11-12)
+-------------------
+
+- Deploymens created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 `__)
+
.. _v0_31:
0.31 (2019-11-11)
From a22c7761b61baa61b8e3da7d30887468d61d6b83 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 12 Nov 2019 18:18:39 -0800
Subject: [PATCH 0258/2288] Fixed typo in release notes
---
docs/changelog.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 746f5b42..e527518e 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -9,7 +9,7 @@ Changelog
0.31.1 (2019-11-12)
-------------------
-- Deploymens created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 `__)
+- Deployments created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 `__)
.. _v0_31:
From bbd00e903cdd49067ecdbdb60a4d225833a44b05 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 12 Nov 2019 18:38:13 -0800
Subject: [PATCH 0259/2288] Badge linking to datasette on hub.docker.com
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 05995a74..9a22c2b2 100644
--- a/README.md
+++ b/README.md
@@ -6,6 +6,7 @@
[](http://datasette.readthedocs.io/en/latest/?badge=latest)
[](https://github.com/simonw/datasette/blob/master/LICENSE)
[](https://black.readthedocs.io/en/stable/)
+[](https://hub.docker.com/r/datasetteproject/datasette)
*A tool for exploring and publishing data*
From 848dec4deb0d3c140a4e0394cac45fbb2593349b Mon Sep 17 00:00:00 2001
From: Stanley Zheng
Date: Tue, 12 Nov 2019 23:28:42 -0500
Subject: [PATCH 0260/2288] Fix for datasette publish with just --source_url
(#631)
Closes #572
---
datasette/templates/_description_source_license.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/datasette/templates/_description_source_license.html b/datasette/templates/_description_source_license.html
index 3327706e..a2bc18f2 100644
--- a/datasette/templates/_description_source_license.html
+++ b/datasette/templates/_description_source_license.html
@@ -21,7 +21,7 @@
{% endif %}{{ metadata.source or metadata.source_url }}{% if metadata.source_url %}{% endif %}
{% endif %}
- {% if metadata.about or metadata.about_url %}{% if metadata.license or metadata.license_url or metadata.source or metadat.source_url %}·{% endif %}
+ {% if metadata.about or metadata.about_url %}{% if metadata.license or metadata.license_url or metadata.source or metadata.source_url %}·{% endif %}
About: {% if metadata.about_url %}
{% endif %}{{ metadata.about or metadata.about_url }}{% if metadata.about_url %}{% endif %}
From f52451023025579ae9a13de4a7f00d69200184cd Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 13 Nov 2019 08:42:47 -0800
Subject: [PATCH 0261/2288] Fix "publish heroku" + upgrade to use Python 3.8.0
Closes #633. Closes #632.
---
datasette/publish/heroku.py | 7 +++++--
tests/test_publish_heroku.py | 9 +++++++--
2 files changed, 12 insertions(+), 4 deletions(-)
diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py
index 34d1f773..e75f76df 100644
--- a/datasette/publish/heroku.py
+++ b/datasette/publish/heroku.py
@@ -72,7 +72,10 @@ def publish_subcommand(publish):
"about_url": about_url,
}
- environment_variables = {}
+ environment_variables = {
+ # Avoid uvicorn error: https://github.com/simonw/datasette/issues/633
+ "WEB_CONCURRENCY": "1"
+ }
if plugin_secret:
extra_metadata["plugins"] = {}
for plugin_name, plugin_setting, setting_value in plugin_secret:
@@ -164,7 +167,7 @@ def temporary_heroku_directory(
if metadata_content:
open("metadata.json", "w").write(json.dumps(metadata_content, indent=2))
- open("runtime.txt", "w").write("python-3.6.8")
+ open("runtime.txt", "w").write("python-3.8.0")
if branch:
install = [
diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py
index 4cd66219..87386e93 100644
--- a/tests/test_publish_heroku.py
+++ b/tests/test_publish_heroku.py
@@ -57,8 +57,13 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which):
open("test.db", "w").write("data")
result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"])
assert 0 == result.exit_code, result.output
- mock_call.assert_called_once_with(
- ["heroku", "builds:create", "-a", "f", "--include-vcs-ignore"]
+ mock_call.assert_has_calls(
+ [
+ mock.call(["heroku", "config:set", "-a", "f", "WEB_CONCURRENCY=1",]),
+ mock.call(
+ ["heroku", "builds:create", "-a", "f", "--include-vcs-ignore"]
+ ),
+ ]
)
From b51f258d00bb3c3b401f15d46a1fbd50394dbe1c Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 13 Nov 2019 08:48:36 -0800
Subject: [PATCH 0262/2288] Release notes for 0.31.2
---
docs/changelog.rst | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index e527518e..f4958399 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,15 @@
Changelog
=========
+.. _v0_31_2:
+
+0.31.2 (2019-11-13)
+-------------------
+
+- Fixed a bug where ``datasette publish heroku`` applications failed to start (`#633 `__)
+- Fix for ``datasette publish`` with just ``--source_url`` - thanks, Stanley Zheng (`#572 `__)
+- Deployments to Heroku now use Python 3.8.0 (`#632 `__)
+
.. _v0_31_1:
0.31.1 (2019-11-12)
From 8c642f04e0608bf537fdd1f76d64c2367fb04d57 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Nov 2019 15:14:22 -0800
Subject: [PATCH 0263/2288] Render templates using Jinja async mode
Closes #628
---
datasette/app.py | 6 ++++--
datasette/views/base.py | 2 +-
docs/plugins.rst | 23 ++++++++++++-----------
tests/fixtures.py | 8 +++++++-
tests/test_plugins.py | 18 ++++++++++++++++++
tests/test_templates/show_json.html | 1 +
6 files changed, 43 insertions(+), 15 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 4ba4adfb..02fcf303 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -583,7 +583,9 @@ class Datasette:
),
]
)
- self.jinja_env = Environment(loader=template_loader, autoescape=True)
+ self.jinja_env = Environment(
+ loader=template_loader, autoescape=True, enable_async=True
+ )
self.jinja_env.filters["escape_css_string"] = escape_css_string
self.jinja_env.filters["quote_plus"] = lambda u: urllib.parse.quote_plus(u)
self.jinja_env.filters["escape_sqlite"] = escape_sqlite
@@ -730,5 +732,5 @@ class DatasetteRouter(AsgiRouter):
else:
template = self.ds.jinja_env.select_template(templates)
await asgi_send_html(
- send, template.render(info), status=status, headers=headers
+ send, await template.render_async(info), status=status, headers=headers
)
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 062c6956..5182479c 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -139,7 +139,7 @@ class BaseView(AsgiView):
extra_template_vars.update(extra_vars)
return Response.html(
- template.render(
+ await template.render_async(
{
**context,
**{
diff --git a/docs/plugins.rst b/docs/plugins.rst
index 6df7ff6a..e5a3d7dd 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -629,7 +629,9 @@ Function that returns a dictionary
If you return a function it will be executed. If it returns a dictionary those values will be merged into the template context.
Function that returns an awaitable function that returns a dictionary
- You can also return a function which returns an awaitable function which returns a dictionary. This means you can execute additional SQL queries using ``datasette.execute()``.
+ You can also return a function which returns an awaitable function which returns a dictionary.
+
+Datasette runs Jinja2 in `async mode `__, which means you can add awaitable functions to the template scope and they will be automatically awaited when they are rendered by the template.
Here's an example plugin that returns an authentication object from the ASGI scope:
@@ -641,20 +643,19 @@ Here's an example plugin that returns an authentication object from the ASGI sco
"auth": request.scope.get("auth")
}
-And here's an example which returns the current version of SQLite:
+And here's an example which adds a ``sql_first(sql_query)`` function which executes a SQL statement and returns the first column of the first row of results:
.. code-block:: python
@hookimpl
- def extra_template_vars(datasette):
- async def inner():
- first_db = list(datasette.databases.keys())[0]
- return {
- "sqlite_version": (
- await datasette.execute(first_db, "select sqlite_version()")
- ).rows[0][0]
- }
- return inner
+ def extra_template_vars(datasette, database):
+ async def sql_first(sql, dbname=None):
+ dbname = dbname or database or next(iter(datasette.databases.keys()))
+ return (await datasette.execute(dbname, sql)).rows[0][0]
+
+ return {"sql_first": sql_first}
+
+You can then use the new function in a template like so::
+
+ SQLite version: {{ sql_first("select sqlite_version()") }}
.. _plugin_register_output_renderer:
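Outside of Datasette, Jinja's async mode works the same way. This minimal standalone sketch (not part of the patch) shows an async function being awaited automatically during rendering:

.. code-block:: python

    import asyncio
    from jinja2 import Environment

    env = Environment(enable_async=True)

    async def sqlite_version():
        # Stand-in for an awaitable that would normally run a SQL query.
        return "3.30.1"

    async def main():
        template = env.from_string("SQLite version: {{ version() }}")
        # In async mode the coroutine returned by version() is awaited automatically.
        print(await template.render_async(version=sqlite_version))

    asyncio.run(main())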
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 87e66f99..3e4203f7 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -446,13 +446,19 @@ def render_cell(value, database):
@hookimpl
def extra_template_vars(template, database, table, view_name, request, datasette):
+ async def query_database(sql):
+ first_db = list(datasette.databases.keys())[0]
+ return (
+ await datasette.execute(first_db, sql)
+ ).rows[0][0]
async def inner():
return {
"extra_template_vars_from_awaitable": json.dumps({
"template": template,
"scope_path": request.scope["path"],
"awaitable": True,
- }, default=lambda b: b.decode("utf8"))
+ }, default=lambda b: b.decode("utf8")),
+ "query_database": query_database,
}
return inner
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index b1c7fd9a..42d063f4 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -1,5 +1,6 @@
from bs4 import BeautifulSoup as Soup
from .fixtures import app_client, make_app_client, TEMP_PLUGIN_SECRET_FILE # noqa
+from datasette.utils import sqlite3
import base64
import json
import os
@@ -214,3 +215,20 @@ def test_plugins_extra_template_vars(restore_working_directory):
"awaitable": True,
"scope_path": "/-/metadata",
} == extra_template_vars_from_awaitable
+
+
+def test_plugins_async_template_function(restore_working_directory):
+ for client in make_app_client(
+ template_dir=str(pathlib.Path(__file__).parent / "test_templates")
+ ):
+ response = client.get("/-/metadata")
+ assert response.status == 200
+ extra_from_awaitable_function = (
+ Soup(response.body, "html.parser")
+ .select("pre.extra_from_awaitable_function")[0]
+ .text
+ )
+ expected = (
+ sqlite3.connect(":memory:").execute("select sqlite_version()").fetchone()[0]
+ )
+ assert expected == extra_from_awaitable_function
diff --git a/tests/test_templates/show_json.html b/tests/test_templates/show_json.html
index bbf1bc06..cff04fb4 100644
--- a/tests/test_templates/show_json.html
+++ b/tests/test_templates/show_json.html
@@ -5,4 +5,5 @@
Test data for extra_template_vars:
{{ extra_template_vars|safe }}
{{ extra_template_vars_from_awaitable|safe }}
+
{{ query_database("select sqlite_version();") }}
{% endblock %}
From a95bedb9c423fa6d772c93ef47bc40f13a5bea50 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Nov 2019 15:18:53 -0800
Subject: [PATCH 0264/2288] Release notes for 0.32
---
docs/changelog.rst | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index f4958399..2f909364 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,13 @@
Changelog
=========
+.. _v0_32:
+
+0.32 (2019-11-14)
+-----------------
+
+Datasette now renders templates using `Jinja async mode `__. This makes it easy for plugins to provide custom template functions that perform asynchronous actions, for example the new `datasette-template-sql `__ plugin which allows custom templates to directly execute SQL queries and render their results. (`#628 `__)
+
.. _v0_31_2:
0.31.2 (2019-11-13)
From 8fc9a5d877d26dbf2654e125f407ddd2fd767335 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Nov 2019 15:46:37 -0800
Subject: [PATCH 0265/2288] Datasette 0.32 and datasette-template-sql in news
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 9a22c2b2..030c507f 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+ * 14th November 2019: [Datasette 0.32](https://datasette.readthedocs.io/en/stable/changelog.html#v0-32) now uses asynchronous rendering in Jinja templates, which means template functions can perform asynchronous operations such as executing SQL queries. [datasette-template-sql](https://github.com/simonw/datasette-template-sql) is a new plugin that uses this capability to add a new custom `sql(sql_query)` template function.
* 11th November 2019: [Datasette 0.31](https://datasette.readthedocs.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5.
* 18th October 2019: [Datasette 0.30](https://datasette.readthedocs.io/en/stable/changelog.html#v0-30)
* 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail.
From a9909c29ccac771c23c2ef22b89d10697b5256b9 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 15 Nov 2019 14:49:45 -0800
Subject: [PATCH 0266/2288] Move .execute() from Datasette to Database
Refs #569 - I split this change out from #579
---
datasette/app.py | 90 ++++++---------------------
datasette/database.py | 137 +++++++++++++++++++++++++++++++-----------
2 files changed, 121 insertions(+), 106 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 02fcf303..119d0e19 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -24,13 +24,11 @@ from .database import Database
from .utils import (
QueryInterrupted,
- Results,
escape_css_string,
escape_sqlite,
get_plugins,
module_from_path,
sqlite3,
- sqlite_timelimit,
to_css_class,
)
from .utils.asgi import (
@@ -42,13 +40,12 @@ from .utils.asgi import (
asgi_send_json,
asgi_send_redirect,
)
-from .tracer import trace, AsgiTracer
+from .tracer import AsgiTracer
from .plugins import pm, DEFAULT_PLUGINS
from .version import __version__
app_root = Path(__file__).parent.parent
-connections = threading.local()
MEMORY = object()
ConfigOption = collections.namedtuple("ConfigOption", ("name", "default", "help"))
@@ -336,6 +333,25 @@ class Datasette:
# pylint: disable=no-member
pm.hook.prepare_connection(conn=conn)
+ async def execute(
+ self,
+ db_name,
+ sql,
+ params=None,
+ truncate=False,
+ custom_time_limit=None,
+ page_size=None,
+ log_sql_errors=True,
+ ):
+ return await self.databases[db_name].execute(
+ sql,
+ params=params,
+ truncate=truncate,
+ custom_time_limit=custom_time_limit,
+ page_size=page_size,
+ log_sql_errors=log_sql_errors,
+ )
+
async def expand_foreign_keys(self, database, table, column, values):
"Returns dict mapping (column, value) -> label"
labeled_fks = {}
@@ -477,72 +493,6 @@ class Datasette:
.get(table, {})
)
- async def execute_against_connection_in_thread(self, db_name, fn):
- def in_thread():
- conn = getattr(connections, db_name, None)
- if not conn:
- conn = self.databases[db_name].connect()
- self.prepare_connection(conn)
- setattr(connections, db_name, conn)
- return fn(conn)
-
- return await asyncio.get_event_loop().run_in_executor(self.executor, in_thread)
-
- async def execute(
- self,
- db_name,
- sql,
- params=None,
- truncate=False,
- custom_time_limit=None,
- page_size=None,
- log_sql_errors=True,
- ):
- """Executes sql against db_name in a thread"""
- page_size = page_size or self.page_size
-
- def sql_operation_in_thread(conn):
- time_limit_ms = self.sql_time_limit_ms
- if custom_time_limit and custom_time_limit < time_limit_ms:
- time_limit_ms = custom_time_limit
-
- with sqlite_timelimit(conn, time_limit_ms):
- try:
- cursor = conn.cursor()
- cursor.execute(sql, params or {})
- max_returned_rows = self.max_returned_rows
- if max_returned_rows == page_size:
- max_returned_rows += 1
- if max_returned_rows and truncate:
- rows = cursor.fetchmany(max_returned_rows + 1)
- truncated = len(rows) > max_returned_rows
- rows = rows[:max_returned_rows]
- else:
- rows = cursor.fetchall()
- truncated = False
- except sqlite3.OperationalError as e:
- if e.args == ("interrupted",):
- raise QueryInterrupted(e, sql, params)
- if log_sql_errors:
- print(
- "ERROR: conn={}, sql = {}, params = {}: {}".format(
- conn, repr(sql), params, e
- )
- )
- raise
-
- if truncate:
- return Results(rows, truncated, cursor.description)
-
- else:
- return Results(rows, False, cursor.description)
-
- with trace("sql", database=db_name, sql=sql.strip(), params=params):
- results = await self.execute_against_connection_in_thread(
- db_name, sql_operation_in_thread
- )
- return results
-
def register_renderers(self):
""" Register output renderers which output data in custom formats. """
# Built-in renderers
diff --git a/datasette/database.py b/datasette/database.py
index 3a1cea94..9a8ae4d4 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -1,17 +1,25 @@
+import asyncio
+import contextlib
from pathlib import Path
+import threading
+from .tracer import trace
from .utils import (
QueryInterrupted,
+ Results,
detect_fts,
detect_primary_keys,
detect_spatialite,
get_all_foreign_keys,
get_outbound_foreign_keys,
+ sqlite_timelimit,
sqlite3,
table_columns,
)
from .inspect import inspect_hash
+connections = threading.local()
+
class Database:
def __init__(self, ds, path=None, is_mutable=False, is_memory=False):
@@ -45,6 +53,73 @@ class Database:
"file:{}?{}".format(self.path, qs), uri=True, check_same_thread=False
)
+ async def execute_against_connection_in_thread(self, fn):
+ def in_thread():
+ conn = getattr(connections, self.name, None)
+ if not conn:
+ conn = self.connect()
+ self.ds.prepare_connection(conn)
+ setattr(connections, self.name, conn)
+ return fn(conn)
+
+ return await asyncio.get_event_loop().run_in_executor(
+ self.ds.executor, in_thread
+ )
+
+ async def execute(
+ self,
+ sql,
+ params=None,
+ truncate=False,
+ custom_time_limit=None,
+ page_size=None,
+ log_sql_errors=True,
+ ):
+ """Executes sql against db_name in a thread"""
+ page_size = page_size or self.ds.page_size
+
+ def sql_operation_in_thread(conn):
+ time_limit_ms = self.ds.sql_time_limit_ms
+ if custom_time_limit and custom_time_limit < time_limit_ms:
+ time_limit_ms = custom_time_limit
+
+ with sqlite_timelimit(conn, time_limit_ms):
+ try:
+ cursor = conn.cursor()
+ cursor.execute(sql, params or {})
+ max_returned_rows = self.ds.max_returned_rows
+ if max_returned_rows == page_size:
+ max_returned_rows += 1
+ if max_returned_rows and truncate:
+ rows = cursor.fetchmany(max_returned_rows + 1)
+ truncated = len(rows) > max_returned_rows
+ rows = rows[:max_returned_rows]
+ else:
+ rows = cursor.fetchall()
+ truncated = False
+ except sqlite3.OperationalError as e:
+ if e.args == ("interrupted",):
+ raise QueryInterrupted(e, sql, params)
+ if log_sql_errors:
+ print(
+ "ERROR: conn={}, sql = {}, params = {}: {}".format(
+ conn, repr(sql), params, e
+ )
+ )
+ raise
+
+ if truncate:
+ return Results(rows, truncated, cursor.description)
+
+ else:
+ return Results(rows, False, cursor.description)
+
+ with trace("sql", database=self.name, sql=sql.strip(), params=params):
+ results = await self.execute_against_connection_in_thread(
+ sql_operation_in_thread
+ )
+ return results
+
@property
def size(self):
if self.is_memory:
@@ -62,8 +137,7 @@ class Database:
for table in await self.table_names():
try:
table_count = (
- await self.ds.execute(
- self.name,
+ await self.execute(
"select count(*) from [{}]".format(table),
custom_time_limit=limit,
)
@@ -89,32 +163,30 @@ class Database:
return Path(self.path).stem
async def table_exists(self, table):
- results = await self.ds.execute(
- self.name,
- "select 1 from sqlite_master where type='table' and name=?",
- params=(table,),
+ results = await self.execute(
+ "select 1 from sqlite_master where type='table' and name=?", params=(table,)
)
return bool(results.rows)
async def table_names(self):
- results = await self.ds.execute(
- self.name, "select name from sqlite_master where type='table'"
+ results = await self.execute(
+ "select name from sqlite_master where type='table'"
)
return [r[0] for r in results.rows]
async def table_columns(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: table_columns(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: table_columns(conn, table)
)
async def primary_keys(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: detect_primary_keys(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: detect_primary_keys(conn, table)
)
async def fts_table(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: detect_fts(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: detect_fts(conn, table)
)
async def label_column_for_table(self, table):
@@ -124,8 +196,8 @@ class Database:
if explicit_label_column:
return explicit_label_column
# If a table has two columns, one of which is ID, then label_column is the other one
- column_names = await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: table_columns(conn, table)
+ column_names = await self.execute_against_connection_in_thread(
+ lambda conn: table_columns(conn, table)
)
# Is there a name or title column?
name_or_title = [c for c in column_names if c in ("name", "title")]
@@ -141,8 +213,8 @@ class Database:
return None
async def foreign_keys_for_table(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: get_outbound_foreign_keys(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: get_outbound_foreign_keys(conn, table)
)
async def hidden_table_names(self):
@@ -150,18 +222,17 @@ class Database:
hidden_tables = [
r[0]
for r in (
- await self.ds.execute(
- self.name,
+ await self.execute(
"""
select name from sqlite_master
where rootpage = 0
and sql like '%VIRTUAL TABLE%USING FTS%'
- """,
+ """
)
).rows
]
- has_spatialite = await self.ds.execute_against_connection_in_thread(
- self.name, detect_spatialite
+ has_spatialite = await self.execute_against_connection_in_thread(
+ detect_spatialite
)
if has_spatialite:
# Also hide Spatialite internal tables
@@ -178,13 +249,12 @@ class Database:
] + [
r[0]
for r in (
- await self.ds.execute(
- self.name,
+ await self.execute(
"""
select name from sqlite_master
where name like "idx_%"
and type = "table"
- """,
+ """
)
).rows
]
@@ -207,25 +277,20 @@ class Database:
return hidden_tables
async def view_names(self):
- results = await self.ds.execute(
- self.name, "select name from sqlite_master where type='view'"
- )
+ results = await self.execute("select name from sqlite_master where type='view'")
return [r[0] for r in results.rows]
async def get_all_foreign_keys(self):
- return await self.ds.execute_against_connection_in_thread(
- self.name, get_all_foreign_keys
- )
+ return await self.execute_against_connection_in_thread(get_all_foreign_keys)
async def get_outbound_foreign_keys(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: get_outbound_foreign_keys(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: get_outbound_foreign_keys(conn, table)
)
async def get_table_definition(self, table, type_="table"):
table_definition_rows = list(
- await self.ds.execute(
- self.name,
+ await self.execute(
"select sql from sqlite_master where name = :n and type=:t",
{"n": table, "t": type_},
)
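Stripped of Datasette's classes, the pattern being relocated here is roughly the following sketch: a per-thread cache of SQLite connections plus ``run_in_executor`` so blocking queries never stall the event loop (names are illustrative):

.. code-block:: python

    import asyncio
    import sqlite3
    import threading
    from concurrent.futures import ThreadPoolExecutor

    connections = threading.local()
    executor = ThreadPoolExecutor(max_workers=3)

    def connection_for(path):
        # One connection per (thread, database path), created lazily.
        by_path = getattr(connections, "by_path", None)
        if by_path is None:
            by_path = connections.by_path = {}
        if path not in by_path:
            by_path[path] = sqlite3.connect(path, check_same_thread=False)
        return by_path[path]

    async def execute(path, sql, params=None):
        def in_thread():
            cursor = connection_for(path).cursor()
            cursor.execute(sql, params or {})
            return cursor.fetchall()
        # Run the blocking SQLite work on the executor, off the event loop thread.
        return await asyncio.get_event_loop().run_in_executor(executor, in_thread)

    async def main():
        print(await execute(":memory:", "select sqlite_version()"))

    asyncio.run(main())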
From 440a70428c624f6e27b630026acdba2032acc9a7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 19 Nov 2019 15:01:10 -0800
Subject: [PATCH 0267/2288] Include rowid in filter select, closes #636
---
datasette/views/table.py | 6 +-----
tests/test_html.py | 24 ++++++++++++++++++++++++
2 files changed, 25 insertions(+), 5 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index a60a3941..516b474d 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -587,10 +587,6 @@ class TableView(RowTableShared):
columns = [r[0] for r in results.description]
rows = list(results.rows)
- filter_columns = columns[:]
- if use_rowid and filter_columns[0] == "rowid":
- filter_columns = filter_columns[1:]
-
# Expand labeled columns if requested
expanded_columns = []
expandable_columns = await self.expandable_columns(database, table)
@@ -720,7 +716,7 @@ class TableView(RowTableShared):
"use_rowid": use_rowid,
"filters": filters,
"display_columns": display_columns,
- "filter_columns": filter_columns,
+ "filter_columns": columns,
"display_rows": display_rows,
"facets_timed_out": facets_timed_out,
"sorted_facet_results": sorted(
diff --git a/tests/test_html.py b/tests/test_html.py
index 44627cdc..3b331f38 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -679,6 +679,30 @@ def test_table_html_foreign_key_custom_label_column(app_client):
]
+@pytest.mark.parametrize(
+ "path,expected_column_options",
+ [
+ ("/fixtures/infinity", ["- column -", "rowid", "value"]),
+ (
+ "/fixtures/primary_key_multiple_columns",
+ ["- column -", "id", "content", "content2"],
+ ),
+ ("/fixtures/compound_primary_key", ["- column -", "pk1", "pk2", "content"]),
+ ],
+)
+def test_table_html_filter_form_column_options(
+ path, expected_column_options, app_client
+):
+ response = app_client.get(path)
+ assert response.status == 200
+ form = Soup(response.body, "html.parser").find("form")
+ column_options = [
+ o.attrs.get("value") or o.string
+ for o in form.select("select[name=_filter_column] option")
+ ]
+ assert expected_column_options == column_options
+
+
def test_row_html_compound_primary_key(app_client):
response = app_client.get("/fixtures/compound_primary_key/a,b")
assert response.status == 200
From c16be14517414a94e1fdbd888e8a3ad0669e3bca Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 20 Nov 2019 10:02:07 -0800
Subject: [PATCH 0268/2288] How to upgrade using Docker
---
docs/installation.rst | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/docs/installation.rst b/docs/installation.rst
index 9ee7eb4e..c547f9e4 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -33,6 +33,10 @@ Now visit http://127.0.0.1:8001/ to access Datasette.
(You can download a copy of ``fixtures.db`` from
https://latest.datasette.io/fixtures.db )
+To upgrade to the most recent release of Datasette, run the following::
+
+ docker pull datasetteproject/datasette
+
Loading Spatialite
~~~~~~~~~~~~~~~~~~
From fd137da7f83c117b18e189707a1039e319dd5c91 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 21 Nov 2019 16:56:55 -0800
Subject: [PATCH 0269/2288] Suggest column facet only if at least one count > 1
Fixes #638
---
datasette/facets.py | 5 ++++-
tests/fixtures.py | 33 +++++++++++++++++----------------
tests/test_api.py | 30 ++++++++++++++++++++++++++++--
tests/test_csv.py | 32 ++++++++++++++++----------------
4 files changed, 65 insertions(+), 35 deletions(-)
diff --git a/datasette/facets.py b/datasette/facets.py
index 0c6459d6..a314faaf 100644
--- a/datasette/facets.py
+++ b/datasette/facets.py
@@ -143,9 +143,10 @@ class ColumnFacet(Facet):
if column in already_enabled:
continue
suggested_facet_sql = """
- select distinct {column} from (
+ select {column}, count(*) as n from (
{sql}
) where {column} is not null
+ group by {column}
limit {limit}
""".format(
column=escape_sqlite(column), sql=self.sql, limit=facet_size + 1
@@ -165,6 +166,8 @@ class ColumnFacet(Facet):
and num_distinct_values > 1
and num_distinct_values <= facet_size
and num_distinct_values < row_count
+ # And at least one has n > 1
+ and any(r["n"] > 1 for r in distinct_values)
):
suggested_facets.append(
{
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 3e4203f7..bb01d171 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -669,26 +669,27 @@ CREATE TABLE facetable (
neighborhood text,
tags text,
complex_array text,
+ distinct_some_null,
FOREIGN KEY ("city_id") REFERENCES [facet_cities](id)
);
INSERT INTO facetable
- (created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array)
+ (created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array, distinct_some_null)
VALUES
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]'),
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]'),
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]', '[]'),
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Hollywood', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]', '[]'),
- ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]'),
- ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]', '[]'),
- ("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]', '[]'),
- ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]', '[]'),
- ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]', '[]'),
- ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]'),
- ("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]')
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]', 'one'),
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]', 'two'),
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]', '[]', null),
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Hollywood', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]', '[]', null),
+ ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]', null),
+ ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]', '[]', null),
+ ("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]', null)
;
CREATE TABLE binary_data (
diff --git a/tests/test_api.py b/tests/test_api.py
index 1fa8642f..34eef4ce 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -197,6 +197,7 @@ def test_database_page(app_client):
"neighborhood",
"tags",
"complex_array",
+ "distinct_some_null",
],
"primary_keys": ["pk"],
"count": 15,
@@ -1042,15 +1043,38 @@ def test_table_filter_json_arraycontains(app_client):
"Mission",
'["tag1", "tag2"]',
'[{"foo": "bar"}]',
+ "one",
+ ],
+ [
+ 2,
+ "2019-01-14 08:00:00",
+ 1,
+ 1,
+ "CA",
+ 1,
+ "Dogpatch",
+ '["tag1", "tag3"]',
+ "[]",
+ "two",
],
- [2, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Dogpatch", '["tag1", "tag3"]', "[]"],
] == response.json["rows"]
def test_table_filter_extra_where(app_client):
response = app_client.get("/fixtures/facetable.json?_where=neighborhood='Dogpatch'")
assert [
- [2, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Dogpatch", '["tag1", "tag3"]', "[]"]
+ [
+ 2,
+ "2019-01-14 08:00:00",
+ 1,
+ 1,
+ "CA",
+ 1,
+ "Dogpatch",
+ '["tag1", "tag3"]',
+ "[]",
+ "two",
+ ]
] == response.json["rows"]
@@ -1503,6 +1527,7 @@ def test_expand_labels(app_client):
"neighborhood": "Dogpatch",
"tags": '["tag1", "tag3"]',
"complex_array": "[]",
+ "distinct_some_null": "two",
},
"13": {
"pk": 13,
@@ -1514,6 +1539,7 @@ def test_expand_labels(app_client):
"neighborhood": "Corktown",
"tags": "[]",
"complex_array": "[]",
+ "distinct_some_null": None,
},
} == response.json
diff --git a/tests/test_csv.py b/tests/test_csv.py
index 13aca489..1030c2bb 100644
--- a/tests/test_csv.py
+++ b/tests/test_csv.py
@@ -21,22 +21,22 @@ world
)
EXPECTED_TABLE_WITH_LABELS_CSV = """
-pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,complex_array
-1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]"
-2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]",[]
-3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[]
-4,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Tenderloin,[],[]
-5,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Bernal Heights,[],[]
-6,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Hayes Valley,[],[]
-7,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Hollywood,[],[]
-8,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Downtown,[],[]
-9,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Los Feliz,[],[]
-10,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Koreatown,[],[]
-11,2019-01-16 08:00:00,1,1,MI,3,Detroit,Downtown,[],[]
-12,2019-01-17 08:00:00,1,1,MI,3,Detroit,Greektown,[],[]
-13,2019-01-17 08:00:00,1,1,MI,3,Detroit,Corktown,[],[]
-14,2019-01-17 08:00:00,1,1,MI,3,Detroit,Mexicantown,[],[]
-15,2019-01-17 08:00:00,2,0,MC,4,Memnonia,Arcadia Planitia,[],[]
+pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,complex_array,distinct_some_null
+1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]",one
+2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]",[],two
+3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[],
+4,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Tenderloin,[],[],
+5,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Bernal Heights,[],[],
+6,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Hayes Valley,[],[],
+7,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Hollywood,[],[],
+8,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Downtown,[],[],
+9,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Los Feliz,[],[],
+10,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Koreatown,[],[],
+11,2019-01-16 08:00:00,1,1,MI,3,Detroit,Downtown,[],[],
+12,2019-01-17 08:00:00,1,1,MI,3,Detroit,Greektown,[],[],
+13,2019-01-17 08:00:00,1,1,MI,3,Detroit,Corktown,[],[],
+14,2019-01-17 08:00:00,1,1,MI,3,Detroit,Mexicantown,[],[],
+15,2019-01-17 08:00:00,2,0,MC,4,Memnonia,Arcadia Planitia,[],[],
""".lstrip().replace(
"\n", "\r\n"
)
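As a standalone sketch of the revised heuristic (table and column names are illustrative): a column is only suggested as a facet when its distinct values fit within the facet size and at least one value occurs more than once:

.. code-block:: python

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("create table facetable (state text)")
    conn.executemany("insert into facetable values (?)", [("CA",), ("CA",), ("MI",)])

    facet_size = 30
    rows = conn.execute(
        """
        select state, count(*) as n from (
            select * from facetable
        ) where state is not null
        group by state
        limit {}
        """.format(facet_size + 1)
    ).fetchall()

    num_distinct = len(rows)
    # The real code also requires num_distinct to be less than the total row count.
    suggest = 1 < num_distinct <= facet_size and any(n > 1 for _, n in rows)
    print(suggest)  # True for this sample data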
From d3e1c3017ee2f606a731208d59fe48805cdc3259 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 22 Nov 2019 22:07:01 -0800
Subject: [PATCH 0270/2288] Display 0 results, closes #637
---
datasette/static/app.css | 7 +++++
datasette/templates/_table.html | 56 ++++++++++++++++++---------------
datasette/templates/query.html | 2 ++
tests/test_html.py | 14 +++++++++
4 files changed, 53 insertions(+), 26 deletions(-)
diff --git a/datasette/static/app.css b/datasette/static/app.css
index 34eb122c..d7cf6334 100644
--- a/datasette/static/app.css
+++ b/datasette/static/app.css
@@ -327,3 +327,10 @@ a.not-underlined {
pre.wrapped-sql {
white-space: pre-wrap;
}
+
+p.zero-results {
+ border: 2px solid #ccc;
+ background-color: #eee;
+ padding: 0.5em;
+ font-style: italic;
+}
diff --git a/datasette/templates/_table.html b/datasette/templates/_table.html
index c7a72253..42c37c55 100644
--- a/datasette/templates/_table.html
+++ b/datasette/templates/_table.html
@@ -1,28 +1,32 @@
-
-
-
- {% for column in display_columns %}
-
- {% if not column.sortable %}
- {{ column.name }}
- {% else %}
- {% if column.name == sort %}
- {{ column.name }} ▼
+{% if display_rows %}
+