From c2c03f8e7be6973ef3cbdb3ca424e55a81a956d7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 4 Nov 2018 22:40:03 -0800
Subject: [PATCH 0001/2321] Compatible with Python 3.7
---
.travis.yml | 2 +-
setup.py | 1 +
2 files changed, 2 insertions(+), 1 deletion(-)
diff --git a/.travis.yml b/.travis.yml
index 1bc9361f..3e2bdbfd 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,7 +3,7 @@ language: python
# 3.6 is listed first so it gets used for the later build stages
python:
- "3.6"
- - "3.7-dev"
+ - "3.7"
- "3.5"
# Executed for 3.5 AND 3.6 as the first "test" stage:
diff --git a/setup.py b/setup.py
index b2fce1c0..a8132366 100644
--- a/setup.py
+++ b/setup.py
@@ -64,6 +64,7 @@ setup(
'Intended Audience :: End Users/Desktop',
'Topic :: Database',
'License :: OSI Approved :: Apache Software License',
+ 'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
],
From d0393e821e5914a7c1d659eaf0a5d612a1aa8e01 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 4 Nov 2018 22:45:38 -0800
Subject: [PATCH 0002/2321] Travis to use Python 3.7-dev for a little longer
3.7 produces a 403 forbidden error: https://travis-ci.org/simonw/datasette/jobs/450716231#L6
---
.travis.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.travis.yml b/.travis.yml
index 3e2bdbfd..1bc9361f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,7 +3,7 @@ language: python
# 3.6 is listed first so it gets used for the later build stages
python:
- "3.6"
- - "3.7"
+ - "3.7-dev"
- "3.5"
# Executed for 3.5 AND 3.6 as the first "test" stage:
From 5e3a432a0caa23837fa58134f69e2f82e4f632a6 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 14 Nov 2018 16:54:54 -0800
Subject: [PATCH 0003/2321] How to activate your virtual environment
---
docs/contributing.rst | 2 ++
1 file changed, 2 insertions(+)
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 74f43e2f..b9a077d6 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -29,6 +29,8 @@ The next step is to create a virtual environment for your project and use it to
cd datasette
# Create a virtual environment in venv/
python3 -mvenv venv
+ # Now activate the virtual environment, so pip can install into it
+ source venv/bin/activate
# Install Datasette and its testing dependencies
pip install -e .[test]
From 3de8fac1d322cbab6c8c55899e0e8511b36337d0 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 20 Nov 2018 09:16:31 -0800
Subject: [PATCH 0004/2321] New make_app_client() pattern
Because the next version of pytest discourages calling fixtures directly as functions
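For context, the shape of the change — a plain factory function the tests can call, with thin fixtures delegating to it — looks roughly like this (a minimal sketch, not the real fixtures module; the dict stands in for the actual TestClient setup):

    import pytest


    def make_app_client(max_returned_rows=None, config=None):
        # A plain generator function: tests and other fixtures can call it
        # directly, which pytest forbids for @pytest.fixture functions.
        client = {"max_returned_rows": max_returned_rows, "config": config or {}}
        yield client  # teardown code would run after the yield


    @pytest.fixture(scope="session")
    def app_client():
        # Fixtures become thin wrappers that delegate to the factory.
        yield from make_app_client()


    def test_custom_config():
        # Tests needing one-off settings call the factory, not the fixture:
        for client in make_app_client(config={"allow_sql": False}):
            assert client["config"]["allow_sql"] is False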
---
tests/fixtures.py | 20 ++++++++++++--------
tests/test_api.py | 9 +++++----
tests/test_html.py | 9 +++++----
3 files changed, 22 insertions(+), 16 deletions(-)
diff --git a/tests/fixtures.py b/tests/fixtures.py
index faf9c9cc..a77d141d 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -23,8 +23,7 @@ class TestClient:
)
-@pytest.fixture(scope="session")
-def app_client(
+def make_app_client(
sql_time_limit_ms=None,
max_returned_rows=None,
cors=False,
@@ -61,38 +60,43 @@ def app_client(
yield client
+@pytest.fixture(scope="session")
+def app_client(**kwargs):
+ yield from make_app_client(**kwargs)
+
+
@pytest.fixture(scope='session')
def app_client_shorter_time_limit():
- yield from app_client(20)
+ yield from make_app_client(20)
@pytest.fixture(scope='session')
def app_client_returned_rows_matches_page_size():
- yield from app_client(max_returned_rows=50)
+ yield from make_app_client(max_returned_rows=50)
@pytest.fixture(scope='session')
def app_client_larger_cache_size():
- yield from app_client(config={
+ yield from make_app_client(config={
'cache_size_kb': 2500,
})
@pytest.fixture(scope='session')
def app_client_csv_max_mb_one():
- yield from app_client(config={
+ yield from make_app_client(config={
'max_csv_mb': 1,
})
@pytest.fixture(scope="session")
def app_client_with_dot():
- yield from app_client(filename="fixtures.dot.db")
+ yield from make_app_client(filename="fixtures.dot.db")
@pytest.fixture(scope='session')
def app_client_with_cors():
- yield from app_client(cors=True)
+ yield from make_app_client(cors=True)
def generate_compound_rows(num):
diff --git a/tests/test_api.py b/tests/test_api.py
index c1f22733..1ab236cf 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -6,6 +6,7 @@ from .fixtures import ( # noqa
app_client_with_dot,
generate_compound_rows,
generate_sortable_rows,
+ make_app_client,
METADATA,
)
import pytest
@@ -435,7 +436,7 @@ def test_invalid_custom_sql(app_client):
def test_allow_sql_off():
- for client in app_client(config={
+ for client in make_app_client(config={
'allow_sql': False,
}):
assert 400 == client.get(
@@ -1179,7 +1180,7 @@ def test_suggested_facets(app_client):
def test_allow_facet_off():
- for client in app_client(config={
+ for client in make_app_client(config={
'allow_facet': False,
}):
assert 400 == client.get(
@@ -1192,7 +1193,7 @@ def test_allow_facet_off():
def test_suggest_facets_off():
- for client in app_client(config={
+ for client in make_app_client(config={
'suggest_facets': False,
}):
# Now suggested_facets should be []
@@ -1304,7 +1305,7 @@ def test_config_cache_size(app_client_larger_cache_size):
def test_config_force_https_urls():
- for client in app_client(config={"force_https_urls": True}):
+ for client in make_app_client(config={"force_https_urls": True}):
response = client.get("/fixtures/facetable.json?_size=3&_facet=state")
assert response.json["next_url"].startswith("https://")
assert response.json["facet_results"]["state"]["results"][0][
diff --git a/tests/test_html.py b/tests/test_html.py
index 47738604..41af9b25 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -2,6 +2,7 @@ from bs4 import BeautifulSoup as Soup
from .fixtures import ( # noqa
app_client,
app_client_shorter_time_limit,
+ make_app_client,
)
import pytest
import re
@@ -65,7 +66,7 @@ def test_row_strange_table_name(app_client):
def test_table_cell_truncation():
- for client in app_client(config={
+ for client in make_app_client(config={
"truncate_cells_html": 5,
}):
response = client.get("/fixtures/facetable")
@@ -84,7 +85,7 @@ def test_table_cell_truncation():
def test_row_page_does_not_truncate():
- for client in app_client(config={
+ for client in make_app_client(config={
"truncate_cells_html": 5,
}):
response = client.get("/fixtures/facetable/1")
@@ -690,7 +691,7 @@ def test_allow_download_on(app_client):
def test_allow_download_off():
- for client in app_client(config={
+ for client in make_app_client(config={
'allow_download': False,
}):
response = client.get(
@@ -720,7 +721,7 @@ def test_allow_sql_on(app_client):
def test_allow_sql_off():
- for client in app_client(config={
+ for client in make_app_client(config={
'allow_sql': False,
}):
response = client.get(
From 54a59b9fdb40b9c86ff065a5b7047e6ef305bc22 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 16 Dec 2018 12:53:00 -0800
Subject: [PATCH 0005/2321] Use python-3.6.7 runtime for Heroku deploys
---
datasette/utils.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/datasette/utils.py b/datasette/utils.py
index 1033535b..b0f74f0d 100644
--- a/datasette/utils.py
+++ b/datasette/utils.py
@@ -413,7 +413,7 @@ def temporary_heroku_directory(
if metadata_content:
open('metadata.json', 'w').write(json.dumps(metadata_content, indent=2))
- open('runtime.txt', 'w').write('python-3.6.6')
+ open('runtime.txt', 'w').write('python-3.6.7')
if branch:
install = ['https://github.com/simonw/datasette/archive/{branch}.zip'.format(
From c5f90dd1b20e41d1a3736dcbda5101d27279390b Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 16 Dec 2018 13:14:50 -0800
Subject: [PATCH 0006/2321] Added docs on updating docs + the release process
---
docs/changelog.rst | 2 ++
docs/contributing.rst | 53 +++++++++++++++++++++++++++++++++++++++++++
2 files changed, 55 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index a383b228..3c095330 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -1,3 +1,5 @@
+.. _changelog:
+
=========
Changelog
=========
diff --git a/docs/contributing.rst b/docs/contributing.rst
index b9a077d6..f7b9f936 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -69,3 +69,56 @@ You can also use the ``fixtures.py`` script to recreate the testing version of `
Then run Datasette like this::
datasette fixtures.db -m fixtures-metadata.json
+
+Editing and building the documentation
+--------------------------------------
+
+Datasette's documentation lives in the ``docs/`` directory and is deployed automatically using `Read The Docs <https://readthedocs.org/>`__.
+
+You can build it locally by installing ``sphinx`` and ``sphinx_rtd_theme`` in your Datasette development environment and then running ``make`` directly in the ``docs/`` directory::
+
+ source venv/bin/activate
+ pip install sphinx sphinx_rtd_theme
+ cd docs/
+ make
+
+This will create the HTML version of the documentation in ``docs/_build/html``. You can open it in your browser like so::
+
+ open _build/html/index.html
+
+Any time you make changes to a ``.rst`` file you can re-run ``make`` to update the built documents, then refresh them in your browser.
+
+The documentation is written using reStructuredText. You may find this article on `The subset of reStructuredText worth committing to memory `__ useful.
+
+Release process
+---------------
+
+Datasette releases are performed using tags. When a new version tag is pushed to GitHub, a `Travis CI task `__ will perform the following:
+
+* Run the unit tests against all supported Python versions. If the tests pass...
+* Set up https://v0-25-1.datasette.io/ (but with the new tag) to point to a live demo of this release
+* Build a Docker image of the release and push a tag to https://hub.docker.com/r/datasetteproject/datasette
+* Re-point the "latest" tag on Docker Hub to the new image
+* Build a wheel bundle of the underlying Python source code
+* Push that new wheel up to PyPI: https://pypi.org/project/datasette/
+
+Datasette follows `Semantic Versioning <https://semver.org/>`__::
+
+ major.minor.patch
+
+We increment ``major`` for backwards-incompatible releases. Datasette is currently pre-1.0 so the major version is always ``0``.
+
+We increment ``minor`` for new features.
+
+We increment ``patch`` for bugfix releases.
+
+To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__.
+
+Then run the following::
+
+ git tag 0.25.2
+ git push && git push --tags
+
+You will need to have push access to the main Datasette GitHub repository.
+
+Once the release is out, you can manually update https://github.com/simonw/datasette/releases
From 84d7890b63f6aa43e7369fa8fc7a0a1bbf98c91f Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 16 Dec 2018 13:18:55 -0800
Subject: [PATCH 0007/2321] Upgrade pytest to 4.0.2
---
setup.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.py b/setup.py
index a8132366..7edd78a1 100644
--- a/setup.py
+++ b/setup.py
@@ -49,7 +49,7 @@ setup(
setup_requires=['pytest-runner'],
extras_require={
'test': [
- 'pytest==3.7.1',
+ 'pytest==4.0.2',
'aiohttp==3.3.2',
'beautifulsoup4==4.6.1',
]
From b5128fc53fce6a1bf3b16bad9f318451bc1d1263 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 16 Dec 2018 13:26:37 -0800
Subject: [PATCH 0008/2321] Release 0.25.2
---
docs/changelog.rst | 10 ++++++++++
docs/contributing.rst | 4 ++++
2 files changed, 14 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 3c095330..236237ba 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,16 @@
Changelog
=========
+.. _v0_25_2:
+
+0.25.2 (2018-12-16)
+-------------------
+
+- ``datasette publish heroku`` now uses the ``python-3.6.7`` runtime
+- Added documentation on :ref:`how to build the documentation `
+- Added documentation covering :ref:`our release process `
+- Upgraded to pytest 4.0.2
+
.. _v0_25_1:
0.25.1 (2018-11-04)
diff --git a/docs/contributing.rst b/docs/contributing.rst
index f7b9f936..5f17e501 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -70,6 +70,8 @@ Then run Datasette like this::
datasette fixtures.db -m fixtures-metadata.json
+.. _contributing_documentation:
+
Editing and building the documentation
--------------------------------------
@@ -90,6 +92,8 @@ Any time you make changes to a ``.rst`` file you can re-run ``make`` to update t
The documentation is written using reStructuredText. You may find this article on `The subset of reStructuredText worth committing to memory `__ useful.
+.. _contributing_release:
+
Release process
---------------
From ed78922ae38b51513319b60ac39990b7c2aca810 Mon Sep 17 00:00:00 2001
From: Jay Graves
Date: Sun, 16 Dec 2018 15:32:55 -0600
Subject: [PATCH 0009/2321] Tiny typo in customization docs (#390)
Thanks, @jaywgraves
---
docs/custom_templates.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/custom_templates.rst b/docs/custom_templates.rst
index 7f70c133..b0863381 100644
--- a/docs/custom_templates.rst
+++ b/docs/custom_templates.rst
@@ -12,7 +12,7 @@ When you launch Datasette, you can specify a custom metadata file like this::
datasette mydb.db --metadata metadata.json
-Your ``metadata.json`` file can include linke that look like this::
+Your ``metadata.json`` file can include links that look like this::
{
"extra_css_urls": [
From 1e2db8547be111e7c1583f096f40cec7c0824281 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 16 Dec 2018 13:47:26 -0800
Subject: [PATCH 0010/2321] Improved release process documentation
---
docs/contributing.rst | 16 +++++++++++-----
1 file changed, 11 insertions(+), 5 deletions(-)
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 5f17e501..0b298cec 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -106,6 +106,8 @@ Datasette releases are performed using tags. When a new version tag is pushed to
* Build a wheel bundle of the underlying Python source code
* Push that new wheel up to PyPI: https://pypi.org/project/datasette/
+To deploy new releases you will need to have push access to the main Datasette GitHub repository.
+
Datasette follows `Semantic Versioning <https://semver.org/>`__::
major.minor.patch
@@ -116,13 +118,17 @@ We increment ``minor`` for new features.
We increment ``patch`` for bugfix releases.
-To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__.
+To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__::
-Then run the following::
+ # Update changelog
+ git commit -m "Release 0.25.2" -a
+ git push
+
+For non-bugfix releases you may want to update the news section of ``README.md`` as part of the same commit.
+
+Wait long enough for Travis to build and deploy the demo version of that commit (otherwise the tag deployment may fail to alias to it properly). Then run the following::
git tag 0.25.2
- git push && git push --tags
-
-You will need to have push access to the main Datasette GitHub repository.
+ git push --tags
Once the release is out, you can manually update https://github.com/simonw/datasette/releases
From 595da9054e391b05a59b57c0c0a4487ea10062ce Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 16 Dec 2018 14:56:56 -0800
Subject: [PATCH 0011/2321] Better example commit
This one updates the README news section as well
---
docs/contributing.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 0b298cec..5b7d75d2 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -118,7 +118,7 @@ We increment ``minor`` for new features.
We increment ``patch`` for bugfix releases.
-To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__::
+To release a new version, first create a commit that updates :ref:`the changelog ` with highlights of the new version. An example `commit can be seen here `__::
# Update changelog
git commit -m "Release 0.25.2" -a
From 74ad06e1283615cceb49fd95929b60329d47d3d4 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 18 Dec 2018 13:49:51 -0800
Subject: [PATCH 0012/2321] Updated notes on FTS5 vs. FTS4
---
docs/full_text_search.rst | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/docs/full_text_search.rst b/docs/full_text_search.rst
index 6bf3b567..987e2272 100644
--- a/docs/full_text_search.rst
+++ b/docs/full_text_search.rst
@@ -14,7 +14,9 @@ FTS versions
There are three different versions of the SQLite FTS module: FTS3, FTS4 and FTS5. You can tell which versions are supported by your instance of Datasette by checking the ``/-/versions`` page.
-FTS5 is the most advanced module, but is usually not available is the SQLite version that is bundled with Python. If in doubt, you should use FTS4.
+FTS5 is the most advanced module but may not be available in the SQLite version that is bundled with your Python installation. Most importantly, FTS5 is the only version that has the ability to order by search relevance without needing extra code.
+
+If you can't be sure that FTS5 will be available, you should use FTS4.
Adding full-text search to a SQLite table
-----------------------------------------
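The relevance point deserves an illustration: FTS5 exposes a built-in ``rank`` column you can order by, with no extra code. A minimal sketch using the standard library (assumes your SQLite build includes FTS5):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript("""
        CREATE VIRTUAL TABLE docs USING fts5(body);
        INSERT INTO docs (body) VALUES ('datasette serves data'), ('data data data');
    """)
    # rank is a hidden FTS5 column; ascending order returns best matches first.
    rows = conn.execute(
        "SELECT body, rank FROM docs WHERE docs MATCH ? ORDER BY rank",
        ("data",),
    ).fetchall()
    print(rows)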
From 48d24c6589f214c7e1bab21d32e2f35a0038e71f Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 18 Dec 2018 21:39:35 -0800
Subject: [PATCH 0013/2321] Link to new tutorial from the README
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 56986668..d4158c46 100644
--- a/README.md
+++ b/README.md
@@ -19,6 +19,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+* 18th December 2018: [Fast Autocomplete Search for Your Website](https://24ways.org/2018/fast-autocomplete-search-for-your-website/) - a new tutorial on using Datasette to build a JavaScript autocomplete search engine.
* 3rd October 2018: [The interesting ideas in Datasette](https://simonwillison.net/2018/Oct/4/datasette-ideas/) - a write-up of some of the less obvious interesting ideas embedded in the Datasette project.
* 19th September 2018: [Datasette 0.25](http://datasette.readthedocs.io/en/latest/changelog.html#v0-25) - New plugin hooks, improved database view support and an easier way to use more recent versions of SQLite.
* 23rd July 2018: [Datasette 0.24](http://datasette.readthedocs.io/en/latest/changelog.html#v0-24) - a number of small new features
From eac08f0dfc61a99e8887442fc247656d419c76f8 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 20 Dec 2018 07:51:08 -0800
Subject: [PATCH 0014/2321] --reload now also reloads if databases change on
disk
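Roughly how this works: hupper restarts the process when a watched file changes, and its reloader object accepts extra non-code paths to watch. A hedged sketch of the pattern (``myapp.serve`` is a hypothetical entry point, not Datasette's real one):

    import hupper

    def serve(files, reload=False, metadata=None):
        if reload:
            # Re-executes this entry point in a monitored subprocess.
            reloader = hupper.start_reloader("myapp.serve")
            # Watch the database files themselves, not just Python code:
            reloader.watch_files(files)  # e.g. ["fixtures.db"]
            if metadata:
                reloader.watch_files([metadata])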
---
datasette/cli.py | 3 ++-
docs/datasette-serve-help.txt | 4 ++--
2 files changed, 4 insertions(+), 3 deletions(-)
diff --git a/datasette/cli.py b/datasette/cli.py
index 820367ac..2cadabbf 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -262,7 +262,7 @@ def package(
@click.option(
"--reload",
is_flag=True,
- help="Automatically reload if code change detected - useful for development",
+ help="Automatically reload if database or code change detected - useful for development",
)
@click.option(
"--cors", is_flag=True, help="Enable CORS by serving Access-Control-Allow-Origin: *"
@@ -345,6 +345,7 @@ def serve(
import hupper
reloader = hupper.start_reloader("datasette.cli.serve")
+ reloader.watch_files(files)
if metadata:
reloader.watch_files([metadata.name])
diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt
index 11ee105a..caa00e33 100644
--- a/docs/datasette-serve-help.txt
+++ b/docs/datasette-serve-help.txt
@@ -8,8 +8,8 @@ Options:
-h, --host TEXT host for server, defaults to 127.0.0.1
-p, --port INTEGER port for server, defaults to 8001
--debug Enable debug mode - useful for development
- --reload Automatically reload if code change detected - useful for
- development
+ --reload Automatically reload if database or code change detected -
+ useful for development
--cors Enable CORS by serving Access-Control-Allow-Origin: *
--load-extension PATH Path to a SQLite extension to load
--inspect-file TEXT Path to JSON file created using "datasette inspect"
From a2bfcfc1b1c60dac3526364af17c2fa2f3d41a0a Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 28 Dec 2018 18:22:27 -0800
Subject: [PATCH 0015/2321] Fix some regex DeprecationWarnings (#392)
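Background for the change: in a regular string literal like "\.db$", the sequence ``\.`` is an invalid string escape that Python 3.6+ reports as a DeprecationWarning; a raw string hands the backslash to the regex engine untouched. A quick illustration:

    import re
    import warnings

    warnings.simplefilter("error", DeprecationWarning)

    # Raw string: the backslash reaches the regex engine intact, no warning.
    assert re.compile(r"\.db$").search("fixtures.db")

    # The non-raw form is flagged when the literal itself is compiled:
    try:
        compile('pattern = "\\.db$"', "<demo>", "exec")
    except (DeprecationWarning, SyntaxError):
        print("invalid escape sequence flagged")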
---
datasette/app.py | 20 ++++++++++----------
tests/test_html.py | 4 ++--
2 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index fe461d42..cdfcb11d 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -468,7 +468,7 @@ class Datasette:
self.jinja_env.filters["escape_sqlite"] = escape_sqlite
self.jinja_env.filters["to_css_class"] = to_css_class
pm.hook.prepare_jinja2_environment(env=self.jinja_env)
- app.add_route(IndexView.as_view(self), "/")
+ app.add_route(IndexView.as_view(self), r"/")
# TODO: /favicon.ico and /-/static/ deserve far-future cache expires
app.add_route(favicon, "/favicon.ico")
app.static("/-/static/", str(app_root / "datasette" / "static"))
@@ -481,37 +481,37 @@ class Datasette:
app.static(modpath, plugin["static_path"])
app.add_route(
JsonDataView.as_view(self, "inspect.json", self.inspect),
- "/-/inspect",
+ r"/-/inspect",
)
app.add_route(
JsonDataView.as_view(self, "metadata.json", lambda: self._metadata),
- "/-/metadata",
+ r"/-/metadata",
)
app.add_route(
JsonDataView.as_view(self, "versions.json", self.versions),
- "/-/versions",
+ r"/-/versions",
)
app.add_route(
JsonDataView.as_view(self, "plugins.json", self.plugins),
- "/-/plugins",
+ r"/-/plugins",
)
app.add_route(
JsonDataView.as_view(self, "config.json", lambda: self._config),
- "/-/config",
+ r"/-/config",
)
app.add_route(
- DatabaseDownload.as_view(self), "/"
+ DatabaseDownload.as_view(self), r"/"
)
app.add_route(
- DatabaseView.as_view(self), "/"
+ DatabaseView.as_view(self), r"/"
)
app.add_route(
TableView.as_view(self),
- "//",
+ r"//",
)
app.add_route(
RowView.as_view(self),
- "///",
+ r"///",
)
self.register_custom_units()
# On 404 with a trailing slash redirect to path without that slash:
diff --git a/tests/test_html.py b/tests/test_html.py
index 41af9b25..913eac94 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -687,7 +687,7 @@ def test_allow_download_on(app_client):
"/fixtures"
)
soup = Soup(response.body, 'html.parser')
- assert len(soup.findAll('a', {'href': re.compile('\.db$')}))
+ assert len(soup.findAll('a', {'href': re.compile(r'\.db$')}))
def test_allow_download_off():
@@ -699,7 +699,7 @@ def test_allow_download_off():
)
soup = Soup(response.body, 'html.parser')
- assert not len(soup.findAll('a', {'href': re.compile('\.db$')}))
+ assert not len(soup.findAll('a', {'href': re.compile(r'\.db$')}))
# Accessing URL directly should 403
response = client.get(
"/fixtures.db",
From d95b46b09bf53530b212cc17202a242daa62fd64 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 1 Jan 2019 21:14:46 -0800
Subject: [PATCH 0016/2321] datasette publish now --alias option
You can now use --alias to attempt to set an alias after you deploy.
Also updated now.json to use version: 1
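Concretely, the command now writes a ``now.json`` into the temporary deploy directory before shelling out to ``now``; the relevant logic, as a standalone sketch of the diff below:

    import json

    def write_now_json(alias=None):
        # Zeit Now v1 deployment config; "alias" records the target
        # for a later `now alias` call.
        now_json = {"version": 1}
        if alias:
            now_json["alias"] = alias
        with open("now.json", "w") as fp:
            fp.write(json.dumps(now_json))

    write_now_json(alias="yoursite.now.sh")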
---
datasette/publish/now.py | 17 +++++++++++------
docs/datasette-publish-now-help.txt | 3 ++-
2 files changed, 13 insertions(+), 7 deletions(-)
diff --git a/datasette/publish/now.py b/datasette/publish/now.py
index 92654b68..3cc657fa 100644
--- a/datasette/publish/now.py
+++ b/datasette/publish/now.py
@@ -21,7 +21,8 @@ def publish_subcommand(publish):
help="Application name to use when deploying",
)
@click.option("--force", is_flag=True, help="Pass --force option to now")
- @click.option("--token", help="Auth token to use for deploy (Now only)")
+ @click.option("--token", help="Auth token to use for deploy")
+ @click.option("--alias", help="Desired alias e.g. yoursite.now.sh")
@click.option("--spatialite", is_flag=True, help="Enable SpatialLite extension")
def now(
files,
@@ -41,6 +42,7 @@ def publish_subcommand(publish):
name,
force,
token,
+ alias,
spatialite,
):
fail_if_publish_binary_not_installed("now", "Zeit Now", "https://zeit.co/now")
@@ -70,11 +72,12 @@ def publish_subcommand(publish):
"source_url": source_url,
},
):
- open("now.json", "w").write(json.dumps({
- "features": {
- "cloud": "v1"
- }
- }))
+ now_json = {
+ "version": 1
+ }
+ if alias:
+ now_json["alias"] = alias
+ open("now.json", "w").write(json.dumps(now_json))
args = []
if force:
args.append("--force")
@@ -84,3 +87,5 @@ def publish_subcommand(publish):
call(["now"] + args)
else:
call("now")
+ if alias:
+ call(["now", "alias"])
diff --git a/docs/datasette-publish-now-help.txt b/docs/datasette-publish-now-help.txt
index ce09030f..a9c01f39 100644
--- a/docs/datasette-publish-now-help.txt
+++ b/docs/datasette-publish-now-help.txt
@@ -18,6 +18,7 @@ Options:
--source_url TEXT Source URL for metadata
-n, --name TEXT Application name to use when deploying
--force Pass --force option to now
- --token TEXT Auth token to use for deploy (Now only)
+ --token TEXT Auth token to use for deploy
+ --alias TEXT Desired alias e.g. yoursite.now.sh
--spatialite Enable SpatiaLite extension
--help Show this message and exit.
From 8b8ae55e7c8b9e1dceef53f55a330b596ca44d41 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 1 Jan 2019 21:35:16 -0800
Subject: [PATCH 0017/2321] Pass --token to 'now alias', if provided
---
datasette/publish/now.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/datasette/publish/now.py b/datasette/publish/now.py
index 3cc657fa..bd2b051b 100644
--- a/datasette/publish/now.py
+++ b/datasette/publish/now.py
@@ -88,4 +88,7 @@ def publish_subcommand(publish):
else:
call("now")
if alias:
- call(["now", "alias"])
+ alias_args = ["alias"]
+ if token:
+ alias_args.append("--token={}".format(token))
+ call(["now"] + alias_args)
From 996e8822d2cb551c0ed821ff1fda24af905332d3 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 2 Jan 2019 18:43:56 -0800
Subject: [PATCH 0018/2321] Fix CSV export hidden form fields, closes #393
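The fix derives the hidden fields from the live query string instead of a preprocessed dict, dropping keys the export form sets itself. The core transformation, extracted as a standalone sketch:

    import urllib.parse

    def csv_hidden_args(query_string):
        # Preserve the user's current filters; drop keys the form controls,
        # then force _size=max so the export returns every matching row.
        return [
            (key, value)
            for key, value in urllib.parse.parse_qsl(query_string)
            if key not in ("_labels", "_facet", "_size")
        ] + [("_size", "max")]

    print(csv_hidden_args("id__gt=2&_size=10"))
    # [('id__gt', '2'), ('_size', 'max')]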
---
datasette/templates/table.html | 6 ++----
datasette/views/base.py | 6 +++++-
tests/test_html.py | 13 +++++++------
3 files changed, 14 insertions(+), 11 deletions(-)
diff --git a/datasette/templates/table.html b/datasette/templates/table.html
index a768a9fc..6177163a 100644
--- a/datasette/templates/table.html
+++ b/datasette/templates/table.html
@@ -148,10 +148,8 @@
{% if expandable_columns %}{% endif %}
{% if next_url and config.allow_csv_stream %}{% endif %}
- {% for key, value in url_csv_args.items() %}
- {% if key != "_labels" %}
- <input type="hidden" name="{{ key }}" value="{{ value }}">
- {% endif %}
+ {% for key, value in url_csv_hidden_args %}
+ <input type="hidden" name="{{ key }}" value="{{ value }}">
{% endfor %}
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 119b376b..4b89c975 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -464,7 +464,11 @@ class BaseView(RenderMixin):
}),
"url_csv": url_csv,
"url_csv_path": url_csv_path,
- "url_csv_args": url_csv_args,
+ "url_csv_hidden_args": [
+ (key, value)
+ for key, value in urllib.parse.parse_qsl(request.query_string)
+ if key not in ("_labels", "_facet", "_size")
+ ] + [("_size", "max")],
"datasette_version": __version__,
"config": self.ds.config_dict(),
}
diff --git a/tests/test_html.py b/tests/test_html.py
index 913eac94..233b85a0 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -401,7 +401,7 @@ def test_table_html_simple_primary_key(app_client):
def test_table_csv_json_export_interface(app_client):
- response = app_client.get('/fixtures/simple_primary_key')
+ response = app_client.get('/fixtures/simple_primary_key?id__gt=2')
assert response.status == 200
# The links at the top of the page
links = Soup(response.body, "html.parser").find("p", {
@@ -409,8 +409,8 @@ def test_table_csv_json_export_interface(app_client):
}).findAll("a")
actual = [l["href"].split("/")[-1] for l in links]
expected = [
- "simple_primary_key.json",
- "simple_primary_key.csv?_size=max",
+ "simple_primary_key.json?id__gt=2",
+ "simple_primary_key.csv?id__gt=2&_size=max",
"#export"
]
assert expected == actual
@@ -420,9 +420,9 @@ def test_table_csv_json_export_interface(app_client):
})
json_links = [a["href"].split("/")[-1] for a in div.find("p").findAll("a")]
assert [
- "simple_primary_key.json",
- "simple_primary_key.json?_shape=array",
- "simple_primary_key.json?_shape=object"
+ "simple_primary_key.json?id__gt=2",
+ "simple_primary_key.json?id__gt=2&_shape=array",
+ "simple_primary_key.json?id__gt=2&_shape=object"
] == json_links
# And the CSV form
form = div.find("form")
@@ -431,6 +431,7 @@ def test_table_csv_json_export_interface(app_client):
assert [
'',
'',
+ '',
''
] == inputs
From 424e146697309a54c05d5d1ba1f840849ddbafdc Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 2 Jan 2019 18:53:59 -0800
Subject: [PATCH 0019/2321] Datasette 0.26 release notes
---
README.md | 1 +
docs/changelog.rst | 9 +++++++++
2 files changed, 10 insertions(+)
diff --git a/README.md b/README.md
index d4158c46..c2acab1c 100644
--- a/README.md
+++ b/README.md
@@ -19,6 +19,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+ * 2nd January 2019: [Datasette 0.26](http://datasette.readthedocs.io/en/latest/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument.
* 18th December 2018: [Fast Autocomplete Search for Your Website](https://24ways.org/2018/fast-autocomplete-search-for-your-website/) - a new tutorial on using Datasette to build a JavaScript autocomplete search engine.
* 3rd October 2018: [The interesting ideas in Datasette](https://simonwillison.net/2018/Oct/4/datasette-ideas/) - a write-up of some of the less obvious interesting ideas embedded in the Datasette project.
* 19th September 2018: [Datasette 0.25](http://datasette.readthedocs.io/en/latest/changelog.html#v0-25) - New plugin hooks, improved database view support and an easier way to use more recent versions of SQLite.
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 236237ba..e59142e2 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,15 @@
Changelog
=========
+.. _v0_26:
+
+0.26 (2019-01-02)
+-----------------
+
+- ``datasette serve --reload`` now restarts Datasette if a database file changes on disk.
+- ``datasette publish now`` now takes an optional ``--alias mysite.now.sh`` argument. This will attempt to set an alias after the deploy completes.
+- Fixed a bug where the advanced CSV export form failed to include the currently selected filters (`#393 <https://github.com/simonw/datasette/issues/393>`__)
+
.. _v0_25_2:
0.25.2 (2018-12-16)
From b65d97792a53f78cb14b226231063209d22c4602 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 2 Jan 2019 19:14:21 -0800
Subject: [PATCH 0020/2321] Switch to using PYPI_PASSWORD env var in Travis
---
.travis.yml | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 1bc9361f..d292f194 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -50,8 +50,7 @@ jobs:
- provider: pypi
user: simonw
distributions: bdist_wheel
- password:
- secure: ikY6iF+/2AxFlwcun35iAEz7Wbrnp0Fp7b32Z7uob5RsGpIDCYYDa3T/qNhiyNsg5fecGp01rBf1F+dL357b0rWEeaoONtsDrTKFeBUB6bMnzNtr3QbHq/TeQ1f+Vn6FWpY9360Ihbz0pKPzWWYHJxOjXPM793rGWmPu7siCc4oQZOpwLK28GZeFP803wq/QG81hFRWR8IiMlqljkecEGhaM6ftxzizk1LBoTZCw3DdL2xDwzrLvF3Hg1jXX08pJm9WrLJNS6i+LRhiLv9IJ5KxRDwNHJhrvblRrZn0CKVbiWR/8985r4R7CXaeG68uJ505RcVoXYRsq9D7mAcYAB3U5AzU9TsPQlvUgwJlFpKOsZlCw+5AHFIYUXvlP2Qo5kfXUYC9KKdJwbWjcW7isUqWpzInGfnmRNMJGzmIm2q65ua0FNLV4vK1wl98O60HFSyhXJi84YmCAPgidJ22sEA6sHAdMEFiWglhap12zPc6i7trXKC8aLjW/3qGXhUq3BK8vQcbutGer+Q3z9UM7kthB76FtCUVc8eqljtdAcGt5kXoHIEPwGb50ikOxo/qpXsXVH36Z6plbELbayXb3ocUrMdhWP5+kfnrdLEmx+X4IpUauxGUqLGliYSQpfjp8XyA87ASCjsSqUEjr/4t4YztW5a3mWBYZ+IjjYXW6PQg=
+ password: ${PYPI_PASSWORD}
on:
branch: master
tags: true
From 788f218e267ad9e29df904c6c6a61e78bcc8e744 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 5 Jan 2019 20:44:34 -0800
Subject: [PATCH 0021/2321] Suppress pytest warnings from 3rd party modules
---
pytest.ini | 8 ++++++++
1 file changed, 8 insertions(+)
create mode 100644 pytest.ini
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 00000000..92b08b4d
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,8 @@
+[pytest]
+filterwarnings=
+ # https://github.com/pallets/jinja/issues/927
+ ignore:Using or importing the ABCs::jinja2
+ # https://bugs.launchpad.net/beautifulsoup/+bug/1778909
+ ignore:Using or importing the ABCs::bs4.element
+ # Sanic verify_ssl=True
+ ignore:verify_ssl is deprecated::sanic
From 5b026115126bedbb66457767e169139146d1c9fd Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 8 Jan 2019 16:25:29 -0800
Subject: [PATCH 0022/2321] Corrected import path in plugin docs
---
docs/plugins.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/plugins.rst b/docs/plugins.rst
index 3380bbc4..0ec30434 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -380,7 +380,7 @@ Let's say you want to build a plugin that adds a ``datasette publish my_hosting_
.. code-block:: python
from datasette import hookimpl
- from datasette.common import add_common_publish_arguments_and_options
+ from datasette.publish.common import add_common_publish_arguments_and_options
import click
From 8ce7866312f9b7c873541d6e3662ada3e850fa85 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 10 Jan 2019 16:44:37 -0800
Subject: [PATCH 0023/2321] compile_options output in /-/versions, closes #396
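The pragma is available on any SQLite connection; with the standard library, the expression from the diff below boils down to:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    compile_options = [
        r[0] for r in conn.execute("pragma compile_options;").fetchall()
    ]
    print(compile_options)  # e.g. ['ENABLE_FTS5', 'THREADSAFE=1', ...]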
---
datasette/app.py | 3 +++
docs/introspection.rst | 12 +++++++++++-
tests/test_api.py | 1 +
3 files changed, 15 insertions(+), 1 deletion(-)
diff --git a/datasette/app.py b/datasette/app.py
index cdfcb11d..8252184c 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -363,6 +363,9 @@ class Datasette:
"version": sqlite_version,
"fts_versions": fts_versions,
"extensions": sqlite_extensions,
+ "compile_options": [
+ r[0] for r in conn.execute("pragma compile_options;").fetchall()
+ ],
},
}
diff --git a/docs/introspection.rst b/docs/introspection.rst
index 1a0ab952..b4dbfc6e 100644
--- a/docs/introspection.rst
+++ b/docs/introspection.rst
@@ -61,7 +61,7 @@ This is an internal implementation detail of Datasette and the format should not
/-/versions
-----------
-Shows the version of Datasette, Python and SQLite. `Versions example `_::
+Shows the version of Datasette, Python and SQLite. `Versions example `_::
{
"datasette": {
@@ -76,9 +76,19 @@ Shows the version of Datasette, Python and SQLite. `Versions example
Date: Thu, 10 Jan 2019 16:47:15 -0800
Subject: [PATCH 0024/2321] Bump aiohttp to 3.5.3 to fix a warning
---
setup.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.py b/setup.py
index 7edd78a1..92b92c3f 100644
--- a/setup.py
+++ b/setup.py
@@ -50,7 +50,7 @@ setup(
extras_require={
'test': [
'pytest==4.0.2',
- 'aiohttp==3.3.2',
+ 'aiohttp==3.5.3',
'beautifulsoup4==4.6.1',
]
},
From 50d8d8216dcc5557e6b581a13bb871f033722117 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 10 Jan 2019 16:47:54 -0800
Subject: [PATCH 0025/2321] Dockerfile now builds SQLite 3.26.0, closes #397
---
Dockerfile | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index cb3d6621..cd9b642c 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -6,8 +6,8 @@ RUN apt update \
&& apt clean
-RUN wget "https://www.sqlite.org/2018/sqlite-autoconf-3230100.tar.gz" && tar xzf sqlite-autoconf-3230100.tar.gz \
- && cd sqlite-autoconf-3230100 && ./configure --disable-static --enable-fts5 --enable-json1 CFLAGS="-g -O2 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_JSON1" \
+RUN wget "https://www.sqlite.org/2018/sqlite-autoconf-3260000.tar.gz" && tar xzf sqlite-autoconf-3260000.tar.gz \
+ && cd sqlite-autoconf-3260000 && ./configure --disable-static --enable-fts5 --enable-json1 CFLAGS="-g -O2 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_JSON1" \
&& make && make install
RUN wget "https://www.gaia-gis.it/gaia-sins/freexl-1.0.5.tar.gz" && tar zxf freexl-1.0.5.tar.gz \
From 4722acc73ce761556b18f5dcbe36b7fef2ee2c69 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 10 Jan 2019 16:51:38 -0800
Subject: [PATCH 0026/2321] Release 0.26.1
---
README.md | 1 +
docs/changelog.rst | 9 +++++++++
2 files changed, 10 insertions(+)
diff --git a/README.md b/README.md
index c2acab1c..9eb6dafe 100644
--- a/README.md
+++ b/README.md
@@ -19,6 +19,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+ * 10th January 2019: [Datasette 0.26.1](http://datasette.readthedocs.io/en/latest/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options.
* 2nd January 2019: [Datasette 0.26](http://datasette.readthedocs.io/en/latest/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument.
* 18th December 2018: [Fast Autocomplete Search for Your Website](https://24ways.org/2018/fast-autocomplete-search-for-your-website/) - a new tutorial on using Datasette to build a JavaScript autocomplete search engine.
* 3rd October 2018: [The interesting ideas in Datasette](https://simonwillison.net/2018/Oct/4/datasette-ideas/) - a write-up of some of the less obvious interesting ideas embedded in the Datasette project.
diff --git a/docs/changelog.rst b/docs/changelog.rst
index e59142e2..b6c46c69 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,15 @@
Changelog
=========
+.. _v0_26_1:
+
+0.26.1 (2019-01-10)
+-------------------
+
+- ``/-/versions`` now includes SQLite ``compile_options`` (`#396 <https://github.com/simonw/datasette/issues/396>`__)
+- `datasetteproject/datasette <https://hub.docker.com/r/datasetteproject/datasette>`__ Docker image now uses SQLite 3.26.0 (`#397 <https://github.com/simonw/datasette/issues/397>`__)
+- Cleaned up some deprecation warnings under Python 3.7
+
.. _v0_26:
0.26 (2019-01-02)
From c3a78eb05c0b146b2eb37b50c1668b9430b8f50a Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 13 Jan 2019 14:23:44 -0800
Subject: [PATCH 0027/2321] app_client() fixture doesn't need to take **kwargs
---
tests/fixtures.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/tests/fixtures.py b/tests/fixtures.py
index a77d141d..16395553 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -61,8 +61,8 @@ def make_app_client(
@pytest.fixture(scope="session")
-def app_client(**kwargs):
- yield from make_app_client(**kwargs)
+def app_client():
+ yield from make_app_client()
@pytest.fixture(scope='session')
From a418c8b44f82d456be523c8690cf7236bb648c22 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 13 Jan 2019 15:09:48 -0800
Subject: [PATCH 0028/2321] Expose current git tag to Docker build, closes #399
---
.dockerignore | 1 -
Dockerfile | 2 +-
2 files changed, 1 insertion(+), 2 deletions(-)
diff --git a/.dockerignore b/.dockerignore
index 2c34db66..938173e9 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,7 +1,6 @@
.DS_Store
.cache
.eggs
-.git
.gitignore
.ipynb_checkpoints
.travis.yml
diff --git a/Dockerfile b/Dockerfile
index cd9b642c..7001e799 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -2,7 +2,7 @@ FROM python:3.6-slim-stretch as build
# Setup build dependencies
RUN apt update \
-&& apt install -y python3-dev build-essential wget libxml2-dev libproj-dev libgeos-dev libsqlite3-dev zlib1g-dev pkg-config \
+&& apt install -y python3-dev build-essential wget libxml2-dev libproj-dev libgeos-dev libsqlite3-dev zlib1g-dev pkg-config git \
&& apt clean
From 7950105c278b140e6cb665c68b59df219870f9bc Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 13 Jan 2019 15:33:50 -0800
Subject: [PATCH 0029/2321] Python 3.7.2 as base for Docker image
---
Dockerfile | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index 7001e799..08639e52 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.6-slim-stretch as build
+FROM python:3.7.2-slim-stretch as build
# Setup build dependencies
RUN apt update \
@@ -27,7 +27,7 @@ COPY . /datasette
RUN pip install /datasette
-FROM python:3.6-slim-stretch
+FROM python:3.7.2-slim-stretch
# Copy python dependencies and spatialite libraries
COPY --from=build /usr/local/lib/ /usr/local/lib/
From 909cc8fbdfc9c05e447f40e9a73489809602c3cd Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 26 Jan 2019 12:01:16 -0800
Subject: [PATCH 0030/2321] New 'datasette plugins' command to list installed
plugins
---
datasette/app.py | 7 +++++--
datasette/cli.py | 13 +++++++++++++
docs/plugins.rst | 43 +++++++++++++++++++++++++++++++++++++++++++
3 files changed, 61 insertions(+), 2 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 8252184c..7bbdef3e 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -369,7 +369,10 @@ class Datasette:
},
}
- def plugins(self):
+ def plugins(self, show_all=False):
+ ps = list(get_plugins(pm))
+ if not show_all:
+ ps = [p for p in ps if p["name"] not in DEFAULT_PLUGINS]
return [
{
"name": p["name"],
@@ -377,7 +380,7 @@ class Datasette:
"templates": p["templates_path"] is not None,
"version": p.get("version"),
}
- for p in get_plugins(pm) if p["name"] not in DEFAULT_PLUGINS
+ for p in ps
]
async def execute(
diff --git a/datasette/cli.py b/datasette/cli.py
index 2cadabbf..34ed0020 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -159,6 +159,19 @@ def skeleton(files, metadata, sqlite_extensions):
click.echo("Wrote skeleton to {}".format(metadata))
+@cli.command()
+@click.option("--all", help="Include built-in default plugins", is_flag=True)
+@click.option(
+ "--plugins-dir",
+ type=click.Path(exists=True, file_okay=False, dir_okay=True),
+ help="Path to directory containing custom plugins",
+)
+def plugins(all, plugins_dir):
+ "List currently available plugins"
+ app = Datasette([], plugins_dir=plugins_dir)
+ click.echo(json.dumps(app.plugins(all), indent=4))
+
+
@cli.command()
@click.argument("files", type=click.Path(exists=True), nargs=-1, required=True)
@click.option(
diff --git a/docs/plugins.rst b/docs/plugins.rst
index 0ec30434..6efa131b 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -58,6 +58,49 @@ Now you can navigate to http://localhost:8001/mydb and run this SQL::
To see the output of your plugin.
+Seeing what plugins are installed
+---------------------------------
+
+You can see a list of installed plugins by navigating to the ``/-/plugins`` page of your Datasette instance - for example: https://fivethirtyeight.datasettes.com/-/plugins
+
+You can also use the ``datasette plugins`` command::
+
+ $ datasette plugins
+ [
+ {
+ "name": "datasette_json_html",
+ "static": false,
+ "templates": false,
+ "version": "0.4.0"
+ }
+ ]
+
+If you run ``datasette plugins --all`` it will include default plugins that ship as part of Datasette::
+
+ $ datasette plugins --all
+ [
+ {
+ "name": "datasette_json_html",
+ "static": false,
+ "templates": false,
+ "version": "0.4.0"
+ },
+ {
+ "name": "datasette.publish.heroku",
+ "static": false,
+ "templates": false,
+ "version": null
+ },
+ {
+ "name": "datasette.publish.now",
+ "static": false,
+ "templates": false,
+ "version": null
+ }
+ ]
+
+You can add the ``--plugins-dir=`` option to include any plugins found in that directory.
+
Packaging a plugin
------------------
From b5dd83981a7dbff571284d4d90a950c740245b05 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 27 Jan 2019 17:40:23 -0800
Subject: [PATCH 0031/2321] Export option: _shape=array&_nl=on for
newline-delimited JSON
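Newline-delimited JSON is simply one ``json.dumps()`` document per line, which makes the output streamable and parseable line by line. A minimal sketch of producing and consuming it (illustrative rows, not Datasette's exact output):

    import json

    rows = [{"id": 1, "name": "barry"}, {"id": 2, "name": "terry"}]

    # Produce: one compact JSON object per line, no enclosing array.
    ndjson = "\n".join(json.dumps(row) for row in rows)

    # Consume: every line parses independently, so readers can stream.
    parsed = [json.loads(line) for line in ndjson.splitlines()]
    assert parsed == rows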
---
datasette/templates/table.html | 8 +++++++-
datasette/views/base.py | 12 ++++++++++--
docs/advanced_export.png | Bin 7662 -> 24148 bytes
docs/json_api.rst | 24 +++++++++++++++++-------
tests/test_api.py | 22 ++++++++++++++++++++++
tests/test_html.py | 3 ++-
6 files changed, 58 insertions(+), 11 deletions(-)
diff --git a/datasette/templates/table.html b/datasette/templates/table.html
index 6177163a..7a9f9115 100644
--- a/datasette/templates/table.html
+++ b/datasette/templates/table.html
@@ -140,7 +140,13 @@
{% if display_rows %}
{% endif %}
diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt
index caa00e33..65b9aceb 100644
--- a/docs/datasette-serve-help.txt
+++ b/docs/datasette-serve-help.txt
@@ -17,6 +17,7 @@ Options:
--template-dir DIRECTORY Path to directory containing custom templates
--plugins-dir DIRECTORY Path to directory containing custom plugins
--static STATIC MOUNT mountpoint:path-to-directory for serving static files
+ --memory Make :memory: database available
--config CONFIG Set config option using configname:value
datasette.readthedocs.io/en/latest/config.html
--version-note TEXT Additional note to show on /-/versions
diff --git a/tests/fixtures.py b/tests/fixtures.py
index a77a3f4a..efd85fab 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -65,6 +65,14 @@ def app_client():
yield from make_app_client()
+@pytest.fixture(scope="session")
+def app_client_no_files():
+ ds = Datasette([])
+ client = TestClient(ds.app().test_client)
+ client.ds = ds
+ yield client
+
+
@pytest.fixture(scope='session')
def app_client_shorter_time_limit():
yield from make_app_client(20)
diff --git a/tests/test_api.py b/tests/test_api.py
index 8cd1e94e..a6ba3f37 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1,5 +1,6 @@
from .fixtures import ( # noqa
app_client,
+ app_client_no_files,
app_client_shorter_time_limit,
app_client_larger_cache_size,
app_client_returned_rows_matches_page_size,
@@ -368,6 +369,31 @@ def test_database_page(app_client):
}] == data['tables']
+def test_no_files_uses_memory_database(app_client_no_files):
+ response = app_client_no_files.get("/.json")
+ assert response.status == 200
+ assert {
+ ":memory:": {
+ "hash": "000",
+ "hidden_table_rows_sum": 0,
+ "hidden_tables_count": 0,
+ "name": ":memory:",
+ "path": ":memory:-000",
+ "table_rows_sum": 0,
+ "tables_count": 0,
+ "tables_more": False,
+ "tables_truncated": [],
+ "views_count": 0,
+ }
+ } == response.json
+ # Try that SQL query
+ response = app_client_no_files.get(
+ "/:memory:-0.json?sql=select+sqlite_version()&_shape=array"
+ )
+ assert 1 == len(response.json)
+ assert ["sqlite_version()"] == list(response.json[0].keys())
+
+
def test_database_page_for_database_with_dot_in_name(app_client_with_dot):
response = app_client_with_dot.get("/fixtures.dot.json")
assert 200 == response.status
From b3e739332624c2d4f2668a105afd727af774100b Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Mar 2019 21:41:43 -0700
Subject: [PATCH 0043/2321] Allow more recent versions of Click
Closes #414
---
setup.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.py b/setup.py
index 92b92c3f..fb00f2d0 100644
--- a/setup.py
+++ b/setup.py
@@ -34,7 +34,7 @@ setup(
package_data={'datasette': ['templates/*.html']},
include_package_data=True,
install_requires=[
- 'click==6.7',
+ 'click>=6.7',
'click-default-group==1.2',
'Sanic==0.7.0',
'Jinja2==2.10',
From 285566790879b31d2fdd2a8c6f56825162eb71b9 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Mar 2019 22:00:13 -0700
Subject: [PATCH 0044/2321] Fix for test failure with Click 7.0
---
tests/test_publish_heroku.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py
index 852403ca..da4e213a 100644
--- a/tests/test_publish_heroku.py
+++ b/tests/test_publish_heroku.py
@@ -24,7 +24,7 @@ def test_publish_heroku_installs_plugin(mock_call, mock_check_output, mock_which
with runner.isolated_filesystem():
open("t.db", "w").write("data")
result = runner.invoke(cli.cli, ["publish", "heroku", "t.db"], input="y\n")
- assert -1 == result.exit_code
+ assert 0 != result.exit_code
mock_check_output.assert_has_calls(
[mock.call(["heroku", "plugins"]), mock.call(["heroku", "apps:list", "--json"])]
)
From 9e8c36793bfbb17c2f67371cc7f9aa8b9202fdc4 Mon Sep 17 00:00:00 2001
From: joelondon
Date: Fri, 15 Mar 2019 05:06:45 +0000
Subject: [PATCH 0045/2321] Update spatialite.rst (#413)
A line of SQL added to create the idx_ spatial index in the Python recipe.
---
docs/spatialite.rst | 2 ++
1 file changed, 2 insertions(+)
diff --git a/docs/spatialite.rst b/docs/spatialite.rst
index 5a8a31b1..58179e70 100644
--- a/docs/spatialite.rst
+++ b/docs/spatialite.rst
@@ -68,6 +68,8 @@ Here's a recipe for taking a table with existing latitude and longitude columns,
UPDATE events SET
point_geom = GeomFromText('POINT('||"longitude"||' '||"latitude"||')',4326);
''')
+ # Now add a spatial index to that column
+ conn.execute('select CreateSpatialIndex("museums", "point_geom");')
# If you don't commit your changes will not be persisted:
conn.commit()
conn.close()
From afe9aa3ae03c485c5d6652741438d09445a486c1 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Mar 2019 22:22:24 -0700
Subject: [PATCH 0046/2321] show/hide link for SQL on custom query page
Closes #415
---
datasette/templates/query.html | 12 +++++++-----
datasette/views/base.py | 5 +++++
tests/test_html.py | 19 +++++++++++++++++++
3 files changed, 31 insertions(+), 5 deletions(-)
diff --git a/datasette/templates/query.html b/datasette/templates/query.html
index b23c67d8..06651689 100644
--- a/datasette/templates/query.html
+++ b/datasette/templates/query.html
@@ -26,11 +26,13 @@
{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 2727565b..b7c9a4b0 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -296,10 +296,12 @@ class TableView(RowTableShared):
where_clauses, params = filters.build_where_clauses(table)
# _search support:
- fts_table = await self.ds.execute_against_connection_in_thread(
+ fts_table = special_args.get("_fts_table")
+ fts_table = fts_table or table_metadata.get("fts_table")
+ fts_table = fts_table or await self.ds.execute_against_connection_in_thread(
database, lambda conn: detect_fts(conn, table)
)
- fts_pk = table_metadata.get("fts_pk", "rowid")
+ fts_pk = special_args.get("_fts_pk", table_metadata.get("fts_pk", "rowid"))
search_args = dict(
pair for pair in special_args.items() if pair[0].startswith("_search")
)
@@ -731,6 +733,10 @@ class TableView(RowTableShared):
table, {}
)
self.ds.update_with_inherited_metadata(metadata)
+ form_hidden_args = []
+ for arg in ("_fts_table", "_fts_pk"):
+ if arg in special_args:
+ form_hidden_args.append((arg, special_args[arg]))
return {
"supports_search": bool(fts_table),
"search": search or "",
@@ -745,6 +751,7 @@ class TableView(RowTableShared):
key=lambda f: (len(f["results"]), f["name"]),
reverse=True
),
+ "form_hidden_args": form_hidden_args,
"facet_hideable": lambda facet: facet not in metadata_facets,
"is_sortable": any(c["sortable"] for c in display_columns),
"path_with_replaced_args": path_with_replaced_args,
diff --git a/docs/full_text_search.rst b/docs/full_text_search.rst
index 987e2272..08e85c90 100644
--- a/docs/full_text_search.rst
+++ b/docs/full_text_search.rst
@@ -78,9 +78,13 @@ Configuring full-text search for a table or view
If a table has a corresponding FTS table set up using the ``content=`` argument to ``CREATE VIRTUAL TABLE`` shown above, Datasette will detect it automatically and add a search interface to the table page for that table.
-You can also manually configure which table should be used for full-text search using :ref:`metadata`. You can set the associated FTS table for a specific table and you can also set one for a view - if you do that, the page for that SQL view will offer a search option.
+You can also manually configure which table should be used for full-text search using querystring parameters or :ref:`metadata`. You can set the associated FTS table for a specific table and you can also set one for a view - if you do that, the page for that SQL view will offer a search option.
-The ``fts_table`` property can be used to specify an associated FTS table. If the primary key column in your table which was used to populate the FTS table is something other than ``rowid``, you can specify the column to use with the ``fts_pk`` property.
+Use ``?_fts_table=x`` to over-ride the FTS table for a specific page. If the primary key was something other than ``rowid`` you can use ``?_fts_pk=col`` to set that as well. This is particularly useful for views, for example:
+
+https://latest.datasette.io/fixtures/searchable_view?_fts_table=searchable_fts&_fts_pk=pk
+
+The ``fts_table`` metadata property can be used to specify an associated FTS table. If the primary key column in your table which was used to populate the FTS table is something other than ``rowid``, you can specify the column to use with the ``fts_pk`` property.
Here is an example which enables full-text search for a ``display_ads`` view which is defined against the ``ads`` table and hence needs to run FTS against the ``ads_fts`` table, using the ``id`` as the primary key::
diff --git a/tests/fixtures.py b/tests/fixtures.py
index b3b38c95..cb6f7a39 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -209,6 +209,10 @@ METADATA = {
},
'simple_view': {
'sortable_columns': ['content'],
+ },
+ 'searchable_view_configured_by_metadata': {
+ 'fts_table': 'searchable_fts',
+ 'fts_pk': 'pk'
}
},
'queries': {
@@ -564,6 +568,12 @@ INSERT INTO [table/with/slashes.csv] VALUES (3, 'hey');
CREATE VIEW simple_view AS
SELECT content, upper(content) AS upper_content FROM simple_primary_key;
+CREATE VIEW searchable_view AS
+ SELECT * from searchable;
+
+CREATE VIEW searchable_view_configured_by_metadata AS
+ SELECT * from searchable;
+
''' + '\n'.join([
'INSERT INTO no_primary_key VALUES ({i}, "a{i}", "b{i}", "c{i}");'.format(i=i + 1)
for i in range(201)
diff --git a/tests/test_api.py b/tests/test_api.py
index 188a60e8..b822d23f 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -847,6 +847,24 @@ def test_searchable(app_client, path, expected_rows):
assert expected_rows == response.json['rows']
+@pytest.mark.parametrize('path,expected_rows', [
+ ('/fixtures/searchable_view_configured_by_metadata.json?_search=weasel', [
+ [2, 'terry dog', 'sara weasel', 'puma'],
+ ]),
+ # This should return all results because search is not configured:
+ ('/fixtures/searchable_view.json?_search=weasel', [
+ [1, 'barry cat', 'terry dog', 'panther'],
+ [2, 'terry dog', 'sara weasel', 'puma'],
+ ]),
+ ('/fixtures/searchable_view.json?_search=weasel&_fts_table=searchable_fts&_fts_pk=pk', [
+ [2, 'terry dog', 'sara weasel', 'puma'],
+ ]),
+])
+def test_searchable_views(app_client, path, expected_rows):
+ response = app_client.get(path)
+ assert expected_rows == response.json['rows']
+
+
def test_searchable_invalid_column(app_client):
response = app_client.get(
'/fixtures/searchable.json?_search_invalid=x'
diff --git a/tests/test_html.py b/tests/test_html.py
index 1babaa60..3e2ea845 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -185,6 +185,20 @@ def test_empty_search_parameter_gets_removed(app_client):
)
+def test_searchable_view_persists_fts_table(app_client):
+ # The search form should persist ?_fts_table as a hidden field
+ response = app_client.get(
+ "/fixtures/searchable_view?_fts_table=searchable_fts&_fts_pk=pk"
+ )
+ inputs = Soup(response.body, "html.parser").find("form").findAll("input")
+ hiddens = [i for i in inputs if i["type"] == "hidden"]
+ assert [
+ ('_fts_table', 'searchable_fts'), ('_fts_pk', 'pk')
+ ] == [
+ (hidden['name'], hidden['value']) for hidden in hiddens
+ ]
+
+
def test_sort_by_desc_redirects(app_client):
path_base = '/fixtures/sortable'
path = path_base + '?' + urllib.parse.urlencode({
From e11cb4c66442abca2a6b6159521a6cf4da8739c1 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 11 Apr 2019 22:00:47 -0700
Subject: [PATCH 0065/2321] Persist show/hide state better, closes #425
---
datasette/templates/_codemirror_foot.html | 2 +-
datasette/templates/query.html | 5 ++++-
tests/test_html.py | 8 ++++++++
3 files changed, 13 insertions(+), 2 deletions(-)
diff --git a/datasette/templates/_codemirror_foot.html b/datasette/templates/_codemirror_foot.html
index 1e07fc72..4b55bf8d 100644
--- a/datasette/templates/_codemirror_foot.html
+++ b/datasette/templates/_codemirror_foot.html
@@ -1,5 +1,5 @@
{% endfor %}
[Body of this hunk, plus the datasette/templates/query.html and tests/test_html.py changes listed above, lost in extraction.]
diff --git a/datasette/templates/database.html b/datasette/templates/database.html
index 9fb4d6eb..f168db97 100644
--- a/datasette/templates/database.html
+++ b/datasette/templates/database.html
@@ -9,8 +9,14 @@
{% block body_class %}db db-{{ database|to_css_class }}{% endblock %}
+{% block nav %}
[Remainder of this hunk garbled in extraction: the added nav block lists database.tables_and_views_truncated, appending ", ..." when tables_and_views_more is set. The tail of a tests/test_html.py expected-rows assertion was also lost.]
@@ -611,7 +616,7 @@ def test_table_html_foreign_key_links(app_client):
def test_table_html_disable_foreign_key_links_with_labels(app_client):
- response = app_client.get("/fixtures/foreign_key_references?_labels=off")
+ response = app_client.get("/fixtures/foreign_key_references?_labels=off&_size=1")
assert response.status == 200
table = Soup(response.body, "html.parser").find("table")
expected = [
From c3181d9a840dff7be8c990b21f5749db393a4ea0 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 2 Nov 2019 15:47:20 -0700
Subject: [PATCH 0268/2321] Release notes for 0.30.2
---
docs/changelog.rst | 13 ++++++++++++-
1 file changed, 12 insertions(+), 1 deletion(-)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 8ac32c45..f4761efe 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,16 @@
Changelog
=========
+.. _v0_30_2:
+
+0.30.2 (2019-11-02)
+-------------------
+
+- ``/-/plugins`` page now uses distribution name e.g. ``datasette-cluster-map`` instead of the name of the underlying Python package (``datasette_cluster_map``) (`#606 `__)
+- Array faceting is now only suggested for columns that contain arrays of strings (`#562 `__)
+- Better documentation for the ``--host`` argument (`#574 `__)
+- Don't show ``None`` with a broken link for the label on a nullable foreign key (`#406 `__)
+
.. _v0_30_1:
0.30.1 (2019-10-30)
@@ -14,6 +24,7 @@ Changelog
.. _v0_30:
+
0.30 (2019-10-18)
-----------------
@@ -82,7 +93,7 @@ Two new plugins take advantage of this hook:
New plugin hook: extra_template_vars
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The :ref:`plugin_extra_template_vars` plugin hook allows plugins to inject their own additional variables into the Datasette template context. This can be used in conjunction with custom templates to customize the Datasette interface. `datasette-auth-github `__ uses this hook to add custom HTML to the new top navigation bar (which is designed to be modified by plugins, see `#540 `__).
+The :ref:`plugin_hook_extra_template_vars` plugin hook allows plugins to inject their own additional variables into the Datasette template context. This can be used in conjunction with custom templates to customize the Datasette interface. `datasette-auth-github `__ uses this hook to add custom HTML to the new top navigation bar (which is designed to be modified by plugins, see `#540 `__).
Secret plugin configuration options
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
From 2bf7ce5f517d772a16d7855a35a8a75d4456aad7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 2 Nov 2019 16:12:46 -0700
Subject: [PATCH 0269/2321] Fix CSV export for nullable foreign keys, closes
#612
---
datasette/views/base.py | 12 ++++++++----
tests/test_csv.py | 15 +++++++++++++++
2 files changed, 23 insertions(+), 4 deletions(-)
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 1568b084..94945304 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -330,10 +330,14 @@ class DataView(BaseView):
else:
# Look for {"value": "label": } dicts and expand
new_row = []
- for cell in row:
- if isinstance(cell, dict):
- new_row.append(cell["value"])
- new_row.append(cell["label"])
+ for heading, cell in zip(data["columns"], row):
+ if heading in expanded_columns:
+ if cell is None:
+ new_row.extend(("", ""))
+ else:
+ assert isinstance(cell, dict)
+ new_row.append(cell["value"])
+ new_row.append(cell["label"])
else:
new_row.append(cell)
await writer.writerow(new_row)
diff --git a/tests/test_csv.py b/tests/test_csv.py
index b148b6db..13aca489 100644
--- a/tests/test_csv.py
+++ b/tests/test_csv.py
@@ -41,6 +41,14 @@ pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,com
"\n", "\r\n"
)
+EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV = """
+pk,foreign_key_with_label,foreign_key_with_label_label,foreign_key_with_no_label,foreign_key_with_no_label_label
+1,1,hello,1,1
+2,,,,
+""".lstrip().replace(
+ "\n", "\r\n"
+)
+
def test_table_csv(app_client):
response = app_client.get("/fixtures/simple_primary_key.csv")
@@ -63,6 +71,13 @@ def test_table_csv_with_labels(app_client):
assert EXPECTED_TABLE_WITH_LABELS_CSV == response.text
+def test_table_csv_with_nullable_labels(app_client):
+ response = app_client.get("/fixtures/foreign_key_references.csv?_labels=1")
+ assert response.status == 200
+ assert "text/plain; charset=utf-8" == response.headers["content-type"]
+ assert EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV == response.text
+
+
def test_custom_sql_csv(app_client):
response = app_client.get(
"/fixtures.csv?sql=select+content+from+simple_primary_key+limit+2"
From ee330222f4c3ee66c2fe41ebc76fed56b9cb9a00 Mon Sep 17 00:00:00 2001
From: Tobias Kunze
Date: Mon, 4 Nov 2019 03:39:55 +0100
Subject: [PATCH 0270/2321] Offer to format readonly SQL (#602)
Following discussion in #601, this PR adds a "Format SQL" button to
read-only SQL (if the SQL actually differs from the formatting result).
It also removes a console error on readonly SQL queries.
Thanks, @rixx!
---
datasette/templates/_codemirror_foot.html | 41 ++++++++++++++---------
1 file changed, 26 insertions(+), 15 deletions(-)
diff --git a/datasette/templates/_codemirror_foot.html b/datasette/templates/_codemirror_foot.html
index 9aba61ab..4019d448 100644
--- a/datasette/templates/_codemirror_foot.html
+++ b/datasette/templates/_codemirror_foot.html
@@ -6,21 +6,32 @@ window.onload = () => {
if (sqlFormat && !readOnly) {
sqlFormat.hidden = false;
}
- var editor = CodeMirror.fromTextArea(sqlInput, {
- lineNumbers: true,
- mode: "text/x-sql",
- lineWrapping: true,
- });
- editor.setOption("extraKeys", {
- "Shift-Enter": function() {
- document.getElementsByClassName("sql")[0].submit();
- },
- Tab: false
- });
- if (sqlInput && sqlFormat) {
- sqlFormat.addEventListener("click", ev => {
- editor.setValue(sqlFormatter.format(editor.getValue()));
- })
+ if (sqlInput) {
+ var editor = CodeMirror.fromTextArea(sqlInput, {
+ lineNumbers: true,
+ mode: "text/x-sql",
+ lineWrapping: true,
+ });
+ editor.setOption("extraKeys", {
+ "Shift-Enter": function() {
+ document.getElementsByClassName("sql")[0].submit();
+ },
+ Tab: false
+ });
+ if (sqlFormat) {
+ sqlFormat.addEventListener("click", ev => {
+ editor.setValue(sqlFormatter.format(editor.getValue()));
+ })
+ }
+ }
+ if (sqlFormat && readOnly) {
+ const formatted = sqlFormatter.format(readOnly.innerHTML);
+ if (formatted != readOnly.innerHTML) {
+ sqlFormat.hidden = false;
+ sqlFormat.addEventListener("click", ev => {
+ readOnly.innerHTML = formatted;
+ })
+ }
}
}
From 9db22cdf1809fb78a7b183cd2f617cd5e26efc68 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 3 Nov 2019 20:11:55 -0800
Subject: [PATCH 0271/2321] pk__notin= filter, closes #614
---
datasette/filters.py | 15 +++++++++++++++
docs/json_api.rst | 3 +++
tests/test_filters.py | 3 +++
3 files changed, 21 insertions(+)
diff --git a/datasette/filters.py b/datasette/filters.py
index efe014ae..5897a3ed 100644
--- a/datasette/filters.py
+++ b/datasette/filters.py
@@ -77,6 +77,20 @@ class InFilter(Filter):
return "{} in {}".format(column, json.dumps(self.split_value(value)))
+class NotInFilter(InFilter):
+ key = "notin"
+ display = "not in"
+
+ def where_clause(self, table, column, value, param_counter):
+ values = self.split_value(value)
+ params = [":p{}".format(param_counter + i) for i in range(len(values))]
+ sql = "{} not in ({})".format(escape_sqlite(column), ", ".join(params))
+ return sql, values
+
+ def human_clause(self, column, value):
+ return "{} not in {}".format(column, json.dumps(self.split_value(value)))
+
+
class Filters:
_filters = (
[
@@ -125,6 +139,7 @@ class Filters:
TemplatedFilter("like", "like", '"{c}" like :{p}', '{c} like "{v}"'),
TemplatedFilter("glob", "glob", '"{c}" glob :{p}', '{c} glob "{v}"'),
InFilter(),
+ NotInFilter(),
]
+ (
[
diff --git a/docs/json_api.rst b/docs/json_api.rst
index 4b365e14..de70362c 100644
--- a/docs/json_api.rst
+++ b/docs/json_api.rst
@@ -228,6 +228,9 @@ You can filter the data returned by the table based on column values using a que
``?column__in=["value","value,with,commas"]``
+``?column__notin=value1,value2,value3``
+ Rows where column does not match any of the provided values. The inverse of ``__in=``. Also supports JSON arrays.
+
``?column__arraycontains=value``
Works against columns that contain JSON arrays - matches if any of the values in that array match.
diff --git a/tests/test_filters.py b/tests/test_filters.py
index fd682cd9..8598087f 100644
--- a/tests/test_filters.py
+++ b/tests/test_filters.py
@@ -47,6 +47,9 @@ import pytest
["foo in (:p0, :p1)"],
["dog,cat", "cat[dog]"],
),
+ # Not in, and JSON array not in
+ ((("foo__notin", "1,2,3"),), ["foo not in (:p0, :p1, :p2)"], ["1", "2", "3"]),
+ ((("foo__notin", "[1,2,3]"),), ["foo not in (:p0, :p1, :p2)"], [1, 2, 3]),
],
)
def test_build_where(args, expected_where, expected_params):
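``NotInFilter`` inherits ``split_value`` from ``InFilter``, which (judging by the tests above) JSON-decodes values that look like arrays and otherwise splits on commas. A sketch of that inferred behaviour, not the exact implementation::

    import json

    def split_value(value):
        # Inferred: JSON arrays decode to typed values,
        # anything else splits on commas into strings
        if value.startswith("["):
            return json.loads(value)
        return [v.strip() for v in value.split(",")]

    split_value("1,2,3")    # ['1', '2', '3'] -> foo not in (:p0, :p1, :p2)
    split_value("[1,2,3]")  # [1, 2, 3], same SQL with integer params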
From 52fa79c6075f0830ff635b81d957c64d877a05aa Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 4 Nov 2019 15:03:48 -0800
Subject: [PATCH 0272/2321] Use select colnames, not select * for table view -
refs #615
---
datasette/views/table.py | 8 ++++++--
tests/test_api.py | 3 ++-
2 files changed, 8 insertions(+), 3 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 326c11ae..139ff80b 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -235,13 +235,17 @@ class TableView(RowTableShared):
raise NotFound("Table not found: {}".format(table))
pks = await db.primary_keys(table)
+ table_columns = await db.table_columns(table)
+
+ select_columns = ", ".join(escape_sqlite(t) for t in table_columns)
+
use_rowid = not pks and not is_view
if use_rowid:
- select = "rowid, *"
+ select = "rowid, {}".format(select_columns)
order_by = "rowid"
order_by_pks = "rowid"
else:
- select = "*"
+ select = select_columns
order_by_pks = ", ".join([escape_sqlite(pk) for pk in pks])
order_by = order_by_pks
diff --git a/tests/test_api.py b/tests/test_api.py
index c6acbab1..4a09b238 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -610,7 +610,8 @@ def test_table_json(app_client):
assert response.status == 200
data = response.json
assert (
- data["query"]["sql"] == "select * from simple_primary_key order by id limit 51"
+ data["query"]["sql"]
+ == "select id, content from simple_primary_key order by id limit 51"
)
assert data["query"]["params"] == {}
assert data["rows"] == [
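The construction is just the two added lines in ``TableView``; with a simplified stand-in for ``escape_sqlite`` (the real helper also brackets reserved words) the fixtures example plays out like this::

    def escape_sqlite(name):
        # Simplified stand-in for datasette.utils.escape_sqlite
        return name if name.isidentifier() else "[{}]".format(name)

    table_columns = ["id", "content"]
    select_columns = ", ".join(escape_sqlite(c) for c in table_columns)
    print("select {} from simple_primary_key order by id limit 51".format(select_columns))
    # select id, content from simple_primary_key order by id limit 51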
From 931bfc66613aa3e22f8314df5c0d0758baf31f38 Mon Sep 17 00:00:00 2001
From: Tobias Kunze
Date: Tue, 5 Nov 2019 00:16:30 +0100
Subject: [PATCH 0273/2321] Handle spaces in DB names (#590)
Closes #503 - thanks, @rixx
---
datasette/views/base.py | 3 ++-
tests/fixtures.py | 4 ++--
tests/test_api.py | 19 ++++++++++++++++++-
tests/test_html.py | 8 ++++----
4 files changed, 26 insertions(+), 8 deletions(-)
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 94945304..062c6956 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -203,12 +203,13 @@ class DataView(BaseView):
hash = hash_bit
else:
name = db_name
- # Verify the hash
+ name = urllib.parse.unquote_plus(name)
try:
db = self.ds.databases[name]
except KeyError:
raise NotFound("Database not found: {}".format(name))
+ # Verify the hash
expected = "000"
if db.hash is not None:
expected = db.hash[:HASH_LENGTH]
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 8aa44687..dcc414bf 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -174,7 +174,7 @@ def app_client_no_files():
@pytest.fixture(scope="session")
def app_client_two_attached_databases():
yield from make_app_client(
- extra_databases={"extra_database.db": EXTRA_DATABASE_SQL}
+ extra_databases={"extra database.db": EXTRA_DATABASE_SQL}
)
@@ -188,7 +188,7 @@ def app_client_conflicting_database_names():
@pytest.fixture(scope="session")
def app_client_two_attached_databases_one_immutable():
yield from make_app_client(
- is_immutable=True, extra_databases={"extra_database.db": EXTRA_DATABASE_SQL}
+ is_immutable=True, extra_databases={"extra database.db": EXTRA_DATABASE_SQL}
)
diff --git a/tests/test_api.py b/tests/test_api.py
index 4a09b238..1fa8642f 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -6,6 +6,7 @@ from .fixtures import ( # noqa
app_client_shorter_time_limit,
app_client_larger_cache_size,
app_client_returned_rows_matches_page_size,
+ app_client_two_attached_databases,
app_client_two_attached_databases_one_immutable,
app_client_conflicting_database_names,
app_client_with_cors,
@@ -1188,7 +1189,7 @@ def test_databases_json(app_client_two_attached_databases_one_immutable):
databases = response.json
assert 2 == len(databases)
extra_database, fixtures_database = databases
- assert "extra_database" == extra_database["name"]
+ assert "extra database" == extra_database["name"]
assert None == extra_database["hash"]
assert True == extra_database["is_mutable"]
assert False == extra_database["is_memory"]
@@ -1679,6 +1680,22 @@ def test_cors(app_client_with_cors, path, status_code):
assert "*" == response.headers["Access-Control-Allow-Origin"]
+@pytest.mark.parametrize(
+ "path",
+ (
+ "/",
+ ".json",
+ "/searchable",
+ "/searchable.json",
+ "/searchable_view",
+ "/searchable_view.json",
+ ),
+)
+def test_database_with_space_in_name(app_client_two_attached_databases, path):
+ response = app_client_two_attached_databases.get("/extra database" + path)
+ assert response.status == 200
+
+
def test_common_prefix_database_names(app_client_conflicting_database_names):
# https://github.com/simonw/datasette/issues/597
assert ["fixtures", "foo", "foo-bar"] == [
diff --git a/tests/test_html.py b/tests/test_html.py
index f63e595b..7f1af86e 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -27,11 +27,11 @@ def test_homepage(app_client_two_attached_databases):
# Should be two attached databases
assert [
{"href": "/fixtures", "text": "fixtures"},
- {"href": "/extra_database", "text": "extra_database"},
+ {"href": "/extra database", "text": "extra database"},
] == [{"href": a["href"], "text": a.text.strip()} for a in soup.select("h2 a")]
# The first attached database should show count text and attached tables
h2 = soup.select("h2")[1]
- assert "extra_database" == h2.text.strip()
+ assert "extra database" == h2.text.strip()
counts_p, links_p = h2.find_all_next("p")[:2]
assert (
"2 rows in 1 table, 5 rows in 4 hidden tables, 1 view" == counts_p.text.strip()
@@ -41,8 +41,8 @@ def test_homepage(app_client_two_attached_databases):
{"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a")
]
assert [
- {"href": "/extra_database/searchable", "text": "searchable"},
- {"href": "/extra_database/searchable_view", "text": "searchable_view"},
+ {"href": "/extra database/searchable", "text": "searchable"},
+ {"href": "/extra database/searchable_view", "text": "searchable_view"},
] == table_links
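The one-line decode is what makes ``extra database.db`` routable: path segments arrive percent-encoded, and ``unquote_plus`` reverses both ``%20`` and ``+`` encodings::

    import urllib.parse

    urllib.parse.unquote_plus("extra%20database")  # 'extra database'
    urllib.parse.unquote_plus("extra+database")    # 'extra database'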
From c30f07c58e410ee296b28aeabe4dc461dd40b435 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 5 Nov 2019 21:12:55 -0800
Subject: [PATCH 0274/2321] Removed _group_count=col feature, closes #504
---
datasette/views/table.py | 12 ------------
docs/json_api.rst | 9 ---------
2 files changed, 21 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 139ff80b..920693d7 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -499,18 +499,6 @@ class TableView(RowTableShared):
if order_by:
order_by = "order by {} ".format(order_by)
- # _group_count=col1&_group_count=col2
- group_count = special_args_lists.get("_group_count") or []
- if group_count:
- sql = 'select {group_cols}, count(*) as "count" from {table_name} {where} group by {group_cols} order by "count" desc limit 100'.format(
- group_cols=", ".join(
- '"{}"'.format(group_count_col) for group_count_col in group_count
- ),
- table_name=escape_sqlite(table),
- where=where_clause,
- )
- return await self.custom_sql(request, database, hash, sql, editable=True)
-
extra_args = {}
# Handle ?_size=500
page_size = _size or request.raw_args.get("_size")
diff --git a/docs/json_api.rst b/docs/json_api.rst
index de70362c..e369bee7 100644
--- a/docs/json_api.rst
+++ b/docs/json_api.rst
@@ -321,15 +321,6 @@ Special table arguments
Here's `an example `__.
-
-``?_group_count=COLUMN``
- Executes a SQL query that returns a count of the number of rows matching
- each unique value in that column, with the most common ordered first.
-
-``?_group_count=COLUMN1&_group_count=column2``
- You can pass multiple ``_group_count`` columns to return counts against
- unique combinations of those columns.
-
``?_next=TOKEN``
Pagination by continuation token - pass the token that was returned in the
``"next"`` property by the previous page.
From f9c146b893856a48afa810ebcce1714f30d0d3a2 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 6 Nov 2019 16:55:44 -0800
Subject: [PATCH 0275/2321] Removed unused special_args_lists variable
---
datasette/views/table.py | 2 --
1 file changed, 2 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 920693d7..a60a3941 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -261,12 +261,10 @@ class TableView(RowTableShared):
# That's so if there is a column that starts with _
# it can still be queried using ?_col__exact=blah
special_args = {}
- special_args_lists = {}
other_args = []
for key, value in args.items():
if key.startswith("_") and "__" not in key:
special_args[key] = value[0]
- special_args_lists[key] = value
else:
for v in value:
other_args.append((key, v))
From 83fc5165ac724f69cd57d8f15cd3038e7b30f878 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 7 Nov 2019 18:48:39 -0800
Subject: [PATCH 0276/2321] Improved UI for publish cloudrun, closes #608
---
datasette/publish/cloudrun.py | 39 ++++++++++++++++++++++--
tests/test_publish_cloudrun.py | 55 ++++++++++++++++++++++++++++++++--
2 files changed, 90 insertions(+), 4 deletions(-)
diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py
index c2d77746..a833a32b 100644
--- a/datasette/publish/cloudrun.py
+++ b/datasette/publish/cloudrun.py
@@ -60,6 +60,23 @@ def publish_subcommand(publish):
"gcloud config get-value project", shell=True, universal_newlines=True
).strip()
+ if not service:
+ # Show the user their current services, then prompt for one
+ click.echo("Please provide a service name for this deployment\n")
+ click.echo("Using an existing service name will over-write it")
+ click.echo("")
+ existing_services = get_existing_services()
+ if existing_services:
+ click.echo("Your existing services:\n")
+ for existing_service in existing_services:
+ click.echo(
+ " {name} - created {created} - {url}".format(
+ **existing_service
+ )
+ )
+ click.echo("")
+ service = click.prompt("Service name", type=str)
+
extra_metadata = {
"title": title,
"license": license,
@@ -110,8 +127,26 @@ def publish_subcommand(publish):
image_id = "gcr.io/{project}/{name}".format(project=project, name=name)
check_call("gcloud builds submit --tag {}".format(image_id), shell=True)
check_call(
- "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {}{}".format(
- image_id, " {}".format(service) if service else ""
+ "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} {}".format(
+ image_id, service,
),
shell=True,
)
+
+
+def get_existing_services():
+ services = json.loads(
+ check_output(
+ "gcloud beta run services list --platform=managed --format json",
+ shell=True,
+ universal_newlines=True,
+ )
+ )
+ return [
+ {
+ "name": service["metadata"]["name"],
+ "created": service["metadata"]["creationTimestamp"],
+ "url": service["status"]["address"]["url"],
+ }
+ for service in services
+ ]
diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py
index 481ac04d..a038b60e 100644
--- a/tests/test_publish_cloudrun.py
+++ b/tests/test_publish_cloudrun.py
@@ -24,6 +24,53 @@ def test_publish_cloudrun_invalid_database(mock_which):
assert 'Path "woop.db" does not exist' in result.output
+@mock.patch("shutil.which")
+@mock.patch("datasette.publish.cloudrun.check_output")
+@mock.patch("datasette.publish.cloudrun.check_call")
+@mock.patch("datasette.publish.cloudrun.get_existing_services")
+def test_publish_cloudrun_prompts_for_service(
+ mock_get_existing_services, mock_call, mock_output, mock_which
+):
+ mock_get_existing_services.return_value = [
+ {"name": "existing", "created": "2019-01-01", "url": "http://www.example.com/"}
+ ]
+ mock_output.return_value = "myproject"
+ mock_which.return_value = True
+ runner = CliRunner()
+ with runner.isolated_filesystem():
+ open("test.db", "w").write("data")
+ result = runner.invoke(
+ cli.cli, ["publish", "cloudrun", "test.db"], input="input-service"
+ )
+ assert (
+ """
+Please provide a service name for this deployment
+
+Using an existing service name will over-write it
+
+Your existing services:
+
+ existing - created 2019-01-01 - http://www.example.com/
+
+Service name: input-service
+""".strip()
+ == result.output.strip()
+ )
+ assert 0 == result.exit_code
+ tag = "gcr.io/myproject/datasette"
+ mock_call.assert_has_calls(
+ [
+ mock.call("gcloud builds submit --tag {}".format(tag), shell=True),
+ mock.call(
+ "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} input-service".format(
+ tag
+ ),
+ shell=True,
+ ),
+ ]
+ )
+
+
@mock.patch("shutil.which")
@mock.patch("datasette.publish.cloudrun.check_output")
@mock.patch("datasette.publish.cloudrun.check_call")
@@ -33,14 +80,16 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which):
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
- result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"])
+ result = runner.invoke(
+ cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"]
+ )
assert 0 == result.exit_code
tag = "gcr.io/{}/datasette".format(mock_output.return_value)
mock_call.assert_has_calls(
[
mock.call("gcloud builds submit --tag {}".format(tag), shell=True),
mock.call(
- "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {}".format(
+ "gcloud beta run deploy --allow-unauthenticated --platform=managed --image {} test".format(
tag
),
shell=True,
@@ -65,6 +114,8 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which):
"publish",
"cloudrun",
"test.db",
+ "--service",
+ "datasette",
"--plugin-secret",
"datasette-auth-github",
"client_id",
From 9f5d19c254d1bfbd99f576dff47a6e32e01c76ed Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 Nov 2019 18:12:20 -0800
Subject: [PATCH 0277/2321] Improved documentation for "publish cloudrun"
---
docs/publish.rst | 18 ++++++++++--------
1 file changed, 10 insertions(+), 8 deletions(-)
diff --git a/docs/publish.rst b/docs/publish.rst
index 304be8ef..89d33085 100644
--- a/docs/publish.rst
+++ b/docs/publish.rst
@@ -43,14 +43,16 @@ You will first need to install and configure the Google Cloud CLI tools by follo
You can then publish a database to Google Cloud Run using the following command::
- datasette publish cloudrun mydatabase.db
+ datasette publish cloudrun mydatabase.db --service=my-database
+
+A Cloud Run **service** is a single hosted application. The service name you specify will be used as part of the Cloud Run URL. If you deploy to a service name that you have used in the past, your new deployment will replace the previous one.
+
+If you omit the ``--service`` option you will be asked to pick a service name interactively during the deploy.
You may need to interact with prompts from the tool. Once it has finished it will output a URL like this one::
- Service [datasette] revision [datasette-00001] has been deployed
- and is serving traffic at https://datasette-j7hipcg4aq-uc.a.run.app
-
-During the deployment the tool will prompt you for the name of your service. You can reuse an existing name to replace your previous deployment with your new version, or pick a new name to deploy to a new URL.
+ Service [my-service] revision [my-service-00001] has been deployed
+ and is serving traffic at https://my-service-j7hipcg4aq-uc.a.run.app
.. literalinclude:: datasette-publish-cloudrun-help.txt
@@ -90,18 +92,18 @@ Custom metadata and plugins
You can define your own :ref:`metadata` and deploy that with your instance like so::
- datasette publish nowv1 mydatabase.db -m metadata.json
+ datasette publish cloudrun --service=my-service mydatabase.db -m metadata.json
If you just want to set the title, license or source information you can do that directly using extra options to ``datasette publish``::
- datasette publish nowv1 mydatabase.db \
+ datasette publish cloudrun mydatabase.db --service=my-service \
--title="Title of my database" \
--source="Where the data originated" \
--source_url="http://www.example.com/"
You can also specify plugins you would like to install. For example, if you want to include the `datasette-vega `_ visualization plugin you can use the following::
- datasette publish nowv1 mydatabase.db --install=datasette-vega
+ datasette publish cloudrun mydatabase.db --service=my-service --install=datasette-vega
If a plugin has any :ref:`plugins_configuration_secret` you can use the ``--plugin-secret`` option to set those secrets at publish time. For example, using Heroku with `datasette-auth-github `__ you might run the following command::
From 10b9d85edaaf198879344aa1c498000cfb27dff8 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 8 Nov 2019 18:15:13 -0800
Subject: [PATCH 0278/2321] datasette-csvs on Glitch now uses sqlite-utils
It previously used csvs-to-sqlite but that had heavy dependencies.
See https://support.glitch.com/t/can-you-upgrade-python-to-latest-version/7980/33
---
docs/getting_started.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/getting_started.rst b/docs/getting_started.rst
index d0c22583..fdf7d23c 100644
--- a/docs/getting_started.rst
+++ b/docs/getting_started.rst
@@ -25,7 +25,7 @@ Glitch allows you to "remix" any project to create your own copy and start editi
.. image:: https://cdn.glitch.com/2703baf2-b643-4da7-ab91-7ee2a2d00b5b%2Fremix-button.svg
:target: https://glitch.com/edit/#!/remix/datasette-csvs
-Find a CSV file and drag it onto the Glitch file explorer panel - ``datasette-csvs`` will automatically convert it to a SQLite database (using `csvs-to-sqlite `__) and allow you to start exploring it using Datasette.
+Find a CSV file and drag it onto the Glitch file explorer panel - ``datasette-csvs`` will automatically convert it to a SQLite database (using `sqlite-utils `__) and allow you to start exploring it using Datasette.
If your CSV file has a ``latitude`` and ``longitude`` column you can visualize it on a map by uncommenting the ``datasette-cluster-map`` line in the ``requirements.txt`` file using the Glitch file editor.
From 28c4a6db5b5e512db630d7ba6127196185de67c7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 9 Nov 2019 17:29:36 -0800
Subject: [PATCH 0279/2321] CREATE INDEX statements on table page, closes #618
---
datasette/database.py | 13 ++++++++++++-
tests/fixtures.py | 1 +
tests/test_html.py | 33 +++++++++++++++++++++++++++++++++
3 files changed, 46 insertions(+), 1 deletion(-)
diff --git a/datasette/database.py b/datasette/database.py
index 7e6f7245..3a1cea94 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -232,7 +232,18 @@ class Database:
)
if not table_definition_rows:
return None
- return table_definition_rows[0][0]
+ bits = [table_definition_rows[0][0] + ";"]
+ # Add on any indexes
+ index_rows = list(
+ await self.ds.execute(
+ self.name,
+ "select sql from sqlite_master where tbl_name = :n and type='index' and sql is not null",
+ {"n": table},
+ )
+ )
+ for index_row in index_rows:
+ bits.append(index_row[0] + ";")
+ return "\n".join(bits)
async def get_view_definition(self, view):
return await self.get_table_definition(view, "view")
diff --git a/tests/fixtures.py b/tests/fixtures.py
index dcc414bf..87e66f99 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -514,6 +514,7 @@ CREATE TABLE compound_three_primary_keys (
content text,
PRIMARY KEY (pk1, pk2, pk3)
);
+CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_keys(content);
CREATE TABLE foreign_key_references (
pk varchar(30) primary key,
diff --git a/tests/test_html.py b/tests/test_html.py
index 7f1af86e..44627cdc 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -119,6 +119,39 @@ def test_row_strange_table_name_with_url_hash(app_client_with_hash):
assert response.status == 200
+@pytest.mark.parametrize(
+ "path,expected_definition_sql",
+ [
+ (
+ "/fixtures/facet_cities",
+ """
+CREATE TABLE facet_cities (
+ id integer primary key,
+ name text
+);
+ """.strip(),
+ ),
+ (
+ "/fixtures/compound_three_primary_keys",
+ """
+CREATE TABLE compound_three_primary_keys (
+ pk1 varchar(30),
+ pk2 varchar(30),
+ pk3 varchar(30),
+ content text,
+ PRIMARY KEY (pk1, pk2, pk3)
+);
+CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_keys(content);
+ """.strip(),
+ ),
+ ],
+)
+def test_definition_sql(path, expected_definition_sql, app_client):
+ response = app_client.get(path)
+ pre = Soup(response.body, "html.parser").select_one("pre.wrapped-sql")
+ assert expected_definition_sql == pre.string
+
+
def test_table_cell_truncation():
for client in make_app_client(config={"truncate_cells_html": 5}):
response = client.get("/fixtures/facetable")
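``sqlite_master`` stores index DDL in the same ``sql`` column as table DDL, and auto-created indexes (such as those backing ``UNIQUE`` constraints) have ``sql IS NULL``, which is why the query filters them out. The same lookup in plain ``sqlite3``::

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript(
        "create table t (id integer primary key, content text);"
        "create index idx_t_content on t(content);"
    )
    rows = conn.execute(
        "select sql from sqlite_master "
        "where tbl_name = :n and type = 'index' and sql is not null",
        {"n": "t"},
    ).fetchall()
    print(rows[0][0])  # the index's original CREATE INDEX statement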
From 1c063fae9dba70f70244db010d55a18846640f07 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 10 Nov 2019 19:45:34 -0800
Subject: [PATCH 0280/2321] Test against Python 3.8 in Travis (#623)
* Test against Python 3.8 in Travis
* Avoid current_task warnings in Python 3.8
---
.travis.yml | 1 +
datasette/tracer.py | 9 ++++++++-
2 files changed, 9 insertions(+), 1 deletion(-)
diff --git a/.travis.yml b/.travis.yml
index 29388bc1..a6b15b7e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -5,6 +5,7 @@ dist: xenial
python:
- "3.6"
- "3.7"
+ - "3.8"
- "3.5"
# Executed for 3.5 AND 3.5 as the first "test" stage:
diff --git a/datasette/tracer.py b/datasette/tracer.py
index e46a6fda..a638b140 100644
--- a/datasette/tracer.py
+++ b/datasette/tracer.py
@@ -9,12 +9,19 @@ tracers = {}
TRACE_RESERVED_KEYS = {"type", "start", "end", "duration_ms", "traceback"}
+# asyncio.current_task was introduced in Python 3.7:
+for obj in (asyncio, asyncio.Task):
+ current_task = getattr(obj, "current_task", None)
+ if current_task is not None:
+ break
+
+
def get_task_id():
try:
loop = asyncio.get_event_loop()
except RuntimeError:
return None
- return id(asyncio.Task.current_task(loop=loop))
+ return id(current_task(loop=loop))
@contextmanager
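``asyncio.Task.current_task()`` is deprecated in Python 3.8 in favour of the module-level ``asyncio.current_task()`` added in 3.7; the loop above binds whichever exists. A condensed equivalent of the shim::

    import asyncio

    # Prefer asyncio.current_task (3.7+), fall back to the
    # older asyncio.Task.current_task on 3.6
    current_task = getattr(asyncio, "current_task", None) or asyncio.Task.current_task

    async def main():
        print(id(current_task()))  # unique id of the running task

    asyncio.run(main())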
From 42ee3e16a9ba7cc513b8da944cc1609a5407cf42 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 10 Nov 2019 20:19:01 -0800
Subject: [PATCH 0281/2321] Bump pint to 0.9 (#624)
This fixes 2 deprecation warnings in Python 3.8 - refs #623 #622
---
setup.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.py b/setup.py
index 9ae56306..e8229de1 100644
--- a/setup.py
+++ b/setup.py
@@ -45,7 +45,7 @@ setup(
"click-default-group~=1.2.1",
"Jinja2~=2.10.1",
"hupper~=1.0",
- "pint~=0.8.1",
+ "pint~=0.9",
"pluggy~=0.12.0",
"uvicorn~=0.8.4",
"aiofiles~=0.4.0",
From 5bc2570121aea8141ff88790e214765472882b08 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 20:45:12 -0800
Subject: [PATCH 0282/2321] Include uvicorn version in /-/versions, refs #622
---
datasette/app.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/datasette/app.py b/datasette/app.py
index 203e0991..4ba4adfb 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -12,6 +12,7 @@ from pathlib import Path
import click
from markupsafe import Markup
from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader
+import uvicorn
from .views.base import DatasetteError, ureg, AsgiRouter
from .views.database import DatabaseDownload, DatabaseView
@@ -433,6 +434,7 @@ class Datasette:
},
"datasette": datasette_version,
"asgi": "3.0",
+ "uvicorn": uvicorn.__version__,
"sqlite": {
"version": sqlite_version,
"fts_versions": fts_versions,
From cf7776d36fbacefa874cbd6e5fcdc9fff7661203 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:09:11 -0800
Subject: [PATCH 0283/2321] Support Python 3.8, stop supporting Python 3.5
(#627)
* Upgrade to uvicorn 0.10.4
* Drop support for Python 3.5
* Bump all dependencies to latest releases
* Update docs to reflect we no longer support 3.5
* Removed code that skipped black unit test on 3.5
Closes #622
---
.travis.yml | 1 -
README.md | 2 +-
docs/contributing.rst | 2 +-
docs/installation.rst | 7 +++++--
setup.py | 20 ++++++++++----------
tests/test_black.py | 7 +------
6 files changed, 18 insertions(+), 21 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index a6b15b7e..0fc87d93 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -6,7 +6,6 @@ python:
- "3.6"
- "3.7"
- "3.8"
- - "3.5"
# Executed for 3.5 AND 3.5 as the first "test" stage:
script:
diff --git a/README.md b/README.md
index 9f85f1ba..14c9cfd6 100644
--- a/README.md
+++ b/README.md
@@ -69,7 +69,7 @@ sqlite-utils: a Python library and CLI tool for building SQLite databases](https
pip3 install datasette
-Datasette requires Python 3.5 or higher. We also have [detailed installation instructions](https://datasette.readthedocs.io/en/stable/installation.html) covering other options such as Docker.
+Datasette requires Python 3.6 or higher. We also have [detailed installation instructions](https://datasette.readthedocs.io/en/stable/installation.html) covering other options such as Docker.
## Basic usage
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 43834edc..078fd841 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -18,7 +18,7 @@ General guidelines
Setting up a development environment
------------------------------------
-If you have Python 3.5 or higher installed on your computer (on OS X the easiest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps.
+If you have Python 3.6 or higher installed on your computer (on OS X the easiest way to do this `is using homebrew `__) you can install an editable copy of Datasette using the following steps.
If you want to use GitHub to publish your changes, first `create a fork of datasette `__ under your own GitHub account.
diff --git a/docs/installation.rst b/docs/installation.rst
index e65d8ee3..9ee7eb4e 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -69,16 +69,19 @@ You can now run the new custom image like so::
You can confirm that the plugins are installed by visiting
http://127.0.0.1:8001/-/plugins
-
Install using pip
-----------------
-To run Datasette without Docker you will need Python 3.5 or higher.
+To run Datasette without Docker you will need Python 3.6 or higher.
You can install Datasette and its dependencies using ``pip``::
pip install datasette
+The last version to support Python 3.5 was 0.30.2 - you can install that version like so::
+
+ pip install datasette==0.30.2
+
If you want to install Datasette in its own virtual environment, use this::
python -mvenv datasette-venv
diff --git a/setup.py b/setup.py
index e8229de1..7a4cdcb3 100644
--- a/setup.py
+++ b/setup.py
@@ -42,12 +42,12 @@ setup(
include_package_data=True,
install_requires=[
"click~=7.0",
- "click-default-group~=1.2.1",
- "Jinja2~=2.10.1",
- "hupper~=1.0",
+ "click-default-group~=1.2.2",
+ "Jinja2~=2.10.3",
+ "hupper~=1.9",
"pint~=0.9",
- "pluggy~=0.12.0",
- "uvicorn~=0.8.4",
+ "pluggy~=0.13.0",
+ "uvicorn~=0.10.4",
"aiofiles~=0.4.0",
],
entry_points="""
@@ -58,11 +58,11 @@ setup(
extras_require={
"docs": ["sphinx_rtd_theme", "sphinx-autobuild"],
"test": [
- "pytest~=5.0.0",
+ "pytest~=5.2.2",
"pytest-asyncio~=0.10.0",
- "aiohttp~=3.5.3",
- "beautifulsoup4~=4.6.1",
- "asgiref~=3.1.2",
+ "aiohttp~=3.6.2",
+ "beautifulsoup4~=4.8.1",
+ "asgiref~=3.2.3",
]
+ maybe_black,
},
@@ -74,8 +74,8 @@ setup(
"Intended Audience :: End Users/Desktop",
"Topic :: Database",
"License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.6",
- "Programming Language :: Python :: 3.5",
],
)
diff --git a/tests/test_black.py b/tests/test_black.py
index 68e2dcc0..b5bfcfd0 100644
--- a/tests/test_black.py
+++ b/tests/test_black.py
@@ -1,3 +1,4 @@
+import black
from click.testing import CliRunner
from pathlib import Path
import pytest
@@ -6,13 +7,7 @@ import sys
code_root = Path(__file__).parent.parent
-@pytest.mark.skipif(
- sys.version_info[:2] < (3, 6), reason="Black requires Python 3.6 or later"
-)
def test_black():
- # Do not import at top of module because Python 3.5 will not have it installed
- import black
-
runner = CliRunner()
result = runner.invoke(
black.main, [str(code_root / "tests"), str(code_root / "datasette"), "--check"]
From 76fc6a9c7317ce4fbf3cc3d327c849f7274d960a Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:17:59 -0800
Subject: [PATCH 0284/2321] Release notes for 0.31
---
docs/changelog.rst | 22 ++++++++++++++++++++++
1 file changed, 22 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index f4761efe..6e260be9 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,28 @@
Changelog
=========
+.. _v0_31:
+
+0.31 (2019-11-11)
+-----------------
+
+This version adds compatibility with Python 3.8 and breaks compatibility with Python 3.5.
+
+If you are still running Python 3.5 you should stick with ``0.30.2``, which you can install like this::
+
+ pip install datasette==0.30.2
+
+- Format SQL button now works with read-only SQL queries - thanks, Tobias Kunze (`#602 `__)
+- New ``?column__notin=x,y,z`` filter for table views (`#614 `__)
+- Table view now uses ``select col1, col2, col3`` instead of ``select *``
+- Database filenames can now contain spaces - thanks, Tobias Kunze (`#590 `__)
+- Removed obsolete ``?_group_count=col`` feature (`#504 `__)
+- Improved user interface and documentation for ``datasette publish cloudrun`` (`#608 `__)
+- Tables with indexes now show the `` CREATE INDEX`` statements on the table page (`#618 `__)
+- Current version of `uvicorn `__ is now shown on ``/-/versions``
+- Python 3.8 is now supported! (`#622 `__)
+- Python 3.5 is no longer supported.
+
.. _v0_30_2:
0.30.2 (2019-11-02)
From c633c035dc8d4c60f1d13cb074918406bbdb3734 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:26:56 -0800
Subject: [PATCH 0285/2321] Datasette 0.31 in news section
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 14c9cfd6..05995a74 100644
--- a/README.md
+++ b/README.md
@@ -21,6 +21,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+ * 11th November 2019: [Datasette 0.31](https://datasette.readthedocs.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5.
* 18th October 2019: [Datasette 0.30](https://datasette.readthedocs.io/en/stable/changelog.html#v0-30)
* 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail.
* 7th July 2019: [Datasette 0.29](https://datasette.readthedocs.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more...
From 7f89928062b1a1fdb2625a946f7cd5161e597401 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:33:51 -0800
Subject: [PATCH 0286/2321] Removed code that conditionally installs black
Since we no longer support Python 3.5 we don't need this any more.
---
setup.py | 9 ++-------
1 file changed, 2 insertions(+), 7 deletions(-)
diff --git a/setup.py b/setup.py
index 7a4cdcb3..15284779 100644
--- a/setup.py
+++ b/setup.py
@@ -22,11 +22,6 @@ def get_version():
return g["__version__"]
-# Only install black on Python 3.6 or higher
-maybe_black = []
-if sys.version_info > (3, 6):
- maybe_black = ["black~=19.10b0"]
-
setup(
name="datasette",
version=versioneer.get_version(),
@@ -63,8 +58,8 @@ setup(
"aiohttp~=3.6.2",
"beautifulsoup4~=4.8.1",
"asgiref~=3.2.3",
- ]
- + maybe_black,
+ "black~=19.10b0",
+ ],
},
tests_require=["datasette[test]"],
classifiers=[
From 1c518680e9692a9a77022af54f3de3e77fb1aaf4 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 21:57:48 -0800
Subject: [PATCH 0287/2321] Final steps: build stable branch of Read The Docs
---
docs/contributing.rst | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 078fd841..48930332 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -150,4 +150,7 @@ Wait long enough for Travis to build and deploy the demo version of that commit
git tag 0.25.2
git push --tags
-Once the release is out, you can manually update https://github.com/simonw/datasette/releases
+Final steps once the release has deployed to https://pypi.org/project/datasette/
+
+* Manually post the new release to GitHub releases: https://github.com/simonw/datasette/releases
+* Manually kick off a build of the `stable` branch on Read The Docs: https://readthedocs.org/projects/datasette/builds/
From f554be39fc14ddc18921ca29d3920d55aad03d46 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 22:00:13 -0800
Subject: [PATCH 0288/2321] ReST fix
---
docs/changelog.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 6e260be9..763b178e 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -21,7 +21,7 @@ If you are still running Python 3.5 you should stick with ``0.30.2``, which you
- Database filenames can now contain spaces - thanks, Tobias Kunze (`#590 `__)
- Removed obsolete ``?_group_count=col`` feature (`#504 `__)
- Improved user interface and documentation for ``datasette publish cloudrun`` (`#608 `__)
-- Tables with indexes now show the `` CREATE INDEX`` statements on the table page (`#618 `__)
+- Tables with indexes now show the ``CREATE INDEX`` statements on the table page (`#618 `__)
- Current version of `uvicorn `__ is now shown on ``/-/versions``
- Python 3.8 is now supported! (`#622 `__)
- Python 3.5 is no longer supported.
From d977fbadf70a96bf2eea1407d01f99d98e092dec Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 11 Nov 2019 22:03:09 -0800
Subject: [PATCH 0289/2321] datasette publish uses python:3.8 base Docker
image, closes #629
---
datasette/utils/__init__.py | 2 +-
tests/test_publish_cloudrun.py | 2 +-
tests/test_publish_now.py | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 3d28a36b..b8df48cf 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -306,7 +306,7 @@ def make_dockerfile(
install = ["datasette"] + list(install)
return """
-FROM python:3.6
+FROM python:3.8
COPY . /app
WORKDIR /app
{spatialite_extras}
diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py
index a038b60e..c5b18cdf 100644
--- a/tests/test_publish_cloudrun.py
+++ b/tests/test_publish_cloudrun.py
@@ -128,7 +128,7 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which):
.split("\n====================\n")[0]
.strip()
)
- expected = """FROM python:3.6
+ expected = """FROM python:3.8
COPY . /app
WORKDIR /app
diff --git a/tests/test_publish_now.py b/tests/test_publish_now.py
index 72aa71db..27fd1245 100644
--- a/tests/test_publish_now.py
+++ b/tests/test_publish_now.py
@@ -138,7 +138,7 @@ def test_publish_now_plugin_secrets(mock_run, mock_which):
.split("\n====================\n")[0]
.strip()
)
- expected = """FROM python:3.6
+ expected = """FROM python:3.8
COPY . /app
WORKDIR /app
From 16265f6a1a7c547e3925e0fc2d6b88754afb0435 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 12 Nov 2019 18:18:04 -0800
Subject: [PATCH 0290/2321] Release notes for 0.31.1
---
docs/changelog.rst | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 763b178e..746f5b42 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,13 @@
Changelog
=========
+.. _v0_31_1:
+
+0.31.1 (2019-11-12)
+-------------------
+
+- Deploymens created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 `__)
+
.. _v0_31:
0.31 (2019-11-11)
From a22c7761b61baa61b8e3da7d30887468d61d6b83 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 12 Nov 2019 18:18:39 -0800
Subject: [PATCH 0291/2321] Fixed typo in release notes
---
docs/changelog.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 746f5b42..e527518e 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -9,7 +9,7 @@ Changelog
0.31.1 (2019-11-12)
-------------------
-- Deploymens created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 `__)
+- Deployments created using ``datasette publish`` now use ``python:3.8`` base Docker image (`#629 `__)
.. _v0_31:
From bbd00e903cdd49067ecdbdb60a4d225833a44b05 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 12 Nov 2019 18:38:13 -0800
Subject: [PATCH 0292/2321] Badge linking to datasette on hub.docker.com
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 05995a74..9a22c2b2 100644
--- a/README.md
+++ b/README.md
@@ -6,6 +6,7 @@
[](http://datasette.readthedocs.io/en/latest/?badge=latest)
[](https://github.com/simonw/datasette/blob/master/LICENSE)
[](https://black.readthedocs.io/en/stable/)
+[](https://hub.docker.com/r/datasetteproject/datasette)
*A tool for exploring and publishing data*
From 848dec4deb0d3c140a4e0394cac45fbb2593349b Mon Sep 17 00:00:00 2001
From: Stanley Zheng
Date: Tue, 12 Nov 2019 23:28:42 -0500
Subject: [PATCH 0293/2321] Fix for datasette publish with just --source_url
(#631)
Closes #572
---
datasette/templates/_description_source_license.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/datasette/templates/_description_source_license.html b/datasette/templates/_description_source_license.html
index 3327706e..a2bc18f2 100644
--- a/datasette/templates/_description_source_license.html
+++ b/datasette/templates/_description_source_license.html
@@ -21,7 +21,7 @@
{% endif %}{{ metadata.source or metadata.source_url }}{% if metadata.source_url %}{% endif %}
{% endif %}
- {% if metadata.about or metadata.about_url %}{% if metadata.license or metadata.license_url or metadata.source or metadat.source_url %}·{% endif %}
+ {% if metadata.about or metadata.about_url %}{% if metadata.license or metadata.license_url or metadata.source or metadata.source_url %}·{% endif %}
About: {% if metadata.about_url %}
{% endif %}{{ metadata.about or metadata.about_url }}{% if metadata.about_url %}{% endif %}
From f52451023025579ae9a13de4a7f00d69200184cd Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 13 Nov 2019 08:42:47 -0800
Subject: [PATCH 0294/2321] Fix "publish heroku" + upgrade to use Python 3.8.0
Closes #633. Closes #632.
---
datasette/publish/heroku.py | 7 +++++--
tests/test_publish_heroku.py | 9 +++++++--
2 files changed, 12 insertions(+), 4 deletions(-)
diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py
index 34d1f773..e75f76df 100644
--- a/datasette/publish/heroku.py
+++ b/datasette/publish/heroku.py
@@ -72,7 +72,10 @@ def publish_subcommand(publish):
"about_url": about_url,
}
- environment_variables = {}
+ environment_variables = {
+ # Avoid uvicorn error: https://github.com/simonw/datasette/issues/633
+ "WEB_CONCURRENCY": "1"
+ }
if plugin_secret:
extra_metadata["plugins"] = {}
for plugin_name, plugin_setting, setting_value in plugin_secret:
@@ -164,7 +167,7 @@ def temporary_heroku_directory(
if metadata_content:
open("metadata.json", "w").write(json.dumps(metadata_content, indent=2))
- open("runtime.txt", "w").write("python-3.6.8")
+ open("runtime.txt", "w").write("python-3.8.0")
if branch:
install = [
diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py
index 4cd66219..87386e93 100644
--- a/tests/test_publish_heroku.py
+++ b/tests/test_publish_heroku.py
@@ -57,8 +57,13 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which):
open("test.db", "w").write("data")
result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"])
assert 0 == result.exit_code, result.output
- mock_call.assert_called_once_with(
- ["heroku", "builds:create", "-a", "f", "--include-vcs-ignore"]
+ mock_call.assert_has_calls(
+ [
+ mock.call(["heroku", "config:set", "-a", "f", "WEB_CONCURRENCY=1",]),
+ mock.call(
+ ["heroku", "builds:create", "-a", "f", "--include-vcs-ignore"]
+ ),
+ ]
)
From b51f258d00bb3c3b401f15d46a1fbd50394dbe1c Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 13 Nov 2019 08:48:36 -0800
Subject: [PATCH 0295/2321] Release notes for 0.31.2
---
docs/changelog.rst | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index e527518e..f4958399 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,15 @@
Changelog
=========
+.. _v0_31_2:
+
+0.31.2 (2019-11-13)
+-------------------
+
+- Fixed a bug where ``datasette publish heroku`` applications failed to start (`#633 `__)
+- Fix for ``datasette publish`` with just ``--source_url`` - thanks, Stanley Zheng (`#572 `__)
+- Deployments to Heroku now use Python 3.8.0 (`#632 `__)
+
.. _v0_31_1:
0.31.1 (2019-11-12)
From 8c642f04e0608bf537fdd1f76d64c2367fb04d57 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Nov 2019 15:14:22 -0800
Subject: [PATCH 0296/2321] Render templates using Jinja async mode
Closes #628
---
datasette/app.py | 6 ++++--
datasette/views/base.py | 2 +-
docs/plugins.rst | 23 ++++++++++++-----------
tests/fixtures.py | 8 +++++++-
tests/test_plugins.py | 18 ++++++++++++++++++
tests/test_templates/show_json.html | 1 +
6 files changed, 43 insertions(+), 15 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 4ba4adfb..02fcf303 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -583,7 +583,9 @@ class Datasette:
),
]
)
- self.jinja_env = Environment(loader=template_loader, autoescape=True)
+ self.jinja_env = Environment(
+ loader=template_loader, autoescape=True, enable_async=True
+ )
self.jinja_env.filters["escape_css_string"] = escape_css_string
self.jinja_env.filters["quote_plus"] = lambda u: urllib.parse.quote_plus(u)
self.jinja_env.filters["escape_sqlite"] = escape_sqlite
@@ -730,5 +732,5 @@ class DatasetteRouter(AsgiRouter):
else:
template = self.ds.jinja_env.select_template(templates)
await asgi_send_html(
- send, template.render(info), status=status, headers=headers
+ send, await template.render_async(info), status=status, headers=headers
)
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 062c6956..5182479c 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -139,7 +139,7 @@ class BaseView(AsgiView):
extra_template_vars.update(extra_vars)
return Response.html(
- template.render(
+ await template.render_async(
{
**context,
**{
diff --git a/docs/plugins.rst b/docs/plugins.rst
index 6df7ff6a..e5a3d7dd 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -629,7 +629,9 @@ Function that returns a dictionary
If you return a function it will be executed. If it returns a dictionary those values will be merged into the template context.
Function that returns an awaitable function that returns a dictionary
- You can also return a function which returns an awaitable function which returns a dictionary. This means you can execute additional SQL queries using ``datasette.execute()``.
+ You can also return a function which returns an awaitable function which returns a dictionary.
+
+Datasette runs Jinja2 in `async mode `__, which means you can add awaitable functions to the template scope and they will be automatically awaited when they are rendered by the template.
Here's an example plugin that returns an authentication object from the ASGI scope:
@@ -641,20 +643,19 @@ Here's an example plugin that returns an authentication object from the ASGI sco
"auth": request.scope.get("auth")
}
-And here's an example which returns the current version of SQLite:
+And here's an example which adds a ``sql_first(sql_query)`` function which executes a SQL statement and returns the first column of the first row of results:
.. code-block:: python
@hookimpl
- def extra_template_vars(datasette):
- async def inner():
- first_db = list(datasette.databases.keys())[0]
- return {
- "sqlite_version": (
- await datasette.execute(first_db, "select sqlite_version()")
- ).rows[0][0]
- }
- return inner
+ def extra_template_vars(datasette, database):
+ async def sql_first(sql, dbname=None):
+ dbname = dbname or database or next(iter(datasette.databases.keys()))
+ return (await datasette.execute(dbname, sql)).rows[0][0]
+
+ return {"sql_first": sql_first}
+
+You can then use the new function in a template like so::
+
+ SQLite version: {{ sql_first("select sqlite_version()") }}
.. _plugin_register_output_renderer:
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 87e66f99..3e4203f7 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -446,13 +446,19 @@ def render_cell(value, database):
@hookimpl
def extra_template_vars(template, database, table, view_name, request, datasette):
+ async def query_database(sql):
+ first_db = list(datasette.databases.keys())[0]
+ return (
+ await datasette.execute(first_db, sql)
+ ).rows[0][0]
async def inner():
return {
"extra_template_vars_from_awaitable": json.dumps({
"template": template,
"scope_path": request.scope["path"],
"awaitable": True,
- }, default=lambda b: b.decode("utf8"))
+ }, default=lambda b: b.decode("utf8")),
+ "query_database": query_database,
}
return inner
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index b1c7fd9a..42d063f4 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -1,5 +1,6 @@
from bs4 import BeautifulSoup as Soup
from .fixtures import app_client, make_app_client, TEMP_PLUGIN_SECRET_FILE # noqa
+from datasette.utils import sqlite3
import base64
import json
import os
@@ -214,3 +215,20 @@ def test_plugins_extra_template_vars(restore_working_directory):
"awaitable": True,
"scope_path": "/-/metadata",
} == extra_template_vars_from_awaitable
+
+
+def test_plugins_async_template_function(restore_working_directory):
+ for client in make_app_client(
+ template_dir=str(pathlib.Path(__file__).parent / "test_templates")
+ ):
+ response = client.get("/-/metadata")
+ assert response.status == 200
+ extra_from_awaitable_function = (
+ Soup(response.body, "html.parser")
+ .select("pre.extra_from_awaitable_function")[0]
+ .text
+ )
+ expected = (
+ sqlite3.connect(":memory:").execute("select sqlite_version()").fetchone()[0]
+ )
+ assert expected == extra_from_awaitable_function
diff --git a/tests/test_templates/show_json.html b/tests/test_templates/show_json.html
index bbf1bc06..cff04fb4 100644
--- a/tests/test_templates/show_json.html
+++ b/tests/test_templates/show_json.html
@@ -5,4 +5,5 @@
Test data for extra_template_vars:
{{ extra_template_vars|safe }}
{{ extra_template_vars_from_awaitable|safe }}
+
{{ query_database("select sqlite_version();") }}
{% endblock %}
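Under ``enable_async=True`` every render goes through ``render_async``, and Jinja awaits any coroutine it encounters while rendering, which is what lets the template above call ``query_database(...)`` directly. A self-contained illustration (not Datasette code)::

    import asyncio
    from jinja2 import Environment

    env = Environment(enable_async=True)

    async def answer():
        return 42  # stand-in for an awaited SQL query

    template = env.from_string("answer = {{ answer() }}")
    print(asyncio.run(template.render_async(answer=answer)))
    # answer = 42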
From a95bedb9c423fa6d772c93ef47bc40f13a5bea50 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Nov 2019 15:18:53 -0800
Subject: [PATCH 0297/2321] Release notes for 0.32
---
docs/changelog.rst | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/docs/changelog.rst b/docs/changelog.rst
index f4958399..2f909364 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,13 @@
Changelog
=========
+.. _v0_32:
+
+0.32 (2019-11-14)
+-----------------
+
+Datasette now renders templates using `Jinja async mode `__. This makes it easy for plugins to provide custom template functions that perform asynchronous actions, for example the new `datasette-template-sql `__ plugin which allows custom templates to directly execute SQL queries and render their results. (`#628 `__)
+
.. _v0_31_2:
0.31.2 (2019-11-13)
From 8fc9a5d877d26dbf2654e125f407ddd2fd767335 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 14 Nov 2019 15:46:37 -0800
Subject: [PATCH 0298/2321] Datasette 0.32 and datasette-template-sql in news
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 9a22c2b2..030c507f 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
## News
+ * 14th November 2019: [Datasette 0.32](https://datasette.readthedocs.io/en/stable/changelog.html#v0-32) now uses asynchronous rendering in Jinja templates, which means template functions can perform asynchronous operations such as executing SQL queries. [datasette-template-sql](https://github.com/simonw/datasette-template-sql) is a new plugin that uses this capability to add a new custom `sql(sql_query)` template function.
* 11th November 2019: [Datasette 0.31](https://datasette.readthedocs.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5.
* 18th October 2019: [Datasette 0.30](https://datasette.readthedocs.io/en/stable/changelog.html#v0-30)
* 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail.
From a9909c29ccac771c23c2ef22b89d10697b5256b9 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 15 Nov 2019 14:49:45 -0800
Subject: [PATCH 0299/2321] Move .execute() from Datasette to Database
Refs #569 - I split this change out from #579
---
datasette/app.py | 90 ++++++---------------------
datasette/database.py | 137 +++++++++++++++++++++++++++++++-----------
2 files changed, 121 insertions(+), 106 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index 02fcf303..119d0e19 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -24,13 +24,11 @@ from .database import Database
from .utils import (
QueryInterrupted,
- Results,
escape_css_string,
escape_sqlite,
get_plugins,
module_from_path,
sqlite3,
- sqlite_timelimit,
to_css_class,
)
from .utils.asgi import (
@@ -42,13 +40,12 @@ from .utils.asgi import (
asgi_send_json,
asgi_send_redirect,
)
-from .tracer import trace, AsgiTracer
+from .tracer import AsgiTracer
from .plugins import pm, DEFAULT_PLUGINS
from .version import __version__
app_root = Path(__file__).parent.parent
-connections = threading.local()
MEMORY = object()
ConfigOption = collections.namedtuple("ConfigOption", ("name", "default", "help"))
@@ -336,6 +333,25 @@ class Datasette:
# pylint: disable=no-member
pm.hook.prepare_connection(conn=conn)
+ async def execute(
+ self,
+ db_name,
+ sql,
+ params=None,
+ truncate=False,
+ custom_time_limit=None,
+ page_size=None,
+ log_sql_errors=True,
+ ):
+ return await self.databases[db_name].execute(
+ sql,
+ params=params,
+ truncate=truncate,
+ custom_time_limit=custom_time_limit,
+ page_size=page_size,
+ log_sql_errors=log_sql_errors,
+ )
+
async def expand_foreign_keys(self, database, table, column, values):
"Returns dict mapping (column, value) -> label"
labeled_fks = {}
@@ -477,72 +493,6 @@ class Datasette:
.get(table, {})
)
- async def execute_against_connection_in_thread(self, db_name, fn):
- def in_thread():
- conn = getattr(connections, db_name, None)
- if not conn:
- conn = self.databases[db_name].connect()
- self.prepare_connection(conn)
- setattr(connections, db_name, conn)
- return fn(conn)
-
- return await asyncio.get_event_loop().run_in_executor(self.executor, in_thread)
-
- async def execute(
- self,
- db_name,
- sql,
- params=None,
- truncate=False,
- custom_time_limit=None,
- page_size=None,
- log_sql_errors=True,
- ):
- """Executes sql against db_name in a thread"""
- page_size = page_size or self.page_size
-
- def sql_operation_in_thread(conn):
- time_limit_ms = self.sql_time_limit_ms
- if custom_time_limit and custom_time_limit < time_limit_ms:
- time_limit_ms = custom_time_limit
-
- with sqlite_timelimit(conn, time_limit_ms):
- try:
- cursor = conn.cursor()
- cursor.execute(sql, params or {})
- max_returned_rows = self.max_returned_rows
- if max_returned_rows == page_size:
- max_returned_rows += 1
- if max_returned_rows and truncate:
- rows = cursor.fetchmany(max_returned_rows + 1)
- truncated = len(rows) > max_returned_rows
- rows = rows[:max_returned_rows]
- else:
- rows = cursor.fetchall()
- truncated = False
- except sqlite3.OperationalError as e:
- if e.args == ("interrupted",):
- raise QueryInterrupted(e, sql, params)
- if log_sql_errors:
- print(
- "ERROR: conn={}, sql = {}, params = {}: {}".format(
- conn, repr(sql), params, e
- )
- )
- raise
-
- if truncate:
- return Results(rows, truncated, cursor.description)
-
- else:
- return Results(rows, False, cursor.description)
-
- with trace("sql", database=db_name, sql=sql.strip(), params=params):
- results = await self.execute_against_connection_in_thread(
- db_name, sql_operation_in_thread
- )
- return results
-
def register_renderers(self):
""" Register output renderers which output data in custom formats. """
# Built-in renderers
diff --git a/datasette/database.py b/datasette/database.py
index 3a1cea94..9a8ae4d4 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -1,17 +1,25 @@
+import asyncio
+import contextlib
from pathlib import Path
+import threading
+from .tracer import trace
from .utils import (
QueryInterrupted,
+ Results,
detect_fts,
detect_primary_keys,
detect_spatialite,
get_all_foreign_keys,
get_outbound_foreign_keys,
+ sqlite_timelimit,
sqlite3,
table_columns,
)
from .inspect import inspect_hash
+connections = threading.local()
+
class Database:
def __init__(self, ds, path=None, is_mutable=False, is_memory=False):
@@ -45,6 +53,73 @@ class Database:
"file:{}?{}".format(self.path, qs), uri=True, check_same_thread=False
)
+ async def execute_against_connection_in_thread(self, fn):
+ def in_thread():
+ conn = getattr(connections, self.name, None)
+ if not conn:
+ conn = self.connect()
+ self.ds.prepare_connection(conn)
+ setattr(connections, self.name, conn)
+ return fn(conn)
+
+ return await asyncio.get_event_loop().run_in_executor(
+ self.ds.executor, in_thread
+ )
+
+ async def execute(
+ self,
+ sql,
+ params=None,
+ truncate=False,
+ custom_time_limit=None,
+ page_size=None,
+ log_sql_errors=True,
+ ):
+ """Executes sql against db_name in a thread"""
+ page_size = page_size or self.ds.page_size
+
+ def sql_operation_in_thread(conn):
+ time_limit_ms = self.ds.sql_time_limit_ms
+ if custom_time_limit and custom_time_limit < time_limit_ms:
+ time_limit_ms = custom_time_limit
+
+ with sqlite_timelimit(conn, time_limit_ms):
+ try:
+ cursor = conn.cursor()
+ cursor.execute(sql, params or {})
+ max_returned_rows = self.ds.max_returned_rows
+ if max_returned_rows == page_size:
+ max_returned_rows += 1
+ if max_returned_rows and truncate:
+ rows = cursor.fetchmany(max_returned_rows + 1)
+ truncated = len(rows) > max_returned_rows
+ rows = rows[:max_returned_rows]
+ else:
+ rows = cursor.fetchall()
+ truncated = False
+ except sqlite3.OperationalError as e:
+ if e.args == ("interrupted",):
+ raise QueryInterrupted(e, sql, params)
+ if log_sql_errors:
+ print(
+ "ERROR: conn={}, sql = {}, params = {}: {}".format(
+ conn, repr(sql), params, e
+ )
+ )
+ raise
+
+ if truncate:
+ return Results(rows, truncated, cursor.description)
+
+ else:
+ return Results(rows, False, cursor.description)
+
+ with trace("sql", database=self.name, sql=sql.strip(), params=params):
+ results = await self.execute_against_connection_in_thread(
+ sql_operation_in_thread
+ )
+ return results
+
@property
def size(self):
if self.is_memory:
@@ -62,8 +137,7 @@ class Database:
for table in await self.table_names():
try:
table_count = (
- await self.ds.execute(
- self.name,
+ await self.execute(
"select count(*) from [{}]".format(table),
custom_time_limit=limit,
)
@@ -89,32 +163,30 @@ class Database:
return Path(self.path).stem
async def table_exists(self, table):
- results = await self.ds.execute(
- self.name,
- "select 1 from sqlite_master where type='table' and name=?",
- params=(table,),
+ results = await self.execute(
+ "select 1 from sqlite_master where type='table' and name=?", params=(table,)
)
return bool(results.rows)
async def table_names(self):
- results = await self.ds.execute(
- self.name, "select name from sqlite_master where type='table'"
+ results = await self.execute(
+ "select name from sqlite_master where type='table'"
)
return [r[0] for r in results.rows]
async def table_columns(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: table_columns(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: table_columns(conn, table)
)
async def primary_keys(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: detect_primary_keys(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: detect_primary_keys(conn, table)
)
async def fts_table(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: detect_fts(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: detect_fts(conn, table)
)
async def label_column_for_table(self, table):
@@ -124,8 +196,8 @@ class Database:
if explicit_label_column:
return explicit_label_column
# If a table has two columns, one of which is ID, then label_column is the other one
- column_names = await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: table_columns(conn, table)
+ column_names = await self.execute_against_connection_in_thread(
+ lambda conn: table_columns(conn, table)
)
# Is there a name or title column?
name_or_title = [c for c in column_names if c in ("name", "title")]
@@ -141,8 +213,8 @@ class Database:
return None
async def foreign_keys_for_table(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: get_outbound_foreign_keys(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: get_outbound_foreign_keys(conn, table)
)
async def hidden_table_names(self):
@@ -150,18 +222,17 @@ class Database:
hidden_tables = [
r[0]
for r in (
- await self.ds.execute(
- self.name,
+ await self.execute(
"""
select name from sqlite_master
where rootpage = 0
and sql like '%VIRTUAL TABLE%USING FTS%'
- """,
+ """
)
).rows
]
- has_spatialite = await self.ds.execute_against_connection_in_thread(
- self.name, detect_spatialite
+ has_spatialite = await self.execute_against_connection_in_thread(
+ detect_spatialite
)
if has_spatialite:
# Also hide Spatialite internal tables
@@ -178,13 +249,12 @@ class Database:
] + [
r[0]
for r in (
- await self.ds.execute(
- self.name,
+ await self.execute(
"""
select name from sqlite_master
where name like "idx_%"
and type = "table"
- """,
+ """
)
).rows
]
@@ -207,25 +277,20 @@ class Database:
return hidden_tables
async def view_names(self):
- results = await self.ds.execute(
- self.name, "select name from sqlite_master where type='view'"
- )
+ results = await self.execute("select name from sqlite_master where type='view'")
return [r[0] for r in results.rows]
async def get_all_foreign_keys(self):
- return await self.ds.execute_against_connection_in_thread(
- self.name, get_all_foreign_keys
- )
+ return await self.execute_against_connection_in_thread(get_all_foreign_keys)
async def get_outbound_foreign_keys(self, table):
- return await self.ds.execute_against_connection_in_thread(
- self.name, lambda conn: get_outbound_foreign_keys(conn, table)
+ return await self.execute_against_connection_in_thread(
+ lambda conn: get_outbound_foreign_keys(conn, table)
)
async def get_table_definition(self, table, type_="table"):
table_definition_rows = list(
- await self.ds.execute(
- self.name,
+ await self.execute(
"select sql from sqlite_master where name = :n and type=:t",
{"n": table, "t": type_},
)
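Sketched as hypothetical caller code (the "fixtures" database name is assumed from the test suite), the refactor gives each Database object its own execute() method while keeping Datasette.execute() as a thin delegator:

    # Inside an async context, given a Datasette instance `ds`
    db = ds.databases["fixtures"]
    results = await db.execute("select count(*) from sqlite_master")
    print(results.rows[0][0])

    # The old entry point still works, forwarding to the Database object:
    results = await ds.execute("fixtures", "select count(*) from sqlite_master")

Keeping the delegator means existing callers of datasette.execute(db_name, sql) continue to work unchanged.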
From 440a70428c624f6e27b630026acdba2032acc9a7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 19 Nov 2019 15:01:10 -0800
Subject: [PATCH 0300/2321] Include rowid in filter select, closes #636
---
datasette/views/table.py | 6 +-----
tests/test_html.py | 24 ++++++++++++++++++++++++
2 files changed, 25 insertions(+), 5 deletions(-)
diff --git a/datasette/views/table.py b/datasette/views/table.py
index a60a3941..516b474d 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -587,10 +587,6 @@ class TableView(RowTableShared):
columns = [r[0] for r in results.description]
rows = list(results.rows)
- filter_columns = columns[:]
- if use_rowid and filter_columns[0] == "rowid":
- filter_columns = filter_columns[1:]
-
# Expand labeled columns if requested
expanded_columns = []
expandable_columns = await self.expandable_columns(database, table)
@@ -720,7 +716,7 @@ class TableView(RowTableShared):
"use_rowid": use_rowid,
"filters": filters,
"display_columns": display_columns,
- "filter_columns": filter_columns,
+ "filter_columns": columns,
"display_rows": display_rows,
"facets_timed_out": facets_timed_out,
"sorted_facet_results": sorted(
diff --git a/tests/test_html.py b/tests/test_html.py
index 44627cdc..3b331f38 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -679,6 +679,30 @@ def test_table_html_foreign_key_custom_label_column(app_client):
]
+@pytest.mark.parametrize(
+ "path,expected_column_options",
+ [
+ ("/fixtures/infinity", ["- column -", "rowid", "value"]),
+ (
+ "/fixtures/primary_key_multiple_columns",
+ ["- column -", "id", "content", "content2"],
+ ),
+ ("/fixtures/compound_primary_key", ["- column -", "pk1", "pk2", "content"]),
+ ],
+)
+def test_table_html_filter_form_column_options(
+ path, expected_column_options, app_client
+):
+ response = app_client.get(path)
+ assert response.status == 200
+ form = Soup(response.body, "html.parser").find("form")
+ column_options = [
+ o.attrs.get("value") or o.string
+ for o in form.select("select[name=_filter_column] option")
+ ]
+ assert expected_column_options == column_options
+
+
def test_row_html_compound_primary_key(app_client):
response = app_client.get("/fixtures/compound_primary_key/a,b")
assert response.status == 200
From c16be14517414a94e1fdbd888e8a3ad0669e3bca Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 20 Nov 2019 10:02:07 -0800
Subject: [PATCH 0301/2321] How to upgrade using Docker
---
docs/installation.rst | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/docs/installation.rst b/docs/installation.rst
index 9ee7eb4e..c547f9e4 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -33,6 +33,10 @@ Now visit http://127.0.0.1:8001/ to access Datasette.
(You can download a copy of ``fixtures.db`` from
https://latest.datasette.io/fixtures.db )
+To upgrade to the most recent release of Datasette, run the following::
+
+ docker pull datasetteproject/datasette
+
Loading Spatialite
~~~~~~~~~~~~~~~~~~
From fd137da7f83c117b18e189707a1039e319dd5c91 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 21 Nov 2019 16:56:55 -0800
Subject: [PATCH 0302/2321] Suggest column facet only if at least one count > 1
Fixes #638
---
datasette/facets.py | 5 ++++-
tests/fixtures.py | 33 +++++++++++++++++----------------
tests/test_api.py | 30 ++++++++++++++++++++++++++++--
tests/test_csv.py | 32 ++++++++++++++++----------------
4 files changed, 65 insertions(+), 35 deletions(-)
diff --git a/datasette/facets.py b/datasette/facets.py
index 0c6459d6..a314faaf 100644
--- a/datasette/facets.py
+++ b/datasette/facets.py
@@ -143,9 +143,10 @@ class ColumnFacet(Facet):
if column in already_enabled:
continue
suggested_facet_sql = """
- select distinct {column} from (
+ select {column}, count(*) as n from (
{sql}
) where {column} is not null
+ group by {column}
limit {limit}
""".format(
column=escape_sqlite(column), sql=self.sql, limit=facet_size + 1
@@ -165,6 +166,8 @@ class ColumnFacet(Facet):
and num_distinct_values > 1
and num_distinct_values <= facet_size
and num_distinct_values < row_count
+ # And at least one has n > 1
+ and any(r["n"] > 1 for r in distinct_values)
):
suggested_facets.append(
{
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 3e4203f7..bb01d171 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -669,26 +669,27 @@ CREATE TABLE facetable (
neighborhood text,
tags text,
complex_array text,
+ distinct_some_null,
FOREIGN KEY ("city_id") REFERENCES [facet_cities](id)
);
INSERT INTO facetable
- (created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array)
+ (created, planet_int, on_earth, state, city_id, neighborhood, tags, complex_array, distinct_some_null)
VALUES
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]'),
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]'),
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]', '[]'),
- ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Hollywood', '[]', '[]'),
- ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]', '[]'),
- ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]'),
- ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]', '[]'),
- ("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]', '[]'),
- ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]', '[]'),
- ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]', '[]'),
- ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]'),
- ("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]')
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]', 'one'),
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]', 'two'),
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]', '[]', null),
+ ("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Hollywood', '[]', '[]', null),
+ ("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]', '[]', null),
+ ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]', null),
+ ("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]', '[]', null),
+ ("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]', null),
+ ("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]', null)
;
CREATE TABLE binary_data (
diff --git a/tests/test_api.py b/tests/test_api.py
index 1fa8642f..34eef4ce 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -197,6 +197,7 @@ def test_database_page(app_client):
"neighborhood",
"tags",
"complex_array",
+ "distinct_some_null",
],
"primary_keys": ["pk"],
"count": 15,
@@ -1042,15 +1043,38 @@ def test_table_filter_json_arraycontains(app_client):
"Mission",
'["tag1", "tag2"]',
'[{"foo": "bar"}]',
+ "one",
+ ],
+ [
+ 2,
+ "2019-01-14 08:00:00",
+ 1,
+ 1,
+ "CA",
+ 1,
+ "Dogpatch",
+ '["tag1", "tag3"]',
+ "[]",
+ "two",
],
- [2, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Dogpatch", '["tag1", "tag3"]', "[]"],
] == response.json["rows"]
def test_table_filter_extra_where(app_client):
response = app_client.get("/fixtures/facetable.json?_where=neighborhood='Dogpatch'")
assert [
- [2, "2019-01-14 08:00:00", 1, 1, "CA", 1, "Dogpatch", '["tag1", "tag3"]', "[]"]
+ [
+ 2,
+ "2019-01-14 08:00:00",
+ 1,
+ 1,
+ "CA",
+ 1,
+ "Dogpatch",
+ '["tag1", "tag3"]',
+ "[]",
+ "two",
+ ]
] == response.json["rows"]
@@ -1503,6 +1527,7 @@ def test_expand_labels(app_client):
"neighborhood": "Dogpatch",
"tags": '["tag1", "tag3"]',
"complex_array": "[]",
+ "distinct_some_null": "two",
},
"13": {
"pk": 13,
@@ -1514,6 +1539,7 @@ def test_expand_labels(app_client):
"neighborhood": "Corktown",
"tags": "[]",
"complex_array": "[]",
+ "distinct_some_null": None,
},
} == response.json
diff --git a/tests/test_csv.py b/tests/test_csv.py
index 13aca489..1030c2bb 100644
--- a/tests/test_csv.py
+++ b/tests/test_csv.py
@@ -21,22 +21,22 @@ world
)
EXPECTED_TABLE_WITH_LABELS_CSV = """
-pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,complex_array
-1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]"
-2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]",[]
-3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[]
-4,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Tenderloin,[],[]
-5,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Bernal Heights,[],[]
-6,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Hayes Valley,[],[]
-7,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Hollywood,[],[]
-8,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Downtown,[],[]
-9,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Los Feliz,[],[]
-10,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Koreatown,[],[]
-11,2019-01-16 08:00:00,1,1,MI,3,Detroit,Downtown,[],[]
-12,2019-01-17 08:00:00,1,1,MI,3,Detroit,Greektown,[],[]
-13,2019-01-17 08:00:00,1,1,MI,3,Detroit,Corktown,[],[]
-14,2019-01-17 08:00:00,1,1,MI,3,Detroit,Mexicantown,[],[]
-15,2019-01-17 08:00:00,2,0,MC,4,Memnonia,Arcadia Planitia,[],[]
+pk,created,planet_int,on_earth,state,city_id,city_id_label,neighborhood,tags,complex_array,distinct_some_null
+1,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Mission,"[""tag1"", ""tag2""]","[{""foo"": ""bar""}]",one
+2,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Dogpatch,"[""tag1"", ""tag3""]",[],two
+3,2019-01-14 08:00:00,1,1,CA,1,San Francisco,SOMA,[],[],
+4,2019-01-14 08:00:00,1,1,CA,1,San Francisco,Tenderloin,[],[],
+5,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Bernal Heights,[],[],
+6,2019-01-15 08:00:00,1,1,CA,1,San Francisco,Hayes Valley,[],[],
+7,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Hollywood,[],[],
+8,2019-01-15 08:00:00,1,1,CA,2,Los Angeles,Downtown,[],[],
+9,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Los Feliz,[],[],
+10,2019-01-16 08:00:00,1,1,CA,2,Los Angeles,Koreatown,[],[],
+11,2019-01-16 08:00:00,1,1,MI,3,Detroit,Downtown,[],[],
+12,2019-01-17 08:00:00,1,1,MI,3,Detroit,Greektown,[],[],
+13,2019-01-17 08:00:00,1,1,MI,3,Detroit,Corktown,[],[],
+14,2019-01-17 08:00:00,1,1,MI,3,Detroit,Mexicantown,[],[],
+15,2019-01-17 08:00:00,2,0,MC,4,Memnonia,Arcadia Planitia,[],[],
""".lstrip().replace(
"\n", "\r\n"
)
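To illustrate the new suggestion logic, here is a rough standalone rendition of the rewritten query plus the n > 1 check (table, column, and data are illustrative, and the real code reads r["n"] from Datasette's row objects rather than tuples):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("create table facetable (state text)")
    conn.executemany(
        "insert into facetable values (?)", [("CA",), ("CA",), ("MI",)]
    )

    facet_size = 30
    # Mirrors suggested_facet_sql: count rows per distinct non-null value
    distinct_values = conn.execute(
        """
        select state, count(*) as n from (
            select * from facetable
        ) where state is not null
        group by state
        limit {}
        """.format(facet_size + 1)
    ).fetchall()

    # The new condition: only suggest a facet if at least one value repeats
    print(any(n > 1 for _value, n in distinct_values))  # True: "CA" appears twice

Under the old distinct-only query, a column like the new distinct_some_null fixture column (one 'one', one 'two', the rest null) would have been suggested even though every non-null value is unique; the n > 1 check now filters it out.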
From d3e1c3017ee2f606a731208d59fe48805cdc3259 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 22 Nov 2019 22:07:01 -0800
Subject: [PATCH 0303/2321] Display 0 results, closes #637
---
datasette/static/app.css | 7 +++++
datasette/templates/_table.html | 56 ++++++++++++++++++---------------
datasette/templates/query.html | 2 ++
tests/test_html.py | 14 +++++++++
4 files changed, 53 insertions(+), 26 deletions(-)
diff --git a/datasette/static/app.css b/datasette/static/app.css
index 34eb122c..d7cf6334 100644
--- a/datasette/static/app.css
+++ b/datasette/static/app.css
@@ -327,3 +327,10 @@ a.not-underlined {
pre.wrapped-sql {
white-space: pre-wrap;
}
+
+p.zero-results {
+ border: 2px solid #ccc;
+ background-color: #eee;
+ padding: 0.5em;
+ font-style: italic;
+}
diff --git a/datasette/templates/_table.html b/datasette/templates/_table.html
index c7a72253..42c37c55 100644
--- a/datasette/templates/_table.html
+++ b/datasette/templates/_table.html
@@ -1,28 +1,32 @@
-
-
-
- {% for column in display_columns %}
-
- {% if not column.sortable %}
- {{ column.name }}
- {% else %}
- {% if column.name == sort %}
- {{ column.name }} ▼
+{% if display_rows %}
+