Date: Thu, 12 Aug 2021 18:10:36 -0700
Subject: [PATCH 0003/1160] Rename config= to settings=, refs #1432
---
datasette/app.py | 8 ++++----
datasette/cli.py | 8 ++++----
datasette/templates/table.html | 2 +-
datasette/views/base.py | 2 +-
datasette/views/database.py | 2 +-
tests/fixtures.py | 20 ++++++++++----------
tests/test_api.py | 8 ++++----
tests/test_custom_pages.py | 2 +-
tests/test_facets.py | 2 +-
tests/test_html.py | 14 ++++++++------
10 files changed, 35 insertions(+), 33 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index f2f75884..8cbaaf9f 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -200,7 +200,7 @@ class Datasette:
plugins_dir=None,
static_mounts=None,
memory=False,
- config=None,
+ settings=None,
secret=None,
version_note=None,
config_dir=None,
@@ -279,7 +279,7 @@ class Datasette:
raise StartupError("config.json should be renamed to settings.json")
if config_dir and (config_dir / "settings.json").exists() and not config:
config = json.loads((config_dir / "settings.json").read_text())
- self._settings = dict(DEFAULT_SETTINGS, **(config or {}))
+ self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
self.renderers = {} # File extension -> (renderer, can_render) functions
self.version_note = version_note
self.executor = futures.ThreadPoolExecutor(
@@ -419,8 +419,8 @@ class Datasette:
def setting(self, key):
return self._settings.get(key, None)
- def config_dict(self):
- # Returns a fully resolved config dictionary, useful for templates
+ def settings_dict(self):
+ # Returns a fully resolved settings dictionary, useful for templates
return {option.name: self.setting(option.name) for option in SETTINGS}
def _metadata_recursive_update(self, orig, updated):
diff --git a/datasette/cli.py b/datasette/cli.py
index d4e23c70..ea6da748 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -495,14 +495,14 @@ def serve(
if metadata:
metadata_data = parse_metadata(metadata.read())
- combined_config = {}
+ combined_settings = {}
if config:
click.echo(
"--config name:value will be deprecated in Datasette 1.0, use --setting name value instead",
err=True,
)
- combined_config.update(config)
- combined_config.update(settings)
+ combined_settings.update(config)
+ combined_settings.update(settings)
kwargs = dict(
immutables=immutable,
@@ -514,7 +514,7 @@ def serve(
template_dir=template_dir,
plugins_dir=plugins_dir,
static_mounts=static,
- config=combined_config,
+ settings=combined_settings,
memory=memory,
secret=secret,
version_note=version_note,
diff --git a/datasette/templates/table.html b/datasette/templates/table.html
index 466e8a47..a28945ad 100644
--- a/datasette/templates/table.html
+++ b/datasette/templates/table.html
@@ -201,7 +201,7 @@
CSV options:
{% if expandable_columns %}{% endif %}
- {% if next_url and config.allow_csv_stream %}{% endif %}
+ {% if next_url and settings.allow_csv_stream %}{% endif %}
{% for key, value in url_csv_hidden_args %}
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 1cea1386..3333781c 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -614,7 +614,7 @@ class DataView(BaseView):
]
+ [("_size", "max")],
"datasette_version": __version__,
- "config": self.ds.config_dict(),
+ "settings": self.ds.settings_dict(),
},
}
if "metadata" not in context:
diff --git a/datasette/views/database.py b/datasette/views/database.py
index 7c36034c..e3070ce6 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -456,7 +456,7 @@ class QueryView(DataView):
"canned_query": canned_query,
"edit_sql_url": edit_sql_url,
"metadata": metadata,
- "config": self.ds.config_dict(),
+ "settings": self.ds.settings_dict(),
"request": request,
"show_hide_link": show_hide_link,
"show_hide_text": show_hide_text,
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 4a420e4b..dc22c609 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -99,7 +99,7 @@ def make_app_client(
max_returned_rows=None,
cors=False,
memory=False,
- config=None,
+ settings=None,
filename="fixtures.db",
is_immutable=False,
extra_databases=None,
@@ -129,7 +129,7 @@ def make_app_client(
# Insert at start to help test /-/databases ordering:
files.insert(0, extra_filepath)
os.chdir(os.path.dirname(filepath))
- config = config or {}
+ settings = settings or {}
for key, value in {
"default_page_size": 50,
"max_returned_rows": max_returned_rows or 100,
@@ -138,8 +138,8 @@ def make_app_client(
# errors when running the full test suite:
"num_sql_threads": 1,
}.items():
- if key not in config:
- config[key] = value
+ if key not in settings:
+ settings[key] = value
ds = Datasette(
files,
immutables=immutables,
@@ -147,7 +147,7 @@ def make_app_client(
cors=cors,
metadata=metadata or METADATA,
plugins_dir=PLUGINS_DIR,
- config=config,
+ settings=settings,
inspect_data=inspect_data,
static_mounts=static_mounts,
template_dir=template_dir,
@@ -171,7 +171,7 @@ def app_client_no_files():
@pytest.fixture(scope="session")
def app_client_base_url_prefix():
- with make_app_client(config={"base_url": "/prefix/"}) as client:
+ with make_app_client(settings={"base_url": "/prefix/"}) as client:
yield client
@@ -210,13 +210,13 @@ def app_client_two_attached_databases_one_immutable():
@pytest.fixture(scope="session")
def app_client_with_hash():
- with make_app_client(config={"hash_urls": True}, is_immutable=True) as client:
+ with make_app_client(settings={"hash_urls": True}, is_immutable=True) as client:
yield client
@pytest.fixture(scope="session")
def app_client_with_trace():
- with make_app_client(config={"trace_debug": True}, is_immutable=True) as client:
+ with make_app_client(settings={"trace_debug": True}, is_immutable=True) as client:
yield client
@@ -234,13 +234,13 @@ def app_client_returned_rows_matches_page_size():
@pytest.fixture(scope="session")
def app_client_larger_cache_size():
- with make_app_client(config={"cache_size_kb": 2500}) as client:
+ with make_app_client(settings={"cache_size_kb": 2500}) as client:
yield client
@pytest.fixture(scope="session")
def app_client_csv_max_mb_one():
- with make_app_client(config={"max_csv_mb": 1}) as client:
+ with make_app_client(settings={"max_csv_mb": 1}) as client:
yield client
diff --git a/tests/test_api.py b/tests/test_api.py
index 83cca521..1e93c62e 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1711,14 +1711,14 @@ def test_suggested_facets(app_client):
def test_allow_facet_off():
- with make_app_client(config={"allow_facet": False}) as client:
+ with make_app_client(settings={"allow_facet": False}) as client:
assert 400 == client.get("/fixtures/facetable.json?_facet=planet_int").status
# Should not suggest any facets either:
assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"]
def test_suggest_facets_off():
- with make_app_client(config={"suggest_facets": False}) as client:
+ with make_app_client(settings={"suggest_facets": False}) as client:
# Now suggested_facets should be []
assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"]
@@ -1883,7 +1883,7 @@ def test_config_cache_size(app_client_larger_cache_size):
def test_config_force_https_urls():
- with make_app_client(config={"force_https_urls": True}) as client:
+ with make_app_client(settings={"force_https_urls": True}) as client:
response = client.get("/fixtures/facetable.json?_size=3&_facet=state")
assert response.json["next_url"].startswith("https://")
assert response.json["facet_results"]["state"]["results"][0][
@@ -1921,7 +1921,7 @@ def test_custom_query_with_unicode_characters(app_client):
@pytest.mark.parametrize("trace_debug", (True, False))
def test_trace(trace_debug):
- with make_app_client(config={"trace_debug": trace_debug}) as client:
+ with make_app_client(settings={"trace_debug": trace_debug}) as client:
response = client.get("/fixtures/simple_primary_key.json?_trace=1")
assert response.status == 200
diff --git a/tests/test_custom_pages.py b/tests/test_custom_pages.py
index 5a71f56d..76c67397 100644
--- a/tests/test_custom_pages.py
+++ b/tests/test_custom_pages.py
@@ -14,7 +14,7 @@ def custom_pages_client():
@pytest.fixture(scope="session")
def custom_pages_client_with_base_url():
with make_app_client(
- template_dir=TEST_TEMPLATE_DIRS, config={"base_url": "/prefix/"}
+ template_dir=TEST_TEMPLATE_DIRS, settings={"base_url": "/prefix/"}
) as client:
yield client
diff --git a/tests/test_facets.py b/tests/test_facets.py
index 18fb8c3b..22927512 100644
--- a/tests/test_facets.py
+++ b/tests/test_facets.py
@@ -351,7 +351,7 @@ async def test_json_array_with_blanks_and_nulls():
@pytest.mark.asyncio
async def test_facet_size():
- ds = Datasette([], memory=True, config={"max_returned_rows": 50})
+ ds = Datasette([], memory=True, settings={"max_returned_rows": 50})
db = ds.add_database(Database(ds, memory_name="test_facet_size"))
await db.execute_write(
"create table neighbourhoods(city text, neighbourhood text)", block=True
diff --git a/tests/test_html.py b/tests/test_html.py
index f12f89cd..90fcdae7 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -214,7 +214,7 @@ def test_definition_sql(path, expected_definition_sql, app_client):
def test_table_cell_truncation():
- with make_app_client(config={"truncate_cells_html": 5}) as client:
+ with make_app_client(settings={"truncate_cells_html": 5}) as client:
response = client.get("/fixtures/facetable")
assert response.status == 200
table = Soup(response.body, "html.parser").find("table")
@@ -239,7 +239,7 @@ def test_table_cell_truncation():
def test_row_page_does_not_truncate():
- with make_app_client(config={"truncate_cells_html": 5}) as client:
+ with make_app_client(settings={"truncate_cells_html": 5}) as client:
response = client.get("/fixtures/facetable/1")
assert response.status == 200
table = Soup(response.body, "html.parser").find("table")
@@ -1072,7 +1072,9 @@ def test_database_download_disallowed_for_memory():
def test_allow_download_off():
- with make_app_client(is_immutable=True, config={"allow_download": False}) as client:
+ with make_app_client(
+ is_immutable=True, settings={"allow_download": False}
+ ) as client:
response = client.get("/fixtures")
soup = Soup(response.body, "html.parser")
assert not len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
@@ -1486,7 +1488,7 @@ def test_query_error(app_client):
def test_config_template_debug_on():
- with make_app_client(config={"template_debug": True}) as client:
+ with make_app_client(settings={"template_debug": True}) as client:
response = client.get("/fixtures/facetable?_context=1")
assert response.status == 200
assert response.text.startswith("{")
@@ -1500,7 +1502,7 @@ def test_config_template_debug_off(app_client):
def test_debug_context_includes_extra_template_vars():
# https://github.com/simonw/datasette/issues/693
- with make_app_client(config={"template_debug": True}) as client:
+ with make_app_client(settings={"template_debug": True}) as client:
response = client.get("/fixtures/facetable?_context=1")
# scope_path is added by PLUGIN1
assert "scope_path" in response.text
@@ -1744,7 +1746,7 @@ def test_facet_more_links(
expected_ellipses_url,
):
with make_app_client(
- config={"max_returned_rows": max_returned_rows, "default_facet_size": 2}
+ settings={"max_returned_rows": max_returned_rows, "default_facet_size": 2}
) as client:
response = client.get(path)
soup = Soup(response.body, "html.parser")
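A minimal sketch of the backwards-compatibility merge performed in serve() above: the deprecated --config name:value pairs are applied first and the new --setting name value pairs second, so --setting wins when both supply the same key (the values below are illustrative, not Datasette defaults):

    # Hypothetical parsed CLI input:
    #   --config default_page_size:50    (deprecated form)
    #   --setting default_page_size 100  (new form)
    config = {"default_page_size": 50}
    settings = {"default_page_size": 100}

    combined_settings = {}
    combined_settings.update(config)    # deprecated values applied first
    combined_settings.update(settings)  # --setting values applied second, so they win

    assert combined_settings == {"default_page_size": 100}
    # combined_settings is then passed as Datasette(settings=combined_settings)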
From bbc4756f9e8180c7a40c57f8a35e39dee7be7807 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 12 Aug 2021 20:54:25 -0700
Subject: [PATCH 0004/1160] Settings fix, refs #1433
---
datasette/app.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/datasette/app.py b/datasette/app.py
index 8cbaaf9f..adc543ef 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -277,7 +277,7 @@ class Datasette:
self.static_mounts = static_mounts or []
if config_dir and (config_dir / "config.json").exists():
raise StartupError("config.json should be renamed to settings.json")
- if config_dir and (config_dir / "settings.json").exists() and not config:
+ if config_dir and (config_dir / "settings.json").exists() and not settings:
config = json.loads((config_dir / "settings.json").read_text())
self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
self.renderers = {} # File extension -> (renderer, can_render) functions
From 2883098770fc66e50183b2b231edbde20848d4d6 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 12 Aug 2021 22:10:07 -0700
Subject: [PATCH 0005/1160] Fixed config_dir mode, refs #1432
---
datasette/app.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/datasette/app.py b/datasette/app.py
index adc543ef..06db740e 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -278,7 +278,7 @@ class Datasette:
if config_dir and (config_dir / "config.json").exists():
raise StartupError("config.json should be renamed to settings.json")
if config_dir and (config_dir / "settings.json").exists() and not settings:
- config = json.loads((config_dir / "settings.json").read_text())
+ settings = json.loads((config_dir / "settings.json").read_text())
self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
self.renderers = {} # File extension -> (renderer, can_render) functions
self.version_note = version_note
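Taken together, patches 0003-0005 restore config_dir mode with the new keyword argument. A minimal sketch of the behaviour, assuming the constructor as patched above (paths and values are illustrative):

    import json
    import tempfile
    from pathlib import Path

    from datasette.app import Datasette

    config_dir = Path(tempfile.mkdtemp())
    (config_dir / "settings.json").write_text(json.dumps({"default_page_size": 10}))

    # With no explicit settings= argument, settings.json in the directory is loaded
    ds = Datasette([], config_dir=config_dir)
    assert ds.setting("default_page_size") == 10

    # An explicit settings= argument takes precedence over settings.json
    ds2 = Datasette([], config_dir=config_dir, settings={"default_page_size": 20})
    assert ds2.setting("default_page_size") == 20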
From d2de17987bf504fdf5485df6d19adf6810575e2d Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 12 Aug 2021 18:01:57 -0700
Subject: [PATCH 0006/1160] Rename --help-config to --help-settings, closes
#1431
---
datasette/cli.py | 12 ++++++------
docs/datasette-serve-help.txt | 2 +-
tests/test_cli.py | 10 +++++++++-
3 files changed, 16 insertions(+), 8 deletions(-)
diff --git a/datasette/cli.py b/datasette/cli.py
index e53f3d8e..d4e23c70 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -51,7 +51,7 @@ class Config(click.ParamType):
name, value = config.split(":", 1)
if name not in DEFAULT_SETTINGS:
self.fail(
- f"{name} is not a valid option (--help-config to see all)",
+ f"{name} is not a valid option (--help-settings to see all)",
param,
ctx,
)
@@ -84,7 +84,7 @@ class Setting(CompositeParamType):
name, value = config
if name not in DEFAULT_SETTINGS:
self.fail(
- f"{name} is not a valid option (--help-config to see all)",
+ f"{name} is not a valid option (--help-settings to see all)",
param,
ctx,
)
@@ -408,7 +408,7 @@ def uninstall(packages, yes):
help="Run an HTTP GET request against this path, print results and exit",
)
@click.option("--version-note", help="Additional note to show on /-/versions")
-@click.option("--help-config", is_flag=True, help="Show available config options")
+@click.option("--help-settings", is_flag=True, help="Show available settings")
@click.option("--pdb", is_flag=True, help="Launch debugger on any errors")
@click.option(
"-o",
@@ -456,7 +456,7 @@ def serve(
root,
get,
version_note,
- help_config,
+ help_settings,
pdb,
open_browser,
create,
@@ -466,9 +466,9 @@ def serve(
return_instance=False,
):
"""Serve up specified SQLite database files with a web UI"""
- if help_config:
+ if help_settings:
formatter = formatting.HelpFormatter()
- with formatter.section("Config options"):
+ with formatter.section("Settings"):
formatter.write_dl(
[
(option.name, f"{option.help} (default={option.default})")
diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt
index ec3f41a0..2911977a 100644
--- a/docs/datasette-serve-help.txt
+++ b/docs/datasette-serve-help.txt
@@ -32,7 +32,7 @@ Options:
--get TEXT Run an HTTP GET request against this path, print results and
exit
--version-note TEXT Additional note to show on /-/versions
- --help-config Show available config options
+ --help-settings Show available settings
--pdb Launch debugger on any errors
-o, --open Open Datasette in your web browser
--create Create database files if they do not exist
diff --git a/tests/test_cli.py b/tests/test_cli.py
index e31a305e..763fe2e7 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -5,6 +5,7 @@ from .fixtures import (
EXPECTED_PLUGINS,
)
import asyncio
+from datasette.app import SETTINGS
from datasette.plugins import DEFAULT_PLUGINS
from datasette.cli import cli, serve
from datasette.version import __version__
@@ -147,7 +148,7 @@ def test_metadata_yaml():
root=False,
version_note=None,
get=None,
- help_config=False,
+ help_settings=False,
pdb=False,
crossdb=False,
open_browser=False,
@@ -291,3 +292,10 @@ def test_weird_database_names(ensure_eventloop, tmpdir, filename):
cli, [db_path, "--get", "/{}".format(urllib.parse.quote(filename_no_stem))]
)
assert result2.exit_code == 0, result2.output
+
+
+def test_help_settings():
+ runner = CliRunner()
+ result = runner.invoke(cli, ["--help-settings"])
+ for setting in SETTINGS:
+ assert setting.name in result.output
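For reference, a small sketch of how the --help-settings output is assembled in serve() above, using Click's help formatter directly (SETTINGS is the same list imported by the new test):

    from click import formatting

    from datasette.app import SETTINGS

    formatter = formatting.HelpFormatter()
    with formatter.section("Settings"):
        formatter.write_dl(
            [
                (option.name, f"{option.help} (default={option.default})")
                for option in SETTINGS
            ]
        )
    print(formatter.getvalue())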
From 07b6c9dd35079a4cbfba5ad6c93ca86320b8b9b5 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 12 Aug 2021 18:10:36 -0700
Subject: [PATCH 0007/1160] Rename config= to settings=, refs #1432
---
datasette/app.py | 8 ++++----
datasette/cli.py | 8 ++++----
datasette/templates/table.html | 2 +-
datasette/views/base.py | 2 +-
datasette/views/database.py | 2 +-
tests/fixtures.py | 20 ++++++++++----------
tests/test_api.py | 8 ++++----
tests/test_custom_pages.py | 2 +-
tests/test_facets.py | 2 +-
tests/test_html.py | 14 ++++++++------
10 files changed, 35 insertions(+), 33 deletions(-)
diff --git a/datasette/app.py b/datasette/app.py
index f2f75884..8cbaaf9f 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -200,7 +200,7 @@ class Datasette:
plugins_dir=None,
static_mounts=None,
memory=False,
- config=None,
+ settings=None,
secret=None,
version_note=None,
config_dir=None,
@@ -279,7 +279,7 @@ class Datasette:
raise StartupError("config.json should be renamed to settings.json")
if config_dir and (config_dir / "settings.json").exists() and not config:
config = json.loads((config_dir / "settings.json").read_text())
- self._settings = dict(DEFAULT_SETTINGS, **(config or {}))
+ self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
self.renderers = {} # File extension -> (renderer, can_render) functions
self.version_note = version_note
self.executor = futures.ThreadPoolExecutor(
@@ -419,8 +419,8 @@ class Datasette:
def setting(self, key):
return self._settings.get(key, None)
- def config_dict(self):
- # Returns a fully resolved config dictionary, useful for templates
+ def settings_dict(self):
+ # Returns a fully resolved settings dictionary, useful for templates
return {option.name: self.setting(option.name) for option in SETTINGS}
def _metadata_recursive_update(self, orig, updated):
diff --git a/datasette/cli.py b/datasette/cli.py
index d4e23c70..ea6da748 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -495,14 +495,14 @@ def serve(
if metadata:
metadata_data = parse_metadata(metadata.read())
- combined_config = {}
+ combined_settings = {}
if config:
click.echo(
"--config name:value will be deprecated in Datasette 1.0, use --setting name value instead",
err=True,
)
- combined_config.update(config)
- combined_config.update(settings)
+ combined_settings.update(config)
+ combined_settings.update(settings)
kwargs = dict(
immutables=immutable,
@@ -514,7 +514,7 @@ def serve(
template_dir=template_dir,
plugins_dir=plugins_dir,
static_mounts=static,
- config=combined_config,
+ settings=combined_settings,
memory=memory,
secret=secret,
version_note=version_note,
diff --git a/datasette/templates/table.html b/datasette/templates/table.html
index 466e8a47..a28945ad 100644
--- a/datasette/templates/table.html
+++ b/datasette/templates/table.html
@@ -201,7 +201,7 @@
CSV options:
{% if expandable_columns %}{% endif %}
- {% if next_url and config.allow_csv_stream %}{% endif %}
+ {% if next_url and settings.allow_csv_stream %}{% endif %}
{% for key, value in url_csv_hidden_args %}
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 1cea1386..3333781c 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -614,7 +614,7 @@ class DataView(BaseView):
]
+ [("_size", "max")],
"datasette_version": __version__,
- "config": self.ds.config_dict(),
+ "settings": self.ds.settings_dict(),
},
}
if "metadata" not in context:
diff --git a/datasette/views/database.py b/datasette/views/database.py
index 7b1f1923..ddea1d88 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -465,7 +465,7 @@ class QueryView(DataView):
"canned_query": canned_query,
"edit_sql_url": edit_sql_url,
"metadata": metadata,
- "config": self.ds.config_dict(),
+ "settings": self.ds.settings_dict(),
"request": request,
"show_hide_link": show_hide_link,
"show_hide_text": show_hide_text,
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 4a420e4b..dc22c609 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -99,7 +99,7 @@ def make_app_client(
max_returned_rows=None,
cors=False,
memory=False,
- config=None,
+ settings=None,
filename="fixtures.db",
is_immutable=False,
extra_databases=None,
@@ -129,7 +129,7 @@ def make_app_client(
# Insert at start to help test /-/databases ordering:
files.insert(0, extra_filepath)
os.chdir(os.path.dirname(filepath))
- config = config or {}
+ settings = settings or {}
for key, value in {
"default_page_size": 50,
"max_returned_rows": max_returned_rows or 100,
@@ -138,8 +138,8 @@ def make_app_client(
# errors when running the full test suite:
"num_sql_threads": 1,
}.items():
- if key not in config:
- config[key] = value
+ if key not in settings:
+ settings[key] = value
ds = Datasette(
files,
immutables=immutables,
@@ -147,7 +147,7 @@ def make_app_client(
cors=cors,
metadata=metadata or METADATA,
plugins_dir=PLUGINS_DIR,
- config=config,
+ settings=settings,
inspect_data=inspect_data,
static_mounts=static_mounts,
template_dir=template_dir,
@@ -171,7 +171,7 @@ def app_client_no_files():
@pytest.fixture(scope="session")
def app_client_base_url_prefix():
- with make_app_client(config={"base_url": "/prefix/"}) as client:
+ with make_app_client(settings={"base_url": "/prefix/"}) as client:
yield client
@@ -210,13 +210,13 @@ def app_client_two_attached_databases_one_immutable():
@pytest.fixture(scope="session")
def app_client_with_hash():
- with make_app_client(config={"hash_urls": True}, is_immutable=True) as client:
+ with make_app_client(settings={"hash_urls": True}, is_immutable=True) as client:
yield client
@pytest.fixture(scope="session")
def app_client_with_trace():
- with make_app_client(config={"trace_debug": True}, is_immutable=True) as client:
+ with make_app_client(settings={"trace_debug": True}, is_immutable=True) as client:
yield client
@@ -234,13 +234,13 @@ def app_client_returned_rows_matches_page_size():
@pytest.fixture(scope="session")
def app_client_larger_cache_size():
- with make_app_client(config={"cache_size_kb": 2500}) as client:
+ with make_app_client(settings={"cache_size_kb": 2500}) as client:
yield client
@pytest.fixture(scope="session")
def app_client_csv_max_mb_one():
- with make_app_client(config={"max_csv_mb": 1}) as client:
+ with make_app_client(settings={"max_csv_mb": 1}) as client:
yield client
diff --git a/tests/test_api.py b/tests/test_api.py
index 83cca521..1e93c62e 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1711,14 +1711,14 @@ def test_suggested_facets(app_client):
def test_allow_facet_off():
- with make_app_client(config={"allow_facet": False}) as client:
+ with make_app_client(settings={"allow_facet": False}) as client:
assert 400 == client.get("/fixtures/facetable.json?_facet=planet_int").status
# Should not suggest any facets either:
assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"]
def test_suggest_facets_off():
- with make_app_client(config={"suggest_facets": False}) as client:
+ with make_app_client(settings={"suggest_facets": False}) as client:
# Now suggested_facets should be []
assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"]
@@ -1883,7 +1883,7 @@ def test_config_cache_size(app_client_larger_cache_size):
def test_config_force_https_urls():
- with make_app_client(config={"force_https_urls": True}) as client:
+ with make_app_client(settings={"force_https_urls": True}) as client:
response = client.get("/fixtures/facetable.json?_size=3&_facet=state")
assert response.json["next_url"].startswith("https://")
assert response.json["facet_results"]["state"]["results"][0][
@@ -1921,7 +1921,7 @@ def test_custom_query_with_unicode_characters(app_client):
@pytest.mark.parametrize("trace_debug", (True, False))
def test_trace(trace_debug):
- with make_app_client(config={"trace_debug": trace_debug}) as client:
+ with make_app_client(settings={"trace_debug": trace_debug}) as client:
response = client.get("/fixtures/simple_primary_key.json?_trace=1")
assert response.status == 200
diff --git a/tests/test_custom_pages.py b/tests/test_custom_pages.py
index 5a71f56d..76c67397 100644
--- a/tests/test_custom_pages.py
+++ b/tests/test_custom_pages.py
@@ -14,7 +14,7 @@ def custom_pages_client():
@pytest.fixture(scope="session")
def custom_pages_client_with_base_url():
with make_app_client(
- template_dir=TEST_TEMPLATE_DIRS, config={"base_url": "/prefix/"}
+ template_dir=TEST_TEMPLATE_DIRS, settings={"base_url": "/prefix/"}
) as client:
yield client
diff --git a/tests/test_facets.py b/tests/test_facets.py
index 18fb8c3b..22927512 100644
--- a/tests/test_facets.py
+++ b/tests/test_facets.py
@@ -351,7 +351,7 @@ async def test_json_array_with_blanks_and_nulls():
@pytest.mark.asyncio
async def test_facet_size():
- ds = Datasette([], memory=True, config={"max_returned_rows": 50})
+ ds = Datasette([], memory=True, settings={"max_returned_rows": 50})
db = ds.add_database(Database(ds, memory_name="test_facet_size"))
await db.execute_write(
"create table neighbourhoods(city text, neighbourhood text)", block=True
diff --git a/tests/test_html.py b/tests/test_html.py
index f12f89cd..90fcdae7 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -214,7 +214,7 @@ def test_definition_sql(path, expected_definition_sql, app_client):
def test_table_cell_truncation():
- with make_app_client(config={"truncate_cells_html": 5}) as client:
+ with make_app_client(settings={"truncate_cells_html": 5}) as client:
response = client.get("/fixtures/facetable")
assert response.status == 200
table = Soup(response.body, "html.parser").find("table")
@@ -239,7 +239,7 @@ def test_table_cell_truncation():
def test_row_page_does_not_truncate():
- with make_app_client(config={"truncate_cells_html": 5}) as client:
+ with make_app_client(settings={"truncate_cells_html": 5}) as client:
response = client.get("/fixtures/facetable/1")
assert response.status == 200
table = Soup(response.body, "html.parser").find("table")
@@ -1072,7 +1072,9 @@ def test_database_download_disallowed_for_memory():
def test_allow_download_off():
- with make_app_client(is_immutable=True, config={"allow_download": False}) as client:
+ with make_app_client(
+ is_immutable=True, settings={"allow_download": False}
+ ) as client:
response = client.get("/fixtures")
soup = Soup(response.body, "html.parser")
assert not len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
@@ -1486,7 +1488,7 @@ def test_query_error(app_client):
def test_config_template_debug_on():
- with make_app_client(config={"template_debug": True}) as client:
+ with make_app_client(settings={"template_debug": True}) as client:
response = client.get("/fixtures/facetable?_context=1")
assert response.status == 200
assert response.text.startswith("{")
@@ -1500,7 +1502,7 @@ def test_config_template_debug_off(app_client):
def test_debug_context_includes_extra_template_vars():
# https://github.com/simonw/datasette/issues/693
- with make_app_client(config={"template_debug": True}) as client:
+ with make_app_client(settings={"template_debug": True}) as client:
response = client.get("/fixtures/facetable?_context=1")
# scope_path is added by PLUGIN1
assert "scope_path" in response.text
@@ -1744,7 +1746,7 @@ def test_facet_more_links(
expected_ellipses_url,
):
with make_app_client(
- config={"max_returned_rows": max_returned_rows, "default_facet_size": 2}
+ settings={"max_returned_rows": max_returned_rows, "default_facet_size": 2}
) as client:
response = client.get(path)
soup = Soup(response.body, "html.parser")
From f7d2bcc75a6f407b1c8726e9ee1058e7e2dc2f60 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 12 Aug 2021 20:54:25 -0700
Subject: [PATCH 0008/1160] Settings fix, refs #1433
---
datasette/app.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/datasette/app.py b/datasette/app.py
index 8cbaaf9f..adc543ef 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -277,7 +277,7 @@ class Datasette:
self.static_mounts = static_mounts or []
if config_dir and (config_dir / "config.json").exists():
raise StartupError("config.json should be renamed to settings.json")
- if config_dir and (config_dir / "settings.json").exists() and not config:
+ if config_dir and (config_dir / "settings.json").exists() and not settings:
config = json.loads((config_dir / "settings.json").read_text())
self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
self.renderers = {} # File extension -> (renderer, can_render) functions
From 44699ebb6388c0ff1d5299dccaede46014dee1a2 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 12 Aug 2021 22:10:07 -0700
Subject: [PATCH 0009/1160] Fixed config_dir mode, refs #1432
---
datasette/app.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/datasette/app.py b/datasette/app.py
index adc543ef..06db740e 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -278,7 +278,7 @@ class Datasette:
if config_dir and (config_dir / "config.json").exists():
raise StartupError("config.json should be renamed to settings.json")
if config_dir and (config_dir / "settings.json").exists() and not settings:
- config = json.loads((config_dir / "settings.json").read_text())
+ settings = json.loads((config_dir / "settings.json").read_text())
self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
self.renderers = {} # File extension -> (renderer, can_render) functions
self.version_note = version_note
From 62aac6593a12bbdd3d19ea184147fe650bdd6f5e Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 13 Aug 2021 08:33:13 -0700
Subject: [PATCH 0010/1160] Handle some error conditions
---
datasette/utils/__init__.py | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 69c72566..a66bf0a1 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -1098,6 +1098,8 @@ def columns_for_query(conn, sql, params=None):
per returned column. ``(None, None)`` if no table and column
could be derived.
"""
+ if sql.lower().strip().startswith("explain"):
+ return []
rows = conn.execute("explain " + sql, params).fetchall()
table_rootpage_by_register = {
r["p1"]: r["p2"] for r in rows if r["opcode"] == "OpenRead"
@@ -1113,8 +1115,11 @@ def columns_for_query(conn, sql, params=None):
for row in rows:
if row["opcode"] in ("Rowid", "Column"):
addr, opcode, table_id, cid, column_register, p4, p5, comment = row
- table = names_by_rootpage[table_rootpage_by_register[table_id]]
- columns_by_column_register[column_register] = (table, cid)
+ try:
+ table = names_by_rootpage[table_rootpage_by_register[table_id]]
+ columns_by_column_register[column_register] = (table, cid)
+ except KeyError:
+ pass
result_row = [dict(r) for r in rows if r["opcode"] == "ResultRow"][0]
registers = list(range(result_row["p1"], result_row["p1"] + result_row["p2"]))
all_column_names = {}
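For context, a standalone sketch of the EXPLAIN output that columns_for_query() walks: OpenRead opcodes map a cursor number (p1) to a table root page (p2), and Column/Rowid opcodes later refer back to that cursor. The table here is invented for illustration:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.row_factory = sqlite3.Row  # needed for row["opcode"] style access
    conn.execute("create table facetable (pk integer primary key, state text)")

    opcodes = conn.execute("explain select pk, state from facetable").fetchall()

    # Cursor number -> root page, as built by columns_for_query()
    table_rootpage_by_register = {
        r["p1"]: r["p2"] for r in opcodes if r["opcode"] == "OpenRead"
    }
    print(table_rootpage_by_register)

    # Rowid/Column opcodes identify the cursor, column index and target register
    for r in opcodes:
        if r["opcode"] in ("Rowid", "Column"):
            print(r["addr"], r["opcode"], r["p1"], r["p2"], r["p3"])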
From 91315e07a76877e4d58e0032a7e49504a86a7f61 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 16 Aug 2021 11:36:53 -0700
Subject: [PATCH 0011/1160] More WIP
---
datasette/utils/__init__.py | 59 +++++++++++++++++++++++--------------
1 file changed, 37 insertions(+), 22 deletions(-)
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index a66bf0a1..d5856087 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -1093,44 +1093,59 @@ async def derive_named_parameters(db, sql):
def columns_for_query(conn, sql, params=None):
"""
- Given a SQLite connection ``conn`` and a SQL query ``sql``,
- returns a list of ``(table_name, column_name)`` pairs, one
- per returned column. ``(None, None)`` if no table and column
- could be derived.
+ Given a SQLite connection ``conn`` and a SQL query ``sql``, returns a list of
+ ``(table_name, column_name)`` pairs corresponding to the columns that would be
+ returned by that SQL query.
+
+ Each pair indicates the source table and column for the returned column, or
+ ``(None, None)`` if no table and column could be derived (e.g. for "select 1")
"""
if sql.lower().strip().startswith("explain"):
return []
- rows = conn.execute("explain " + sql, params).fetchall()
+ opcodes = conn.execute("explain " + sql, params).fetchall()
table_rootpage_by_register = {
- r["p1"]: r["p2"] for r in rows if r["opcode"] == "OpenRead"
+ r["p1"]: r["p2"] for r in opcodes if r["opcode"] == "OpenRead"
}
- names_by_rootpage = dict(
- conn.execute(
- "select rootpage, name from sqlite_master where rootpage in ({})".format(
+ print(f"{table_rootpage_by_register=}")
+ names_and_types_by_rootpage = dict(
+ [(r[0], (r[1], r[2])) for r in conn.execute(
+ "select rootpage, name, type from sqlite_master where rootpage in ({})".format(
", ".join(map(str, table_rootpage_by_register.values()))
)
- )
+ )]
)
+ print(f"{names_and_types_by_rootpage=}")
columns_by_column_register = {}
- for row in rows:
- if row["opcode"] in ("Rowid", "Column"):
- addr, opcode, table_id, cid, column_register, p4, p5, comment = row
+ for opcode_row in opcodes:
+ if opcode_row["opcode"] in ("Rowid", "Column"):
+ addr, opcode, table_id, cid, column_register, p4, p5, comment = opcode_row
+ print(f"{table_id=} {cid=} {column_register=}")
+ table = None
try:
- table = names_by_rootpage[table_rootpage_by_register[table_id]]
+ table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]
columns_by_column_register[column_register] = (table, cid)
- except KeyError:
+ except KeyError as e:
+ print(" KeyError")
+ print(" ", e)
+ print(" table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]")
+ print(f" {names_and_types_by_rootpage=} {table_rootpage_by_register=} {table_id=}")
+ print(" columns_by_column_register[column_register] = (table, cid)")
+ print(f" {column_register=} = ({table=}, {cid=})")
pass
- result_row = [dict(r) for r in rows if r["opcode"] == "ResultRow"][0]
- registers = list(range(result_row["p1"], result_row["p1"] + result_row["p2"]))
+ result_row = [dict(r) for r in opcodes if r["opcode"] == "ResultRow"][0]
+ result_registers = list(range(result_row["p1"], result_row["p1"] + result_row["p2"]))
+ print(f"{result_registers=}")
+ print(f"{columns_by_column_register=}")
all_column_names = {}
- for table in names_by_rootpage.values():
+ for (table, _) in names_and_types_by_rootpage.values():
table_xinfo = conn.execute("pragma table_xinfo({})".format(table)).fetchall()
- for row in table_xinfo:
- all_column_names[(table, row["cid"])] = row["name"]
+ for column_info in table_xinfo:
+ all_column_names[(table, column_info["cid"])] = column_info["name"]
+ print(f"{all_column_names=}")
final_output = []
- for r in registers:
+ for register in result_registers:
try:
- table, cid = columns_by_column_register[r]
+ table, cid = columns_by_column_register[register]
final_output.append((table, all_column_names[table, cid]))
except KeyError:
final_output.append((None, None))
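A hedged usage sketch of the work-in-progress helper above, assuming columns_for_query() is importable from datasette.utils at this point in the branch (the table is illustrative, and the temporary print() calls above will also emit debug output):

    import sqlite3

    from datasette.utils import columns_for_query

    conn = sqlite3.connect(":memory:")
    conn.row_factory = sqlite3.Row
    conn.execute("create table neighbourhoods (city text, neighbourhood text)")

    # Expected to return [("neighbourhoods", "city"), ("neighbourhoods", "neighbourhood")]
    print(columns_for_query(conn, "select city, neighbourhood from neighbourhoods", []))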
From adb5b70de5cec3c3dd37184defe606a082c232cf Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 16 Aug 2021 11:56:32 -0700
Subject: [PATCH 0012/1160] Show count of facet values if ?_facet_size=max,
closes #1423
---
datasette/static/app.css | 5 +++++
datasette/templates/table.html | 4 +++-
datasette/views/table.py | 1 +
tests/test_html.py | 22 +++++++++++++++++++++-
4 files changed, 30 insertions(+), 2 deletions(-)
diff --git a/datasette/static/app.css b/datasette/static/app.css
index bf068fdf..af3e14d5 100644
--- a/datasette/static/app.css
+++ b/datasette/static/app.css
@@ -633,6 +633,11 @@ form button[type=button] {
width: 250px;
margin-right: 15px;
}
+.facet-info-total {
+ font-size: 0.8em;
+ color: #666;
+ padding-right: 0.25em;
+}
.facet-info li,
.facet-info ul {
margin: 0;
diff --git a/datasette/templates/table.html b/datasette/templates/table.html
index a28945ad..6ba301b5 100644
--- a/datasette/templates/table.html
+++ b/datasette/templates/table.html
@@ -156,7 +156,9 @@
{% for facet_info in sorted_facet_results %}
- {{ facet_info.name }}{% if facet_info.type != "column" %} ({{ facet_info.type }}){% endif %}
+ {{ facet_info.name }}{% if facet_info.type != "column" %} ({{ facet_info.type }}){% endif %}
+ {% if show_facet_counts %} {% if facet_info.truncated %}>{% endif %}{{ facet_info.results|length }}{% endif %}
+
{% if facet_info.hideable %}
✖
{% endif %}
diff --git a/datasette/views/table.py b/datasette/views/table.py
index 486a6131..83f7c7cb 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -928,6 +928,7 @@ class TableView(RowTableShared):
key=lambda f: (len(f["results"]), f["name"]),
reverse=True,
),
+ "show_facet_counts": special_args.get("_facet_size") == "max",
"extra_wheres_for_ui": extra_wheres_for_ui,
"form_hidden_args": form_hidden_args,
"is_sortable": any(c["sortable"] for c in display_columns),
diff --git a/tests/test_html.py b/tests/test_html.py
index 90fcdae7..e73ccd2f 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -479,7 +479,7 @@ def test_facet_display(app_client):
for div in divs:
actual.append(
{
- "name": div.find("strong").text,
+ "name": div.find("strong").text.split()[0],
"items": [
{
"name": a.text,
@@ -1797,3 +1797,23 @@ def test_column_metadata(app_client):
soup.select("th[data-column=address]")[0]["data-column-description"]
== "The street address for the attraction"
)
+
+
+@pytest.mark.parametrize("use_facet_size_max", (True, False))
+def test_facet_total_shown_if_facet_max_size(use_facet_size_max):
+ # https://github.com/simonw/datasette/issues/1423
+ with make_app_client(settings={"max_returned_rows": 100}) as client:
+ path = "/fixtures/sortable?_facet=content&_facet=pk1"
+ if use_facet_size_max:
+ path += "&_facet_size=max"
+ response = client.get(path)
+ assert response.status == 200
+ fragments = (
+ '>100',
+ '8',
+ )
+ for fragment in fragments:
+ if use_facet_size_max:
+ assert fragment in response.text
+ else:
+ assert fragment not in response.text
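A small sketch of the pieces added above, outside of the template: facets are ordered by result count (then name), the count badge is only shown when ?_facet_size=max was requested, and a truncated facet gets a ">" prefix. The facet data here is invented for illustration:

    facet_results = {
        "state": {"name": "state", "results": [1, 2, 3], "truncated": False},
        "city_id": {"name": "city_id", "results": [1, 2], "truncated": True},
    }
    special_args = {"_facet_size": "max"}

    sorted_facet_results = sorted(
        facet_results.values(),
        key=lambda f: (len(f["results"]), f["name"]),
        reverse=True,
    )
    show_facet_counts = special_args.get("_facet_size") == "max"

    for facet_info in sorted_facet_results:
        if show_facet_counts:
            prefix = ">" if facet_info["truncated"] else ""
            print(facet_info["name"], prefix + str(len(facet_info["results"])))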
From 450ab1a36b0a6d83c37c99d1ee509c686f381eac Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 19 Aug 2021 13:27:34 -0700
Subject: [PATCH 0013/1160] Applied Black
---
datasette/utils/__init__.py | 27 +++++++++++++++++++--------
1 file changed, 19 insertions(+), 8 deletions(-)
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index d5856087..a477c117 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -1108,11 +1108,14 @@ def columns_for_query(conn, sql, params=None):
}
print(f"{table_rootpage_by_register=}")
names_and_types_by_rootpage = dict(
- [(r[0], (r[1], r[2])) for r in conn.execute(
- "select rootpage, name, type from sqlite_master where rootpage in ({})".format(
- ", ".join(map(str, table_rootpage_by_register.values()))
+ [
+ (r[0], (r[1], r[2]))
+ for r in conn.execute(
+ "select rootpage, name, type from sqlite_master where rootpage in ({})".format(
+ ", ".join(map(str, table_rootpage_by_register.values()))
+ )
)
- )]
+ ]
)
print(f"{names_and_types_by_rootpage=}")
columns_by_column_register = {}
@@ -1122,18 +1125,26 @@ def columns_for_query(conn, sql, params=None):
print(f"{table_id=} {cid=} {column_register=}")
table = None
try:
- table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]
+ table = names_and_types_by_rootpage[
+ table_rootpage_by_register[table_id]
+ ][0]
columns_by_column_register[column_register] = (table, cid)
except KeyError as e:
print(" KeyError")
print(" ", e)
- print(" table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]")
- print(f" {names_and_types_by_rootpage=} {table_rootpage_by_register=} {table_id=}")
+ print(
+ " table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]"
+ )
+ print(
+ f" {names_and_types_by_rootpage=} {table_rootpage_by_register=} {table_id=}"
+ )
print(" columns_by_column_register[column_register] = (table, cid)")
print(f" {column_register=} = ({table=}, {cid=})")
pass
result_row = [dict(r) for r in opcodes if r["opcode"] == "ResultRow"][0]
- result_registers = list(range(result_row["p1"], result_row["p1"] + result_row["p2"]))
+ result_registers = list(
+ range(result_row["p1"], result_row["p1"] + result_row["p2"])
+ )
print(f"{result_registers=}")
print(f"{columns_by_column_register=}")
all_column_names = {}
From d84e574e59c51ddcd6cf60a6f9b3d45182daf824 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 19 Aug 2021 14:09:38 -0700
Subject: [PATCH 0014/1160] Ability to deploy demos of branches
* Ability to deploy additional branch demos, closes #1442
* Only run tests before deploy on main branch
* Documentation for continuous deployment
---
.github/workflows/deploy-latest.yml | 8 +++++++-
docs/contributing.rst | 11 +++++++++++
2 files changed, 18 insertions(+), 1 deletion(-)
diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml
index 849adb40..1a07503a 100644
--- a/.github/workflows/deploy-latest.yml
+++ b/.github/workflows/deploy-latest.yml
@@ -29,6 +29,7 @@ jobs:
python -m pip install -e .[docs]
python -m pip install sphinx-to-sqlite==0.1a1
- name: Run tests
+ if: ${{ github.ref == 'refs/heads/main' }}
run: |
pytest -n auto -m "not serial"
pytest -m "serial"
@@ -50,6 +51,8 @@ jobs:
run: |-
gcloud config set run/region us-central1
gcloud config set project datasette-222320
+ export SUFFIX="-${GITHUB_REF#refs/heads/}"
+ export SUFFIX=${SUFFIX#-main}
datasette publish cloudrun fixtures.db extra_database.db \
-m fixtures.json \
--plugins-dir=plugins \
@@ -57,7 +60,10 @@ jobs:
--version-note=$GITHUB_SHA \
--extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \
--install=pysqlite3-binary \
- --service=datasette-latest
+ --service "datasette-latest$SUFFIX"
+ - name: Deploy to docs as well (only for main)
+ if: ${{ github.ref == 'refs/heads/main' }}
+ run: |-
# Deploy docs.db to a different service
datasette publish cloudrun docs.db \
--branch=$GITHUB_SHA \
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 8a638e0b..07f2a0e4 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -202,6 +202,17 @@ For added productivity, you can use `sphinx-autobuild `__
+Continuous deployment
+---------------------
+
+The demo instance `latest.datasette.io `__ is re-deployed automatically to Google Cloud Run for every push to ``main`` that passes the test suite. This is implemented by the GitHub Actions workflow at `.github/workflows/deploy-latest.yml `__.
+
+Specific branches can also be set to automatically deploy by adding them to the ``on: push: branches`` block at the top of the workflow YAML file. Branches configured in this way will be deployed to a new Cloud Run service whether or not their tests pass.
+
+The Cloud Run URL for a branch demo can be found in the GitHub Actions logs.
+
.. _contributing_release:
Release process
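The service-name suffix in the workflow above is derived with shell parameter expansion; restated as a small Python sketch for clarity (the function name is ours, not part of the workflow):

    def service_name(github_ref):
        # ${GITHUB_REF#refs/heads/} then ${SUFFIX#-main}
        # (str.removeprefix requires Python 3.9+)
        suffix = "-" + github_ref.removeprefix("refs/heads/")
        suffix = suffix.removeprefix("-main")
        return "datasette-latest" + suffix

    assert service_name("refs/heads/main") == "datasette-latest"
    assert service_name("refs/heads/query-info") == "datasette-latest-query-info"

So pushes to main keep deploying the existing datasette-latest service, while a branch gets its own datasette-latest-<branch> Cloud Run service.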
From a8228b018b64a4f2a0ded70a402374f4ee2ccd93 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 19 Aug 2021 14:09:38 -0700
Subject: [PATCH 0015/1160] Ability to deploy demos of branches
* Ability to deploy additional branch demos, closes #1442
* Only run tests before deploy on main branch
* Documentation for continuous deployment
---
.github/workflows/deploy-latest.yml | 8 +++++++-
docs/contributing.rst | 11 +++++++++++
2 files changed, 18 insertions(+), 1 deletion(-)
diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml
index 849adb40..1a07503a 100644
--- a/.github/workflows/deploy-latest.yml
+++ b/.github/workflows/deploy-latest.yml
@@ -29,6 +29,7 @@ jobs:
python -m pip install -e .[docs]
python -m pip install sphinx-to-sqlite==0.1a1
- name: Run tests
+ if: ${{ github.ref == 'refs/heads/main' }}
run: |
pytest -n auto -m "not serial"
pytest -m "serial"
@@ -50,6 +51,8 @@ jobs:
run: |-
gcloud config set run/region us-central1
gcloud config set project datasette-222320
+ export SUFFIX="-${GITHUB_REF#refs/heads/}"
+ export SUFFIX=${SUFFIX#-main}
datasette publish cloudrun fixtures.db extra_database.db \
-m fixtures.json \
--plugins-dir=plugins \
@@ -57,7 +60,10 @@ jobs:
--version-note=$GITHUB_SHA \
--extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \
--install=pysqlite3-binary \
- --service=datasette-latest
+ --service "datasette-latest$SUFFIX"
+ - name: Deploy to docs as well (only for main)
+ if: ${{ github.ref == 'refs/heads/main' }}
+ run: |-
# Deploy docs.db to a different service
datasette publish cloudrun docs.db \
--branch=$GITHUB_SHA \
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 8a638e0b..07f2a0e4 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -202,6 +202,17 @@ For added productivity, you can use `sphinx-autobuild `__
+Continuous deployment
+---------------------
+
+The demo instance `latest.datasette.io `__ is re-deployed automatically to Google Cloud Run for every push to ``main`` that passes the test suite. This is implemented by the GitHub Actions workflow at `.github/workflows/deploy-latest.yml `__.
+
+Specific branches can also be set to automatically deploy by adding them to the ``on: push: branches`` block at the top of the workflow YAML file. Branches configured in this way will be deployed to a new Cloud Run service whether or not their tests pass.
+
+The Cloud Run URL for a branch demo can be found in the GitHub Actions logs.
+
.. _contributing_release:
Release process
From 281c0872d5b8a462c9d7b2b2d77a924da4ed25a7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 19 Aug 2021 14:15:45 -0700
Subject: [PATCH 0016/1160] Deploy this as a preview
---
.github/workflows/deploy-latest.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml
index 1a07503a..2ecb3924 100644
--- a/.github/workflows/deploy-latest.yml
+++ b/.github/workflows/deploy-latest.yml
@@ -4,6 +4,7 @@ on:
push:
branches:
- main
+ - query-info
jobs:
deploy:
From 4eb3ae40fb223a66ae574fb84fac99e96183b08d Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 19 Aug 2021 14:17:44 -0700
Subject: [PATCH 0017/1160] Don't bother building docs if not on main
Refs #1442
---
.github/workflows/deploy-latest.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml
index 1a07503a..1ae96e89 100644
--- a/.github/workflows/deploy-latest.yml
+++ b/.github/workflows/deploy-latest.yml
@@ -36,6 +36,7 @@ jobs:
- name: Build fixtures.db
run: python tests/fixtures.py fixtures.db fixtures.json plugins --extra-db-filename extra_database.db
- name: Build docs.db
+ if: ${{ github.ref == 'refs/heads/main' }}
run: |-
cd docs
sphinx-build -b xml . _build
From 7e15422aacfa9e9735cb9f9beaa32250edbf4905 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 19 Aug 2021 14:23:43 -0700
Subject: [PATCH 0018/1160] Documentation for datasette.databases property,
closes #1443
---
docs/internals.rst | 11 +++++++++++
1 file changed, 11 insertions(+)
diff --git a/docs/internals.rst b/docs/internals.rst
index 058a8969..d5db7ffa 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -196,6 +196,17 @@ Datasette class
This object is an instance of the ``Datasette`` class, passed to many plugin hooks as an argument called ``datasette``.
+.. _datasette_databases:
+
+.databases
+----------
+
+Property exposing an ordered dictionary of databases currently connected to Datasette.
+
+The dictionary keys are the name of the database that is used in the URL - e.g. ``/fixtures`` would have a key of ``"fixtures"``. The values are :ref:`internals_database` instances.
+
+All databases are listed, irrespective of user permissions. This means that the ``_internal`` database will always be listed here.
+
.. _datasette_plugin_config:
.plugin_config(plugin_name, database=None, table=None)
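A brief usage sketch of the property documented above, following the same pattern the test suite uses to attach an in-memory database (the "demo" name is illustrative):

    from datasette.app import Datasette
    from datasette.database import Database

    ds = Datasette([], memory=True)
    ds.add_database(Database(ds, memory_name="demo"))

    # Keys are the names used in URLs (e.g. /demo); values are Database instances,
    # including the always-present _internal database mentioned above
    for name, database in ds.databases.items():
        print(name, database.is_memory)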
From 92a99d969c01633dba14cceebeda65daaedaec17 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 24 Aug 2021 11:13:42 -0700
Subject: [PATCH 0019/1160] Added not-footer wrapper div, refs #1446
---
datasette/templates/base.html | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/datasette/templates/base.html b/datasette/templates/base.html
index e61edc4f..c9aa7e31 100644
--- a/datasette/templates/base.html
+++ b/datasette/templates/base.html
@@ -13,6 +13,7 @@
{% block extra_head %}{% endblock %}
+
{% include "_close_open_menus.html" %}
From 93c3a7ffbfb3378f743ebce87d033cf1ce7689e0 Mon Sep 17 00:00:00 2001
From: Tim Sherratt
Date: Wed, 25 Aug 2021 11:28:58 +1000
Subject: [PATCH 0020/1160] Remove underscore from search mode parameter name
(#1447)
The text refers to the parameter as `searchmode` but the `metadata.json` example uses `search_mode`. The latter doesn't actually seem to work.
---
docs/full_text_search.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/full_text_search.rst b/docs/full_text_search.rst
index f549296f..90b2e8c1 100644
--- a/docs/full_text_search.rst
+++ b/docs/full_text_search.rst
@@ -70,7 +70,7 @@ Here is an example which enables full-text search (with SQLite advanced search o
"display_ads": {
"fts_table": "ads_fts",
"fts_pk": "id",
- "search_mode": "raw"
+ "searchmode": "raw"
}
}
}
From 5161422b7fa249c6b7d6dc47ec6f483d3fdbd170 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 24 Aug 2021 18:29:26 -0700
Subject: [PATCH 0021/1160] Update trustme requirement from <0.9,>=0.7 to
>=0.7,<0.10 (#1433)
Updates the requirements on [trustme](https://github.com/python-trio/trustme) to permit the latest version.
- [Release notes](https://github.com/python-trio/trustme/releases)
- [Commits](https://github.com/python-trio/trustme/compare/v0.7.0...v0.9.0)
---
updated-dependencies:
- dependency-name: trustme
dependency-type: direct:development
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
setup.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.py b/setup.py
index 65e99848..a3866515 100644
--- a/setup.py
+++ b/setup.py
@@ -73,7 +73,7 @@ setup(
"beautifulsoup4>=4.8.1,<4.10.0",
"black==21.6b0",
"pytest-timeout>=1.4.2,<1.5",
- "trustme>=0.7,<0.9",
+ "trustme>=0.7,<0.10",
],
"rich": ["rich"],
},
From a1a33bb5822214be1cebd98cd858b2058d91a4aa Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 24 Aug 2021 18:29:55 -0700
Subject: [PATCH 0022/1160] Bump black from 21.6b0 to 21.7b0 (#1400)
Bumps [black](https://github.com/psf/black) from 21.6b0 to 21.7b0.
- [Release notes](https://github.com/psf/black/releases)
- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md)
- [Commits](https://github.com/psf/black/commits)
---
updated-dependencies:
- dependency-name: black
dependency-type: direct:development
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
setup.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.py b/setup.py
index a3866515..84f32087 100644
--- a/setup.py
+++ b/setup.py
@@ -71,7 +71,7 @@ setup(
"pytest-xdist>=2.2.1,<2.4",
"pytest-asyncio>=0.10,<0.16",
"beautifulsoup4>=4.8.1,<4.10.0",
- "black==21.6b0",
+ "black==21.7b0",
"pytest-timeout>=1.4.2,<1.5",
"trustme>=0.7,<0.10",
],
From 3655bb49a464bcc8004e491cc4d4de292f1acd62 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 27 Aug 2021 17:48:54 -0700
Subject: [PATCH 0023/1160] Better default help text, closes #1450
---
datasette/cli.py | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/datasette/cli.py b/datasette/cli.py
index ea6da748..65da5613 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -123,7 +123,11 @@ def sqlite_extensions(fn):
@click.version_option(version=__version__)
def cli():
"""
- Datasette!
+ Datasette is an open source multi-tool for exploring and publishing data
+
+ \b
+ About Datasette: https://datasette.io/
+ Full documentation: https://docs.datasette.io/
"""
From 30c18576d603366dc3bd83ba50de1b7e70844430 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 27 Aug 2021 18:39:42 -0700
Subject: [PATCH 0024/1160] register_commands() plugin hook, closes #1449
---
datasette/cli.py | 3 +++
datasette/hookspecs.py | 5 ++++
docs/plugin_hooks.rst | 45 +++++++++++++++++++++++++++++++++
tests/test_plugins.py | 57 +++++++++++++++++++++++++++++++++++++++++-
4 files changed, 109 insertions(+), 1 deletion(-)
diff --git a/datasette/cli.py b/datasette/cli.py
index 65da5613..22e2338a 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -595,6 +595,9 @@ def serve(
uvicorn.run(ds.app(), **uvicorn_kwargs)
+pm.hook.register_commands(cli=cli)
+
+
async def check_databases(ds):
# Run check_connection against every connected database
# to confirm they are all usable
diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py
index 56c79d23..1d4e3b27 100644
--- a/datasette/hookspecs.py
+++ b/datasette/hookspecs.py
@@ -79,6 +79,11 @@ def register_routes(datasette):
"""Register URL routes: return a list of (regex, view_function) pairs"""
+@hookspec
+def register_commands(cli):
+ """Register additional CLI commands, e.g. 'datasette mycommand ...'"""
+
+
@hookspec
def actor_from_request(datasette, request):
"""Return an actor dictionary based on the incoming request"""
diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst
index 5cdb1623..a6fe1071 100644
--- a/docs/plugin_hooks.rst
+++ b/docs/plugin_hooks.rst
@@ -587,6 +587,51 @@ See :ref:`writing_plugins_designing_urls` for tips on designing the URL routes u
Examples: `datasette-auth-github `__, `datasette-psutil `__
+.. _plugin_register_commands:
+
+register_commands(cli)
+----------------------
+
+``cli`` - the root Datasette `Click command group `__
+ Use this to register additional CLI commands
+
+Register additional CLI commands that can be run using ``datasette yourcommand ...``. This provides a mechanism by which plugins can add new CLI commands to Datasette.
+
+This example registers a new ``datasette verify file1.db file2.db`` command that checks if the provided file paths are valid SQLite databases:
+
+.. code-block:: python
+
+ from datasette import hookimpl
+ import click
+ import sqlite3
+
+ @hookimpl
+ def register_commands(cli):
+ @cli.command()
+ @click.argument("files", type=click.Path(exists=True), nargs=-1)
+ def verify(files):
+ "Verify that files can be opened by Datasette"
+ for file in files:
+ conn = sqlite3.connect(str(file))
+ try:
+ conn.execute("select * from sqlite_master")
+ except sqlite3.DatabaseError:
+ raise click.ClickException("Invalid database: {}".format(file))
+
+The new command can then be executed like so::
+
+ datasette verify fixtures.db
+
+Help text (from the docstring for the function plus any defined Click arguments or options) will become available using::
+
+ datasette verify --help
+
+Plugins can register multiple commands by making multiple calls to the ``@cli.command()`` decorator. Consult the `Click documentation `__ for full details on how to build a CLI command, including how to define arguments and options.
+
+Note that ``register_commands()`` plugins cannot be used with the :ref:`--plugins-dir mechanism ` - they need to be installed into the same virtual environment as Datasette using ``pip install``. Provided it has a ``setup.py`` file (see :ref:`writing_plugins_packaging`) you can run ``pip install`` directly against the directory in which you are developing your plugin like so::
+
+ pip install -e path/to/my/datasette-plugin
+
.. _plugin_register_facet_classes:
register_facet_classes()
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index ec8ff0c5..a024c39b 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -6,13 +6,15 @@ from .fixtures import (
TEMP_PLUGIN_SECRET_FILE,
TestClient as _TestClient,
) # noqa
+from click.testing import CliRunner
from datasette.app import Datasette
-from datasette import cli
+from datasette import cli, hookimpl
from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm
from datasette.utils.sqlite import sqlite3
from datasette.utils import CustomRow
from jinja2.environment import Template
import base64
+import importlib
import json
import os
import pathlib
@@ -902,3 +904,56 @@ def test_hook_get_metadata(app_client):
assert "Hello from local metadata" == meta["databases"]["from-local"]["title"]
assert "Hello from the plugin hook" == meta["databases"]["from-hook"]["title"]
pm.hook.get_metadata = og_pm_hook_get_metadata
+
+
+def _extract_commands(output):
+ lines = output.split("Commands:\n", 1)[1].split("\n")
+ return {line.split()[0].replace("*", "") for line in lines if line.strip()}
+
+
+def test_hook_register_commands():
+ # Without the plugin should have seven commands
+ runner = CliRunner()
+ result = runner.invoke(cli.cli, "--help")
+ commands = _extract_commands(result.output)
+ assert commands == {
+ "serve",
+ "inspect",
+ "install",
+ "package",
+ "plugins",
+ "publish",
+ "uninstall",
+ }
+
+ # Now install a plugin
+ class VerifyPlugin:
+ __name__ = "VerifyPlugin"
+
+ @hookimpl
+ def register_commands(self, cli):
+ @cli.command()
+ def verify():
+ pass
+
+ @cli.command()
+ def unverify():
+ pass
+
+ pm.register(VerifyPlugin(), name="verify")
+ importlib.reload(cli)
+ result2 = runner.invoke(cli.cli, "--help")
+ commands2 = _extract_commands(result2.output)
+ assert commands2 == {
+ "serve",
+ "inspect",
+ "install",
+ "package",
+ "plugins",
+ "publish",
+ "uninstall",
+ "verify",
+ "unverify",
+ }
+ pm.unregister(name="verify")
+ importlib.reload(cli)
From d3ea36713194e3d92ed4c066337400146c921d0e Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 27 Aug 2021 18:55:54 -0700
Subject: [PATCH 0025/1160] Release 0.59a2
Refs #942, #1421, #1423, #1431, #1443, #1446, #1449
---
datasette/version.py | 2 +-
docs/changelog.rst | 13 +++++++++++++
docs/plugin_hooks.rst | 2 +-
3 files changed, 15 insertions(+), 2 deletions(-)
diff --git a/datasette/version.py b/datasette/version.py
index f5fbfb3f..87b18fab 100644
--- a/datasette/version.py
+++ b/datasette/version.py
@@ -1,2 +1,2 @@
-__version__ = "0.59a1"
+__version__ = "0.59a2"
__version_info__ = tuple(__version__.split("."))
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 1406a7ca..737a151b 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,19 @@
Changelog
=========
+.. _v0_59a2:
+
+0.59a2 (2021-08-27)
+-------------------
+
+- Columns can now have associated metadata descriptions in ``metadata.json``, see :ref:`metadata_column_descriptions`. (:issue:`942`)
+- New :ref:`register_commands() ` plugin hook allows plugins to register additional Datasette CLI commands, e.g. ``datasette mycommand file.db``. (:issue:`1449`)
+- Adding ``?_facet_size=max`` to a table page now shows the number of unique values in each facet. (:issue:`1423`)
+- Code that figures out which named parameters a SQL query takes in order to display form fields for them is no longer confused by strings that contain colon characters. (:issue:`1421`)
+- Renamed ``--help-config`` option to ``--help-settings``. (:issue:`1431`)
+- ``datasette.databases`` property is now a documented API. (:issue:`1443`)
+- Datasette base template now wraps everything other than the ``