From 1db116e20eda43c95d3c60a82548e355862f7212 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 12 Aug 2021 17:47:40 -0700
Subject: [PATCH 01/10] WIP extra query column information, refs #1293

---
 datasette/templates/query.html |  2 ++
 datasette/utils/__init__.py    | 41 ++++++++++++++++++++++++++++++++++
 datasette/views/database.py    | 10 +++++++++
 3 files changed, 53 insertions(+)

diff --git a/datasette/templates/query.html b/datasette/templates/query.html
index 75f7f1b1..9fe1d4f5 100644
--- a/datasette/templates/query.html
+++ b/datasette/templates/query.html
@@ -67,6 +67,8 @@
 
+extra_column_info: {{ extra_column_info }}
+
 {% if display_rows %}
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 70ac8976..69c72566 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -1089,3 +1089,44 @@ async def derive_named_parameters(db, sql):
         return [row["p4"].lstrip(":") for row in results if row["opcode"] == "Variable"]
     except sqlite3.DatabaseError:
         return possible_params
+
+
+def columns_for_query(conn, sql, params=None):
+    """
+    Given a SQLite connection ``conn`` and a SQL query ``sql``,
+    returns a list of ``(table_name, column_name)`` pairs, one
+    per returned column. ``(None, None)`` if no table and column
+    could be derived.
+    """
+    rows = conn.execute("explain " + sql, params).fetchall()
+    table_rootpage_by_register = {
+        r["p1"]: r["p2"] for r in rows if r["opcode"] == "OpenRead"
+    }
+    names_by_rootpage = dict(
+        conn.execute(
+            "select rootpage, name from sqlite_master where rootpage in ({})".format(
+                ", ".join(map(str, table_rootpage_by_register.values()))
+            )
+        )
+    )
+    columns_by_column_register = {}
+    for row in rows:
+        if row["opcode"] in ("Rowid", "Column"):
+            addr, opcode, table_id, cid, column_register, p4, p5, comment = row
+            table = names_by_rootpage[table_rootpage_by_register[table_id]]
+            columns_by_column_register[column_register] = (table, cid)
+    result_row = [dict(r) for r in rows if r["opcode"] == "ResultRow"][0]
+    registers = list(range(result_row["p1"], result_row["p1"] + result_row["p2"]))
+    all_column_names = {}
+    for table in names_by_rootpage.values():
+        table_xinfo = conn.execute("pragma table_xinfo({})".format(table)).fetchall()
+        for row in table_xinfo:
+            all_column_names[(table, row["cid"])] = row["name"]
+    final_output = []
+    for r in registers:
+        try:
+            table, cid = columns_by_column_register[r]
+            final_output.append((table, all_column_names[table, cid]))
+        except KeyError:
+            final_output.append((None, None))
+    return final_output
diff --git a/datasette/views/database.py b/datasette/views/database.py
index 7c36034c..7b1f1923 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -10,6 +10,7 @@ import markupsafe
 from datasette.utils import (
     await_me_maybe,
     check_visibility,
+    columns_for_query,
     derive_named_parameters,
     to_css_class,
     validate_sql_select,
@@ -248,6 +249,8 @@ class QueryView(DataView):
 
         query_error = None
 
+        extra_column_info = None
+
         # Execute query - as write or as read
         if write:
             if request.method == "POST":
@@ -334,6 +337,12 @@ class QueryView(DataView):
                     database, sql, params_for_query, truncate=True, **extra_args
                 )
                 columns = [r[0] for r in results.description]
+
+                # Try to figure out extra column information
+                db = self.ds.get_database(database)
+                extra_column_info = await db.execute_fn(
+                    lambda conn: columns_for_query(conn, sql, params_for_query)
+                )
             except sqlite3.DatabaseError as e:
                 query_error = e
                 results = None
@@ -462,6 +471,7 @@ class QueryView(DataView):
             "show_hide_text": show_hide_text,
             "show_hide_hidden": markupsafe.Markup(show_hide_hidden),
             "hide_sql": hide_sql,
+            "extra_column_info": extra_column_info,
         }
 
         return (

From d2de17987bf504fdf5485df6d19adf6810575e2d Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 12 Aug 2021 18:01:57 -0700
Subject: [PATCH 02/10] Rename --help-config to --help-settings, closes #1431

---
 datasette/cli.py              | 12 ++++++------
 docs/datasette-serve-help.txt |  2 +-
 tests/test_cli.py             | 10 +++++++++-
 3 files changed, 16 insertions(+), 8 deletions(-)

diff --git a/datasette/cli.py b/datasette/cli.py
index e53f3d8e..d4e23c70 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -51,7 +51,7 @@ class Config(click.ParamType):
             name, value = config.split(":", 1)
             if name not in DEFAULT_SETTINGS:
                 self.fail(
-                    f"{name} is not a valid option (--help-config to see all)",
+                    f"{name} is not a valid option (--help-settings to see all)",
                     param,
                     ctx,
                 )
@@ -84,7 +84,7 @@ class Setting(CompositeParamType):
             name, value = config
             if name not in DEFAULT_SETTINGS:
                 self.fail(
-                    f"{name} is not a valid option (--help-config to see all)",
+                    f"{name} is not a valid option (--help-settings to see all)",
                     param,
                     ctx,
                 )
@@ -408,7 +408,7 @@ def uninstall(packages, yes):
     help="Run an HTTP GET request against this path, print results and exit",
 )
 @click.option("--version-note", help="Additional note to show on /-/versions")
-@click.option("--help-config", is_flag=True, help="Show available config options")
+@click.option("--help-settings", is_flag=True, help="Show available settings")
 @click.option("--pdb", is_flag=True, help="Launch debugger on any errors")
 @click.option(
     "-o",
@@ -456,7 +456,7 @@ def serve(
     root,
     get,
     version_note,
-    help_config,
+    help_settings,
     pdb,
     open_browser,
     create,
@@ -466,9 +466,9 @@ def serve(
     return_instance=False,
 ):
     """Serve up specified SQLite database files with a web UI"""
-    if help_config:
+    if help_settings:
         formatter = formatting.HelpFormatter()
-        with formatter.section("Config options"):
+        with formatter.section("Settings"):
             formatter.write_dl(
                 [
                     (option.name, f"{option.help} (default={option.default})")
diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt
index ec3f41a0..2911977a 100644
--- a/docs/datasette-serve-help.txt
+++ b/docs/datasette-serve-help.txt
@@ -32,7 +32,7 @@ Options:
   --get TEXT                   Run an HTTP GET request against this path,
                                print results and exit
   --version-note TEXT          Additional note to show on /-/versions
-  --help-config                Show available config options
+  --help-settings              Show available settings
   --pdb                        Launch debugger on any errors
   -o, --open                   Open Datasette in your web browser
   --create                     Create database files if they do not exist
diff --git a/tests/test_cli.py b/tests/test_cli.py
index e31a305e..763fe2e7 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -5,6 +5,7 @@ from .fixtures import (
     EXPECTED_PLUGINS,
 )
 import asyncio
+from datasette.app import SETTINGS
 from datasette.plugins import DEFAULT_PLUGINS
 from datasette.cli import cli, serve
 from datasette.version import __version__
@@ -147,7 +148,7 @@ def test_metadata_yaml():
         root=False,
         version_note=None,
         get=None,
-        help_config=False,
+        help_settings=False,
         pdb=False,
         crossdb=False,
         open_browser=False,
@@ -291,3 +292,10 @@ def test_weird_database_names(ensure_eventloop, tmpdir, filename):
         cli, [db_path, "--get", "/{}".format(urllib.parse.quote(filename_no_stem))]
     )
     assert result2.exit_code == 0, result2.output
+
+
+def test_help_settings():
+    runner = CliRunner()
+    result = runner.invoke(cli, ["--help-settings"])
+    for setting in SETTINGS:
+        assert setting.name in result.output

From 07b6c9dd35079a4cbfba5ad6c93ca86320b8b9b5 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 12 Aug 2021 18:10:36 -0700
Subject: [PATCH 03/10] Rename config= to settings=, refs #1432

---
 datasette/app.py               |  8 ++++----
 datasette/cli.py               |  8 ++++----
 datasette/templates/table.html |  2 +-
 datasette/views/base.py        |  2 +-
 datasette/views/database.py    |  2 +-
 tests/fixtures.py              | 20 ++++++++++----------
 tests/test_api.py              |  8 ++++----
 tests/test_custom_pages.py     |  2 +-
 tests/test_facets.py           |  2 +-
 tests/test_html.py             | 14 ++++++++------
 10 files changed, 35 insertions(+), 33 deletions(-)

diff --git a/datasette/app.py b/datasette/app.py
index f2f75884..8cbaaf9f 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -200,7 +200,7 @@ class Datasette:
         plugins_dir=None,
         static_mounts=None,
         memory=False,
-        config=None,
+        settings=None,
         secret=None,
         version_note=None,
         config_dir=None,
@@ -279,7 +279,7 @@ class Datasette:
             raise StartupError("config.json should be renamed to settings.json")
         if config_dir and (config_dir / "settings.json").exists() and not config:
             config = json.loads((config_dir / "settings.json").read_text())
-        self._settings = dict(DEFAULT_SETTINGS, **(config or {}))
+        self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
         self.renderers = {}  # File extension -> (renderer, can_render) functions
         self.version_note = version_note
         self.executor = futures.ThreadPoolExecutor(
@@ -419,8 +419,8 @@ class Datasette:
     def setting(self, key):
         return self._settings.get(key, None)
 
-    def config_dict(self):
-        # Returns a fully resolved config dictionary, useful for templates
+    def settings_dict(self):
+        # Returns a fully resolved settings dictionary, useful for templates
         return {option.name: self.setting(option.name) for option in SETTINGS}
 
     def _metadata_recursive_update(self, orig, updated):
diff --git a/datasette/cli.py b/datasette/cli.py
index d4e23c70..ea6da748 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -495,14 +495,14 @@ def serve(
     if metadata:
         metadata_data = parse_metadata(metadata.read())
 
-    combined_config = {}
+    combined_settings = {}
     if config:
         click.echo(
             "--config name:value will be deprecated in Datasette 1.0, use --setting name value instead",
             err=True,
         )
-        combined_config.update(config)
-    combined_config.update(settings)
+        combined_settings.update(config)
+    combined_settings.update(settings)
 
     kwargs = dict(
         immutables=immutable,
@@ -514,7 +514,7 @@ def serve(
         template_dir=template_dir,
         plugins_dir=plugins_dir,
         static_mounts=static,
-        config=combined_config,
+        settings=combined_settings,
         memory=memory,
         secret=secret,
         version_note=version_note,
diff --git a/datasette/templates/table.html b/datasette/templates/table.html
index 466e8a47..a28945ad 100644
--- a/datasette/templates/table.html
+++ b/datasette/templates/table.html
@@ -201,7 +201,7 @@ CSV options:
 {% if expandable_columns %}{% endif %}
-{% if next_url and config.allow_csv_stream %}{% endif %}
+{% if next_url and settings.allow_csv_stream %}{% endif %}
 {% for key, value in url_csv_hidden_args %}
 
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 1cea1386..3333781c 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -614,7 +614,7 @@ class DataView(BaseView):
                 ]
                 + [("_size", "max")],
                 "datasette_version": __version__,
-                "config": self.ds.config_dict(),
+                "settings": self.ds.settings_dict(),
             },
         }
         if "metadata" not in context:
diff --git a/datasette/views/database.py b/datasette/views/database.py
index 7b1f1923..ddea1d88 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -465,7 +465,7 @@ class QueryView(DataView):
             "canned_query": canned_query,
             "edit_sql_url": edit_sql_url,
             "metadata": metadata,
-            "config": self.ds.config_dict(),
+            "settings": self.ds.settings_dict(),
             "request": request,
             "show_hide_link": show_hide_link,
             "show_hide_text": show_hide_text,
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 4a420e4b..dc22c609 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -99,7 +99,7 @@ def make_app_client(
     max_returned_rows=None,
     cors=False,
     memory=False,
-    config=None,
+    settings=None,
     filename="fixtures.db",
     is_immutable=False,
     extra_databases=None,
@@ -129,7 +129,7 @@ def make_app_client(
         # Insert at start to help test /-/databases ordering:
         files.insert(0, extra_filepath)
     os.chdir(os.path.dirname(filepath))
-    config = config or {}
+    settings = settings or {}
     for key, value in {
         "default_page_size": 50,
         "max_returned_rows": max_returned_rows or 100,
@@ -138,8 +138,8 @@ def make_app_client(
         # errors when running the full test suite:
         "num_sql_threads": 1,
     }.items():
-        if key not in config:
-            config[key] = value
+        if key not in settings:
+            settings[key] = value
     ds = Datasette(
         files,
         immutables=immutables,
@@ -147,7 +147,7 @@ def make_app_client(
         cors=cors,
         metadata=metadata or METADATA,
         plugins_dir=PLUGINS_DIR,
-        config=config,
+        settings=settings,
         inspect_data=inspect_data,
         static_mounts=static_mounts,
         template_dir=template_dir,
@@ -171,7 +171,7 @@ def app_client_no_files():
 
 
 @pytest.fixture(scope="session")
 def app_client_base_url_prefix():
-    with make_app_client(config={"base_url": "/prefix/"}) as client:
+    with make_app_client(settings={"base_url": "/prefix/"}) as client:
         yield client
@@ -210,13 +210,13 @@ def app_client_two_attached_databases_one_immutable():
 
 
 @pytest.fixture(scope="session")
 def app_client_with_hash():
-    with make_app_client(config={"hash_urls": True}, is_immutable=True) as client:
+    with make_app_client(settings={"hash_urls": True}, is_immutable=True) as client:
         yield client
 
 
 @pytest.fixture(scope="session")
 def app_client_with_trace():
-    with make_app_client(config={"trace_debug": True}, is_immutable=True) as client:
+    with make_app_client(settings={"trace_debug": True}, is_immutable=True) as client:
         yield client
@@ -234,13 +234,13 @@ def app_client_returned_rows_matches_page_size():
 
 
 @pytest.fixture(scope="session")
 def app_client_larger_cache_size():
-    with make_app_client(config={"cache_size_kb": 2500}) as client:
+    with make_app_client(settings={"cache_size_kb": 2500}) as client:
         yield client
 
 
 @pytest.fixture(scope="session")
 def app_client_csv_max_mb_one():
-    with make_app_client(config={"max_csv_mb": 1}) as client:
+    with make_app_client(settings={"max_csv_mb": 1}) as client:
         yield client
diff --git a/tests/test_api.py b/tests/test_api.py
index 83cca521..1e93c62e 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1711,14 +1711,14 @@ def test_suggested_facets(app_client):
 
 
 def test_allow_facet_off():
-    with make_app_client(config={"allow_facet": False}) as client:
+    with make_app_client(settings={"allow_facet": False}) as client:
         assert 400 == client.get("/fixtures/facetable.json?_facet=planet_int").status
         # Should not suggest any facets either:
         assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"]
 
 
 def test_suggest_facets_off():
-    with make_app_client(config={"suggest_facets": False}) as client:
+    with make_app_client(settings={"suggest_facets": False}) as client:
         # Now suggested_facets should be []
         assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"]
@@ -1883,7 +1883,7 @@ def test_config_cache_size(app_client_larger_cache_size):
 
 
 def test_config_force_https_urls():
-    with make_app_client(config={"force_https_urls": True}) as client:
+    with make_app_client(settings={"force_https_urls": True}) as client:
         response = client.get("/fixtures/facetable.json?_size=3&_facet=state")
         assert response.json["next_url"].startswith("https://")
         assert response.json["facet_results"]["state"]["results"][0][
@@ -1921,7 +1921,7 @@ def test_custom_query_with_unicode_characters(app_client):
 
 
 @pytest.mark.parametrize("trace_debug", (True, False))
 def test_trace(trace_debug):
-    with make_app_client(config={"trace_debug": trace_debug}) as client:
+    with make_app_client(settings={"trace_debug": trace_debug}) as client:
         response = client.get("/fixtures/simple_primary_key.json?_trace=1")
         assert response.status == 200
diff --git a/tests/test_custom_pages.py b/tests/test_custom_pages.py
index 5a71f56d..76c67397 100644
--- a/tests/test_custom_pages.py
+++ b/tests/test_custom_pages.py
@@ -14,7 +14,7 @@ def custom_pages_client():
 
 
 @pytest.fixture(scope="session")
 def custom_pages_client_with_base_url():
     with make_app_client(
-        template_dir=TEST_TEMPLATE_DIRS, config={"base_url": "/prefix/"}
+        template_dir=TEST_TEMPLATE_DIRS, settings={"base_url": "/prefix/"}
     ) as client:
         yield client
diff --git a/tests/test_facets.py b/tests/test_facets.py
index 18fb8c3b..22927512 100644
--- a/tests/test_facets.py
+++ b/tests/test_facets.py
@@ -351,7 +351,7 @@ async def test_json_array_with_blanks_and_nulls():
 
 
 @pytest.mark.asyncio
 async def test_facet_size():
-    ds = Datasette([], memory=True, config={"max_returned_rows": 50})
+    ds = Datasette([], memory=True, settings={"max_returned_rows": 50})
     db = ds.add_database(Database(ds, memory_name="test_facet_size"))
     await db.execute_write(
         "create table neighbourhoods(city text, neighbourhood text)", block=True
diff --git a/tests/test_html.py b/tests/test_html.py
index f12f89cd..90fcdae7 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -214,7 +214,7 @@ def test_definition_sql(path, expected_definition_sql, app_client):
 
 
 def test_table_cell_truncation():
-    with make_app_client(config={"truncate_cells_html": 5}) as client:
+    with make_app_client(settings={"truncate_cells_html": 5}) as client:
         response = client.get("/fixtures/facetable")
         assert response.status == 200
         table = Soup(response.body, "html.parser").find("table")
@@ -239,7 +239,7 @@ def test_table_cell_truncation():
 
 
 def test_row_page_does_not_truncate():
-    with make_app_client(config={"truncate_cells_html": 5}) as client:
+    with make_app_client(settings={"truncate_cells_html": 5}) as client:
         response = client.get("/fixtures/facetable/1")
         assert response.status == 200
         table = Soup(response.body, "html.parser").find("table")
@@ -1072,7 +1072,9 @@ def test_database_download_disallowed_for_memory():
 
 
 def test_allow_download_off():
-    with make_app_client(is_immutable=True, config={"allow_download": False}) as client:
+    with make_app_client(
+        is_immutable=True, settings={"allow_download": False}
+    ) as client:
         response = client.get("/fixtures")
         soup = Soup(response.body, "html.parser")
         assert not len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
@@ -1486,7 +1488,7 @@ def test_query_error(app_client):
 
 
 def test_config_template_debug_on():
-    with make_app_client(config={"template_debug": True}) as client:
+    with make_app_client(settings={"template_debug": True}) as client:
         response = client.get("/fixtures/facetable?_context=1")
         assert response.status == 200
         assert response.text.startswith("<pre>{")
@@ -1500,7 +1502,7 @@ def test_config_template_debug_off(app_client):
 
 def test_debug_context_includes_extra_template_vars():
     # https://github.com/simonw/datasette/issues/693
-    with make_app_client(config={"template_debug": True}) as client:
+    with make_app_client(settings={"template_debug": True}) as client:
         response = client.get("/fixtures/facetable?_context=1")
         # scope_path is added by PLUGIN1
         assert "scope_path" in response.text
@@ -1744,7 +1746,7 @@ def test_facet_more_links(
     expected_ellipses_url,
 ):
     with make_app_client(
-        config={"max_returned_rows": max_returned_rows, "default_facet_size": 2}
+        settings={"max_returned_rows": max_returned_rows, "default_facet_size": 2}
     ) as client:
         response = client.get(path)
         soup = Soup(response.body, "html.parser")

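After this patch the Datasette constructor takes a settings= keyword in place of config=. A minimal sketch of the new call, mirroring the updated test_facets fixture above (assumes this patch series is applied):

    from datasette.app import Datasette

    # settings= replaces the old config= keyword argument
    ds = Datasette([], memory=True, settings={"max_returned_rows": 50})
    assert ds.setting("max_returned_rows") == 50
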
From f7d2bcc75a6f407b1c8726e9ee1058e7e2dc2f60 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Thu, 12 Aug 2021 20:54:25 -0700
Subject: [PATCH 04/10] Settings fix, refs #1433

---
 datasette/app.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/datasette/app.py b/datasette/app.py
index 8cbaaf9f..adc543ef 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -277,7 +277,7 @@ class Datasette:
         self.static_mounts = static_mounts or []
         if config_dir and (config_dir / "config.json").exists():
             raise StartupError("config.json should be renamed to settings.json")
-        if config_dir and (config_dir / "settings.json").exists() and not config:
+        if config_dir and (config_dir / "settings.json").exists() and not settings:
             config = json.loads((config_dir / "settings.json").read_text())
         self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
         self.renderers = {}  # File extension -> (renderer, can_render) functions

From 44699ebb6388c0ff1d5299dccaede46014dee1a2 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Thu, 12 Aug 2021 22:10:07 -0700
Subject: [PATCH 05/10] Fixed config_dir mode, refs #1432

---
 datasette/app.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/datasette/app.py b/datasette/app.py
index adc543ef..06db740e 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -278,7 +278,7 @@ class Datasette:
         if config_dir and (config_dir / "config.json").exists():
             raise StartupError("config.json should be renamed to settings.json")
         if config_dir and (config_dir / "settings.json").exists() and not settings:
-            config = json.loads((config_dir / "settings.json").read_text())
+            settings = json.loads((config_dir / "settings.json").read_text())
         self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
         self.renderers = {}  # File extension -> (renderer, can_render) functions
         self.version_note = version_note

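Patches 04 and 05 together make config_dir mode load settings.json through the new settings= code path. A minimal sketch of that mode, assuming both fixes are applied (the directory name is hypothetical):

    import json
    from pathlib import Path
    from datasette.app import Datasette

    config_dir = Path("my-project")
    config_dir.mkdir(exist_ok=True)
    # Picked up because settings= was not passed explicitly
    (config_dir / "settings.json").write_text(
        json.dumps({"default_page_size": 20})
    )

    ds = Datasette([], memory=True, config_dir=config_dir)
    assert ds.setting("default_page_size") == 20
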
From 62aac6593a12bbdd3d19ea184147fe650bdd6f5e Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Fri, 13 Aug 2021 08:33:13 -0700
Subject: [PATCH 06/10] Handle some error conditions

---
 datasette/utils/__init__.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 69c72566..a66bf0a1 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -1098,6 +1098,8 @@ def columns_for_query(conn, sql, params=None):
     per returned column. ``(None, None)`` if no table and column
     could be derived.
     """
+    if sql.lower().strip().startswith("explain"):
+        return []
     rows = conn.execute("explain " + sql, params).fetchall()
     table_rootpage_by_register = {
         r["p1"]: r["p2"] for r in rows if r["opcode"] == "OpenRead"
@@ -1113,8 +1115,11 @@ def columns_for_query(conn, sql, params=None):
     for row in rows:
         if row["opcode"] in ("Rowid", "Column"):
             addr, opcode, table_id, cid, column_register, p4, p5, comment = row
-            table = names_by_rootpage[table_rootpage_by_register[table_id]]
-            columns_by_column_register[column_register] = (table, cid)
+            try:
+                table = names_by_rootpage[table_rootpage_by_register[table_id]]
+                columns_by_column_register[column_register] = (table, cid)
+            except KeyError:
+                pass
     result_row = [dict(r) for r in rows if r["opcode"] == "ResultRow"][0]
     registers = list(range(result_row["p1"], result_row["p1"] + result_row["p2"]))
     all_column_names = {}

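columns_for_query() is driven entirely by SQLite's EXPLAIN output, so it needs a connection whose row_factory is sqlite3.Row (opcode rows are indexed by name). A usage sketch with a hypothetical table; the expected output is what the docstring describes, hedged because this is still WIP code:

    import sqlite3
    from datasette.utils import columns_for_query

    conn = sqlite3.connect(":memory:")
    conn.row_factory = sqlite3.Row  # rows are read by name, e.g. row["opcode"]
    conn.execute(
        "create table facetable (pk integer primary key, state text, city text)"
    )

    # Pass {} for params; the EXPLAIN statement is executed with them
    print(columns_for_query(conn, "select state, city, 1 from facetable", {}))
    # Expected to produce something like:
    # [('facetable', 'state'), ('facetable', 'city'), (None, None)]
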
From 91315e07a76877e4d58e0032a7e49504a86a7f61 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Mon, 16 Aug 2021 11:36:53 -0700
Subject: [PATCH 07/10] More WIP

---
 datasette/utils/__init__.py | 59 +++++++++++++++++++++++--------------
 1 file changed, 37 insertions(+), 22 deletions(-)

diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index a66bf0a1..d5856087 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -1093,44 +1093,59 @@ async def derive_named_parameters(db, sql):
 
 def columns_for_query(conn, sql, params=None):
     """
-    Given a SQLite connection ``conn`` and a SQL query ``sql``,
-    returns a list of ``(table_name, column_name)`` pairs, one
-    per returned column. ``(None, None)`` if no table and column
-    could be derived.
+    Given a SQLite connection ``conn`` and a SQL query ``sql``, returns a list of
+    ``(table_name, column_name)`` pairs corresponding to the columns that would be
+    returned by that SQL query.
+
+    Each pair indicates the source table and column for the returned column, or
+    ``(None, None)`` if no table and column could be derived (e.g. for "select 1")
     """
     if sql.lower().strip().startswith("explain"):
         return []
-    rows = conn.execute("explain " + sql, params).fetchall()
+    opcodes = conn.execute("explain " + sql, params).fetchall()
     table_rootpage_by_register = {
-        r["p1"]: r["p2"] for r in rows if r["opcode"] == "OpenRead"
+        r["p1"]: r["p2"] for r in opcodes if r["opcode"] == "OpenRead"
     }
-    names_by_rootpage = dict(
-        conn.execute(
-            "select rootpage, name from sqlite_master where rootpage in ({})".format(
+    print(f"{table_rootpage_by_register=}")
+    names_and_types_by_rootpage = dict(
+        [(r[0], (r[1], r[2])) for r in conn.execute(
+            "select rootpage, name, type from sqlite_master where rootpage in ({})".format(
                 ", ".join(map(str, table_rootpage_by_register.values()))
             )
-        )
+        )]
     )
+    print(f"{names_and_types_by_rootpage=}")
     columns_by_column_register = {}
-    for row in rows:
-        if row["opcode"] in ("Rowid", "Column"):
-            addr, opcode, table_id, cid, column_register, p4, p5, comment = row
+    for opcode_row in opcodes:
+        if opcode_row["opcode"] in ("Rowid", "Column"):
+            addr, opcode, table_id, cid, column_register, p4, p5, comment = opcode_row
+            print(f"{table_id=} {cid=} {column_register=}")
+            table = None
             try:
-                table = names_by_rootpage[table_rootpage_by_register[table_id]]
+                table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]
                 columns_by_column_register[column_register] = (table, cid)
-            except KeyError:
+            except KeyError as e:
+                print("  KeyError")
+                print("   ", e)
+                print("    table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]")
+                print(f"    {names_and_types_by_rootpage=} {table_rootpage_by_register=} {table_id=}")
+                print("    columns_by_column_register[column_register] = (table, cid)")
+                print(f"    {column_register=} = ({table=}, {cid=})")
                 pass
-    result_row = [dict(r) for r in rows if r["opcode"] == "ResultRow"][0]
-    registers = list(range(result_row["p1"], result_row["p1"] + result_row["p2"]))
+    result_row = [dict(r) for r in opcodes if r["opcode"] == "ResultRow"][0]
+    result_registers = list(range(result_row["p1"], result_row["p1"] + result_row["p2"]))
+    print(f"{result_registers=}")
+    print(f"{columns_by_column_register=}")
     all_column_names = {}
-    for table in names_by_rootpage.values():
+    for (table, _) in names_and_types_by_rootpage.values():
         table_xinfo = conn.execute("pragma table_xinfo({})".format(table)).fetchall()
-        for row in table_xinfo:
-            all_column_names[(table, row["cid"])] = row["name"]
+        for column_info in table_xinfo:
+            all_column_names[(table, column_info["cid"])] = column_info["name"]
+    print(f"{all_column_names=}")
     final_output = []
-    for r in registers:
+    for register in result_registers:
         try:
-            table, cid = columns_by_column_register[r]
+            table, cid = columns_by_column_register[register]
             final_output.append((table, all_column_names[table, cid]))
         except KeyError:
             final_output.append((None, None))

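The debug prints added here trace the EXPLAIN opcodes the function walks. For reference, those opcodes can be dumped directly; a short sketch (hypothetical table; register meanings as documented for SQLite's EXPLAIN output):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.row_factory = sqlite3.Row
    conn.execute("create table t (a text, b text)")

    for row in conn.execute("explain select a, b from t"):
        # OpenRead:  p1 = cursor number, p2 = table rootpage
        # Column:    p1 = cursor number, p2 = column index, p3 = output register
        # ResultRow: p1 = first output register, p2 = number of registers
        print(row["addr"], row["opcode"], row["p1"], row["p2"], row["p3"])
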
From 450ab1a36b0a6d83c37c99d1ee509c686f381eac Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Thu, 19 Aug 2021 13:27:34 -0700
Subject: [PATCH 08/10] Applied Black

---
 datasette/utils/__init__.py | 27 +++++++++++++++++++--------
 1 file changed, 19 insertions(+), 8 deletions(-)

diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index d5856087..a477c117 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -1108,11 +1108,14 @@ def columns_for_query(conn, sql, params=None):
     }
     print(f"{table_rootpage_by_register=}")
     names_and_types_by_rootpage = dict(
-        [(r[0], (r[1], r[2])) for r in conn.execute(
-            "select rootpage, name, type from sqlite_master where rootpage in ({})".format(
-                ", ".join(map(str, table_rootpage_by_register.values()))
+        [
+            (r[0], (r[1], r[2]))
+            for r in conn.execute(
+                "select rootpage, name, type from sqlite_master where rootpage in ({})".format(
+                    ", ".join(map(str, table_rootpage_by_register.values()))
+                )
             )
-        )]
+        ]
     )
     print(f"{names_and_types_by_rootpage=}")
     columns_by_column_register = {}
@@ -1122,18 +1125,26 @@ def columns_for_query(conn, sql, params=None):
             print(f"{table_id=} {cid=} {column_register=}")
             table = None
             try:
-                table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]
+                table = names_and_types_by_rootpage[
+                    table_rootpage_by_register[table_id]
+                ][0]
                 columns_by_column_register[column_register] = (table, cid)
             except KeyError as e:
                 print("  KeyError")
                 print("   ", e)
-                print("    table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]")
-                print(f"    {names_and_types_by_rootpage=} {table_rootpage_by_register=} {table_id=}")
+                print(
+                    "    table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]"
+                )
+                print(
+                    f"    {names_and_types_by_rootpage=} {table_rootpage_by_register=} {table_id=}"
+                )
                 print("    columns_by_column_register[column_register] = (table, cid)")
                 print(f"    {column_register=} = ({table=}, {cid=})")
                 pass
     result_row = [dict(r) for r in opcodes if r["opcode"] == "ResultRow"][0]
-    result_registers = list(range(result_row["p1"], result_row["p1"] + result_row["p2"]))
+    result_registers = list(
+        range(result_row["p1"], result_row["p1"] + result_row["p2"])
+    )
     print(f"{result_registers=}")
     print(f"{columns_by_column_register=}")
     all_column_names = {}

From a8228b018b64a4f2a0ded70a402374f4ee2ccd93 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Thu, 19 Aug 2021 14:09:38 -0700
Subject: [PATCH 09/10] Ability to deploy demos of branches

* Ability to deploy additional branch demos, closes #1442
* Only run tests before deploy on main branch
* Documentation for continuous deployment
---
 .github/workflows/deploy-latest.yml |  8 +++++++-
 docs/contributing.rst               | 11 +++++++++++
 2 files changed, 18 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml
index 849adb40..1a07503a 100644
--- a/.github/workflows/deploy-latest.yml
+++ b/.github/workflows/deploy-latest.yml
@@ -29,6 +29,7 @@ jobs:
         python -m pip install -e .[docs]
         python -m pip install sphinx-to-sqlite==0.1a1
     - name: Run tests
+      if: ${{ github.ref == 'refs/heads/main' }}
       run: |
         pytest -n auto -m "not serial"
         pytest -m "serial"
@@ -50,6 +51,8 @@ jobs:
       run: |-
         gcloud config set run/region us-central1
         gcloud config set project datasette-222320
+        export SUFFIX="-${GITHUB_REF#refs/heads/}"
+        export SUFFIX=${SUFFIX#-main}
         datasette publish cloudrun fixtures.db extra_database.db \
             -m fixtures.json \
             --plugins-dir=plugins \
@@ -57,7 +60,10 @@ jobs:
             --version-note=$GITHUB_SHA \
             --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \
             --install=pysqlite3-binary \
-            --service=datasette-latest
+            --service "datasette-latest$SUFFIX"
+    - name: Deploy to docs as well (only for main)
+      if: ${{ github.ref == 'refs/heads/main' }}
+      run: |-
         # Deploy docs.db to a different service
         datasette publish cloudrun docs.db \
             --branch=$GITHUB_SHA \
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 8a638e0b..07f2a0e4 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -202,6 +202,17 @@ For added productivity, you can use `sphinx-autobuild
 
+.. _contributing_continuous_deployment:
+
+Continuous deployment
+---------------------
+
+The demo instance of Datasette at `latest.datasette.io `__ is re-deployed automatically to Google Cloud Run for every push to ``main`` that passes the test suite. This is implemented by the GitHub Actions workflow at `.github/workflows/deploy-latest.yml `__.
+
+Specific branches can also be set to automatically deploy by adding them to the ``on: push: branches`` block at the top of the workflow YAML file. Branches configured in this way will be deployed to a new Cloud Run service whether or not their tests pass.
+
+The Cloud Run URL for a branch demo can be found in the GitHub Actions logs.
+
 .. _contributing_release:
 
 Release process

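The two shell parameter expansions in the workflow above compute a per-branch Cloud Run service suffix. A sketch of the equivalent logic in Python, showing that main collapses to the bare datasette-latest service name while other branches get a suffixed service:

    def service_suffix(github_ref):
        # "-${GITHUB_REF#refs/heads/}" strips the refs/heads/ prefix, adds "-"
        suffix = "-" + github_ref.removeprefix("refs/heads/")
        # ${SUFFIX#-main} then drops a leading "-main", leaving "" on main
        return suffix.removeprefix("-main")

    assert service_suffix("refs/heads/main") == ""
    assert service_suffix("refs/heads/query-info") == "-query-info"
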
From 281c0872d5b8a462c9d7b2b2d77a924da4ed25a7 Mon Sep 17 00:00:00 2001
From: Simon Willison 
Date: Thu, 19 Aug 2021 14:15:45 -0700
Subject: [PATCH 10/10] Deploy this as a preview

---
 .github/workflows/deploy-latest.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml
index 1a07503a..2ecb3924 100644
--- a/.github/workflows/deploy-latest.yml
+++ b/.github/workflows/deploy-latest.yml
@@ -4,6 +4,7 @@ on:
   push:
     branches:
       - main
+      - query-info
 
 jobs:
   deploy: