Date: Thu, 12 Aug 2021 18:10:36 -0700
Subject: [PATCH 03/10] Rename config= to settings=, refs #1432
---
datasette/app.py | 8 ++++----
datasette/cli.py | 8 ++++----
datasette/templates/table.html | 2 +-
datasette/views/base.py | 2 +-
datasette/views/database.py | 2 +-
tests/fixtures.py | 20 ++++++++++----------
tests/test_api.py | 8 ++++----
tests/test_custom_pages.py | 2 +-
tests/test_facets.py | 2 +-
tests/test_html.py | 14 ++++++++------
10 files changed, 35 insertions(+), 33 deletions(-)
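A minimal usage sketch of the renamed keyword argument, assuming the constructor and the setting()/settings_dict() methods shown in the hunks below (database list and values are arbitrary):

    from datasette.app import Datasette

    # Before this patch the keyword was config=; --config name:value on the CLI
    # still works but now emits a deprecation warning pointing at --setting.
    ds = Datasette([], memory=True, settings={"default_page_size": 50})
    assert ds.setting("default_page_size") == 50
    print(ds.settings_dict())  # fully resolved settings, as exposed to templates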
diff --git a/datasette/app.py b/datasette/app.py
index f2f75884..8cbaaf9f 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -200,7 +200,7 @@ class Datasette:
plugins_dir=None,
static_mounts=None,
memory=False,
- config=None,
+ settings=None,
secret=None,
version_note=None,
config_dir=None,
@@ -279,7 +279,7 @@ class Datasette:
raise StartupError("config.json should be renamed to settings.json")
if config_dir and (config_dir / "settings.json").exists() and not config:
config = json.loads((config_dir / "settings.json").read_text())
- self._settings = dict(DEFAULT_SETTINGS, **(config or {}))
+ self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
self.renderers = {} # File extension -> (renderer, can_render) functions
self.version_note = version_note
self.executor = futures.ThreadPoolExecutor(
@@ -419,8 +419,8 @@ class Datasette:
def setting(self, key):
return self._settings.get(key, None)
- def config_dict(self):
- # Returns a fully resolved config dictionary, useful for templates
+ def settings_dict(self):
+ # Returns a fully resolved settings dictionary, useful for templates
return {option.name: self.setting(option.name) for option in SETTINGS}
def _metadata_recursive_update(self, orig, updated):
diff --git a/datasette/cli.py b/datasette/cli.py
index d4e23c70..ea6da748 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -495,14 +495,14 @@ def serve(
if metadata:
metadata_data = parse_metadata(metadata.read())
- combined_config = {}
+ combined_settings = {}
if config:
click.echo(
"--config name:value will be deprecated in Datasette 1.0, use --setting name value instead",
err=True,
)
- combined_config.update(config)
- combined_config.update(settings)
+ combined_settings.update(config)
+ combined_settings.update(settings)
kwargs = dict(
immutables=immutable,
@@ -514,7 +514,7 @@ def serve(
template_dir=template_dir,
plugins_dir=plugins_dir,
static_mounts=static,
- config=combined_config,
+ settings=combined_settings,
memory=memory,
secret=secret,
version_note=version_note,
diff --git a/datasette/templates/table.html b/datasette/templates/table.html
index 466e8a47..a28945ad 100644
--- a/datasette/templates/table.html
+++ b/datasette/templates/table.html
@@ -201,7 +201,7 @@
CSV options:
{% if expandable_columns %}{% endif %}
- {% if next_url and config.allow_csv_stream %}{% endif %}
+ {% if next_url and settings.allow_csv_stream %}{% endif %}
{% for key, value in url_csv_hidden_args %}
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 1cea1386..3333781c 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -614,7 +614,7 @@ class DataView(BaseView):
]
+ [("_size", "max")],
"datasette_version": __version__,
- "config": self.ds.config_dict(),
+ "settings": self.ds.settings_dict(),
},
}
if "metadata" not in context:
diff --git a/datasette/views/database.py b/datasette/views/database.py
index 7b1f1923..ddea1d88 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -465,7 +465,7 @@ class QueryView(DataView):
"canned_query": canned_query,
"edit_sql_url": edit_sql_url,
"metadata": metadata,
- "config": self.ds.config_dict(),
+ "settings": self.ds.settings_dict(),
"request": request,
"show_hide_link": show_hide_link,
"show_hide_text": show_hide_text,
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 4a420e4b..dc22c609 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -99,7 +99,7 @@ def make_app_client(
max_returned_rows=None,
cors=False,
memory=False,
- config=None,
+ settings=None,
filename="fixtures.db",
is_immutable=False,
extra_databases=None,
@@ -129,7 +129,7 @@ def make_app_client(
# Insert at start to help test /-/databases ordering:
files.insert(0, extra_filepath)
os.chdir(os.path.dirname(filepath))
- config = config or {}
+ settings = settings or {}
for key, value in {
"default_page_size": 50,
"max_returned_rows": max_returned_rows or 100,
@@ -138,8 +138,8 @@ def make_app_client(
# errors when running the full test suite:
"num_sql_threads": 1,
}.items():
- if key not in config:
- config[key] = value
+ if key not in settings:
+ settings[key] = value
ds = Datasette(
files,
immutables=immutables,
@@ -147,7 +147,7 @@ def make_app_client(
cors=cors,
metadata=metadata or METADATA,
plugins_dir=PLUGINS_DIR,
- config=config,
+ settings=settings,
inspect_data=inspect_data,
static_mounts=static_mounts,
template_dir=template_dir,
@@ -171,7 +171,7 @@ def app_client_no_files():
@pytest.fixture(scope="session")
def app_client_base_url_prefix():
- with make_app_client(config={"base_url": "/prefix/"}) as client:
+ with make_app_client(settings={"base_url": "/prefix/"}) as client:
yield client
@@ -210,13 +210,13 @@ def app_client_two_attached_databases_one_immutable():
@pytest.fixture(scope="session")
def app_client_with_hash():
- with make_app_client(config={"hash_urls": True}, is_immutable=True) as client:
+ with make_app_client(settings={"hash_urls": True}, is_immutable=True) as client:
yield client
@pytest.fixture(scope="session")
def app_client_with_trace():
- with make_app_client(config={"trace_debug": True}, is_immutable=True) as client:
+ with make_app_client(settings={"trace_debug": True}, is_immutable=True) as client:
yield client
@@ -234,13 +234,13 @@ def app_client_returned_rows_matches_page_size():
@pytest.fixture(scope="session")
def app_client_larger_cache_size():
- with make_app_client(config={"cache_size_kb": 2500}) as client:
+ with make_app_client(settings={"cache_size_kb": 2500}) as client:
yield client
@pytest.fixture(scope="session")
def app_client_csv_max_mb_one():
- with make_app_client(config={"max_csv_mb": 1}) as client:
+ with make_app_client(settings={"max_csv_mb": 1}) as client:
yield client
diff --git a/tests/test_api.py b/tests/test_api.py
index 83cca521..1e93c62e 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1711,14 +1711,14 @@ def test_suggested_facets(app_client):
def test_allow_facet_off():
- with make_app_client(config={"allow_facet": False}) as client:
+ with make_app_client(settings={"allow_facet": False}) as client:
assert 400 == client.get("/fixtures/facetable.json?_facet=planet_int").status
# Should not suggest any facets either:
assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"]
def test_suggest_facets_off():
- with make_app_client(config={"suggest_facets": False}) as client:
+ with make_app_client(settings={"suggest_facets": False}) as client:
# Now suggested_facets should be []
assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"]
@@ -1883,7 +1883,7 @@ def test_config_cache_size(app_client_larger_cache_size):
def test_config_force_https_urls():
- with make_app_client(config={"force_https_urls": True}) as client:
+ with make_app_client(settings={"force_https_urls": True}) as client:
response = client.get("/fixtures/facetable.json?_size=3&_facet=state")
assert response.json["next_url"].startswith("https://")
assert response.json["facet_results"]["state"]["results"][0][
@@ -1921,7 +1921,7 @@ def test_custom_query_with_unicode_characters(app_client):
@pytest.mark.parametrize("trace_debug", (True, False))
def test_trace(trace_debug):
- with make_app_client(config={"trace_debug": trace_debug}) as client:
+ with make_app_client(settings={"trace_debug": trace_debug}) as client:
response = client.get("/fixtures/simple_primary_key.json?_trace=1")
assert response.status == 200
diff --git a/tests/test_custom_pages.py b/tests/test_custom_pages.py
index 5a71f56d..76c67397 100644
--- a/tests/test_custom_pages.py
+++ b/tests/test_custom_pages.py
@@ -14,7 +14,7 @@ def custom_pages_client():
@pytest.fixture(scope="session")
def custom_pages_client_with_base_url():
with make_app_client(
- template_dir=TEST_TEMPLATE_DIRS, config={"base_url": "/prefix/"}
+ template_dir=TEST_TEMPLATE_DIRS, settings={"base_url": "/prefix/"}
) as client:
yield client
diff --git a/tests/test_facets.py b/tests/test_facets.py
index 18fb8c3b..22927512 100644
--- a/tests/test_facets.py
+++ b/tests/test_facets.py
@@ -351,7 +351,7 @@ async def test_json_array_with_blanks_and_nulls():
@pytest.mark.asyncio
async def test_facet_size():
- ds = Datasette([], memory=True, config={"max_returned_rows": 50})
+ ds = Datasette([], memory=True, settings={"max_returned_rows": 50})
db = ds.add_database(Database(ds, memory_name="test_facet_size"))
await db.execute_write(
"create table neighbourhoods(city text, neighbourhood text)", block=True
diff --git a/tests/test_html.py b/tests/test_html.py
index f12f89cd..90fcdae7 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -214,7 +214,7 @@ def test_definition_sql(path, expected_definition_sql, app_client):
def test_table_cell_truncation():
- with make_app_client(config={"truncate_cells_html": 5}) as client:
+ with make_app_client(settings={"truncate_cells_html": 5}) as client:
response = client.get("/fixtures/facetable")
assert response.status == 200
table = Soup(response.body, "html.parser").find("table")
@@ -239,7 +239,7 @@ def test_table_cell_truncation():
def test_row_page_does_not_truncate():
- with make_app_client(config={"truncate_cells_html": 5}) as client:
+ with make_app_client(settings={"truncate_cells_html": 5}) as client:
response = client.get("/fixtures/facetable/1")
assert response.status == 200
table = Soup(response.body, "html.parser").find("table")
@@ -1072,7 +1072,9 @@ def test_database_download_disallowed_for_memory():
def test_allow_download_off():
- with make_app_client(is_immutable=True, config={"allow_download": False}) as client:
+ with make_app_client(
+ is_immutable=True, settings={"allow_download": False}
+ ) as client:
response = client.get("/fixtures")
soup = Soup(response.body, "html.parser")
assert not len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
@@ -1486,7 +1488,7 @@ def test_query_error(app_client):
def test_config_template_debug_on():
- with make_app_client(config={"template_debug": True}) as client:
+ with make_app_client(settings={"template_debug": True}) as client:
response = client.get("/fixtures/facetable?_context=1")
assert response.status == 200
assert response.text.startswith("{")
@@ -1500,7 +1502,7 @@ def test_config_template_debug_off(app_client):
def test_debug_context_includes_extra_template_vars():
# https://github.com/simonw/datasette/issues/693
- with make_app_client(config={"template_debug": True}) as client:
+ with make_app_client(settings={"template_debug": True}) as client:
response = client.get("/fixtures/facetable?_context=1")
# scope_path is added by PLUGIN1
assert "scope_path" in response.text
@@ -1744,7 +1746,7 @@ def test_facet_more_links(
expected_ellipses_url,
):
with make_app_client(
- config={"max_returned_rows": max_returned_rows, "default_facet_size": 2}
+ settings={"max_returned_rows": max_returned_rows, "default_facet_size": 2}
) as client:
response = client.get(path)
soup = Soup(response.body, "html.parser")
From f7d2bcc75a6f407b1c8726e9ee1058e7e2dc2f60 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 12 Aug 2021 20:54:25 -0700
Subject: [PATCH 04/10] Settings fix, refs #1433
---
datasette/app.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/datasette/app.py b/datasette/app.py
index 8cbaaf9f..adc543ef 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -277,7 +277,7 @@ class Datasette:
self.static_mounts = static_mounts or []
if config_dir and (config_dir / "config.json").exists():
raise StartupError("config.json should be renamed to settings.json")
- if config_dir and (config_dir / "settings.json").exists() and not config:
+ if config_dir and (config_dir / "settings.json").exists() and not settings:
config = json.loads((config_dir / "settings.json").read_text())
self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
self.renderers = {} # File extension -> (renderer, can_render) functions
From 44699ebb6388c0ff1d5299dccaede46014dee1a2 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 12 Aug 2021 22:10:07 -0700
Subject: [PATCH 05/10] Fixed config_dir mode, refs #1432
---
datasette/app.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/datasette/app.py b/datasette/app.py
index adc543ef..06db740e 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -278,7 +278,7 @@ class Datasette:
if config_dir and (config_dir / "config.json").exists():
raise StartupError("config.json should be renamed to settings.json")
if config_dir and (config_dir / "settings.json").exists() and not settings:
- config = json.loads((config_dir / "settings.json").read_text())
+ settings = json.loads((config_dir / "settings.json").read_text())
self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
self.renderers = {} # File extension -> (renderer, can_render) functions
self.version_note = version_note
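Together with the previous one-line fix, this makes config_dir mode load settings.json into the new variable. A rough sketch of the intended behaviour, assuming config_dir auto-discovery otherwise works as before (directory name and setting are illustrative):

    import json
    from pathlib import Path
    from datasette.app import Datasette

    config_dir = Path("my-project")  # hypothetical project directory
    config_dir.mkdir(exist_ok=True)
    (config_dir / "settings.json").write_text(json.dumps({"max_returned_rows": 50}))

    # settings.json is only consulted when no explicit settings= were passed,
    # and after these two fixes it is loaded into `settings` rather than the
    # removed `config` variable.
    ds = Datasette([], config_dir=config_dir)
    assert ds.setting("max_returned_rows") == 50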
From 62aac6593a12bbdd3d19ea184147fe650bdd6f5e Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 13 Aug 2021 08:33:13 -0700
Subject: [PATCH 06/10] Handle some error conditions
---
datasette/utils/__init__.py | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 69c72566..a66bf0a1 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -1098,6 +1098,8 @@ def columns_for_query(conn, sql, params=None):
per returned column. ``(None, None)`` if no table and column
could be derived.
"""
+ if sql.lower().strip().startswith("explain"):
+ return []
rows = conn.execute("explain " + sql, params).fetchall()
table_rootpage_by_register = {
r["p1"]: r["p2"] for r in rows if r["opcode"] == "OpenRead"
@@ -1113,8 +1115,11 @@ def columns_for_query(conn, sql, params=None):
for row in rows:
if row["opcode"] in ("Rowid", "Column"):
addr, opcode, table_id, cid, column_register, p4, p5, comment = row
- table = names_by_rootpage[table_rootpage_by_register[table_id]]
- columns_by_column_register[column_register] = (table, cid)
+ try:
+ table = names_by_rootpage[table_rootpage_by_register[table_id]]
+ columns_by_column_register[column_register] = (table, cid)
+ except KeyError:
+ pass
result_row = [dict(r) for r in rows if r["opcode"] == "ResultRow"][0]
registers = list(range(result_row["p1"], result_row["p1"] + result_row["p2"]))
all_column_names = {}
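A rough usage sketch of columns_for_query with the new guards. This assumes the branch's datasette.utils module, plus a plain sqlite3 connection with sqlite3.Row as the row factory since the function indexes EXPLAIN opcode rows by name; the table is illustrative:

    import sqlite3
    from datasette.utils import columns_for_query

    conn = sqlite3.connect(":memory:")
    conn.row_factory = sqlite3.Row
    conn.execute("create table facetable (id integer primary key, state text)")

    # One (table_name, column_name) pair per returned column
    print(columns_for_query(conn, "select state from facetable", []))

    # A query that already starts with EXPLAIN now short-circuits to []
    # instead of tripping over opcodes it cannot interpret
    print(columns_for_query(conn, "explain select state from facetable", []))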
From 91315e07a76877e4d58e0032a7e49504a86a7f61 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 16 Aug 2021 11:36:53 -0700
Subject: [PATCH 07/10] More WIP
---
datasette/utils/__init__.py | 59 +++++++++++++++++++++++--------------
1 file changed, 37 insertions(+), 22 deletions(-)
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index a66bf0a1..d5856087 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -1093,44 +1093,59 @@ async def derive_named_parameters(db, sql):
def columns_for_query(conn, sql, params=None):
"""
- Given a SQLite connection ``conn`` and a SQL query ``sql``,
- returns a list of ``(table_name, column_name)`` pairs, one
- per returned column. ``(None, None)`` if no table and column
- could be derived.
+ Given a SQLite connection ``conn`` and a SQL query ``sql``, returns a list of
+ ``(table_name, column_name)`` pairs corresponding to the columns that would be
+ returned by that SQL query.
+
+ Each pair indicates the source table and column for the returned column, or
+ ``(None, None)`` if no table and column could be derived (e.g. for "select 1")
"""
if sql.lower().strip().startswith("explain"):
return []
- rows = conn.execute("explain " + sql, params).fetchall()
+ opcodes = conn.execute("explain " + sql, params).fetchall()
table_rootpage_by_register = {
- r["p1"]: r["p2"] for r in rows if r["opcode"] == "OpenRead"
+ r["p1"]: r["p2"] for r in opcodes if r["opcode"] == "OpenRead"
}
- names_by_rootpage = dict(
- conn.execute(
- "select rootpage, name from sqlite_master where rootpage in ({})".format(
+ print(f"{table_rootpage_by_register=}")
+ names_and_types_by_rootpage = dict(
+ [(r[0], (r[1], r[2])) for r in conn.execute(
+ "select rootpage, name, type from sqlite_master where rootpage in ({})".format(
", ".join(map(str, table_rootpage_by_register.values()))
)
- )
+ )]
)
+ print(f"{names_and_types_by_rootpage=}")
columns_by_column_register = {}
- for row in rows:
- if row["opcode"] in ("Rowid", "Column"):
- addr, opcode, table_id, cid, column_register, p4, p5, comment = row
+ for opcode_row in opcodes:
+ if opcode_row["opcode"] in ("Rowid", "Column"):
+ addr, opcode, table_id, cid, column_register, p4, p5, comment = opcode_row
+ print(f"{table_id=} {cid=} {column_register=}")
+ table = None
try:
- table = names_by_rootpage[table_rootpage_by_register[table_id]]
+ table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]
columns_by_column_register[column_register] = (table, cid)
- except KeyError:
+ except KeyError as e:
+ print(" KeyError")
+ print(" ", e)
+ print(" table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]")
+ print(f" {names_and_types_by_rootpage=} {table_rootpage_by_register=} {table_id=}")
+ print(" columns_by_column_register[column_register] = (table, cid)")
+ print(f" {column_register=} = ({table=}, {cid=})")
pass
- result_row = [dict(r) for r in rows if r["opcode"] == "ResultRow"][0]
- registers = list(range(result_row["p1"], result_row["p1"] + result_row["p2"]))
+ result_row = [dict(r) for r in opcodes if r["opcode"] == "ResultRow"][0]
+ result_registers = list(range(result_row["p1"], result_row["p1"] + result_row["p2"]))
+ print(f"{result_registers=}")
+ print(f"{columns_by_column_register=}")
all_column_names = {}
- for table in names_by_rootpage.values():
+ for (table, _) in names_and_types_by_rootpage.values():
table_xinfo = conn.execute("pragma table_xinfo({})".format(table)).fetchall()
- for row in table_xinfo:
- all_column_names[(table, row["cid"])] = row["name"]
+ for column_info in table_xinfo:
+ all_column_names[(table, column_info["cid"])] = column_info["name"]
+ print(f"{all_column_names=}")
final_output = []
- for r in registers:
+ for register in result_registers:
try:
- table, cid = columns_by_column_register[r]
+ table, cid = columns_by_column_register[register]
final_output.append((table, all_column_names[table, cid]))
except KeyError:
final_output.append((None, None))
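One detail of the rewrite is that sqlite_master rows are now kept as (name, type) pairs rather than names alone; a plausible reason is that a rootpage can belong to an index as well as a table. A small standalone sketch of that lookup (the schema here is illustrative):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.row_factory = sqlite3.Row
    conn.execute("create table cities (id integer primary key, name text)")
    conn.execute("create index cities_name on cities(name)")

    # rootpage -> (name, type); the index shows up here too, which is why
    # keeping the type alongside the name is useful.
    names_and_types_by_rootpage = {
        row["rootpage"]: (row["name"], row["type"])
        for row in conn.execute("select rootpage, name, type from sqlite_master")
    }
    print(names_and_types_by_rootpage)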
From 450ab1a36b0a6d83c37c99d1ee509c686f381eac Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 19 Aug 2021 13:27:34 -0700
Subject: [PATCH 08/10] Applied Black
---
datasette/utils/__init__.py | 27 +++++++++++++++++++--------
1 file changed, 19 insertions(+), 8 deletions(-)
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index d5856087..a477c117 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -1108,11 +1108,14 @@ def columns_for_query(conn, sql, params=None):
}
print(f"{table_rootpage_by_register=}")
names_and_types_by_rootpage = dict(
- [(r[0], (r[1], r[2])) for r in conn.execute(
- "select rootpage, name, type from sqlite_master where rootpage in ({})".format(
- ", ".join(map(str, table_rootpage_by_register.values()))
+ [
+ (r[0], (r[1], r[2]))
+ for r in conn.execute(
+ "select rootpage, name, type from sqlite_master where rootpage in ({})".format(
+ ", ".join(map(str, table_rootpage_by_register.values()))
+ )
)
- )]
+ ]
)
print(f"{names_and_types_by_rootpage=}")
columns_by_column_register = {}
@@ -1122,18 +1125,26 @@ def columns_for_query(conn, sql, params=None):
print(f"{table_id=} {cid=} {column_register=}")
table = None
try:
- table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]
+ table = names_and_types_by_rootpage[
+ table_rootpage_by_register[table_id]
+ ][0]
columns_by_column_register[column_register] = (table, cid)
except KeyError as e:
print(" KeyError")
print(" ", e)
- print(" table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]")
- print(f" {names_and_types_by_rootpage=} {table_rootpage_by_register=} {table_id=}")
+ print(
+ " table = names_and_types_by_rootpage[table_rootpage_by_register[table_id]][0]"
+ )
+ print(
+ f" {names_and_types_by_rootpage=} {table_rootpage_by_register=} {table_id=}"
+ )
print(" columns_by_column_register[column_register] = (table, cid)")
print(f" {column_register=} = ({table=}, {cid=})")
pass
result_row = [dict(r) for r in opcodes if r["opcode"] == "ResultRow"][0]
- result_registers = list(range(result_row["p1"], result_row["p1"] + result_row["p2"]))
+ result_registers = list(
+ range(result_row["p1"], result_row["p1"] + result_row["p2"])
+ )
print(f"{result_registers=}")
print(f"{columns_by_column_register=}")
all_column_names = {}
From a8228b018b64a4f2a0ded70a402374f4ee2ccd93 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 19 Aug 2021 14:09:38 -0700
Subject: [PATCH 09/10] Ability to deploy demos of branches
* Ability to deploy additional branch demos, closes #1442
* Only run tests before deploy on main branch
* Documentation for continuous deployment
---
.github/workflows/deploy-latest.yml | 8 +++++++-
docs/contributing.rst | 11 +++++++++++
2 files changed, 18 insertions(+), 1 deletion(-)
diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml
index 849adb40..1a07503a 100644
--- a/.github/workflows/deploy-latest.yml
+++ b/.github/workflows/deploy-latest.yml
@@ -29,6 +29,7 @@ jobs:
python -m pip install -e .[docs]
python -m pip install sphinx-to-sqlite==0.1a1
- name: Run tests
+ if: ${{ github.ref == 'refs/heads/main' }}
run: |
pytest -n auto -m "not serial"
pytest -m "serial"
@@ -50,6 +51,8 @@ jobs:
run: |-
gcloud config set run/region us-central1
gcloud config set project datasette-222320
+ export SUFFIX="-${GITHUB_REF#refs/heads/}"
+ export SUFFIX=${SUFFIX#-main}
datasette publish cloudrun fixtures.db extra_database.db \
-m fixtures.json \
--plugins-dir=plugins \
@@ -57,7 +60,10 @@ jobs:
--version-note=$GITHUB_SHA \
--extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \
--install=pysqlite3-binary \
- --service=datasette-latest
+ --service "datasette-latest$SUFFIX"
+ - name: Deploy to docs as well (only for main)
+ if: ${{ github.ref == 'refs/heads/main' }}
+ run: |-
# Deploy docs.db to a different service
datasette publish cloudrun docs.db \
--branch=$GITHUB_SHA \
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 8a638e0b..07f2a0e4 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -202,6 +202,17 @@ For added productivity, you can use use `sphinx-autobuild `__
+The demo instance at `latest.datasette.io `__ is re-deployed automatically to Google Cloud Run for every push to ``main`` that passes the test suite. This is implemented by the GitHub Actions workflow at `.github/workflows/deploy-latest.yml `__.
+
+Specific branches can also be set to automatically deploy by adding them to the ``on: push: branches`` block at the top of the workflow YAML file. Branches configured in this way will be deployed to a new Cloud Run service whether or not their tests pass.
+
+The Cloud Run URL for a branch demo can be found in the GitHub Actions logs.
+
.. _contributing_release:
Release process
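The two SUFFIX lines in the workflow above map a branch name onto a Cloud Run service name. A Python equivalent of that shell parameter expansion, purely for illustration (requires Python 3.9+ for str.removeprefix):

    def service_suffix(github_ref: str) -> str:
        # Mirrors: SUFFIX="-${GITHUB_REF#refs/heads/}" then SUFFIX=${SUFFIX#-main}
        suffix = "-" + github_ref.removeprefix("refs/heads/")
        return suffix.removeprefix("-main")

    assert service_suffix("refs/heads/main") == ""  # -> datasette-latest
    assert service_suffix("refs/heads/query-info") == "-query-info"  # -> datasette-latest-query-info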
From 281c0872d5b8a462c9d7b2b2d77a924da4ed25a7 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 19 Aug 2021 14:15:45 -0700
Subject: [PATCH 10/10] Deploy this as a preview
---
.github/workflows/deploy-latest.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml
index 1a07503a..2ecb3924 100644
--- a/.github/workflows/deploy-latest.yml
+++ b/.github/workflows/deploy-latest.yml
@@ -4,6 +4,7 @@ on:
push:
branches:
- main
+ - query-info
jobs:
deploy: