From 09033c08bec8555e0e893e077afa10a7a75d7d35 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 29 Nov 2020 12:13:16 -0800 Subject: [PATCH 0001/1404] Suggest --load-extension=spatialite, closes #1115 --- datasette/cli.py | 12 ++++++++++-- tests/test_cli.py | 29 ++++++++++++++++++++++------- 2 files changed, 32 insertions(+), 9 deletions(-) diff --git a/datasette/cli.py b/datasette/cli.py index 5feab51e..e84695e3 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -16,6 +16,7 @@ from .app import Datasette, DEFAULT_SETTINGS, SETTINGS, pm from .utils import ( StartupError, check_connection, + find_spatialite, parse_metadata, ConnectionProblem, SpatialiteConnectionProblem, @@ -537,10 +538,17 @@ async def check_databases(ds): try: await database.execute_fn(check_connection) except SpatialiteConnectionProblem: + suggestion = "" + try: + find_spatialite() + suggestion = "\n\nTry adding the --load-extension=spatialite option." + except SpatialiteNotFound: + pass raise click.UsageError( "It looks like you're trying to load a SpatiaLite" - " database without first loading the SpatiaLite module." - "\n\nRead more: https://docs.datasette.io/en/stable/spatialite.html" + + " database without first loading the SpatiaLite module." + + suggestion + + "\n\nRead more: https://docs.datasette.io/en/stable/spatialite.html" ) except ConnectionProblem as e: raise click.UsageError( diff --git a/tests/test_cli.py b/tests/test_cli.py index 36b9a092..409408ae 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -59,13 +59,28 @@ def test_serve_with_inspect_file_prepopulates_table_counts_cache(): assert {"hithere": 44} == db.cached_table_counts -def test_spatialite_error_if_attempt_to_open_spatialite(): - runner = CliRunner() - result = runner.invoke( - cli, ["serve", str(pathlib.Path(__file__).parent / "spatialite.db")] - ) - assert result.exit_code != 0 - assert "trying to load a SpatiaLite database" in result.output +@pytest.mark.parametrize( + "spatialite_paths,should_suggest_load_extension", + ( + ([], False), + (["/tmp"], True), + ), +) +def test_spatialite_error_if_attempt_to_open_spatialite( + spatialite_paths, should_suggest_load_extension +): + with mock.patch("datasette.utils.SPATIALITE_PATHS", spatialite_paths): + runner = CliRunner() + result = runner.invoke( + cli, ["serve", str(pathlib.Path(__file__).parent / "spatialite.db")] + ) + assert result.exit_code != 0 + assert "It looks like you're trying to load a SpatiaLite" in result.output + suggestion = "--load-extension=spatialite" + if should_suggest_load_extension: + assert suggestion in result.output + else: + assert suggestion not in result.output @mock.patch("datasette.utils.SPATIALITE_PATHS", ["/does/not/exist"]) From 4777362bf2692bc72b221ec47c3e6216151d1b89 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sun, 29 Nov 2020 12:19:24 -0800 Subject: [PATCH 0002/1404] Work around CI bug with ensure_eventloop, refs #1115 --- tests/test_cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_cli.py b/tests/test_cli.py index 409408ae..c52960fb 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -67,7 +67,7 @@ def test_serve_with_inspect_file_prepopulates_table_counts_cache(): ), ) def test_spatialite_error_if_attempt_to_open_spatialite( - spatialite_paths, should_suggest_load_extension + ensure_eventloop, spatialite_paths, should_suggest_load_extension ): with mock.patch("datasette.utils.SPATIALITE_PATHS", spatialite_paths): runner = CliRunner() From c745c2715ab5933d7629a76bab4684632383f807 Mon Sep 17 
00:00:00 2001 From: Simon Willison Date: Sun, 29 Nov 2020 12:27:34 -0800 Subject: [PATCH 0003/1404] Moved comment for clarity --- datasette/database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/database.py b/datasette/database.py index ea1424a5..71c45ba0 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -247,12 +247,12 @@ class Database: ) if explicit_label_column: return explicit_label_column - # If a table has two columns, one of which is ID, then label_column is the other one column_names = await self.execute_fn(lambda conn: table_columns(conn, table)) # Is there a name or title column? name_or_title = [c for c in column_names if c in ("name", "title")] if name_or_title: return name_or_title[0] + # If a table has two columns, one of which is ID, then label_column is the other one if ( column_names and len(column_names) == 2 From 37f87b5e52e7f8ddd1c4ffcf368bd7a62a406a6d Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 12:01:15 -0800 Subject: [PATCH 0004/1404] Support for generated columns, closes #1116 --- datasette/utils/__init__.py | 12 +++---- tests/test_api.py | 55 ++++++++++++++++++++++++++++++-- tests/test_internals_database.py | 17 ++++++++++ 3 files changed, 76 insertions(+), 8 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index d467383d..28df2ef1 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -64,7 +64,7 @@ HASH_LENGTH = 7 # Can replace this with Column from sqlite_utils when I add that dependency Column = namedtuple( - "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk") + "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk", "hidden") ) @@ -460,11 +460,11 @@ def detect_primary_keys(conn, table): " Figure out primary keys for a table. " table_info_rows = [ row - for row in conn.execute(f'PRAGMA table_info("{table}")').fetchall() - if row[-1] + for row in conn.execute(f'PRAGMA table_xinfo("{table}")').fetchall() + if row["pk"] ] - table_info_rows.sort(key=lambda row: row[-1]) - return [str(r[1]) for r in table_info_rows] + table_info_rows.sort(key=lambda row: row["pk"]) + return [str(r["name"]) for r in table_info_rows] def get_outbound_foreign_keys(conn, table): @@ -572,7 +572,7 @@ def table_columns(conn, table): def table_column_details(conn, table): return [ Column(*r) - for r in conn.execute(f"PRAGMA table_info({escape_sqlite(table)});").fetchall() + for r in conn.execute(f"PRAGMA table_xinfo({escape_sqlite(table)});").fetchall() ] diff --git a/tests/test_api.py b/tests/test_api.py index 848daf9c..ebe50d10 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,5 +1,6 @@ +from datasette.app import Datasette from datasette.plugins import DEFAULT_PLUGINS -from datasette.utils import detect_json1 +from datasette.utils import detect_json1, sqlite3 from datasette.version import __version__ from .fixtures import ( # noqa app_client, @@ -514,7 +515,14 @@ def test_database_page(app_client): }, { "name": "searchable_fts", - "columns": ["text1", "text2", "name with . and spaces"], + "columns": [ + "text1", + "text2", + "name with . 
and spaces", + "searchable_fts", + "docid", + "__langid", + ], "primary_keys": [], "count": 2, "hidden": True, @@ -1913,3 +1921,46 @@ def test_paginate_using_link_header(app_client, qs): else: path = None assert num_pages == 21 + + +@pytest.mark.skipif( + tuple( + map( + int, + sqlite3.connect(":memory:") + .execute("select sqlite_version()") + .fetchone()[0] + .split("."), + ) + ) + < (3, 31, 0), + reason="generated columns were added in SQLite 3.31.0", +) +@pytest.mark.asyncio +async def test_generated_columns_are_visible_in_datasette(tmp_path_factory): + db_directory = tmp_path_factory.mktemp("dbs") + db_path = db_directory / "test.db" + conn = sqlite3.connect(str(db_path)) + conn.executescript( + """ + CREATE TABLE deeds ( + body TEXT, + id INT GENERATED ALWAYS AS (json_extract(body, '$.id')) STORED, + consideration INT GENERATED ALWAYS AS (json_extract(body, '$.consideration')) STORED + ); + INSERT INTO deeds (body) VALUES ('{ + "id": 1, + "consideration": "This is the consideration" + }'); + """ + ) + datasette = Datasette([db_path]) + response = await datasette.client.get("/test/deeds.json?_shape=array") + assert response.json() == [ + { + "rowid": 1, + "body": '{\n "id": 1,\n "consideration": "This is the consideration"\n }', + "id": 1, + "consideration": "This is the consideration", + } + ] diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 7c8f478c..56397dab 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -120,6 +120,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, + hidden=0, ), Column( cid=1, @@ -128,6 +129,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=2, @@ -136,6 +138,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=3, @@ -144,6 +147,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=4, @@ -152,6 +156,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=5, @@ -160,6 +165,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=6, @@ -168,6 +174,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=7, @@ -176,6 +183,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=8, @@ -184,6 +192,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=9, @@ -192,6 +201,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), ], ), @@ -205,6 +215,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, + hidden=0, ), Column( cid=1, @@ -213,6 +224,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=2, + hidden=0, ), Column( cid=2, @@ -221,6 +233,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=3, @@ -229,6 +242,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=4, @@ -237,6 +251,7 @@ async def test_table_columns(db, table, expected): 
notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=5, @@ -245,6 +260,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=6, @@ -253,6 +269,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), ], ), From dea3c508b39528e566d711c38a467b3d372d220b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 12:09:22 -0800 Subject: [PATCH 0005/1404] Revert "Support for generated columns, closes #1116" - it failed CI This reverts commit 37f87b5e52e7f8ddd1c4ffcf368bd7a62a406a6d. --- datasette/utils/__init__.py | 12 +++---- tests/test_api.py | 55 ++------------------------------ tests/test_internals_database.py | 17 ---------- 3 files changed, 8 insertions(+), 76 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 28df2ef1..d467383d 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -64,7 +64,7 @@ HASH_LENGTH = 7 # Can replace this with Column from sqlite_utils when I add that dependency Column = namedtuple( - "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk", "hidden") + "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk") ) @@ -460,11 +460,11 @@ def detect_primary_keys(conn, table): " Figure out primary keys for a table. " table_info_rows = [ row - for row in conn.execute(f'PRAGMA table_xinfo("{table}")').fetchall() - if row["pk"] + for row in conn.execute(f'PRAGMA table_info("{table}")').fetchall() + if row[-1] ] - table_info_rows.sort(key=lambda row: row["pk"]) - return [str(r["name"]) for r in table_info_rows] + table_info_rows.sort(key=lambda row: row[-1]) + return [str(r[1]) for r in table_info_rows] def get_outbound_foreign_keys(conn, table): @@ -572,7 +572,7 @@ def table_columns(conn, table): def table_column_details(conn, table): return [ Column(*r) - for r in conn.execute(f"PRAGMA table_xinfo({escape_sqlite(table)});").fetchall() + for r in conn.execute(f"PRAGMA table_info({escape_sqlite(table)});").fetchall() ] diff --git a/tests/test_api.py b/tests/test_api.py index ebe50d10..848daf9c 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,6 +1,5 @@ -from datasette.app import Datasette from datasette.plugins import DEFAULT_PLUGINS -from datasette.utils import detect_json1, sqlite3 +from datasette.utils import detect_json1 from datasette.version import __version__ from .fixtures import ( # noqa app_client, @@ -515,14 +514,7 @@ def test_database_page(app_client): }, { "name": "searchable_fts", - "columns": [ - "text1", - "text2", - "name with . and spaces", - "searchable_fts", - "docid", - "__langid", - ], + "columns": ["text1", "text2", "name with . 
and spaces"], "primary_keys": [], "count": 2, "hidden": True, @@ -1921,46 +1913,3 @@ def test_paginate_using_link_header(app_client, qs): else: path = None assert num_pages == 21 - - -@pytest.mark.skipif( - tuple( - map( - int, - sqlite3.connect(":memory:") - .execute("select sqlite_version()") - .fetchone()[0] - .split("."), - ) - ) - < (3, 31, 0), - reason="generated columns were added in SQLite 3.31.0", -) -@pytest.mark.asyncio -async def test_generated_columns_are_visible_in_datasette(tmp_path_factory): - db_directory = tmp_path_factory.mktemp("dbs") - db_path = db_directory / "test.db" - conn = sqlite3.connect(str(db_path)) - conn.executescript( - """ - CREATE TABLE deeds ( - body TEXT, - id INT GENERATED ALWAYS AS (json_extract(body, '$.id')) STORED, - consideration INT GENERATED ALWAYS AS (json_extract(body, '$.consideration')) STORED - ); - INSERT INTO deeds (body) VALUES ('{ - "id": 1, - "consideration": "This is the consideration" - }'); - """ - ) - datasette = Datasette([db_path]) - response = await datasette.client.get("/test/deeds.json?_shape=array") - assert response.json() == [ - { - "rowid": 1, - "body": '{\n "id": 1,\n "consideration": "This is the consideration"\n }', - "id": 1, - "consideration": "This is the consideration", - } - ] diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 56397dab..7c8f478c 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -120,7 +120,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, - hidden=0, ), Column( cid=1, @@ -129,7 +128,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=2, @@ -138,7 +136,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=3, @@ -147,7 +144,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=4, @@ -156,7 +152,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=5, @@ -165,7 +160,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=6, @@ -174,7 +168,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=7, @@ -183,7 +176,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=8, @@ -192,7 +184,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=9, @@ -201,7 +192,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), ], ), @@ -215,7 +205,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, - hidden=0, ), Column( cid=1, @@ -224,7 +213,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=2, - hidden=0, ), Column( cid=2, @@ -233,7 +221,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=3, @@ -242,7 +229,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=4, @@ -251,7 +237,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), 
Column( cid=5, @@ -260,7 +245,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), Column( cid=6, @@ -269,7 +253,6 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, - hidden=0, ), ], ), From 49b6297fb7513291110d86688c688700e6f6d9cc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 13:24:23 -0800 Subject: [PATCH 0006/1404] Typo fix: messagge_is_html, closes #1118 --- datasette/app.py | 2 +- datasette/views/base.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 88d5ecc6..922046d5 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1154,7 +1154,7 @@ class DatasetteRouter: status = exception.status info = exception.error_dict message = exception.message - if exception.messagge_is_html: + if exception.message_is_html: message = Markup(message) title = exception.title else: diff --git a/datasette/views/base.py b/datasette/views/base.py index bde8449f..5ba8fcb1 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -42,13 +42,13 @@ class DatasetteError(Exception): error_dict=None, status=500, template=None, - messagge_is_html=False, + message_is_html=False, ): self.message = message self.title = title self.error_dict = error_dict or {} self.status = status - self.messagge_is_html = messagge_is_html + self.message_is_html = message_is_html class BaseView: @@ -441,7 +441,7 @@ class DataView(BaseView): """, title="SQL Interrupted", status=400, - messagge_is_html=True, + message_is_html=True, ) except (sqlite3.OperationalError, InvalidSql) as e: raise DatasetteError(str(e), title="Invalid SQL", status=400) From 461670a0b87efa953141b449a9a261919864ceb3 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 13:29:57 -0800 Subject: [PATCH 0007/1404] Support for generated columns * Support for generated columns, closes #1116 * Show SQLite version in pytest report header * Use table_info() if SQLite < 3.26.0 * Cache sqlite_version() rather than re-calculate every time * Adjust test_database_page for SQLite 3.26.0 or higher --- datasette/utils/__init__.py | 41 +++++++++++++------------ datasette/utils/sqlite.py | 28 ++++++++++++++++++ tests/conftest.py | 11 +++++++ tests/fixtures.py | 2 +- tests/test_api.py | 51 +++++++++++++++++++++++++++++++- tests/test_config_dir.py | 2 +- tests/test_internals_database.py | 20 ++++++++++++- tests/test_plugins.py | 4 +-- tests/test_utils.py | 2 +- 9 files changed, 135 insertions(+), 26 deletions(-) create mode 100644 datasette/utils/sqlite.py diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index d467383d..b951539d 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -19,15 +19,9 @@ import urllib import numbers import yaml from .shutil_backport import copytree +from .sqlite import sqlite3, sqlite_version from ..plugins import pm -try: - import pysqlite3 as sqlite3 -except ImportError: - import sqlite3 - -if hasattr(sqlite3, "enable_callback_tracebacks"): - sqlite3.enable_callback_tracebacks(True) # From https://www.sqlite.org/lang_keywords.html reserved_words = set( @@ -64,7 +58,7 @@ HASH_LENGTH = 7 # Can replace this with Column from sqlite_utils when I add that dependency Column = namedtuple( - "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk") + "Column", ("cid", "name", "type", "notnull", "default_value", "is_pk", "hidden") ) @@ -458,13 +452,10 @@ def temporary_docker_directory( def 
detect_primary_keys(conn, table): " Figure out primary keys for a table. " - table_info_rows = [ - row - for row in conn.execute(f'PRAGMA table_info("{table}")').fetchall() - if row[-1] - ] - table_info_rows.sort(key=lambda row: row[-1]) - return [str(r[1]) for r in table_info_rows] + columns = table_column_details(conn, table) + pks = [column for column in columns if column.is_pk] + pks.sort(key=lambda column: column.is_pk) + return [column.name for column in pks] def get_outbound_foreign_keys(conn, table): @@ -570,10 +561,22 @@ def table_columns(conn, table): def table_column_details(conn, table): - return [ - Column(*r) - for r in conn.execute(f"PRAGMA table_info({escape_sqlite(table)});").fetchall() - ] + if sqlite_version() >= (3, 26, 0): + # table_xinfo was added in 3.26.0 + return [ + Column(*r) + for r in conn.execute( + f"PRAGMA table_xinfo({escape_sqlite(table)});" + ).fetchall() + ] + else: + # Treat hidden as 0 for all columns + return [ + Column(*(list(r) + [0])) + for r in conn.execute( + f"PRAGMA table_info({escape_sqlite(table)});" + ).fetchall() + ] filter_column_re = re.compile(r"^_filter_column_\d+$") diff --git a/datasette/utils/sqlite.py b/datasette/utils/sqlite.py new file mode 100644 index 00000000..9a043ccd --- /dev/null +++ b/datasette/utils/sqlite.py @@ -0,0 +1,28 @@ +try: + import pysqlite3 as sqlite3 +except ImportError: + import sqlite3 + +if hasattr(sqlite3, "enable_callback_tracebacks"): + sqlite3.enable_callback_tracebacks(True) + +_cached_sqlite_version = None + + +def sqlite_version(): + global _cached_sqlite_version + if _cached_sqlite_version is None: + _cached_sqlite_version = _sqlite_version() + return _cached_sqlite_version + + +def _sqlite_version(): + return tuple( + map( + int, + sqlite3.connect(":memory:") + .execute("select sqlite_version()") + .fetchone()[0] + .split("."), + ) + ) diff --git a/tests/conftest.py b/tests/conftest.py index 91b811e2..a963a4fd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,6 +3,11 @@ import pathlib import pytest import re +try: + import pysqlite3 as sqlite3 +except ImportError: + import sqlite3 + UNDOCUMENTED_PERMISSIONS = { "this_is_allowed", "this_is_denied", @@ -12,6 +17,12 @@ UNDOCUMENTED_PERMISSIONS = { } +def pytest_report_header(config): + return "SQLite: {}".format( + sqlite3.connect(":memory:").execute("select sqlite_version()").fetchone()[0] + ) + + def pytest_configure(config): import sys diff --git a/tests/fixtures.py b/tests/fixtures.py index f95a2d6b..b0c98f39 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -1,5 +1,5 @@ from datasette.app import Datasette -from datasette.utils import sqlite3 +from datasette.utils.sqlite import sqlite3 from datasette.utils.testing import TestClient import click import contextlib diff --git a/tests/test_api.py b/tests/test_api.py index 848daf9c..5676622e 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,5 +1,7 @@ +from datasette.app import Datasette from datasette.plugins import DEFAULT_PLUGINS from datasette.utils import detect_json1 +from datasette.utils.sqlite import sqlite3, sqlite_version from datasette.version import __version__ from .fixtures import ( # noqa app_client, @@ -514,7 +516,20 @@ def test_database_page(app_client): }, { "name": "searchable_fts", - "columns": ["text1", "text2", "name with . and spaces"], + "columns": [ + "text1", + "text2", + "name with . 
and spaces", + ] + + ( + [ + "searchable_fts", + "docid", + "__langid", + ] + if sqlite_version() >= (3, 26, 0) + else [] + ), "primary_keys": [], "count": 2, "hidden": True, @@ -1913,3 +1928,37 @@ def test_paginate_using_link_header(app_client, qs): else: path = None assert num_pages == 21 + + +@pytest.mark.skipif( + sqlite_version() < (3, 31, 0), + reason="generated columns were added in SQLite 3.31.0", +) +@pytest.mark.asyncio +async def test_generated_columns_are_visible_in_datasette(tmp_path_factory): + db_directory = tmp_path_factory.mktemp("dbs") + db_path = db_directory / "test.db" + conn = sqlite3.connect(str(db_path)) + conn.executescript( + """ + CREATE TABLE deeds ( + body TEXT, + id INT GENERATED ALWAYS AS (json_extract(body, '$.id')) STORED, + consideration INT GENERATED ALWAYS AS (json_extract(body, '$.consideration')) STORED + ); + INSERT INTO deeds (body) VALUES ('{ + "id": 1, + "consideration": "This is the consideration" + }'); + """ + ) + datasette = Datasette([db_path]) + response = await datasette.client.get("/test/deeds.json?_shape=array") + assert response.json() == [ + { + "rowid": 1, + "body": '{\n "id": 1,\n "consideration": "This is the consideration"\n }', + "id": 1, + "consideration": "This is the consideration", + } + ] diff --git a/tests/test_config_dir.py b/tests/test_config_dir.py index cd158474..015c6ace 100644 --- a/tests/test_config_dir.py +++ b/tests/test_config_dir.py @@ -1,9 +1,9 @@ import json import pytest -import sqlite3 from datasette.app import Datasette from datasette.cli import cli +from datasette.utils.sqlite import sqlite3 from .fixtures import TestClient as _TestClient from click.testing import CliRunner diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index 7c8f478c..e50cf20e 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -2,7 +2,8 @@ Tests for the datasette.database.Database class """ from datasette.database import Database, Results, MultipleValues -from datasette.utils import sqlite3, Column +from datasette.utils.sqlite import sqlite3 +from datasette.utils import Column from .fixtures import app_client import pytest import time @@ -120,6 +121,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, + hidden=0, ), Column( cid=1, @@ -128,6 +130,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=2, @@ -136,6 +139,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=3, @@ -144,6 +148,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=4, @@ -152,6 +157,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=5, @@ -160,6 +166,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=6, @@ -168,6 +175,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=7, @@ -176,6 +184,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=8, @@ -184,6 +193,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=9, @@ -192,6 +202,7 @@ async def test_table_columns(db, table, expected): notnull=0, 
default_value=None, is_pk=0, + hidden=0, ), ], ), @@ -205,6 +216,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=1, + hidden=0, ), Column( cid=1, @@ -213,6 +225,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=2, + hidden=0, ), Column( cid=2, @@ -221,6 +234,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=3, @@ -229,6 +243,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=4, @@ -237,6 +252,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=5, @@ -245,6 +261,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), Column( cid=6, @@ -253,6 +270,7 @@ async def test_table_columns(db, table, expected): notnull=0, default_value=None, is_pk=0, + hidden=0, ), ], ), diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 51faeccb..4554cfd4 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -9,14 +9,14 @@ from .fixtures import ( from datasette.app import Datasette from datasette import cli from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm -from datasette.utils import sqlite3, CustomRow +from datasette.utils.sqlite import sqlite3 +from datasette.utils import CustomRow from jinja2.environment import Template import base64 import json import os import pathlib import re -import sqlite3 import textwrap import pytest import urllib diff --git a/tests/test_utils.py b/tests/test_utils.py index 07e6f870..56306339 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -4,11 +4,11 @@ Tests for various datasette helper functions. 
from datasette.app import Datasette from datasette import utils from datasette.utils.asgi import Request +from datasette.utils.sqlite import sqlite3 import json import os import pathlib import pytest -import sqlite3 import tempfile from unittest.mock import patch From 17cbbb1f7f230b39650afac62dd16476626001b5 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 16:28:02 -0800 Subject: [PATCH 0008/1404] generated_columns table in fixtures.py, closes #1119 --- datasette/utils/__init__.py | 4 +- datasette/utils/sqlite.py | 8 ++++ tests/fixtures.py | 19 ++++++++- tests/test_api.py | 51 ++++++++++++----------- tests/test_internals_database.py | 70 +++++++++++++++++--------------- tests/test_plugins.py | 6 +-- 6 files changed, 93 insertions(+), 65 deletions(-) diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index b951539d..2576090a 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -19,7 +19,7 @@ import urllib import numbers import yaml from .shutil_backport import copytree -from .sqlite import sqlite3, sqlite_version +from .sqlite import sqlite3, sqlite_version, supports_table_xinfo from ..plugins import pm @@ -561,7 +561,7 @@ def table_columns(conn, table): def table_column_details(conn, table): - if sqlite_version() >= (3, 26, 0): + if supports_table_xinfo(): # table_xinfo was added in 3.26.0 return [ Column(*r) diff --git a/datasette/utils/sqlite.py b/datasette/utils/sqlite.py index 9a043ccd..c8522f35 100644 --- a/datasette/utils/sqlite.py +++ b/datasette/utils/sqlite.py @@ -26,3 +26,11 @@ def _sqlite_version(): .split("."), ) ) + + +def supports_table_xinfo(): + return sqlite_version() >= (3, 26, 0) + + +def supports_generated_columns(): + return sqlite_version() >= (3, 31, 0) diff --git a/tests/fixtures.py b/tests/fixtures.py index b0c98f39..b52a531f 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -1,5 +1,5 @@ from datasette.app import Datasette -from datasette.utils.sqlite import sqlite3 +from datasette.utils.sqlite import sqlite3, sqlite_version, supports_generated_columns from datasette.utils.testing import TestClient import click import contextlib @@ -116,6 +116,8 @@ def make_app_client( immutables = [] conn = sqlite3.connect(filepath) conn.executescript(TABLES) + if supports_generated_columns(): + conn.executescript(GENERATED_COLUMNS_SQL) for sql, params in TABLE_PARAMETERIZED_SQL: with conn: conn.execute(sql, params) @@ -699,6 +701,18 @@ INSERT INTO "searchable_fts" (rowid, text1, text2) SELECT rowid, text1, text2 FROM searchable; """ +GENERATED_COLUMNS_SQL = """ +CREATE TABLE generated_columns ( + body TEXT, + id INT GENERATED ALWAYS AS (json_extract(body, '$.number')) STORED, + consideration INT GENERATED ALWAYS AS (json_extract(body, '$.string')) STORED +); +INSERT INTO generated_columns (body) VALUES ('{ + "number": 1, + "string": "This is a string" +}'); +""" + def assert_permissions_checked(datasette, actions): # actions is a list of "action" or (action, resource) tuples @@ -754,6 +768,9 @@ def cli(db_filename, metadata, plugins_path, recreate): for sql, params in TABLE_PARAMETERIZED_SQL: with conn: conn.execute(sql, params) + if supports_generated_columns(): + with conn: + conn.executescript(GENERATED_COLUMNS_SQL) print(f"Test tables written to {db_filename}") if metadata: open(metadata, "w").write(json.dumps(METADATA, indent=4)) diff --git a/tests/test_api.py b/tests/test_api.py index 5676622e..f82a8fe9 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,7 +1,7 @@ from datasette.app import 
Datasette from datasette.plugins import DEFAULT_PLUGINS from datasette.utils import detect_json1 -from datasette.utils.sqlite import sqlite3, sqlite_version +from datasette.utils.sqlite import sqlite3, sqlite_version, supports_table_xinfo from datasette.version import __version__ from .fixtures import ( # noqa app_client, @@ -19,6 +19,7 @@ from .fixtures import ( # noqa generate_compound_rows, generate_sortable_rows, make_app_client, + supports_generated_columns, EXPECTED_PLUGINS, METADATA, ) @@ -35,7 +36,7 @@ def test_homepage(app_client): assert response.json.keys() == {"fixtures": 0}.keys() d = response.json["fixtures"] assert d["name"] == "fixtures" - assert d["tables_count"] == 24 + assert d["tables_count"] == 25 if supports_generated_columns() else 24 assert len(d["tables_and_views_truncated"]) == 5 assert d["tables_and_views_more"] is True # 4 hidden FTS tables + no_primary_key (hidden in metadata) @@ -268,6 +269,22 @@ def test_database_page(app_client): }, "private": False, }, + ] + ( + [ + { + "columns": ["body", "id", "consideration"], + "count": 1, + "foreign_keys": {"incoming": [], "outgoing": []}, + "fts_table": None, + "hidden": False, + "name": "generated_columns", + "primary_keys": [], + "private": False, + } + ] + if supports_generated_columns() + else [] + ) + [ { "name": "infinity", "columns": ["value"], @@ -527,7 +544,7 @@ def test_database_page(app_client): "docid", "__langid", ] - if sqlite_version() >= (3, 26, 0) + if supports_table_xinfo() else [] ), "primary_keys": [], @@ -1934,31 +1951,13 @@ def test_paginate_using_link_header(app_client, qs): sqlite_version() < (3, 31, 0), reason="generated columns were added in SQLite 3.31.0", ) -@pytest.mark.asyncio -async def test_generated_columns_are_visible_in_datasette(tmp_path_factory): - db_directory = tmp_path_factory.mktemp("dbs") - db_path = db_directory / "test.db" - conn = sqlite3.connect(str(db_path)) - conn.executescript( - """ - CREATE TABLE deeds ( - body TEXT, - id INT GENERATED ALWAYS AS (json_extract(body, '$.id')) STORED, - consideration INT GENERATED ALWAYS AS (json_extract(body, '$.consideration')) STORED - ); - INSERT INTO deeds (body) VALUES ('{ - "id": 1, - "consideration": "This is the consideration" - }'); - """ - ) - datasette = Datasette([db_path]) - response = await datasette.client.get("/test/deeds.json?_shape=array") +async def test_generated_columns_are_visible_in_datasette(app_client): + response = app_client.get("/test/generated_columns.json?_shape=array") assert response.json() == [ { "rowid": 1, - "body": '{\n "id": 1,\n "consideration": "This is the consideration"\n }', - "id": 1, - "consideration": "This is the consideration", + "body": '{\n "number": 1,\n "string": "This is a string"\n }', + "number": 1, + "string": "This is a string", } ] diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py index e50cf20e..49b8a1b3 100644 --- a/tests/test_internals_database.py +++ b/tests/test_internals_database.py @@ -2,7 +2,7 @@ Tests for the datasette.database.Database class """ from datasette.database import Database, Results, MultipleValues -from datasette.utils.sqlite import sqlite3 +from datasette.utils.sqlite import sqlite3, supports_generated_columns from datasette.utils import Column from .fixtures import app_client import pytest @@ -340,38 +340,42 @@ async def test_get_all_foreign_keys(db): @pytest.mark.asyncio async def test_table_names(db): table_names = await db.table_names() - assert table_names == [ - "simple_primary_key", - "primary_key_multiple_columns", - 
"primary_key_multiple_columns_explicit_label", - "compound_primary_key", - "compound_three_primary_keys", - "foreign_key_references", - "sortable", - "no_primary_key", - "123_starts_with_digits", - "Table With Space In Name", - "table/with/slashes.csv", - "complex_foreign_keys", - "custom_foreign_key_label", - "units", - "tags", - "searchable", - "searchable_tags", - "searchable_fts", - "searchable_fts_segments", - "searchable_fts_segdir", - "searchable_fts_docsize", - "searchable_fts_stat", - "select", - "infinity", - "facet_cities", - "facetable", - "binary_data", - "roadside_attractions", - "attraction_characteristic", - "roadside_attraction_characteristics", - ] + assert ( + table_names + == [ + "simple_primary_key", + "primary_key_multiple_columns", + "primary_key_multiple_columns_explicit_label", + "compound_primary_key", + "compound_three_primary_keys", + "foreign_key_references", + "sortable", + "no_primary_key", + "123_starts_with_digits", + "Table With Space In Name", + "table/with/slashes.csv", + "complex_foreign_keys", + "custom_foreign_key_label", + "units", + "tags", + "searchable", + "searchable_tags", + "searchable_fts", + "searchable_fts_segments", + "searchable_fts_segdir", + "searchable_fts_docsize", + "searchable_fts_stat", + "select", + "infinity", + "facet_cities", + "facetable", + "binary_data", + "roadside_attractions", + "attraction_characteristic", + "roadside_attraction_characteristics", + ] + + (["generated_columns"] if supports_generated_columns() else []) + ) @pytest.mark.asyncio diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 4554cfd4..dab5ef68 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -413,8 +413,7 @@ def test_hook_register_output_renderer_all_parameters(app_client): # Lots of 'at 0x103a4a690' in here - replace those so we can do # an easy comparison body = at_memory_re.sub(" at 0xXXX", response.text) - assert { - "1+1": 2, + assert json.loads(body) == { "datasette": "", "columns": [ "pk", @@ -451,7 +450,8 @@ def test_hook_register_output_renderer_all_parameters(app_client): "table": "facetable", "request": "", "view_name": "table", - } == json.loads(body) + "1+1": 2, + } # Test that query_name is set correctly query_response = app_client.get("/fixtures/pragma_cache_size.testall") assert "pragma_cache_size" == json.loads(query_response.body)["query_name"] From a970276b9999687b96c5e11ea1c817d814f5d267 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 30 Nov 2020 17:19:09 -0800 Subject: [PATCH 0009/1404] Try pysqlite3 on latest.datasette.io --install=pysqlite3-binary to get a working demo of generated columns, refs #1119 --- .github/workflows/deploy-latest.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 7a41bda2..05f0bad1 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -54,6 +54,7 @@ jobs: --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1" \ + --install=pysqlite3-binary \ --service=datasette-latest # Deploy docs.db to a different service datasette publish cloudrun docs.db \ From 88ac538b41a4753c3de9b509c3a0e13077f66182 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 2 Dec 2020 15:47:37 -0800 Subject: [PATCH 0010/1404] transfer-encoding: chunked for DB downloads, refs #749 This should get >32MB downloads working on Cloud Run. 
--- datasette/views/database.py | 1 + tests/test_html.py | 1 + 2 files changed, 2 insertions(+) diff --git a/datasette/views/database.py b/datasette/views/database.py index 17c78150..f6fd579c 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -169,6 +169,7 @@ class DatabaseDownload(DataView): headers = {} if self.ds.cors: headers["Access-Control-Allow-Origin"] = "*" + headers["Transfer-Encoding"] = "chunked" return AsgiFileDownload( filepath, filename=os.path.basename(filepath), diff --git a/tests/test_html.py b/tests/test_html.py index ecbf89b4..b9d3afcd 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -1028,6 +1028,7 @@ def test_database_download_for_immutable(): download_response.headers["content-disposition"] == 'attachment; filename="fixtures.db"' ) + assert download_response.headers["transfer-encoding"] == "chunked" def test_database_download_disallowed_for_mutable(app_client): From daae35be46ec5cb8a207aa20986a4fa62e94777e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Abdussamet=20Ko=C3=A7ak?= Date: Thu, 3 Dec 2020 03:33:36 +0300 Subject: [PATCH 0011/1404] Fix misaligned table actions cog Closes #1121. Thanks, @abdusco --- datasette/static/app.css | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/datasette/static/app.css b/datasette/static/app.css index b9378a9e..9e498ab9 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -163,6 +163,8 @@ h6, } .page-header { + display: flex; + align-items: center; padding-left: 10px; border-left: 10px solid #666; margin-bottom: 0.75rem; @@ -175,11 +177,11 @@ h6, padding-right: 0.2em; } .page-header details { - display: inline; + display: inline-flex; } .page-header details > summary { list-style: none; - display: inline; + display: inline-flex; cursor: pointer; } .page-header details > summary::-webkit-details-marker { From a45a3dff3ea01a2382dcedae5923a7b821a12aec Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 2 Dec 2020 16:44:03 -0800 Subject: [PATCH 0012/1404] Fix for OPTIONS request against /db, closes #1100 --- datasette/utils/testing.py | 23 +++++++++++++++++++++++ datasette/views/base.py | 2 +- tests/test_api.py | 6 ++++++ 3 files changed, 30 insertions(+), 1 deletion(-) diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py index bcbc1c7a..57b19ea5 100644 --- a/datasette/utils/testing.py +++ b/datasette/utils/testing.py @@ -99,6 +99,29 @@ class TestClient: content_type=content_type, ) + @async_to_sync + async def request( + self, + path, + allow_redirects=True, + redirect_count=0, + method="GET", + cookies=None, + headers=None, + post_body=None, + content_type=None, + ): + return await self._request( + path, + allow_redirects=allow_redirects, + redirect_count=redirect_count, + method=method, + cookies=cookies, + headers=headers, + post_body=post_body, + content_type=content_type, + ) + async def _request( self, path, diff --git a/datasette/views/base.py b/datasette/views/base.py index 5ba8fcb1..a93a6378 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -155,7 +155,7 @@ class DataView(BaseView): name = "" re_named_parameter = re.compile(":([a-zA-Z0-9_]+)") - def options(self, request, *args, **kwargs): + async def options(self, request, *args, **kwargs): r = Response.text("ok") if self.ds.cors: r.headers["Access-Control-Allow-Origin"] = "*" diff --git a/tests/test_api.py b/tests/test_api.py index f82a8fe9..016894b4 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1961,3 +1961,9 @@ async def 
test_generated_columns_are_visible_in_datasette(app_client): "string": "This is a string", } ] + + +def test_http_options_request(app_client): + response = app_client.request("/fixtures", method="OPTIONS") + assert response.status == 200 + assert response.text == "ok" From 13c960c03b46e35f3432063a19f3f528ca249e23 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 2 Dec 2020 16:49:43 -0800 Subject: [PATCH 0013/1404] Test is no longer order dependent, closes #1123 --- tests/test_plugins.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index dab5ef68..93b444ab 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -797,9 +797,11 @@ def test_hook_table_actions(app_client, table_or_view): assert get_table_actions_links(response.text) == [] response_2 = app_client.get(f"/fixtures/{table_or_view}?_bot=1") - assert get_table_actions_links(response_2.text) == [ - {"label": "From async", "href": "/"}, + assert sorted( + get_table_actions_links(response_2.text), key=lambda l: l["label"] + ) == [ {"label": "Database: fixtures", "href": "/"}, + {"label": "From async", "href": "/"}, {"label": f"Table: {table_or_view}", "href": "/"}, ] From e048791a9a2686f47d81a2c8aa88aa1966d82521 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 2 Dec 2020 16:57:40 -0800 Subject: [PATCH 0014/1404] Release 0.52.2 Refs #1116, #1115, #1100, #749, #1121 --- datasette/version.py | 2 +- docs/changelog.rst | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 119295b3..0353358a 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52.1" +__version__ = "0.52.2" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index a77cf5a5..6fb06beb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,17 @@ Changelog ========= +.. _v0_52_2: + +0.52.2 (2020-12-02) +------------------- + +- Generated columns from SQLite 3.31.0 or higher are now correctly displayed. (`#1116 `__) +- Error message if you attempt to open a SpatiaLite database now suggests using ``--load-extension=spatialite`` if it detects that the extension is available in a common location. (`#1115 `__) +- ``OPTIONS`` requests against the ``/database`` page no longer raise a 500 error. (`#1100 `__) +- Databases larger than 32MB that are published to Cloud Run can now be downloaded. (`#749 `__) +- Fix for misaligned cog icon on table and database pages. Thanks, Abdussamet Koçak. (`#1121 `__) + .. 
_v0_52_1: 0.52.1 (2020-11-29) From 6b4c55efea3e9d34d92cbe5f0066553ad9b14071 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 10:53:26 -0800 Subject: [PATCH 0015/1404] Fix for Amazon Linux static assets 404ing, refs #1124 --- datasette/utils/asgi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index ce78a597..31b0bdcd 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -294,7 +294,7 @@ def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None): return # Ensure full_path is within root_path to avoid weird "../" tricks try: - full_path.relative_to(root_path) + full_path.relative_to(root_path.resolve()) except ValueError: await asgi_send_html(send, "404", 404) return From 63efcb35ce879fe68ee02411c8dd2fd5f127cc32 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 11:02:53 -0800 Subject: [PATCH 0016/1404] More tweaks to root_path handling, refs #1124 --- datasette/utils/asgi.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 31b0bdcd..3b41c2d7 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -282,10 +282,12 @@ async def asgi_send_file( def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None): + root_path = Path(root_path) + async def inner_static(request, send): path = request.scope["url_route"]["kwargs"]["path"] try: - full_path = (Path(root_path) / path).resolve().absolute() + full_path = (root_path / path).resolve().absolute() except FileNotFoundError: await asgi_send_html(send, "404", 404) return From ca6e8e53dc9b094a5ce169d81a69d872546e595a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 11:05:12 -0800 Subject: [PATCH 0017/1404] More helpful 404 messages, refs #1124 --- datasette/utils/asgi.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 3b41c2d7..363f059f 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -289,7 +289,7 @@ def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None): try: full_path = (root_path / path).resolve().absolute() except FileNotFoundError: - await asgi_send_html(send, "404", 404) + await asgi_send_html(send, "404: Directory not found", 404) return if full_path.is_dir(): await asgi_send_html(send, "403: Directory listing is not allowed", 403) @@ -298,12 +298,12 @@ def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None): try: full_path.relative_to(root_path.resolve()) except ValueError: - await asgi_send_html(send, "404", 404) + await asgi_send_html(send, "404: Path not inside root path", 404) return try: await asgi_send_file(send, full_path, chunk_size=chunk_size) except FileNotFoundError: - await asgi_send_html(send, "404", 404) + await asgi_send_html(send, "404: File not found", 404) return return inner_static From 4cce5516661b24afeddaf35bee84b00fbf5c7f89 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 11:07:05 -0800 Subject: [PATCH 0018/1404] Release 0.52.3 Refs #1124 --- datasette/version.py | 2 +- docs/changelog.rst | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index 0353358a..ab02947d 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52.2" +__version__ = "0.52.3" __version_info__ = tuple(__version__.split(".")) 
diff --git a/docs/changelog.rst b/docs/changelog.rst index 6fb06beb..4fa7609c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,13 @@ Changelog ========= +.. _v0_52_3: + +0.52.3 (2020-12-03) +------------------- + +- Fixed bug where static assets would 404 for Datasette installed on ARM Amazon Linux. (`#1124 `__) + .. _v0_52_2: 0.52.2 (2020-12-02) From 00185af74a91646d47aa54f2369c1a19a6f76a27 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 14:08:50 -0800 Subject: [PATCH 0019/1404] Show pysqlite3 version on /-/versions, if installed - #1125 --- datasette/app.py | 14 ++++++++++++-- datasette/utils/sqlite.py | 3 +++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index 922046d5..b2f16257 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -9,6 +9,7 @@ import inspect from itsdangerous import BadSignature import json import os +import pkg_resources import re import secrets import sys @@ -57,7 +58,6 @@ from .utils import ( module_from_path, parse_metadata, resolve_env_secrets, - sqlite3, to_css_class, HASH_LENGTH, ) @@ -74,6 +74,10 @@ from .utils.asgi import ( asgi_send_json, asgi_send_redirect, ) +from .utils.sqlite import ( + sqlite3, + using_pysqlite3, +) from .tracer import AsgiTracer from .plugins import pm, DEFAULT_PLUGINS, get_plugins from .version import __version__ @@ -619,7 +623,7 @@ class Datasette: datasette_version = {"version": __version__} if self.version_note: datasette_version["note"] = self.version_note - return { + info = { "python": { "version": ".".join(map(str, sys.version_info[:3])), "full": sys.version, @@ -636,6 +640,12 @@ class Datasette: ], }, } + if using_pysqlite3: + try: + info["pysqlite3"] = pkg_resources.get_distribution("pysqlite3").version + except pkg_resources.DistributionNotFound: + pass + return info def _plugins(self, request=None, all=False): ps = list(get_plugins()) diff --git a/datasette/utils/sqlite.py b/datasette/utils/sqlite.py index c8522f35..342ff3fa 100644 --- a/datasette/utils/sqlite.py +++ b/datasette/utils/sqlite.py @@ -1,5 +1,8 @@ +using_pysqlite3 = False try: import pysqlite3 as sqlite3 + + using_pysqlite3 = True except ImportError: import sqlite3 From e2fea36540e952d8d72c1bd0af7144b85b7a4671 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 19:12:33 -0800 Subject: [PATCH 0020/1404] Switch to google-github-actions/setup-gcloud - refs #1126 --- .github/workflows/deploy-latest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 05f0bad1..2de0a8b6 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -39,7 +39,7 @@ jobs: sphinx-to-sqlite ../docs.db _build cd .. 
- name: Set up Cloud Run - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master + uses: google-github-actions/setup-gcloud@master with: version: '275.0.0' service_account_email: ${{ secrets.GCP_SA_EMAIL }} From 49d8fc056844d5a537d6cfd96dab0dd5686fe718 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 3 Dec 2020 20:07:10 -0800 Subject: [PATCH 0021/1404] Try pysqlite3-binary version as well, refs #1125 --- datasette/app.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/datasette/app.py b/datasette/app.py index b2f16257..9bc84df0 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -641,10 +641,12 @@ class Datasette: }, } if using_pysqlite3: - try: - info["pysqlite3"] = pkg_resources.get_distribution("pysqlite3").version - except pkg_resources.DistributionNotFound: - pass + for package in ("pysqlite3", "pysqlite3-binary"): + try: + info["pysqlite3"] = pkg_resources.get_distribution(package).version + break + except pkg_resources.DistributionNotFound: + pass return info def _plugins(self, request=None, all=False): From 42efb799ea9b362f0c7598f3ff3c4bf46c18e53f Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 4 Dec 2020 21:20:12 -0800 Subject: [PATCH 0022/1404] Fixed invalid test for generated columns, refs #1119 --- tests/test_api.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index 016894b4..4339507c 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1951,14 +1951,14 @@ def test_paginate_using_link_header(app_client, qs): sqlite_version() < (3, 31, 0), reason="generated columns were added in SQLite 3.31.0", ) -async def test_generated_columns_are_visible_in_datasette(app_client): - response = app_client.get("/test/generated_columns.json?_shape=array") - assert response.json() == [ +def test_generated_columns_are_visible_in_datasette(app_client): + response = app_client.get("/fixtures/generated_columns.json?_shape=array") + assert response.json == [ { "rowid": 1, - "body": '{\n "number": 1,\n "string": "This is a string"\n }', - "number": 1, - "string": "This is a string", + "body": '{\n "number": 1,\n "string": "This is a string"\n}', + "id": 1, + "consideration": "This is a string", } ] From eae103a82b92949189cf718794d2ad0424005460 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Fri, 4 Dec 2020 21:21:11 -0800 Subject: [PATCH 0023/1404] Write errors to stderr, closes #1131 --- datasette/database.py | 10 ++++++---- datasette/renderer.py | 1 - datasette/views/base.py | 4 +++- tests/test_cli.py | 7 +++++++ 4 files changed, 16 insertions(+), 6 deletions(-) diff --git a/datasette/database.py b/datasette/database.py index 71c45ba0..412e0c59 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -1,8 +1,8 @@ import asyncio -import contextlib from pathlib import Path import janus import queue +import sys import threading import uuid @@ -104,7 +104,8 @@ class Database: try: result = task.fn(conn) except Exception as e: - print(e) + sys.stderr.write("{}\n".format(e)) + sys.stderr.flush() result = e task.reply_queue.sync_q.put(result) @@ -156,11 +157,12 @@ class Database: if e.args == ("interrupted",): raise QueryInterrupted(e, sql, params) if log_sql_errors: - print( - "ERROR: conn={}, sql = {}, params = {}: {}".format( + sys.stderr.write( + "ERROR: conn={}, sql = {}, params = {}: {}\n".format( conn, repr(sql), params, e ) ) + sys.stderr.flush() raise if truncate: diff --git a/datasette/renderer.py b/datasette/renderer.py index d779b44f..258199fc 100644 
--- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -20,7 +20,6 @@ def convert_specific_columns_to_json(rows, columns, json_cols): try: value = json.loads(value) except (TypeError, ValueError) as e: - print(e) pass new_row.append(value) new_rows.append(new_row) diff --git a/datasette/views/base.py b/datasette/views/base.py index a93a6378..b8860b74 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -2,6 +2,7 @@ import asyncio import csv import hashlib import re +import sys import time import urllib @@ -362,7 +363,8 @@ class DataView(BaseView): new_row.append(cell) await writer.writerow(new_row) except Exception as e: - print("caught this", e) + sys.stderr.write("Caught this error: {}\n".format(e)) + sys.stderr.flush() await r.write(str(e)) return diff --git a/tests/test_cli.py b/tests/test_cli.py index c52960fb..a0ac7d7a 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -214,3 +214,10 @@ def test_config_deprecated(ensure_eventloop): assert result.exit_code == 0 assert not json.loads(result.output)["allow_download"] assert "will be deprecated in" in result.stderr + + +def test_sql_errors_logged_to_stderr(ensure_eventloop): + runner = CliRunner(mix_stderr=False) + result = runner.invoke(cli, ["--get", "/:memory:.json?sql=select+blah"]) + assert result.exit_code == 1 + assert "sql = 'select blah', params = {}: no such column: blah\n" in result.stderr From 705d1a1555c4791e9be3b884285b047223ab184f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Abdussamet=20Ko=C3=A7ak?= Date: Sat, 5 Dec 2020 22:35:03 +0300 Subject: [PATCH 0024/1404] Fix startup error on windows (#1128) Fixes https://github.com/simonw/datasette/issues/1094 This import isn't used at all, and causes error on startup on Windows. --- datasette/utils/asgi.py | 1 - 1 file changed, 1 deletion(-) diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 363f059f..fc9adcff 100644 --- a/datasette/utils/asgi.py +++ b/datasette/utils/asgi.py @@ -1,5 +1,4 @@ import json -from os import EX_CANTCREAT from datasette.utils import MultiParams from mimetypes import guess_type from urllib.parse import parse_qs, urlunparse, parse_qsl From 2dc281645a76c550789ede80c1bc6f733fa9a82e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 5 Dec 2020 11:41:40 -0800 Subject: [PATCH 0025/1404] Release 0.52.4 Refs #1125, #1131, #1094 --- datasette/version.py | 2 +- docs/changelog.rst | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index ab02947d..ce06fe1d 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52.3" +__version__ = "0.52.4" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 4fa7609c..a9922ab3 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,15 @@ Changelog ========= +.. _v0_52_4: + +0.52.4 (2020-12-05) +------------------- + +- Show `pysqlite3 `__ version on ``/-/versions``, if installed. (`#1125 `__) +- Errors output by Datasette (e.g. for invalid SQL queries) now go to ``stderr``, not ``stdout``. (`#1131 `__) +- Fix for a startup error on windows caused by unneccessary ``from os import EX_CANTCREAT`` - thanks, Abdussamet Koçak. (`#1094 `__) + .. 
_v0_52_3: 0.52.3 (2020-12-03) From e5930e6f889617320454ab53ecc1c438377d49e6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 5 Dec 2020 11:42:42 -0800 Subject: [PATCH 0026/1404] Typo fix in release notes --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index a9922ab3..86d844f7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -11,7 +11,7 @@ Changelog - Show `pysqlite3 `__ version on ``/-/versions``, if installed. (`#1125 `__) - Errors output by Datasette (e.g. for invalid SQL queries) now go to ``stderr``, not ``stdout``. (`#1131 `__) -- Fix for a startup error on windows caused by unneccessary ``from os import EX_CANTCREAT`` - thanks, Abdussamet Koçak. (`#1094 `__) +- Fix for a startup error on windows caused by unnecessary ``from os import EX_CANTCREAT`` - thanks, Abdussamet Koçak. (`#1094 `__) .. _v0_52_3: From e3143700a245d87bc532d44867b2e380b4225324 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 7 Dec 2020 11:00:10 -0800 Subject: [PATCH 0027/1404] Custom template for docs, linking to datasette.io --- docs/_templates/layout.html | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 docs/_templates/layout.html diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html new file mode 100644 index 00000000..b7b6f794 --- /dev/null +++ b/docs/_templates/layout.html @@ -0,0 +1,23 @@ +{%- extends "!layout.html" %} + +{% block sidebartitle %} + + + + + +{% if theme_display_version %} + {%- set nav_version = version %} + {% if READTHEDOCS and current_version %} + {%- set nav_version = current_version %} + {% endif %} + {% if nav_version %} +
<div class="version"> + {{ nav_version }} + </div>
+ {% endif %} +{% endif %} + +{% include "searchbox.html" %} + +{% endblock %} From 62a6f70c64e4d04c15d9f386dcdf9cd465bbb0f6 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 7 Dec 2020 12:10:05 -0800 Subject: [PATCH 0028/1404] Fixed Markdown indentation of news To make it easier to programmatically extract. --- README.md | 76 +++++++++++++++++++++++++++---------------------------- 1 file changed, 38 insertions(+), 38 deletions(-) diff --git a/README.md b/README.md index c0019e9b..89245cf1 100644 --- a/README.md +++ b/README.md @@ -25,53 +25,53 @@ Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly new ## News - * 28th November 2020: [Datasette 0.52](https://docs.datasette.io/en/stable/changelog.html#v0-52) - `--config` is now `--setting`, new `database_actions` plugin hook, `datasette publish cloudrun --apt-get-install` option and several bug fixes. - * 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. [Annotated release notes](https://simonwillison.net/2020/Nov/1/datasette-0-51/). - * 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). - * 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). - * 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. - * 11th August 2020: [Datasette 0.47](https://docs.datasette.io/en/stable/changelog.html#v0-47) - Datasette can now be installed using Homebrew! `brew install simonw/datasette/datasette`. Also new: `datasette install name-of-plugin` and `datasette uninstall name-of-plugin` commands, and `datasette --get '/-/versions.json'` to output the result of Datasette HTTP calls on the command-line. - * 9th August 2020: [Datasette 0.46](https://docs.datasette.io/en/stable/changelog.html#v0-46) - security fix relating to CSRF protection for writable canned queries, a new logo, new debugging tools, improved file downloads and more. - * 6th August 2020: [GraphQL in Datasette with the new datasette-graphql plugin](https://simonwillison.net/2020/Aug/7/datasette-graphql/) - * 24th July 2020: Two new plugins: [datasette-copyable and datasette-insert-api](https://simonwillison.net/2020/Jul/23/datasette-copyable-datasette-insert-api/). `datasette-copyable` adds copy-and-paste export options, and `datasette-insert-api` lets you create tables and insert or update data by POSTing JSON directly to Datasette. - * 1st July 2020: [Datasette 0.45](https://docs.datasette.io/en/stable/changelog.html#v0-45) - [Magic parameters for canned queries](https://docs.datasette.io/en/stable/sql_queries.html#canned-queries-magic-parameters), a log out feature, improved plugin documentation and four new plugin hooks. 
See also [Datasette 0.45: The annotated release notes](https://simonwillison.net/2020/Jul/1/datasette-045/). - * 20th June 2020: [A cookiecutter template for writing Datasette plugins](https://simonwillison.net/2020/Jun/20/cookiecutter-plugins/) - * 11th June 2020: [Datasette 0.44](https://docs.datasette.io/en/stable/changelog.html#v0-44) - [Authentication and permissions](https://docs.datasette.io/en/stable/authentication.html), [writable canned queries](https://docs.datasette.io/en/stable/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more. - * 28th May 2020: [Datasette 0.43](https://docs.datasette.io/en/stable/changelog.html#v0-43) - Redesigned [register_output_renderer](https://docs.datasette.io/en/stable/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. - * 8th May 2020: [Datasette 0.42](https://docs.datasette.io/en/stable/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. - * 6th May 2020: [Datasette 0.41](https://docs.datasette.io/en/stable/changelog.html#v0-41) - New mechanism for [creating custom pages](https://docs.datasette.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://docs.datasette.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements. - * 21st April 2020: [Datasette 0.40](https://docs.datasette.io/en/stable/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. Various bug fixes. - * 24th March 2020: [Datasette 0.39](https://docs.datasette.io/en/stable/changelog.html#v0-39) - New `base_url` configuration option for running Datasette under a different URL prefix, `"sort"` and `"sort_desc"` metadata options for setting a default sort order for a table. - * 8th March 2020: [Datasette 0.38](https://docs.datasette.io/en/stable/changelog.html#v0-38) - New `--memory` option for `datasete publish cloudrun`, [Docker image](https://hub.docker.com/r/datasetteproject/datasette) upgraded to SQLite 3.31.1. - * 25th February 2020: [Datasette 0.37](https://docs.datasette.io/en/stable/changelog.html#v0-37) - new internal APIs enabling plugins to safely write to databases. Read more here: [Datasette Writes](https://simonwillison.net/2020/Feb/26/weeknotes-datasette-writes/). - * 21st February 2020: [Datasette 0.36](https://docs.datasette.io/en/stable/changelog.html#v0-36) - new internals documentation for plugins, `prepare_connection()` now accepts optional `database` and `datasette` arguments. - * 4th February 2020: [Datasette 0.35](https://docs.datasette.io/en/stable/changelog.html#v0-35) - new `.render_template()` method for plugins. - * 29th January 2020: [Datasette 0.34](https://docs.datasette.io/en/stable/changelog.html#v0-34) - improvements to search, `datasette publish cloudrun` and `datasette package`. - * 21st January 2020: [Deploying a data API using GitHub Actions and Cloud Run](https://simonwillison.net/2020/Jan/21/github-actions-cloud-run/) - how to use GitHub Actions and Google Cloud Run to automatically scrape data and deploy the result as an API with Datasette. - * 22nd December 2019: [Datasette 0.33](https://docs.datasette.io/en/stable/changelog.html#v0-33) - various small improvements. 
- * 19th December 2019: [Building tools to bring data-driven reporting to more newsrooms](https://medium.com/jsk-class-of-2020/building-tools-to-bring-data-driven-reporting-to-more-newsrooms-4520a0c9b3f2) - some notes on my JSK fellowship so far. - * 2nd December 2019: [Niche Museums](https://www.niche-museums.com/) is a new site entirely powered by Datasette, using custom templates and plugins. [niche-museums.com, powered by Datasette](https://simonwillison.net/2019/Nov/25/niche-museums/) describes how the site works, and [datasette-atom: Define an Atom feed using a custom SQL query](https://simonwillison.net/2019/Dec/3/datasette-atom/) describes how the new [datasette-atom plugin](https://github.com/simonw/datasette-atom) was used to add an Atom syndication feed to the site. - * 14th November 2019: [Datasette 0.32](https://docs.datasette.io/en/stable/changelog.html#v0-32) now uses asynchronous rendering in Jinja templates, which means template functions can perform asynchronous operations such as executing SQL queries. [datasette-template-sql](https://github.com/simonw/datasette-template-sql) is a new plugin uses this capability to add a new custom `sql(sql_query)` template function. - * 11th November 2019: [Datasette 0.31](https://docs.datasette.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5. - * 18th October 2019: [Datasette 0.30](https://docs.datasette.io/en/stable/changelog.html#v0-30) - * 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail. - * 7th July 2019: [Datasette 0.29](https://docs.datasette.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more... +* 28th November 2020: [Datasette 0.52](https://docs.datasette.io/en/stable/changelog.html#v0-52) - `--config` is now `--setting`, new `database_actions` plugin hook, `datasette publish cloudrun --apt-get-install` option and several bug fixes. +* 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. [Annotated release notes](https://simonwillison.net/2020/Nov/1/datasette-0-51/). +* 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). +* 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). +* 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. +* 11th August 2020: [Datasette 0.47](https://docs.datasette.io/en/stable/changelog.html#v0-47) - Datasette can now be installed using Homebrew! 
`brew install simonw/datasette/datasette`. Also new: `datasette install name-of-plugin` and `datasette uninstall name-of-plugin` commands, and `datasette --get '/-/versions.json'` to output the result of Datasette HTTP calls on the command-line. +* 9th August 2020: [Datasette 0.46](https://docs.datasette.io/en/stable/changelog.html#v0-46) - security fix relating to CSRF protection for writable canned queries, a new logo, new debugging tools, improved file downloads and more. +* 6th August 2020: [GraphQL in Datasette with the new datasette-graphql plugin](https://simonwillison.net/2020/Aug/7/datasette-graphql/) +* 24th July 2020: Two new plugins: [datasette-copyable and datasette-insert-api](https://simonwillison.net/2020/Jul/23/datasette-copyable-datasette-insert-api/). `datasette-copyable` adds copy-and-paste export options, and `datasette-insert-api` lets you create tables and insert or update data by POSTing JSON directly to Datasette. +* 1st July 2020: [Datasette 0.45](https://docs.datasette.io/en/stable/changelog.html#v0-45) - [Magic parameters for canned queries](https://docs.datasette.io/en/stable/sql_queries.html#canned-queries-magic-parameters), a log out feature, improved plugin documentation and four new plugin hooks. See also [Datasette 0.45: The annotated release notes](https://simonwillison.net/2020/Jul/1/datasette-045/). +* 20th June 2020: [A cookiecutter template for writing Datasette plugins](https://simonwillison.net/2020/Jun/20/cookiecutter-plugins/) +* 11th June 2020: [Datasette 0.44](https://docs.datasette.io/en/stable/changelog.html#v0-44) - [Authentication and permissions](https://docs.datasette.io/en/stable/authentication.html), [writable canned queries](https://docs.datasette.io/en/stable/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more. +* 28th May 2020: [Datasette 0.43](https://docs.datasette.io/en/stable/changelog.html#v0-43) - Redesigned [register_output_renderer](https://docs.datasette.io/en/stable/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. +* 8th May 2020: [Datasette 0.42](https://docs.datasette.io/en/stable/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. +* 6th May 2020: [Datasette 0.41](https://docs.datasette.io/en/stable/changelog.html#v0-41) - New mechanism for [creating custom pages](https://docs.datasette.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://docs.datasette.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements. +* 21st April 2020: [Datasette 0.40](https://docs.datasette.io/en/stable/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. Various bug fixes. +* 24th March 2020: [Datasette 0.39](https://docs.datasette.io/en/stable/changelog.html#v0-39) - New `base_url` configuration option for running Datasette under a different URL prefix, `"sort"` and `"sort_desc"` metadata options for setting a default sort order for a table. +* 8th March 2020: [Datasette 0.38](https://docs.datasette.io/en/stable/changelog.html#v0-38) - New `--memory` option for `datasete publish cloudrun`, [Docker image](https://hub.docker.com/r/datasetteproject/datasette) upgraded to SQLite 3.31.1. 
+* 25th February 2020: [Datasette 0.37](https://docs.datasette.io/en/stable/changelog.html#v0-37) - new internal APIs enabling plugins to safely write to databases. Read more here: [Datasette Writes](https://simonwillison.net/2020/Feb/26/weeknotes-datasette-writes/). +* 21st February 2020: [Datasette 0.36](https://docs.datasette.io/en/stable/changelog.html#v0-36) - new internals documentation for plugins, `prepare_connection()` now accepts optional `database` and `datasette` arguments. +* 4th February 2020: [Datasette 0.35](https://docs.datasette.io/en/stable/changelog.html#v0-35) - new `.render_template()` method for plugins. +* 29th January 2020: [Datasette 0.34](https://docs.datasette.io/en/stable/changelog.html#v0-34) - improvements to search, `datasette publish cloudrun` and `datasette package`. +* 21st January 2020: [Deploying a data API using GitHub Actions and Cloud Run](https://simonwillison.net/2020/Jan/21/github-actions-cloud-run/) - how to use GitHub Actions and Google Cloud Run to automatically scrape data and deploy the result as an API with Datasette. +* 22nd December 2019: [Datasette 0.33](https://docs.datasette.io/en/stable/changelog.html#v0-33) - various small improvements. +* 19th December 2019: [Building tools to bring data-driven reporting to more newsrooms](https://medium.com/jsk-class-of-2020/building-tools-to-bring-data-driven-reporting-to-more-newsrooms-4520a0c9b3f2) - some notes on my JSK fellowship so far. +* 2nd December 2019: [Niche Museums](https://www.niche-museums.com/) is a new site entirely powered by Datasette, using custom templates and plugins. [niche-museums.com, powered by Datasette](https://simonwillison.net/2019/Nov/25/niche-museums/) describes how the site works, and [datasette-atom: Define an Atom feed using a custom SQL query](https://simonwillison.net/2019/Dec/3/datasette-atom/) describes how the new [datasette-atom plugin](https://github.com/simonw/datasette-atom) was used to add an Atom syndication feed to the site. +* 14th November 2019: [Datasette 0.32](https://docs.datasette.io/en/stable/changelog.html#v0-32) now uses asynchronous rendering in Jinja templates, which means template functions can perform asynchronous operations such as executing SQL queries. [datasette-template-sql](https://github.com/simonw/datasette-template-sql) is a new plugin uses this capability to add a new custom `sql(sql_query)` template function. +* 11th November 2019: [Datasette 0.31](https://docs.datasette.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5. +* 18th October 2019: [Datasette 0.30](https://docs.datasette.io/en/stable/changelog.html#v0-30) +* 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail. +* 7th July 2019: [Datasette 0.29](https://docs.datasette.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more... * [datasette-auth-github](https://github.com/simonw/datasette-auth-github) - a new plugin for Datasette 0.29 that lets you require users to authenticate against GitHub before accessing your Datasette instance. You can whitelist specific users, or you can restrict access to members of specific GitHub organizations or teams. 
* [datasette-cors](https://github.com/simonw/datasette-cors) - a plugin that lets you configure CORS access from a list of domains (or a set of domain wildcards) so you can make JavaScript calls to a Datasette instance from a specific set of other hosts. - * 23rd June 2019: [Porting Datasette to ASGI, and Turtles all the way down](https://simonwillison.net/2019/Jun/23/datasette-asgi/) - * 21st May 2019: The anonymized raw data from [the Stack Overflow Developer Survey 2019](https://stackoverflow.blog/2019/05/21/public-data-release-of-stack-overflows-2019-developer-survey/) has been [published in partnership with Glitch](https://glitch.com/culture/discover-insights-explore-developer-survey-results-2019/), powered by Datasette. - * 19th May 2019: [Datasette 0.28](https://docs.datasette.io/en/stable/changelog.html#v0-28) - a salmagundi of new features! +* 23rd June 2019: [Porting Datasette to ASGI, and Turtles all the way down](https://simonwillison.net/2019/Jun/23/datasette-asgi/) +* 21st May 2019: The anonymized raw data from [the Stack Overflow Developer Survey 2019](https://stackoverflow.blog/2019/05/21/public-data-release-of-stack-overflows-2019-developer-survey/) has been [published in partnership with Glitch](https://glitch.com/culture/discover-insights-explore-developer-survey-results-2019/), powered by Datasette. +* 19th May 2019: [Datasette 0.28](https://docs.datasette.io/en/stable/changelog.html#v0-28) - a salmagundi of new features! * No longer immutable! Datasette now supports [databases that change](https://docs.datasette.io/en/stable/changelog.html#supporting-databases-that-change). * [Faceting improvements](https://docs.datasette.io/en/stable/changelog.html#faceting-improvements-and-faceting-plugins) including facet-by-JSON-array and the ability to define custom faceting using plugins. * [datasette publish cloudrun](https://docs.datasette.io/en/stable/changelog.html#datasette-publish-cloudrun) lets you publish databases to Google's new Cloud Run hosting service. * New [register_output_renderer](https://docs.datasette.io/en/stable/changelog.html#register-output-renderer-plugins) plugin hook for adding custom output extensions to Datasette in addition to the default `.json` and `.csv`. * Dozens of other smaller features and tweaks - see [the release notes](https://docs.datasette.io/en/stable/changelog.html#v0-28) for full details. * Read more about this release here: [Datasette 0.28—and why master should always be releasable](https://simonwillison.net/2019/May/19/datasette-0-28/) - * 24th February 2019: [ +* 24th February 2019: [ sqlite-utils: a Python library and CLI tool for building SQLite databases](https://simonwillison.net/2019/Feb/25/sqlite-utils/) - a partner tool for easily creating SQLite databases for use with Datasette. - * 31st Janary 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). - * 10th January 2019: [Datasette 0.26.1](https://docs.datasette.io/en/stable/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options. - * 2nd January 2019: [Datasette 0.26](https://docs.datasette.io/en/stable/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument. 
+* 31st Janary 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). +* 10th January 2019: [Datasette 0.26.1](https://docs.datasette.io/en/stable/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options. +* 2nd January 2019: [Datasette 0.26](https://docs.datasette.io/en/stable/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument. * 18th December 2018: [Fast Autocomplete Search for Your Website](https://24ways.org/2018/fast-autocomplete-search-for-your-website/) - a new tutorial on using Datasette to build a JavaScript autocomplete search engine. * 3rd October 2018: [The interesting ideas in Datasette](https://simonwillison.net/2018/Oct/4/datasette-ideas/) - a write-up of some of the less obvious interesting ideas embedded in the Datasette project. * 19th September 2018: [Datasette 0.25](https://docs.datasette.io/en/stable/changelog.html#v0-25) - New plugin hooks, improved database view support and an easier way to use more recent versions of SQLite. From 8ae0f9f7f0d644b0161165a1084f53acd2786f7c Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 7 Dec 2020 12:16:13 -0800 Subject: [PATCH 0029/1404] Fixed spelling of Janary --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 89245cf1..7861abbd 100644 --- a/README.md +++ b/README.md @@ -69,7 +69,7 @@ Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly new * Read more about this release here: [Datasette 0.28—and why master should always be releasable](https://simonwillison.net/2019/May/19/datasette-0-28/) * 24th February 2019: [ sqlite-utils: a Python library and CLI tool for building SQLite databases](https://simonwillison.net/2019/Feb/25/sqlite-utils/) - a partner tool for easily creating SQLite databases for use with Datasette. -* 31st Janary 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). +* 31st January 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). * 10th January 2019: [Datasette 0.26.1](https://docs.datasette.io/en/stable/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options. * 2nd January 2019: [Datasette 0.26](https://docs.datasette.io/en/stable/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument. * 18th December 2018: [Fast Autocomplete Search for Your Website](https://24ways.org/2018/fast-autocomplete-search-for-your-website/) - a new tutorial on using Datasette to build a JavaScript autocomplete search engine. 
From 4c25b035b2370983c8dd5e0c8762e9154e379774 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 7 Dec 2020 14:41:03 -0800 Subject: [PATCH 0030/1404] arraynotcontains filter, closes #1132 --- datasette/filters.py | 11 ++++++++++- docs/json_api.rst | 7 ++++++- tests/test_api.py | 25 +++++++++++++++++++++++-- 3 files changed, 39 insertions(+), 4 deletions(-) diff --git a/datasette/filters.py b/datasette/filters.py index 1524b32a..edf2de99 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -154,7 +154,16 @@ class Filters: where j.value = :{p} )""", '{c} contains "{v}"', - ) + ), + TemplatedFilter( + "arraynotcontains", + "array does not contain", + """rowid not in ( + select {t}.rowid from {t}, json_each({t}.{c}) j + where j.value = :{p} + )""", + '{c} does not contain "{v}"', + ), ] if detect_json1() else [] diff --git a/docs/json_api.rst b/docs/json_api.rst index 8d45ac6f..582a6159 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -267,7 +267,12 @@ You can filter the data returned by the table based on column values using a que Rows where column does not match any of the provided values. The inverse of ``__in=``. Also supports JSON arrays. ``?column__arraycontains=value`` - Works against columns that contain JSON arrays - matches if any of the values in that array match. + Works against columns that contain JSON arrays - matches if any of the values in that array match the provided value. + + This is only available if the ``json1`` SQLite extension is enabled. + +``?column__arraynotcontains=value`` + Works against columns that contain JSON arrays - matches if none of the values in that array match the provided value. This is only available if the ``json1`` SQLite extension is enabled. diff --git a/tests/test_api.py b/tests/test_api.py index 4339507c..a4c30414 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1121,7 +1121,7 @@ def test_table_filter_queries_multiple_of_same_type(app_client): @pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module") def test_table_filter_json_arraycontains(app_client): response = app_client.get("/fixtures/facetable.json?tags__arraycontains=tag1") - assert [ + assert response.json["rows"] == [ [ 1, "2019-01-14 08:00:00", @@ -1146,7 +1146,28 @@ def test_table_filter_json_arraycontains(app_client): "[]", "two", ], - ] == response.json["rows"] + ] + + +@pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module") +def test_table_filter_json_arraynotcontains(app_client): + response = app_client.get( + "/fixtures/facetable.json?tags__arraynotcontains=tag3&tags__not=[]" + ) + assert response.json["rows"] == [ + [ + 1, + "2019-01-14 08:00:00", + 1, + 1, + "CA", + 1, + "Mission", + '["tag1", "tag2"]', + '[{"foo": "bar"}]', + "one", + ] + ] def test_table_filter_extra_where(app_client): From fe86d853089f324f92daa950cc56f4052bf78f98 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Dec 2020 11:45:45 -0800 Subject: [PATCH 0031/1404] datasette serve --create option, closes #1135 --- datasette/cli.py | 21 ++++++++++++++++++++- docs/datasette-serve-help.txt | 1 + tests/test_cli.py | 19 +++++++++++++++++++ 3 files changed, 40 insertions(+), 1 deletion(-) diff --git a/datasette/cli.py b/datasette/cli.py index e84695e3..32408d23 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -27,6 +27,7 @@ from .utils import ( StaticMount, ValueAsBooleanError, ) +from .utils.sqlite import sqlite3 from .utils.testing import TestClient from .version import __version__ @@ -299,7 +300,7 @@ def 
uninstall(packages, yes): @cli.command() -@click.argument("files", type=click.Path(exists=True), nargs=-1) +@click.argument("files", type=click.Path(), nargs=-1) @click.option( "-i", "--immutable", @@ -401,6 +402,11 @@ def uninstall(packages, yes): is_flag=True, help="Open Datasette in your web browser", ) +@click.option( + "--create", + is_flag=True, + help="Create database files if they do not exist", +) def serve( files, immutable, @@ -424,6 +430,7 @@ def serve( help_config, pdb, open_browser, + create, return_instance=False, ): """Serve up specified SQLite database files with a web UI""" @@ -486,6 +493,18 @@ def serve( kwargs["config_dir"] = pathlib.Path(files[0]) files = [] + # Verify list of files, create if needed (and --create) + for file in files: + if not pathlib.Path(file).exists(): + if create: + sqlite3.connect(file).execute("vacuum") + else: + raise click.ClickException( + "Invalid value for '[FILES]...': Path '{}' does not exist.".format( + file + ) + ) + try: ds = Datasette(files, **kwargs) except SpatialiteNotFound: diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt index bdaf0894..079ec9f8 100644 --- a/docs/datasette-serve-help.txt +++ b/docs/datasette-serve-help.txt @@ -40,4 +40,5 @@ Options: --help-config Show available config options --pdb Launch debugger on any errors -o, --open Open Datasette in your web browser + --create Create database files if they do not exist --help Show this message and exit. diff --git a/tests/test_cli.py b/tests/test_cli.py index a0ac7d7a..3f6b1840 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -146,6 +146,7 @@ def test_metadata_yaml(): help_config=False, pdb=False, open_browser=False, + create=False, return_instance=True, ) client = _TestClient(ds) @@ -221,3 +222,21 @@ def test_sql_errors_logged_to_stderr(ensure_eventloop): result = runner.invoke(cli, ["--get", "/:memory:.json?sql=select+blah"]) assert result.exit_code == 1 assert "sql = 'select blah', params = {}: no such column: blah\n" in result.stderr + + +def test_serve_create(ensure_eventloop, tmpdir): + runner = CliRunner() + db_path = tmpdir / "does_not_exist_yet.db" + assert not db_path.exists() + result = runner.invoke( + cli, [str(db_path), "--create", "--get", "/-/databases.json"] + ) + assert result.exit_code == 0, result.output + databases = json.loads(result.output) + assert { + "name": "does_not_exist_yet", + "is_mutable": True, + "is_memory": False, + "hash": None, + }.items() <= databases[0].items() + assert db_path.exists() From 6000d1a724d0e28cdb102e7be83eac07a00b41e8 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Dec 2020 11:56:44 -0800 Subject: [PATCH 0032/1404] Fix for combining ?_search_x and ?_searchmode=raw, closes #1134 --- datasette/views/table.py | 4 +++- tests/test_api.py | 7 +++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index a0de2a8e..3e9adf88 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -443,7 +443,9 @@ class TableView(RowTableShared): fts_table = fts_table or await db.fts_table(table) fts_pk = special_args.get("_fts_pk", table_metadata.get("fts_pk", "rowid")) search_args = dict( - pair for pair in special_args.items() if pair[0].startswith("_search") + pair + for pair in special_args.items() + if pair[0].startswith("_search") and pair[0] != "_searchmode" ) search = "" search_mode_raw = special_args.get("_searchmode") == "raw" diff --git a/tests/test_api.py b/tests/test_api.py index a4c30414..10755b95 100644 
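The views/table.py hunk above fixes #1134 by excluding ``_searchmode`` when the ``_search*`` query-string pairs are gathered, so it acts purely as a mode flag. The filter is easy to check in isolation — the parameter values below are made up for illustration:

    special_args = {
        "_search_text2": "te*",
        "_searchmode": "raw",
        "_fts_pk": "rowid",
    }
    # Collect _search and _search_COLUMN pairs, but leave _searchmode
    # out so it is not misread as a per-column search parameter.
    search_args = dict(
        pair
        for pair in special_args.items()
        if pair[0].startswith("_search") and pair[0] != "_searchmode"
    )
    assert search_args == {"_search_text2": "te*"}
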
--- a/tests/test_api.py +++ b/tests/test_api.py @@ -1035,6 +1035,13 @@ def test_sortable_columns_metadata(app_client): [2, "terry dog", "sara weasel", "puma"], ], ), + ( + # _searchmode=raw combined with _search_COLUMN + "/fixtures/searchable.json?_search_text2=te*&_searchmode=raw", + [ + [1, "barry cat", "terry dog", "panther"], + ], + ), ( "/fixtures/searchable.json?_search=weasel", [[2, "terry dog", "sara weasel", "puma"]], From 387b471b88788069191bc845224b7712d92e9c0b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Dec 2020 12:03:44 -0800 Subject: [PATCH 0033/1404] Release 0.52.5 Refs #1134 --- datasette/version.py | 2 +- docs/changelog.rst | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datasette/version.py b/datasette/version.py index ce06fe1d..b0a59018 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.52.4" +__version__ = "0.52.5" __version_info__ = tuple(__version__.split(".")) diff --git a/docs/changelog.rst b/docs/changelog.rst index 86d844f7..c79e7c86 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,13 @@ Changelog ========= +.. _v0_52_5: + +0.52.5 (2020-12-09) +------------------- + +- Fix for error caused by combining the ``_searchmode=raw`` and ``?_search_COLUMN`` parameters. (`#1134 `__) + .. _v0_52_4: 0.52.4 (2020-12-05) From 4c6407cd74070237fdad0dd6df4d016740806fbd Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Wed, 9 Dec 2020 12:14:33 -0800 Subject: [PATCH 0034/1404] Releasing bug fixes from a branch, closes #1136 --- docs/contributing.rst | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/docs/contributing.rst b/docs/contributing.rst index ca194001..8cd9c210 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -204,6 +204,34 @@ You are welcome to try these out, but please be aware that details may change be Please join `discussions on the issue tracker `__ to share your thoughts and experiences with on alpha and beta features that you try out. +.. _contributing_bug_fix_branch: + +Releasing bug fixes from a branch +--------------------------------- + +If it's necessary to publish a bug fix release without shipping new features that have landed on ``main`` a release branch can be used. + +Create it from the relevant last tagged release like so:: + + git branch 0.52.x 0.52.4 + git checkout 0.52.x + +Next cherry-pick the commits containing the bug fixes:: + + git cherry-pick COMMIT + +Write the release notes in the branch, and update the version number in ``version.py``. Then push the branch:: + + git push -u origin 0.52.x + +Once the tests have completed, publish the release from that branch target using the GitHub `Draft a new release `__ form. + +Finally, cherry-pick the commit with the release notes and version number bump across to ``main``:: + + git checkout main + git cherry-pick COMMIT + git push + .. 
_contributing_upgrading_codemirror: Upgrading CodeMirror From e0b54d09115ded459e09e2e89e0962cfddcb0244 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 15:20:43 -0800 Subject: [PATCH 0035/1404] No longer using Wiki for examples --- README.md | 2 +- docs/index.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 7861abbd..71e488f7 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover [Explore a demo](https://fivethirtyeight.datasettes.com/fivethirtyeight), watch [a video about the project](https://www.youtube.com/watch?v=pTr1uLQTJNE) or try it out by [uploading and publishing your own CSV data](https://simonwillison.net/2019/Apr/23/datasette-glitch/). * Comprehensive documentation: https://docs.datasette.io/ -* Examples: https://github.com/simonw/datasette/wiki/Datasettes +* Examples: https://datasette.io/examples * Live demo of current main: https://latest.datasette.io/ * Support questions, feedback? Join our [GitHub Discussions forum](https://github.com/simonw/datasette/discussions) diff --git a/docs/index.rst b/docs/index.rst index ff8db04b..eafc5bdb 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -25,7 +25,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover `Explore a demo `__, watch `a presentation about the project `__ or :ref:`getting_started_glitch`. -More examples: https://github.com/simonw/datasette/wiki/Datasettes +More examples: https://datasette.io/examples Support questions, feedback? Join our `GitHub Discussions forum `__. From 7ef80d0145dc9a2a16c46823704517d7f35fbe45 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 15:24:16 -0800 Subject: [PATCH 0036/1404] News is now on datasette.io/news Closes #1137, closes #659 --- README.md | 83 ++++++------------------------------------------------- 1 file changed, 8 insertions(+), 75 deletions(-) diff --git a/README.md b/README.md index 71e488f7..16fc8f0e 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover [Explore a demo](https://fivethirtyeight.datasettes.com/fivethirtyeight), watch [a video about the project](https://www.youtube.com/watch?v=pTr1uLQTJNE) or try it out by [uploading and publishing your own CSV data](https://simonwillison.net/2019/Apr/23/datasette-glitch/). +* Latest [Datasette News](https://datasette.io/news) * Comprehensive documentation: https://docs.datasette.io/ * Examples: https://datasette.io/examples * Live demo of current main: https://latest.datasette.io/ @@ -23,83 +24,15 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover Want to stay up-to-date with the project? Subscribe to the [Datasette Weekly newsletter](https://datasette.substack.com/) for tips, tricks and news on what's new in the Datasette ecosystem. -## News - -* 28th November 2020: [Datasette 0.52](https://docs.datasette.io/en/stable/changelog.html#v0-52) - `--config` is now `--setting`, new `database_actions` plugin hook, `datasette publish cloudrun --apt-get-install` option and several bug fixes. -* 31st October 2020: [Datasette 0.51](https://docs.datasette.io/en/stable/changelog.html#v0-51) - A new visual design, plugin hooks for adding navigation options, better handling of binary data, URL building utility methods and better support for running Datasette behind a proxy. 
[Annotated release notes](https://simonwillison.net/2020/Nov/1/datasette-0-51/). -* 9th October 2020: [Datasette 0.50](https://docs.datasette.io/en/stable/changelog.html#v0-50) - New column actions menu. `datasette.client` object for plugins to make internal API requests. Improved documentation on deploying Datasette. [Annotated release notes](https://simonwillison.net/2020/Oct/9/datasette-0-50/). -* 14th September 2020: [Datasette 0.49](https://docs.datasette.io/en/stable/changelog.html#v0-49) - JSON API for writable canned queries, path parameters for custom pages. See also [Datasette 0.49: The annotated release notes](https://simonwillison.net/2020/Sep/15/datasette-0-49/). -* 16th August 2020: [Datasette 0.48](https://docs.datasette.io/en/stable/changelog.html#v0-48) - Documentation now lives at [docs.datasette.io](https://docs.datasette.io/), improvements to the `extra_template_vars`, `extra_css_urls`, `extra_js_urls` and `extra_body_script` plugin hooks. -* 11th August 2020: [Datasette 0.47](https://docs.datasette.io/en/stable/changelog.html#v0-47) - Datasette can now be installed using Homebrew! `brew install simonw/datasette/datasette`. Also new: `datasette install name-of-plugin` and `datasette uninstall name-of-plugin` commands, and `datasette --get '/-/versions.json'` to output the result of Datasette HTTP calls on the command-line. -* 9th August 2020: [Datasette 0.46](https://docs.datasette.io/en/stable/changelog.html#v0-46) - security fix relating to CSRF protection for writable canned queries, a new logo, new debugging tools, improved file downloads and more. -* 6th August 2020: [GraphQL in Datasette with the new datasette-graphql plugin](https://simonwillison.net/2020/Aug/7/datasette-graphql/) -* 24th July 2020: Two new plugins: [datasette-copyable and datasette-insert-api](https://simonwillison.net/2020/Jul/23/datasette-copyable-datasette-insert-api/). `datasette-copyable` adds copy-and-paste export options, and `datasette-insert-api` lets you create tables and insert or update data by POSTing JSON directly to Datasette. -* 1st July 2020: [Datasette 0.45](https://docs.datasette.io/en/stable/changelog.html#v0-45) - [Magic parameters for canned queries](https://docs.datasette.io/en/stable/sql_queries.html#canned-queries-magic-parameters), a log out feature, improved plugin documentation and four new plugin hooks. See also [Datasette 0.45: The annotated release notes](https://simonwillison.net/2020/Jul/1/datasette-045/). -* 20th June 2020: [A cookiecutter template for writing Datasette plugins](https://simonwillison.net/2020/Jun/20/cookiecutter-plugins/) -* 11th June 2020: [Datasette 0.44](https://docs.datasette.io/en/stable/changelog.html#v0-44) - [Authentication and permissions](https://docs.datasette.io/en/stable/authentication.html), [writable canned queries](https://docs.datasette.io/en/stable/sql_queries.html#writable-canned-queries), flash messages, new plugin hooks and much, much more. -* 28th May 2020: [Datasette 0.43](https://docs.datasette.io/en/stable/changelog.html#v0-43) - Redesigned [register_output_renderer](https://docs.datasette.io/en/stable/plugins.html#plugin-register-output-renderer) plugin hook and various small improvements and fixes. -* 8th May 2020: [Datasette 0.42](https://docs.datasette.io/en/stable/changelog.html#v0-42) - Documented internal methods for plugins to execute read queries against a database. 
-* 6th May 2020: [Datasette 0.41](https://docs.datasette.io/en/stable/changelog.html#v0-41) - New mechanism for [creating custom pages](https://docs.datasette.io/en/0.41/custom_templates.html#custom-pages), new [configuration directory mode](https://docs.datasette.io/en/0.41/config.html#configuration-directory-mode), new `?column__notlike=` table filter and various other smaller improvements. -* 21st April 2020: [Datasette 0.40](https://docs.datasette.io/en/stable/changelog.html#v0-40) - Metadata can now be provided as YAML instead of JSON. Publishing to Zeit Now v1 is no longer supported, but Now v2 support is provided by the new [datasette-publish-now](https://github.com/simonw/datasette-publish-now) plugin. Various bug fixes. -* 24th March 2020: [Datasette 0.39](https://docs.datasette.io/en/stable/changelog.html#v0-39) - New `base_url` configuration option for running Datasette under a different URL prefix, `"sort"` and `"sort_desc"` metadata options for setting a default sort order for a table. -* 8th March 2020: [Datasette 0.38](https://docs.datasette.io/en/stable/changelog.html#v0-38) - New `--memory` option for `datasete publish cloudrun`, [Docker image](https://hub.docker.com/r/datasetteproject/datasette) upgraded to SQLite 3.31.1. -* 25th February 2020: [Datasette 0.37](https://docs.datasette.io/en/stable/changelog.html#v0-37) - new internal APIs enabling plugins to safely write to databases. Read more here: [Datasette Writes](https://simonwillison.net/2020/Feb/26/weeknotes-datasette-writes/). -* 21st February 2020: [Datasette 0.36](https://docs.datasette.io/en/stable/changelog.html#v0-36) - new internals documentation for plugins, `prepare_connection()` now accepts optional `database` and `datasette` arguments. -* 4th February 2020: [Datasette 0.35](https://docs.datasette.io/en/stable/changelog.html#v0-35) - new `.render_template()` method for plugins. -* 29th January 2020: [Datasette 0.34](https://docs.datasette.io/en/stable/changelog.html#v0-34) - improvements to search, `datasette publish cloudrun` and `datasette package`. -* 21st January 2020: [Deploying a data API using GitHub Actions and Cloud Run](https://simonwillison.net/2020/Jan/21/github-actions-cloud-run/) - how to use GitHub Actions and Google Cloud Run to automatically scrape data and deploy the result as an API with Datasette. -* 22nd December 2019: [Datasette 0.33](https://docs.datasette.io/en/stable/changelog.html#v0-33) - various small improvements. -* 19th December 2019: [Building tools to bring data-driven reporting to more newsrooms](https://medium.com/jsk-class-of-2020/building-tools-to-bring-data-driven-reporting-to-more-newsrooms-4520a0c9b3f2) - some notes on my JSK fellowship so far. -* 2nd December 2019: [Niche Museums](https://www.niche-museums.com/) is a new site entirely powered by Datasette, using custom templates and plugins. [niche-museums.com, powered by Datasette](https://simonwillison.net/2019/Nov/25/niche-museums/) describes how the site works, and [datasette-atom: Define an Atom feed using a custom SQL query](https://simonwillison.net/2019/Dec/3/datasette-atom/) describes how the new [datasette-atom plugin](https://github.com/simonw/datasette-atom) was used to add an Atom syndication feed to the site. -* 14th November 2019: [Datasette 0.32](https://docs.datasette.io/en/stable/changelog.html#v0-32) now uses asynchronous rendering in Jinja templates, which means template functions can perform asynchronous operations such as executing SQL queries. 
[datasette-template-sql](https://github.com/simonw/datasette-template-sql) is a new plugin uses this capability to add a new custom `sql(sql_query)` template function. -* 11th November 2019: [Datasette 0.31](https://docs.datasette.io/en/stable/changelog.html#v0-31) - the first version of Datasette to support Python 3.8, which means dropping support for Python 3.5. -* 18th October 2019: [Datasette 0.30](https://docs.datasette.io/en/stable/changelog.html#v0-30) -* 13th July 2019: [Single sign-on against GitHub using ASGI middleware](https://simonwillison.net/2019/Jul/14/sso-asgi/) talks about the implementation of [datasette-auth-github](https://github.com/simonw/datasette-auth-github) in more detail. -* 7th July 2019: [Datasette 0.29](https://docs.datasette.io/en/stable/changelog.html#v0-29) - ASGI, new plugin hooks, facet by date and much, much more... - * [datasette-auth-github](https://github.com/simonw/datasette-auth-github) - a new plugin for Datasette 0.29 that lets you require users to authenticate against GitHub before accessing your Datasette instance. You can whitelist specific users, or you can restrict access to members of specific GitHub organizations or teams. - * [datasette-cors](https://github.com/simonw/datasette-cors) - a plugin that lets you configure CORS access from a list of domains (or a set of domain wildcards) so you can make JavaScript calls to a Datasette instance from a specific set of other hosts. -* 23rd June 2019: [Porting Datasette to ASGI, and Turtles all the way down](https://simonwillison.net/2019/Jun/23/datasette-asgi/) -* 21st May 2019: The anonymized raw data from [the Stack Overflow Developer Survey 2019](https://stackoverflow.blog/2019/05/21/public-data-release-of-stack-overflows-2019-developer-survey/) has been [published in partnership with Glitch](https://glitch.com/culture/discover-insights-explore-developer-survey-results-2019/), powered by Datasette. -* 19th May 2019: [Datasette 0.28](https://docs.datasette.io/en/stable/changelog.html#v0-28) - a salmagundi of new features! - * No longer immutable! Datasette now supports [databases that change](https://docs.datasette.io/en/stable/changelog.html#supporting-databases-that-change). - * [Faceting improvements](https://docs.datasette.io/en/stable/changelog.html#faceting-improvements-and-faceting-plugins) including facet-by-JSON-array and the ability to define custom faceting using plugins. - * [datasette publish cloudrun](https://docs.datasette.io/en/stable/changelog.html#datasette-publish-cloudrun) lets you publish databases to Google's new Cloud Run hosting service. - * New [register_output_renderer](https://docs.datasette.io/en/stable/changelog.html#register-output-renderer-plugins) plugin hook for adding custom output extensions to Datasette in addition to the default `.json` and `.csv`. - * Dozens of other smaller features and tweaks - see [the release notes](https://docs.datasette.io/en/stable/changelog.html#v0-28) for full details. - * Read more about this release here: [Datasette 0.28—and why master should always be releasable](https://simonwillison.net/2019/May/19/datasette-0-28/) -* 24th February 2019: [ -sqlite-utils: a Python library and CLI tool for building SQLite databases](https://simonwillison.net/2019/Feb/25/sqlite-utils/) - a partner tool for easily creating SQLite databases for use with Datasette. 
-* 31st January 2019: [Datasette 0.27](https://docs.datasette.io/en/stable/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://docs.datasette.io/en/stable/ecosystem.html). -* 10th January 2019: [Datasette 0.26.1](https://docs.datasette.io/en/stable/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options. -* 2nd January 2019: [Datasette 0.26](https://docs.datasette.io/en/stable/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument. -* 18th December 2018: [Fast Autocomplete Search for Your Website](https://24ways.org/2018/fast-autocomplete-search-for-your-website/) - a new tutorial on using Datasette to build a JavaScript autocomplete search engine. -* 3rd October 2018: [The interesting ideas in Datasette](https://simonwillison.net/2018/Oct/4/datasette-ideas/) - a write-up of some of the less obvious interesting ideas embedded in the Datasette project. -* 19th September 2018: [Datasette 0.25](https://docs.datasette.io/en/stable/changelog.html#v0-25) - New plugin hooks, improved database view support and an easier way to use more recent versions of SQLite. -* 23rd July 2018: [Datasette 0.24](https://docs.datasette.io/en/stable/changelog.html#v0-24) - a number of small new features -* 29th June 2018: [datasette-vega](https://github.com/simonw/datasette-vega), a new plugin for visualizing data as bar, line or scatter charts -* 21st June 2018: [Datasette 0.23.1](https://docs.datasette.io/en/stable/changelog.html#v0-23-1) - minor bug fixes -* 18th June 2018: [Datasette 0.23: CSV, SpatiaLite and more](https://docs.datasette.io/en/stable/changelog.html#v0-23) - CSV export, foreign key expansion in JSON and CSV, new config options, improved support for SpatiaLite and a bunch of other improvements -* 23rd May 2018: [Datasette 0.22.1 bugfix](https://github.com/simonw/datasette/releases/tag/0.22.1) plus we now use [versioneer](https://github.com/warner/python-versioneer) -* 20th May 2018: [Datasette 0.22: Datasette Facets](https://simonwillison.net/2018/May/20/datasette-facets) -* 5th May 2018: [Datasette 0.21: New _shape=, new _size=, search within columns](https://github.com/simonw/datasette/releases/tag/0.21) -* 25th April 2018: [Exploring the UK Register of Members Interests with SQL and Datasette](https://simonwillison.net/2018/Apr/25/register-members-interests/) - a tutorial describing how [register-of-members-interests.datasettes.com](https://register-of-members-interests.datasettes.com/) was built ([source code here](https://github.com/simonw/register-of-members-interests)) -* 20th April 2018: [Datasette plugins, and building a clustered map visualization](https://simonwillison.net/2018/Apr/20/datasette-plugins/) - introducing Datasette's new plugin system and [datasette-cluster-map](https://pypi.org/project/datasette-cluster-map/), a plugin for visualizing data on a map -* 20th April 2018: [Datasette 0.20: static assets and templates for plugins](https://github.com/simonw/datasette/releases/tag/0.20) -* 16th April 2018: [Datasette 0.19: plugins preview](https://github.com/simonw/datasette/releases/tag/0.19) -* 14th April 2018: [Datasette 0.18: units](https://github.com/simonw/datasette/releases/tag/0.18) -* 9th April 2018: [Datasette 0.15: sort by column](https://github.com/simonw/datasette/releases/tag/0.15) -* 28th March 2018: [Baltimore Sun Public Salary 
Records](https://simonwillison.net/2018/Mar/28/datasette-in-the-wild/) - a data journalism project from the Baltimore Sun powered by Datasette - source code [is available here](https://github.com/baltimore-sun-data/salaries-datasette) -* 27th March 2018: [Cloud-first: Rapid webapp deployment using containers](https://wwwf.imperial.ac.uk/blog/research-software-engineering/2018/03/27/cloud-first-rapid-webapp-deployment-using-containers/) - a tutorial covering deploying Datasette using Microsoft Azure by the Research Software Engineering team at Imperial College London -* 28th January 2018: [Analyzing my Twitter followers with Datasette](https://simonwillison.net/2018/Jan/28/analyzing-my-twitter-followers/) - a tutorial on using Datasette to analyze follower data pulled from the Twitter API -* 17th January 2018: [Datasette Publish: a web app for publishing CSV files as an online database](https://simonwillison.net/2018/Jan/17/datasette-publish/) -* 12th December 2017: [Building a location to time zone API with SpatiaLite, OpenStreetMap and Datasette](https://simonwillison.net/2017/Dec/12/building-a-location-time-zone-api/) -* 9th December 2017: [Datasette 0.14: customization edition](https://github.com/simonw/datasette/releases/tag/0.14) -* 25th November 2017: [New in Datasette: filters, foreign keys and search](https://simonwillison.net/2017/Nov/25/new-in-datasette/) -* 13th November 2017: [Datasette: instantly create and publish an API for your SQLite databases](https://simonwillison.net/2017/Nov/13/datasette/) - ## Installation - pip3 install datasette +If you are on a Mac, [Homebrew](https://brew.sh/) is the easiest way to install Datasette: + + brew install datasette + +You can also install it using `pip` or `pipx`: + + pip install datasette Datasette requires Python 3.6 or higher. We also have [detailed installation instructions](https://docs.datasette.io/en/stable/installation.html) covering other options such as Docker. From 2c0aca4887ed65167606a5fd084f35d046e2a00a Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 15:28:44 -0800 Subject: [PATCH 0037/1404] _header=off option for CSV export, closes #1133 --- datasette/views/base.py | 3 ++- docs/csv_export.rst | 16 ++++++++++++++++ tests/test_csv.py | 8 ++++++++ 3 files changed, 26 insertions(+), 1 deletion(-) diff --git a/datasette/views/base.py b/datasette/views/base.py index b8860b74..76e03206 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -307,7 +307,8 @@ class DataView(BaseView): if not first: data, _, _ = await self.data(request, database, hash, **kwargs) if first: - await writer.writerow(headings) + if request.args.get("_header") != "off": + await writer.writerow(headings) first = False next = data.get("next") for row in data["rows"]: diff --git a/docs/csv_export.rst b/docs/csv_export.rst index 0bda20ef..7f0d8396 100644 --- a/docs/csv_export.rst +++ b/docs/csv_export.rst @@ -28,6 +28,22 @@ file, which looks like this and has the following options: You can try that out on https://latest.datasette.io/fixtures/facetable?_size=4 +.. _csv_export_url_parameters: + +URL parameters +-------------- + +The following options can be used to customize the CSVs returned by Datasette. + +``?_header=off`` + This removes the first row of the CSV file specifying the headings - only the row data will be returned. + +``?_stream=on`` + Stream all matching records, not just the first page of results. See below. 
+ +``?_dl=on`` + Causes Datasette to return a ``content-disposition: attachment; filename="filename.csv"`` header. + Streaming all records --------------------- diff --git a/tests/test_csv.py b/tests/test_csv.py index 0fd665a9..6b17033c 100644 --- a/tests/test_csv.py +++ b/tests/test_csv.py @@ -64,6 +64,14 @@ def test_table_csv_cors_headers(app_client_with_cors): assert "*" == response.headers["Access-Control-Allow-Origin"] +def test_table_csv_no_header(app_client): + response = app_client.get("/fixtures/simple_primary_key.csv?_header=off") + assert response.status == 200 + assert not response.headers.get("Access-Control-Allow-Origin") + assert "text/plain; charset=utf-8" == response.headers["content-type"] + assert EXPECTED_TABLE_CSV.split("\r\n", 1)[1] == response.text + + def test_table_csv_with_labels(app_client): response = app_client.get("/fixtures/facetable.csv?_labels=1") assert response.status == 200 From 967cc05545480f09d421a7bf8b6dbfc27609a181 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Thu, 10 Dec 2020 15:37:08 -0800 Subject: [PATCH 0038/1404] Powered by links to datasette.io, closes #1138 --- datasette/templates/_footer.html | 2 +- datasette/templates/patterns.html | 2 +- setup.py | 2 +- tests/test_html.py | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/datasette/templates/_footer.html b/datasette/templates/_footer.html index f930f445..b1380ae9 100644 --- a/datasette/templates/_footer.html +++ b/datasette/templates/_footer.html @@ -1,4 +1,4 @@ -Powered by Datasette +Powered by Datasette {% if query_ms %}· Query took {{ query_ms|round(3) }}ms{% endif %} {% if metadata %} {% if metadata.license or metadata.license_url %}· Data license: diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html index 4ef2c29f..984c1bf6 100644 --- a/datasette/templates/patterns.html +++ b/datasette/templates/patterns.html @@ -476,7 +476,7 @@

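The ``_header=off`` option documented in the CSV export patch above suppresses the heading row entirely. A sketch of consuming such a response using only the standard library — ``example.com`` is a placeholder for a real Datasette instance, not a live URL:

    import csv
    import io
    import urllib.request

    url = "https://example.com/fixtures/simple_primary_key.csv?_header=off"
    body = urllib.request.urlopen(url).read().decode("utf-8")
    # With _header=off every row is data; there is no heading row to skip.
    for row in csv.reader(io.StringIO(body)):
        print(row)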