generated_columns table in fixtures.py, closes #1119

Simon Willison, 2020-11-30 16:28:02 -08:00 (committed via GitHub)
commit 17cbbb1f7f
6 changed files with 93 additions and 65 deletions

datasette/utils/__init__.py

@@ -19,7 +19,7 @@ import urllib
 import numbers
 import yaml
 from .shutil_backport import copytree
-from .sqlite import sqlite3, sqlite_version
+from .sqlite import sqlite3, sqlite_version, supports_table_xinfo
 from ..plugins import pm
@@ -561,7 +561,7 @@ def table_columns(conn, table):
 def table_column_details(conn, table):
-    if sqlite_version() >= (3, 26, 0):
+    if supports_table_xinfo():
         # table_xinfo was added in 3.26.0
         return [
             Column(*r)
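A note on what supports_table_xinfo() is guarding here: PRAGMA table_xinfo (SQLite 3.26.0 and later) returns the same rows as PRAGMA table_info plus a trailing "hidden" flag, which is what makes generated columns detectable. A minimal sketch of that distinction, assuming the stdlib sqlite3 module rather than Datasette's actual Column handling:

import sqlite3

def column_rows(conn, table):
    # table_xinfo (SQLite 3.26.0+) adds a trailing "hidden" flag per row:
    # 0 = ordinary column, 2 = VIRTUAL generated, 3 = STORED generated.
    if sqlite3.sqlite_version_info >= (3, 26, 0):
        return conn.execute(f"PRAGMA table_xinfo([{table}])").fetchall()
    # Older SQLite: no hidden flag, so generated columns are indistinguishable.
    return conn.execute(f"PRAGMA table_info([{table}])").fetchall()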

datasette/utils/sqlite.py

@@ -26,3 +26,11 @@ def _sqlite_version():
             .split("."),
         )
     )
+
+
+def supports_table_xinfo():
+    return sqlite_version() >= (3, 26, 0)
+
+
+def supports_generated_columns():
+    return sqlite_version() >= (3, 31, 0)
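These two helpers give the version checks a single home. One way they can gate a test, sketched here with a hypothetical test name (the test later in this commit still spells out the raw version check in its skipif):

import pytest
from datasette.utils.sqlite import supports_generated_columns

@pytest.mark.skipif(
    not supports_generated_columns(),
    reason="generated columns were added in SQLite 3.31.0",
)
def test_something_using_generated_columns():
    ...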

tests/fixtures.py

@@ -1,5 +1,5 @@
 from datasette.app import Datasette
-from datasette.utils.sqlite import sqlite3
+from datasette.utils.sqlite import sqlite3, sqlite_version, supports_generated_columns
 from datasette.utils.testing import TestClient
 import click
 import contextlib
@@ -116,6 +116,8 @@ def make_app_client(
             immutables = []
         conn = sqlite3.connect(filepath)
         conn.executescript(TABLES)
+        if supports_generated_columns():
+            conn.executescript(GENERATED_COLUMNS_SQL)
         for sql, params in TABLE_PARAMETERIZED_SQL:
             with conn:
                 conn.execute(sql, params)
@@ -699,6 +701,18 @@ INSERT INTO "searchable_fts" (rowid, text1, text2)
 SELECT rowid, text1, text2 FROM searchable;
 """
+
+GENERATED_COLUMNS_SQL = """
+CREATE TABLE generated_columns (
+    body TEXT,
+    id INT GENERATED ALWAYS AS (json_extract(body, '$.number')) STORED,
+    consideration INT GENERATED ALWAYS AS (json_extract(body, '$.string')) STORED
+);
+INSERT INTO generated_columns (body) VALUES ('{
+    "number": 1,
+    "string": "This is a string"
+}');
+"""

 def assert_permissions_checked(datasette, actions):
     # actions is a list of "action" or (action, resource) tuples
@@ -754,6 +768,9 @@ def cli(db_filename, metadata, plugins_path, recreate):
     for sql, params in TABLE_PARAMETERIZED_SQL:
         with conn:
             conn.execute(sql, params)
+    if supports_generated_columns():
+        with conn:
+            conn.executescript(GENERATED_COLUMNS_SQL)
     print(f"Test tables written to {db_filename}")
     if metadata:
         open(metadata, "w").write(json.dumps(METADATA, indent=4))
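For readers unfamiliar with STORED generated columns: SQLite computes them at write time from the declared expression, so the INSERT above supplies only body and the id and consideration values materialize automatically. A standalone sketch of the same behavior, guarded because the Python sqlite3 module may link an older SQLite (and json_extract requires the JSON1 functions, present in modern builds):

import sqlite3

conn = sqlite3.connect(":memory:")
if sqlite3.sqlite_version_info >= (3, 31, 0):
    conn.executescript(
        """
        CREATE TABLE generated_columns (
            body TEXT,
            id INT GENERATED ALWAYS AS (json_extract(body, '$.number')) STORED,
            consideration INT GENERATED ALWAYS AS (json_extract(body, '$.string')) STORED
        );
        INSERT INTO generated_columns (body) VALUES ('{"number": 1, "string": "This is a string"}');
        """
    )
    # Both generated columns were derived from body at insert time:
    print(conn.execute("SELECT id, consideration FROM generated_columns").fetchall())
    # -> [(1, 'This is a string')]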

tests/test_api.py

@@ -1,7 +1,7 @@
 from datasette.app import Datasette
 from datasette.plugins import DEFAULT_PLUGINS
 from datasette.utils import detect_json1
-from datasette.utils.sqlite import sqlite3, sqlite_version
+from datasette.utils.sqlite import sqlite3, sqlite_version, supports_table_xinfo
 from datasette.version import __version__
 from .fixtures import (  # noqa
     app_client,
@@ -19,6 +19,7 @@ from .fixtures import (  # noqa
     generate_compound_rows,
     generate_sortable_rows,
     make_app_client,
+    supports_generated_columns,
     EXPECTED_PLUGINS,
     METADATA,
 )
@@ -35,7 +36,7 @@ def test_homepage(app_client):
     assert response.json.keys() == {"fixtures": 0}.keys()
     d = response.json["fixtures"]
     assert d["name"] == "fixtures"
-    assert d["tables_count"] == 24
+    assert d["tables_count"] == 25 if supports_generated_columns() else 24
     assert len(d["tables_and_views_truncated"]) == 5
     assert d["tables_and_views_more"] is True
     # 4 hidden FTS tables + no_primary_key (hidden in metadata)
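One thing to watch in the tables_count change: a conditional expression binds more loosely than ==, so the new line parses with the comparison inside the conditional. On SQLite older than 3.31.0 the assert therefore evaluates the truthy constant 24 and can never fail. Shown as plain Python (test-local names as in the diff):

# How Python groups the line as committed:
assert (d["tables_count"] == 25) if supports_generated_columns() else 24

# The comparison the assertion presumably intends:
assert d["tables_count"] == (25 if supports_generated_columns() else 24)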
@@ -268,6 +269,22 @@ def test_database_page(app_client):
             },
             "private": False,
         },
+    ] + (
+        [
+            {
+                "columns": ["body", "id", "consideration"],
+                "count": 1,
+                "foreign_keys": {"incoming": [], "outgoing": []},
+                "fts_table": None,
+                "hidden": False,
+                "name": "generated_columns",
+                "primary_keys": [],
+                "private": False,
+            }
+        ]
+        if supports_generated_columns()
+        else []
+    ) + [
         {
             "name": "infinity",
             "columns": ["value"],
@@ -527,7 +544,7 @@ def test_database_page(app_client):
                     "docid",
                     "__langid",
                 ]
-                if sqlite_version() >= (3, 26, 0)
+                if supports_table_xinfo()
                 else []
             ),
             "primary_keys": [],
@@ -1934,31 +1951,13 @@ def test_paginate_using_link_header(app_client, qs):
     sqlite_version() < (3, 31, 0),
     reason="generated columns were added in SQLite 3.31.0",
 )
-@pytest.mark.asyncio
-async def test_generated_columns_are_visible_in_datasette(tmp_path_factory):
-    db_directory = tmp_path_factory.mktemp("dbs")
-    db_path = db_directory / "test.db"
-    conn = sqlite3.connect(str(db_path))
-    conn.executescript(
-        """
-    CREATE TABLE deeds (
-        body TEXT,
-        id INT GENERATED ALWAYS AS (json_extract(body, '$.id')) STORED,
-        consideration INT GENERATED ALWAYS AS (json_extract(body, '$.consideration')) STORED
-    );
-    INSERT INTO deeds (body) VALUES ('{
-        "id": 1,
-        "consideration": "This is the consideration"
-    }');
-    """
-    )
-    datasette = Datasette([db_path])
-    response = await datasette.client.get("/test/deeds.json?_shape=array")
+async def test_generated_columns_are_visible_in_datasette(app_client):
+    response = app_client.get("/test/generated_columns.json?_shape=array")
     assert response.json() == [
         {
             "rowid": 1,
-            "body": '{\n "id": 1,\n "consideration": "This is the consideration"\n }',
-            "id": 1,
-            "consideration": "This is the consideration",
+            "body": '{\n "number": 1,\n "string": "This is a string"\n }',
+            "number": 1,
+            "string": "This is a string",
         }
     ]
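The rewritten test reuses the shared app_client fixture instead of building a scratch database per test, and _shape=array asks Datasette's JSON API for a bare array of row objects, which keeps the expected value compact. For manual inspection, a hypothetical local setup: run "datasette fixtures.db" against a database written by the cli helper above (Datasette serves on 127.0.0.1:8001 by default), then:

import json
import urllib.request

url = "http://127.0.0.1:8001/fixtures/generated_columns.json?_shape=array"
rows = json.loads(urllib.request.urlopen(url).read())
print(rows)  # one dict per row, keyed by column name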

tests/test_database.py

@@ -2,7 +2,7 @@
 Tests for the datasette.database.Database class
 """
 from datasette.database import Database, Results, MultipleValues
-from datasette.utils.sqlite import sqlite3
+from datasette.utils.sqlite import sqlite3, supports_generated_columns
 from datasette.utils import Column
 from .fixtures import app_client
 import pytest
@@ -340,38 +340,42 @@ async def test_get_all_foreign_keys(db):
 @pytest.mark.asyncio
 async def test_table_names(db):
     table_names = await db.table_names()
-    assert table_names == [
-        "simple_primary_key",
-        "primary_key_multiple_columns",
-        "primary_key_multiple_columns_explicit_label",
-        "compound_primary_key",
-        "compound_three_primary_keys",
-        "foreign_key_references",
-        "sortable",
-        "no_primary_key",
-        "123_starts_with_digits",
-        "Table With Space In Name",
-        "table/with/slashes.csv",
-        "complex_foreign_keys",
-        "custom_foreign_key_label",
-        "units",
-        "tags",
-        "searchable",
-        "searchable_tags",
-        "searchable_fts",
-        "searchable_fts_segments",
-        "searchable_fts_segdir",
-        "searchable_fts_docsize",
-        "searchable_fts_stat",
-        "select",
-        "infinity",
-        "facet_cities",
-        "facetable",
-        "binary_data",
-        "roadside_attractions",
-        "attraction_characteristic",
-        "roadside_attraction_characteristics",
-    ]
+    assert (
+        table_names
+        == [
+            "simple_primary_key",
+            "primary_key_multiple_columns",
+            "primary_key_multiple_columns_explicit_label",
+            "compound_primary_key",
+            "compound_three_primary_keys",
+            "foreign_key_references",
+            "sortable",
+            "no_primary_key",
+            "123_starts_with_digits",
+            "Table With Space In Name",
+            "table/with/slashes.csv",
+            "complex_foreign_keys",
+            "custom_foreign_key_label",
+            "units",
+            "tags",
+            "searchable",
+            "searchable_tags",
+            "searchable_fts",
+            "searchable_fts_segments",
+            "searchable_fts_segdir",
+            "searchable_fts_docsize",
+            "searchable_fts_stat",
+            "select",
+            "infinity",
+            "facet_cities",
+            "facetable",
+            "binary_data",
+            "roadside_attractions",
+            "attraction_characteristic",
+            "roadside_attraction_characteristics",
+        ]
+        + (["generated_columns"] if supports_generated_columns() else [])
+    )


 @pytest.mark.asyncio
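The assertion is reflowed so the optional table can be appended without duplicating the thirty-entry list: concatenating a conditional one-element list keeps a single source of truth for both SQLite versions. The pattern in isolation, with illustrative names:

def expected_table_names(has_generated_columns):
    base = ["simple_primary_key", "facetable"]  # abridged
    return base + (["generated_columns"] if has_generated_columns else [])

assert expected_table_names(False) == ["simple_primary_key", "facetable"]
assert expected_table_names(True)[-1] == "generated_columns"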

tests/test_plugins.py

@@ -413,8 +413,7 @@ def test_hook_register_output_renderer_all_parameters(app_client):
     # Lots of 'at 0x103a4a690' in here - replace those so we can do
     # an easy comparison
     body = at_memory_re.sub(" at 0xXXX", response.text)
-    assert {
-        "1+1": 2,
+    assert json.loads(body) == {
         "datasette": "<datasette.app.Datasette object at 0xXXX>",
         "columns": [
             "pk",
@@ -451,7 +450,7 @@ def test_hook_register_output_renderer_all_parameters(app_client):
         "table": "facetable",
         "request": "<datasette.utils.asgi.Request object at 0xXXX>",
         "view_name": "table",
-    } == json.loads(body)
+        "1+1": 2,
+    }
     # Test that query_name is set correctly
     query_response = app_client.get("/fixtures/pragma_cache_size.testall")
     assert "pragma_cache_size" == json.loads(query_response.body)["query_name"]