Mirror of https://github.com/simonw/datasette.git
table_config instead of table_metadata (#2257)

Table configuration that was incorrectly placed in metadata is now treated as if it had been placed in config. New await datasette.table_config() method. Closes #2247

Parent commit: 52a1dac5d2
This commit: 60c6692f68

6 changed files with 175 additions and 30 deletions
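The migration happens in the Datasette constructor, so the new behavior can be observed directly. A minimal sketch (the mydb/mytable names are invented for illustration):

import asyncio
from datasette.app import Datasette

async def main():
    ds = Datasette(
        memory=True,
        metadata={
            "databases": {
                "mydb": {"tables": {"mytable": {"sort": "name", "hidden": True}}}
            }
        },
    )
    # "sort" and "hidden" are known table configuration keys, so the
    # constructor moves them out of metadata and into config
    print(await ds.table_config("mydb", "mytable"))
    # -> {'hidden': True, 'sort': 'name'}

asyncio.run(main())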
datasette/app.py
@@ -75,6 +75,7 @@ from .utils import (
     format_bytes,
     module_from_path,
     move_plugins,
+    move_table_config,
     parse_metadata,
     resolve_env_secrets,
     resolve_routes,
@@ -346,7 +347,9 @@ class Datasette:
         # Move any "plugins" settings from metadata to config - updates them in place
         metadata = metadata or {}
         config = config or {}
-        move_plugins(metadata, config)
+        metadata, config = move_plugins(metadata, config)
+        # Now migrate any known table configuration settings over as well
+        metadata, config = move_table_config(metadata, config)
 
         self._metadata_local = metadata or {}
         self.sqlite_extensions = []
@@ -1202,10 +1205,11 @@ class Datasette:
     def _actor(self, request):
         return {"actor": request.actor}
 
-    async def table_config(self, database, table):
-        """Fetch table-specific metadata."""
+    async def table_config(self, database: str, table: str) -> dict:
+        """Return dictionary of configuration for specified table"""
         return (
-            (self.metadata("databases") or {})
+            (self.config or {})
+            .get("databases", {})
             .get(database, {})
             .get("tables", {})
             .get(table, {})
datasette/database.py
@@ -487,13 +487,11 @@ class Database:
                 )
             ).rows
         ]
-        # Add any from metadata.json
-        db_metadata = self.ds.metadata(database=self.name)
-        if "tables" in db_metadata:
+        # Add any tables marked as hidden in config
+        db_config = self.ds.config.get("databases", {}).get(self.name, {})
+        if "tables" in db_config:
             hidden_tables += [
-                t
-                for t in db_metadata["tables"]
-                if db_metadata["tables"][t].get("hidden")
+                t for t in db_config["tables"] if db_config["tables"][t].get("hidden")
             ]
         # Also mark as hidden any tables which start with the name of a hidden table
         # e.g. "searchable_fts" implies "searchable_fts_content" should be hidden
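In practice this makes hiding a table a configuration concern rather than a metadata one. A minimal sketch, again with invented names:

from datasette.app import Datasette

ds = Datasette(
    memory=True,
    config={
        "databases": {
            "mydb": {"tables": {"internal_audit": {"hidden": True}}}
        }
    },
)
# The hidden-table logic above reads ds.config, so "internal_audit" is
# excluded from default table listings, along with any table whose name
# starts with "internal_audit" (such as FTS shadow tables)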
datasette/utils/__init__.py
@@ -2,6 +2,7 @@ import asyncio
 from contextlib import contextmanager
 import click
 from collections import OrderedDict, namedtuple, Counter
+import copy
 import base64
 import hashlib
 import inspect
@@ -17,7 +18,7 @@ import time
 import types
 import secrets
 import shutil
-from typing import Iterable
+from typing import Iterable, Tuple
 import urllib
 import yaml
 from .shutil_backport import copytree
@@ -1290,11 +1291,24 @@ def make_slot_function(name, datasette, request, **kwargs):
     return inner
 
 
-def move_plugins(source, destination):
+def prune_empty_dicts(d: dict):
+    """
+    Recursively prune all empty dictionaries from a given dictionary.
+    """
+    for key, value in list(d.items()):
+        if isinstance(value, dict):
+            prune_empty_dicts(value)
+        if value == {}:
+            d.pop(key, None)
+
+
+def move_plugins(source: dict, destination: dict) -> Tuple[dict, dict]:
     """
     Move 'plugins' keys from source to destination dictionary. Creates hierarchy in destination if needed.
     After moving, recursively remove any keys in the source that are left empty.
     """
+    source = copy.deepcopy(source)
+    destination = copy.deepcopy(destination)
 
     def recursive_move(src, dest, path=None):
         if path is None:
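prune_empty_dicts() previously lived inside move_plugins(); it is now a module-level helper so that move_table_config() below can reuse it. Note also that move_plugins() now deep-copies its arguments and returns (source, destination) rather than mutating in place, which is why the call site in app.py changed. A quick sketch of the pruning helper:

from datasette.utils import prune_empty_dicts

d = {"a": {"b": {}}, "c": 1}
prune_empty_dicts(d)
# "b" is empty so it is pruned, leaving "a" empty, so "a" is pruned too
print(d)  # {'c': 1}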
@@ -1316,18 +1330,49 @@ def move_plugins(source, destination):
             if not value:
                 src.pop(key, None)
 
-    def prune_empty_dicts(d):
-        """
-        Recursively prune all empty dictionaries from a given dictionary.
-        """
-        for key, value in list(d.items()):
-            if isinstance(value, dict):
-                prune_empty_dicts(value)
-            if value == {}:
-                d.pop(key, None)
-
     recursive_move(source, destination)
     prune_empty_dicts(source)
+    return source, destination
+
+
+_table_config_keys = (
+    "hidden",
+    "sort",
+    "sort_desc",
+    "size",
+    "sortable_columns",
+    "label_column",
+    "facets",
+    "fts_table",
+    "fts_pk",
+    "searchmode",
+    "units",
+)
+
+
+def move_table_config(metadata: dict, config: dict):
+    """
+    Move all known table configuration keys from metadata to config.
+    """
+    if "databases" not in metadata:
+        return metadata, config
+    metadata = copy.deepcopy(metadata)
+    config = copy.deepcopy(config)
+    for database_name, database in metadata["databases"].items():
+        if "tables" not in database:
+            continue
+        for table_name, table in database["tables"].items():
+            for key in _table_config_keys:
+                if key in table:
+                    config.setdefault("databases", {}).setdefault(
+                        database_name, {}
+                    ).setdefault("tables", {}).setdefault(table_name, {})[
+                        key
+                    ] = table.pop(
+                        key
+                    )
+    prune_empty_dicts(metadata)
+    return metadata, config
 
 
 def redact_keys(original: dict, key_patterns: Iterable) -> dict:
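The effect of move_table_config(), sketched with invented names: known keys (here "sort") migrate into config, while everything else (here a "columns" description block) stays behind in metadata:

from datasette.utils import move_table_config

metadata = {
    "databases": {
        "mydb": {
            "tables": {
                "mytable": {
                    "sort": "name",
                    "columns": {"name": "A description"},
                }
            }
        }
    }
}
new_metadata, new_config = move_table_config(metadata, {})
assert new_config == {
    "databases": {"mydb": {"tables": {"mytable": {"sort": "name"}}}}
}
assert new_metadata == {
    "databases": {
        "mydb": {"tables": {"mytable": {"columns": {"name": "A description"}}}}
    }
}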
datasette/views/table.py
@@ -142,11 +142,11 @@ async def display_columns_and_rows(
     """Returns columns, rows for specified table - including fancy foreign key treatment"""
     sortable_columns = sortable_columns or set()
     db = datasette.databases[database_name]
-    table_metadata = await datasette.table_config(database_name, table_name)
-    column_descriptions = table_metadata.get("columns") or {}
+    column_descriptions = datasette.metadata("columns", database_name, table_name) or {}
     column_details = {
         col.name: col for col in await db.table_column_details(table_name)
     }
+    table_config = await datasette.table_config(database_name, table_name)
     pks = await db.primary_keys(table_name)
     pks_for_display = pks
     if not pks_for_display:
@@ -193,7 +193,6 @@ async def display_columns_and_rows(
                     "raw": pk_path,
                     "value": markupsafe.Markup(
                         '<a href="{table_path}/{flat_pks_quoted}">{flat_pks}</a>'.format(
-                            base_url=base_url,
                             table_path=datasette.urls.table(database_name, table_name),
                             flat_pks=str(markupsafe.escape(pk_path)),
                             flat_pks_quoted=path_from_row_pks(row, pks, not pks),
@@ -274,9 +273,9 @@ async def display_columns_and_rows(
                     ),
                 )
             )
-        elif column in table_metadata.get("units", {}) and value != "":
+        elif column in table_config.get("units", {}) and value != "":
             # Interpret units using pint
-            value = value * ureg(table_metadata["units"][column])
+            value = value * ureg(table_config["units"][column])
             # Pint uses floating point which sometimes introduces errors in the compact
             # representation, which we have to round off to avoid ugliness. In the vast
             # majority of cases this rounding will be inconsequential. I hope.
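The units feature itself is unchanged; only the lookup source moved from table_metadata to table_config. For reference, a standalone sketch of what the pint conversion does with a units setting such as {"units": {"distance": "m"}}:

import pint

ureg = pint.UnitRegistry()
value = 1500 * ureg("m")  # raw column value 1500 with unit "m"
print(value.to_compact())  # 1.5 kilometer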
@@ -591,7 +590,7 @@ class TableDropView(BaseView):
         try:
             data = json.loads(await request.post_body())
             confirm = data.get("confirm")
-        except json.JSONDecodeError as e:
+        except json.JSONDecodeError:
             pass
 
         if not confirm:
tests/test_api.py
@@ -771,7 +771,7 @@ def test_databases_json(app_client_two_attached_databases_one_immutable):
 @pytest.mark.asyncio
 async def test_metadata_json(ds_client):
     response = await ds_client.get("/-/metadata.json")
-    assert response.json() == METADATA
+    assert response.json() == ds_client.ds.metadata()
 
 
 @pytest.mark.asyncio
@@ -1061,3 +1061,102 @@ async def test_config_json(config, expected):
     ds = Datasette(config=config)
     response = await ds.client.get("/-/config.json")
     assert response.json() == expected
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "metadata,expected_config,expected_metadata",
+    (
+        ({}, {}, {}),
+        (
+            # Metadata input
+            {
+                "title": "Datasette Fixtures",
+                "databases": {
+                    "fixtures": {
+                        "tables": {
+                            "sortable": {
+                                "sortable_columns": [
+                                    "sortable",
+                                    "sortable_with_nulls",
+                                    "sortable_with_nulls_2",
+                                    "text",
+                                ],
+                            },
+                            "no_primary_key": {"sortable_columns": [], "hidden": True},
+                            "units": {"units": {"distance": "m", "frequency": "Hz"}},
+                            "primary_key_multiple_columns_explicit_label": {
+                                "label_column": "content2"
+                            },
+                            "simple_view": {"sortable_columns": ["content"]},
+                            "searchable_view_configured_by_metadata": {
+                                "fts_table": "searchable_fts",
+                                "fts_pk": "pk",
+                            },
+                            "roadside_attractions": {
+                                "columns": {
+                                    "name": "The name of the attraction",
+                                    "address": "The street address for the attraction",
+                                }
+                            },
+                            "attraction_characteristic": {"sort_desc": "pk"},
+                            "facet_cities": {"sort": "name"},
+                            "paginated_view": {"size": 25},
+                        },
+                    }
+                },
+            },
+            # Should produce a config with just the table configuration keys
+            {
+                "databases": {
+                    "fixtures": {
+                        "tables": {
+                            "sortable": {
+                                "sortable_columns": [
+                                    "sortable",
+                                    "sortable_with_nulls",
+                                    "sortable_with_nulls_2",
+                                    "text",
+                                ]
+                            },
+                            "units": {"units": {"distance": "m", "frequency": "Hz"}},
+                            # These get redacted:
+                            "no_primary_key": "***",
+                            "primary_key_multiple_columns_explicit_label": "***",
+                            "simple_view": {"sortable_columns": ["content"]},
+                            "searchable_view_configured_by_metadata": {
+                                "fts_table": "searchable_fts",
+                                "fts_pk": "pk",
+                            },
+                            "attraction_characteristic": {"sort_desc": "pk"},
+                            "facet_cities": {"sort": "name"},
+                            "paginated_view": {"size": 25},
+                        }
+                    }
+                }
+            },
+            # And metadata with everything else
+            {
+                "title": "Datasette Fixtures",
+                "databases": {
+                    "fixtures": {
+                        "tables": {
+                            "roadside_attractions": {
+                                "columns": {
+                                    "name": "The name of the attraction",
+                                    "address": "The street address for the attraction",
+                                }
+                            },
+                        }
+                    }
+                },
+            },
+        ),
+    ),
+)
+async def test_upgrade_metadata(metadata, expected_config, expected_metadata):
+    ds = Datasette(metadata=metadata)
+    response = await ds.client.get("/-/config.json")
+    assert response.json() == expected_config
+    response2 = await ds.client.get("/-/metadata.json")
+    assert response2.json() == expected_metadata
tests/test_html.py
@@ -753,7 +753,7 @@ async def test_metadata_json_html(ds_client):
     response = await ds_client.get("/-/metadata")
     assert response.status_code == 200
     pre = Soup(response.content, "html.parser").find("pre")
-    assert METADATA == json.loads(pre.text)
+    assert ds_client.ds.metadata() == json.loads(pre.text)
 
 
 @pytest.mark.asyncio