diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 9f53b01e..8ffdbfd5 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -2,10 +2,10 @@ name: Deploy latest.datasette.io on: workflow_dispatch: - push: - branches: - - main - # - 1.0-dev + # push: + # branches: + # - main + # - 1.0-dev permissions: contents: read @@ -15,12 +15,19 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out datasette - uses: actions/checkout@v5 + uses: actions/checkout@v3 - name: Set up Python uses: actions/setup-python@v6 + # Using Python 3.10 for gcloud compatibility: with: - python-version: "3.13" - cache: pip + python-version: "3.10" + - uses: actions/cache@v4 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }} + restore-keys: | + ${{ runner.os }}-pip- - name: Install Python dependencies run: | python -m pip install --upgrade pip @@ -97,7 +104,7 @@ jobs: # cat metadata.json - id: auth name: Authenticate to Google Cloud - uses: google-github-actions/auth@v3 + uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.GCP_SA_KEY }} - name: Set up Cloud SDK diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1e5e03d2..5e294f93 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -27,6 +27,14 @@ jobs: run: | pip install -e '.[test]' pip freeze + - name: Cache Playwright browsers + uses: actions/cache@v4 + with: + path: ~/.cache/ms-playwright/ + key: ${{ runner.os }}-browsers + - name: Install Playwright dependencies + run: | + playwright install - name: Run tests run: | pytest -n auto -m "not serial" diff --git a/Justfile b/Justfile index a47662c3..abb134a6 100644 --- a/Justfile +++ b/Justfile @@ -29,7 +29,7 @@ export DATASETTE_SECRET := "not_a_secret" # Serve live docs on localhost:8000 @docs: cog blacken-docs - uv run --extra docs make -C docs livehtml + uv sync --extra docs 
&& cd docs && uv run make livehtml # Build docs as static HTML @docs-build: cog blacken-docs diff --git a/datasette/app.py b/datasette/app.py index b9955925..60a20032 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -2,7 +2,6 @@ from __future__ import annotations from asgi_csrf import Errors import asyncio -import contextvars from typing import TYPE_CHECKING, Any, Dict, Iterable, List if TYPE_CHECKING: @@ -131,22 +130,6 @@ from .resources import DatabaseResource, TableResource app_root = Path(__file__).parent.parent -# Context variable to track when code is executing within a datasette.client request -_in_datasette_client = contextvars.ContextVar("in_datasette_client", default=False) - - -class _DatasetteClientContext: - """Context manager to mark code as executing within a datasette.client request.""" - - def __enter__(self): - self.token = _in_datasette_client.set(True) - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - _in_datasette_client.reset(self.token) - return False - - @dataclasses.dataclass class PermissionCheck: """Represents a logged permission check for debugging purposes.""" @@ -321,7 +304,6 @@ class Datasette: crossdb=False, nolock=False, internal=None, - default_deny=False, ): self._startup_invoked = False assert config_dir is None or isinstance( @@ -530,7 +512,6 @@ class Datasette: self._permission_checks = collections.deque(maxlen=200) self._root_token = secrets.token_hex(32) self.root_enabled = False - self.default_deny = default_deny self.client = DatasetteClient(self) async def apply_metadata_json(self): @@ -606,15 +587,6 @@ class Datasette: "select database_name, schema_version from catalog_databases" ) } - # Delete stale entries for databases that are no longer attached - stale_databases = set(current_schema_versions.keys()) - set( - self.databases.keys() - ) - for stale_db_name in stale_databases: - await internal_db.execute_write( - "DELETE FROM catalog_databases WHERE database_name = ?", - [stale_db_name], - ) for 
database_name, db in self.databases.items(): schema_version = (await db.execute("PRAGMA schema_version")).first()[0] # Compare schema versions to see if we should skip it @@ -640,17 +612,6 @@ class Datasette: def urls(self): return Urls(self) - @property - def pm(self): - """ - Return the global plugin manager instance. - - This provides access to the pluggy PluginManager that manages all - Datasette plugins and hooks. Use datasette.pm.hook.hook_name() to - call plugin hooks. - """ - return pm - async def invoke_startup(self): # This must be called for Datasette to be in a usable state if self._startup_invoked: @@ -703,14 +664,6 @@ class Datasette: def unsign(self, signed, namespace="default"): return URLSafeSerializer(self._secret, namespace).loads(signed) - def in_client(self) -> bool: - """Check if the current code is executing within a datasette.client request. - - Returns: - bool: True if currently executing within a datasette.client request, False otherwise. - """ - return _in_datasette_client.get() - def create_token( self, actor_id: str, @@ -2435,10 +2388,7 @@ class DatasetteClient: def __init__(self, ds): self.ds = ds - - @property - def app(self): - return self.ds.app() + self.app = ds.app() def actor_cookie(self, actor): # Utility method, mainly for tests @@ -2454,20 +2404,19 @@ class DatasetteClient: async def _request(self, method, path, skip_permission_checks=False, **kwargs): from datasette.permissions import SkipPermissions - with _DatasetteClientContext(): - if skip_permission_checks: - with SkipPermissions(): - async with httpx.AsyncClient( - transport=httpx.ASGITransport(app=self.app), - cookies=kwargs.pop("cookies", None), - ) as client: - return await getattr(client, method)(self._fix(path), **kwargs) - else: + if skip_permission_checks: + with SkipPermissions(): async with httpx.AsyncClient( transport=httpx.ASGITransport(app=self.app), cookies=kwargs.pop("cookies", None), ) as client: return await getattr(client, method)(self._fix(path), 
**kwargs) + else: + async with httpx.AsyncClient( + transport=httpx.ASGITransport(app=self.app), + cookies=kwargs.pop("cookies", None), + ) as client: + return await getattr(client, method)(self._fix(path), **kwargs) async def get(self, path, skip_permission_checks=False, **kwargs): return await self._request( @@ -2519,17 +2468,8 @@ class DatasetteClient: from datasette.permissions import SkipPermissions avoid_path_rewrites = kwargs.pop("avoid_path_rewrites", None) - with _DatasetteClientContext(): - if skip_permission_checks: - with SkipPermissions(): - async with httpx.AsyncClient( - transport=httpx.ASGITransport(app=self.app), - cookies=kwargs.pop("cookies", None), - ) as client: - return await client.request( - method, self._fix(path, avoid_path_rewrites), **kwargs - ) - else: + if skip_permission_checks: + with SkipPermissions(): async with httpx.AsyncClient( transport=httpx.ASGITransport(app=self.app), cookies=kwargs.pop("cookies", None), @@ -2537,3 +2477,11 @@ class DatasetteClient: return await client.request( method, self._fix(path, avoid_path_rewrites), **kwargs ) + else: + async with httpx.AsyncClient( + transport=httpx.ASGITransport(app=self.app), + cookies=kwargs.pop("cookies", None), + ) as client: + return await client.request( + method, self._fix(path, avoid_path_rewrites), **kwargs + ) diff --git a/datasette/cli.py b/datasette/cli.py index 21420491..aaf1b244 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -438,11 +438,6 @@ def uninstall(packages, yes): help="Output URL that sets a cookie authenticating the root user", is_flag=True, ) -@click.option( - "--default-deny", - help="Deny all permissions by default", - is_flag=True, -) @click.option( "--get", help="Run an HTTP GET request against this path, print results and exit", @@ -519,7 +514,6 @@ def serve( settings, secret, root, - default_deny, get, headers, token, @@ -600,7 +594,6 @@ def serve( crossdb=crossdb, nolock=nolock, internal=internal, - default_deny=default_deny, ) # Separate 
directories from files diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py new file mode 100644 index 00000000..5642cdfe --- /dev/null +++ b/datasette/default_permissions.py @@ -0,0 +1,490 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from datasette.app import Datasette + +from datasette import hookimpl +from datasette.permissions import PermissionSQL +from datasette.utils import actor_matches_allow +import itsdangerous +import time + + +@hookimpl(specname="permission_resources_sql") +async def actor_restrictions_sql(datasette, actor, action): + """Handle actor restriction-based permission rules (_r key).""" + if not actor: + return None + + restrictions = actor.get("_r") if isinstance(actor, dict) else None + if restrictions is None: + return [] + + # Check if this action appears in restrictions (with abbreviations) + action_obj = datasette.actions.get(action) + action_checks = {action} + if action_obj and action_obj.abbr: + action_checks.add(action_obj.abbr) + + # Check if globally allowed in restrictions + global_actions = restrictions.get("a", []) + is_globally_allowed = action_checks.intersection(global_actions) + + if is_globally_allowed: + # Globally allowed - no restriction filtering needed + return [] + + # Not globally allowed - build restriction_sql that lists allowlisted resources + restriction_selects = [] + restriction_params = {} + param_counter = 0 + + # Add database-level allowlisted resources + db_restrictions = restrictions.get("d", {}) + for db_name, db_actions in db_restrictions.items(): + if action_checks.intersection(db_actions): + prefix = f"restr_{param_counter}" + param_counter += 1 + restriction_selects.append( + f"SELECT :{prefix}_parent AS parent, NULL AS child" + ) + restriction_params[f"{prefix}_parent"] = db_name + + # Add table-level allowlisted resources + resource_restrictions = restrictions.get("r", {}) + for db_name, tables in 
resource_restrictions.items(): + for table_name, table_actions in tables.items(): + if action_checks.intersection(table_actions): + prefix = f"restr_{param_counter}" + param_counter += 1 + restriction_selects.append( + f"SELECT :{prefix}_parent AS parent, :{prefix}_child AS child" + ) + restriction_params[f"{prefix}_parent"] = db_name + restriction_params[f"{prefix}_child"] = table_name + + if not restriction_selects: + # Action not in allowlist - return empty restriction (INTERSECT will return no results) + return [ + PermissionSQL( + params={"deny": f"actor restrictions: {action} not in allowlist"}, + restriction_sql="SELECT NULL AS parent, NULL AS child WHERE 0", # Empty set + ) + ] + + # Build restriction SQL that returns allowed (parent, child) pairs + restriction_sql = "\nUNION ALL\n".join(restriction_selects) + + # Return restriction-only PermissionSQL (sql=None means no permission rules) + # The restriction_sql does the actual filtering via INTERSECT + return [ + PermissionSQL( + params=restriction_params, + restriction_sql=restriction_sql, + ) + ] + + +@hookimpl(specname="permission_resources_sql") +async def root_user_permissions_sql(datasette, actor, action): + """Grant root user full permissions when enabled.""" + if datasette.root_enabled and actor and actor.get("id") == "root": + # Add a single global-level allow rule (NULL, NULL) for root + # This allows root to access everything by default, but database-level + # and table-level deny rules in config can still block specific resources + return PermissionSQL.allow(reason="root user") + return None + + +@hookimpl(specname="permission_resources_sql") +async def config_permissions_sql(datasette, actor, action): + """Apply config-based permission rules from datasette.yaml.""" + config = datasette.config or {} + + def evaluate(allow_block): + if allow_block is None: + return None + return actor_matches_allow(actor, allow_block) + + has_restrictions = actor and "_r" in actor if actor else False + 
restrictions = actor.get("_r", {}) if actor else {} + + action_obj = datasette.actions.get(action) + action_checks = {action} + if action_obj and action_obj.abbr: + action_checks.add(action_obj.abbr) + + restricted_databases: set[str] = set() + restricted_tables: set[tuple[str, str]] = set() + if has_restrictions: + restricted_databases = { + db_name + for db_name, db_actions in (restrictions.get("d") or {}).items() + if action_checks.intersection(db_actions) + } + restricted_tables = { + (db_name, table_name) + for db_name, tables in (restrictions.get("r") or {}).items() + for table_name, table_actions in tables.items() + if action_checks.intersection(table_actions) + } + # Tables implicitly reference their parent databases + restricted_databases.update(db for db, _ in restricted_tables) + + def is_in_restriction_allowlist(parent, child, action_name): + """Check if a resource is in the actor's restriction allowlist for this action""" + if not has_restrictions: + return True # No restrictions, all resources allowed + + # Check global allowlist + if action_checks.intersection(restrictions.get("a", [])): + return True + + # Check database-level allowlist + if parent and action_checks.intersection( + restrictions.get("d", {}).get(parent, []) + ): + return True + + # Check table-level allowlist + if parent: + table_restrictions = (restrictions.get("r", {}) or {}).get(parent, {}) + if child: + table_actions = table_restrictions.get(child, []) + if action_checks.intersection(table_actions): + return True + else: + # Parent query should proceed if any child in this database is allowlisted + for table_actions in table_restrictions.values(): + if action_checks.intersection(table_actions): + return True + + # Parent/child both None: include if any restrictions exist for this action + if parent is None and child is None: + if action_checks.intersection(restrictions.get("a", [])): + return True + if restricted_databases: + return True + if restricted_tables: + return True + + 
return False + + rows = [] + + def add_row(parent, child, result, scope): + if result is None: + return + rows.append( + ( + parent, + child, + bool(result), + f"config {'allow' if result else 'deny'} {scope}", + ) + ) + + def add_row_allow_block(parent, child, allow_block, scope): + """For 'allow' blocks, always add a row if the block exists - deny if no match""" + if allow_block is None: + return + + # If actor has restrictions and this resource is NOT in allowlist, skip this config rule + # Restrictions act as a gating filter - config cannot grant access to restricted-out resources + if not is_in_restriction_allowlist(parent, child, action): + return + + result = evaluate(allow_block) + bool_result = bool(result) + # If result is None (no match) or False, treat as deny + rows.append( + ( + parent, + child, + bool_result, # None becomes False, False stays False, True stays True + f"config {'allow' if result else 'deny'} {scope}", + ) + ) + if has_restrictions and not bool_result and child is None: + reason = f"config deny {scope} (restriction gate)" + if parent is None: + # Root-level deny: add more specific denies for restricted resources + if action_obj and action_obj.takes_parent: + for db_name in restricted_databases: + rows.append((db_name, None, 0, reason)) + if action_obj and action_obj.takes_child: + for db_name, table_name in restricted_tables: + rows.append((db_name, table_name, 0, reason)) + else: + # Database-level deny: add child-level denies for restricted tables + if action_obj and action_obj.takes_child: + for db_name, table_name in restricted_tables: + if db_name == parent: + rows.append((db_name, table_name, 0, reason)) + + root_perm = (config.get("permissions") or {}).get(action) + add_row(None, None, evaluate(root_perm), f"permissions for {action}") + + for db_name, db_config in (config.get("databases") or {}).items(): + db_perm = (db_config.get("permissions") or {}).get(action) + add_row( + db_name, None, evaluate(db_perm), f"permissions for 
{action} on {db_name}" + ) + + for table_name, table_config in (db_config.get("tables") or {}).items(): + table_perm = (table_config.get("permissions") or {}).get(action) + add_row( + db_name, + table_name, + evaluate(table_perm), + f"permissions for {action} on {db_name}/{table_name}", + ) + + if action == "view-table": + table_allow = (table_config or {}).get("allow") + add_row_allow_block( + db_name, + table_name, + table_allow, + f"allow for {action} on {db_name}/{table_name}", + ) + + for query_name, query_config in (db_config.get("queries") or {}).items(): + # query_config can be a string (just SQL) or a dict (with SQL and options) + if isinstance(query_config, dict): + query_perm = (query_config.get("permissions") or {}).get(action) + add_row( + db_name, + query_name, + evaluate(query_perm), + f"permissions for {action} on {db_name}/{query_name}", + ) + if action == "view-query": + query_allow = query_config.get("allow") + add_row_allow_block( + db_name, + query_name, + query_allow, + f"allow for {action} on {db_name}/{query_name}", + ) + + if action == "view-database": + db_allow = db_config.get("allow") + add_row_allow_block( + db_name, None, db_allow, f"allow for {action} on {db_name}" + ) + + if action == "execute-sql": + db_allow_sql = db_config.get("allow_sql") + add_row_allow_block(db_name, None, db_allow_sql, f"allow_sql for {db_name}") + + if action == "view-table": + # Database-level allow block affects all tables in that database + db_allow = db_config.get("allow") + add_row_allow_block( + db_name, None, db_allow, f"allow for {action} on {db_name}" + ) + + if action == "view-query": + # Database-level allow block affects all queries in that database + db_allow = db_config.get("allow") + add_row_allow_block( + db_name, None, db_allow, f"allow for {action} on {db_name}" + ) + + # Root-level allow block applies to all view-* actions + if action == "view-instance": + allow_block = config.get("allow") + add_row_allow_block(None, None, allow_block, 
"allow for view-instance") + + if action == "view-database": + # Root-level allow block also applies to view-database + allow_block = config.get("allow") + add_row_allow_block(None, None, allow_block, "allow for view-database") + + if action == "view-table": + # Root-level allow block also applies to view-table + allow_block = config.get("allow") + add_row_allow_block(None, None, allow_block, "allow for view-table") + + if action == "view-query": + # Root-level allow block also applies to view-query + allow_block = config.get("allow") + add_row_allow_block(None, None, allow_block, "allow for view-query") + + if action == "execute-sql": + allow_sql = config.get("allow_sql") + add_row_allow_block(None, None, allow_sql, "allow_sql") + + if not rows: + return [] + + parts = [] + params = {} + for idx, (parent, child, allow, reason) in enumerate(rows): + key = f"cfg_{idx}" + parts.append( + f"SELECT :{key}_parent AS parent, :{key}_child AS child, :{key}_allow AS allow, :{key}_reason AS reason" + ) + params[f"{key}_parent"] = parent + params[f"{key}_child"] = child + params[f"{key}_allow"] = 1 if allow else 0 + params[f"{key}_reason"] = reason + + sql = "\nUNION ALL\n".join(parts) + return [PermissionSQL(sql=sql, params=params)] + + +@hookimpl(specname="permission_resources_sql") +async def default_allow_sql_check(datasette, actor, action): + """Enforce default_allow_sql setting for execute-sql action.""" + if action == "execute-sql" and not datasette.setting("default_allow_sql"): + return PermissionSQL.deny(reason="default_allow_sql is false") + return None + + +@hookimpl(specname="permission_resources_sql") +async def default_action_permissions_sql(datasette, actor, action): + """Apply default allow rules for standard view/execute actions. + + With the INTERSECT-based restriction approach, these defaults are always generated + and then filtered by restriction_sql if the actor has restrictions. 
+ """ + default_allow_actions = { + "view-instance", + "view-database", + "view-database-download", + "view-table", + "view-query", + "execute-sql", + } + if action in default_allow_actions: + reason = f"default allow for {action}".replace("'", "''") + return PermissionSQL.allow(reason=reason) + + return None + + +def restrictions_allow_action( + datasette: "Datasette", + restrictions: dict, + action: str, + resource: str | tuple[str, str], +): + """ + Check if actor restrictions allow the requested action against the requested resource. + + Restrictions work on an exact-match basis: if an actor has view-table permission, + they can view tables, but NOT automatically view-instance or view-database. + Each permission is checked independently without implication logic. + """ + # Does this action have an abbreviation? + to_check = {action} + action_obj = datasette.actions.get(action) + if action_obj and action_obj.abbr: + to_check.add(action_obj.abbr) + + # Check if restrictions explicitly allow this action + # Restrictions can be at three levels: + # - "a": global (any resource) + # - "d": per-database + # - "r": per-table/resource + + # Check global level (any resource) + all_allowed = restrictions.get("a") + if all_allowed is not None: + assert isinstance(all_allowed, list) + if to_check.intersection(all_allowed): + return True + + # Check database level + if resource: + if isinstance(resource, str): + database_name = resource + else: + database_name = resource[0] + database_allowed = restrictions.get("d", {}).get(database_name) + if database_allowed is not None: + assert isinstance(database_allowed, list) + if to_check.intersection(database_allowed): + return True + + # Check table/resource level + if resource is not None and not isinstance(resource, str) and len(resource) == 2: + database, table = resource + table_allowed = restrictions.get("r", {}).get(database, {}).get(table) + if table_allowed is not None: + assert isinstance(table_allowed, list) + if 
to_check.intersection(table_allowed): + return True + + # This action is not explicitly allowed, so reject it + return False + + +@hookimpl +def actor_from_request(datasette, request): + prefix = "dstok_" + if not datasette.setting("allow_signed_tokens"): + return None + max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl") + authorization = request.headers.get("authorization") + if not authorization: + return None + if not authorization.startswith("Bearer "): + return None + token = authorization[len("Bearer ") :] + if not token.startswith(prefix): + return None + token = token[len(prefix) :] + try: + decoded = datasette.unsign(token, namespace="token") + except itsdangerous.BadSignature: + return None + if "t" not in decoded: + # Missing timestamp + return None + created = decoded["t"] + if not isinstance(created, int): + # Invalid timestamp + return None + duration = decoded.get("d") + if duration is not None and not isinstance(duration, int): + # Invalid duration + return None + if (duration is None and max_signed_tokens_ttl) or ( + duration is not None + and max_signed_tokens_ttl + and duration > max_signed_tokens_ttl + ): + duration = max_signed_tokens_ttl + if duration: + if time.time() - created > duration: + # Expired + return None + actor = {"id": decoded["a"], "token": "dstok"} + if "_r" in decoded: + actor["_r"] = decoded["_r"] + if duration: + actor["token_expires"] = created + duration + return actor + + +@hookimpl +def skip_csrf(scope): + # Skip CSRF check for requests with content-type: application/json + if scope["type"] == "http": + headers = scope.get("headers") or {} + if dict(headers).get(b"content-type") == b"application/json": + return True + + +@hookimpl +def canned_queries(datasette, database, actor): + """Return canned queries from datasette configuration.""" + queries = ( + ((datasette.config or {}).get("databases") or {}).get(database) or {} + ).get("queries") or {} + return queries diff --git 
a/datasette/default_permissions/__init__.py b/datasette/default_permissions/__init__.py deleted file mode 100644 index 4c82d705..00000000 --- a/datasette/default_permissions/__init__.py +++ /dev/null @@ -1,59 +0,0 @@ -""" -Default permission implementations for Datasette. - -This module provides the built-in permission checking logic through implementations -of the permission_resources_sql hook. The hooks are organized by their purpose: - -1. Actor Restrictions - Enforces _r allowlists embedded in actor tokens -2. Root User - Grants full access when --root flag is used -3. Config Rules - Applies permissions from datasette.yaml -4. Default Settings - Enforces default_allow_sql and default view permissions - -IMPORTANT: These hooks return PermissionSQL objects that are combined using SQL -UNION/INTERSECT operations. The order of evaluation is: - - restriction_sql fields are INTERSECTed (all must match) - - Regular sql fields are UNIONed and evaluated with cascading priority -""" - -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -if TYPE_CHECKING: - from datasette.app import Datasette - -from datasette import hookimpl - -# Re-export all hooks and public utilities -from .restrictions import ( - actor_restrictions_sql, - restrictions_allow_action, - ActorRestrictions, -) -from .root import root_user_permissions_sql -from .config import config_permissions_sql -from .defaults import ( - default_allow_sql_check, - default_action_permissions_sql, - DEFAULT_ALLOW_ACTIONS, -) -from .tokens import actor_from_signed_api_token - - -@hookimpl -def skip_csrf(scope) -> Optional[bool]: - """Skip CSRF check for JSON content-type requests.""" - if scope["type"] == "http": - headers = scope.get("headers") or {} - if dict(headers).get(b"content-type") == b"application/json": - return True - return None - - -@hookimpl -def canned_queries(datasette: "Datasette", database: str, actor) -> dict: - """Return canned queries defined in datasette.yaml 
configuration.""" - queries = ( - ((datasette.config or {}).get("databases") or {}).get(database) or {} - ).get("queries") or {} - return queries diff --git a/datasette/default_permissions/config.py b/datasette/default_permissions/config.py deleted file mode 100644 index aab87c1c..00000000 --- a/datasette/default_permissions/config.py +++ /dev/null @@ -1,442 +0,0 @@ -""" -Config-based permission handling for Datasette. - -Applies permission rules from datasette.yaml configuration. -""" - -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, List, Optional, Set, Tuple - -if TYPE_CHECKING: - from datasette.app import Datasette - -from datasette import hookimpl -from datasette.permissions import PermissionSQL -from datasette.utils import actor_matches_allow - -from .helpers import PermissionRowCollector, get_action_name_variants - - -class ConfigPermissionProcessor: - """ - Processes permission rules from datasette.yaml configuration. - - Configuration structure: - - permissions: # Root-level permissions block - view-instance: - id: admin - - databases: - mydb: - permissions: # Database-level permissions - view-database: - id: admin - allow: # Database-level allow block (for view-*) - id: viewer - allow_sql: # execute-sql allow block - id: analyst - tables: - users: - permissions: # Table-level permissions - view-table: - id: admin - allow: # Table-level allow block - id: viewer - queries: - my_query: - permissions: # Query-level permissions - view-query: - id: admin - allow: # Query-level allow block - id: viewer - """ - - def __init__( - self, - datasette: "Datasette", - actor: Optional[dict], - action: str, - ): - self.datasette = datasette - self.actor = actor - self.action = action - self.config = datasette.config or {} - self.collector = PermissionRowCollector(prefix="cfg") - - # Pre-compute action variants - self.action_checks = get_action_name_variants(datasette, action) - self.action_obj = datasette.actions.get(action) - - # Parse 
restrictions if present - self.has_restrictions = actor and "_r" in actor if actor else False - self.restrictions = actor.get("_r", {}) if actor else {} - - # Pre-compute restriction info for efficiency - self.restricted_databases: Set[str] = set() - self.restricted_tables: Set[Tuple[str, str]] = set() - - if self.has_restrictions: - self.restricted_databases = { - db_name - for db_name, db_actions in (self.restrictions.get("d") or {}).items() - if self.action_checks.intersection(db_actions) - } - self.restricted_tables = { - (db_name, table_name) - for db_name, tables in (self.restrictions.get("r") or {}).items() - for table_name, table_actions in tables.items() - if self.action_checks.intersection(table_actions) - } - # Tables implicitly reference their parent databases - self.restricted_databases.update(db for db, _ in self.restricted_tables) - - def evaluate_allow_block(self, allow_block: Any) -> Optional[bool]: - """Evaluate an allow block against the current actor.""" - if allow_block is None: - return None - return actor_matches_allow(self.actor, allow_block) - - def is_in_restriction_allowlist( - self, - parent: Optional[str], - child: Optional[str], - ) -> bool: - """Check if resource is allowed by actor restrictions.""" - if not self.has_restrictions: - return True # No restrictions, all resources allowed - - # Check global allowlist - if self.action_checks.intersection(self.restrictions.get("a", [])): - return True - - # Check database-level allowlist - if parent and self.action_checks.intersection( - self.restrictions.get("d", {}).get(parent, []) - ): - return True - - # Check table-level allowlist - if parent: - table_restrictions = (self.restrictions.get("r", {}) or {}).get(parent, {}) - if child: - table_actions = table_restrictions.get(child, []) - if self.action_checks.intersection(table_actions): - return True - else: - # Parent query should proceed if any child in this database is allowlisted - for table_actions in table_restrictions.values(): - 
if self.action_checks.intersection(table_actions): - return True - - # Parent/child both None: include if any restrictions exist for this action - if parent is None and child is None: - if self.action_checks.intersection(self.restrictions.get("a", [])): - return True - if self.restricted_databases: - return True - if self.restricted_tables: - return True - - return False - - def add_permissions_rule( - self, - parent: Optional[str], - child: Optional[str], - permissions_block: Optional[dict], - scope_desc: str, - ) -> None: - """Add a rule from a permissions:{action} block.""" - if permissions_block is None: - return - - action_allow_block = permissions_block.get(self.action) - result = self.evaluate_allow_block(action_allow_block) - - self.collector.add( - parent=parent, - child=child, - allow=result, - reason=f"config {'allow' if result else 'deny'} {scope_desc}", - if_not_none=True, - ) - - def add_allow_block_rule( - self, - parent: Optional[str], - child: Optional[str], - allow_block: Any, - scope_desc: str, - ) -> None: - """ - Add rules from an allow:{} block. - - For allow blocks, if the block exists but doesn't match the actor, - this is treated as a deny. We also handle the restriction-gate logic. 
- """ - if allow_block is None: - return - - # Skip if resource is not in restriction allowlist - if not self.is_in_restriction_allowlist(parent, child): - return - - result = self.evaluate_allow_block(allow_block) - bool_result = bool(result) - - self.collector.add( - parent, - child, - bool_result, - f"config {'allow' if result else 'deny'} {scope_desc}", - ) - - # Handle restriction-gate: add explicit denies for restricted resources - self._add_restriction_gate_denies(parent, child, bool_result, scope_desc) - - def _add_restriction_gate_denies( - self, - parent: Optional[str], - child: Optional[str], - is_allowed: bool, - scope_desc: str, - ) -> None: - """ - When a config rule denies at a higher level, add explicit denies - for restricted resources to prevent child-level allows from - incorrectly granting access. - """ - if is_allowed or child is not None or not self.has_restrictions: - return - - if not self.action_obj: - return - - reason = f"config deny {scope_desc} (restriction gate)" - - if parent is None: - # Root-level deny: add denies for all restricted resources - if self.action_obj.takes_parent: - for db_name in self.restricted_databases: - self.collector.add(db_name, None, False, reason) - if self.action_obj.takes_child: - for db_name, table_name in self.restricted_tables: - self.collector.add(db_name, table_name, False, reason) - else: - # Database-level deny: add denies for tables in that database - if self.action_obj.takes_child: - for db_name, table_name in self.restricted_tables: - if db_name == parent: - self.collector.add(db_name, table_name, False, reason) - - def process(self) -> Optional[PermissionSQL]: - """Process all config rules and return combined PermissionSQL.""" - self._process_root_permissions() - self._process_databases() - self._process_root_allow_blocks() - - return self.collector.to_permission_sql() - - def _process_root_permissions(self) -> None: - """Process root-level permissions block.""" - root_perms = 
self.config.get("permissions") or {} - self.add_permissions_rule( - None, - None, - root_perms, - f"permissions for {self.action}", - ) - - def _process_databases(self) -> None: - """Process database-level and nested configurations.""" - databases = self.config.get("databases") or {} - - for db_name, db_config in databases.items(): - self._process_database(db_name, db_config or {}) - - def _process_database(self, db_name: str, db_config: dict) -> None: - """Process a single database's configuration.""" - # Database-level permissions block - db_perms = db_config.get("permissions") or {} - self.add_permissions_rule( - db_name, - None, - db_perms, - f"permissions for {self.action} on {db_name}", - ) - - # Process tables - for table_name, table_config in (db_config.get("tables") or {}).items(): - self._process_table(db_name, table_name, table_config or {}) - - # Process queries - for query_name, query_config in (db_config.get("queries") or {}).items(): - self._process_query(db_name, query_name, query_config) - - # Database-level allow blocks - self._process_database_allow_blocks(db_name, db_config) - - def _process_table( - self, - db_name: str, - table_name: str, - table_config: dict, - ) -> None: - """Process a single table's configuration.""" - # Table-level permissions block - table_perms = table_config.get("permissions") or {} - self.add_permissions_rule( - db_name, - table_name, - table_perms, - f"permissions for {self.action} on {db_name}/{table_name}", - ) - - # Table-level allow block (for view-table) - if self.action == "view-table": - self.add_allow_block_rule( - db_name, - table_name, - table_config.get("allow"), - f"allow for {self.action} on {db_name}/{table_name}", - ) - - def _process_query( - self, - db_name: str, - query_name: str, - query_config: Any, - ) -> None: - """Process a single query's configuration.""" - # Query config can be a string (just SQL) or dict - if not isinstance(query_config, dict): - return - - # Query-level permissions block - 
query_perms = query_config.get("permissions") or {} - self.add_permissions_rule( - db_name, - query_name, - query_perms, - f"permissions for {self.action} on {db_name}/{query_name}", - ) - - # Query-level allow block (for view-query) - if self.action == "view-query": - self.add_allow_block_rule( - db_name, - query_name, - query_config.get("allow"), - f"allow for {self.action} on {db_name}/{query_name}", - ) - - def _process_database_allow_blocks( - self, - db_name: str, - db_config: dict, - ) -> None: - """Process database-level allow/allow_sql blocks.""" - # view-database allow block - if self.action == "view-database": - self.add_allow_block_rule( - db_name, - None, - db_config.get("allow"), - f"allow for {self.action} on {db_name}", - ) - - # execute-sql allow_sql block - if self.action == "execute-sql": - self.add_allow_block_rule( - db_name, - None, - db_config.get("allow_sql"), - f"allow_sql for {db_name}", - ) - - # view-table uses database-level allow for inheritance - if self.action == "view-table": - self.add_allow_block_rule( - db_name, - None, - db_config.get("allow"), - f"allow for {self.action} on {db_name}", - ) - - # view-query uses database-level allow for inheritance - if self.action == "view-query": - self.add_allow_block_rule( - db_name, - None, - db_config.get("allow"), - f"allow for {self.action} on {db_name}", - ) - - def _process_root_allow_blocks(self) -> None: - """Process root-level allow/allow_sql blocks.""" - root_allow = self.config.get("allow") - - if self.action == "view-instance": - self.add_allow_block_rule( - None, - None, - root_allow, - "allow for view-instance", - ) - - if self.action == "view-database": - self.add_allow_block_rule( - None, - None, - root_allow, - "allow for view-database", - ) - - if self.action == "view-table": - self.add_allow_block_rule( - None, - None, - root_allow, - "allow for view-table", - ) - - if self.action == "view-query": - self.add_allow_block_rule( - None, - None, - root_allow, - "allow for 
view-query", - ) - - if self.action == "execute-sql": - self.add_allow_block_rule( - None, - None, - self.config.get("allow_sql"), - "allow_sql", - ) - - -@hookimpl(specname="permission_resources_sql") -async def config_permissions_sql( - datasette: "Datasette", - actor: Optional[dict], - action: str, -) -> Optional[List[PermissionSQL]]: - """ - Apply permission rules from datasette.yaml configuration. - - This processes: - - permissions: blocks at root, database, table, and query levels - - allow: blocks for view-* actions - - allow_sql: blocks for execute-sql action - """ - processor = ConfigPermissionProcessor(datasette, actor, action) - result = processor.process() - - if result is None: - return [] - - return [result] diff --git a/datasette/default_permissions/defaults.py b/datasette/default_permissions/defaults.py deleted file mode 100644 index f5a6a270..00000000 --- a/datasette/default_permissions/defaults.py +++ /dev/null @@ -1,70 +0,0 @@ -""" -Default permission settings for Datasette. - -Provides default allow rules for standard view/execute actions. -""" - -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -if TYPE_CHECKING: - from datasette.app import Datasette - -from datasette import hookimpl -from datasette.permissions import PermissionSQL - - -# Actions that are allowed by default (unless --default-deny is used) -DEFAULT_ALLOW_ACTIONS = frozenset( - { - "view-instance", - "view-database", - "view-database-download", - "view-table", - "view-query", - "execute-sql", - } -) - - -@hookimpl(specname="permission_resources_sql") -async def default_allow_sql_check( - datasette: "Datasette", - actor: Optional[dict], - action: str, -) -> Optional[PermissionSQL]: - """ - Enforce the default_allow_sql setting. - - When default_allow_sql is false (the default), execute-sql is denied - unless explicitly allowed by config or other rules. 
- """ - if action == "execute-sql": - if not datasette.setting("default_allow_sql"): - return PermissionSQL.deny(reason="default_allow_sql is false") - - return None - - -@hookimpl(specname="permission_resources_sql") -async def default_action_permissions_sql( - datasette: "Datasette", - actor: Optional[dict], - action: str, -) -> Optional[PermissionSQL]: - """ - Provide default allow rules for standard view/execute actions. - - These defaults are skipped when datasette is started with --default-deny. - The restriction_sql mechanism (from actor_restrictions_sql) will still - filter these results if the actor has restrictions. - """ - if datasette.default_deny: - return None - - if action in DEFAULT_ALLOW_ACTIONS: - reason = f"default allow for {action}".replace("'", "''") - return PermissionSQL.allow(reason=reason) - - return None diff --git a/datasette/default_permissions/helpers.py b/datasette/default_permissions/helpers.py deleted file mode 100644 index 47e03569..00000000 --- a/datasette/default_permissions/helpers.py +++ /dev/null @@ -1,85 +0,0 @@ -""" -Shared helper utilities for default permission implementations. -""" - -from __future__ import annotations - -from dataclasses import dataclass -from typing import TYPE_CHECKING, List, Optional, Set - -if TYPE_CHECKING: - from datasette.app import Datasette - -from datasette.permissions import PermissionSQL - - -def get_action_name_variants(datasette: "Datasette", action: str) -> Set[str]: - """ - Get all name variants for an action (full name and abbreviation). 
- - Example: - get_action_name_variants(ds, "view-table") -> {"view-table", "vt"} - """ - variants = {action} - action_obj = datasette.actions.get(action) - if action_obj and action_obj.abbr: - variants.add(action_obj.abbr) - return variants - - -def action_in_list(datasette: "Datasette", action: str, action_list: list) -> bool: - """Check if an action (or its abbreviation) is in a list.""" - return bool(get_action_name_variants(datasette, action).intersection(action_list)) - - -@dataclass -class PermissionRow: - """A single permission rule row.""" - - parent: Optional[str] - child: Optional[str] - allow: bool - reason: str - - -class PermissionRowCollector: - """Collects permission rows and converts them to PermissionSQL.""" - - def __init__(self, prefix: str = "row"): - self.rows: List[PermissionRow] = [] - self.prefix = prefix - - def add( - self, - parent: Optional[str], - child: Optional[str], - allow: Optional[bool], - reason: str, - if_not_none: bool = False, - ) -> None: - """Add a permission row. 
If if_not_none=True, only add if allow is not None.""" - if if_not_none and allow is None: - return - self.rows.append(PermissionRow(parent, child, allow, reason)) - - def to_permission_sql(self) -> Optional[PermissionSQL]: - """Convert collected rows to a PermissionSQL object.""" - if not self.rows: - return None - - parts = [] - params = {} - - for idx, row in enumerate(self.rows): - key = f"{self.prefix}_{idx}" - parts.append( - f"SELECT :{key}_parent AS parent, :{key}_child AS child, " - f":{key}_allow AS allow, :{key}_reason AS reason" - ) - params[f"{key}_parent"] = row.parent - params[f"{key}_child"] = row.child - params[f"{key}_allow"] = 1 if row.allow else 0 - params[f"{key}_reason"] = row.reason - - sql = "\nUNION ALL\n".join(parts) - return PermissionSQL(sql=sql, params=params) diff --git a/datasette/default_permissions/restrictions.py b/datasette/default_permissions/restrictions.py deleted file mode 100644 index a22cd7e5..00000000 --- a/datasette/default_permissions/restrictions.py +++ /dev/null @@ -1,195 +0,0 @@ -""" -Actor restriction handling for Datasette permissions. - -This module handles the _r (restrictions) key in actor dictionaries, which -contains allowlists of resources the actor can access. -""" - -from __future__ import annotations - -from dataclasses import dataclass -from typing import TYPE_CHECKING, List, Optional, Set, Tuple - -if TYPE_CHECKING: - from datasette.app import Datasette - -from datasette import hookimpl -from datasette.permissions import PermissionSQL - -from .helpers import action_in_list, get_action_name_variants - - -@dataclass -class ActorRestrictions: - """Parsed actor restrictions from the _r key.""" - - global_actions: List[str] # _r.a - globally allowed actions - database_actions: dict # _r.d - {db_name: [actions]} - table_actions: dict # _r.r - {db_name: {table: [actions]}} - - @classmethod - def from_actor(cls, actor: Optional[dict]) -> Optional["ActorRestrictions"]: - """Parse restrictions from actor dict. 
Returns None if no restrictions.""" - if not actor: - return None - assert isinstance(actor, dict), "actor must be a dictionary" - - restrictions = actor.get("_r") - if restrictions is None: - return None - - return cls( - global_actions=restrictions.get("a", []), - database_actions=restrictions.get("d", {}), - table_actions=restrictions.get("r", {}), - ) - - def is_action_globally_allowed(self, datasette: "Datasette", action: str) -> bool: - """Check if action is in the global allowlist.""" - return action_in_list(datasette, action, self.global_actions) - - def get_allowed_databases(self, datasette: "Datasette", action: str) -> Set[str]: - """Get database names where this action is allowed.""" - allowed = set() - for db_name, db_actions in self.database_actions.items(): - if action_in_list(datasette, action, db_actions): - allowed.add(db_name) - return allowed - - def get_allowed_tables( - self, datasette: "Datasette", action: str - ) -> Set[Tuple[str, str]]: - """Get (database, table) pairs where this action is allowed.""" - allowed = set() - for db_name, tables in self.table_actions.items(): - for table_name, table_actions in tables.items(): - if action_in_list(datasette, action, table_actions): - allowed.add((db_name, table_name)) - return allowed - - -@hookimpl(specname="permission_resources_sql") -async def actor_restrictions_sql( - datasette: "Datasette", - actor: Optional[dict], - action: str, -) -> Optional[List[PermissionSQL]]: - """ - Handle actor restriction-based permission rules. - - When an actor has an "_r" key, it contains an allowlist of resources they - can access. This function returns restriction_sql that filters the final - results to only include resources in that allowlist. 
- - The _r structure: - { - "a": ["vi", "pd"], # Global actions allowed - "d": {"mydb": ["vt", "es"]}, # Database-level actions - "r": {"mydb": {"users": ["vt"]}} # Table-level actions - } - """ - if not actor: - return None - - restrictions = ActorRestrictions.from_actor(actor) - - if restrictions is None: - # No restrictions - all resources allowed - return [] - - # If globally allowed, no filtering needed - if restrictions.is_action_globally_allowed(datasette, action): - return [] - - # Build restriction SQL - allowed_dbs = restrictions.get_allowed_databases(datasette, action) - allowed_tables = restrictions.get_allowed_tables(datasette, action) - - # If nothing is allowed for this action, return empty-set restriction - if not allowed_dbs and not allowed_tables: - return [ - PermissionSQL( - params={"deny": f"actor restrictions: {action} not in allowlist"}, - restriction_sql="SELECT NULL AS parent, NULL AS child WHERE 0", - ) - ] - - # Build UNION of allowed resources - selects = [] - params = {} - counter = 0 - - # Database-level entries (parent, NULL) - allows all children - for db_name in allowed_dbs: - key = f"restr_{counter}" - counter += 1 - selects.append(f"SELECT :{key}_parent AS parent, NULL AS child") - params[f"{key}_parent"] = db_name - - # Table-level entries (parent, child) - for db_name, table_name in allowed_tables: - key = f"restr_{counter}" - counter += 1 - selects.append(f"SELECT :{key}_parent AS parent, :{key}_child AS child") - params[f"{key}_parent"] = db_name - params[f"{key}_child"] = table_name - - restriction_sql = "\nUNION ALL\n".join(selects) - - return [PermissionSQL(params=params, restriction_sql=restriction_sql)] - - -def restrictions_allow_action( - datasette: "Datasette", - restrictions: dict, - action: str, - resource: Optional[str | Tuple[str, str]], -) -> bool: - """ - Check if restrictions allow the requested action on the requested resource. 
- - This is a synchronous utility function for use by other code that needs - to quickly check restriction allowlists. - - Args: - datasette: The Datasette instance - restrictions: The _r dict from an actor - action: The action name to check - resource: None for global, str for database, (db, table) tuple for table - - Returns: - True if allowed, False if denied - """ - # Does this action have an abbreviation? - to_check = get_action_name_variants(datasette, action) - - # Check global level (any resource) - all_allowed = restrictions.get("a") - if all_allowed is not None: - assert isinstance(all_allowed, list) - if to_check.intersection(all_allowed): - return True - - # Check database level - if resource: - if isinstance(resource, str): - database_name = resource - else: - database_name = resource[0] - database_allowed = restrictions.get("d", {}).get(database_name) - if database_allowed is not None: - assert isinstance(database_allowed, list) - if to_check.intersection(database_allowed): - return True - - # Check table/resource level - if resource is not None and not isinstance(resource, str) and len(resource) == 2: - database, table = resource - table_allowed = restrictions.get("r", {}).get(database, {}).get(table) - if table_allowed is not None: - assert isinstance(table_allowed, list) - if to_check.intersection(table_allowed): - return True - - # This action is not explicitly allowed, so reject it - return False diff --git a/datasette/default_permissions/root.py b/datasette/default_permissions/root.py deleted file mode 100644 index 4931f7ff..00000000 --- a/datasette/default_permissions/root.py +++ /dev/null @@ -1,29 +0,0 @@ -""" -Root user permission handling for Datasette. - -Grants full permissions to the root user when --root flag is used. 
-""" - -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -if TYPE_CHECKING: - from datasette.app import Datasette - -from datasette import hookimpl -from datasette.permissions import PermissionSQL - - -@hookimpl(specname="permission_resources_sql") -async def root_user_permissions_sql( - datasette: "Datasette", - actor: Optional[dict], -) -> Optional[PermissionSQL]: - """ - Grant root user full permissions when --root flag is used. - """ - if not datasette.root_enabled: - return None - if actor is not None and actor.get("id") == "root": - return PermissionSQL.allow(reason="root user") diff --git a/datasette/default_permissions/tokens.py b/datasette/default_permissions/tokens.py deleted file mode 100644 index 474b0c23..00000000 --- a/datasette/default_permissions/tokens.py +++ /dev/null @@ -1,95 +0,0 @@ -""" -Token authentication for Datasette. - -Handles signed API tokens (dstok_ prefix). -""" - -from __future__ import annotations - -import time -from typing import TYPE_CHECKING, Optional - -if TYPE_CHECKING: - from datasette.app import Datasette - -import itsdangerous - -from datasette import hookimpl - - -@hookimpl(specname="actor_from_request") -def actor_from_signed_api_token(datasette: "Datasette", request) -> Optional[dict]: - """ - Authenticate requests using signed API tokens (dstok_ prefix). 
- - Token structure (signed JSON): - { - "a": "actor_id", # Actor ID - "t": 1234567890, # Timestamp (Unix epoch) - "d": 3600, # Optional: Duration in seconds - "_r": {...} # Optional: Restrictions - } - """ - prefix = "dstok_" - - # Check if tokens are enabled - if not datasette.setting("allow_signed_tokens"): - return None - - max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl") - - # Get authorization header - authorization = request.headers.get("authorization") - if not authorization: - return None - if not authorization.startswith("Bearer "): - return None - - token = authorization[len("Bearer ") :] - if not token.startswith(prefix): - return None - - # Remove prefix and verify signature - token = token[len(prefix) :] - try: - decoded = datasette.unsign(token, namespace="token") - except itsdangerous.BadSignature: - return None - - # Validate timestamp - if "t" not in decoded: - return None - created = decoded["t"] - if not isinstance(created, int): - return None - - # Handle duration/expiry - duration = decoded.get("d") - if duration is not None and not isinstance(duration, int): - return None - - # Apply max TTL if configured - if (duration is None and max_signed_tokens_ttl) or ( - duration is not None - and max_signed_tokens_ttl - and duration > max_signed_tokens_ttl - ): - duration = max_signed_tokens_ttl - - # Check expiry - if duration: - if time.time() - created > duration: - return None - - # Build actor dict - actor = {"id": decoded["a"], "token": "dstok"} - - # Copy restrictions if present - if "_r" in decoded: - actor["_r"] = decoded["_r"] - - # Add expiry timestamp if applicable - if duration: - actor["token_expires"] = created + duration - - return actor diff --git a/datasette/plugins.py b/datasette/plugins.py index e9818885..392ab60d 100644 --- a/datasette/plugins.py +++ b/datasette/plugins.py @@ -94,24 +94,21 @@ def get_plugins(): for plugin in pm.get_plugins(): static_path = None templates_path = None - plugin_name = ( - 
plugin.__name__ - if hasattr(plugin, "__name__") - else plugin.__class__.__name__ - ) - if plugin_name not in DEFAULT_PLUGINS: + if plugin.__name__ not in DEFAULT_PLUGINS: try: - if (importlib_resources.files(plugin_name) / "static").is_dir(): - static_path = str(importlib_resources.files(plugin_name) / "static") - if (importlib_resources.files(plugin_name) / "templates").is_dir(): + if (importlib_resources.files(plugin.__name__) / "static").is_dir(): + static_path = str( + importlib_resources.files(plugin.__name__) / "static" + ) + if (importlib_resources.files(plugin.__name__) / "templates").is_dir(): templates_path = str( - importlib_resources.files(plugin_name) / "templates" + importlib_resources.files(plugin.__name__) / "templates" ) except (TypeError, ModuleNotFoundError): # Caused by --plugins_dir= plugins pass plugin_info = { - "name": plugin_name, + "name": plugin.__name__, "static_path": static_path, "templates_path": templates_path, "hooks": [h.name for h in pm.get_hookcallers(plugin)], diff --git a/datasette/static/table.js b/datasette/static/table.js index 0caeeb91..be03673c 100644 --- a/datasette/static/table.js +++ b/datasette/static/table.js @@ -330,6 +330,316 @@ function initAutocompleteForFilterValues(manager) { }); } +/** Initialize row detail side panel functionality */ +function initRowDetailPanel() { + const dialog = document.getElementById('rowDetailPanel'); + const closeButton = document.getElementById('closeRowDetail'); + const contentDiv = document.getElementById('rowDetailContent'); + const prevButton = document.getElementById('prevRowButton'); + const nextButton = document.getElementById('nextRowButton'); + const positionSpan = document.getElementById('rowPosition'); + + if (!dialog || !closeButton || !contentDiv || !prevButton || !nextButton) { + // Not on a table page with the panel + return; + } + + // State for navigation + let currentRowIndex = 0; + let allRows = []; // Array of objects: { element: DOMElement, pkValues: [...] 
} + let nextPageUrl = null; + let isLoadingMore = false; + let hasMoreRows = true; + + // Get primary key column names + function getPrimaryKeyNames() { + const headers = document.querySelectorAll('.rows-and-columns thead th[data-is-pk="1"]'); + return Array.from(headers).map(th => th.getAttribute('data-column')); + } + + const primaryKeyNames = getPrimaryKeyNames(); + + // Initialize the row list + function initializeRows() { + const domRows = document.querySelectorAll('.table-row-clickable'); + allRows = Array.from(domRows).map(row => ({ + element: row, + pkValues: extractPkValues(row) + })); + + // Check if there's a next page link + const nextLink = document.querySelector('a[href*="_next="]'); + nextPageUrl = nextLink ? nextLink.getAttribute('href') : null; + hasMoreRows = !!nextPageUrl; + } + + // Extract primary key values from a DOM row + function extractPkValues(row) { + const pkColumns = getPrimaryKeyColumns(); + const cells = row.querySelectorAll('td'); + return pkColumns.map(pk => { + const cell = cells[pk.index]; + if (!cell) return null; + return cell.getAttribute('data-value') || cell.textContent.trim(); + }); + } + + initializeRows(); + + // Prevent default cancel behavior (ESC key) to handle animation + dialog.addEventListener('cancel', (event) => { + event.preventDefault(); + animateCloseDialog(); + }); + + function animateCloseDialog() { + dialog.style.transform = 'translateX(100%)'; + setTimeout(() => { + dialog.close(); + }, 100); + } + + closeButton.addEventListener('click', () => { + animateCloseDialog(); + }); + + // Close on backdrop click + dialog.addEventListener('click', (event) => { + if (event.target === dialog) { + animateCloseDialog(); + } + }); + + // Get primary key column indices + function getPrimaryKeyColumns() { + const headers = document.querySelectorAll('.rows-and-columns thead th[data-is-pk="1"]'); + return Array.from(headers).map(th => { + const columnName = th.getAttribute('data-column'); + const index = 
Array.from(th.parentElement.children).indexOf(th); + return { name: columnName, index: index }; + }); + } + + // Construct row URL from row object (which has pkValues) + function getRowUrl(rowObj) { + if (!rowObj || !rowObj.pkValues || rowObj.pkValues.length === 0) { + return null; + } + + const pkValues = rowObj.pkValues; + + if (pkValues.some(v => v === null || v === '')) { + return null; + } + + // Construct the row path by joining PK values + const rowPath = pkValues.map(v => encodeURIComponent(v)).join(','); + + // Get current path and construct row URL + const currentPath = window.location.pathname; + return currentPath + '/' + rowPath + '.json'; + } + + // Fetch more rows from the next page using JSON API + async function fetchMoreRows() { + if (!nextPageUrl || isLoadingMore) { + return false; + } + + isLoadingMore = true; + try { + // Convert URL to JSON by adding .json before query params + let jsonUrl = nextPageUrl; + const urlParts = nextPageUrl.split('?'); + if (urlParts.length === 2) { + jsonUrl = urlParts[0] + '.json?' + urlParts[1]; + } else { + jsonUrl = nextPageUrl + '.json'; + } + + const response = await fetch(jsonUrl); + if (!response.ok) { + throw new Error(`Failed to fetch next page: ${response.status}`); + } + + const data = await response.json(); + + // Extract new rows from JSON + if (data.rows && data.rows.length > 0) { + const newRowObjects = data.rows.map(rowData => { + // Extract primary key values from the row data + const pkValues = primaryKeyNames.map(pkName => { + const value = rowData[pkName]; + return value !== null && value !== undefined ? 
String(value) : null; + }); + + return { + element: null, // No DOM element for paginated rows + pkValues: pkValues + }; + }); + + allRows.push(...newRowObjects); + } + + // Update next page URL from the response + nextPageUrl = data.next_url || null; + hasMoreRows = !!nextPageUrl; + + isLoadingMore = false; + return data.rows && data.rows.length > 0; + } catch (error) { + console.error('Error fetching more rows:', error); + isLoadingMore = false; + hasMoreRows = false; + return false; + } + } + + // Update navigation button states + function updateNavigationState() { + prevButton.disabled = currentRowIndex === 0; + + // Disable next if we're at the end and there are no more pages + const isAtEnd = currentRowIndex >= allRows.length - 1; + nextButton.disabled = isAtEnd && !hasMoreRows; + + // Update position display + if (allRows.length > 0) { + const displayIndex = currentRowIndex + 1; + positionSpan.textContent = `Row ${displayIndex}`; + } else { + positionSpan.textContent = ''; + } + } + + // Fetch and display row details + async function showRowDetails(rowIndex) { + if (rowIndex < 0 || rowIndex >= allRows.length) { + return; + } + + currentRowIndex = rowIndex; + const rowObj = allRows[rowIndex]; + const rowUrl = getRowUrl(rowObj); + + if (!rowUrl) { + contentDiv.innerHTML = '
Cannot display row: No primary key found
'; + showDialog(); + updateNavigationState(); + return; + } + + // Show loading state + contentDiv.innerHTML = 'Loading...
'; + updateNavigationState(); + + try { + const response = await fetch(rowUrl); + + if (!response.ok) { + throw new Error(`Failed to fetch row: ${response.status} ${response.statusText}`); + } + + const data = await response.json(); + + // Display the row data + if (data.rows && data.rows.length > 0) { + const rowData = data.rows[0]; + let html = '${escapeHtml(JSON.stringify(value, null, 2))}No row data found
'; + } + } catch (error) { + console.error('Error fetching row details:', error); + contentDiv.innerHTML = `Error loading row details: ${escapeHtml(error.message)}
`; + } + + updateNavigationState(); + } + + // Handle previous button click + prevButton.addEventListener('click', () => { + if (currentRowIndex > 0) { + showRowDetails(currentRowIndex - 1); + } + }); + + // Handle next button click + nextButton.addEventListener('click', async () => { + const nextIndex = currentRowIndex + 1; + + // If we're at the end of current rows, try to fetch more + if (nextIndex >= allRows.length && hasMoreRows && !isLoadingMore) { + nextButton.disabled = true; + nextButton.textContent = 'Loading...'; + + const fetched = await fetchMoreRows(); + + nextButton.textContent = 'Next →'; + + if (fetched && nextIndex < allRows.length) { + showRowDetails(nextIndex); + } else { + updateNavigationState(); + } + } else if (nextIndex < allRows.length) { + showRowDetails(nextIndex); + } + }); + + function showDialog() { + // Reset transform before opening + dialog.style.transition = 'none'; + dialog.style.transform = 'translateX(100%)'; + + // Open the dialog + dialog.showModal(); + + // Trigger animation + void dialog.offsetWidth; + + dialog.style.transition = 'transform 0.1s cubic-bezier(0.2, 0, 0.38, 0.9)'; + dialog.style.transform = 'translateX(0)'; + } + + function escapeHtml(text) { + const div = document.createElement('div'); + div.textContent = text; + return div.innerHTML; + } + + // Add click handlers to all table rows (only for rows with DOM elements) + allRows.forEach((rowObj, index) => { + if (rowObj.element) { + rowObj.element.addEventListener('click', (event) => { + // Don't trigger if clicking on a link or button within the row + if (event.target.tagName === 'A' || event.target.tagName === 'BUTTON') { + return; + } + + showDialog(); + showRowDetails(index); + }); + } + }); +} + // Ensures Table UI is initialized only after the Manager is ready. 
document.addEventListener("datasette_init", function (evt) { const { detail: manager } = evt; @@ -340,4 +650,7 @@ document.addEventListener("datasette_init", function (evt) { // Other UI functions with interactive JS needs addButtonsToFilterRows(manager); initAutocompleteForFilterValues(manager); + + // Row detail panel + initRowDetailPanel(); }); diff --git a/datasette/templates/_table.html b/datasette/templates/_table.html index a1329ba7..3748c99c 100644 --- a/datasette/templates/_table.html +++ b/datasette/templates/_table.html @@ -22,7 +22,7 @@ {% for row in display_rows %} -0 records
{% endif %} + + + + + diff --git a/datasette/templates/debug_actions.html b/datasette/templates/debug_actions.html index 0ef7b329..6dd5ac0e 100644 --- a/datasette/templates/debug_actions.html +++ b/datasette/templates/debug_actions.html @@ -31,7 +31,7 @@{{ action.abbr }}{% endif %}{{ action.resource_class }}{% endif %}{{ action.resource_class }}{{ action.also_requires }}{% endif %}None" not in response.text
- finally:
- ds_client.ds.root_enabled = original_root_enabled
-
-
@pytest.mark.asyncio
async def test_permission_debug_tabs_with_query_string(ds_client):
"""Test that navigation tabs persist query strings across Check, Allowed, and Rules pages"""
diff --git a/tests/test_internal_db.py b/tests/test_internal_db.py
index 7a0d1630..59516225 100644
--- a/tests/test_internal_db.py
+++ b/tests/test_internal_db.py
@@ -91,51 +91,3 @@ async def test_internal_foreign_key_references(ds_client):
)
await internal_db.execute_fn(inner)
-
-
-@pytest.mark.asyncio
-async def test_stale_catalog_entry_database_fix(tmp_path):
- """
- Test for https://github.com/simonw/datasette/issues/2605
-
- When the internal database persists across restarts and has entries in
- catalog_databases for databases that no longer exist, accessing the
- index page should not cause a 500 error (KeyError).
- """
- from datasette.app import Datasette
-
- internal_db_path = str(tmp_path / "internal.db")
- data_db_path = str(tmp_path / "data.db")
-
- # Create a data database file
- import sqlite3
-
- conn = sqlite3.connect(data_db_path)
- conn.execute("CREATE TABLE test_table (id INTEGER PRIMARY KEY)")
- conn.close()
-
- # First Datasette instance: with the data database and persistent internal db
- ds1 = Datasette(files=[data_db_path], internal=internal_db_path)
- await ds1.invoke_startup()
-
- # Access the index page to populate the internal catalog
- response = await ds1.client.get("/")
- assert "data" in ds1.databases
- assert response.status_code == 200
-
- # Second Datasette instance: reusing internal.db but WITHOUT the data database
- # This simulates restarting Datasette after removing a database
- ds2 = Datasette(internal=internal_db_path)
- await ds2.invoke_startup()
-
- # The database is not in ds2.databases
- assert "data" not in ds2.databases
-
- # Accessing the index page should NOT cause a 500 error
- # This is the bug: it currently raises KeyError when trying to
- # access ds.databases["data"] for the stale catalog entry
- response = await ds2.client.get("/")
- assert response.status_code == 200, (
- f"Index page should return 200, not {response.status_code}. "
- "This fails due to stale catalog entries causing KeyError."
- )
diff --git a/tests/test_internals_datasette_client.py b/tests/test_internals_datasette_client.py
index 326fcdc0..a15d294f 100644
--- a/tests/test_internals_datasette_client.py
+++ b/tests/test_internals_datasette_client.py
@@ -227,87 +227,3 @@ async def test_skip_permission_checks_shows_denied_tables():
table_names = [match["name"] for match in data["matches"]]
# Should see fixtures tables when permission checks are skipped
assert "fixtures: test_table" in table_names
-
-
-@pytest.mark.asyncio
-async def test_in_client_returns_false_outside_request(datasette):
- """Test that datasette.in_client() returns False outside of a client request"""
- assert datasette.in_client() is False
-
-
-@pytest.mark.asyncio
-async def test_in_client_returns_true_inside_request():
- """Test that datasette.in_client() returns True inside a client request"""
- from datasette import hookimpl, Response
-
- class TestPlugin:
- __name__ = "test_in_client_plugin"
-
- @hookimpl
- def register_routes(self):
- async def test_view(datasette):
- # Assert in_client() returns True within the view
- assert datasette.in_client() is True
- return Response.json({"in_client": datasette.in_client()})
-
- return [
- (r"^/-/test-in-client$", test_view),
- ]
-
- ds = Datasette()
- await ds.invoke_startup()
- ds.pm.register(TestPlugin(), name="test_in_client_plugin")
- try:
-
- # Outside of a client request, should be False
- assert ds.in_client() is False
-
- # Make a request via datasette.client
- response = await ds.client.get("/-/test-in-client")
- assert response.status_code == 200
- assert response.json()["in_client"] is True
-
- # After the request, should be False again
- assert ds.in_client() is False
- finally:
- ds.pm.unregister(name="test_in_client_plugin")
-
-
-@pytest.mark.asyncio
-async def test_in_client_with_skip_permission_checks():
- """Test that in_client() works regardless of skip_permission_checks value"""
- from datasette import hookimpl
- from datasette.utils.asgi import Response
-
- in_client_values = []
-
- class TestPlugin:
- __name__ = "test_in_client_skip_plugin"
-
- @hookimpl
- def register_routes(self):
- async def test_view(datasette):
- in_client_values.append(datasette.in_client())
- return Response.json({"in_client": datasette.in_client()})
-
- return [
- (r"^/-/test-in-client$", test_view),
- ]
-
- ds = Datasette(config={"databases": {"test_db": {"allow": {"id": "admin"}}}})
- await ds.invoke_startup()
- ds.pm.register(TestPlugin(), name="test_in_client_skip_plugin")
- try:
-
- # Request without skip_permission_checks
- await ds.client.get("/-/test-in-client")
- # Request with skip_permission_checks=True
- await ds.client.get("/-/test-in-client", skip_permission_checks=True)
-
- # Both should have detected in_client as True
- assert (
- len(in_client_values) == 2
- ), f"Expected 2 values, got {len(in_client_values)}"
- assert all(in_client_values), f"Expected all True, got {in_client_values}"
- finally:
- ds.pm.unregister(name="test_in_client_skip_plugin")
diff --git a/tests/test_permission_endpoints.py b/tests/test_permission_endpoints.py
index 84f3370f..d7b7bf07 100644
--- a/tests/test_permission_endpoints.py
+++ b/tests/test_permission_endpoints.py
@@ -439,6 +439,7 @@ async def test_execute_sql_requires_view_database():
be able to execute SQL on that database.
"""
from datasette.permissions import PermissionSQL
+ from datasette.plugins import pm
from datasette import hookimpl
class TestPermissionPlugin:
@@ -463,12 +464,11 @@ async def test_execute_sql_requires_view_database():
return []
plugin = TestPermissionPlugin()
-
- ds = Datasette()
- await ds.invoke_startup()
- ds.pm.register(plugin, name="test_plugin")
+ pm.register(plugin, name="test_plugin")
try:
+ ds = Datasette()
+ await ds.invoke_startup()
ds.add_memory_database("secret")
await ds.refresh_schemas()
@@ -498,4 +498,4 @@ async def test_execute_sql_requires_view_database():
f"but got {response.status_code}"
)
finally:
- ds.pm.unregister(plugin)
+ pm.unregister(plugin)
diff --git a/tests/test_permissions.py b/tests/test_permissions.py
index e2dd92b8..6def3840 100644
--- a/tests/test_permissions.py
+++ b/tests/test_permissions.py
@@ -1323,20 +1323,6 @@ async def test_actor_restrictions(
("dbname2", "tablename"),
False,
),
- # Table-level restriction allows access to that specific table
- (
- {"r": {"dbname": {"tablename": ["view-table"]}}},
- "view-table",
- ("dbname", "tablename"),
- True,
- ),
- # But not to a different table in the same database
- (
- {"r": {"dbname": {"tablename": ["view-table"]}}},
- "view-table",
- ("dbname", "other_table"),
- False,
- ),
),
)
async def test_restrictions_allow_action(restrictions, action, resource, expected):
@@ -1667,48 +1653,3 @@ async def test_permission_check_view_requires_debug_permission():
data = response.json()
assert data["action"] == "view-instance"
assert data["allowed"] is True
-
-
-@pytest.mark.asyncio
-async def test_root_allow_block_with_table_restricted_actor():
- """
- Test that root-level allow: blocks are processed for actors with
- table-level restrictions.
-
- This covers the case in config.py is_in_restriction_allowlist() where
- parent=None, child=None and actor has table restrictions but not global.
- """
- from datasette.resources import TableResource
-
- # Config with root-level allow block that denies non-admin users
- ds = Datasette(
- config={
- "allow": {"id": "admin"}, # Root-level allow block
- }
- )
- await ds.invoke_startup()
- db = ds.add_memory_database("mydb")
- await db.execute_write("create table t1 (id integer primary key)")
- await ds.client.get("/") # Trigger catalog refresh
-
- # Actor with table-level restrictions only (not global)
- actor = {"id": "user", "_r": {"r": {"mydb": {"t1": ["view-table"]}}}}
-
- # The root-level allow: {id: admin} should be processed and deny this user
- # because they're not "admin", even though they have table restrictions
- result = await ds.allowed(
- action="view-table",
- resource=TableResource("mydb", "t1"),
- actor=actor,
- )
- # Should be False because root allow: {id: admin} denies non-admin users
- assert result is False
-
- # But admin with same restrictions should be allowed
- admin_actor = {"id": "admin", "_r": {"r": {"mydb": {"t1": ["view-table"]}}}}
- result = await ds.allowed(
- action="view-table",
- resource=TableResource("mydb", "t1"),
- actor=admin_actor,
- )
- assert result is True
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 42995c0d..4a8c60d7 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -691,7 +691,7 @@ async def test_hook_permission_resources_sql():
await ds.invoke_startup()
collected = []
- for block in ds.pm.hook.permission_resources_sql(
+ for block in pm.hook.permission_resources_sql(
datasette=ds,
actor={"id": "alice"},
action="view-table",
@@ -1161,12 +1161,12 @@ async def test_hook_filters_from_request(ds_client):
if request.args.get("_nothing"):
return FilterArguments(["1 = 0"], human_descriptions=["NOTHING"])
- ds_client.ds.pm.register(ReturnNothingPlugin(), name="ReturnNothingPlugin")
+ pm.register(ReturnNothingPlugin(), name="ReturnNothingPlugin")
response = await ds_client.get("/fixtures/facetable?_nothing=1")
assert "0 rows\n where NOTHING" in response.text
json_response = await ds_client.get("/fixtures/facetable.json?_nothing=1")
assert json_response.json()["rows"] == []
- ds_client.ds.pm.unregister(name="ReturnNothingPlugin")
+ pm.unregister(name="ReturnNothingPlugin")
@pytest.mark.asyncio
@@ -1327,7 +1327,7 @@ async def test_hook_actors_from_ids():
return inner
try:
- ds.pm.register(ActorsFromIdsPlugin(), name="ActorsFromIdsPlugin")
+ pm.register(ActorsFromIdsPlugin(), name="ActorsFromIdsPlugin")
actors2 = await ds.actors_from_ids(["3", "5", "7"])
assert actors2 == {
"3": {"id": "3", "name": "Cate Blanchett"},
@@ -1335,7 +1335,7 @@ async def test_hook_actors_from_ids():
"7": {"id": "7", "name": "Sarah Paulson"},
}
finally:
- ds.pm.unregister(name="ReturnNothingPlugin")
+ pm.unregister(name="ReturnNothingPlugin")
@pytest.mark.asyncio
@@ -1350,14 +1350,14 @@ async def test_plugin_is_installed():
return {}
try:
- datasette.pm.register(DummyPlugin(), name="DummyPlugin")
+ pm.register(DummyPlugin(), name="DummyPlugin")
response = await datasette.client.get("/-/plugins.json")
assert response.status_code == 200
installed_plugins = {p["name"] for p in response.json()}
assert "DummyPlugin" in installed_plugins
finally:
- datasette.pm.unregister(name="DummyPlugin")
+ pm.unregister(name="DummyPlugin")
@pytest.mark.asyncio
@@ -1384,7 +1384,7 @@ async def test_hook_jinja2_environment_from_request(tmpdir):
datasette = Datasette(memory=True)
try:
- datasette.pm.register(EnvironmentPlugin(), name="EnvironmentPlugin")
+ pm.register(EnvironmentPlugin(), name="EnvironmentPlugin")
response = await datasette.client.get("/")
assert response.status_code == 200
assert "Hello museums!" not in response.text
@@ -1395,7 +1395,7 @@ async def test_hook_jinja2_environment_from_request(tmpdir):
assert response2.status_code == 200
assert "Hello museums!" in response2.text
finally:
- datasette.pm.unregister(name="EnvironmentPlugin")
+ pm.unregister(name="EnvironmentPlugin")
class SlotPlugin:
@@ -1433,48 +1433,48 @@ class SlotPlugin:
@pytest.mark.asyncio
async def test_hook_top_homepage():
- datasette = Datasette(memory=True)
try:
- datasette.pm.register(SlotPlugin(), name="SlotPlugin")
+ pm.register(SlotPlugin(), name="SlotPlugin")
+ datasette = Datasette(memory=True)
response = await datasette.client.get("/?z=foo")
assert response.status_code == 200
assert "Xtop_homepage:foo" in response.text
finally:
- datasette.pm.unregister(name="SlotPlugin")
+ pm.unregister(name="SlotPlugin")
@pytest.mark.asyncio
async def test_hook_top_database():
- datasette = Datasette(memory=True)
try:
- datasette.pm.register(SlotPlugin(), name="SlotPlugin")
+ pm.register(SlotPlugin(), name="SlotPlugin")
+ datasette = Datasette(memory=True)
response = await datasette.client.get("/_memory?z=bar")
assert response.status_code == 200
assert "Xtop_database:_memory:bar" in response.text
finally:
- datasette.pm.unregister(name="SlotPlugin")
+ pm.unregister(name="SlotPlugin")
@pytest.mark.asyncio
async def test_hook_top_table(ds_client):
try:
- ds_client.ds.pm.register(SlotPlugin(), name="SlotPlugin")
+ pm.register(SlotPlugin(), name="SlotPlugin")
response = await ds_client.get("/fixtures/facetable?z=baz")
assert response.status_code == 200
assert "Xtop_table:fixtures:facetable:baz" in response.text
finally:
- ds_client.ds.pm.unregister(name="SlotPlugin")
+ pm.unregister(name="SlotPlugin")
@pytest.mark.asyncio
async def test_hook_top_row(ds_client):
try:
- ds_client.ds.pm.register(SlotPlugin(), name="SlotPlugin")
+ pm.register(SlotPlugin(), name="SlotPlugin")
response = await ds_client.get("/fixtures/facet_cities/1?z=bax")
assert response.status_code == 200
assert "Xtop_row:fixtures:facet_cities:San Francisco:bax" in response.text
finally:
- ds_client.ds.pm.unregister(name="SlotPlugin")
+ pm.unregister(name="SlotPlugin")
@pytest.mark.asyncio
diff --git a/tests/test_restriction_sql.py b/tests/test_restriction_sql.py
index f23eb839..7d6d8a5a 100644
--- a/tests/test_restriction_sql.py
+++ b/tests/test_restriction_sql.py
@@ -13,6 +13,7 @@ async def test_multiple_restriction_sources_intersect():
provide restriction_sql - both must pass for access to be granted.
"""
from datasette import hookimpl
+ from datasette.plugins import pm
class RestrictivePlugin:
__name__ = "RestrictivePlugin"
@@ -28,12 +29,11 @@ async def test_multiple_restriction_sources_intersect():
return None
plugin = RestrictivePlugin()
-
- ds = Datasette()
- await ds.invoke_startup()
- ds.pm.register(plugin, name="restrictive_plugin")
+ pm.register(plugin, name="restrictive_plugin")
try:
+ ds = Datasette()
+ await ds.invoke_startup()
db1 = ds.add_memory_database("db1_multi_intersect")
db2 = ds.add_memory_database("db2_multi_intersect")
await db1.execute_write("CREATE TABLE t1 (id INTEGER)")
@@ -55,7 +55,7 @@ async def test_multiple_restriction_sources_intersect():
assert ("db1_multi_intersect", "t1") in resources
assert ("db2_multi_intersect", "t1") not in resources
finally:
- ds.pm.unregister(name="restrictive_plugin")
+ pm.unregister(name="restrictive_plugin")
@pytest.mark.asyncio
@@ -265,6 +265,7 @@ async def test_permission_resources_sql_multiple_restriction_sources_intersect()
provide restriction_sql - both must pass for access to be granted.
"""
from datasette import hookimpl
+ from datasette.plugins import pm
class RestrictivePlugin:
__name__ = "RestrictivePlugin"
@@ -280,12 +281,11 @@ async def test_permission_resources_sql_multiple_restriction_sources_intersect()
return None
plugin = RestrictivePlugin()
-
- ds = Datasette()
- await ds.invoke_startup()
- ds.pm.register(plugin, name="restrictive_plugin")
+ pm.register(plugin, name="restrictive_plugin")
try:
+ ds = Datasette()
+ await ds.invoke_startup()
db1 = ds.add_memory_database("db1_multi_restrictions")
db2 = ds.add_memory_database("db2_multi_restrictions")
await db1.execute_write("CREATE TABLE t1 (id INTEGER)")
@@ -312,4 +312,4 @@ async def test_permission_resources_sql_multiple_restriction_sources_intersect()
assert ("db1_multi_restrictions", "t1") in resources
assert ("db2_multi_restrictions", "t1") not in resources
finally:
- ds.pm.unregister(name="restrictive_plugin")
+ pm.unregister(name="restrictive_plugin")
diff --git a/tests/test_row_detail_panel.py b/tests/test_row_detail_panel.py
new file mode 100644
index 00000000..01424938
--- /dev/null
+++ b/tests/test_row_detail_panel.py
@@ -0,0 +1,531 @@
+"""
+Playwright tests for the row detail side panel feature.
+"""
+
+import pytest
+import subprocess
+import sys
+import tempfile
+import time
+import httpx
+from playwright.sync_api import expect
+
+
+def wait_until_responds(url, timeout=5.0):
+ """Poll *url* with HTTP GET until it responds, raising AssertionError after *timeout* seconds."""
+ start = time.time()
+ while time.time() - start < timeout:
+ try:
+ httpx.get(url)
+ return
+ except httpx.ConnectError:
+ time.sleep(0.1)
+ raise AssertionError(f"Timed out waiting for {url} to respond")
+
+
+@pytest.fixture(scope="module")
+def datasette_server():
+ """Module-scoped fixture: build a temp SQLite database, serve it with Datasette on port 8042, and tear both down afterwards."""
+ # Create a simple test database
+ import sqlite3
+ import os
+
+ db_path = os.path.join(tempfile.gettempdir(), "test_products.db")
+ # Remove if exists
+ if os.path.exists(db_path):
+ os.remove(db_path)
+
+ conn = sqlite3.connect(db_path)
+ conn.execute(
+ """
+ CREATE TABLE products (
+ id INTEGER PRIMARY KEY,
+ name TEXT,
+ description TEXT,
+ price REAL,
+ category TEXT
+ )
+ """
+ )
+ conn.execute(
+ """
+ INSERT INTO products (name, description, price, category) VALUES
+ ('Laptop', 'High-performance laptop', 999.99, 'Electronics'),
+ ('Mouse', 'Wireless mouse', 29.99, 'Electronics'),
+ ('Desk', 'Standing desk', 499.99, 'Furniture'),
+ ('Chair', 'Ergonomic chair', 299.99, 'Furniture'),
+ ('Notebook', 'Spiral notebook', 4.99, 'Stationery')
+ """
+ )
+ conn.commit()
+ conn.close()
+
+ # Start Datasette server
+ ds_proc = subprocess.Popen(
+ [sys.executable, "-m", "datasette", db_path, "-p", "8042"],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ cwd=tempfile.gettempdir(),
+ )
+ wait_until_responds("http://localhost:8042/")
+
+ # Check it started successfully
+ assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8")
+
+ yield {"base_url": "http://localhost:8042", "db_name": "test_products"}
+
+ # Shut down the server
+ ds_proc.terminate()
+ ds_proc.wait()
+
+ # Clean up
+ if os.path.exists(db_path):
+ os.remove(db_path)
+
+
+def test_row_detail_panel_elements_exist(page, datasette_server):
+ """Test that the row detail panel HTML elements exist"""
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+ page.goto(f"{base_url}/{db_name}/products")
+
+ # Wait for the page to load
+ page.wait_for_selector(".rows-and-columns")
+
+ # Check that the dialog element exists
+ dialog = page.locator("#rowDetailPanel")
+ assert dialog.count() == 1
+
+ # Check that the close button exists
+ close_button = page.locator("#closeRowDetail")
+ assert close_button.count() == 1
+
+ # Check that the content div exists
+ content_div = page.locator("#rowDetailContent")
+ assert content_div.count() == 1
+
+
+def test_row_click_opens_panel(page, datasette_server):
+ """Test that clicking a table row opens the side panel"""
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+ page.goto(f"{base_url}/{db_name}/products")
+
+ # Wait for the table to load
+ page.wait_for_selector(".rows-and-columns tbody tr")
+
+ # Get the dialog
+ dialog = page.locator("#rowDetailPanel")
+
+ # Dialog should not be open initially
+ assert not dialog.evaluate("el => el.hasAttribute('open')")
+
+ # Click the first row
+ first_row = page.locator(".table-row-clickable").first
+ first_row.click()
+
+ # Wait for the dialog to open
+ page.wait_for_selector("#rowDetailPanel[open]", timeout=2000)
+
+ # Dialog should now be open
+ assert dialog.evaluate("el => el.hasAttribute('open')")
+
+ # Content should be loaded (not showing "Loading...")
+ content = page.locator("#rowDetailContent")
+ expect(content).not_to_contain_text("Loading...")
+
+
+def test_row_panel_displays_data(page, datasette_server):
+ """Test that the row panel displays the correct data"""
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+ page.goto(f"{base_url}/{db_name}/products")
+
+ # Wait for the table to load
+ page.wait_for_selector(".rows-and-columns tbody tr")
+
+ # Click the first row (Laptop)
+ first_row = page.locator(".table-row-clickable").first
+ first_row.click()
+
+ # Wait for the dialog to open and content to load
+ page.wait_for_selector("#rowDetailPanel[open]")
+ page.wait_for_selector("#rowDetailContent dl")
+
+ # Check that the content includes the expected data
+ content = page.locator("#rowDetailContent")
+ expect(content).to_contain_text("Laptop")
+ expect(content).to_contain_text("High-performance laptop")
+ expect(content).to_contain_text("999.99")
+ expect(content).to_contain_text("Electronics")
+
+
+def test_close_button_closes_panel(page, datasette_server):
+ """Test that clicking the close button closes the panel"""
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+ page.goto(f"{base_url}/{db_name}/products")
+
+ # Wait for the table to load
+ page.wait_for_selector(".rows-and-columns tbody tr")
+
+ # Click a row to open the panel
+ first_row = page.locator(".table-row-clickable").first
+ first_row.click()
+
+ # Wait for the dialog to open
+ page.wait_for_selector("#rowDetailPanel[open]")
+
+ # Click the close button
+ close_button = page.locator("#closeRowDetail")
+ close_button.click()
+
+ # Wait for the dialog to close
+ page.wait_for_timeout(200) # Wait for animation
+
+ # Dialog should be closed
+ dialog = page.locator("#rowDetailPanel")
+ assert not dialog.evaluate("el => el.hasAttribute('open')")
+
+
+def test_escape_key_closes_panel(page, datasette_server):
+ """Test that pressing Escape closes the panel"""
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+ page.goto(f"{base_url}/{db_name}/products")
+
+ # Wait for the table to load
+ page.wait_for_selector(".rows-and-columns tbody tr")
+
+ # Click a row to open the panel
+ first_row = page.locator(".table-row-clickable").first
+ first_row.click()
+
+ # Wait for the dialog to open
+ page.wait_for_selector("#rowDetailPanel[open]")
+
+ # Press Escape
+ page.keyboard.press("Escape")
+
+ # Wait for the dialog to close
+ page.wait_for_timeout(200) # Wait for animation
+
+ # Dialog should be closed
+ dialog = page.locator("#rowDetailPanel")
+ assert not dialog.evaluate("el => el.hasAttribute('open')")
+
+
+@pytest.mark.skip(
+ reason="Backdrop click is difficult to test programmatically - works in manual testing"
+)
+def test_backdrop_click_closes_panel(page, datasette_server):
+ """Test that clicking the backdrop closes the panel"""
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+ page.goto(f"{base_url}/{db_name}/products")
+
+ # Wait for the table to load
+ page.wait_for_selector(".rows-and-columns tbody tr")
+
+ # Click a row to open the panel
+ first_row = page.locator(".table-row-clickable").first
+ first_row.click()
+
+ # Wait for the dialog to open
+ page.wait_for_selector("#rowDetailPanel[open]")
+
+ # Click the dialog backdrop (the dialog element itself, not the content)
+ dialog = page.locator("#rowDetailPanel")
+ # Get the bounding box and click outside the content area
+ box = dialog.bounding_box()
+ if box:
+ # Click on the left side of the dialog (the backdrop)
+ page.mouse.click(box["x"] + 10, box["y"] + box["height"] / 2)
+
+ # Wait for the dialog to close
+ page.wait_for_timeout(200) # Wait for animation
+
+ # Dialog should be closed
+ assert not dialog.evaluate("el => el.hasAttribute('open')")
+
+
+def test_multiple_rows_different_data(page, datasette_server):
+ """Test that clicking different rows shows different data"""
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+ page.goto(f"{base_url}/{db_name}/products")
+
+ # Wait for the table to load
+ page.wait_for_selector(".rows-and-columns tbody tr")
+
+ # Click the first row
+ first_row = page.locator(".table-row-clickable").first
+ first_row.click()
+
+ # Wait for the dialog to open
+ page.wait_for_selector("#rowDetailPanel[open]")
+ page.wait_for_selector("#rowDetailContent dl")
+
+ # Check for first row data
+ content = page.locator("#rowDetailContent")
+ expect(content).to_contain_text("Laptop")
+
+ # Close the panel
+ close_button = page.locator("#closeRowDetail")
+ close_button.click()
+ page.wait_for_timeout(200)
+
+ # Click the second row
+ second_row = page.locator(".table-row-clickable").nth(1)
+ second_row.click()
+
+ # Wait for the dialog to open again
+ page.wait_for_selector("#rowDetailPanel[open]")
+ page.wait_for_selector("#rowDetailContent dl")
+
+ # Check for second row data
+ expect(content).to_contain_text("Mouse")
+ expect(content).to_contain_text("Wireless mouse")
+
+
+def test_row_hover_state(page, datasette_server):
+ """Test that rows have hover state styling"""
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+ page.goto(f"{base_url}/{db_name}/products")
+
+ # Wait for the table to load
+ page.wait_for_selector(".rows-and-columns tbody tr")
+
+ # Get the first row
+ first_row = page.locator(".table-row-clickable").first
+
+ # Check that the row has cursor: pointer
+ cursor_style = first_row.evaluate("el => window.getComputedStyle(el).cursor")
+ assert cursor_style == "pointer"
+
+
+def test_navigation_buttons_exist(page, datasette_server):
+ """Test that navigation buttons are present"""
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+ page.goto(f"{base_url}/{db_name}/products")
+
+ # Wait for the table to load
+ page.wait_for_selector(".rows-and-columns tbody tr")
+
+ # Click a row to open the panel
+ first_row = page.locator(".table-row-clickable").first
+ first_row.click()
+
+ # Wait for the dialog to open
+ page.wait_for_selector("#rowDetailPanel[open]")
+
+ # Check that navigation buttons exist
+ prev_button = page.locator("#prevRowButton")
+ next_button = page.locator("#nextRowButton")
+ position = page.locator("#rowPosition")
+
+ assert prev_button.count() == 1
+ assert next_button.count() == 1
+ assert position.count() == 1
+
+
+def test_previous_button_disabled_on_first_row(page, datasette_server):
+ """Test that previous button is disabled on the first row"""
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+ page.goto(f"{base_url}/{db_name}/products")
+
+ # Wait for the table to load
+ page.wait_for_selector(".rows-and-columns tbody tr")
+
+ # Click the first row
+ first_row = page.locator(".table-row-clickable").first
+ first_row.click()
+
+ # Wait for the dialog to open
+ page.wait_for_selector("#rowDetailPanel[open]")
+
+ # Previous button should be disabled
+ prev_button = page.locator("#prevRowButton")
+ assert prev_button.is_disabled()
+
+ # Next button should be enabled
+ next_button = page.locator("#nextRowButton")
+ assert not next_button.is_disabled()
+
+
+def test_next_button_navigation(page, datasette_server):
+ """Test that next button navigates to the next row"""
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+ page.goto(f"{base_url}/{db_name}/products")
+
+ # Wait for the table to load
+ page.wait_for_selector(".rows-and-columns tbody tr")
+
+ # Click the first row
+ first_row = page.locator(".table-row-clickable").first
+ first_row.click()
+
+ # Wait for the dialog to open and content to load
+ page.wait_for_selector("#rowDetailPanel[open]")
+ page.wait_for_selector("#rowDetailContent dl")
+
+ # Should show Laptop data
+ content = page.locator("#rowDetailContent")
+ expect(content).to_contain_text("Laptop")
+
+ # Click next button
+ next_button = page.locator("#nextRowButton")
+ next_button.click()
+
+ # Wait for content to update
+ page.wait_for_timeout(300)
+
+ # Should now show Mouse data
+ expect(content).to_contain_text("Mouse")
+ expect(content).to_contain_text("29.99")
+
+
+def test_previous_button_navigation(page, datasette_server):
+ """Test that previous button navigates to the previous row"""
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+ page.goto(f"{base_url}/{db_name}/products")
+
+ # Wait for the table to load
+ page.wait_for_selector(".rows-and-columns tbody tr")
+
+ # Click the second row
+ second_row = page.locator(".table-row-clickable").nth(1)
+ second_row.click()
+
+ # Wait for the dialog to open and content to load
+ page.wait_for_selector("#rowDetailPanel[open]")
+ page.wait_for_selector("#rowDetailContent dl")
+
+ # Should show Mouse data
+ content = page.locator("#rowDetailContent")
+ expect(content).to_contain_text("Mouse")
+
+ # Previous button should be enabled now
+ prev_button = page.locator("#prevRowButton")
+ assert not prev_button.is_disabled()
+
+ # Click previous button
+ prev_button.click()
+
+ # Wait for content to update
+ page.wait_for_timeout(300)
+
+ # Should now show Laptop data
+ expect(content).to_contain_text("Laptop")
+
+ # Previous button should now be disabled (we're at first row)
+ assert prev_button.is_disabled()
+
+
+def test_row_position_updates(page, datasette_server):
+ """Test that row position indicator updates correctly"""
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+ page.goto(f"{base_url}/{db_name}/products")
+
+ # Wait for the table to load
+ page.wait_for_selector(".rows-and-columns tbody tr")
+
+ # Click the first row
+ first_row = page.locator(".table-row-clickable").first
+ first_row.click()
+
+ # Wait for the dialog to open
+ page.wait_for_selector("#rowDetailPanel[open]")
+
+ # Check position indicator shows "Row 1"
+ position = page.locator("#rowPosition")
+ expect(position).to_have_text("Row 1")
+
+ # Click next
+ next_button = page.locator("#nextRowButton")
+ next_button.click()
+ page.wait_for_timeout(300)
+
+ # Position should update to "Row 2"
+ expect(position).to_have_text("Row 2")
+
+
+def test_pagination_navigation(page, datasette_server):
+ """Test that navigation works across pagination boundaries"""
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+
+ # Add page_size parameter to force pagination
+ page.goto(f"{base_url}/{db_name}/products?_size=2")
+
+ # Wait for the table to load
+ page.wait_for_selector(".rows-and-columns tbody tr")
+
+ # Click the second (last visible) row
+ second_row = page.locator(".table-row-clickable").nth(1)
+ second_row.click()
+
+ # Wait for the dialog to open and content to load
+ page.wait_for_selector("#rowDetailPanel[open]")
+ page.wait_for_selector("#rowDetailContent dl")
+
+ # Should show Mouse data (second row)
+ content = page.locator("#rowDetailContent")
+ expect(content).to_contain_text("Mouse")
+
+ # Next button should be enabled (there are more rows via pagination)
+ next_button = page.locator("#nextRowButton")
+ assert not next_button.is_disabled()
+
+ # Click next button - should load the third row from the next page
+ next_button.click()
+
+ # Wait for loading and content update
+ page.wait_for_timeout(1000) # Give time for pagination fetch
+
+ # Should now show Desk data (third row, from next page)
+ expect(content).to_contain_text("Desk")
+
+ # Previous button should work to go back
+ prev_button = page.locator("#prevRowButton")
+ assert not prev_button.is_disabled()
+ prev_button.click()
+ page.wait_for_timeout(300)
+
+ # Should be back to Mouse
+ expect(content).to_contain_text("Mouse")
+
+
+@pytest.mark.skip(reason="Mobile viewport test - enable if needed")
+def test_panel_responsive_on_mobile(page, datasette_server):
+ """Test that the panel is responsive on mobile viewports"""
+ # Set mobile viewport
+ page.set_viewport_size({"width": 375, "height": 667})
+
+ base_url = datasette_server["base_url"]
+ db_name = datasette_server["db_name"]
+ page.goto(f"{base_url}/{db_name}/products")
+
+ # Wait for the table to load
+ page.wait_for_selector(".rows-and-columns tbody tr")
+
+ # Click a row
+ first_row = page.locator(".table-row-clickable").first
+ first_row.click()
+
+ # Wait for the dialog to open
+ page.wait_for_selector("#rowDetailPanel[open]")
+
+ # Check that the panel width is appropriate for mobile
+ dialog = page.locator("#rowDetailPanel")
+ width = dialog.evaluate("el => el.offsetWidth")
+ viewport_width = page.viewport_size["width"]
+
+ # Panel should take most of the width on mobile (90%)
+ assert width > viewport_width * 0.85 # Allow some margin