diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 10cdac01..9f53b01e 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -1,10 +1,11 @@ name: Deploy latest.datasette.io on: + workflow_dispatch: push: branches: - main - - 1.0-dev + # - 1.0-dev permissions: contents: read @@ -14,19 +15,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out datasette - uses: actions/checkout@v3 + uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v6 - # Using Python 3.10 for gcloud compatibility: with: - python-version: "3.10" - - uses: actions/cache@v4 - name: Configure pip caching - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} - restore-keys: | - ${{ runner.os }}-pip- + python-version: "3.13" + cache: pip - name: Install Python dependencies run: | python -m pip install --upgrade pip @@ -101,12 +95,13 @@ jobs: # jq '.plugins |= . + {"datasette-ephemeral-tables": {"table_ttl": 900}}' \ # > metadata.json # cat metadata.json - - name: Set up Cloud Run - uses: google-github-actions/setup-gcloud@v0 + - id: auth + name: Authenticate to Google Cloud + uses: google-github-actions/auth@v3 with: - version: '318.0.0' - service_account_email: ${{ secrets.GCP_SA_EMAIL }} - service_account_key: ${{ secrets.GCP_SA_KEY }} + credentials_json: ${{ secrets.GCP_SA_KEY }} + - name: Set up Cloud SDK + uses: google-github-actions/setup-gcloud@v3 - name: Deploy to Cloud Run env: LATEST_DATASETTE_SECRET: ${{ secrets.LATEST_DATASETTE_SECRET }} diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 5acb4899..e94d0bdd 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -20,7 +20,7 @@ jobs: with: python-version: ${{ matrix.python-version }} cache: pip - cache-dependency-path: setup.py + cache-dependency-path: pyproject.toml - name: Install dependencies run: | pip install -e '.[test]' @@ -41,7 +41,7 @@ 
jobs: with: python-version: '3.13' cache: pip - cache-dependency-path: setup.py + cache-dependency-path: pyproject.toml - name: Install dependencies run: | pip install setuptools wheel build @@ -62,7 +62,7 @@ jobs: with: python-version: '3.10' cache: pip - cache-dependency-path: setup.py + cache-dependency-path: pyproject.toml - name: Install dependencies run: | python -m pip install -e .[docs] @@ -73,12 +73,13 @@ jobs: DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build sphinx-to-sqlite ../docs.db _build cd .. - - name: Set up Cloud Run - uses: google-github-actions/setup-gcloud@v0 + - id: auth + name: Authenticate to Google Cloud + uses: google-github-actions/auth@v3 with: - version: '318.0.0' - service_account_email: ${{ secrets.GCP_SA_EMAIL }} - service_account_key: ${{ secrets.GCP_SA_KEY }} + credentials_json: ${{ secrets.GCP_SA_KEY }} + - name: Set up Cloud SDK + uses: google-github-actions/setup-gcloud@v3 - name: Deploy stable-docs.datasette.io to Cloud Run run: |- gcloud config set run/region us-central1 diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml index 8a47fd2d..7c5370ce 100644 --- a/.github/workflows/spellcheck.yml +++ b/.github/workflows/spellcheck.yml @@ -15,7 +15,7 @@ jobs: with: python-version: '3.11' cache: 'pip' - cache-dependency-path: '**/setup.py' + cache-dependency-path: '**/pyproject.toml' - name: Install dependencies run: | pip install -e '.[docs]' diff --git a/.github/workflows/stable-docs.yml b/.github/workflows/stable-docs.yml new file mode 100644 index 00000000..3119d617 --- /dev/null +++ b/.github/workflows/stable-docs.yml @@ -0,0 +1,76 @@ +name: Update Stable Docs + +on: + release: + types: [published] + push: + branches: + - main + +permissions: + contents: write + +jobs: + update_stable_docs: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v5 + with: + fetch-depth: 0 # We need all commits to find docs/ changes + - name: Set up Git user + run: | + git
config user.name "Automated" + git config user.email "actions@users.noreply.github.com" + - name: Create stable branch if it does not yet exist + run: | + if ! git ls-remote --heads origin stable | grep -qE '\bstable\b'; then + # Make sure we have all tags locally + git fetch --tags --quiet + + # Latest tag that is just numbers and dots (optionally prefixed with 'v') + # e.g., 0.65.2 or v0.65.2 — excludes 1.0a20, 1.0-rc1, etc. + LATEST_RELEASE=$( + git tag -l --sort=-v:refname \ + | grep -E '^v?[0-9]+(\.[0-9]+){1,3}$' \ + | head -n1 + ) + + git checkout -b stable + + # If there are any stable releases, copy docs/ from the most recent + if [ -n "$LATEST_RELEASE" ]; then + rm -rf docs/ + git checkout "$LATEST_RELEASE" -- docs/ || true + fi + + git commit -m "Populate docs/ from $LATEST_RELEASE" || echo "No changes" + git push -u origin stable + fi + - name: Handle Release + if: github.event_name == 'release' && !github.event.release.prerelease + run: | + git fetch --all + git checkout stable + git reset --hard ${GITHUB_REF#refs/tags/} + git push origin stable --force + - name: Handle Commit to Main + if: contains(github.event.head_commit.message, '!stable-docs') + run: | + git fetch origin + git checkout -b stable origin/stable + # Get the list of modified files in docs/ from the current commit + FILES=$(git diff-tree --no-commit-id --name-only -r ${{ github.sha }} -- docs/) + # Check if the list of files is non-empty + if [[ -n "$FILES" ]]; then + # Checkout those files to the stable branch to over-write with their contents + for FILE in $FILES; do + git checkout ${{ github.sha }} -- $FILE + done + git add docs/ + git commit -m "Doc changes from ${{ github.sha }}" + git push origin stable + else + echo "No changes to docs/ in this commit." 
+ exit 0 + fi diff --git a/.github/workflows/test-coverage.yml b/.github/workflows/test-coverage.yml index 22a69150..8d73b64d 100644 --- a/.github/workflows/test-coverage.yml +++ b/.github/workflows/test-coverage.yml @@ -21,7 +21,7 @@ jobs: with: python-version: '3.12' cache: 'pip' - cache-dependency-path: '**/setup.py' + cache-dependency-path: '**/pyproject.toml' - name: Install Python dependencies run: | python -m pip install --upgrade pip diff --git a/.github/workflows/test-pyodide.yml b/.github/workflows/test-pyodide.yml index 7357b30c..b490a9bf 100644 --- a/.github/workflows/test-pyodide.yml +++ b/.github/workflows/test-pyodide.yml @@ -18,7 +18,7 @@ jobs: with: python-version: "3.10" cache: 'pip' - cache-dependency-path: '**/setup.py' + cache-dependency-path: '**/pyproject.toml' - name: Cache Playwright browsers uses: actions/cache@v4 with: diff --git a/.github/workflows/test-sqlite-support.yml b/.github/workflows/test-sqlite-support.yml index 698aec8a..76ea138a 100644 --- a/.github/workflows/test-sqlite-support.yml +++ b/.github/workflows/test-sqlite-support.yml @@ -32,7 +32,7 @@ jobs: python-version: ${{ matrix.python-version }} allow-prereleases: true cache: pip - cache-dependency-path: setup.py + cache-dependency-path: pyproject.toml - name: Set up SQLite ${{ matrix.sqlite-version }} uses: asg017/sqlite-versions@71ea0de37ae739c33e447af91ba71dda8fcf22e6 with: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 901c4905..1e5e03d2 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -19,7 +19,7 @@ jobs: python-version: ${{ matrix.python-version }} allow-prereleases: true cache: pip - cache-dependency-path: setup.py + cache-dependency-path: pyproject.toml - name: Build extension for --load-extension test run: |- (cd tests && gcc ext.c -fPIC -shared -o ext.so) @@ -36,6 +36,8 @@ jobs: - name: Install docs dependencies run: | pip install -e '.[docs]' + - name: Black + run: black --check . 
- name: Check if cog needs to be run run: | cog --check docs/*.rst diff --git a/.github/workflows/tmate.yml b/.github/workflows/tmate.yml index 9792245d..123f6c71 100644 --- a/.github/workflows/tmate.yml +++ b/.github/workflows/tmate.yml @@ -5,6 +5,7 @@ on: permissions: contents: read + models: read jobs: build: @@ -13,3 +14,5 @@ jobs: - uses: actions/checkout@v2 - name: Setup tmate session uses: mxschmitt/action-tmate@v3 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 277ff653..70e6bbeb 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,9 @@ scratchpad .vscode +uv.lock +data.db + # We don't use Pipfile, so ignore them Pipfile Pipfile.lock @@ -123,4 +126,4 @@ node_modules # include it in source control. tests/*.dylib tests/*.so -tests/*.dll \ No newline at end of file +tests/*.dll diff --git a/Justfile b/Justfile index 172de444..a47662c3 100644 --- a/Justfile +++ b/Justfile @@ -5,38 +5,52 @@ export DATASETTE_SECRET := "not_a_secret" # Setup project @init: - pipenv run pip install -e '.[test,docs]' + uv sync --extra test --extra docs # Run pytest with supplied options -@test *options: - pipenv run pytest {{options}} +@test *options: init + uv run pytest -n auto {{options}} @codespell: - pipenv run codespell README.md --ignore-words docs/codespell-ignore-words.txt - pipenv run codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt - pipenv run codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt - pipenv run codespell tests --ignore-words docs/codespell-ignore-words.txt + uv run codespell README.md --ignore-words docs/codespell-ignore-words.txt + uv run codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt + uv run codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt + uv run codespell tests --ignore-words docs/codespell-ignore-words.txt # Run linters: black, flake8, mypy, cog @lint: codespell - pipenv run black . 
--check - pipenv run flake8 - pipenv run cog --check README.md docs/*.rst + uv run black . --check + uv run flake8 + uv run --extra test cog --check README.md docs/*.rst # Rebuild docs with cog @cog: - pipenv run cog -r README.md docs/*.rst + uv run --extra test cog -r README.md docs/*.rst # Serve live docs on localhost:8000 -@docs: cog - pipenv run blacken-docs -l 60 docs/*.rst - cd docs && pipenv run make livehtml +@docs: cog blacken-docs + uv run --extra docs make -C docs livehtml + +# Build docs as static HTML +@docs-build: cog blacken-docs + rm -rf docs/_build && cd docs && uv run make html # Apply Black @black: - pipenv run black . + uv run black . -@serve: - pipenv run sqlite-utils create-database data.db - pipenv run sqlite-utils create-table data.db docs id integer title text --pk id --ignore - pipenv run python -m datasette data.db --root --reload +# Apply blacken-docs +@blacken-docs: + uv run blacken-docs -l 60 docs/*.rst + +# Apply prettier +@prettier: + npm run fix + +# Format code with both black and prettier +@format: black prettier blacken-docs + +@serve *options: + uv run sqlite-utils create-database data.db + uv run sqlite-utils create-table data.db docs id integer title text --pk id --ignore + uv run python -m datasette data.db --root --reload {{options}} diff --git a/datasette/app.py b/datasette/app.py index 2658d848..b9955925 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1,6 +1,12 @@ +from __future__ import annotations + from asgi_csrf import Errors import asyncio -from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union +import contextvars +from typing import TYPE_CHECKING, Any, Dict, Iterable, List + +if TYPE_CHECKING: + from datasette.permissions import AllowedResource, Resource import asgi_csrf import collections import dataclasses @@ -52,7 +58,10 @@ from .views.special import ( AllowedResourcesView, PermissionRulesView, PermissionCheckView, - TablesSearchView, + TablesView, + InstanceSchemaView, + 
DatabaseSchemaView, + TableSchemaView, ) from .views.table import ( TableInsertView, @@ -66,6 +75,7 @@ from .url_builder import Urls from .database import Database, QueryInterrupted from .utils import ( + PaginatedResources, PrefixedUrlString, SPATIALITE_FUNCTIONS, StartupError, @@ -86,6 +96,7 @@ from .utils import ( resolve_env_secrets, resolve_routes, tilde_decode, + tilde_encode, to_css_class, urlsafe_components, redact_keys, @@ -115,10 +126,39 @@ from .tracer import AsgiTracer from .plugins import pm, DEFAULT_PLUGINS, get_plugins from .version import __version__ -from .utils.permissions import build_rules_union, PluginSQL +from .resources import DatabaseResource, TableResource app_root = Path(__file__).parent.parent + +# Context variable to track when code is executing within a datasette.client request +_in_datasette_client = contextvars.ContextVar("in_datasette_client", default=False) + + +class _DatasetteClientContext: + """Context manager to mark code as executing within a datasette.client request.""" + + def __enter__(self): + self.token = _in_datasette_client.set(True) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + _in_datasette_client.reset(self.token) + return False + + +@dataclasses.dataclass +class PermissionCheck: + """Represents a logged permission check for debugging purposes.""" + + when: str + actor: Dict[str, Any] | None + action: str + parent: str | None + child: str | None + result: bool + + # https://github.com/simonw/datasette/issues/283#issuecomment-781591015 SQLITE_LIMIT_ATTACHED = 10 @@ -228,6 +268,9 @@ FAVICON_PATH = app_root / "datasette" / "static" / "favicon.png" DEFAULT_NOT_SET = object() +ResourcesSQL = collections.namedtuple("ResourcesSQL", ("sql", "params")) + + async def favicon(request, send): await asgi_send_file( send, @@ -278,6 +321,7 @@ class Datasette: crossdb=False, nolock=False, internal=None, + default_deny=False, ): self._startup_invoked = False assert config_dir is None or isinstance( @@ -308,7 
+352,7 @@ class Datasette: self.inspect_data = inspect_data self.immutables = set(immutables or []) self.databases = collections.OrderedDict() - self.permissions = {} # .invoke_startup() will populate this + self.actions = {} # .invoke_startup() will populate this try: self._refresh_schemas_lock = asyncio.Lock() except RuntimeError as rex: @@ -392,10 +436,37 @@ class Datasette: config = config or {} config_settings = config.get("settings") or {} - # validate "settings" keys in datasette.json - for key in config_settings: + # Validate settings from config file + for key, value in config_settings.items(): if key not in DEFAULT_SETTINGS: - raise StartupError("Invalid setting '{}' in datasette.json".format(key)) + raise StartupError(f"Invalid setting '{key}' in config file") + # Validate type matches expected type from DEFAULT_SETTINGS + if value is not None: # Allow None/null values + expected_type = type(DEFAULT_SETTINGS[key]) + actual_type = type(value) + if actual_type != expected_type: + raise StartupError( + f"Setting '{key}' in config file has incorrect type. " + f"Expected {expected_type.__name__}, got {actual_type.__name__}. " + f"Value: {value!r}. " + f"Hint: In YAML/JSON config files, remove quotes from boolean and integer values." + ) + + # Validate settings from constructor parameter + if settings: + for key, value in settings.items(): + if key not in DEFAULT_SETTINGS: + raise StartupError(f"Invalid setting '{key}' in settings parameter") + if value is not None: + expected_type = type(DEFAULT_SETTINGS[key]) + actual_type = type(value) + if actual_type != expected_type: + raise StartupError( + f"Setting '{key}' in settings parameter has incorrect type. " + f"Expected {expected_type.__name__}, got {actual_type.__name__}. 
" + f"Value: {value!r}" + ) + self.config = config # CLI settings should overwrite datasette.json settings self._settings = dict(DEFAULT_SETTINGS, **(config_settings), **(settings or {})) @@ -458,6 +529,8 @@ class Datasette: self._register_renderers() self._permission_checks = collections.deque(maxlen=200) self._root_token = secrets.token_hex(32) + self.root_enabled = False + self.default_deny = default_deny self.client = DatasetteClient(self) async def apply_metadata_json(self): @@ -503,19 +576,17 @@ class Datasette: pass return environment - def get_permission(self, name_or_abbr: str) -> "Permission": + def get_action(self, name_or_abbr: str): """ - Returns a Permission object for the given name or abbreviation. Raises KeyError if not found. + Returns an Action object for the given name or abbreviation. Returns None if not found. """ - if name_or_abbr in self.permissions: - return self.permissions[name_or_abbr] + if name_or_abbr in self.actions: + return self.actions[name_or_abbr] # Try abbreviation - for permission in self.permissions.values(): - if permission.abbr == name_or_abbr: - return permission - raise KeyError( - "No permission found with name or abbreviation {}".format(name_or_abbr) - ) + for action in self.actions.values(): + if action.abbr == name_or_abbr: + return action + return None async def refresh_schemas(self): if self._refresh_schemas_lock.locked(): @@ -535,6 +606,15 @@ class Datasette: "select database_name, schema_version from catalog_databases" ) } + # Delete stale entries for databases that are no longer attached + stale_databases = set(current_schema_versions.keys()) - set( + self.databases.keys() + ) + for stale_db_name in stale_databases: + await internal_db.execute_write( + "DELETE FROM catalog_databases WHERE database_name = ?", + [stale_db_name], + ) for database_name, db in self.databases.items(): schema_version = (await db.execute("PRAGMA schema_version")).first()[0] # Compare schema versions to see if we should skip it @@ -560,6 
+640,17 @@ class Datasette: def urls(self): return Urls(self) + @property + def pm(self): + """ + Return the global plugin manager instance. + + This provides access to the pluggy PluginManager that manages all + Datasette plugins and hooks. Use datasette.pm.hook.hook_name() to + call plugin hooks. + """ + return pm + async def invoke_startup(self): # This must be called for Datasette to be in a usable state if self._startup_invoked: @@ -572,24 +663,32 @@ class Datasette: event_classes.extend(extra_classes) self.event_classes = tuple(event_classes) - # Register permissions, but watch out for duplicate name/abbr - names = {} - abbrs = {} - for hook in pm.hook.register_permissions(datasette=self): + # Register actions, but watch out for duplicate name/abbr + action_names = {} + action_abbrs = {} + for hook in pm.hook.register_actions(datasette=self): if hook: - for p in hook: - if p.name in names and p != names[p.name]: + for action in hook: + if ( + action.name in action_names + and action != action_names[action.name] + ): raise StartupError( - "Duplicate permission name: {}".format(p.name) + "Duplicate action name: {}".format(action.name) ) - if p.abbr and p.abbr in abbrs and p != abbrs[p.abbr]: + if ( + action.abbr + and action.abbr in action_abbrs + and action != action_abbrs[action.abbr] + ): raise StartupError( - "Duplicate permission abbr: {}".format(p.abbr) + "Duplicate action abbr: {}".format(action.abbr) ) - names[p.name] = p - if p.abbr: - abbrs[p.abbr] = p - self.permissions[p.name] = p + action_names[action.name] = action + if action.abbr: + action_abbrs[action.abbr] = action + self.actions[action.name] = action + for hook in pm.hook.prepare_jinja2_environment( env=self._jinja_env, datasette=self ): @@ -604,14 +703,22 @@ class Datasette: def unsign(self, signed, namespace="default"): return URLSafeSerializer(self._secret, namespace).loads(signed) + def in_client(self) -> bool: + """Check if the current code is executing within a datasette.client request. 
+ + Returns: + bool: True if currently executing within a datasette.client request, False otherwise. + """ + return _in_datasette_client.get() + def create_token( self, actor_id: str, *, - expires_after: Optional[int] = None, - restrict_all: Optional[Iterable[str]] = None, - restrict_database: Optional[Dict[str, Iterable[str]]] = None, - restrict_resource: Optional[Dict[str, Dict[str, Iterable[str]]]] = None, + expires_after: int | None = None, + restrict_all: Iterable[str] | None = None, + restrict_database: Dict[str, Iterable[str]] | None = None, + restrict_resource: Dict[str, Dict[str, Iterable[str]]] | None = None, ): token = {"a": actor_id, "t": int(time.time())} if expires_after: @@ -619,10 +726,10 @@ class Datasette: def abbreviate_action(action): # rename to abbr if possible - permission = self.permissions.get(action) - if not permission: + action_obj = self.actions.get(action) + if not action_obj: return action - return permission.abbr or action + return action_obj.abbr or action if expires_after: token["d"] = expires_after @@ -672,8 +779,10 @@ class Datasette: self.databases = new_databases return db - def add_memory_database(self, memory_name): - return self.add_database(Database(self, memory_name=memory_name)) + def add_memory_database(self, memory_name, name=None, route=None): + return self.add_database( + Database(self, memory_name=memory_name), name=name, route=route + ) def remove_database(self, name): self.get_database(name).close() @@ -859,9 +968,7 @@ class Datasette: return self._app_css_hash async def get_canned_queries(self, database_name, actor): - queries = ( - ((self.config or {}).get("databases") or {}).get(database_name) or {} - ).get("queries") or {} + queries = {} for more_queries in pm.hook.canned_queries( datasette=self, database=database_name, @@ -943,14 +1050,14 @@ class Datasette: if request: actor = request.actor # Top-level link - if await self.permission_allowed(actor=actor, action="view-instance"): + if await 
self.allowed(action="view-instance", actor=actor): crumbs.append({"href": self.urls.instance(), "label": "home"}) # Database link if database: - if await self.permission_allowed( - actor=actor, + if await self.allowed( action="view-database", - resource=database, + resource=DatabaseResource(database=database), + actor=actor, ): crumbs.append( { @@ -961,10 +1068,10 @@ class Datasette: # Table link if table: assert database, "table= requires database=" - if await self.permission_allowed( - actor=actor, + if await self.allowed( action="view-table", - resource=(database, table), + resource=TableResource(database=database, table=table), + actor=actor, ): crumbs.append( { @@ -975,8 +1082,8 @@ class Datasette: return crumbs async def actors_from_ids( - self, actor_ids: Iterable[Union[str, int]] - ) -> Dict[Union[id, str], Dict]: + self, actor_ids: Iterable[str | int] + ) -> Dict[int | str, Dict]: result = pm.hook.actors_from_ids(datasette=self, actor_ids=actor_ids) if result is None: # Do the default thing @@ -991,419 +1098,355 @@ class Datasette: for hook in pm.hook.track_event(datasette=self, event=event): await await_me_maybe(hook) - async def permission_allowed( - self, actor, action, resource=None, *, default=DEFAULT_NOT_SET - ): - """Check permissions using the permissions_allowed plugin hook""" - result = None - # Use default from registered permission, if available - if default is DEFAULT_NOT_SET and action in self.permissions: - default = self.permissions[action].default - opinions = [] - # Every plugin is consulted for their opinion - for check in pm.hook.permission_allowed( - datasette=self, - actor=actor, - action=action, - resource=resource, - ): - check = await await_me_maybe(check) - if check is not None: - opinions.append(check) - - result = None - # If any plugin said False it's false - the veto rule - if any(not r for r in opinions): - result = False - elif any(r for r in opinions): - # Otherwise, if any plugin said True it's true - result = True - - 
used_default = False - if result is None: - # No plugin expressed an opinion, so use the default - result = default - used_default = True - self._permission_checks.append( - { - "when": datetime.datetime.now(datetime.timezone.utc).isoformat(), - "actor": actor, - "action": action, - "resource": resource, - "used_default": used_default, - "result": result, - } - ) - return result - - async def allowed_resources_sql( - self, actor: dict | None, action: str - ) -> tuple[str, dict]: - """Combine permission_resources_sql PluginSQL blocks into a UNION query. - - Returns a (sql, params) tuple suitable for execution against SQLite. + def resource_for_action(self, action: str, parent: str | None, child: str | None): """ - plugin_blocks: List[PluginSQL] = [] - for block in pm.hook.permission_resources_sql( - datasette=self, - actor=actor, - action=action, - ): - block = await await_me_maybe(block) - if block is None: - continue - if isinstance(block, (list, tuple)): - candidates = block - else: - candidates = [block] - for candidate in candidates: - if candidate is None: - continue - if not isinstance(candidate, PluginSQL): - continue - plugin_blocks.append(candidate) + Create a Resource instance for the given action with parent/child values. - actor_id = actor.get("id") if actor else None - sql, params = build_rules_union( - actor=str(actor_id) if actor_id is not None else "", - plugins=plugin_blocks, - ) - return sql, params - - async def get_allowed_tables( - self, - actor, - database: Optional[str] = None, - extra_sql: str = "", - extra_params: Optional[dict] = None, - ): - """ - Get list of tables the actor is allowed to view. + Looks up the action's resource_class and instantiates it with the + provided parent and child identifiers. 
Args: - actor: The actor dict (or None for anonymous) - database: Optional database name to filter by - extra_sql: Optional extra SQL to add to the WHERE clause - extra_params: Optional parameters for the extra SQL + action: The action name (e.g., "view-table", "view-query") + parent: The parent resource identifier (e.g., database name) + child: The child resource identifier (e.g., table/query name) Returns: - List of dicts with keys: database, table, resource + A Resource instance of the appropriate subclass + + Raises: + ValueError: If the action is unknown """ - from datasette.utils.permissions import resolve_permissions_from_catalog + from datasette.permissions import Resource - await self.refresh_schemas() - internal_db = self.get_internal_database() + action_obj = self.actions.get(action) + if not action_obj: + raise ValueError(f"Unknown action: {action}") - # Build the candidate SQL query - where_clauses = [] - params = extra_params.copy() if extra_params else {} - - if database: - where_clauses.append("database_name = :database") - params["database"] = database - - if extra_sql: - where_clauses.append(f"({extra_sql})") - - where_sql = " AND ".join(where_clauses) if where_clauses else "1=1" - - candidate_sql = f""" - SELECT database_name AS parent, table_name AS child - FROM catalog_tables - WHERE {where_sql} - """ - - # Collect plugin SQL blocks for view-table permission - table_plugins = [] - for block in pm.hook.permission_resources_sql( - datasette=self, - actor=actor, - action="view-table", - ): - block = await await_me_maybe(block) - if block is None: - continue - if isinstance(block, (list, tuple)): - candidates = block - else: - candidates = [block] - for candidate in candidates: - if candidate is None: - continue - if not isinstance(candidate, PluginSQL): - continue - table_plugins.append(candidate) - - # Collect plugin SQL blocks for view-database permission - db_plugins = [] - for block in pm.hook.permission_resources_sql( - datasette=self, - 
actor=actor, - action="view-database", - ): - block = await await_me_maybe(block) - if block is None: - continue - if isinstance(block, (list, tuple)): - candidates = block - else: - candidates = [block] - for candidate in candidates: - if candidate is None: - continue - if not isinstance(candidate, PluginSQL): - continue - db_plugins.append(candidate) - - # Get actor_id for resolve_permissions_from_catalog - if isinstance(actor, dict): - actor_id = actor.get("id") - elif actor: - actor_id = actor - else: - actor_id = None - - actor_str = str(actor_id) if actor_id is not None else "" - - # Resolve permissions for all matching tables - table_permission_results = await resolve_permissions_from_catalog( - internal_db, - actor=actor_str, - plugins=table_plugins, - action="view-table", - candidate_sql=candidate_sql, - candidate_params=params, - implicit_deny=True, - ) - - # Get unique database names from table results - database_names = list( - set(r["parent"] for r in table_permission_results if r["allow"] == 1) - ) - - # Check view-database permissions for those databases - if database_names: - # Build placeholders and params dict for database check - placeholders = ",".join(f":db{i}" for i in range(len(database_names))) - db_params = {f"db{i}": db_name for i, db_name in enumerate(database_names)} - - db_candidate_sql = f""" - SELECT database_name AS parent, NULL AS child - FROM catalog_databases - WHERE database_name IN ({placeholders}) - """ - db_permission_results = await resolve_permissions_from_catalog( - internal_db, - actor=actor_str, - plugins=db_plugins, - action="view-database", - candidate_sql=db_candidate_sql, - candidate_params=db_params, - implicit_deny=True, - ) - - # Create set of allowed databases - allowed_databases = { - r["parent"] for r in db_permission_results if r["allow"] == 1 - } - else: - allowed_databases = set() - - # Filter to only tables in allowed databases - allowed = [] - for result in table_permission_results: - if result["allow"] == 
1 and result["parent"] in allowed_databases: - allowed.append( - { - "database": result["parent"], - "table": result["child"], - "resource": result["resource"], - } - ) - - return allowed - - async def allowed( - self, *, actor, action, resource=None, default=DEFAULT_NOT_SET - ): - """Permission check backed by permission_resources_sql rules.""" - - if default is DEFAULT_NOT_SET and action in self.permissions: - default = self.permissions[action].default - - if isinstance(actor, dict) or actor is None: - actor_dict = actor - else: - actor_dict = {"id": actor} - actor_id = actor_dict.get("id") if actor_dict else None - - candidate_parent = None - candidate_child = None - if isinstance(resource, str): - candidate_parent = resource - elif isinstance(resource, (tuple, list)) and len(resource) == 2: - candidate_parent, candidate_child = resource - elif resource is not None: - raise TypeError("resource must be None, str, or (parent, child) tuple") - - union_sql, union_params = await self.allowed_resources_sql(actor_dict, action) - - query = f""" - WITH rules AS ( - {union_sql} - ), - candidate AS ( - SELECT :cand_parent AS parent, :cand_child AS child - ), - matched AS ( - SELECT - r.allow, - r.reason, - r.source_plugin, - CASE - WHEN r.child IS NOT NULL THEN 2 - WHEN r.parent IS NOT NULL THEN 1 - ELSE 0 - END AS depth - FROM rules r - JOIN candidate c - ON (r.parent IS NULL OR r.parent = c.parent) - AND (r.child IS NULL OR r.child = c.child) - ), - ranked AS ( - SELECT *, - ROW_NUMBER() OVER ( - ORDER BY - depth DESC, - CASE WHEN allow = 0 THEN 0 ELSE 1 END, - source_plugin - ) AS rn - FROM matched - ), - winner AS ( - SELECT allow, reason, source_plugin, depth - FROM ranked - WHERE rn = 1 - ) - SELECT allow, reason, source_plugin, depth FROM winner - """ - - params = { - **union_params, - "cand_parent": candidate_parent, - "cand_child": candidate_child, - } - - rows = await self.get_internal_database().execute(query, params) - row = rows.first() - - reason = None - 
source_plugin = None - depth = None - used_default = False - - if row is None: - result = default - used_default = True - else: - allow = row["allow"] - reason = row["reason"] - source_plugin = row["source_plugin"] - depth = row["depth"] - if allow is None: - result = default - used_default = True - else: - result = bool(allow) - - self._permission_checks.append( - { - "when": datetime.datetime.now(datetime.timezone.utc).isoformat(), - "actor": actor, - "action": action, - "resource": resource, - "used_default": used_default, - "result": result, - "reason": reason, - "source_plugin": source_plugin, - "depth": depth, - } - ) - - return result - - async def permission_allowed_2( - self, actor, action, resource=None, *, default=DEFAULT_NOT_SET - ): - """Legacy method that delegates to allowed().""" - return await self.allowed( - actor=actor, action=action, resource=resource, default=default - ) - - async def ensure_permissions( - self, - actor: dict, - permissions: Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]], - ): - """ - permissions is a list of (action, resource) tuples or 'action' strings - - Raises datasette.Forbidden() if any of the checks fail - """ - assert actor is None or isinstance(actor, dict), "actor must be None or a dict" - for permission in permissions: - if isinstance(permission, str): - action = permission - resource = None - elif isinstance(permission, (tuple, list)) and len(permission) == 2: - action, resource = permission - else: - assert ( - False - ), "permission should be string or tuple of two items: {}".format( - repr(permission) - ) - ok = await self.permission_allowed( - actor, - action, - resource=resource, - default=None, - ) - if ok is not None: - if ok: - return - else: - raise Forbidden(action) + resource_class = action_obj.resource_class + instance = object.__new__(resource_class) + Resource.__init__(instance, parent=parent, child=child) + return instance async def check_visibility( self, actor: dict, - action: 
Optional[str] = None, - resource: Optional[Union[str, Tuple[str, str]]] = None, - permissions: Optional[ - Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]] - ] = None, + action: str, + resource: "Resource" | None = None, ): - """Returns (visible, private) - visible = can you see it, private = can others see it too""" - if permissions: - assert ( - not action and not resource - ), "Can't use action= or resource= with permissions=" - else: - permissions = [(action, resource)] - try: - await self.ensure_permissions(actor, permissions) - except Forbidden: + """ + Check if actor can see a resource and if it's private. + + Returns (visible, private) tuple: + - visible: bool - can the actor see it? + - private: bool - if visible, can anonymous users NOT see it? + """ + from datasette.permissions import Resource + + # Validate that resource is a Resource object or None + if resource is not None and not isinstance(resource, Resource): + raise TypeError(f"resource must be a Resource subclass instance or None.") + + # Check if actor can see it + if not await self.allowed(action=action, resource=resource, actor=actor): return False, False - # User can see it, but can the anonymous user see it? - try: - await self.ensure_permissions(None, permissions) - except Forbidden: - # It's visible but private + + # Check if anonymous user can see it (for "private" flag) + if not await self.allowed(action=action, resource=resource, actor=None): + # Actor can see it but anonymous cannot - it's private return True, True - # It's visible to everyone + + # Both actor and anonymous can see it - it's public return True, False + async def allowed_resources_sql( + self, + *, + action: str, + actor: dict | None = None, + parent: str | None = None, + include_is_private: bool = False, + ) -> ResourcesSQL: + """ + Build SQL query to get all resources the actor can access for the given action. 
+ + Args: + action: The action name (e.g., "view-table") + actor: The actor dict (or None for unauthenticated) + parent: Optional parent filter (e.g., database name) to limit results + include_is_private: If True, include is_private column showing if anonymous cannot access + + Returns a namedtuple of (query: str, params: dict) that can be executed against the internal database. + The query returns rows with (parent, child, reason) columns, plus is_private if requested. + + Example: + query, params = await datasette.allowed_resources_sql( + action="view-table", + actor=actor, + parent="mydb", + include_is_private=True + ) + result = await datasette.get_internal_database().execute(query, params) + """ + from datasette.utils.actions_sql import build_allowed_resources_sql + + action_obj = self.actions.get(action) + if not action_obj: + raise ValueError(f"Unknown action: {action}") + + sql, params = await build_allowed_resources_sql( + self, actor, action, parent=parent, include_is_private=include_is_private + ) + return ResourcesSQL(sql, params) + + async def allowed_resources( + self, + action: str, + actor: dict | None = None, + *, + parent: str | None = None, + include_is_private: bool = False, + include_reasons: bool = False, + limit: int = 100, + next: str | None = None, + ) -> PaginatedResources: + """ + Return paginated resources the actor can access for the given action. + + Uses SQL with keyset pagination to efficiently filter resources. + Returns PaginatedResources with list of Resource instances and pagination metadata. 
+ + Args: + action: The action name (e.g., "view-table") + actor: The actor dict (or None for unauthenticated) + parent: Optional parent filter (e.g., database name) to limit results + include_is_private: If True, adds a .private attribute to each Resource + include_reasons: If True, adds a .reasons attribute with List[str] of permission reasons + limit: Maximum number of results to return (1-1000, default 100) + next: Keyset token from previous page for pagination + + Returns: + PaginatedResources with: + - resources: List of Resource objects for this page + - next: Token for next page (None if no more results) + + Example: + # Get first page of tables + page = await datasette.allowed_resources("view-table", actor, limit=50) + for table in page.resources: + print(f"{table.parent}/{table.child}") + + # Get next page + if page.next: + next_page = await datasette.allowed_resources( + "view-table", actor, limit=50, next=page.next + ) + + # With reasons for debugging + page = await datasette.allowed_resources( + "view-table", actor, include_reasons=True + ) + for table in page.resources: + print(f"{table.child}: {table.reasons}") + + # Iterate through all results with async generator + page = await datasette.allowed_resources("view-table", actor) + async for table in page.all(): + print(table.child) + """ + + action_obj = self.actions.get(action) + if not action_obj: + raise ValueError(f"Unknown action: {action}") + + # Validate and cap limit + limit = min(max(1, limit), 1000) + + # Get base SQL query + query, params = await self.allowed_resources_sql( + action=action, + actor=actor, + parent=parent, + include_is_private=include_is_private, + ) + + # Add keyset pagination WHERE clause if next token provided + if next: + try: + components = urlsafe_components(next) + if len(components) >= 2: + last_parent, last_child = components[0], components[1] + # Keyset condition: (parent > last) OR (parent = last AND child > last) + keyset_where = """ + (parent > :keyset_parent OR 
+ (parent = :keyset_parent AND child > :keyset_child)) + """ + # Wrap original query and add keyset filter + query = f"SELECT * FROM ({query}) WHERE {keyset_where}" + params["keyset_parent"] = last_parent + params["keyset_child"] = last_child + except (ValueError, KeyError): + # Invalid token - ignore and start from beginning + pass + + # Add LIMIT (fetch limit+1 to detect if there are more results) + # Note: query from allowed_resources_sql() already includes ORDER BY parent, child + query = f"{query} LIMIT :limit" + params["limit"] = limit + 1 + + # Execute query + result = await self.get_internal_database().execute(query, params) + rows = list(result.rows) + + # Check if truncated (got more than limit rows) + truncated = len(rows) > limit + if truncated: + rows = rows[:limit] # Remove the extra row + + # Build Resource objects with optional attributes + resources = [] + for row in rows: + # row[0]=parent, row[1]=child, row[2]=reason, row[3]=is_private (if requested) + resource = self.resource_for_action(action, parent=row[0], child=row[1]) + + # Add reasons if requested + if include_reasons: + reason_json = row[2] + try: + reasons_array = ( + json.loads(reason_json) if isinstance(reason_json, str) else [] + ) + resource.reasons = [r for r in reasons_array if r is not None] + except (json.JSONDecodeError, TypeError): + resource.reasons = [reason_json] if reason_json else [] + + # Add private flag if requested + if include_is_private: + resource.private = bool(row[3]) + + resources.append(resource) + + # Generate next token if there are more results + next_token = None + if truncated and resources: + last_resource = resources[-1] + # Use tilde-encoding like table pagination + next_token = "{},{}".format( + tilde_encode(str(last_resource.parent)), + tilde_encode(str(last_resource.child)), + ) + + return PaginatedResources( + resources=resources, + next=next_token, + _datasette=self, + _action=action, + _actor=actor, + _parent=parent, + 
_include_is_private=include_is_private, + _include_reasons=include_reasons, + _limit=limit, + ) + + async def allowed( + self, + *, + action: str, + resource: "Resource" = None, + actor: dict | None = None, + ) -> bool: + """ + Check if actor can perform action on specific resource. + + Uses SQL to check permission for a single resource without fetching all resources. + This is efficient - it does NOT call allowed_resources() and check membership. + + For global actions, resource should be None (or omitted). + + Example: + from datasette.resources import TableResource + can_view = await datasette.allowed( + action="view-table", + resource=TableResource(database="analytics", table="users"), + actor=actor + ) + + # For global actions, resource can be omitted: + can_debug = await datasette.allowed(action="permissions-debug", actor=actor) + """ + from datasette.utils.actions_sql import check_permission_for_resource + + # For global actions, resource remains None + + # Check if this action has also_requires - if so, check that action first + action_obj = self.actions.get(action) + if action_obj and action_obj.also_requires: + # Must have the required action first + if not await self.allowed( + action=action_obj.also_requires, + resource=resource, + actor=actor, + ): + return False + + # For global actions, resource is None + parent = resource.parent if resource else None + child = resource.child if resource else None + + result = await check_permission_for_resource( + datasette=self, + actor=actor, + action=action, + parent=parent, + child=child, + ) + + # Log the permission check for debugging + self._permission_checks.append( + PermissionCheck( + when=datetime.datetime.now(datetime.timezone.utc).isoformat(), + actor=actor, + action=action, + parent=parent, + child=child, + result=result, + ) + ) + + return result + + async def ensure_permission( + self, + *, + action: str, + resource: "Resource" = None, + actor: dict | None = None, + ): + """ + Check if actor can 
perform action on resource, raising Forbidden if not. + + This is a convenience wrapper around allowed() that raises Forbidden + instead of returning False. Use this when you want to enforce a permission + check and halt execution if it fails. + + Example: + from datasette.resources import TableResource + + # Will raise Forbidden if actor cannot view the table + await datasette.ensure_permission( + action="view-table", + resource=TableResource(database="analytics", table="users"), + actor=request.actor + ) + + # For instance-level actions, resource can be omitted: + await datasette.ensure_permission( + action="permissions-debug", + actor=request.actor + ) + """ + if not await self.allowed(action=action, resource=resource, actor=actor): + raise Forbidden(action) + async def execute( self, db_name, @@ -1438,15 +1481,14 @@ class Datasette: except IndexError: return {} # Ensure user has permission to view the referenced table + from datasette.resources import TableResource + other_table = fk["other_table"] other_column = fk["other_column"] visible, _ = await self.check_visibility( actor, - permissions=[ - ("view-table", (database, other_table)), - ("view-database", database), - "view-instance", - ], + action="view-table", + resource=TableResource(database=database, table=other_table), ) if not visible: return {} @@ -1611,6 +1653,22 @@ class Datasette: def _actor(self, request): return {"actor": request.actor} + def _actions(self): + return [ + { + "name": action.name, + "abbr": action.abbr, + "description": action.description, + "takes_parent": action.takes_parent, + "takes_child": action.takes_child, + "resource_class": ( + action.resource_class.__name__ if action.resource_class else None + ), + "also_requires": action.also_requires, + } + for action in sorted(self.actions.values(), key=lambda a: a.name) + ] + async def table_config(self, database: str, table: str) -> dict: """Return dictionary of configuration for specified table""" return ( @@ -1644,10 +1702,10 @@ 
class Datasette: async def render_template( self, - templates: Union[List[str], str, Template], - context: Optional[Union[Dict[str, Any], Context]] = None, - request: Optional[Request] = None, - view_name: Optional[str] = None, + templates: List[str] | str | Template, + context: Dict[str, Any] | Context | None = None, + request: Request | None = None, + view_name: str | None = None, ): if not self._startup_invoked: raise Exception("render_template() called before await ds.invoke_startup()") @@ -1746,7 +1804,7 @@ class Datasette: return await template.render_async(template_context) def set_actor_cookie( - self, response: Response, actor: dict, expire_after: Optional[int] = None + self, response: Response, actor: dict, expire_after: int | None = None ): data = {"a": actor} if expire_after: @@ -1876,6 +1934,16 @@ class Datasette: ), r"/-/actor(\.(?Pjson))?$", ) + add_route( + JsonDataView.as_view( + self, + "actions.json", + self._actions, + template="debug_actions.html", + permission="permissions-debug", + ), + r"/-/actions(\.(?Pjson))?$", + ) add_route( AuthTokenView.as_view(self), r"/-/auth-token$", @@ -1888,6 +1956,14 @@ class Datasette: ApiExplorerView.as_view(self), r"/-/api$", ) + add_route( + TablesView.as_view(self), + r"/-/tables(\.(?Pjson))?$", + ) + add_route( + InstanceSchemaView.as_view(self), + r"/-/schema(\.(?Pjson|md))?$", + ) add_route( LogoutView.as_view(self), r"/-/logout$", @@ -1916,10 +1992,6 @@ class Datasette: AllowDebugView.as_view(self), r"/-/allow-debug$", ) - add_route( - TablesSearchView.as_view(self), - r"/-/tables(\.(?Pjson))?$", - ) add_route( wrap_view(PatternPortfolioView, self), r"/-/patterns$", @@ -1933,6 +2005,10 @@ class Datasette: r"/(?P[^\/\.]+)(\.(?P\w+))?$", ) add_route(TableCreateView.as_view(self), r"/(?P[^\/\.]+)/-/create$") + add_route( + DatabaseSchemaView.as_view(self), + r"/(?P[^\/\.]+)/-/schema(\.(?Pjson|md))?$", + ) add_route( wrap_view(QueryView, self), r"/(?P[^\/\.]+)/-/query(\.(?P\w+))?$", @@ -1957,6 +2033,10 @@ 
class Datasette: TableDropView.as_view(self), r"/(?P[^\/\.]+)/(?P[^\/\.]+)/-/drop$", ) + add_route( + TableSchemaView.as_view(self), + r"/(?P[^\/\.]+)/(?P
[^\/\.]+)/-/schema(\.(?Pjson|md))?$", + ) add_route( RowDeleteView.as_view(self), r"/(?P[^\/\.]+)/(?P
[^/]+?)/(?P[^/]+?)/-/delete$", @@ -2347,9 +2427,18 @@ class NotFoundExplicit(NotFound): class DatasetteClient: + """Internal HTTP client for making requests to a Datasette instance. + + Used for testing and for internal operations that need to make HTTP requests + to the Datasette app without going through an actual HTTP server. + """ + def __init__(self, ds): self.ds = ds - self.app = ds.app() + + @property + def app(self): + return self.ds.app() def actor_cookie(self, actor): # Utility method, mainly for tests @@ -2362,40 +2451,89 @@ class DatasetteClient: path = f"http://localhost{path}" return path - async def _request(self, method, path, **kwargs): - async with httpx.AsyncClient( - transport=httpx.ASGITransport(app=self.app), - cookies=kwargs.pop("cookies", None), - ) as client: - return await getattr(client, method)(self._fix(path), **kwargs) + async def _request(self, method, path, skip_permission_checks=False, **kwargs): + from datasette.permissions import SkipPermissions - async def get(self, path, **kwargs): - return await self._request("get", path, **kwargs) + with _DatasetteClientContext(): + if skip_permission_checks: + with SkipPermissions(): + async with httpx.AsyncClient( + transport=httpx.ASGITransport(app=self.app), + cookies=kwargs.pop("cookies", None), + ) as client: + return await getattr(client, method)(self._fix(path), **kwargs) + else: + async with httpx.AsyncClient( + transport=httpx.ASGITransport(app=self.app), + cookies=kwargs.pop("cookies", None), + ) as client: + return await getattr(client, method)(self._fix(path), **kwargs) - async def options(self, path, **kwargs): - return await self._request("options", path, **kwargs) + async def get(self, path, skip_permission_checks=False, **kwargs): + return await self._request( + "get", path, skip_permission_checks=skip_permission_checks, **kwargs + ) - async def head(self, path, **kwargs): - return await self._request("head", path, **kwargs) + async def options(self, path, 
skip_permission_checks=False, **kwargs): + return await self._request( + "options", path, skip_permission_checks=skip_permission_checks, **kwargs + ) - async def post(self, path, **kwargs): - return await self._request("post", path, **kwargs) + async def head(self, path, skip_permission_checks=False, **kwargs): + return await self._request( + "head", path, skip_permission_checks=skip_permission_checks, **kwargs + ) - async def put(self, path, **kwargs): - return await self._request("put", path, **kwargs) + async def post(self, path, skip_permission_checks=False, **kwargs): + return await self._request( + "post", path, skip_permission_checks=skip_permission_checks, **kwargs + ) - async def patch(self, path, **kwargs): - return await self._request("patch", path, **kwargs) + async def put(self, path, skip_permission_checks=False, **kwargs): + return await self._request( + "put", path, skip_permission_checks=skip_permission_checks, **kwargs + ) - async def delete(self, path, **kwargs): - return await self._request("delete", path, **kwargs) + async def patch(self, path, skip_permission_checks=False, **kwargs): + return await self._request( + "patch", path, skip_permission_checks=skip_permission_checks, **kwargs + ) + + async def delete(self, path, skip_permission_checks=False, **kwargs): + return await self._request( + "delete", path, skip_permission_checks=skip_permission_checks, **kwargs + ) + + async def request(self, method, path, skip_permission_checks=False, **kwargs): + """Make an HTTP request with the specified method. 
+ + Args: + method: HTTP method (e.g., "GET", "POST", "PUT") + path: The path to request + skip_permission_checks: If True, bypass all permission checks for this request + **kwargs: Additional arguments to pass to httpx + + Returns: + httpx.Response: The response from the request + """ + from datasette.permissions import SkipPermissions - async def request(self, method, path, **kwargs): avoid_path_rewrites = kwargs.pop("avoid_path_rewrites", None) - async with httpx.AsyncClient( - transport=httpx.ASGITransport(app=self.app), - cookies=kwargs.pop("cookies", None), - ) as client: - return await client.request( - method, self._fix(path, avoid_path_rewrites), **kwargs - ) + with _DatasetteClientContext(): + if skip_permission_checks: + with SkipPermissions(): + async with httpx.AsyncClient( + transport=httpx.ASGITransport(app=self.app), + cookies=kwargs.pop("cookies", None), + ) as client: + return await client.request( + method, self._fix(path, avoid_path_rewrites), **kwargs + ) + else: + async with httpx.AsyncClient( + transport=httpx.ASGITransport(app=self.app), + cookies=kwargs.pop("cookies", None), + ) as client: + return await client.request( + method, self._fix(path, avoid_path_rewrites), **kwargs + ) diff --git a/datasette/cli.py b/datasette/cli.py index bacabc4c..21420491 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -146,7 +146,6 @@ def inspect(files, inspect_file, sqlite_extensions): This can then be passed to "datasette --inspect-file" to speed up count operations against immutable database files. 
""" - app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions) inspect_data = run_sync(lambda: inspect_(files, sqlite_extensions)) if inspect_file == "-": sys.stdout.write(json.dumps(inspect_data, indent=2)) @@ -439,10 +438,20 @@ def uninstall(packages, yes): help="Output URL that sets a cookie authenticating the root user", is_flag=True, ) +@click.option( + "--default-deny", + help="Deny all permissions by default", + is_flag=True, +) @click.option( "--get", help="Run an HTTP GET request against this path, print results and exit", ) +@click.option( + "--headers", + is_flag=True, + help="Include HTTP headers in --get output", +) @click.option( "--token", help="API token to send with --get requests", @@ -510,7 +519,9 @@ def serve( settings, secret, root, + default_deny, get, + headers, token, actor, version_note, @@ -589,15 +600,23 @@ def serve( crossdb=crossdb, nolock=nolock, internal=internal, + default_deny=default_deny, ) - # if files is a single directory, use that as config_dir= - if 1 == len(files) and os.path.isdir(files[0]): - kwargs["config_dir"] = pathlib.Path(files[0]) - files = [] + # Separate directories from files + directories = [f for f in files if os.path.isdir(f)] + file_paths = [f for f in files if not os.path.isdir(f)] + + # Handle config_dir - only one directory allowed + if len(directories) > 1: + raise click.ClickException( + "Cannot pass multiple directories. Pass a single directory as config_dir." 
+ ) + elif len(directories) == 1: + kwargs["config_dir"] = pathlib.Path(directories[0]) # Verify list of files, create if needed (and --create) - for file in files: + for file in file_paths: if not pathlib.Path(file).exists(): if create: sqlite3.connect(file).execute("vacuum") @@ -608,8 +627,32 @@ def serve( ) ) - # De-duplicate files so 'datasette db.db db.db' only attaches one /db - files = list(dict.fromkeys(files)) + # Check for duplicate files by resolving all paths to their absolute forms + # Collect all database files that will be loaded (explicit files + config_dir files) + all_db_files = [] + + # Add explicit files + for file in file_paths: + all_db_files.append((file, pathlib.Path(file).resolve())) + + # Add config_dir databases if config_dir is set + if "config_dir" in kwargs: + config_dir = kwargs["config_dir"] + for ext in ("db", "sqlite", "sqlite3"): + for db_file in config_dir.glob(f"*.{ext}"): + all_db_files.append((str(db_file), db_file.resolve())) + + # Check for duplicates + seen = {} + for original_path, resolved_path in all_db_files: + if resolved_path in seen: + raise click.ClickException( + f"Duplicate database file: '{original_path}' and '{seen[resolved_path]}' " + f"both refer to {resolved_path}" + ) + seen[resolved_path] = original_path + + files = file_paths try: ds = Datasette(files, **kwargs) @@ -628,19 +671,33 @@ def serve( # Run async soundness checks - but only if we're not under pytest run_sync(lambda: check_databases(ds)) + if headers and not get: + raise click.ClickException("--headers can only be used with --get") + if token and not get: raise click.ClickException("--token can only be used with --get") if get: client = TestClient(ds) - headers = {} + request_headers = {} if token: - headers["Authorization"] = "Bearer {}".format(token) + request_headers["Authorization"] = "Bearer {}".format(token) cookies = {} if actor: cookies["ds_actor"] = client.actor_cookie(json.loads(actor)) - response = client.get(get, headers=headers, 
cookies=cookies) - click.echo(response.text) + response = client.get(get, headers=request_headers, cookies=cookies) + + if headers: + # Output HTTP status code, headers, two newlines, then the response body + click.echo(f"HTTP/1.1 {response.status}") + for key, value in response.headers.items(): + click.echo(f"{key}: {value}") + if response.text: + click.echo() + click.echo(response.text) + else: + click.echo(response.text) + exit_code = 0 if response.status == 200 else 1 sys.exit(exit_code) return @@ -648,6 +705,7 @@ def serve( # Start the server url = None if root: + ds.root_enabled = True url = "http://{}:{}{}?token={}".format( host, port, ds.urls.path("-/auth-token"), ds._root_token ) @@ -765,7 +823,7 @@ def create_token( actions.extend([p[1] for p in databases]) actions.extend([p[2] for p in resources]) for action in actions: - if not ds.permissions.get(action): + if not ds.actions.get(action): click.secho( f" Unknown permission: {action} ", fg="red", diff --git a/datasette/database.py b/datasette/database.py index 54a81a67..e5858128 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -143,7 +143,9 @@ class Database: return conn.executescript(sql) with trace("sql", database=self.name, sql=sql.strip(), executescript=True): - results = await self.execute_write_fn(_inner, block=block) + results = await self.execute_write_fn( + _inner, block=block, transaction=False + ) return results async def execute_write_many(self, sql, params_seq, block=True): @@ -374,25 +376,12 @@ class Database: self.cached_size = Path(self.path).stat().st_size return self.cached_size - async def table_counts(self, limit=10, tables=None): - # Determine which tables we need counts for - if tables is None: - tables_to_count = await self.table_names() - else: - tables_to_count = tables - - # If we have cached counts for immutable database, use them + async def table_counts(self, limit=10): if not self.is_mutable and self.cached_table_counts is not None: - # Return only the 
requested tables from cache - return { - table: self.cached_table_counts.get(table) - for table in tables_to_count - if table in self.cached_table_counts - } - + return self.cached_table_counts # Try to get counts for each table, $limit timeout for each count counts = {} - for table in tables_to_count: + for table in await self.table_names(): try: table_count = ( await self.execute( @@ -405,11 +394,8 @@ class Database: # QueryInterrupted - so we catch that too: except (QueryInterrupted, sqlite3.OperationalError, sqlite3.DatabaseError): counts[table] = None - - # Only cache if we counted all tables - if tables is None and not self.is_mutable: + if not self.is_mutable: self._cached_table_counts = counts - return counts @property @@ -424,7 +410,12 @@ class Database: # But SQLite prior to 3.16.0 doesn't support pragma functions results = await self.execute("PRAGMA database_list;") # {'seq': 0, 'name': 'main', 'file': ''} - return [AttachedDatabase(*row) for row in results.rows if row["seq"] > 0] + return [ + AttachedDatabase(*row) + for row in results.rows + # Filter out the SQLite internal "temp" database, refs #2557 + if row["seq"] > 0 and row["name"] != "temp" + ] async def table_exists(self, table): results = await self.execute( diff --git a/datasette/default_actions.py b/datasette/default_actions.py new file mode 100644 index 00000000..87d98fac --- /dev/null +++ b/datasette/default_actions.py @@ -0,0 +1,101 @@ +from datasette import hookimpl +from datasette.permissions import Action +from datasette.resources import ( + DatabaseResource, + TableResource, + QueryResource, +) + + +@hookimpl +def register_actions(): + """Register the core Datasette actions.""" + return ( + # Global actions (no resource_class) + Action( + name="view-instance", + abbr="vi", + description="View Datasette instance", + ), + Action( + name="permissions-debug", + abbr="pd", + description="Access permission debug tool", + ), + Action( + name="debug-menu", + abbr="dm", + description="View 
debug menu items", + ), + # Database-level actions (parent-level) + Action( + name="view-database", + abbr="vd", + description="View database", + resource_class=DatabaseResource, + ), + Action( + name="view-database-download", + abbr="vdd", + description="Download database file", + resource_class=DatabaseResource, + also_requires="view-database", + ), + Action( + name="execute-sql", + abbr="es", + description="Execute read-only SQL queries", + resource_class=DatabaseResource, + also_requires="view-database", + ), + Action( + name="create-table", + abbr="ct", + description="Create tables", + resource_class=DatabaseResource, + ), + # Table-level actions (child-level) + Action( + name="view-table", + abbr="vt", + description="View table", + resource_class=TableResource, + ), + Action( + name="insert-row", + abbr="ir", + description="Insert rows", + resource_class=TableResource, + ), + Action( + name="delete-row", + abbr="dr", + description="Delete rows", + resource_class=TableResource, + ), + Action( + name="update-row", + abbr="ur", + description="Update rows", + resource_class=TableResource, + ), + Action( + name="alter-table", + abbr="at", + description="Alter tables", + resource_class=TableResource, + ), + Action( + name="drop-table", + abbr="dt", + description="Drop tables", + resource_class=TableResource, + ), + # Query-level actions (child-level) + Action( + name="view-query", + abbr="vq", + description="View named query results", + resource_class=QueryResource, + ), + ) diff --git a/datasette/default_menu_links.py b/datasette/default_menu_links.py index 22e6e46a..85032387 100644 --- a/datasette/default_menu_links.py +++ b/datasette/default_menu_links.py @@ -4,7 +4,7 @@ from datasette import hookimpl @hookimpl def menu_links(datasette, actor): async def inner(): - if not await datasette.permission_allowed(actor, "debug-menu"): + if not await datasette.allowed(action="debug-menu", actor=actor): return [] return [ diff --git a/datasette/default_permissions.py 
b/datasette/default_permissions.py deleted file mode 100644 index abad3787..00000000 --- a/datasette/default_permissions.py +++ /dev/null @@ -1,579 +0,0 @@ -from datasette import hookimpl, Permission -from datasette.utils.permissions import PluginSQL -from datasette.utils import actor_matches_allow -import itsdangerous -import time - - -@hookimpl -def register_permissions(): - return ( - Permission( - name="view-instance", - abbr="vi", - description="View Datasette instance", - takes_database=False, - takes_resource=False, - default=True, - ), - Permission( - name="view-database", - abbr="vd", - description="View database", - takes_database=True, - takes_resource=False, - default=True, - implies_can_view=True, - ), - Permission( - name="view-database-download", - abbr="vdd", - description="Download database file", - takes_database=True, - takes_resource=False, - default=True, - ), - Permission( - name="view-table", - abbr="vt", - description="View table", - takes_database=True, - takes_resource=True, - default=True, - implies_can_view=True, - ), - Permission( - name="view-query", - abbr="vq", - description="View named query results", - takes_database=True, - takes_resource=True, - default=True, - implies_can_view=True, - ), - Permission( - name="execute-sql", - abbr="es", - description="Execute read-only SQL queries", - takes_database=True, - takes_resource=False, - default=True, - implies_can_view=True, - ), - Permission( - name="permissions-debug", - abbr="pd", - description="Access permission debug tool", - takes_database=False, - takes_resource=False, - default=False, - ), - Permission( - name="debug-menu", - abbr="dm", - description="View debug menu items", - takes_database=False, - takes_resource=False, - default=False, - ), - Permission( - name="insert-row", - abbr="ir", - description="Insert rows", - takes_database=True, - takes_resource=True, - default=False, - ), - Permission( - name="delete-row", - abbr="dr", - description="Delete rows", - 
takes_database=True, - takes_resource=True, - default=False, - ), - Permission( - name="update-row", - abbr="ur", - description="Update rows", - takes_database=True, - takes_resource=True, - default=False, - ), - Permission( - name="create-table", - abbr="ct", - description="Create tables", - takes_database=True, - takes_resource=False, - default=False, - ), - Permission( - name="alter-table", - abbr="at", - description="Alter tables", - takes_database=True, - takes_resource=True, - default=False, - ), - Permission( - name="drop-table", - abbr="dt", - description="Drop tables", - takes_database=True, - takes_resource=True, - default=False, - ), - ) - - -@hookimpl(tryfirst=True, specname="permission_allowed") -def permission_allowed_default(datasette, actor, action, resource): - async def inner(): - # id=root gets some special permissions: - if action in ( - "permissions-debug", - "debug-menu", - "insert-row", - "create-table", - "alter-table", - "drop-table", - "delete-row", - "update-row", - ): - if actor and actor.get("id") == "root": - return True - - # Resolve view permissions in allow blocks in configuration - if action in ( - "view-instance", - "view-database", - "view-table", - "view-query", - "execute-sql", - ): - result = await _resolve_config_view_permissions( - datasette, actor, action, resource - ) - if result is not None: - return result - - # Resolve custom permissions: blocks in configuration - result = await _resolve_config_permissions_blocks( - datasette, actor, action, resource - ) - if result is not None: - return result - - # --setting default_allow_sql - if action == "execute-sql" and not datasette.setting("default_allow_sql"): - return False - - return inner - - -@hookimpl -async def permission_resources_sql(datasette, actor, action): - rules: list[PluginSQL] = [] - - config_rules = await _config_permission_rules(datasette, actor, action) - rules.extend(config_rules) - - default_allow_actions = { - "view-instance", - "view-database", - 
"view-table", - "execute-sql", - } - if action in default_allow_actions: - reason = f"default allow for {action}".replace("'", "''") - sql = ( - "SELECT NULL AS parent, NULL AS child, 1 AS allow, " f"'{reason}' AS reason" - ) - rules.append( - PluginSQL( - source="default_permissions", - sql=sql, - params={}, - ) - ) - - if not rules: - return None - if len(rules) == 1: - return rules[0] - return rules - - -async def _config_permission_rules(datasette, actor, action) -> list[PluginSQL]: - config = datasette.config or {} - - if actor is None: - actor_dict: dict | None = None - elif isinstance(actor, dict): - actor_dict = actor - else: - actor_lookup = await datasette.actors_from_ids([actor]) - actor_dict = actor_lookup.get(actor) or {"id": actor} - - def evaluate(allow_block): - if allow_block is None: - return None - return actor_matches_allow(actor_dict, allow_block) - - rows = [] - - def add_row(parent, child, result, scope): - if result is None: - return - rows.append( - ( - parent, - child, - bool(result), - f"config {'allow' if result else 'deny'} {scope}", - ) - ) - - root_perm = (config.get("permissions") or {}).get(action) - add_row(None, None, evaluate(root_perm), f"permissions for {action}") - - for db_name, db_config in (config.get("databases") or {}).items(): - db_perm = (db_config.get("permissions") or {}).get(action) - add_row( - db_name, None, evaluate(db_perm), f"permissions for {action} on {db_name}" - ) - - for table_name, table_config in (db_config.get("tables") or {}).items(): - table_perm = (table_config.get("permissions") or {}).get(action) - add_row( - db_name, - table_name, - evaluate(table_perm), - f"permissions for {action} on {db_name}/{table_name}", - ) - - if action == "view-table": - table_allow = (table_config or {}).get("allow") - add_row( - db_name, - table_name, - evaluate(table_allow), - f"allow for {action} on {db_name}/{table_name}", - ) - - for query_name, query_config in (db_config.get("queries") or {}).items(): - # 
query_config can be a string (just SQL) or a dict - if isinstance(query_config, str): - continue - query_perm = (query_config.get("permissions") or {}).get(action) - add_row( - db_name, - query_name, - evaluate(query_perm), - f"permissions for {action} on {db_name}/{query_name}", - ) - if action == "view-query": - query_allow = (query_config or {}).get("allow") - add_row( - db_name, - query_name, - evaluate(query_allow), - f"allow for {action} on {db_name}/{query_name}", - ) - - if action == "view-database": - db_allow = db_config.get("allow") - add_row( - db_name, None, evaluate(db_allow), f"allow for {action} on {db_name}" - ) - - if action == "execute-sql": - db_allow_sql = db_config.get("allow_sql") - add_row(db_name, None, evaluate(db_allow_sql), f"allow_sql for {db_name}") - - if action == "view-instance": - allow_block = config.get("allow") - add_row(None, None, evaluate(allow_block), "allow for view-instance") - - if action == "view-table": - # Tables handled in loop - pass - - if action == "view-query": - # Queries handled in loop - pass - - if action == "execute-sql": - allow_sql = config.get("allow_sql") - add_row(None, None, evaluate(allow_sql), "allow_sql") - - if action == "view-database": - # already handled per-database - pass - - if not rows: - return [] - - parts = [] - params = {} - for idx, (parent, child, allow, reason) in enumerate(rows): - key = f"cfg_{idx}" - parts.append( - f"SELECT :{key}_parent AS parent, :{key}_child AS child, :{key}_allow AS allow, :{key}_reason AS reason" - ) - params[f"{key}_parent"] = parent - params[f"{key}_child"] = child - params[f"{key}_allow"] = 1 if allow else 0 - params[f"{key}_reason"] = reason - - sql = "\nUNION ALL\n".join(parts) - return [PluginSQL(source="config_permissions", sql=sql, params=params)] - - -async def _resolve_config_permissions_blocks(datasette, actor, action, resource): - # Check custom permissions: blocks - config = datasette.config or {} - root_block = (config.get("permissions", None) or 
{}).get(action) - if root_block: - root_result = actor_matches_allow(actor, root_block) - if root_result is not None: - return root_result - # Now try database-specific blocks - if not resource: - return None - if isinstance(resource, str): - database = resource - else: - database = resource[0] - database_block = ( - (config.get("databases", {}).get(database, {}).get("permissions", None)) or {} - ).get(action) - if database_block: - database_result = actor_matches_allow(actor, database_block) - if database_result is not None: - return database_result - # Finally try table/query specific blocks - if not isinstance(resource, tuple): - return None - database, table_or_query = resource - table_block = ( - ( - config.get("databases", {}) - .get(database, {}) - .get("tables", {}) - .get(table_or_query, {}) - .get("permissions", None) - ) - or {} - ).get(action) - if table_block: - table_result = actor_matches_allow(actor, table_block) - if table_result is not None: - return table_result - # Finally the canned queries - query_block = ( - ( - config.get("databases", {}) - .get(database, {}) - .get("queries", {}) - .get(table_or_query, {}) - .get("permissions", None) - ) - or {} - ).get(action) - if query_block: - query_result = actor_matches_allow(actor, query_block) - if query_result is not None: - return query_result - return None - - -async def _resolve_config_view_permissions(datasette, actor, action, resource): - config = datasette.config or {} - if action == "view-instance": - allow = config.get("allow") - if allow is not None: - return actor_matches_allow(actor, allow) - elif action == "view-database": - database_allow = ((config.get("databases") or {}).get(resource) or {}).get( - "allow" - ) - if database_allow is None: - return None - return actor_matches_allow(actor, database_allow) - elif action == "view-table": - database, table = resource - tables = ((config.get("databases") or {}).get(database) or {}).get( - "tables" - ) or {} - table_allow = 
(tables.get(table) or {}).get("allow") - if table_allow is None: - return None - return actor_matches_allow(actor, table_allow) - elif action == "view-query": - # Check if this query has a "allow" block in config - database, query_name = resource - query = await datasette.get_canned_query(database, query_name, actor) - assert query is not None - allow = query.get("allow") - if allow is None: - return None - return actor_matches_allow(actor, allow) - elif action == "execute-sql": - # Use allow_sql block from database block, or from top-level - database_allow_sql = ((config.get("databases") or {}).get(resource) or {}).get( - "allow_sql" - ) - if database_allow_sql is None: - database_allow_sql = config.get("allow_sql") - if database_allow_sql is None: - return None - return actor_matches_allow(actor, database_allow_sql) - - -def restrictions_allow_action( - datasette: "Datasette", - restrictions: dict, - action: str, - resource: str | tuple[str, str], -): - "Do these restrictions allow the requested action against the requested resource?" 
- if action == "view-instance": - # Special case for view-instance: it's allowed if the restrictions include any - # permissions that have the implies_can_view=True flag set - all_rules = restrictions.get("a") or [] - for database_rules in (restrictions.get("d") or {}).values(): - all_rules += database_rules - for database_resource_rules in (restrictions.get("r") or {}).values(): - for resource_rules in database_resource_rules.values(): - all_rules += resource_rules - permissions = [datasette.get_permission(action) for action in all_rules] - if any(p for p in permissions if p.implies_can_view): - return True - - if action == "view-database": - # Special case for view-database: it's allowed if the restrictions include any - # permissions that have the implies_can_view=True flag set AND takes_database - all_rules = restrictions.get("a") or [] - database_rules = list((restrictions.get("d") or {}).get(resource) or []) - all_rules += database_rules - resource_rules = ((restrictions.get("r") or {}).get(resource) or {}).values() - for resource_rules in (restrictions.get("r") or {}).values(): - for table_rules in resource_rules.values(): - all_rules += table_rules - permissions = [datasette.get_permission(action) for action in all_rules] - if any(p for p in permissions if p.implies_can_view and p.takes_database): - return True - - # Does this action have an abbreviation? - to_check = {action} - permission = datasette.permissions.get(action) - if permission and permission.abbr: - to_check.add(permission.abbr) - - # If restrictions is defined then we use those to further restrict the actor - # Crucially, we only use this to say NO (return False) - we never - # use it to return YES (True) because that might over-ride other - # restrictions placed on this actor - all_allowed = restrictions.get("a") - if all_allowed is not None: - assert isinstance(all_allowed, list) - if to_check.intersection(all_allowed): - return True - # How about for the current database? 
- if resource: - if isinstance(resource, str): - database_name = resource - else: - database_name = resource[0] - database_allowed = restrictions.get("d", {}).get(database_name) - if database_allowed is not None: - assert isinstance(database_allowed, list) - if to_check.intersection(database_allowed): - return True - # Or the current table? That's any time the resource is (database, table) - if resource is not None and not isinstance(resource, str) and len(resource) == 2: - database, table = resource - table_allowed = restrictions.get("r", {}).get(database, {}).get(table) - # TODO: What should this do for canned queries? - if table_allowed is not None: - assert isinstance(table_allowed, list) - if to_check.intersection(table_allowed): - return True - - # This action is not specifically allowed, so reject it - return False - - -@hookimpl(specname="permission_allowed") -def permission_allowed_actor_restrictions(datasette, actor, action, resource): - if actor is None: - return None - if "_r" not in actor: - # No restrictions, so we have no opinion - return None - _r = actor.get("_r") - if restrictions_allow_action(datasette, _r, action, resource): - # Return None because we do not have an opinion here - return None - else: - # Block this permission check - return False - - -@hookimpl -def actor_from_request(datasette, request): - prefix = "dstok_" - if not datasette.setting("allow_signed_tokens"): - return None - max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl") - authorization = request.headers.get("authorization") - if not authorization: - return None - if not authorization.startswith("Bearer "): - return None - token = authorization[len("Bearer ") :] - if not token.startswith(prefix): - return None - token = token[len(prefix) :] - try: - decoded = datasette.unsign(token, namespace="token") - except itsdangerous.BadSignature: - return None - if "t" not in decoded: - # Missing timestamp - return None - created = decoded["t"] - if not 
isinstance(created, int): - # Invalid timestamp - return None - duration = decoded.get("d") - if duration is not None and not isinstance(duration, int): - # Invalid duration - return None - if (duration is None and max_signed_tokens_ttl) or ( - duration is not None - and max_signed_tokens_ttl - and duration > max_signed_tokens_ttl - ): - duration = max_signed_tokens_ttl - if duration: - if time.time() - created > duration: - # Expired - return None - actor = {"id": decoded["a"], "token": "dstok"} - if "_r" in decoded: - actor["_r"] = decoded["_r"] - if duration: - actor["token_expires"] = created + duration - return actor - - -@hookimpl -def skip_csrf(scope): - # Skip CSRF check for requests with content-type: application/json - if scope["type"] == "http": - headers = scope.get("headers") or {} - if dict(headers).get(b"content-type") == b"application/json": - return True diff --git a/datasette/default_permissions/__init__.py b/datasette/default_permissions/__init__.py new file mode 100644 index 00000000..4c82d705 --- /dev/null +++ b/datasette/default_permissions/__init__.py @@ -0,0 +1,59 @@ +""" +Default permission implementations for Datasette. + +This module provides the built-in permission checking logic through implementations +of the permission_resources_sql hook. The hooks are organized by their purpose: + +1. Actor Restrictions - Enforces _r allowlists embedded in actor tokens +2. Root User - Grants full access when --root flag is used +3. Config Rules - Applies permissions from datasette.yaml +4. Default Settings - Enforces default_allow_sql and default view permissions + +IMPORTANT: These hooks return PermissionSQL objects that are combined using SQL +UNION/INTERSECT operations. 
The order of evaluation is: + - restriction_sql fields are INTERSECTed (all must match) + - Regular sql fields are UNIONed and evaluated with cascading priority +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Optional + +if TYPE_CHECKING: + from datasette.app import Datasette + +from datasette import hookimpl + +# Re-export all hooks and public utilities +from .restrictions import ( + actor_restrictions_sql, + restrictions_allow_action, + ActorRestrictions, +) +from .root import root_user_permissions_sql +from .config import config_permissions_sql +from .defaults import ( + default_allow_sql_check, + default_action_permissions_sql, + DEFAULT_ALLOW_ACTIONS, +) +from .tokens import actor_from_signed_api_token + + +@hookimpl +def skip_csrf(scope) -> Optional[bool]: + """Skip CSRF check for JSON content-type requests.""" + if scope["type"] == "http": + headers = scope.get("headers") or {} + if dict(headers).get(b"content-type") == b"application/json": + return True + return None + + +@hookimpl +def canned_queries(datasette: "Datasette", database: str, actor) -> dict: + """Return canned queries defined in datasette.yaml configuration.""" + queries = ( + ((datasette.config or {}).get("databases") or {}).get(database) or {} + ).get("queries") or {} + return queries diff --git a/datasette/default_permissions/config.py b/datasette/default_permissions/config.py new file mode 100644 index 00000000..aab87c1c --- /dev/null +++ b/datasette/default_permissions/config.py @@ -0,0 +1,442 @@ +""" +Config-based permission handling for Datasette. + +Applies permission rules from datasette.yaml configuration. 
+""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, List, Optional, Set, Tuple + +if TYPE_CHECKING: + from datasette.app import Datasette + +from datasette import hookimpl +from datasette.permissions import PermissionSQL +from datasette.utils import actor_matches_allow + +from .helpers import PermissionRowCollector, get_action_name_variants + + +class ConfigPermissionProcessor: + """ + Processes permission rules from datasette.yaml configuration. + + Configuration structure: + + permissions: # Root-level permissions block + view-instance: + id: admin + + databases: + mydb: + permissions: # Database-level permissions + view-database: + id: admin + allow: # Database-level allow block (for view-*) + id: viewer + allow_sql: # execute-sql allow block + id: analyst + tables: + users: + permissions: # Table-level permissions + view-table: + id: admin + allow: # Table-level allow block + id: viewer + queries: + my_query: + permissions: # Query-level permissions + view-query: + id: admin + allow: # Query-level allow block + id: viewer + """ + + def __init__( + self, + datasette: "Datasette", + actor: Optional[dict], + action: str, + ): + self.datasette = datasette + self.actor = actor + self.action = action + self.config = datasette.config or {} + self.collector = PermissionRowCollector(prefix="cfg") + + # Pre-compute action variants + self.action_checks = get_action_name_variants(datasette, action) + self.action_obj = datasette.actions.get(action) + + # Parse restrictions if present + self.has_restrictions = actor and "_r" in actor if actor else False + self.restrictions = actor.get("_r", {}) if actor else {} + + # Pre-compute restriction info for efficiency + self.restricted_databases: Set[str] = set() + self.restricted_tables: Set[Tuple[str, str]] = set() + + if self.has_restrictions: + self.restricted_databases = { + db_name + for db_name, db_actions in (self.restrictions.get("d") or {}).items() + if 
self.action_checks.intersection(db_actions) + } + self.restricted_tables = { + (db_name, table_name) + for db_name, tables in (self.restrictions.get("r") or {}).items() + for table_name, table_actions in tables.items() + if self.action_checks.intersection(table_actions) + } + # Tables implicitly reference their parent databases + self.restricted_databases.update(db for db, _ in self.restricted_tables) + + def evaluate_allow_block(self, allow_block: Any) -> Optional[bool]: + """Evaluate an allow block against the current actor.""" + if allow_block is None: + return None + return actor_matches_allow(self.actor, allow_block) + + def is_in_restriction_allowlist( + self, + parent: Optional[str], + child: Optional[str], + ) -> bool: + """Check if resource is allowed by actor restrictions.""" + if not self.has_restrictions: + return True # No restrictions, all resources allowed + + # Check global allowlist + if self.action_checks.intersection(self.restrictions.get("a", [])): + return True + + # Check database-level allowlist + if parent and self.action_checks.intersection( + self.restrictions.get("d", {}).get(parent, []) + ): + return True + + # Check table-level allowlist + if parent: + table_restrictions = (self.restrictions.get("r", {}) or {}).get(parent, {}) + if child: + table_actions = table_restrictions.get(child, []) + if self.action_checks.intersection(table_actions): + return True + else: + # Parent query should proceed if any child in this database is allowlisted + for table_actions in table_restrictions.values(): + if self.action_checks.intersection(table_actions): + return True + + # Parent/child both None: include if any restrictions exist for this action + if parent is None and child is None: + if self.action_checks.intersection(self.restrictions.get("a", [])): + return True + if self.restricted_databases: + return True + if self.restricted_tables: + return True + + return False + + def add_permissions_rule( + self, + parent: Optional[str], + child: 
Optional[str], + permissions_block: Optional[dict], + scope_desc: str, + ) -> None: + """Add a rule from a permissions:{action} block.""" + if permissions_block is None: + return + + action_allow_block = permissions_block.get(self.action) + result = self.evaluate_allow_block(action_allow_block) + + self.collector.add( + parent=parent, + child=child, + allow=result, + reason=f"config {'allow' if result else 'deny'} {scope_desc}", + if_not_none=True, + ) + + def add_allow_block_rule( + self, + parent: Optional[str], + child: Optional[str], + allow_block: Any, + scope_desc: str, + ) -> None: + """ + Add rules from an allow:{} block. + + For allow blocks, if the block exists but doesn't match the actor, + this is treated as a deny. We also handle the restriction-gate logic. + """ + if allow_block is None: + return + + # Skip if resource is not in restriction allowlist + if not self.is_in_restriction_allowlist(parent, child): + return + + result = self.evaluate_allow_block(allow_block) + bool_result = bool(result) + + self.collector.add( + parent, + child, + bool_result, + f"config {'allow' if result else 'deny'} {scope_desc}", + ) + + # Handle restriction-gate: add explicit denies for restricted resources + self._add_restriction_gate_denies(parent, child, bool_result, scope_desc) + + def _add_restriction_gate_denies( + self, + parent: Optional[str], + child: Optional[str], + is_allowed: bool, + scope_desc: str, + ) -> None: + """ + When a config rule denies at a higher level, add explicit denies + for restricted resources to prevent child-level allows from + incorrectly granting access. 
+ """ + if is_allowed or child is not None or not self.has_restrictions: + return + + if not self.action_obj: + return + + reason = f"config deny {scope_desc} (restriction gate)" + + if parent is None: + # Root-level deny: add denies for all restricted resources + if self.action_obj.takes_parent: + for db_name in self.restricted_databases: + self.collector.add(db_name, None, False, reason) + if self.action_obj.takes_child: + for db_name, table_name in self.restricted_tables: + self.collector.add(db_name, table_name, False, reason) + else: + # Database-level deny: add denies for tables in that database + if self.action_obj.takes_child: + for db_name, table_name in self.restricted_tables: + if db_name == parent: + self.collector.add(db_name, table_name, False, reason) + + def process(self) -> Optional[PermissionSQL]: + """Process all config rules and return combined PermissionSQL.""" + self._process_root_permissions() + self._process_databases() + self._process_root_allow_blocks() + + return self.collector.to_permission_sql() + + def _process_root_permissions(self) -> None: + """Process root-level permissions block.""" + root_perms = self.config.get("permissions") or {} + self.add_permissions_rule( + None, + None, + root_perms, + f"permissions for {self.action}", + ) + + def _process_databases(self) -> None: + """Process database-level and nested configurations.""" + databases = self.config.get("databases") or {} + + for db_name, db_config in databases.items(): + self._process_database(db_name, db_config or {}) + + def _process_database(self, db_name: str, db_config: dict) -> None: + """Process a single database's configuration.""" + # Database-level permissions block + db_perms = db_config.get("permissions") or {} + self.add_permissions_rule( + db_name, + None, + db_perms, + f"permissions for {self.action} on {db_name}", + ) + + # Process tables + for table_name, table_config in (db_config.get("tables") or {}).items(): + self._process_table(db_name, table_name, 
table_config or {}) + + # Process queries + for query_name, query_config in (db_config.get("queries") or {}).items(): + self._process_query(db_name, query_name, query_config) + + # Database-level allow blocks + self._process_database_allow_blocks(db_name, db_config) + + def _process_table( + self, + db_name: str, + table_name: str, + table_config: dict, + ) -> None: + """Process a single table's configuration.""" + # Table-level permissions block + table_perms = table_config.get("permissions") or {} + self.add_permissions_rule( + db_name, + table_name, + table_perms, + f"permissions for {self.action} on {db_name}/{table_name}", + ) + + # Table-level allow block (for view-table) + if self.action == "view-table": + self.add_allow_block_rule( + db_name, + table_name, + table_config.get("allow"), + f"allow for {self.action} on {db_name}/{table_name}", + ) + + def _process_query( + self, + db_name: str, + query_name: str, + query_config: Any, + ) -> None: + """Process a single query's configuration.""" + # Query config can be a string (just SQL) or dict + if not isinstance(query_config, dict): + return + + # Query-level permissions block + query_perms = query_config.get("permissions") or {} + self.add_permissions_rule( + db_name, + query_name, + query_perms, + f"permissions for {self.action} on {db_name}/{query_name}", + ) + + # Query-level allow block (for view-query) + if self.action == "view-query": + self.add_allow_block_rule( + db_name, + query_name, + query_config.get("allow"), + f"allow for {self.action} on {db_name}/{query_name}", + ) + + def _process_database_allow_blocks( + self, + db_name: str, + db_config: dict, + ) -> None: + """Process database-level allow/allow_sql blocks.""" + # view-database allow block + if self.action == "view-database": + self.add_allow_block_rule( + db_name, + None, + db_config.get("allow"), + f"allow for {self.action} on {db_name}", + ) + + # execute-sql allow_sql block + if self.action == "execute-sql": + 
self.add_allow_block_rule( + db_name, + None, + db_config.get("allow_sql"), + f"allow_sql for {db_name}", + ) + + # view-table uses database-level allow for inheritance + if self.action == "view-table": + self.add_allow_block_rule( + db_name, + None, + db_config.get("allow"), + f"allow for {self.action} on {db_name}", + ) + + # view-query uses database-level allow for inheritance + if self.action == "view-query": + self.add_allow_block_rule( + db_name, + None, + db_config.get("allow"), + f"allow for {self.action} on {db_name}", + ) + + def _process_root_allow_blocks(self) -> None: + """Process root-level allow/allow_sql blocks.""" + root_allow = self.config.get("allow") + + if self.action == "view-instance": + self.add_allow_block_rule( + None, + None, + root_allow, + "allow for view-instance", + ) + + if self.action == "view-database": + self.add_allow_block_rule( + None, + None, + root_allow, + "allow for view-database", + ) + + if self.action == "view-table": + self.add_allow_block_rule( + None, + None, + root_allow, + "allow for view-table", + ) + + if self.action == "view-query": + self.add_allow_block_rule( + None, + None, + root_allow, + "allow for view-query", + ) + + if self.action == "execute-sql": + self.add_allow_block_rule( + None, + None, + self.config.get("allow_sql"), + "allow_sql", + ) + + +@hookimpl(specname="permission_resources_sql") +async def config_permissions_sql( + datasette: "Datasette", + actor: Optional[dict], + action: str, +) -> Optional[List[PermissionSQL]]: + """ + Apply permission rules from datasette.yaml configuration. 
+ + This processes: + - permissions: blocks at root, database, table, and query levels + - allow: blocks for view-* actions + - allow_sql: blocks for execute-sql action + """ + processor = ConfigPermissionProcessor(datasette, actor, action) + result = processor.process() + + if result is None: + return [] + + return [result] diff --git a/datasette/default_permissions/defaults.py b/datasette/default_permissions/defaults.py new file mode 100644 index 00000000..f5a6a270 --- /dev/null +++ b/datasette/default_permissions/defaults.py @@ -0,0 +1,70 @@ +""" +Default permission settings for Datasette. + +Provides default allow rules for standard view/execute actions. +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Optional + +if TYPE_CHECKING: + from datasette.app import Datasette + +from datasette import hookimpl +from datasette.permissions import PermissionSQL + + +# Actions that are allowed by default (unless --default-deny is used) +DEFAULT_ALLOW_ACTIONS = frozenset( + { + "view-instance", + "view-database", + "view-database-download", + "view-table", + "view-query", + "execute-sql", + } +) + + +@hookimpl(specname="permission_resources_sql") +async def default_allow_sql_check( + datasette: "Datasette", + actor: Optional[dict], + action: str, +) -> Optional[PermissionSQL]: + """ + Enforce the default_allow_sql setting. + + When default_allow_sql is false (the default), execute-sql is denied + unless explicitly allowed by config or other rules. + """ + if action == "execute-sql": + if not datasette.setting("default_allow_sql"): + return PermissionSQL.deny(reason="default_allow_sql is false") + + return None + + +@hookimpl(specname="permission_resources_sql") +async def default_action_permissions_sql( + datasette: "Datasette", + actor: Optional[dict], + action: str, +) -> Optional[PermissionSQL]: + """ + Provide default allow rules for standard view/execute actions. 
+ + These defaults are skipped when datasette is started with --default-deny. + The restriction_sql mechanism (from actor_restrictions_sql) will still + filter these results if the actor has restrictions. + """ + if datasette.default_deny: + return None + + if action in DEFAULT_ALLOW_ACTIONS: + reason = f"default allow for {action}".replace("'", "''") + return PermissionSQL.allow(reason=reason) + + return None diff --git a/datasette/default_permissions/helpers.py b/datasette/default_permissions/helpers.py new file mode 100644 index 00000000..47e03569 --- /dev/null +++ b/datasette/default_permissions/helpers.py @@ -0,0 +1,85 @@ +""" +Shared helper utilities for default permission implementations. +""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import TYPE_CHECKING, List, Optional, Set + +if TYPE_CHECKING: + from datasette.app import Datasette + +from datasette.permissions import PermissionSQL + + +def get_action_name_variants(datasette: "Datasette", action: str) -> Set[str]: + """ + Get all name variants for an action (full name and abbreviation). 
+ + Example: + get_action_name_variants(ds, "view-table") -> {"view-table", "vt"} + """ + variants = {action} + action_obj = datasette.actions.get(action) + if action_obj and action_obj.abbr: + variants.add(action_obj.abbr) + return variants + + +def action_in_list(datasette: "Datasette", action: str, action_list: list) -> bool: + """Check if an action (or its abbreviation) is in a list.""" + return bool(get_action_name_variants(datasette, action).intersection(action_list)) + + +@dataclass +class PermissionRow: + """A single permission rule row.""" + + parent: Optional[str] + child: Optional[str] + allow: bool + reason: str + + +class PermissionRowCollector: + """Collects permission rows and converts them to PermissionSQL.""" + + def __init__(self, prefix: str = "row"): + self.rows: List[PermissionRow] = [] + self.prefix = prefix + + def add( + self, + parent: Optional[str], + child: Optional[str], + allow: Optional[bool], + reason: str, + if_not_none: bool = False, + ) -> None: + """Add a permission row. 
If if_not_none=True, only add if allow is not None.""" + if if_not_none and allow is None: + return + self.rows.append(PermissionRow(parent, child, allow, reason)) + + def to_permission_sql(self) -> Optional[PermissionSQL]: + """Convert collected rows to a PermissionSQL object.""" + if not self.rows: + return None + + parts = [] + params = {} + + for idx, row in enumerate(self.rows): + key = f"{self.prefix}_{idx}" + parts.append( + f"SELECT :{key}_parent AS parent, :{key}_child AS child, " + f":{key}_allow AS allow, :{key}_reason AS reason" + ) + params[f"{key}_parent"] = row.parent + params[f"{key}_child"] = row.child + params[f"{key}_allow"] = 1 if row.allow else 0 + params[f"{key}_reason"] = row.reason + + sql = "\nUNION ALL\n".join(parts) + return PermissionSQL(sql=sql, params=params) diff --git a/datasette/default_permissions/restrictions.py b/datasette/default_permissions/restrictions.py new file mode 100644 index 00000000..a22cd7e5 --- /dev/null +++ b/datasette/default_permissions/restrictions.py @@ -0,0 +1,195 @@ +""" +Actor restriction handling for Datasette permissions. + +This module handles the _r (restrictions) key in actor dictionaries, which +contains allowlists of resources the actor can access. +""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import TYPE_CHECKING, List, Optional, Set, Tuple + +if TYPE_CHECKING: + from datasette.app import Datasette + +from datasette import hookimpl +from datasette.permissions import PermissionSQL + +from .helpers import action_in_list, get_action_name_variants + + +@dataclass +class ActorRestrictions: + """Parsed actor restrictions from the _r key.""" + + global_actions: List[str] # _r.a - globally allowed actions + database_actions: dict # _r.d - {db_name: [actions]} + table_actions: dict # _r.r - {db_name: {table: [actions]}} + + @classmethod + def from_actor(cls, actor: Optional[dict]) -> Optional["ActorRestrictions"]: + """Parse restrictions from actor dict. 
Returns None if no restrictions.""" + if not actor: + return None + assert isinstance(actor, dict), "actor must be a dictionary" + + restrictions = actor.get("_r") + if restrictions is None: + return None + + return cls( + global_actions=restrictions.get("a", []), + database_actions=restrictions.get("d", {}), + table_actions=restrictions.get("r", {}), + ) + + def is_action_globally_allowed(self, datasette: "Datasette", action: str) -> bool: + """Check if action is in the global allowlist.""" + return action_in_list(datasette, action, self.global_actions) + + def get_allowed_databases(self, datasette: "Datasette", action: str) -> Set[str]: + """Get database names where this action is allowed.""" + allowed = set() + for db_name, db_actions in self.database_actions.items(): + if action_in_list(datasette, action, db_actions): + allowed.add(db_name) + return allowed + + def get_allowed_tables( + self, datasette: "Datasette", action: str + ) -> Set[Tuple[str, str]]: + """Get (database, table) pairs where this action is allowed.""" + allowed = set() + for db_name, tables in self.table_actions.items(): + for table_name, table_actions in tables.items(): + if action_in_list(datasette, action, table_actions): + allowed.add((db_name, table_name)) + return allowed + + +@hookimpl(specname="permission_resources_sql") +async def actor_restrictions_sql( + datasette: "Datasette", + actor: Optional[dict], + action: str, +) -> Optional[List[PermissionSQL]]: + """ + Handle actor restriction-based permission rules. + + When an actor has an "_r" key, it contains an allowlist of resources they + can access. This function returns restriction_sql that filters the final + results to only include resources in that allowlist. 
+ + The _r structure: + { + "a": ["vi", "pd"], # Global actions allowed + "d": {"mydb": ["vt", "es"]}, # Database-level actions + "r": {"mydb": {"users": ["vt"]}} # Table-level actions + } + """ + if not actor: + return None + + restrictions = ActorRestrictions.from_actor(actor) + + if restrictions is None: + # No restrictions - all resources allowed + return [] + + # If globally allowed, no filtering needed + if restrictions.is_action_globally_allowed(datasette, action): + return [] + + # Build restriction SQL + allowed_dbs = restrictions.get_allowed_databases(datasette, action) + allowed_tables = restrictions.get_allowed_tables(datasette, action) + + # If nothing is allowed for this action, return empty-set restriction + if not allowed_dbs and not allowed_tables: + return [ + PermissionSQL( + params={"deny": f"actor restrictions: {action} not in allowlist"}, + restriction_sql="SELECT NULL AS parent, NULL AS child WHERE 0", + ) + ] + + # Build UNION of allowed resources + selects = [] + params = {} + counter = 0 + + # Database-level entries (parent, NULL) - allows all children + for db_name in allowed_dbs: + key = f"restr_{counter}" + counter += 1 + selects.append(f"SELECT :{key}_parent AS parent, NULL AS child") + params[f"{key}_parent"] = db_name + + # Table-level entries (parent, child) + for db_name, table_name in allowed_tables: + key = f"restr_{counter}" + counter += 1 + selects.append(f"SELECT :{key}_parent AS parent, :{key}_child AS child") + params[f"{key}_parent"] = db_name + params[f"{key}_child"] = table_name + + restriction_sql = "\nUNION ALL\n".join(selects) + + return [PermissionSQL(params=params, restriction_sql=restriction_sql)] + + +def restrictions_allow_action( + datasette: "Datasette", + restrictions: dict, + action: str, + resource: Optional[str | Tuple[str, str]], +) -> bool: + """ + Check if restrictions allow the requested action on the requested resource. 
+ + This is a synchronous utility function for use by other code that needs + to quickly check restriction allowlists. + + Args: + datasette: The Datasette instance + restrictions: The _r dict from an actor + action: The action name to check + resource: None for global, str for database, (db, table) tuple for table + + Returns: + True if allowed, False if denied + """ + # Does this action have an abbreviation? + to_check = get_action_name_variants(datasette, action) + + # Check global level (any resource) + all_allowed = restrictions.get("a") + if all_allowed is not None: + assert isinstance(all_allowed, list) + if to_check.intersection(all_allowed): + return True + + # Check database level + if resource: + if isinstance(resource, str): + database_name = resource + else: + database_name = resource[0] + database_allowed = restrictions.get("d", {}).get(database_name) + if database_allowed is not None: + assert isinstance(database_allowed, list) + if to_check.intersection(database_allowed): + return True + + # Check table/resource level + if resource is not None and not isinstance(resource, str) and len(resource) == 2: + database, table = resource + table_allowed = restrictions.get("r", {}).get(database, {}).get(table) + if table_allowed is not None: + assert isinstance(table_allowed, list) + if to_check.intersection(table_allowed): + return True + + # This action is not explicitly allowed, so reject it + return False diff --git a/datasette/default_permissions/root.py b/datasette/default_permissions/root.py new file mode 100644 index 00000000..4931f7ff --- /dev/null +++ b/datasette/default_permissions/root.py @@ -0,0 +1,29 @@ +""" +Root user permission handling for Datasette. + +Grants full permissions to the root user when --root flag is used. 
+""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Optional + +if TYPE_CHECKING: + from datasette.app import Datasette + +from datasette import hookimpl +from datasette.permissions import PermissionSQL + + +@hookimpl(specname="permission_resources_sql") +async def root_user_permissions_sql( + datasette: "Datasette", + actor: Optional[dict], +) -> Optional[PermissionSQL]: + """ + Grant root user full permissions when --root flag is used. + """ + if not datasette.root_enabled: + return None + if actor is not None and actor.get("id") == "root": + return PermissionSQL.allow(reason="root user") diff --git a/datasette/default_permissions/tokens.py b/datasette/default_permissions/tokens.py new file mode 100644 index 00000000..474b0c23 --- /dev/null +++ b/datasette/default_permissions/tokens.py @@ -0,0 +1,95 @@ +""" +Token authentication for Datasette. + +Handles signed API tokens (dstok_ prefix). +""" + +from __future__ import annotations + +import time +from typing import TYPE_CHECKING, Optional + +if TYPE_CHECKING: + from datasette.app import Datasette + +import itsdangerous + +from datasette import hookimpl + + +@hookimpl(specname="actor_from_request") +def actor_from_signed_api_token(datasette: "Datasette", request) -> Optional[dict]: + """ + Authenticate requests using signed API tokens (dstok_ prefix). 
+ + Token structure (signed JSON): + { + "a": "actor_id", # Actor ID + "t": 1234567890, # Timestamp (Unix epoch) + "d": 3600, # Optional: Duration in seconds + "_r": {...} # Optional: Restrictions + } + """ + prefix = "dstok_" + + # Check if tokens are enabled + if not datasette.setting("allow_signed_tokens"): + return None + + max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl") + + # Get authorization header + authorization = request.headers.get("authorization") + if not authorization: + return None + if not authorization.startswith("Bearer "): + return None + + token = authorization[len("Bearer ") :] + if not token.startswith(prefix): + return None + + # Remove prefix and verify signature + token = token[len(prefix) :] + try: + decoded = datasette.unsign(token, namespace="token") + except itsdangerous.BadSignature: + return None + + # Validate timestamp + if "t" not in decoded: + return None + created = decoded["t"] + if not isinstance(created, int): + return None + + # Handle duration/expiry + duration = decoded.get("d") + if duration is not None and not isinstance(duration, int): + return None + + # Apply max TTL if configured + if (duration is None and max_signed_tokens_ttl) or ( + duration is not None + and max_signed_tokens_ttl + and duration > max_signed_tokens_ttl + ): + duration = max_signed_tokens_ttl + + # Check expiry + if duration: + if time.time() - created > duration: + return None + + # Build actor dict + actor = {"id": decoded["a"], "token": "dstok"} + + # Copy restrictions if present + if "_r" in decoded: + actor["_r"] = decoded["_r"] + + # Add expiry timestamp if applicable + if duration: + actor["token_expires"] = created + duration + + return actor diff --git a/datasette/events.py b/datasette/events.py index ae90972d..5cd5ba3d 100644 --- a/datasette/events.py +++ b/datasette/events.py @@ -2,7 +2,6 @@ from abc import ABC, abstractproperty from dataclasses import asdict, dataclass, field from datasette.hookspecs import hookimpl 
from datetime import datetime, timezone -from typing import Optional @dataclass @@ -14,7 +13,7 @@ class Event(ABC): created: datetime = field( init=False, default_factory=lambda: datetime.now(timezone.utc) ) - actor: Optional[dict] + actor: dict | None def properties(self): properties = asdict(self) @@ -63,7 +62,7 @@ class CreateTokenEvent(Event): """ name = "create-token" - expires_after: Optional[int] + expires_after: int | None restrict_all: list restrict_database: dict restrict_resource: dict diff --git a/datasette/filters.py b/datasette/filters.py index 67d4170b..95cc5f37 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -1,8 +1,8 @@ from datasette import hookimpl +from datasette.resources import DatabaseResource from datasette.views.base import DatasetteError from datasette.utils.asgi import BadRequest import json -import numbers from .utils import detect_json1, escape_sqlite, path_with_removed_args @@ -13,11 +13,10 @@ def where_filters(request, database, datasette): where_clauses = [] extra_wheres_for_ui = [] if "_where" in request.args: - if not await datasette.permission_allowed( - request.actor, - "execute-sql", - resource=database, - default=True, + if not await datasette.allowed( + action="execute-sql", + resource=DatabaseResource(database=database), + actor=request.actor, ): raise DatasetteError("_where= is not allowed", status=403) else: diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index eedb2481..3f6a1425 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -70,8 +70,8 @@ def register_facet_classes(): @hookspec -def register_permissions(datasette): - """Register permissions: returns a list of datasette.permission.Permission named tuples""" +def register_actions(datasette): + """Register actions: returns a list of datasette.permission.Action objects""" @hookspec @@ -110,17 +110,12 @@ def filters_from_request(request, database, table, datasette): ) based on the request""" -@hookspec -def 
permission_allowed(datasette, actor, action, resource): - """Check if actor is allowed to perform this action - return True, False or None""" - - @hookspec def permission_resources_sql(datasette, actor, action): """Return SQL query fragments for permission checks on resources. - Returns None, a PluginSQL object, or a list of PluginSQL objects. - Each PluginSQL contains SQL that should return rows with columns: + Returns None, a PermissionSQL object, or a list of PermissionSQL objects. + Each PermissionSQL contains SQL that should return rows with columns: parent (str|None), child (str|None), allow (int), reason (str). Used to efficiently check permissions across multiple resources at once. diff --git a/datasette/permissions.py b/datasette/permissions.py index bd42158e..c48293ac 100644 --- a/datasette/permissions.py +++ b/datasette/permissions.py @@ -1,12 +1,206 @@ +from abc import ABC, abstractmethod from dataclasses import dataclass -from typing import Optional +from typing import Any, NamedTuple +import contextvars + + +# Context variable to track when permission checks should be skipped +_skip_permission_checks = contextvars.ContextVar( + "skip_permission_checks", default=False +) + + +class SkipPermissions: + """Context manager to temporarily skip permission checks. + + This is not a stable API and may change in future releases. + + Usage: + with SkipPermissions(): + # Permission checks are skipped within this block + response = await datasette.client.get("/protected") + """ + + def __enter__(self): + self.token = _skip_permission_checks.set(True) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + _skip_permission_checks.reset(self.token) + return False + + +class Resource(ABC): + """ + Base class for all resource types. + + Each subclass represents a type of resource (e.g., TableResource, DatabaseResource). + The class itself carries metadata about the resource type. + Instances represent specific resources. 
+ """ + + # Class-level metadata (subclasses must define these) + name: str = None # e.g., "table", "database", "model" + parent_class: type["Resource"] | None = None # e.g., DatabaseResource for tables + + # Instance-level optional extra attributes + reasons: list[str] | None = None + include_reasons: bool | None = None + + def __init__(self, parent: str | None = None, child: str | None = None): + """ + Create a resource instance. + + Args: + parent: The parent identifier (meaning depends on resource type) + child: The child identifier (meaning depends on resource type) + """ + self.parent = parent + self.child = child + self._private = None # Sentinel to track if private was set + + @property + def private(self) -> bool: + """ + Whether this resource is private (accessible to actor but not anonymous). + + This property is only available on Resource objects returned from + allowed_resources() when include_is_private=True is used. + + Raises: + AttributeError: If accessed without calling include_is_private=True + """ + if self._private is None: + raise AttributeError( + "The 'private' attribute is only available when using " + "allowed_resources(..., include_is_private=True)" + ) + return self._private + + @private.setter + def private(self, value: bool): + self._private = value + + @classmethod + def __init_subclass__(cls): + """ + Validate resource hierarchy doesn't exceed 2 levels. 
+ + Raises: + ValueError: If this resource would create a 3-level hierarchy + """ + super().__init_subclass__() + + if cls.parent_class is None: + return # Top of hierarchy, nothing to validate + + # Check if our parent has a parent - that would create 3 levels + if cls.parent_class.parent_class is not None: + # We have a parent, and that parent has a parent + # This creates a 3-level hierarchy, which is not allowed + raise ValueError( + f"Resource {cls.__name__} creates a 3-level hierarchy: " + f"{cls.parent_class.parent_class.__name__} -> {cls.parent_class.__name__} -> {cls.__name__}. " + f"Maximum 2 levels allowed (parent -> child)." + ) + + @classmethod + @abstractmethod + def resources_sql(cls) -> str: + """ + Return SQL query that returns all resources of this type. + + Must return two columns: parent, child + """ + pass + + +class AllowedResource(NamedTuple): + """A resource with the reason it was allowed (for debugging).""" + + resource: Resource + reason: str + + +@dataclass(frozen=True, kw_only=True) +class Action: + name: str + description: str | None + abbr: str | None = None + resource_class: type[Resource] | None = None + also_requires: str | None = None # Optional action name that must also be allowed + + @property + def takes_parent(self) -> bool: + """ + Whether this action requires a parent identifier when instantiating its resource. + + Returns False for global-only actions (no resource_class). + Returns True for all actions with a resource_class (all resources require a parent identifier). + """ + return self.resource_class is not None + + @property + def takes_child(self) -> bool: + """ + Whether this action requires a child identifier when instantiating its resource. + + Returns False for global actions (no resource_class). + Returns False for parent-level resources (DatabaseResource - parent_class is None). + Returns True for child-level resources (TableResource, QueryResource - have a parent_class). 
+ """ + if self.resource_class is None: + return False + return self.resource_class.parent_class is not None + + +_reason_id = 1 +@dataclass +class PermissionSQL: + """ + A plugin contributes SQL that yields: + parent TEXT NULL, + child TEXT NULL, + allow INTEGER, -- 1 allow, 0 deny + reason TEXT + + For restriction-only plugins, sql can be None and only restriction_sql is provided. + """ + + sql: str | None = ( + None # SQL that SELECTs the 4 columns above (can be None for restriction-only) + ) + params: dict[str, Any] | None = ( + None # bound params for the SQL (values only; no ':' prefix) + ) + source: str | None = None # System will set this to the plugin name + restriction_sql: str | None = ( + None # Optional SQL that returns (parent, child) for restriction filtering + ) + + @classmethod + def allow(cls, reason: str, _allow: bool = True) -> "PermissionSQL": + global _reason_id + i = _reason_id + _reason_id += 1 + return cls( + sql=f"SELECT NULL AS parent, NULL AS child, {1 if _allow else 0} AS allow, :reason_{i} AS reason", + params={f"reason_{i}": reason}, + ) + + @classmethod + def deny(cls, reason: str) -> "PermissionSQL": + return cls.allow(reason=reason, _allow=False) + + +# This is obsolete, replaced by Action and ResourceType @dataclass class Permission: name: str - abbr: Optional[str] - description: Optional[str] + abbr: str | None + description: str | None takes_database: bool takes_resource: bool default: bool diff --git a/datasette/plugins.py b/datasette/plugins.py index 3769a209..e9818885 100644 --- a/datasette/plugins.py +++ b/datasette/plugins.py @@ -23,6 +23,7 @@ DEFAULT_PLUGINS = ( "datasette.sql_functions", "datasette.actor_auth_cookie", "datasette.default_permissions", + "datasette.default_actions", "datasette.default_magic_parameters", "datasette.blob_renderer", "datasette.default_menu_links", @@ -49,7 +50,7 @@ def after(outcome, hook_name, hook_impls, kwargs): results = outcome.get_result() if not isinstance(results, list): results = 
[results] - print(f"Results:", file=sys.stderr) + print("Results:", file=sys.stderr) pprint(results, width=40, indent=4, stream=sys.stderr) @@ -93,21 +94,24 @@ def get_plugins(): for plugin in pm.get_plugins(): static_path = None templates_path = None - if plugin.__name__ not in DEFAULT_PLUGINS: + plugin_name = ( + plugin.__name__ + if hasattr(plugin, "__name__") + else plugin.__class__.__name__ + ) + if plugin_name not in DEFAULT_PLUGINS: try: - if (importlib_resources.files(plugin.__name__) / "static").is_dir(): - static_path = str( - importlib_resources.files(plugin.__name__) / "static" - ) - if (importlib_resources.files(plugin.__name__) / "templates").is_dir(): + if (importlib_resources.files(plugin_name) / "static").is_dir(): + static_path = str(importlib_resources.files(plugin_name) / "static") + if (importlib_resources.files(plugin_name) / "templates").is_dir(): templates_path = str( - importlib_resources.files(plugin.__name__) / "templates" + importlib_resources.files(plugin_name) / "templates" ) except (TypeError, ModuleNotFoundError): # Caused by --plugins_dir= plugins pass plugin_info = { - "name": plugin.__name__, + "name": plugin_name, "static_path": static_path, "templates_path": templates_path, "hooks": [h.name for h in pm.get_hookcallers(plugin)], diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 760ff0d1..63d22fe8 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -3,7 +3,7 @@ import click import json import os import re -from subprocess import check_call, check_output +from subprocess import CalledProcessError, check_call, check_output from .common import ( add_common_publish_arguments_and_options, @@ -23,7 +23,9 @@ def publish_subcommand(publish): help="Application name to use when building", ) @click.option( - "--service", default="", help="Cloud Run service to deploy (or over-write)" + "--service", + default="", + help="Cloud Run service to deploy (or over-write)", ) 
@click.option("--spatialite", is_flag=True, help="Enable SpatialLite extension") @click.option( @@ -55,13 +57,32 @@ def publish_subcommand(publish): @click.option( "--max-instances", type=int, - help="Maximum Cloud Run instances", + default=1, + show_default=True, + help="Maximum Cloud Run instances (use 0 to remove the limit)", ) @click.option( "--min-instances", type=int, help="Minimum Cloud Run instances", ) + @click.option( + "--artifact-repository", + default="datasette", + show_default=True, + help="Artifact Registry repository to store the image", + ) + @click.option( + "--artifact-region", + default="us", + show_default=True, + help="Artifact Registry location (region or multi-region)", + ) + @click.option( + "--artifact-project", + default=None, + help="Project ID for Artifact Registry (defaults to the active project)", + ) def cloudrun( files, metadata, @@ -91,6 +112,9 @@ def publish_subcommand(publish): apt_get_extras, max_instances, min_instances, + artifact_repository, + artifact_region, + artifact_project, ): "Publish databases to Datasette running on Cloud Run" fail_if_publish_binary_not_installed( @@ -100,6 +124,21 @@ def publish_subcommand(publish): "gcloud config get-value project", shell=True, universal_newlines=True ).strip() + artifact_project = artifact_project or project + + # Ensure Artifact Registry exists for the target image + _ensure_artifact_registry( + artifact_project=artifact_project, + artifact_region=artifact_region, + artifact_repository=artifact_repository, + ) + + artifact_host = ( + artifact_region + if artifact_region.endswith("-docker.pkg.dev") + else f"{artifact_region}-docker.pkg.dev" + ) + if not service: # Show the user their current services, then prompt for one click.echo("Please provide a service name for this deployment\n") @@ -117,6 +156,11 @@ def publish_subcommand(publish): click.echo("") service = click.prompt("Service name", type=str) + image_id = ( + f"{artifact_host}/{artifact_project}/" + 
f"{artifact_repository}/datasette-{service}" + ) + extra_metadata = { "title": title, "license": license, @@ -173,7 +217,6 @@ def publish_subcommand(publish): print(fp.read()) print("\n====================\n") - image_id = f"gcr.io/{project}/datasette-{service}" check_call( "gcloud builds submit --tag {}{}".format( image_id, " --timeout {}".format(timeout) if timeout else "" @@ -187,7 +230,7 @@ def publish_subcommand(publish): ("--max-instances", max_instances), ("--min-instances", min_instances), ): - if value: + if value is not None: extra_deploy_options.append("{} {}".format(option, value)) check_call( "gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}".format( @@ -199,6 +242,52 @@ def publish_subcommand(publish): ) +def _ensure_artifact_registry(artifact_project, artifact_region, artifact_repository): + """Ensure Artifact Registry API is enabled and the repository exists.""" + + enable_cmd = ( + "gcloud services enable artifactregistry.googleapis.com " + f"--project {artifact_project} --quiet" + ) + try: + check_call(enable_cmd, shell=True) + except CalledProcessError as exc: + raise click.ClickException( + "Failed to enable artifactregistry.googleapis.com. " + "Please ensure you have permissions to manage services." 
+ ) from exc + + describe_cmd = ( + "gcloud artifacts repositories describe {repo} --project {project} " + "--location {location} --quiet" + ).format( + repo=artifact_repository, + project=artifact_project, + location=artifact_region, + ) + try: + check_call(describe_cmd, shell=True) + return + except CalledProcessError: + create_cmd = ( + "gcloud artifacts repositories create {repo} --repository-format=docker " + '--location {location} --project {project} --description "Datasette Cloud Run images" --quiet' + ).format( + repo=artifact_repository, + location=artifact_region, + project=artifact_project, + ) + try: + check_call(create_cmd, shell=True) + click.echo(f"Created Artifact Registry repository '{artifact_repository}'") + except CalledProcessError as exc: + raise click.ClickException( + "Failed to create Artifact Registry repository. " + "Use --artifact-repository/--artifact-region to point to an existing repo " + "or create one manually." + ) from exc + + def get_existing_services(): services = json.loads( check_output( @@ -214,6 +303,7 @@ def get_existing_services(): "url": service["status"]["address"]["url"], } for service in services + if "url" in service["status"] ] diff --git a/datasette/renderer.py b/datasette/renderer.py index 483c81e9..acf23e59 100644 --- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -20,7 +20,7 @@ def convert_specific_columns_to_json(rows, columns, json_cols): if column in json_cols: try: value = json.loads(value) - except (TypeError, ValueError) as e: + except (TypeError, ValueError): pass new_row.append(value) new_rows.append(new_row) diff --git a/datasette/resources.py b/datasette/resources.py new file mode 100644 index 00000000..641afb2f --- /dev/null +++ b/datasette/resources.py @@ -0,0 +1,90 @@ +"""Core resource types for Datasette's permission system.""" + +from datasette.permissions import Resource + + +class DatabaseResource(Resource): + """A database in Datasette.""" + + name = "database" + parent_class = None # 
Top of the resource hierarchy + + def __init__(self, database: str): + super().__init__(parent=database, child=None) + + @classmethod + async def resources_sql(cls, datasette) -> str: + return """ + SELECT database_name AS parent, NULL AS child + FROM catalog_databases + """ + + +class TableResource(Resource): + """A table in a database.""" + + name = "table" + parent_class = DatabaseResource + + def __init__(self, database: str, table: str): + super().__init__(parent=database, child=table) + + @classmethod + async def resources_sql(cls, datasette) -> str: + return """ + SELECT database_name AS parent, table_name AS child + FROM catalog_tables + UNION ALL + SELECT database_name AS parent, view_name AS child + FROM catalog_views + """ + + +class QueryResource(Resource): + """A canned query in a database.""" + + name = "query" + parent_class = DatabaseResource + + def __init__(self, database: str, query: str): + super().__init__(parent=database, child=query) + + @classmethod + async def resources_sql(cls, datasette) -> str: + from datasette.plugins import pm + from datasette.utils import await_me_maybe + + # Get all databases from catalog + db = datasette.get_internal_database() + result = await db.execute("SELECT database_name FROM catalog_databases") + databases = [row[0] for row in result.rows] + + # Gather all canned queries from all databases + query_pairs = [] + for database_name in databases: + # Call the hook to get queries (including from config via default plugin) + for queries_result in pm.hook.canned_queries( + datasette=datasette, + database=database_name, + actor=None, # Get ALL queries for resource enumeration + ): + queries = await await_me_maybe(queries_result) + if queries: + for query_name in queries.keys(): + query_pairs.append((database_name, query_name)) + + # Build SQL + if not query_pairs: + return "SELECT NULL AS parent, NULL AS child WHERE 0" + + # Generate UNION ALL query + selects = [] + for db_name, query_name in query_pairs: + # Escape 
single quotes by doubling them + db_escaped = db_name.replace("'", "''") + query_escaped = query_name.replace("'", "''") + selects.append( + f"SELECT '{db_escaped}' AS parent, '{query_escaped}' AS child" + ) + + return " UNION ALL ".join(selects) diff --git a/datasette/static/datasette-manager.js b/datasette/static/datasette-manager.js index 10716cc5..d2347ab3 100644 --- a/datasette/static/datasette-manager.js +++ b/datasette/static/datasette-manager.js @@ -93,12 +93,12 @@ const datasetteManager = { */ renderAboveTablePanel: () => { const aboveTablePanel = document.querySelector( - DOM_SELECTORS.aboveTablePanel + DOM_SELECTORS.aboveTablePanel, ); if (!aboveTablePanel) { console.warn( - "This page does not have a table, the renderAboveTablePanel cannot be used." + "This page does not have a table, the renderAboveTablePanel cannot be used.", ); return; } diff --git a/datasette/static/json-format-highlight-1.0.1.js b/datasette/static/json-format-highlight-1.0.1.js index d83b8186..0e6e2c29 100644 --- a/datasette/static/json-format-highlight-1.0.1.js +++ b/datasette/static/json-format-highlight-1.0.1.js @@ -7,8 +7,8 @@ MIT Licensed typeof exports === "object" && typeof module !== "undefined" ? (module.exports = factory()) : typeof define === "function" && define.amd - ? define(factory) - : (global.jsonFormatHighlight = factory()); + ? define(factory) + : (global.jsonFormatHighlight = factory()); })(this, function () { "use strict"; @@ -42,13 +42,13 @@ MIT Licensed color = /true/.test(match) ? colors.trueColor : /false/.test(match) - ? colors.falseColor - : /null/.test(match) - ? colors.nullColor - : color; + ? colors.falseColor + : /null/.test(match) + ? 
colors.nullColor + : color; } return '' + match + ""; - } + }, ); } diff --git a/datasette/static/navigation-search.js b/datasette/static/navigation-search.js index 202839d5..48de5c4f 100644 --- a/datasette/static/navigation-search.js +++ b/datasette/static/navigation-search.js @@ -1,17 +1,17 @@ class NavigationSearch extends HTMLElement { - constructor() { - super(); - this.attachShadow({ mode: 'open' }); - this.selectedIndex = -1; - this.matches = []; - this.debounceTimer = null; - - this.render(); - this.setupEventListeners(); - } + constructor() { + super(); + this.attachShadow({ mode: "open" }); + this.selectedIndex = -1; + this.matches = []; + this.debounceTimer = null; - render() { - this.shadowRoot.innerHTML = ` + this.render(); + this.setupEventListeners(); + } + + render() { + this.shadowRoot.innerHTML = ` + + +{% endif %} diff --git a/datasette/templates/allow_debug.html b/datasette/templates/allow_debug.html index 610417d2..1ecc92df 100644 --- a/datasette/templates/allow_debug.html +++ b/datasette/templates/allow_debug.html @@ -33,6 +33,9 @@ p.message-warning {

Debug allow rules

+{% set current_tab = "allow_debug" %} +{% include "_permissions_debug_tabs.html" %} +

Use this tool to try out different actor and allow combinations. See Defining permissions with "allow" blocks for documentation.

diff --git a/datasette/templates/create_token.html b/datasette/templates/create_token.html index 409fb8a9..ad7c71b6 100644 --- a/datasette/templates/create_token.html +++ b/datasette/templates/create_token.html @@ -57,7 +57,7 @@ Restrict actions that can be performed using this token

All databases and tables

    - {% for permission in all_permissions %} + {% for permission in all_actions %}
  • {% endfor %}
@@ -65,7 +65,7 @@ {% for database in database_with_tables %}

All tables in "{{ database.name }}"

    - {% for permission in database_permissions %} + {% for permission in database_actions %}
  • {% endfor %}
@@ -75,7 +75,7 @@ {% for table in database.tables %}

{{ database.name }}: {{ table.name }}

    - {% for permission in resource_permissions %} + {% for permission in child_actions %}
  • {% endfor %}
diff --git a/datasette/templates/database.html b/datasette/templates/database.html index 66f288dc..42b4ca0b 100644 --- a/datasette/templates/database.html +++ b/datasette/templates/database.html @@ -56,7 +56,7 @@ {% endif %} {% if tables %} -

Tables

+

Tables schema

{% endif %} {% for table in tables %} diff --git a/datasette/templates/debug_actions.html b/datasette/templates/debug_actions.html new file mode 100644 index 00000000..0ef7b329 --- /dev/null +++ b/datasette/templates/debug_actions.html @@ -0,0 +1,43 @@ +{% extends "base.html" %} + +{% block title %}Registered Actions{% endblock %} + +{% block content %} +

Registered actions

+ +{% set current_tab = "actions" %} +{% include "_permissions_debug_tabs.html" %} + +

+ This Datasette instance has registered {{ data|length }} action{{ data|length != 1 and "s" or "" }}. + Actions are used by the permission system to control access to different features. +

+ +
+ + + + + + + + + + + + + {% for action in data %} + + + + + + + + + + {% endfor %} + +
NameAbbrDescriptionResourceTakes ParentTakes ChildAlso Requires
{{ action.name }}{% if action.abbr %}{{ action.abbr }}{% endif %}{{ action.description or "" }}{% if action.resource_class %}{{ action.resource_class }}{% endif %}{% if action.takes_parent %}✓{% endif %}{% if action.takes_child %}✓{% endif %}{% if action.also_requires %}{{ action.also_requires }}{% endif %}
+ +{% endblock %} diff --git a/datasette/templates/debug_allowed.html b/datasette/templates/debug_allowed.html index 031ff07d..add3154a 100644 --- a/datasette/templates/debug_allowed.html +++ b/datasette/templates/debug_allowed.html @@ -9,8 +9,10 @@ {% endblock %} {% block content %} +

Allowed resources

-

Allowed Resources

+{% set current_tab = "allowed" %} +{% include "_permissions_debug_tabs.html" %}

Use this tool to check which resources the current actor is allowed to access for a given permission action. It queries the /-/allowed.json API endpoint.

@@ -21,13 +23,13 @@ {% endif %}
- +
Only certain actions are supported by this endpoint @@ -42,7 +44,7 @@
- Filter results to a specific child resource (requires parent) + Filter results to a specific child resource (requires parent to be set)
@@ -80,23 +82,7 @@ const resultsContent = document.getElementById('results-content'); const resultsCount = document.getElementById('results-count'); const pagination = document.getElementById('pagination'); const submitBtn = document.getElementById('submit-btn'); -let currentData = null; - -form.addEventListener('submit', async (ev) => { - ev.preventDefault(); - updateURL('allowed-form', 1); - await fetchResults(1, false); -}); - -// Handle browser back/forward -window.addEventListener('popstate', () => { - const params = populateFormFromURL(); - const action = params.get('action'); - const page = params.get('page'); - if (action) { - fetchResults(page ? parseInt(page) : 1, false); - } -}); +const hasDebugPermission = {{ 'true' if has_debug_permission else 'false' }}; // Populate form on initial load (function() { @@ -104,11 +90,11 @@ window.addEventListener('popstate', () => { const action = params.get('action'); const page = params.get('page'); if (action) { - fetchResults(page ? parseInt(page) : 1, false); + fetchResults(page ? 
parseInt(page) : 1); } })(); -async function fetchResults(page = 1, updateHistory = true) { +async function fetchResults(page = 1) { submitBtn.disabled = true; submitBtn.textContent = 'Loading...'; @@ -136,7 +122,6 @@ async function fetchResults(page = 1, updateHistory = true) { const data = await response.json(); if (response.ok) { - currentData = data; displayResults(data); } else { displayError(data); @@ -164,8 +149,9 @@ function displayResults(data) { html += 'Resource Path'; html += 'Parent'; html += 'Child'; - html += 'Reason'; - html += 'Source Plugin'; + if (hasDebugPermission) { + html += 'Reason'; + } html += ''; html += ''; @@ -174,8 +160,14 @@ function displayResults(data) { html += `${escapeHtml(item.resource || '/')}`; html += `${escapeHtml(item.parent || '—')}`; html += `${escapeHtml(item.child || '—')}`; - html += `${escapeHtml(item.reason || '—')}`; - html += `${escapeHtml(item.source_plugin || '—')}`; + if (hasDebugPermission) { + // Display reason as JSON array + let reasonHtml = '—'; + if (item.reason && Array.isArray(item.reason)) { + reasonHtml = `${escapeHtml(JSON.stringify(item.reason))}`; + } + html += `${reasonHtml}`; + } html += ''; } @@ -188,13 +180,8 @@ function displayResults(data) { if (data.previous_url || data.next_url) { if (data.previous_url) { const prevLink = document.createElement('a'); - prevLink.href = '#'; + prevLink.href = data.previous_url; prevLink.textContent = '← Previous'; - prevLink.addEventListener('click', (e) => { - e.preventDefault(); - updateURL('allowed-form', data.page - 1); - fetchResults(data.page - 1, false); - }); pagination.appendChild(prevLink); } @@ -204,22 +191,14 @@ function displayResults(data) { if (data.next_url) { const nextLink = document.createElement('a'); - nextLink.href = '#'; + nextLink.href = data.next_url; nextLink.textContent = 'Next →'; - nextLink.addEventListener('click', (e) => { - e.preventDefault(); - updateURL('allowed-form', data.page + 1); - fetchResults(data.page + 1, false); - 
}); pagination.appendChild(nextLink); } } // Update raw JSON document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data); - - // Scroll to results - resultsContainer.scrollIntoView({ behavior: 'smooth', block: 'nearest' }); } function displayError(data) { @@ -230,20 +209,21 @@ function displayError(data) { resultsContent.innerHTML = `
Error: ${escapeHtml(data.error || 'Unknown error')}
`; document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data); - - resultsContainer.scrollIntoView({ behavior: 'smooth', block: 'nearest' }); } // Disable child input if parent is empty const parentInput = document.getElementById('parent'); const childInput = document.getElementById('child'); -childInput.addEventListener('focus', () => { +parentInput.addEventListener('input', () => { + childInput.disabled = !parentInput.value; if (!parentInput.value) { - alert('Please specify a parent resource first before filtering by child resource.'); - parentInput.focus(); + childInput.value = ''; } }); + +// Initialize disabled state +childInput.disabled = !parentInput.value; {% endblock %} diff --git a/datasette/templates/debug_check.html b/datasette/templates/debug_check.html index 2e077327..c2e7997f 100644 --- a/datasette/templates/debug_check.html +++ b/datasette/templates/debug_check.html @@ -4,35 +4,9 @@ {% block extra_head %} +{% include "_permission_ui_styles.html" %} {% include "_debug_common_functions.html" %} {% endblock %} {% block content %} +

Permission check

-

Permission Check

+{% set current_tab = "check" %} +{% include "_permissions_debug_tabs.html" %}

Use this tool to test permission checks for the current actor. It queries the /-/check.json API endpoint.

@@ -105,32 +65,36 @@

Current actor: anonymous (not logged in)

{% endif %} - -
- - - The permission action to check -
+
+ +
+ + + The permission action to check +
-
- - - For database-level permissions, specify the database name -
+
+ + + For database-level permissions, specify the database name +
-
- - - For table-level permissions, specify the table name (requires parent) -
+
+ + + For table-level permissions, specify the table name (requires parent) +
- - +
+ +
+ +