mirror of
https://github.com/simonw/datasette.git
synced 2025-12-10 16:51:24 +01:00
Compare commits
1 commit
main
...
default-de
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
335814a753 |
77 changed files with 2034 additions and 5274 deletions
32
.github/workflows/deploy-latest.yml
vendored
32
.github/workflows/deploy-latest.yml
vendored
|
|
@ -2,10 +2,10 @@ name: Deploy latest.datasette.io
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
push:
|
# push:
|
||||||
branches:
|
# branches:
|
||||||
- main
|
# - main
|
||||||
# - 1.0-dev
|
# - 1.0-dev
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
|
|
@ -15,12 +15,19 @@ jobs:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Check out datasette
|
- name: Check out datasette
|
||||||
uses: actions/checkout@v5
|
uses: actions/checkout@v3
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v6
|
uses: actions/setup-python@v6
|
||||||
|
# Using Python 3.10 for gcloud compatibility:
|
||||||
with:
|
with:
|
||||||
python-version: "3.13"
|
python-version: "3.10"
|
||||||
cache: pip
|
- uses: actions/cache@v4
|
||||||
|
name: Configure pip caching
|
||||||
|
with:
|
||||||
|
path: ~/.cache/pip
|
||||||
|
key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-pip-
|
||||||
- name: Install Python dependencies
|
- name: Install Python dependencies
|
||||||
run: |
|
run: |
|
||||||
python -m pip install --upgrade pip
|
python -m pip install --upgrade pip
|
||||||
|
|
@ -95,13 +102,12 @@ jobs:
|
||||||
# jq '.plugins |= . + {"datasette-ephemeral-tables": {"table_ttl": 900}}' \
|
# jq '.plugins |= . + {"datasette-ephemeral-tables": {"table_ttl": 900}}' \
|
||||||
# > metadata.json
|
# > metadata.json
|
||||||
# cat metadata.json
|
# cat metadata.json
|
||||||
- id: auth
|
- name: Set up Cloud Run
|
||||||
name: Authenticate to Google Cloud
|
uses: google-github-actions/setup-gcloud@v0
|
||||||
uses: google-github-actions/auth@v3
|
|
||||||
with:
|
with:
|
||||||
credentials_json: ${{ secrets.GCP_SA_KEY }}
|
version: '318.0.0'
|
||||||
- name: Set up Cloud SDK
|
service_account_email: ${{ secrets.GCP_SA_EMAIL }}
|
||||||
uses: google-github-actions/setup-gcloud@v3
|
service_account_key: ${{ secrets.GCP_SA_KEY }}
|
||||||
- name: Deploy to Cloud Run
|
- name: Deploy to Cloud Run
|
||||||
env:
|
env:
|
||||||
LATEST_DATASETTE_SECRET: ${{ secrets.LATEST_DATASETTE_SECRET }}
|
LATEST_DATASETTE_SECRET: ${{ secrets.LATEST_DATASETTE_SECRET }}
|
||||||
|
|
|
||||||
11
.github/workflows/publish.yml
vendored
11
.github/workflows/publish.yml
vendored
|
|
@ -73,13 +73,12 @@ jobs:
|
||||||
DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build
|
DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build
|
||||||
sphinx-to-sqlite ../docs.db _build
|
sphinx-to-sqlite ../docs.db _build
|
||||||
cd ..
|
cd ..
|
||||||
- id: auth
|
- name: Set up Cloud Run
|
||||||
name: Authenticate to Google Cloud
|
uses: google-github-actions/setup-gcloud@v0
|
||||||
uses: google-github-actions/auth@v2
|
|
||||||
with:
|
with:
|
||||||
credentials_json: ${{ secrets.GCP_SA_KEY }}
|
version: '318.0.0'
|
||||||
- name: Set up Cloud SDK
|
service_account_email: ${{ secrets.GCP_SA_EMAIL }}
|
||||||
uses: google-github-actions/setup-gcloud@v3
|
service_account_key: ${{ secrets.GCP_SA_KEY }}
|
||||||
- name: Deploy stable-docs.datasette.io to Cloud Run
|
- name: Deploy stable-docs.datasette.io to Cloud Run
|
||||||
run: |-
|
run: |-
|
||||||
gcloud config set run/region us-central1
|
gcloud config set run/region us-central1
|
||||||
|
|
|
||||||
76
.github/workflows/stable-docs.yml
vendored
76
.github/workflows/stable-docs.yml
vendored
|
|
@ -1,76 +0,0 @@
|
||||||
name: Update Stable Docs
|
|
||||||
|
|
||||||
on:
|
|
||||||
release:
|
|
||||||
types: [published]
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
update_stable_docs:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v5
|
|
||||||
with:
|
|
||||||
fetch-depth: 0 # We need all commits to find docs/ changes
|
|
||||||
- name: Set up Git user
|
|
||||||
run: |
|
|
||||||
git config user.name "Automated"
|
|
||||||
git config user.email "actions@users.noreply.github.com"
|
|
||||||
- name: Create stable branch if it does not yet exist
|
|
||||||
run: |
|
|
||||||
if ! git ls-remote --heads origin stable | grep -qE '\bstable\b'; then
|
|
||||||
# Make sure we have all tags locally
|
|
||||||
git fetch --tags --quiet
|
|
||||||
|
|
||||||
# Latest tag that is just numbers and dots (optionally prefixed with 'v')
|
|
||||||
# e.g., 0.65.2 or v0.65.2 — excludes 1.0a20, 1.0-rc1, etc.
|
|
||||||
LATEST_RELEASE=$(
|
|
||||||
git tag -l --sort=-v:refname \
|
|
||||||
| grep -E '^v?[0-9]+(\.[0-9]+){1,3}$' \
|
|
||||||
| head -n1
|
|
||||||
)
|
|
||||||
|
|
||||||
git checkout -b stable
|
|
||||||
|
|
||||||
# If there are any stable releases, copy docs/ from the most recent
|
|
||||||
if [ -n "$LATEST_RELEASE" ]; then
|
|
||||||
rm -rf docs/
|
|
||||||
git checkout "$LATEST_RELEASE" -- docs/ || true
|
|
||||||
fi
|
|
||||||
|
|
||||||
git commit -m "Populate docs/ from $LATEST_RELEASE" || echo "No changes"
|
|
||||||
git push -u origin stable
|
|
||||||
fi
|
|
||||||
- name: Handle Release
|
|
||||||
if: github.event_name == 'release' && !github.event.release.prerelease
|
|
||||||
run: |
|
|
||||||
git fetch --all
|
|
||||||
git checkout stable
|
|
||||||
git reset --hard ${GITHUB_REF#refs/tags/}
|
|
||||||
git push origin stable --force
|
|
||||||
- name: Handle Commit to Main
|
|
||||||
if: contains(github.event.head_commit.message, '!stable-docs')
|
|
||||||
run: |
|
|
||||||
git fetch origin
|
|
||||||
git checkout -b stable origin/stable
|
|
||||||
# Get the list of modified files in docs/ from the current commit
|
|
||||||
FILES=$(git diff-tree --no-commit-id --name-only -r ${{ github.sha }} -- docs/)
|
|
||||||
# Check if the list of files is non-empty
|
|
||||||
if [[ -n "$FILES" ]]; then
|
|
||||||
# Checkout those files to the stable branch to over-write with their contents
|
|
||||||
for FILE in $FILES; do
|
|
||||||
git checkout ${{ github.sha }} -- $FILE
|
|
||||||
done
|
|
||||||
git add docs/
|
|
||||||
git commit -m "Doc changes from ${{ github.sha }}"
|
|
||||||
git push origin stable
|
|
||||||
else
|
|
||||||
echo "No changes to docs/ in this commit."
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
10
Justfile
10
Justfile
|
|
@ -21,19 +21,15 @@ export DATASETTE_SECRET := "not_a_secret"
|
||||||
@lint: codespell
|
@lint: codespell
|
||||||
uv run black . --check
|
uv run black . --check
|
||||||
uv run flake8
|
uv run flake8
|
||||||
uv run --extra test cog --check README.md docs/*.rst
|
uv run cog --check README.md docs/*.rst
|
||||||
|
|
||||||
# Rebuild docs with cog
|
# Rebuild docs with cog
|
||||||
@cog:
|
@cog:
|
||||||
uv run --extra test cog -r README.md docs/*.rst
|
uv run cog -r README.md docs/*.rst
|
||||||
|
|
||||||
# Serve live docs on localhost:8000
|
# Serve live docs on localhost:8000
|
||||||
@docs: cog blacken-docs
|
@docs: cog blacken-docs
|
||||||
uv run --extra docs make -C docs livehtml
|
cd docs && uv run make livehtml
|
||||||
|
|
||||||
# Build docs as static HTML
|
|
||||||
@docs-build: cog blacken-docs
|
|
||||||
rm -rf docs/_build && cd docs && uv run make html
|
|
||||||
|
|
||||||
# Apply Black
|
# Apply Black
|
||||||
@black:
|
@black:
|
||||||
|
|
|
||||||
401
datasette/app.py
401
datasette/app.py
|
|
@ -2,7 +2,6 @@ from __future__ import annotations
|
||||||
|
|
||||||
from asgi_csrf import Errors
|
from asgi_csrf import Errors
|
||||||
import asyncio
|
import asyncio
|
||||||
import contextvars
|
|
||||||
from typing import TYPE_CHECKING, Any, Dict, Iterable, List
|
from typing import TYPE_CHECKING, Any, Dict, Iterable, List
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
|
|
@ -59,9 +58,6 @@ from .views.special import (
|
||||||
PermissionRulesView,
|
PermissionRulesView,
|
||||||
PermissionCheckView,
|
PermissionCheckView,
|
||||||
TablesView,
|
TablesView,
|
||||||
InstanceSchemaView,
|
|
||||||
DatabaseSchemaView,
|
|
||||||
TableSchemaView,
|
|
||||||
)
|
)
|
||||||
from .views.table import (
|
from .views.table import (
|
||||||
TableInsertView,
|
TableInsertView,
|
||||||
|
|
@ -75,7 +71,6 @@ from .url_builder import Urls
|
||||||
from .database import Database, QueryInterrupted
|
from .database import Database, QueryInterrupted
|
||||||
|
|
||||||
from .utils import (
|
from .utils import (
|
||||||
PaginatedResources,
|
|
||||||
PrefixedUrlString,
|
PrefixedUrlString,
|
||||||
SPATIALITE_FUNCTIONS,
|
SPATIALITE_FUNCTIONS,
|
||||||
StartupError,
|
StartupError,
|
||||||
|
|
@ -96,7 +91,6 @@ from .utils import (
|
||||||
resolve_env_secrets,
|
resolve_env_secrets,
|
||||||
resolve_routes,
|
resolve_routes,
|
||||||
tilde_decode,
|
tilde_decode,
|
||||||
tilde_encode,
|
|
||||||
to_css_class,
|
to_css_class,
|
||||||
urlsafe_components,
|
urlsafe_components,
|
||||||
redact_keys,
|
redact_keys,
|
||||||
|
|
@ -131,22 +125,6 @@ from .resources import DatabaseResource, TableResource
|
||||||
app_root = Path(__file__).parent.parent
|
app_root = Path(__file__).parent.parent
|
||||||
|
|
||||||
|
|
||||||
# Context variable to track when code is executing within a datasette.client request
|
|
||||||
_in_datasette_client = contextvars.ContextVar("in_datasette_client", default=False)
|
|
||||||
|
|
||||||
|
|
||||||
class _DatasetteClientContext:
|
|
||||||
"""Context manager to mark code as executing within a datasette.client request."""
|
|
||||||
|
|
||||||
def __enter__(self):
|
|
||||||
self.token = _in_datasette_client.set(True)
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
|
||||||
_in_datasette_client.reset(self.token)
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
@dataclasses.dataclass
|
@dataclasses.dataclass
|
||||||
class PermissionCheck:
|
class PermissionCheck:
|
||||||
"""Represents a logged permission check for debugging purposes."""
|
"""Represents a logged permission check for debugging purposes."""
|
||||||
|
|
@ -268,9 +246,6 @@ FAVICON_PATH = app_root / "datasette" / "static" / "favicon.png"
|
||||||
DEFAULT_NOT_SET = object()
|
DEFAULT_NOT_SET = object()
|
||||||
|
|
||||||
|
|
||||||
ResourcesSQL = collections.namedtuple("ResourcesSQL", ("sql", "params"))
|
|
||||||
|
|
||||||
|
|
||||||
async def favicon(request, send):
|
async def favicon(request, send):
|
||||||
await asgi_send_file(
|
await asgi_send_file(
|
||||||
send,
|
send,
|
||||||
|
|
@ -321,7 +296,8 @@ class Datasette:
|
||||||
crossdb=False,
|
crossdb=False,
|
||||||
nolock=False,
|
nolock=False,
|
||||||
internal=None,
|
internal=None,
|
||||||
default_deny=False,
|
private=False,
|
||||||
|
require_auth=False,
|
||||||
):
|
):
|
||||||
self._startup_invoked = False
|
self._startup_invoked = False
|
||||||
assert config_dir is None or isinstance(
|
assert config_dir is None or isinstance(
|
||||||
|
|
@ -366,6 +342,8 @@ class Datasette:
|
||||||
raise
|
raise
|
||||||
self.crossdb = crossdb
|
self.crossdb = crossdb
|
||||||
self.nolock = nolock
|
self.nolock = nolock
|
||||||
|
self.private = private
|
||||||
|
self.require_auth = require_auth
|
||||||
if memory or crossdb or not self.files:
|
if memory or crossdb or not self.files:
|
||||||
self.add_database(
|
self.add_database(
|
||||||
Database(self, is_mutable=False, is_memory=True), name="_memory"
|
Database(self, is_mutable=False, is_memory=True), name="_memory"
|
||||||
|
|
@ -530,7 +508,6 @@ class Datasette:
|
||||||
self._permission_checks = collections.deque(maxlen=200)
|
self._permission_checks = collections.deque(maxlen=200)
|
||||||
self._root_token = secrets.token_hex(32)
|
self._root_token = secrets.token_hex(32)
|
||||||
self.root_enabled = False
|
self.root_enabled = False
|
||||||
self.default_deny = default_deny
|
|
||||||
self.client = DatasetteClient(self)
|
self.client = DatasetteClient(self)
|
||||||
|
|
||||||
async def apply_metadata_json(self):
|
async def apply_metadata_json(self):
|
||||||
|
|
@ -606,15 +583,6 @@ class Datasette:
|
||||||
"select database_name, schema_version from catalog_databases"
|
"select database_name, schema_version from catalog_databases"
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
# Delete stale entries for databases that are no longer attached
|
|
||||||
stale_databases = set(current_schema_versions.keys()) - set(
|
|
||||||
self.databases.keys()
|
|
||||||
)
|
|
||||||
for stale_db_name in stale_databases:
|
|
||||||
await internal_db.execute_write(
|
|
||||||
"DELETE FROM catalog_databases WHERE database_name = ?",
|
|
||||||
[stale_db_name],
|
|
||||||
)
|
|
||||||
for database_name, db in self.databases.items():
|
for database_name, db in self.databases.items():
|
||||||
schema_version = (await db.execute("PRAGMA schema_version")).first()[0]
|
schema_version = (await db.execute("PRAGMA schema_version")).first()[0]
|
||||||
# Compare schema versions to see if we should skip it
|
# Compare schema versions to see if we should skip it
|
||||||
|
|
@ -640,17 +608,6 @@ class Datasette:
|
||||||
def urls(self):
|
def urls(self):
|
||||||
return Urls(self)
|
return Urls(self)
|
||||||
|
|
||||||
@property
|
|
||||||
def pm(self):
|
|
||||||
"""
|
|
||||||
Return the global plugin manager instance.
|
|
||||||
|
|
||||||
This provides access to the pluggy PluginManager that manages all
|
|
||||||
Datasette plugins and hooks. Use datasette.pm.hook.hook_name() to
|
|
||||||
call plugin hooks.
|
|
||||||
"""
|
|
||||||
return pm
|
|
||||||
|
|
||||||
async def invoke_startup(self):
|
async def invoke_startup(self):
|
||||||
# This must be called for Datasette to be in a usable state
|
# This must be called for Datasette to be in a usable state
|
||||||
if self._startup_invoked:
|
if self._startup_invoked:
|
||||||
|
|
@ -703,14 +660,6 @@ class Datasette:
|
||||||
def unsign(self, signed, namespace="default"):
|
def unsign(self, signed, namespace="default"):
|
||||||
return URLSafeSerializer(self._secret, namespace).loads(signed)
|
return URLSafeSerializer(self._secret, namespace).loads(signed)
|
||||||
|
|
||||||
def in_client(self) -> bool:
|
|
||||||
"""Check if the current code is executing within a datasette.client request.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
bool: True if currently executing within a datasette.client request, False otherwise.
|
|
||||||
"""
|
|
||||||
return _in_datasette_client.get()
|
|
||||||
|
|
||||||
def create_token(
|
def create_token(
|
||||||
self,
|
self,
|
||||||
actor_id: str,
|
actor_id: str,
|
||||||
|
|
@ -779,10 +728,8 @@ class Datasette:
|
||||||
self.databases = new_databases
|
self.databases = new_databases
|
||||||
return db
|
return db
|
||||||
|
|
||||||
def add_memory_database(self, memory_name, name=None, route=None):
|
def add_memory_database(self, memory_name):
|
||||||
return self.add_database(
|
return self.add_database(Database(self, memory_name=memory_name))
|
||||||
Database(self, memory_name=memory_name), name=name, route=route
|
|
||||||
)
|
|
||||||
|
|
||||||
def remove_database(self, name):
|
def remove_database(self, name):
|
||||||
self.get_database(name).close()
|
self.get_database(name).close()
|
||||||
|
|
@ -1144,7 +1091,11 @@ class Datasette:
|
||||||
|
|
||||||
# Validate that resource is a Resource object or None
|
# Validate that resource is a Resource object or None
|
||||||
if resource is not None and not isinstance(resource, Resource):
|
if resource is not None and not isinstance(resource, Resource):
|
||||||
raise TypeError(f"resource must be a Resource subclass instance or None.")
|
raise TypeError(
|
||||||
|
f"resource must be a Resource object or None, not {type(resource).__name__}. "
|
||||||
|
f"Use DatabaseResource(database=...), TableResource(database=..., table=...), "
|
||||||
|
f"or QueryResource(database=..., query=...) instead."
|
||||||
|
)
|
||||||
|
|
||||||
# Check if actor can see it
|
# Check if actor can see it
|
||||||
if not await self.allowed(action=action, resource=resource, actor=actor):
|
if not await self.allowed(action=action, resource=resource, actor=actor):
|
||||||
|
|
@ -1165,7 +1116,7 @@ class Datasette:
|
||||||
actor: dict | None = None,
|
actor: dict | None = None,
|
||||||
parent: str | None = None,
|
parent: str | None = None,
|
||||||
include_is_private: bool = False,
|
include_is_private: bool = False,
|
||||||
) -> ResourcesSQL:
|
) -> tuple[str, dict]:
|
||||||
"""
|
"""
|
||||||
Build SQL query to get all resources the actor can access for the given action.
|
Build SQL query to get all resources the actor can access for the given action.
|
||||||
|
|
||||||
|
|
@ -1175,7 +1126,7 @@ class Datasette:
|
||||||
parent: Optional parent filter (e.g., database name) to limit results
|
parent: Optional parent filter (e.g., database name) to limit results
|
||||||
include_is_private: If True, include is_private column showing if anonymous cannot access
|
include_is_private: If True, include is_private column showing if anonymous cannot access
|
||||||
|
|
||||||
Returns a namedtuple of (query: str, params: dict) that can be executed against the internal database.
|
Returns a tuple of (query, params) that can be executed against the internal database.
|
||||||
The query returns rows with (parent, child, reason) columns, plus is_private if requested.
|
The query returns rows with (parent, child, reason) columns, plus is_private if requested.
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
|
@ -1193,10 +1144,9 @@ class Datasette:
|
||||||
if not action_obj:
|
if not action_obj:
|
||||||
raise ValueError(f"Unknown action: {action}")
|
raise ValueError(f"Unknown action: {action}")
|
||||||
|
|
||||||
sql, params = await build_allowed_resources_sql(
|
return await build_allowed_resources_sql(
|
||||||
self, actor, action, parent=parent, include_is_private=include_is_private
|
self, actor, action, parent=parent, include_is_private=include_is_private
|
||||||
)
|
)
|
||||||
return ResourcesSQL(sql, params)
|
|
||||||
|
|
||||||
async def allowed_resources(
|
async def allowed_resources(
|
||||||
self,
|
self,
|
||||||
|
|
@ -1205,147 +1155,104 @@ class Datasette:
|
||||||
*,
|
*,
|
||||||
parent: str | None = None,
|
parent: str | None = None,
|
||||||
include_is_private: bool = False,
|
include_is_private: bool = False,
|
||||||
include_reasons: bool = False,
|
) -> list["Resource"]:
|
||||||
limit: int = 100,
|
|
||||||
next: str | None = None,
|
|
||||||
) -> PaginatedResources:
|
|
||||||
"""
|
"""
|
||||||
Return paginated resources the actor can access for the given action.
|
Return all resources the actor can access for the given action.
|
||||||
|
|
||||||
Uses SQL with keyset pagination to efficiently filter resources.
|
Uses SQL to filter resources based on cascading permission rules.
|
||||||
Returns PaginatedResources with list of Resource instances and pagination metadata.
|
Returns instances of the appropriate Resource subclass.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
action: The action name (e.g., "view-table")
|
action: The action name (e.g., "view-table")
|
||||||
actor: The actor dict (or None for unauthenticated)
|
actor: The actor dict (or None for unauthenticated)
|
||||||
parent: Optional parent filter (e.g., database name) to limit results
|
parent: Optional parent filter (e.g., database name) to limit results
|
||||||
include_is_private: If True, adds a .private attribute to each Resource
|
include_is_private: If True, adds a .private attribute to each Resource
|
||||||
include_reasons: If True, adds a .reasons attribute with List[str] of permission reasons
|
|
||||||
limit: Maximum number of results to return (1-1000, default 100)
|
|
||||||
next: Keyset token from previous page for pagination
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
PaginatedResources with:
|
|
||||||
- resources: List of Resource objects for this page
|
|
||||||
- next: Token for next page (None if no more results)
|
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
# Get first page of tables
|
# Get all tables
|
||||||
page = await datasette.allowed_resources("view-table", actor, limit=50)
|
tables = await datasette.allowed_resources("view-table", actor)
|
||||||
for table in page.resources:
|
for table in tables:
|
||||||
print(f"{table.parent}/{table.child}")
|
print(f"{table.parent}/{table.child}")
|
||||||
|
|
||||||
# Get next page
|
# Get tables for specific database with private flag
|
||||||
if page.next:
|
tables = await datasette.allowed_resources(
|
||||||
next_page = await datasette.allowed_resources(
|
"view-table", actor, parent="mydb", include_is_private=True
|
||||||
"view-table", actor, limit=50, next=page.next
|
|
||||||
)
|
|
||||||
|
|
||||||
# With reasons for debugging
|
|
||||||
page = await datasette.allowed_resources(
|
|
||||||
"view-table", actor, include_reasons=True
|
|
||||||
)
|
)
|
||||||
for table in page.resources:
|
for table in tables:
|
||||||
print(f"{table.child}: {table.reasons}")
|
if table.private:
|
||||||
|
print(f"{table.child} is private")
|
||||||
# Iterate through all results with async generator
|
|
||||||
page = await datasette.allowed_resources("view-table", actor)
|
|
||||||
async for table in page.all():
|
|
||||||
print(table.child)
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
action_obj = self.actions.get(action)
|
action_obj = self.actions.get(action)
|
||||||
if not action_obj:
|
if not action_obj:
|
||||||
raise ValueError(f"Unknown action: {action}")
|
raise ValueError(f"Unknown action: {action}")
|
||||||
|
|
||||||
# Validate and cap limit
|
|
||||||
limit = min(max(1, limit), 1000)
|
|
||||||
|
|
||||||
# Get base SQL query
|
|
||||||
query, params = await self.allowed_resources_sql(
|
query, params = await self.allowed_resources_sql(
|
||||||
action=action,
|
action=action,
|
||||||
actor=actor,
|
actor=actor,
|
||||||
parent=parent,
|
parent=parent,
|
||||||
include_is_private=include_is_private,
|
include_is_private=include_is_private,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Add keyset pagination WHERE clause if next token provided
|
|
||||||
if next:
|
|
||||||
try:
|
|
||||||
components = urlsafe_components(next)
|
|
||||||
if len(components) >= 2:
|
|
||||||
last_parent, last_child = components[0], components[1]
|
|
||||||
# Keyset condition: (parent > last) OR (parent = last AND child > last)
|
|
||||||
keyset_where = """
|
|
||||||
(parent > :keyset_parent OR
|
|
||||||
(parent = :keyset_parent AND child > :keyset_child))
|
|
||||||
"""
|
|
||||||
# Wrap original query and add keyset filter
|
|
||||||
query = f"SELECT * FROM ({query}) WHERE {keyset_where}"
|
|
||||||
params["keyset_parent"] = last_parent
|
|
||||||
params["keyset_child"] = last_child
|
|
||||||
except (ValueError, KeyError):
|
|
||||||
# Invalid token - ignore and start from beginning
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Add LIMIT (fetch limit+1 to detect if there are more results)
|
|
||||||
# Note: query from allowed_resources_sql() already includes ORDER BY parent, child
|
|
||||||
query = f"{query} LIMIT :limit"
|
|
||||||
params["limit"] = limit + 1
|
|
||||||
|
|
||||||
# Execute query
|
|
||||||
result = await self.get_internal_database().execute(query, params)
|
result = await self.get_internal_database().execute(query, params)
|
||||||
rows = list(result.rows)
|
|
||||||
|
|
||||||
# Check if truncated (got more than limit rows)
|
# Instantiate the appropriate Resource subclass for each row
|
||||||
truncated = len(rows) > limit
|
|
||||||
if truncated:
|
|
||||||
rows = rows[:limit] # Remove the extra row
|
|
||||||
|
|
||||||
# Build Resource objects with optional attributes
|
|
||||||
resources = []
|
resources = []
|
||||||
for row in rows:
|
for row in result.rows:
|
||||||
# row[0]=parent, row[1]=child, row[2]=reason, row[3]=is_private (if requested)
|
# row[0]=parent, row[1]=child, row[2]=reason (ignored), row[3]=is_private (if requested)
|
||||||
resource = self.resource_for_action(action, parent=row[0], child=row[1])
|
resource = self.resource_for_action(action, parent=row[0], child=row[1])
|
||||||
|
|
||||||
# Add reasons if requested
|
|
||||||
if include_reasons:
|
|
||||||
reason_json = row[2]
|
|
||||||
try:
|
|
||||||
reasons_array = (
|
|
||||||
json.loads(reason_json) if isinstance(reason_json, str) else []
|
|
||||||
)
|
|
||||||
resource.reasons = [r for r in reasons_array if r is not None]
|
|
||||||
except (json.JSONDecodeError, TypeError):
|
|
||||||
resource.reasons = [reason_json] if reason_json else []
|
|
||||||
|
|
||||||
# Add private flag if requested
|
|
||||||
if include_is_private:
|
if include_is_private:
|
||||||
resource.private = bool(row[3])
|
resource.private = bool(row[3])
|
||||||
|
|
||||||
resources.append(resource)
|
resources.append(resource)
|
||||||
|
|
||||||
# Generate next token if there are more results
|
return resources
|
||||||
next_token = None
|
|
||||||
if truncated and resources:
|
|
||||||
last_resource = resources[-1]
|
|
||||||
# Use tilde-encoding like table pagination
|
|
||||||
next_token = "{},{}".format(
|
|
||||||
tilde_encode(str(last_resource.parent)),
|
|
||||||
tilde_encode(str(last_resource.child)),
|
|
||||||
)
|
|
||||||
|
|
||||||
return PaginatedResources(
|
async def allowed_resources_with_reasons(
|
||||||
resources=resources,
|
self,
|
||||||
next=next_token,
|
action: str,
|
||||||
_datasette=self,
|
actor: dict | None = None,
|
||||||
_action=action,
|
) -> list["AllowedResource"]:
|
||||||
_actor=actor,
|
"""
|
||||||
_parent=parent,
|
Return allowed resources with permission reasons for debugging.
|
||||||
_include_is_private=include_is_private,
|
|
||||||
_include_reasons=include_reasons,
|
Uses SQL to filter resources and includes the reason each was allowed.
|
||||||
_limit=limit,
|
Returns list of AllowedResource named tuples with (resource, reason).
|
||||||
)
|
|
||||||
|
Example:
|
||||||
|
debug_info = await datasette.allowed_resources_with_reasons("view-table", actor)
|
||||||
|
for allowed in debug_info:
|
||||||
|
print(f"{allowed.resource}: {allowed.reason}")
|
||||||
|
"""
|
||||||
|
from datasette.permissions import AllowedResource
|
||||||
|
|
||||||
|
action_obj = self.actions.get(action)
|
||||||
|
if not action_obj:
|
||||||
|
raise ValueError(f"Unknown action: {action}")
|
||||||
|
|
||||||
|
query, params = await self.allowed_resources_sql(action=action, actor=actor)
|
||||||
|
result = await self.get_internal_database().execute(query, params)
|
||||||
|
|
||||||
|
resources = []
|
||||||
|
for row in result.rows:
|
||||||
|
resource = self.resource_for_action(action, parent=row[0], child=row[1])
|
||||||
|
reason_json = row[2]
|
||||||
|
|
||||||
|
# Parse JSON array of reasons and filter out nulls
|
||||||
|
try:
|
||||||
|
import json
|
||||||
|
|
||||||
|
reasons_array = (
|
||||||
|
json.loads(reason_json) if isinstance(reason_json, str) else []
|
||||||
|
)
|
||||||
|
reasons_filtered = [r for r in reasons_array if r is not None]
|
||||||
|
# Store as list for multiple reasons, or keep empty list
|
||||||
|
reason = reasons_filtered
|
||||||
|
except (json.JSONDecodeError, TypeError):
|
||||||
|
# Fallback for backward compatibility
|
||||||
|
reason = [reason_json] if reason_json else []
|
||||||
|
|
||||||
|
resources.append(AllowedResource(resource=resource, reason=reason))
|
||||||
|
|
||||||
|
return resources
|
||||||
|
|
||||||
async def allowed(
|
async def allowed(
|
||||||
self,
|
self,
|
||||||
|
|
@ -1360,7 +1267,7 @@ class Datasette:
|
||||||
Uses SQL to check permission for a single resource without fetching all resources.
|
Uses SQL to check permission for a single resource without fetching all resources.
|
||||||
This is efficient - it does NOT call allowed_resources() and check membership.
|
This is efficient - it does NOT call allowed_resources() and check membership.
|
||||||
|
|
||||||
For global actions, resource should be None (or omitted).
|
If resource is not provided, defaults to InstanceResource() for instance-level actions.
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
from datasette.resources import TableResource
|
from datasette.resources import TableResource
|
||||||
|
|
@ -1370,12 +1277,14 @@ class Datasette:
|
||||||
actor=actor
|
actor=actor
|
||||||
)
|
)
|
||||||
|
|
||||||
# For global actions, resource can be omitted:
|
# For instance-level actions, resource can be omitted:
|
||||||
can_debug = await datasette.allowed(action="permissions-debug", actor=actor)
|
can_debug = await datasette.allowed(action="permissions-debug", actor=actor)
|
||||||
"""
|
"""
|
||||||
from datasette.utils.actions_sql import check_permission_for_resource
|
from datasette.utils.actions_sql import check_permission_for_resource
|
||||||
|
from datasette.resources import InstanceResource
|
||||||
|
|
||||||
# For global actions, resource remains None
|
if resource is None:
|
||||||
|
resource = InstanceResource()
|
||||||
|
|
||||||
# Check if this action has also_requires - if so, check that action first
|
# Check if this action has also_requires - if so, check that action first
|
||||||
action_obj = self.actions.get(action)
|
action_obj = self.actions.get(action)
|
||||||
|
|
@ -1388,16 +1297,12 @@ class Datasette:
|
||||||
):
|
):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# For global actions, resource is None
|
|
||||||
parent = resource.parent if resource else None
|
|
||||||
child = resource.child if resource else None
|
|
||||||
|
|
||||||
result = await check_permission_for_resource(
|
result = await check_permission_for_resource(
|
||||||
datasette=self,
|
datasette=self,
|
||||||
actor=actor,
|
actor=actor,
|
||||||
action=action,
|
action=action,
|
||||||
parent=parent,
|
parent=resource.parent,
|
||||||
child=child,
|
child=resource.child,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Log the permission check for debugging
|
# Log the permission check for debugging
|
||||||
|
|
@ -1406,8 +1311,8 @@ class Datasette:
|
||||||
when=datetime.datetime.now(datetime.timezone.utc).isoformat(),
|
when=datetime.datetime.now(datetime.timezone.utc).isoformat(),
|
||||||
actor=actor,
|
actor=actor,
|
||||||
action=action,
|
action=action,
|
||||||
parent=parent,
|
parent=resource.parent,
|
||||||
child=child,
|
child=resource.child,
|
||||||
result=result,
|
result=result,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
@ -1661,9 +1566,7 @@ class Datasette:
|
||||||
"description": action.description,
|
"description": action.description,
|
||||||
"takes_parent": action.takes_parent,
|
"takes_parent": action.takes_parent,
|
||||||
"takes_child": action.takes_child,
|
"takes_child": action.takes_child,
|
||||||
"resource_class": (
|
"resource_class": action.resource_class.__name__,
|
||||||
action.resource_class.__name__ if action.resource_class else None
|
|
||||||
),
|
|
||||||
"also_requires": action.also_requires,
|
"also_requires": action.also_requires,
|
||||||
}
|
}
|
||||||
for action in sorted(self.actions.values(), key=lambda a: a.name)
|
for action in sorted(self.actions.values(), key=lambda a: a.name)
|
||||||
|
|
@ -1960,10 +1863,6 @@ class Datasette:
|
||||||
TablesView.as_view(self),
|
TablesView.as_view(self),
|
||||||
r"/-/tables(\.(?P<format>json))?$",
|
r"/-/tables(\.(?P<format>json))?$",
|
||||||
)
|
)
|
||||||
add_route(
|
|
||||||
InstanceSchemaView.as_view(self),
|
|
||||||
r"/-/schema(\.(?P<format>json|md))?$",
|
|
||||||
)
|
|
||||||
add_route(
|
add_route(
|
||||||
LogoutView.as_view(self),
|
LogoutView.as_view(self),
|
||||||
r"/-/logout$",
|
r"/-/logout$",
|
||||||
|
|
@ -2005,10 +1904,6 @@ class Datasette:
|
||||||
r"/(?P<database>[^\/\.]+)(\.(?P<format>\w+))?$",
|
r"/(?P<database>[^\/\.]+)(\.(?P<format>\w+))?$",
|
||||||
)
|
)
|
||||||
add_route(TableCreateView.as_view(self), r"/(?P<database>[^\/\.]+)/-/create$")
|
add_route(TableCreateView.as_view(self), r"/(?P<database>[^\/\.]+)/-/create$")
|
||||||
add_route(
|
|
||||||
DatabaseSchemaView.as_view(self),
|
|
||||||
r"/(?P<database>[^\/\.]+)/-/schema(\.(?P<format>json|md))?$",
|
|
||||||
)
|
|
||||||
add_route(
|
add_route(
|
||||||
wrap_view(QueryView, self),
|
wrap_view(QueryView, self),
|
||||||
r"/(?P<database>[^\/\.]+)/-/query(\.(?P<format>\w+))?$",
|
r"/(?P<database>[^\/\.]+)/-/query(\.(?P<format>\w+))?$",
|
||||||
|
|
@ -2033,10 +1928,6 @@ class Datasette:
|
||||||
TableDropView.as_view(self),
|
TableDropView.as_view(self),
|
||||||
r"/(?P<database>[^\/\.]+)/(?P<table>[^\/\.]+)/-/drop$",
|
r"/(?P<database>[^\/\.]+)/(?P<table>[^\/\.]+)/-/drop$",
|
||||||
)
|
)
|
||||||
add_route(
|
|
||||||
TableSchemaView.as_view(self),
|
|
||||||
r"/(?P<database>[^\/\.]+)/(?P<table>[^\/\.]+)/-/schema(\.(?P<format>json|md))?$",
|
|
||||||
)
|
|
||||||
add_route(
|
add_route(
|
||||||
RowDeleteView.as_view(self),
|
RowDeleteView.as_view(self),
|
||||||
r"/(?P<database>[^\/\.]+)/(?P<table>[^/]+?)/(?P<pks>[^/]+?)/-/delete$",
|
r"/(?P<database>[^\/\.]+)/(?P<table>[^/]+?)/(?P<pks>[^/]+?)/-/delete$",
|
||||||
|
|
@ -2427,18 +2318,9 @@ class NotFoundExplicit(NotFound):
|
||||||
|
|
||||||
|
|
||||||
class DatasetteClient:
|
class DatasetteClient:
|
||||||
"""Internal HTTP client for making requests to a Datasette instance.
|
|
||||||
|
|
||||||
Used for testing and for internal operations that need to make HTTP requests
|
|
||||||
to the Datasette app without going through an actual HTTP server.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, ds):
|
def __init__(self, ds):
|
||||||
self.ds = ds
|
self.ds = ds
|
||||||
|
self.app = ds.app()
|
||||||
@property
|
|
||||||
def app(self):
|
|
||||||
return self.ds.app()
|
|
||||||
|
|
||||||
def actor_cookie(self, actor):
|
def actor_cookie(self, actor):
|
||||||
# Utility method, mainly for tests
|
# Utility method, mainly for tests
|
||||||
|
|
@ -2451,89 +2333,40 @@ class DatasetteClient:
|
||||||
path = f"http://localhost{path}"
|
path = f"http://localhost{path}"
|
||||||
return path
|
return path
|
||||||
|
|
||||||
async def _request(self, method, path, skip_permission_checks=False, **kwargs):
|
async def _request(self, method, path, **kwargs):
|
||||||
from datasette.permissions import SkipPermissions
|
async with httpx.AsyncClient(
|
||||||
|
transport=httpx.ASGITransport(app=self.app),
|
||||||
|
cookies=kwargs.pop("cookies", None),
|
||||||
|
) as client:
|
||||||
|
return await getattr(client, method)(self._fix(path), **kwargs)
|
||||||
|
|
||||||
with _DatasetteClientContext():
|
async def get(self, path, **kwargs):
|
||||||
if skip_permission_checks:
|
return await self._request("get", path, **kwargs)
|
||||||
with SkipPermissions():
|
|
||||||
async with httpx.AsyncClient(
|
|
||||||
transport=httpx.ASGITransport(app=self.app),
|
|
||||||
cookies=kwargs.pop("cookies", None),
|
|
||||||
) as client:
|
|
||||||
return await getattr(client, method)(self._fix(path), **kwargs)
|
|
||||||
else:
|
|
||||||
async with httpx.AsyncClient(
|
|
||||||
transport=httpx.ASGITransport(app=self.app),
|
|
||||||
cookies=kwargs.pop("cookies", None),
|
|
||||||
) as client:
|
|
||||||
return await getattr(client, method)(self._fix(path), **kwargs)
|
|
||||||
|
|
||||||
async def get(self, path, skip_permission_checks=False, **kwargs):
|
async def options(self, path, **kwargs):
|
||||||
return await self._request(
|
return await self._request("options", path, **kwargs)
|
||||||
"get", path, skip_permission_checks=skip_permission_checks, **kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
async def options(self, path, skip_permission_checks=False, **kwargs):
|
async def head(self, path, **kwargs):
|
||||||
return await self._request(
|
return await self._request("head", path, **kwargs)
|
||||||
"options", path, skip_permission_checks=skip_permission_checks, **kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
async def head(self, path, skip_permission_checks=False, **kwargs):
|
async def post(self, path, **kwargs):
|
||||||
return await self._request(
|
return await self._request("post", path, **kwargs)
|
||||||
"head", path, skip_permission_checks=skip_permission_checks, **kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
async def post(self, path, skip_permission_checks=False, **kwargs):
|
async def put(self, path, **kwargs):
|
||||||
return await self._request(
|
return await self._request("put", path, **kwargs)
|
||||||
"post", path, skip_permission_checks=skip_permission_checks, **kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
async def put(self, path, skip_permission_checks=False, **kwargs):
|
async def patch(self, path, **kwargs):
|
||||||
return await self._request(
|
return await self._request("patch", path, **kwargs)
|
||||||
"put", path, skip_permission_checks=skip_permission_checks, **kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
async def patch(self, path, skip_permission_checks=False, **kwargs):
|
async def delete(self, path, **kwargs):
|
||||||
return await self._request(
|
return await self._request("delete", path, **kwargs)
|
||||||
"patch", path, skip_permission_checks=skip_permission_checks, **kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
async def delete(self, path, skip_permission_checks=False, **kwargs):
|
|
||||||
return await self._request(
|
|
||||||
"delete", path, skip_permission_checks=skip_permission_checks, **kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
async def request(self, method, path, skip_permission_checks=False, **kwargs):
|
|
||||||
"""Make an HTTP request with the specified method.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
method: HTTP method (e.g., "GET", "POST", "PUT")
|
|
||||||
path: The path to request
|
|
||||||
skip_permission_checks: If True, bypass all permission checks for this request
|
|
||||||
**kwargs: Additional arguments to pass to httpx
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
httpx.Response: The response from the request
|
|
||||||
"""
|
|
||||||
from datasette.permissions import SkipPermissions
|
|
||||||
|
|
||||||
|
async def request(self, method, path, **kwargs):
|
||||||
avoid_path_rewrites = kwargs.pop("avoid_path_rewrites", None)
|
avoid_path_rewrites = kwargs.pop("avoid_path_rewrites", None)
|
||||||
with _DatasetteClientContext():
|
async with httpx.AsyncClient(
|
||||||
if skip_permission_checks:
|
transport=httpx.ASGITransport(app=self.app),
|
||||||
with SkipPermissions():
|
cookies=kwargs.pop("cookies", None),
|
||||||
async with httpx.AsyncClient(
|
) as client:
|
||||||
transport=httpx.ASGITransport(app=self.app),
|
return await client.request(
|
||||||
cookies=kwargs.pop("cookies", None),
|
method, self._fix(path, avoid_path_rewrites), **kwargs
|
||||||
) as client:
|
)
|
||||||
return await client.request(
|
|
||||||
method, self._fix(path, avoid_path_rewrites), **kwargs
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
async with httpx.AsyncClient(
|
|
||||||
transport=httpx.ASGITransport(app=self.app),
|
|
||||||
cookies=kwargs.pop("cookies", None),
|
|
||||||
) as client:
|
|
||||||
return await client.request(
|
|
||||||
method, self._fix(path, avoid_path_rewrites), **kwargs
|
|
||||||
)
|
|
||||||
|
|
|
||||||
|
|
@ -438,20 +438,10 @@ def uninstall(packages, yes):
|
||||||
help="Output URL that sets a cookie authenticating the root user",
|
help="Output URL that sets a cookie authenticating the root user",
|
||||||
is_flag=True,
|
is_flag=True,
|
||||||
)
|
)
|
||||||
@click.option(
|
|
||||||
"--default-deny",
|
|
||||||
help="Deny all permissions by default",
|
|
||||||
is_flag=True,
|
|
||||||
)
|
|
||||||
@click.option(
|
@click.option(
|
||||||
"--get",
|
"--get",
|
||||||
help="Run an HTTP GET request against this path, print results and exit",
|
help="Run an HTTP GET request against this path, print results and exit",
|
||||||
)
|
)
|
||||||
@click.option(
|
|
||||||
"--headers",
|
|
||||||
is_flag=True,
|
|
||||||
help="Include HTTP headers in --get output",
|
|
||||||
)
|
|
||||||
@click.option(
|
@click.option(
|
||||||
"--token",
|
"--token",
|
||||||
help="API token to send with --get requests",
|
help="API token to send with --get requests",
|
||||||
|
|
@ -500,6 +490,16 @@ def uninstall(packages, yes):
|
||||||
type=click.Path(),
|
type=click.Path(),
|
||||||
help="Path to a persistent Datasette internal SQLite database",
|
help="Path to a persistent Datasette internal SQLite database",
|
||||||
)
|
)
|
||||||
|
@click.option(
|
||||||
|
"--private",
|
||||||
|
is_flag=True,
|
||||||
|
help="Default deny mode - all access blocked unless explicitly allowed",
|
||||||
|
)
|
||||||
|
@click.option(
|
||||||
|
"--require-auth",
|
||||||
|
is_flag=True,
|
||||||
|
help="Require authentication - only actors with an id can access",
|
||||||
|
)
|
||||||
def serve(
|
def serve(
|
||||||
files,
|
files,
|
||||||
immutable,
|
immutable,
|
||||||
|
|
@ -519,9 +519,7 @@ def serve(
|
||||||
settings,
|
settings,
|
||||||
secret,
|
secret,
|
||||||
root,
|
root,
|
||||||
default_deny,
|
|
||||||
get,
|
get,
|
||||||
headers,
|
|
||||||
token,
|
token,
|
||||||
actor,
|
actor,
|
||||||
version_note,
|
version_note,
|
||||||
|
|
@ -534,6 +532,8 @@ def serve(
|
||||||
ssl_keyfile,
|
ssl_keyfile,
|
||||||
ssl_certfile,
|
ssl_certfile,
|
||||||
internal,
|
internal,
|
||||||
|
private,
|
||||||
|
require_auth,
|
||||||
return_instance=False,
|
return_instance=False,
|
||||||
):
|
):
|
||||||
"""Serve up specified SQLite database files with a web UI"""
|
"""Serve up specified SQLite database files with a web UI"""
|
||||||
|
|
@ -548,6 +548,8 @@ def serve(
|
||||||
)
|
)
|
||||||
click.echo(formatter.getvalue())
|
click.echo(formatter.getvalue())
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
if private and require_auth:
|
||||||
|
raise click.UsageError("Cannot use both --private and --require-auth")
|
||||||
if reload:
|
if reload:
|
||||||
import hupper
|
import hupper
|
||||||
|
|
||||||
|
|
@ -600,23 +602,17 @@ def serve(
|
||||||
crossdb=crossdb,
|
crossdb=crossdb,
|
||||||
nolock=nolock,
|
nolock=nolock,
|
||||||
internal=internal,
|
internal=internal,
|
||||||
default_deny=default_deny,
|
private=private,
|
||||||
|
require_auth=require_auth,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Separate directories from files
|
# if files is a single directory, use that as config_dir=
|
||||||
directories = [f for f in files if os.path.isdir(f)]
|
if 1 == len(files) and os.path.isdir(files[0]):
|
||||||
file_paths = [f for f in files if not os.path.isdir(f)]
|
kwargs["config_dir"] = pathlib.Path(files[0])
|
||||||
|
files = []
|
||||||
# Handle config_dir - only one directory allowed
|
|
||||||
if len(directories) > 1:
|
|
||||||
raise click.ClickException(
|
|
||||||
"Cannot pass multiple directories. Pass a single directory as config_dir."
|
|
||||||
)
|
|
||||||
elif len(directories) == 1:
|
|
||||||
kwargs["config_dir"] = pathlib.Path(directories[0])
|
|
||||||
|
|
||||||
# Verify list of files, create if needed (and --create)
|
# Verify list of files, create if needed (and --create)
|
||||||
for file in file_paths:
|
for file in files:
|
||||||
if not pathlib.Path(file).exists():
|
if not pathlib.Path(file).exists():
|
||||||
if create:
|
if create:
|
||||||
sqlite3.connect(file).execute("vacuum")
|
sqlite3.connect(file).execute("vacuum")
|
||||||
|
|
@ -627,32 +623,8 @@ def serve(
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
# Check for duplicate files by resolving all paths to their absolute forms
|
# De-duplicate files so 'datasette db.db db.db' only attaches one /db
|
||||||
# Collect all database files that will be loaded (explicit files + config_dir files)
|
files = list(dict.fromkeys(files))
|
||||||
all_db_files = []
|
|
||||||
|
|
||||||
# Add explicit files
|
|
||||||
for file in file_paths:
|
|
||||||
all_db_files.append((file, pathlib.Path(file).resolve()))
|
|
||||||
|
|
||||||
# Add config_dir databases if config_dir is set
|
|
||||||
if "config_dir" in kwargs:
|
|
||||||
config_dir = kwargs["config_dir"]
|
|
||||||
for ext in ("db", "sqlite", "sqlite3"):
|
|
||||||
for db_file in config_dir.glob(f"*.{ext}"):
|
|
||||||
all_db_files.append((str(db_file), db_file.resolve()))
|
|
||||||
|
|
||||||
# Check for duplicates
|
|
||||||
seen = {}
|
|
||||||
for original_path, resolved_path in all_db_files:
|
|
||||||
if resolved_path in seen:
|
|
||||||
raise click.ClickException(
|
|
||||||
f"Duplicate database file: '{original_path}' and '{seen[resolved_path]}' "
|
|
||||||
f"both refer to {resolved_path}"
|
|
||||||
)
|
|
||||||
seen[resolved_path] = original_path
|
|
||||||
|
|
||||||
files = file_paths
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
ds = Datasette(files, **kwargs)
|
ds = Datasette(files, **kwargs)
|
||||||
|
|
@ -671,33 +643,19 @@ def serve(
|
||||||
# Run async soundness checks - but only if we're not under pytest
|
# Run async soundness checks - but only if we're not under pytest
|
||||||
run_sync(lambda: check_databases(ds))
|
run_sync(lambda: check_databases(ds))
|
||||||
|
|
||||||
if headers and not get:
|
|
||||||
raise click.ClickException("--headers can only be used with --get")
|
|
||||||
|
|
||||||
if token and not get:
|
if token and not get:
|
||||||
raise click.ClickException("--token can only be used with --get")
|
raise click.ClickException("--token can only be used with --get")
|
||||||
|
|
||||||
if get:
|
if get:
|
||||||
client = TestClient(ds)
|
client = TestClient(ds)
|
||||||
request_headers = {}
|
headers = {}
|
||||||
if token:
|
if token:
|
||||||
request_headers["Authorization"] = "Bearer {}".format(token)
|
headers["Authorization"] = "Bearer {}".format(token)
|
||||||
cookies = {}
|
cookies = {}
|
||||||
if actor:
|
if actor:
|
||||||
cookies["ds_actor"] = client.actor_cookie(json.loads(actor))
|
cookies["ds_actor"] = client.actor_cookie(json.loads(actor))
|
||||||
response = client.get(get, headers=request_headers, cookies=cookies)
|
response = client.get(get, headers=headers, cookies=cookies)
|
||||||
|
click.echo(response.text)
|
||||||
if headers:
|
|
||||||
# Output HTTP status code, headers, two newlines, then the response body
|
|
||||||
click.echo(f"HTTP/1.1 {response.status}")
|
|
||||||
for key, value in response.headers.items():
|
|
||||||
click.echo(f"{key}: {value}")
|
|
||||||
if response.text:
|
|
||||||
click.echo()
|
|
||||||
click.echo(response.text)
|
|
||||||
else:
|
|
||||||
click.echo(response.text)
|
|
||||||
|
|
||||||
exit_code = 0 if response.status == 200 else 1
|
exit_code = 0 if response.status == 200 else 1
|
||||||
sys.exit(exit_code)
|
sys.exit(exit_code)
|
||||||
return
|
return
|
||||||
|
|
|
||||||
|
|
@ -410,12 +410,7 @@ class Database:
|
||||||
# But SQLite prior to 3.16.0 doesn't support pragma functions
|
# But SQLite prior to 3.16.0 doesn't support pragma functions
|
||||||
results = await self.execute("PRAGMA database_list;")
|
results = await self.execute("PRAGMA database_list;")
|
||||||
# {'seq': 0, 'name': 'main', 'file': ''}
|
# {'seq': 0, 'name': 'main', 'file': ''}
|
||||||
return [
|
return [AttachedDatabase(*row) for row in results.rows if row["seq"] > 0]
|
||||||
AttachedDatabase(*row)
|
|
||||||
for row in results.rows
|
|
||||||
# Filter out the SQLite internal "temp" database, refs #2557
|
|
||||||
if row["seq"] > 0 and row["name"] != "temp"
|
|
||||||
]
|
|
||||||
|
|
||||||
async def table_exists(self, table):
|
async def table_exists(self, table):
|
||||||
results = await self.execute(
|
results = await self.execute(
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
from datasette import hookimpl
|
from datasette import hookimpl
|
||||||
from datasette.permissions import Action
|
from datasette.permissions import Action
|
||||||
from datasette.resources import (
|
from datasette.resources import (
|
||||||
|
InstanceResource,
|
||||||
DatabaseResource,
|
DatabaseResource,
|
||||||
TableResource,
|
TableResource,
|
||||||
QueryResource,
|
QueryResource,
|
||||||
|
|
@ -11,91 +12,122 @@ from datasette.resources import (
|
||||||
def register_actions():
|
def register_actions():
|
||||||
"""Register the core Datasette actions."""
|
"""Register the core Datasette actions."""
|
||||||
return (
|
return (
|
||||||
# Global actions (no resource_class)
|
# View actions
|
||||||
Action(
|
Action(
|
||||||
name="view-instance",
|
name="view-instance",
|
||||||
abbr="vi",
|
abbr="vi",
|
||||||
description="View Datasette instance",
|
description="View Datasette instance",
|
||||||
|
takes_parent=False,
|
||||||
|
takes_child=False,
|
||||||
|
resource_class=InstanceResource,
|
||||||
),
|
),
|
||||||
Action(
|
|
||||||
name="permissions-debug",
|
|
||||||
abbr="pd",
|
|
||||||
description="Access permission debug tool",
|
|
||||||
),
|
|
||||||
Action(
|
|
||||||
name="debug-menu",
|
|
||||||
abbr="dm",
|
|
||||||
description="View debug menu items",
|
|
||||||
),
|
|
||||||
# Database-level actions (parent-level)
|
|
||||||
Action(
|
Action(
|
||||||
name="view-database",
|
name="view-database",
|
||||||
abbr="vd",
|
abbr="vd",
|
||||||
description="View database",
|
description="View database",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=False,
|
||||||
resource_class=DatabaseResource,
|
resource_class=DatabaseResource,
|
||||||
),
|
),
|
||||||
Action(
|
Action(
|
||||||
name="view-database-download",
|
name="view-database-download",
|
||||||
abbr="vdd",
|
abbr="vdd",
|
||||||
description="Download database file",
|
description="Download database file",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=False,
|
||||||
resource_class=DatabaseResource,
|
resource_class=DatabaseResource,
|
||||||
also_requires="view-database",
|
also_requires="view-database",
|
||||||
),
|
),
|
||||||
|
Action(
|
||||||
|
name="view-table",
|
||||||
|
abbr="vt",
|
||||||
|
description="View table",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=True,
|
||||||
|
resource_class=TableResource,
|
||||||
|
),
|
||||||
|
Action(
|
||||||
|
name="view-query",
|
||||||
|
abbr="vq",
|
||||||
|
description="View named query results",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=True,
|
||||||
|
resource_class=QueryResource,
|
||||||
|
),
|
||||||
Action(
|
Action(
|
||||||
name="execute-sql",
|
name="execute-sql",
|
||||||
abbr="es",
|
abbr="es",
|
||||||
description="Execute read-only SQL queries",
|
description="Execute read-only SQL queries",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=False,
|
||||||
resource_class=DatabaseResource,
|
resource_class=DatabaseResource,
|
||||||
also_requires="view-database",
|
also_requires="view-database",
|
||||||
),
|
),
|
||||||
|
# Debug actions
|
||||||
Action(
|
Action(
|
||||||
name="create-table",
|
name="permissions-debug",
|
||||||
abbr="ct",
|
abbr="pd",
|
||||||
description="Create tables",
|
description="Access permission debug tool",
|
||||||
resource_class=DatabaseResource,
|
takes_parent=False,
|
||||||
|
takes_child=False,
|
||||||
|
resource_class=InstanceResource,
|
||||||
),
|
),
|
||||||
# Table-level actions (child-level)
|
|
||||||
Action(
|
Action(
|
||||||
name="view-table",
|
name="debug-menu",
|
||||||
abbr="vt",
|
abbr="dm",
|
||||||
description="View table",
|
description="View debug menu items",
|
||||||
resource_class=TableResource,
|
takes_parent=False,
|
||||||
|
takes_child=False,
|
||||||
|
resource_class=InstanceResource,
|
||||||
),
|
),
|
||||||
|
# Write actions on tables
|
||||||
Action(
|
Action(
|
||||||
name="insert-row",
|
name="insert-row",
|
||||||
abbr="ir",
|
abbr="ir",
|
||||||
description="Insert rows",
|
description="Insert rows",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=True,
|
||||||
resource_class=TableResource,
|
resource_class=TableResource,
|
||||||
),
|
),
|
||||||
Action(
|
Action(
|
||||||
name="delete-row",
|
name="delete-row",
|
||||||
abbr="dr",
|
abbr="dr",
|
||||||
description="Delete rows",
|
description="Delete rows",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=True,
|
||||||
resource_class=TableResource,
|
resource_class=TableResource,
|
||||||
),
|
),
|
||||||
Action(
|
Action(
|
||||||
name="update-row",
|
name="update-row",
|
||||||
abbr="ur",
|
abbr="ur",
|
||||||
description="Update rows",
|
description="Update rows",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=True,
|
||||||
resource_class=TableResource,
|
resource_class=TableResource,
|
||||||
),
|
),
|
||||||
Action(
|
Action(
|
||||||
name="alter-table",
|
name="alter-table",
|
||||||
abbr="at",
|
abbr="at",
|
||||||
description="Alter tables",
|
description="Alter tables",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=True,
|
||||||
resource_class=TableResource,
|
resource_class=TableResource,
|
||||||
),
|
),
|
||||||
Action(
|
Action(
|
||||||
name="drop-table",
|
name="drop-table",
|
||||||
abbr="dt",
|
abbr="dt",
|
||||||
description="Drop tables",
|
description="Drop tables",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=True,
|
||||||
resource_class=TableResource,
|
resource_class=TableResource,
|
||||||
),
|
),
|
||||||
# Query-level actions (child-level)
|
# Schema actions on databases
|
||||||
Action(
|
Action(
|
||||||
name="view-query",
|
name="create-table",
|
||||||
abbr="vq",
|
abbr="ct",
|
||||||
description="View named query results",
|
description="Create tables",
|
||||||
resource_class=QueryResource,
|
takes_parent=True,
|
||||||
|
takes_child=False,
|
||||||
|
resource_class=DatabaseResource,
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
|
||||||
553
datasette/default_permissions.py
Normal file
553
datasette/default_permissions.py
Normal file
|
|
@ -0,0 +1,553 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from datasette.app import Datasette
|
||||||
|
|
||||||
|
from datasette import hookimpl
|
||||||
|
from datasette.permissions import PermissionSQL
|
||||||
|
from datasette.utils import actor_matches_allow
|
||||||
|
import itsdangerous
|
||||||
|
import time
|
||||||
|
|
||||||
|
|
||||||
|
@hookimpl
|
||||||
|
async def permission_resources_sql(datasette, actor, action):
|
||||||
|
rules: list[PermissionSQL] = []
|
||||||
|
|
||||||
|
# 1. FIRST: Actor restrictions (if present)
|
||||||
|
# These act as a gating filter - must pass through before other checks
|
||||||
|
restriction_rules = await _restriction_permission_rules(datasette, actor, action)
|
||||||
|
rules.extend(restriction_rules)
|
||||||
|
|
||||||
|
# 2. Root user permissions
|
||||||
|
# Root user with root_enabled gets all permissions at global level
|
||||||
|
# Config rules at more specific levels (database/table) can still override
|
||||||
|
if datasette.root_enabled and actor and actor.get("id") == "root":
|
||||||
|
# Add a single global-level allow rule (NULL, NULL) for root
|
||||||
|
# This allows root to access everything by default, but database-level
|
||||||
|
# and table-level deny rules in config can still block specific resources
|
||||||
|
sql = "SELECT NULL AS parent, NULL AS child, 1 AS allow, 'root user' AS reason"
|
||||||
|
rules.append(
|
||||||
|
PermissionSQL(
|
||||||
|
source="root_permissions",
|
||||||
|
sql=sql,
|
||||||
|
params={},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# 3. Config-based permission rules
|
||||||
|
config_rules = await _config_permission_rules(datasette, actor, action)
|
||||||
|
rules.extend(config_rules)
|
||||||
|
|
||||||
|
# 4. Check default_allow_sql setting for execute-sql action
|
||||||
|
if action == "execute-sql" and not datasette.setting("default_allow_sql"):
|
||||||
|
# Return a deny rule for all databases
|
||||||
|
sql = "SELECT NULL AS parent, NULL AS child, 0 AS allow, 'default_allow_sql is false' AS reason"
|
||||||
|
rules.append(
|
||||||
|
PermissionSQL(
|
||||||
|
source="default_allow_sql_setting",
|
||||||
|
sql=sql,
|
||||||
|
params={},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
# Early return - don't add default allow rule
|
||||||
|
if not rules:
|
||||||
|
return None
|
||||||
|
if len(rules) == 1:
|
||||||
|
return rules[0]
|
||||||
|
return rules
|
||||||
|
|
||||||
|
# 5. Default allow actions (ONLY if no restrictions)
|
||||||
|
default_allow_actions = {
|
||||||
|
"view-instance",
|
||||||
|
"view-database",
|
||||||
|
"view-database-download",
|
||||||
|
"view-table",
|
||||||
|
"view-query",
|
||||||
|
"execute-sql",
|
||||||
|
}
|
||||||
|
# If actor has restrictions, they've already added their own deny/allow rules
|
||||||
|
has_restrictions = actor and "_r" in actor
|
||||||
|
if not has_restrictions:
|
||||||
|
# Check for --private flag (complete default-deny mode)
|
||||||
|
if datasette.private:
|
||||||
|
# In private mode, don't grant any default allow permissions
|
||||||
|
pass
|
||||||
|
# Check for --require-auth flag (authenticated-only mode)
|
||||||
|
elif datasette.require_auth:
|
||||||
|
# Only grant default allow if actor has an id (is authenticated)
|
||||||
|
if actor and actor.get("id"):
|
||||||
|
if action in default_allow_actions:
|
||||||
|
reason = f"default allow for {action} (authenticated)".replace(
|
||||||
|
"'", "''"
|
||||||
|
)
|
||||||
|
sql = (
|
||||||
|
"SELECT NULL AS parent, NULL AS child, 1 AS allow, "
|
||||||
|
f"'{reason}' AS reason"
|
||||||
|
)
|
||||||
|
rules.append(
|
||||||
|
PermissionSQL(
|
||||||
|
source="default_permissions",
|
||||||
|
sql=sql,
|
||||||
|
params={},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# Normal mode - grant default allow to everyone
|
||||||
|
if action in default_allow_actions:
|
||||||
|
reason = f"default allow for {action}".replace("'", "''")
|
||||||
|
sql = (
|
||||||
|
"SELECT NULL AS parent, NULL AS child, 1 AS allow, "
|
||||||
|
f"'{reason}' AS reason"
|
||||||
|
)
|
||||||
|
rules.append(
|
||||||
|
PermissionSQL(
|
||||||
|
source="default_permissions",
|
||||||
|
sql=sql,
|
||||||
|
params={},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if not rules:
|
||||||
|
return None
|
||||||
|
if len(rules) == 1:
|
||||||
|
return rules[0]
|
||||||
|
return rules
|
||||||
|
|
||||||
|
|
||||||
|
async def _config_permission_rules(datasette, actor, action) -> list[PermissionSQL]:
|
||||||
|
config = datasette.config or {}
|
||||||
|
|
||||||
|
if actor is None:
|
||||||
|
actor_dict: dict | None = None
|
||||||
|
elif isinstance(actor, dict):
|
||||||
|
actor_dict = actor
|
||||||
|
else:
|
||||||
|
actor_lookup = await datasette.actors_from_ids([actor])
|
||||||
|
actor_dict = actor_lookup.get(actor) or {"id": actor}
|
||||||
|
|
||||||
|
def evaluate(allow_block):
|
||||||
|
if allow_block is None:
|
||||||
|
return None
|
||||||
|
return actor_matches_allow(actor_dict, allow_block)
|
||||||
|
|
||||||
|
# Check if actor has restrictions - if so, we'll filter config rules
|
||||||
|
has_restrictions = actor_dict and "_r" in actor_dict if actor_dict else False
|
||||||
|
restrictions = actor_dict.get("_r", {}) if actor_dict else {}
|
||||||
|
|
||||||
|
def is_in_restriction_allowlist(parent, child, action):
|
||||||
|
"""Check if a resource is in the actor's restriction allowlist for this action"""
|
||||||
|
if not has_restrictions:
|
||||||
|
return True # No restrictions, all resources allowed
|
||||||
|
|
||||||
|
# Check action with abbreviations
|
||||||
|
action_obj = datasette.actions.get(action)
|
||||||
|
action_checks = {action}
|
||||||
|
if action_obj and action_obj.abbr:
|
||||||
|
action_checks.add(action_obj.abbr)
|
||||||
|
|
||||||
|
# Check global allowlist
|
||||||
|
if action_checks.intersection(restrictions.get("a", [])):
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check database-level allowlist
|
||||||
|
if parent and action_checks.intersection(
|
||||||
|
restrictions.get("d", {}).get(parent, [])
|
||||||
|
):
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check table-level allowlist
|
||||||
|
if parent and child:
|
||||||
|
table_actions = restrictions.get("r", {}).get(parent, {}).get(child, [])
|
||||||
|
if action_checks.intersection(table_actions):
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
rows = []
|
||||||
|
|
||||||
|
def add_row(parent, child, result, scope):
|
||||||
|
if result is None:
|
||||||
|
return
|
||||||
|
rows.append(
|
||||||
|
(
|
||||||
|
parent,
|
||||||
|
child,
|
||||||
|
bool(result),
|
||||||
|
f"config {'allow' if result else 'deny'} {scope}",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
def add_row_allow_block(parent, child, allow_block, scope):
|
||||||
|
"""For 'allow' blocks, always add a row if the block exists - deny if no match"""
|
||||||
|
if allow_block is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
# If actor has restrictions and this resource is NOT in allowlist, skip this config rule
|
||||||
|
# Restrictions act as a gating filter - config cannot grant access to restricted-out resources
|
||||||
|
if not is_in_restriction_allowlist(parent, child, action):
|
||||||
|
return
|
||||||
|
|
||||||
|
result = evaluate(allow_block)
|
||||||
|
# If result is None (no match) or False, treat as deny
|
||||||
|
rows.append(
|
||||||
|
(
|
||||||
|
parent,
|
||||||
|
child,
|
||||||
|
bool(result), # None becomes False, False stays False, True stays True
|
||||||
|
f"config {'allow' if result else 'deny'} {scope}",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
root_perm = (config.get("permissions") or {}).get(action)
|
||||||
|
add_row(None, None, evaluate(root_perm), f"permissions for {action}")
|
||||||
|
|
||||||
|
for db_name, db_config in (config.get("databases") or {}).items():
|
||||||
|
db_perm = (db_config.get("permissions") or {}).get(action)
|
||||||
|
add_row(
|
||||||
|
db_name, None, evaluate(db_perm), f"permissions for {action} on {db_name}"
|
||||||
|
)
|
||||||
|
|
||||||
|
for table_name, table_config in (db_config.get("tables") or {}).items():
|
||||||
|
table_perm = (table_config.get("permissions") or {}).get(action)
|
||||||
|
add_row(
|
||||||
|
db_name,
|
||||||
|
table_name,
|
||||||
|
evaluate(table_perm),
|
||||||
|
f"permissions for {action} on {db_name}/{table_name}",
|
||||||
|
)
|
||||||
|
|
||||||
|
if action == "view-table":
|
||||||
|
table_allow = (table_config or {}).get("allow")
|
||||||
|
add_row_allow_block(
|
||||||
|
db_name,
|
||||||
|
table_name,
|
||||||
|
table_allow,
|
||||||
|
f"allow for {action} on {db_name}/{table_name}",
|
||||||
|
)
|
||||||
|
|
||||||
|
for query_name, query_config in (db_config.get("queries") or {}).items():
|
||||||
|
# query_config can be a string (just SQL) or a dict (with SQL and options)
|
||||||
|
if isinstance(query_config, dict):
|
||||||
|
query_perm = (query_config.get("permissions") or {}).get(action)
|
||||||
|
add_row(
|
||||||
|
db_name,
|
||||||
|
query_name,
|
||||||
|
evaluate(query_perm),
|
||||||
|
f"permissions for {action} on {db_name}/{query_name}",
|
||||||
|
)
|
||||||
|
if action == "view-query":
|
||||||
|
query_allow = query_config.get("allow")
|
||||||
|
add_row_allow_block(
|
||||||
|
db_name,
|
||||||
|
query_name,
|
||||||
|
query_allow,
|
||||||
|
f"allow for {action} on {db_name}/{query_name}",
|
||||||
|
)
|
||||||
|
|
||||||
|
if action == "view-database":
|
||||||
|
db_allow = db_config.get("allow")
|
||||||
|
add_row_allow_block(
|
||||||
|
db_name, None, db_allow, f"allow for {action} on {db_name}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if action == "execute-sql":
|
||||||
|
db_allow_sql = db_config.get("allow_sql")
|
||||||
|
add_row_allow_block(db_name, None, db_allow_sql, f"allow_sql for {db_name}")
|
||||||
|
|
||||||
|
if action == "view-table":
|
||||||
|
# Database-level allow block affects all tables in that database
|
||||||
|
db_allow = db_config.get("allow")
|
||||||
|
add_row_allow_block(
|
||||||
|
db_name, None, db_allow, f"allow for {action} on {db_name}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if action == "view-query":
|
||||||
|
# Database-level allow block affects all queries in that database
|
||||||
|
db_allow = db_config.get("allow")
|
||||||
|
add_row_allow_block(
|
||||||
|
db_name, None, db_allow, f"allow for {action} on {db_name}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Root-level allow block applies to all view-* actions
|
||||||
|
if action == "view-instance":
|
||||||
|
allow_block = config.get("allow")
|
||||||
|
add_row_allow_block(None, None, allow_block, "allow for view-instance")
|
||||||
|
|
||||||
|
if action == "view-database":
|
||||||
|
# Root-level allow block also applies to view-database
|
||||||
|
allow_block = config.get("allow")
|
||||||
|
add_row_allow_block(None, None, allow_block, "allow for view-database")
|
||||||
|
|
||||||
|
if action == "view-table":
|
||||||
|
# Root-level allow block also applies to view-table
|
||||||
|
allow_block = config.get("allow")
|
||||||
|
add_row_allow_block(None, None, allow_block, "allow for view-table")
|
||||||
|
|
||||||
|
if action == "view-query":
|
||||||
|
# Root-level allow block also applies to view-query
|
||||||
|
allow_block = config.get("allow")
|
||||||
|
add_row_allow_block(None, None, allow_block, "allow for view-query")
|
||||||
|
|
||||||
|
if action == "execute-sql":
|
||||||
|
allow_sql = config.get("allow_sql")
|
||||||
|
add_row_allow_block(None, None, allow_sql, "allow_sql")
|
||||||
|
|
||||||
|
if not rows:
|
||||||
|
return []
|
||||||
|
|
||||||
|
parts = []
|
||||||
|
params = {}
|
||||||
|
for idx, (parent, child, allow, reason) in enumerate(rows):
|
||||||
|
key = f"cfg_{idx}"
|
||||||
|
parts.append(
|
||||||
|
f"SELECT :{key}_parent AS parent, :{key}_child AS child, :{key}_allow AS allow, :{key}_reason AS reason"
|
||||||
|
)
|
||||||
|
params[f"{key}_parent"] = parent
|
||||||
|
params[f"{key}_child"] = child
|
||||||
|
params[f"{key}_allow"] = 1 if allow else 0
|
||||||
|
params[f"{key}_reason"] = reason
|
||||||
|
|
||||||
|
sql = "\nUNION ALL\n".join(parts)
|
||||||
|
return [PermissionSQL(source="config_permissions", sql=sql, params=params)]
|
||||||
|
|
||||||
|
|
||||||
|
async def _restriction_permission_rules(
|
||||||
|
datasette, actor, action
|
||||||
|
) -> list[PermissionSQL]:
|
||||||
|
"""
|
||||||
|
Generate PermissionSQL rules from actor restrictions (_r key).
|
||||||
|
|
||||||
|
Actor restrictions define an allowlist. We implement this via:
|
||||||
|
1. Global DENY rule for the action (blocks everything by default)
|
||||||
|
2. Specific ALLOW rules for each allowlisted resource
|
||||||
|
|
||||||
|
The cascading logic (child → parent → global) ensures that:
|
||||||
|
- Allowlisted resources at child/parent level override global deny
|
||||||
|
- Non-allowlisted resources are blocked by global deny
|
||||||
|
|
||||||
|
This creates a gating filter that runs BEFORE normal permission checks.
|
||||||
|
Restrictions cannot be overridden by config - they gate what gets checked.
|
||||||
|
"""
|
||||||
|
if not actor or "_r" not in actor:
|
||||||
|
return []
|
||||||
|
|
||||||
|
restrictions = actor["_r"]
|
||||||
|
|
||||||
|
# Check if this action appears in restrictions (with abbreviations)
|
||||||
|
action_obj = datasette.actions.get(action)
|
||||||
|
action_checks = {action}
|
||||||
|
if action_obj and action_obj.abbr:
|
||||||
|
action_checks.add(action_obj.abbr)
|
||||||
|
|
||||||
|
# Check if this action is in the allowlist anywhere in restrictions
|
||||||
|
is_in_allowlist = False
|
||||||
|
global_actions = restrictions.get("a", [])
|
||||||
|
if action_checks.intersection(global_actions):
|
||||||
|
is_in_allowlist = True
|
||||||
|
|
||||||
|
if not is_in_allowlist:
|
||||||
|
for db_actions in restrictions.get("d", {}).values():
|
||||||
|
if action_checks.intersection(db_actions):
|
||||||
|
is_in_allowlist = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if not is_in_allowlist:
|
||||||
|
for tables in restrictions.get("r", {}).values():
|
||||||
|
for table_actions in tables.values():
|
||||||
|
if action_checks.intersection(table_actions):
|
||||||
|
is_in_allowlist = True
|
||||||
|
break
|
||||||
|
if is_in_allowlist:
|
||||||
|
break
|
||||||
|
|
||||||
|
# If action not in allowlist at all, add global deny and return
|
||||||
|
if not is_in_allowlist:
|
||||||
|
sql = "SELECT NULL AS parent, NULL AS child, 0 AS allow, :deny_reason AS reason"
|
||||||
|
return [
|
||||||
|
PermissionSQL(
|
||||||
|
source="actor_restrictions",
|
||||||
|
sql=sql,
|
||||||
|
params={
|
||||||
|
"deny_reason": f"actor restrictions: {action} not in allowlist"
|
||||||
|
},
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
# Action IS in allowlist - build deny + specific allows
|
||||||
|
selects = []
|
||||||
|
params = {}
|
||||||
|
param_counter = 0
|
||||||
|
|
||||||
|
def add_row(parent, child, allow, reason):
|
||||||
|
"""Helper to add a parameterized SELECT statement"""
|
||||||
|
nonlocal param_counter
|
||||||
|
prefix = f"restr_{param_counter}"
|
||||||
|
param_counter += 1
|
||||||
|
|
||||||
|
selects.append(
|
||||||
|
f"SELECT :{prefix}_parent AS parent, :{prefix}_child AS child, "
|
||||||
|
f":{prefix}_allow AS allow, :{prefix}_reason AS reason"
|
||||||
|
)
|
||||||
|
params[f"{prefix}_parent"] = parent
|
||||||
|
params[f"{prefix}_child"] = child
|
||||||
|
params[f"{prefix}_allow"] = 1 if allow else 0
|
||||||
|
params[f"{prefix}_reason"] = reason
|
||||||
|
|
||||||
|
# If NOT globally allowed, add global deny as gatekeeper
|
||||||
|
is_globally_allowed = action_checks.intersection(global_actions)
|
||||||
|
if not is_globally_allowed:
|
||||||
|
add_row(None, None, 0, f"actor restrictions: {action} denied by default")
|
||||||
|
else:
|
||||||
|
# Globally allowed - add global allow
|
||||||
|
add_row(None, None, 1, f"actor restrictions: global {action}")
|
||||||
|
|
||||||
|
# Add database-level allows
|
||||||
|
db_restrictions = restrictions.get("d", {})
|
||||||
|
for db_name, db_actions in db_restrictions.items():
|
||||||
|
if action_checks.intersection(db_actions):
|
||||||
|
add_row(db_name, None, 1, f"actor restrictions: database {db_name}")
|
||||||
|
|
||||||
|
# Add resource/table-level allows
|
||||||
|
resource_restrictions = restrictions.get("r", {})
|
||||||
|
for db_name, tables in resource_restrictions.items():
|
||||||
|
for table_name, table_actions in tables.items():
|
||||||
|
if action_checks.intersection(table_actions):
|
||||||
|
add_row(
|
||||||
|
db_name,
|
||||||
|
table_name,
|
||||||
|
1,
|
||||||
|
f"actor restrictions: {db_name}/{table_name}",
|
||||||
|
)
|
||||||
|
|
||||||
|
if not selects:
|
||||||
|
return []
|
||||||
|
|
||||||
|
sql = "\nUNION ALL\n".join(selects)
|
||||||
|
|
||||||
|
return [PermissionSQL(source="actor_restrictions", sql=sql, params=params)]
|
||||||
|
|
||||||
|
|
||||||
|
def restrictions_allow_action(
|
||||||
|
datasette: "Datasette",
|
||||||
|
restrictions: dict,
|
||||||
|
action: str,
|
||||||
|
resource: str | tuple[str, str],
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Check if actor restrictions allow the requested action against the requested resource.
|
||||||
|
|
||||||
|
Restrictions work on an exact-match basis: if an actor has view-table permission,
|
||||||
|
they can view tables, but NOT automatically view-instance or view-database.
|
||||||
|
Each permission is checked independently without implication logic.
|
||||||
|
"""
|
||||||
|
# Does this action have an abbreviation?
|
||||||
|
to_check = {action}
|
||||||
|
action_obj = datasette.actions.get(action)
|
||||||
|
if action_obj and action_obj.abbr:
|
||||||
|
to_check.add(action_obj.abbr)
|
||||||
|
|
||||||
|
# Check if restrictions explicitly allow this action
|
||||||
|
# Restrictions can be at three levels:
|
||||||
|
# - "a": global (any resource)
|
||||||
|
# - "d": per-database
|
||||||
|
# - "r": per-table/resource
|
||||||
|
|
||||||
|
# Check global level (any resource)
|
||||||
|
all_allowed = restrictions.get("a")
|
||||||
|
if all_allowed is not None:
|
||||||
|
assert isinstance(all_allowed, list)
|
||||||
|
if to_check.intersection(all_allowed):
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check database level
|
||||||
|
if resource:
|
||||||
|
if isinstance(resource, str):
|
||||||
|
database_name = resource
|
||||||
|
else:
|
||||||
|
database_name = resource[0]
|
||||||
|
database_allowed = restrictions.get("d", {}).get(database_name)
|
||||||
|
if database_allowed is not None:
|
||||||
|
assert isinstance(database_allowed, list)
|
||||||
|
if to_check.intersection(database_allowed):
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check table/resource level
|
||||||
|
if resource is not None and not isinstance(resource, str) and len(resource) == 2:
|
||||||
|
database, table = resource
|
||||||
|
table_allowed = restrictions.get("r", {}).get(database, {}).get(table)
|
||||||
|
if table_allowed is not None:
|
||||||
|
assert isinstance(table_allowed, list)
|
||||||
|
if to_check.intersection(table_allowed):
|
||||||
|
return True
|
||||||
|
|
||||||
|
# This action is not explicitly allowed, so reject it
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
@hookimpl
|
||||||
|
def actor_from_request(datasette, request):
|
||||||
|
prefix = "dstok_"
|
||||||
|
if not datasette.setting("allow_signed_tokens"):
|
||||||
|
return None
|
||||||
|
max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl")
|
||||||
|
authorization = request.headers.get("authorization")
|
||||||
|
if not authorization:
|
||||||
|
return None
|
||||||
|
if not authorization.startswith("Bearer "):
|
||||||
|
return None
|
||||||
|
token = authorization[len("Bearer ") :]
|
||||||
|
if not token.startswith(prefix):
|
||||||
|
return None
|
||||||
|
token = token[len(prefix) :]
|
||||||
|
try:
|
||||||
|
decoded = datasette.unsign(token, namespace="token")
|
||||||
|
except itsdangerous.BadSignature:
|
||||||
|
return None
|
||||||
|
if "t" not in decoded:
|
||||||
|
# Missing timestamp
|
||||||
|
return None
|
||||||
|
created = decoded["t"]
|
||||||
|
if not isinstance(created, int):
|
||||||
|
# Invalid timestamp
|
||||||
|
return None
|
||||||
|
duration = decoded.get("d")
|
||||||
|
if duration is not None and not isinstance(duration, int):
|
||||||
|
# Invalid duration
|
||||||
|
return None
|
||||||
|
if (duration is None and max_signed_tokens_ttl) or (
|
||||||
|
duration is not None
|
||||||
|
and max_signed_tokens_ttl
|
||||||
|
and duration > max_signed_tokens_ttl
|
||||||
|
):
|
||||||
|
duration = max_signed_tokens_ttl
|
||||||
|
if duration:
|
||||||
|
if time.time() - created > duration:
|
||||||
|
# Expired
|
||||||
|
return None
|
||||||
|
actor = {"id": decoded["a"], "token": "dstok"}
|
||||||
|
if "_r" in decoded:
|
||||||
|
actor["_r"] = decoded["_r"]
|
||||||
|
if duration:
|
||||||
|
actor["token_expires"] = created + duration
|
||||||
|
return actor
|
||||||
|
|
||||||
|
|
||||||
|
@hookimpl
|
||||||
|
def skip_csrf(scope):
|
||||||
|
# Skip CSRF check for requests with content-type: application/json
|
||||||
|
if scope["type"] == "http":
|
||||||
|
headers = scope.get("headers") or {}
|
||||||
|
if dict(headers).get(b"content-type") == b"application/json":
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
@hookimpl
|
||||||
|
def canned_queries(datasette, database, actor):
|
||||||
|
"""Return canned queries from datasette configuration."""
|
||||||
|
queries = (
|
||||||
|
((datasette.config or {}).get("databases") or {}).get(database) or {}
|
||||||
|
).get("queries") or {}
|
||||||
|
return queries
|
||||||
|
|
@ -1,59 +0,0 @@
|
||||||
"""
|
|
||||||
Default permission implementations for Datasette.
|
|
||||||
|
|
||||||
This module provides the built-in permission checking logic through implementations
|
|
||||||
of the permission_resources_sql hook. The hooks are organized by their purpose:
|
|
||||||
|
|
||||||
1. Actor Restrictions - Enforces _r allowlists embedded in actor tokens
|
|
||||||
2. Root User - Grants full access when --root flag is used
|
|
||||||
3. Config Rules - Applies permissions from datasette.yaml
|
|
||||||
4. Default Settings - Enforces default_allow_sql and default view permissions
|
|
||||||
|
|
||||||
IMPORTANT: These hooks return PermissionSQL objects that are combined using SQL
|
|
||||||
UNION/INTERSECT operations. The order of evaluation is:
|
|
||||||
- restriction_sql fields are INTERSECTed (all must match)
|
|
||||||
- Regular sql fields are UNIONed and evaluated with cascading priority
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from typing import TYPE_CHECKING, Optional
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from datasette.app import Datasette
|
|
||||||
|
|
||||||
from datasette import hookimpl
|
|
||||||
|
|
||||||
# Re-export all hooks and public utilities
|
|
||||||
from .restrictions import (
|
|
||||||
actor_restrictions_sql,
|
|
||||||
restrictions_allow_action,
|
|
||||||
ActorRestrictions,
|
|
||||||
)
|
|
||||||
from .root import root_user_permissions_sql
|
|
||||||
from .config import config_permissions_sql
|
|
||||||
from .defaults import (
|
|
||||||
default_allow_sql_check,
|
|
||||||
default_action_permissions_sql,
|
|
||||||
DEFAULT_ALLOW_ACTIONS,
|
|
||||||
)
|
|
||||||
from .tokens import actor_from_signed_api_token
|
|
||||||
|
|
||||||
|
|
||||||
@hookimpl
|
|
||||||
def skip_csrf(scope) -> Optional[bool]:
|
|
||||||
"""Skip CSRF check for JSON content-type requests."""
|
|
||||||
if scope["type"] == "http":
|
|
||||||
headers = scope.get("headers") or {}
|
|
||||||
if dict(headers).get(b"content-type") == b"application/json":
|
|
||||||
return True
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
@hookimpl
|
|
||||||
def canned_queries(datasette: "Datasette", database: str, actor) -> dict:
|
|
||||||
"""Return canned queries defined in datasette.yaml configuration."""
|
|
||||||
queries = (
|
|
||||||
((datasette.config or {}).get("databases") or {}).get(database) or {}
|
|
||||||
).get("queries") or {}
|
|
||||||
return queries
|
|
||||||
|
|
@ -1,442 +0,0 @@
|
||||||
"""
|
|
||||||
Config-based permission handling for Datasette.
|
|
||||||
|
|
||||||
Applies permission rules from datasette.yaml configuration.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from typing import TYPE_CHECKING, Any, List, Optional, Set, Tuple
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from datasette.app import Datasette
|
|
||||||
|
|
||||||
from datasette import hookimpl
|
|
||||||
from datasette.permissions import PermissionSQL
|
|
||||||
from datasette.utils import actor_matches_allow
|
|
||||||
|
|
||||||
from .helpers import PermissionRowCollector, get_action_name_variants
|
|
||||||
|
|
||||||
|
|
||||||
class ConfigPermissionProcessor:
|
|
||||||
"""
|
|
||||||
Processes permission rules from datasette.yaml configuration.
|
|
||||||
|
|
||||||
Configuration structure:
|
|
||||||
|
|
||||||
permissions: # Root-level permissions block
|
|
||||||
view-instance:
|
|
||||||
id: admin
|
|
||||||
|
|
||||||
databases:
|
|
||||||
mydb:
|
|
||||||
permissions: # Database-level permissions
|
|
||||||
view-database:
|
|
||||||
id: admin
|
|
||||||
allow: # Database-level allow block (for view-*)
|
|
||||||
id: viewer
|
|
||||||
allow_sql: # execute-sql allow block
|
|
||||||
id: analyst
|
|
||||||
tables:
|
|
||||||
users:
|
|
||||||
permissions: # Table-level permissions
|
|
||||||
view-table:
|
|
||||||
id: admin
|
|
||||||
allow: # Table-level allow block
|
|
||||||
id: viewer
|
|
||||||
queries:
|
|
||||||
my_query:
|
|
||||||
permissions: # Query-level permissions
|
|
||||||
view-query:
|
|
||||||
id: admin
|
|
||||||
allow: # Query-level allow block
|
|
||||||
id: viewer
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
datasette: "Datasette",
|
|
||||||
actor: Optional[dict],
|
|
||||||
action: str,
|
|
||||||
):
|
|
||||||
self.datasette = datasette
|
|
||||||
self.actor = actor
|
|
||||||
self.action = action
|
|
||||||
self.config = datasette.config or {}
|
|
||||||
self.collector = PermissionRowCollector(prefix="cfg")
|
|
||||||
|
|
||||||
# Pre-compute action variants
|
|
||||||
self.action_checks = get_action_name_variants(datasette, action)
|
|
||||||
self.action_obj = datasette.actions.get(action)
|
|
||||||
|
|
||||||
# Parse restrictions if present
|
|
||||||
self.has_restrictions = actor and "_r" in actor if actor else False
|
|
||||||
self.restrictions = actor.get("_r", {}) if actor else {}
|
|
||||||
|
|
||||||
# Pre-compute restriction info for efficiency
|
|
||||||
self.restricted_databases: Set[str] = set()
|
|
||||||
self.restricted_tables: Set[Tuple[str, str]] = set()
|
|
||||||
|
|
||||||
if self.has_restrictions:
|
|
||||||
self.restricted_databases = {
|
|
||||||
db_name
|
|
||||||
for db_name, db_actions in (self.restrictions.get("d") or {}).items()
|
|
||||||
if self.action_checks.intersection(db_actions)
|
|
||||||
}
|
|
||||||
self.restricted_tables = {
|
|
||||||
(db_name, table_name)
|
|
||||||
for db_name, tables in (self.restrictions.get("r") or {}).items()
|
|
||||||
for table_name, table_actions in tables.items()
|
|
||||||
if self.action_checks.intersection(table_actions)
|
|
||||||
}
|
|
||||||
# Tables implicitly reference their parent databases
|
|
||||||
self.restricted_databases.update(db for db, _ in self.restricted_tables)
|
|
||||||
|
|
||||||
def evaluate_allow_block(self, allow_block: Any) -> Optional[bool]:
|
|
||||||
"""Evaluate an allow block against the current actor."""
|
|
||||||
if allow_block is None:
|
|
||||||
return None
|
|
||||||
return actor_matches_allow(self.actor, allow_block)
|
|
||||||
|
|
||||||
def is_in_restriction_allowlist(
|
|
||||||
self,
|
|
||||||
parent: Optional[str],
|
|
||||||
child: Optional[str],
|
|
||||||
) -> bool:
|
|
||||||
"""Check if resource is allowed by actor restrictions."""
|
|
||||||
if not self.has_restrictions:
|
|
||||||
return True # No restrictions, all resources allowed
|
|
||||||
|
|
||||||
# Check global allowlist
|
|
||||||
if self.action_checks.intersection(self.restrictions.get("a", [])):
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Check database-level allowlist
|
|
||||||
if parent and self.action_checks.intersection(
|
|
||||||
self.restrictions.get("d", {}).get(parent, [])
|
|
||||||
):
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Check table-level allowlist
|
|
||||||
if parent:
|
|
||||||
table_restrictions = (self.restrictions.get("r", {}) or {}).get(parent, {})
|
|
||||||
if child:
|
|
||||||
table_actions = table_restrictions.get(child, [])
|
|
||||||
if self.action_checks.intersection(table_actions):
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
# Parent query should proceed if any child in this database is allowlisted
|
|
||||||
for table_actions in table_restrictions.values():
|
|
||||||
if self.action_checks.intersection(table_actions):
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Parent/child both None: include if any restrictions exist for this action
|
|
||||||
if parent is None and child is None:
|
|
||||||
if self.action_checks.intersection(self.restrictions.get("a", [])):
|
|
||||||
return True
|
|
||||||
if self.restricted_databases:
|
|
||||||
return True
|
|
||||||
if self.restricted_tables:
|
|
||||||
return True
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
def add_permissions_rule(
|
|
||||||
self,
|
|
||||||
parent: Optional[str],
|
|
||||||
child: Optional[str],
|
|
||||||
permissions_block: Optional[dict],
|
|
||||||
scope_desc: str,
|
|
||||||
) -> None:
|
|
||||||
"""Add a rule from a permissions:{action} block."""
|
|
||||||
if permissions_block is None:
|
|
||||||
return
|
|
||||||
|
|
||||||
action_allow_block = permissions_block.get(self.action)
|
|
||||||
result = self.evaluate_allow_block(action_allow_block)
|
|
||||||
|
|
||||||
self.collector.add(
|
|
||||||
parent=parent,
|
|
||||||
child=child,
|
|
||||||
allow=result,
|
|
||||||
reason=f"config {'allow' if result else 'deny'} {scope_desc}",
|
|
||||||
if_not_none=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
def add_allow_block_rule(
|
|
||||||
self,
|
|
||||||
parent: Optional[str],
|
|
||||||
child: Optional[str],
|
|
||||||
allow_block: Any,
|
|
||||||
scope_desc: str,
|
|
||||||
) -> None:
|
|
||||||
"""
|
|
||||||
Add rules from an allow:{} block.
|
|
||||||
|
|
||||||
For allow blocks, if the block exists but doesn't match the actor,
|
|
||||||
this is treated as a deny. We also handle the restriction-gate logic.
|
|
||||||
"""
|
|
||||||
if allow_block is None:
|
|
||||||
return
|
|
||||||
|
|
||||||
# Skip if resource is not in restriction allowlist
|
|
||||||
if not self.is_in_restriction_allowlist(parent, child):
|
|
||||||
return
|
|
||||||
|
|
||||||
result = self.evaluate_allow_block(allow_block)
|
|
||||||
bool_result = bool(result)
|
|
||||||
|
|
||||||
self.collector.add(
|
|
||||||
parent,
|
|
||||||
child,
|
|
||||||
bool_result,
|
|
||||||
f"config {'allow' if result else 'deny'} {scope_desc}",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Handle restriction-gate: add explicit denies for restricted resources
|
|
||||||
self._add_restriction_gate_denies(parent, child, bool_result, scope_desc)
|
|
||||||
|
|
||||||
def _add_restriction_gate_denies(
|
|
||||||
self,
|
|
||||||
parent: Optional[str],
|
|
||||||
child: Optional[str],
|
|
||||||
is_allowed: bool,
|
|
||||||
scope_desc: str,
|
|
||||||
) -> None:
|
|
||||||
"""
|
|
||||||
When a config rule denies at a higher level, add explicit denies
|
|
||||||
for restricted resources to prevent child-level allows from
|
|
||||||
incorrectly granting access.
|
|
||||||
"""
|
|
||||||
if is_allowed or child is not None or not self.has_restrictions:
|
|
||||||
return
|
|
||||||
|
|
||||||
if not self.action_obj:
|
|
||||||
return
|
|
||||||
|
|
||||||
reason = f"config deny {scope_desc} (restriction gate)"
|
|
||||||
|
|
||||||
if parent is None:
|
|
||||||
# Root-level deny: add denies for all restricted resources
|
|
||||||
if self.action_obj.takes_parent:
|
|
||||||
for db_name in self.restricted_databases:
|
|
||||||
self.collector.add(db_name, None, False, reason)
|
|
||||||
if self.action_obj.takes_child:
|
|
||||||
for db_name, table_name in self.restricted_tables:
|
|
||||||
self.collector.add(db_name, table_name, False, reason)
|
|
||||||
else:
|
|
||||||
# Database-level deny: add denies for tables in that database
|
|
||||||
if self.action_obj.takes_child:
|
|
||||||
for db_name, table_name in self.restricted_tables:
|
|
||||||
if db_name == parent:
|
|
||||||
self.collector.add(db_name, table_name, False, reason)
|
|
||||||
|
|
||||||
def process(self) -> Optional[PermissionSQL]:
|
|
||||||
"""Process all config rules and return combined PermissionSQL."""
|
|
||||||
self._process_root_permissions()
|
|
||||||
self._process_databases()
|
|
||||||
self._process_root_allow_blocks()
|
|
||||||
|
|
||||||
return self.collector.to_permission_sql()
|
|
||||||
|
|
||||||
def _process_root_permissions(self) -> None:
|
|
||||||
"""Process root-level permissions block."""
|
|
||||||
root_perms = self.config.get("permissions") or {}
|
|
||||||
self.add_permissions_rule(
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
root_perms,
|
|
||||||
f"permissions for {self.action}",
|
|
||||||
)
|
|
||||||
|
|
||||||
def _process_databases(self) -> None:
|
|
||||||
"""Process database-level and nested configurations."""
|
|
||||||
databases = self.config.get("databases") or {}
|
|
||||||
|
|
||||||
for db_name, db_config in databases.items():
|
|
||||||
self._process_database(db_name, db_config or {})
|
|
||||||
|
|
||||||
def _process_database(self, db_name: str, db_config: dict) -> None:
|
|
||||||
"""Process a single database's configuration."""
|
|
||||||
# Database-level permissions block
|
|
||||||
db_perms = db_config.get("permissions") or {}
|
|
||||||
self.add_permissions_rule(
|
|
||||||
db_name,
|
|
||||||
None,
|
|
||||||
db_perms,
|
|
||||||
f"permissions for {self.action} on {db_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Process tables
|
|
||||||
for table_name, table_config in (db_config.get("tables") or {}).items():
|
|
||||||
self._process_table(db_name, table_name, table_config or {})
|
|
||||||
|
|
||||||
# Process queries
|
|
||||||
for query_name, query_config in (db_config.get("queries") or {}).items():
|
|
||||||
self._process_query(db_name, query_name, query_config)
|
|
||||||
|
|
||||||
# Database-level allow blocks
|
|
||||||
self._process_database_allow_blocks(db_name, db_config)
|
|
||||||
|
|
||||||
def _process_table(
|
|
||||||
self,
|
|
||||||
db_name: str,
|
|
||||||
table_name: str,
|
|
||||||
table_config: dict,
|
|
||||||
) -> None:
|
|
||||||
"""Process a single table's configuration."""
|
|
||||||
# Table-level permissions block
|
|
||||||
table_perms = table_config.get("permissions") or {}
|
|
||||||
self.add_permissions_rule(
|
|
||||||
db_name,
|
|
||||||
table_name,
|
|
||||||
table_perms,
|
|
||||||
f"permissions for {self.action} on {db_name}/{table_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Table-level allow block (for view-table)
|
|
||||||
if self.action == "view-table":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
db_name,
|
|
||||||
table_name,
|
|
||||||
table_config.get("allow"),
|
|
||||||
f"allow for {self.action} on {db_name}/{table_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
def _process_query(
|
|
||||||
self,
|
|
||||||
db_name: str,
|
|
||||||
query_name: str,
|
|
||||||
query_config: Any,
|
|
||||||
) -> None:
|
|
||||||
"""Process a single query's configuration."""
|
|
||||||
# Query config can be a string (just SQL) or dict
|
|
||||||
if not isinstance(query_config, dict):
|
|
||||||
return
|
|
||||||
|
|
||||||
# Query-level permissions block
|
|
||||||
query_perms = query_config.get("permissions") or {}
|
|
||||||
self.add_permissions_rule(
|
|
||||||
db_name,
|
|
||||||
query_name,
|
|
||||||
query_perms,
|
|
||||||
f"permissions for {self.action} on {db_name}/{query_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Query-level allow block (for view-query)
|
|
||||||
if self.action == "view-query":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
db_name,
|
|
||||||
query_name,
|
|
||||||
query_config.get("allow"),
|
|
||||||
f"allow for {self.action} on {db_name}/{query_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
def _process_database_allow_blocks(
|
|
||||||
self,
|
|
||||||
db_name: str,
|
|
||||||
db_config: dict,
|
|
||||||
) -> None:
|
|
||||||
"""Process database-level allow/allow_sql blocks."""
|
|
||||||
# view-database allow block
|
|
||||||
if self.action == "view-database":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
db_name,
|
|
||||||
None,
|
|
||||||
db_config.get("allow"),
|
|
||||||
f"allow for {self.action} on {db_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
# execute-sql allow_sql block
|
|
||||||
if self.action == "execute-sql":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
db_name,
|
|
||||||
None,
|
|
||||||
db_config.get("allow_sql"),
|
|
||||||
f"allow_sql for {db_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
# view-table uses database-level allow for inheritance
|
|
||||||
if self.action == "view-table":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
db_name,
|
|
||||||
None,
|
|
||||||
db_config.get("allow"),
|
|
||||||
f"allow for {self.action} on {db_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
# view-query uses database-level allow for inheritance
|
|
||||||
if self.action == "view-query":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
db_name,
|
|
||||||
None,
|
|
||||||
db_config.get("allow"),
|
|
||||||
f"allow for {self.action} on {db_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
def _process_root_allow_blocks(self) -> None:
|
|
||||||
"""Process root-level allow/allow_sql blocks."""
|
|
||||||
root_allow = self.config.get("allow")
|
|
||||||
|
|
||||||
if self.action == "view-instance":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
root_allow,
|
|
||||||
"allow for view-instance",
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.action == "view-database":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
root_allow,
|
|
||||||
"allow for view-database",
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.action == "view-table":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
root_allow,
|
|
||||||
"allow for view-table",
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.action == "view-query":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
root_allow,
|
|
||||||
"allow for view-query",
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.action == "execute-sql":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
self.config.get("allow_sql"),
|
|
||||||
"allow_sql",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@hookimpl(specname="permission_resources_sql")
|
|
||||||
async def config_permissions_sql(
|
|
||||||
datasette: "Datasette",
|
|
||||||
actor: Optional[dict],
|
|
||||||
action: str,
|
|
||||||
) -> Optional[List[PermissionSQL]]:
|
|
||||||
"""
|
|
||||||
Apply permission rules from datasette.yaml configuration.
|
|
||||||
|
|
||||||
This processes:
|
|
||||||
- permissions: blocks at root, database, table, and query levels
|
|
||||||
- allow: blocks for view-* actions
|
|
||||||
- allow_sql: blocks for execute-sql action
|
|
||||||
"""
|
|
||||||
processor = ConfigPermissionProcessor(datasette, actor, action)
|
|
||||||
result = processor.process()
|
|
||||||
|
|
||||||
if result is None:
|
|
||||||
return []
|
|
||||||
|
|
||||||
return [result]
|
|
||||||
|
|
@ -1,70 +0,0 @@
|
||||||
"""
|
|
||||||
Default permission settings for Datasette.
|
|
||||||
|
|
||||||
Provides default allow rules for standard view/execute actions.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from typing import TYPE_CHECKING, Optional
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from datasette.app import Datasette
|
|
||||||
|
|
||||||
from datasette import hookimpl
|
|
||||||
from datasette.permissions import PermissionSQL
|
|
||||||
|
|
||||||
|
|
||||||
# Actions that are allowed by default (unless --default-deny is used)
|
|
||||||
DEFAULT_ALLOW_ACTIONS = frozenset(
|
|
||||||
{
|
|
||||||
"view-instance",
|
|
||||||
"view-database",
|
|
||||||
"view-database-download",
|
|
||||||
"view-table",
|
|
||||||
"view-query",
|
|
||||||
"execute-sql",
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@hookimpl(specname="permission_resources_sql")
|
|
||||||
async def default_allow_sql_check(
|
|
||||||
datasette: "Datasette",
|
|
||||||
actor: Optional[dict],
|
|
||||||
action: str,
|
|
||||||
) -> Optional[PermissionSQL]:
|
|
||||||
"""
|
|
||||||
Enforce the default_allow_sql setting.
|
|
||||||
|
|
||||||
When default_allow_sql is false (the default), execute-sql is denied
|
|
||||||
unless explicitly allowed by config or other rules.
|
|
||||||
"""
|
|
||||||
if action == "execute-sql":
|
|
||||||
if not datasette.setting("default_allow_sql"):
|
|
||||||
return PermissionSQL.deny(reason="default_allow_sql is false")
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
@hookimpl(specname="permission_resources_sql")
|
|
||||||
async def default_action_permissions_sql(
|
|
||||||
datasette: "Datasette",
|
|
||||||
actor: Optional[dict],
|
|
||||||
action: str,
|
|
||||||
) -> Optional[PermissionSQL]:
|
|
||||||
"""
|
|
||||||
Provide default allow rules for standard view/execute actions.
|
|
||||||
|
|
||||||
These defaults are skipped when datasette is started with --default-deny.
|
|
||||||
The restriction_sql mechanism (from actor_restrictions_sql) will still
|
|
||||||
filter these results if the actor has restrictions.
|
|
||||||
"""
|
|
||||||
if datasette.default_deny:
|
|
||||||
return None
|
|
||||||
|
|
||||||
if action in DEFAULT_ALLOW_ACTIONS:
|
|
||||||
reason = f"default allow for {action}".replace("'", "''")
|
|
||||||
return PermissionSQL.allow(reason=reason)
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
@ -1,85 +0,0 @@
|
||||||
"""
|
|
||||||
Shared helper utilities for default permission implementations.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from typing import TYPE_CHECKING, List, Optional, Set
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from datasette.app import Datasette
|
|
||||||
|
|
||||||
from datasette.permissions import PermissionSQL
|
|
||||||
|
|
||||||
|
|
||||||
def get_action_name_variants(datasette: "Datasette", action: str) -> Set[str]:
|
|
||||||
"""
|
|
||||||
Get all name variants for an action (full name and abbreviation).
|
|
||||||
|
|
||||||
Example:
|
|
||||||
get_action_name_variants(ds, "view-table") -> {"view-table", "vt"}
|
|
||||||
"""
|
|
||||||
variants = {action}
|
|
||||||
action_obj = datasette.actions.get(action)
|
|
||||||
if action_obj and action_obj.abbr:
|
|
||||||
variants.add(action_obj.abbr)
|
|
||||||
return variants
|
|
||||||
|
|
||||||
|
|
||||||
def action_in_list(datasette: "Datasette", action: str, action_list: list) -> bool:
|
|
||||||
"""Check if an action (or its abbreviation) is in a list."""
|
|
||||||
return bool(get_action_name_variants(datasette, action).intersection(action_list))
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class PermissionRow:
|
|
||||||
"""A single permission rule row."""
|
|
||||||
|
|
||||||
parent: Optional[str]
|
|
||||||
child: Optional[str]
|
|
||||||
allow: bool
|
|
||||||
reason: str
|
|
||||||
|
|
||||||
|
|
||||||
class PermissionRowCollector:
|
|
||||||
"""Collects permission rows and converts them to PermissionSQL."""
|
|
||||||
|
|
||||||
def __init__(self, prefix: str = "row"):
|
|
||||||
self.rows: List[PermissionRow] = []
|
|
||||||
self.prefix = prefix
|
|
||||||
|
|
||||||
def add(
|
|
||||||
self,
|
|
||||||
parent: Optional[str],
|
|
||||||
child: Optional[str],
|
|
||||||
allow: Optional[bool],
|
|
||||||
reason: str,
|
|
||||||
if_not_none: bool = False,
|
|
||||||
) -> None:
|
|
||||||
"""Add a permission row. If if_not_none=True, only add if allow is not None."""
|
|
||||||
if if_not_none and allow is None:
|
|
||||||
return
|
|
||||||
self.rows.append(PermissionRow(parent, child, allow, reason))
|
|
||||||
|
|
||||||
def to_permission_sql(self) -> Optional[PermissionSQL]:
|
|
||||||
"""Convert collected rows to a PermissionSQL object."""
|
|
||||||
if not self.rows:
|
|
||||||
return None
|
|
||||||
|
|
||||||
parts = []
|
|
||||||
params = {}
|
|
||||||
|
|
||||||
for idx, row in enumerate(self.rows):
|
|
||||||
key = f"{self.prefix}_{idx}"
|
|
||||||
parts.append(
|
|
||||||
f"SELECT :{key}_parent AS parent, :{key}_child AS child, "
|
|
||||||
f":{key}_allow AS allow, :{key}_reason AS reason"
|
|
||||||
)
|
|
||||||
params[f"{key}_parent"] = row.parent
|
|
||||||
params[f"{key}_child"] = row.child
|
|
||||||
params[f"{key}_allow"] = 1 if row.allow else 0
|
|
||||||
params[f"{key}_reason"] = row.reason
|
|
||||||
|
|
||||||
sql = "\nUNION ALL\n".join(parts)
|
|
||||||
return PermissionSQL(sql=sql, params=params)
|
|
||||||
|
|
@ -1,195 +0,0 @@
|
||||||
"""
|
|
||||||
Actor restriction handling for Datasette permissions.
|
|
||||||
|
|
||||||
This module handles the _r (restrictions) key in actor dictionaries, which
|
|
||||||
contains allowlists of resources the actor can access.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from typing import TYPE_CHECKING, List, Optional, Set, Tuple
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from datasette.app import Datasette
|
|
||||||
|
|
||||||
from datasette import hookimpl
|
|
||||||
from datasette.permissions import PermissionSQL
|
|
||||||
|
|
||||||
from .helpers import action_in_list, get_action_name_variants
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ActorRestrictions:
|
|
||||||
"""Parsed actor restrictions from the _r key."""
|
|
||||||
|
|
||||||
global_actions: List[str] # _r.a - globally allowed actions
|
|
||||||
database_actions: dict # _r.d - {db_name: [actions]}
|
|
||||||
table_actions: dict # _r.r - {db_name: {table: [actions]}}
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def from_actor(cls, actor: Optional[dict]) -> Optional["ActorRestrictions"]:
|
|
||||||
"""Parse restrictions from actor dict. Returns None if no restrictions."""
|
|
||||||
if not actor:
|
|
||||||
return None
|
|
||||||
assert isinstance(actor, dict), "actor must be a dictionary"
|
|
||||||
|
|
||||||
restrictions = actor.get("_r")
|
|
||||||
if restrictions is None:
|
|
||||||
return None
|
|
||||||
|
|
||||||
return cls(
|
|
||||||
global_actions=restrictions.get("a", []),
|
|
||||||
database_actions=restrictions.get("d", {}),
|
|
||||||
table_actions=restrictions.get("r", {}),
|
|
||||||
)
|
|
||||||
|
|
||||||
def is_action_globally_allowed(self, datasette: "Datasette", action: str) -> bool:
|
|
||||||
"""Check if action is in the global allowlist."""
|
|
||||||
return action_in_list(datasette, action, self.global_actions)
|
|
||||||
|
|
||||||
def get_allowed_databases(self, datasette: "Datasette", action: str) -> Set[str]:
|
|
||||||
"""Get database names where this action is allowed."""
|
|
||||||
allowed = set()
|
|
||||||
for db_name, db_actions in self.database_actions.items():
|
|
||||||
if action_in_list(datasette, action, db_actions):
|
|
||||||
allowed.add(db_name)
|
|
||||||
return allowed
|
|
||||||
|
|
||||||
def get_allowed_tables(
|
|
||||||
self, datasette: "Datasette", action: str
|
|
||||||
) -> Set[Tuple[str, str]]:
|
|
||||||
"""Get (database, table) pairs where this action is allowed."""
|
|
||||||
allowed = set()
|
|
||||||
for db_name, tables in self.table_actions.items():
|
|
||||||
for table_name, table_actions in tables.items():
|
|
||||||
if action_in_list(datasette, action, table_actions):
|
|
||||||
allowed.add((db_name, table_name))
|
|
||||||
return allowed
|
|
||||||
|
|
||||||
|
|
||||||
@hookimpl(specname="permission_resources_sql")
|
|
||||||
async def actor_restrictions_sql(
|
|
||||||
datasette: "Datasette",
|
|
||||||
actor: Optional[dict],
|
|
||||||
action: str,
|
|
||||||
) -> Optional[List[PermissionSQL]]:
|
|
||||||
"""
|
|
||||||
Handle actor restriction-based permission rules.
|
|
||||||
|
|
||||||
When an actor has an "_r" key, it contains an allowlist of resources they
|
|
||||||
can access. This function returns restriction_sql that filters the final
|
|
||||||
results to only include resources in that allowlist.
|
|
||||||
|
|
||||||
The _r structure:
|
|
||||||
{
|
|
||||||
"a": ["vi", "pd"], # Global actions allowed
|
|
||||||
"d": {"mydb": ["vt", "es"]}, # Database-level actions
|
|
||||||
"r": {"mydb": {"users": ["vt"]}} # Table-level actions
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
if not actor:
|
|
||||||
return None
|
|
||||||
|
|
||||||
restrictions = ActorRestrictions.from_actor(actor)
|
|
||||||
|
|
||||||
if restrictions is None:
|
|
||||||
# No restrictions - all resources allowed
|
|
||||||
return []
|
|
||||||
|
|
||||||
# If globally allowed, no filtering needed
|
|
||||||
if restrictions.is_action_globally_allowed(datasette, action):
|
|
||||||
return []
|
|
||||||
|
|
||||||
# Build restriction SQL
|
|
||||||
allowed_dbs = restrictions.get_allowed_databases(datasette, action)
|
|
||||||
allowed_tables = restrictions.get_allowed_tables(datasette, action)
|
|
||||||
|
|
||||||
# If nothing is allowed for this action, return empty-set restriction
|
|
||||||
if not allowed_dbs and not allowed_tables:
|
|
||||||
return [
|
|
||||||
PermissionSQL(
|
|
||||||
params={"deny": f"actor restrictions: {action} not in allowlist"},
|
|
||||||
restriction_sql="SELECT NULL AS parent, NULL AS child WHERE 0",
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
# Build UNION of allowed resources
|
|
||||||
selects = []
|
|
||||||
params = {}
|
|
||||||
counter = 0
|
|
||||||
|
|
||||||
# Database-level entries (parent, NULL) - allows all children
|
|
||||||
for db_name in allowed_dbs:
|
|
||||||
key = f"restr_{counter}"
|
|
||||||
counter += 1
|
|
||||||
selects.append(f"SELECT :{key}_parent AS parent, NULL AS child")
|
|
||||||
params[f"{key}_parent"] = db_name
|
|
||||||
|
|
||||||
# Table-level entries (parent, child)
|
|
||||||
for db_name, table_name in allowed_tables:
|
|
||||||
key = f"restr_{counter}"
|
|
||||||
counter += 1
|
|
||||||
selects.append(f"SELECT :{key}_parent AS parent, :{key}_child AS child")
|
|
||||||
params[f"{key}_parent"] = db_name
|
|
||||||
params[f"{key}_child"] = table_name
|
|
||||||
|
|
||||||
restriction_sql = "\nUNION ALL\n".join(selects)
|
|
||||||
|
|
||||||
return [PermissionSQL(params=params, restriction_sql=restriction_sql)]
|
|
||||||
|
|
||||||
|
|
||||||
def restrictions_allow_action(
|
|
||||||
datasette: "Datasette",
|
|
||||||
restrictions: dict,
|
|
||||||
action: str,
|
|
||||||
resource: Optional[str | Tuple[str, str]],
|
|
||||||
) -> bool:
|
|
||||||
"""
|
|
||||||
Check if restrictions allow the requested action on the requested resource.
|
|
||||||
|
|
||||||
This is a synchronous utility function for use by other code that needs
|
|
||||||
to quickly check restriction allowlists.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
datasette: The Datasette instance
|
|
||||||
restrictions: The _r dict from an actor
|
|
||||||
action: The action name to check
|
|
||||||
resource: None for global, str for database, (db, table) tuple for table
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
True if allowed, False if denied
|
|
||||||
"""
|
|
||||||
# Does this action have an abbreviation?
|
|
||||||
to_check = get_action_name_variants(datasette, action)
|
|
||||||
|
|
||||||
# Check global level (any resource)
|
|
||||||
all_allowed = restrictions.get("a")
|
|
||||||
if all_allowed is not None:
|
|
||||||
assert isinstance(all_allowed, list)
|
|
||||||
if to_check.intersection(all_allowed):
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Check database level
|
|
||||||
if resource:
|
|
||||||
if isinstance(resource, str):
|
|
||||||
database_name = resource
|
|
||||||
else:
|
|
||||||
database_name = resource[0]
|
|
||||||
database_allowed = restrictions.get("d", {}).get(database_name)
|
|
||||||
if database_allowed is not None:
|
|
||||||
assert isinstance(database_allowed, list)
|
|
||||||
if to_check.intersection(database_allowed):
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Check table/resource level
|
|
||||||
if resource is not None and not isinstance(resource, str) and len(resource) == 2:
|
|
||||||
database, table = resource
|
|
||||||
table_allowed = restrictions.get("r", {}).get(database, {}).get(table)
|
|
||||||
if table_allowed is not None:
|
|
||||||
assert isinstance(table_allowed, list)
|
|
||||||
if to_check.intersection(table_allowed):
|
|
||||||
return True
|
|
||||||
|
|
||||||
# This action is not explicitly allowed, so reject it
|
|
||||||
return False
|
|
||||||
|
|
@ -1,29 +0,0 @@
|
||||||
"""
|
|
||||||
Root user permission handling for Datasette.
|
|
||||||
|
|
||||||
Grants full permissions to the root user when --root flag is used.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from typing import TYPE_CHECKING, Optional
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from datasette.app import Datasette
|
|
||||||
|
|
||||||
from datasette import hookimpl
|
|
||||||
from datasette.permissions import PermissionSQL
|
|
||||||
|
|
||||||
|
|
||||||
@hookimpl(specname="permission_resources_sql")
|
|
||||||
async def root_user_permissions_sql(
|
|
||||||
datasette: "Datasette",
|
|
||||||
actor: Optional[dict],
|
|
||||||
) -> Optional[PermissionSQL]:
|
|
||||||
"""
|
|
||||||
Grant root user full permissions when --root flag is used.
|
|
||||||
"""
|
|
||||||
if not datasette.root_enabled:
|
|
||||||
return None
|
|
||||||
if actor is not None and actor.get("id") == "root":
|
|
||||||
return PermissionSQL.allow(reason="root user")
|
|
||||||
|
|
@ -1,95 +0,0 @@
|
||||||
"""
|
|
||||||
Token authentication for Datasette.
|
|
||||||
|
|
||||||
Handles signed API tokens (dstok_ prefix).
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import time
|
|
||||||
from typing import TYPE_CHECKING, Optional
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from datasette.app import Datasette
|
|
||||||
|
|
||||||
import itsdangerous
|
|
||||||
|
|
||||||
from datasette import hookimpl
|
|
||||||
|
|
||||||
|
|
||||||
@hookimpl(specname="actor_from_request")
|
|
||||||
def actor_from_signed_api_token(datasette: "Datasette", request) -> Optional[dict]:
|
|
||||||
"""
|
|
||||||
Authenticate requests using signed API tokens (dstok_ prefix).
|
|
||||||
|
|
||||||
Token structure (signed JSON):
|
|
||||||
{
|
|
||||||
"a": "actor_id", # Actor ID
|
|
||||||
"t": 1234567890, # Timestamp (Unix epoch)
|
|
||||||
"d": 3600, # Optional: Duration in seconds
|
|
||||||
"_r": {...} # Optional: Restrictions
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
prefix = "dstok_"
|
|
||||||
|
|
||||||
# Check if tokens are enabled
|
|
||||||
if not datasette.setting("allow_signed_tokens"):
|
|
||||||
return None
|
|
||||||
|
|
||||||
max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl")
|
|
||||||
|
|
||||||
# Get authorization header
|
|
||||||
authorization = request.headers.get("authorization")
|
|
||||||
if not authorization:
|
|
||||||
return None
|
|
||||||
if not authorization.startswith("Bearer "):
|
|
||||||
return None
|
|
||||||
|
|
||||||
token = authorization[len("Bearer ") :]
|
|
||||||
if not token.startswith(prefix):
|
|
||||||
return None
|
|
||||||
|
|
||||||
# Remove prefix and verify signature
|
|
||||||
token = token[len(prefix) :]
|
|
||||||
try:
|
|
||||||
decoded = datasette.unsign(token, namespace="token")
|
|
||||||
except itsdangerous.BadSignature:
|
|
||||||
return None
|
|
||||||
|
|
||||||
# Validate timestamp
|
|
||||||
if "t" not in decoded:
|
|
||||||
return None
|
|
||||||
created = decoded["t"]
|
|
||||||
if not isinstance(created, int):
|
|
||||||
return None
|
|
||||||
|
|
||||||
# Handle duration/expiry
|
|
||||||
duration = decoded.get("d")
|
|
||||||
if duration is not None and not isinstance(duration, int):
|
|
||||||
return None
|
|
||||||
|
|
||||||
# Apply max TTL if configured
|
|
||||||
if (duration is None and max_signed_tokens_ttl) or (
|
|
||||||
duration is not None
|
|
||||||
and max_signed_tokens_ttl
|
|
||||||
and duration > max_signed_tokens_ttl
|
|
||||||
):
|
|
||||||
duration = max_signed_tokens_ttl
|
|
||||||
|
|
||||||
# Check expiry
|
|
||||||
if duration:
|
|
||||||
if time.time() - created > duration:
|
|
||||||
return None
|
|
||||||
|
|
||||||
# Build actor dict
|
|
||||||
actor = {"id": decoded["a"], "token": "dstok"}
|
|
||||||
|
|
||||||
# Copy restrictions if present
|
|
||||||
if "_r" in decoded:
|
|
||||||
actor["_r"] = decoded["_r"]
|
|
||||||
|
|
||||||
# Add expiry timestamp if applicable
|
|
||||||
if duration:
|
|
||||||
actor["token_expires"] = created + duration
|
|
||||||
|
|
||||||
return actor
|
|
||||||
|
|
@ -1,33 +1,6 @@
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing import Any, NamedTuple
|
from typing import Any, Dict, NamedTuple
|
||||||
import contextvars
|
|
||||||
|
|
||||||
|
|
||||||
# Context variable to track when permission checks should be skipped
|
|
||||||
_skip_permission_checks = contextvars.ContextVar(
|
|
||||||
"skip_permission_checks", default=False
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class SkipPermissions:
|
|
||||||
"""Context manager to temporarily skip permission checks.
|
|
||||||
|
|
||||||
This is not a stable API and may change in future releases.
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
with SkipPermissions():
|
|
||||||
# Permission checks are skipped within this block
|
|
||||||
response = await datasette.client.get("/protected")
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __enter__(self):
|
|
||||||
self.token = _skip_permission_checks.set(True)
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
|
||||||
_skip_permission_checks.reset(self.token)
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
class Resource(ABC):
|
class Resource(ABC):
|
||||||
|
|
@ -41,11 +14,7 @@ class Resource(ABC):
|
||||||
|
|
||||||
# Class-level metadata (subclasses must define these)
|
# Class-level metadata (subclasses must define these)
|
||||||
name: str = None # e.g., "table", "database", "model"
|
name: str = None # e.g., "table", "database", "model"
|
||||||
parent_class: type["Resource"] | None = None # e.g., DatabaseResource for tables
|
parent_name: str | None = None # e.g., "database" for tables
|
||||||
|
|
||||||
# Instance-level optional extra attributes
|
|
||||||
reasons: list[str] | None = None
|
|
||||||
include_reasons: bool | None = None
|
|
||||||
|
|
||||||
def __init__(self, parent: str | None = None, child: str | None = None):
|
def __init__(self, parent: str | None = None, child: str | None = None):
|
||||||
"""
|
"""
|
||||||
|
|
@ -81,29 +50,6 @@ class Resource(ABC):
|
||||||
def private(self, value: bool):
|
def private(self, value: bool):
|
||||||
self._private = value
|
self._private = value
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def __init_subclass__(cls):
|
|
||||||
"""
|
|
||||||
Validate resource hierarchy doesn't exceed 2 levels.
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
ValueError: If this resource would create a 3-level hierarchy
|
|
||||||
"""
|
|
||||||
super().__init_subclass__()
|
|
||||||
|
|
||||||
if cls.parent_class is None:
|
|
||||||
return # Top of hierarchy, nothing to validate
|
|
||||||
|
|
||||||
# Check if our parent has a parent - that would create 3 levels
|
|
||||||
if cls.parent_class.parent_class is not None:
|
|
||||||
# We have a parent, and that parent has a parent
|
|
||||||
# This creates a 3-level hierarchy, which is not allowed
|
|
||||||
raise ValueError(
|
|
||||||
f"Resource {cls.__name__} creates a 3-level hierarchy: "
|
|
||||||
f"{cls.parent_class.parent_class.__name__} -> {cls.parent_class.__name__} -> {cls.__name__}. "
|
|
||||||
f"Maximum 2 levels allowed (parent -> child)."
|
|
||||||
)
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def resources_sql(cls) -> str:
|
def resources_sql(cls) -> str:
|
||||||
|
|
@ -122,40 +68,16 @@ class AllowedResource(NamedTuple):
|
||||||
reason: str
|
reason: str
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True, kw_only=True)
|
@dataclass(frozen=True)
|
||||||
class Action:
|
class Action:
|
||||||
name: str
|
name: str
|
||||||
|
abbr: str | None
|
||||||
description: str | None
|
description: str | None
|
||||||
abbr: str | None = None
|
takes_parent: bool
|
||||||
resource_class: type[Resource] | None = None
|
takes_child: bool
|
||||||
|
resource_class: type[Resource]
|
||||||
also_requires: str | None = None # Optional action name that must also be allowed
|
also_requires: str | None = None # Optional action name that must also be allowed
|
||||||
|
|
||||||
@property
|
|
||||||
def takes_parent(self) -> bool:
|
|
||||||
"""
|
|
||||||
Whether this action requires a parent identifier when instantiating its resource.
|
|
||||||
|
|
||||||
Returns False for global-only actions (no resource_class).
|
|
||||||
Returns True for all actions with a resource_class (all resources require a parent identifier).
|
|
||||||
"""
|
|
||||||
return self.resource_class is not None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def takes_child(self) -> bool:
|
|
||||||
"""
|
|
||||||
Whether this action requires a child identifier when instantiating its resource.
|
|
||||||
|
|
||||||
Returns False for global actions (no resource_class).
|
|
||||||
Returns False for parent-level resources (DatabaseResource - parent_class is None).
|
|
||||||
Returns True for child-level resources (TableResource, QueryResource - have a parent_class).
|
|
||||||
"""
|
|
||||||
if self.resource_class is None:
|
|
||||||
return False
|
|
||||||
return self.resource_class.parent_class is not None
|
|
||||||
|
|
||||||
|
|
||||||
_reason_id = 1
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class PermissionSQL:
|
class PermissionSQL:
|
||||||
|
|
@ -165,34 +87,11 @@ class PermissionSQL:
|
||||||
child TEXT NULL,
|
child TEXT NULL,
|
||||||
allow INTEGER, -- 1 allow, 0 deny
|
allow INTEGER, -- 1 allow, 0 deny
|
||||||
reason TEXT
|
reason TEXT
|
||||||
|
|
||||||
For restriction-only plugins, sql can be None and only restriction_sql is provided.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
sql: str | None = (
|
source: str # identifier used for auditing (e.g., plugin name)
|
||||||
None # SQL that SELECTs the 4 columns above (can be None for restriction-only)
|
sql: str # SQL that SELECTs the 4 columns above
|
||||||
)
|
params: Dict[str, Any] # bound params for the SQL (values only; no ':' prefix)
|
||||||
params: dict[str, Any] | None = (
|
|
||||||
None # bound params for the SQL (values only; no ':' prefix)
|
|
||||||
)
|
|
||||||
source: str | None = None # System will set this to the plugin name
|
|
||||||
restriction_sql: str | None = (
|
|
||||||
None # Optional SQL that returns (parent, child) for restriction filtering
|
|
||||||
)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def allow(cls, reason: str, _allow: bool = True) -> "PermissionSQL":
|
|
||||||
global _reason_id
|
|
||||||
i = _reason_id
|
|
||||||
_reason_id += 1
|
|
||||||
return cls(
|
|
||||||
sql=f"SELECT NULL AS parent, NULL AS child, {1 if _allow else 0} AS allow, :reason_{i} AS reason",
|
|
||||||
params={f"reason_{i}": reason},
|
|
||||||
)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def deny(cls, reason: str) -> "PermissionSQL":
|
|
||||||
return cls.allow(reason=reason, _allow=False)
|
|
||||||
|
|
||||||
|
|
||||||
# This is obsolete, replaced by Action and ResourceType
|
# This is obsolete, replaced by Action and ResourceType
|
||||||
|
|
|
||||||
|
|
@ -94,24 +94,21 @@ def get_plugins():
|
||||||
for plugin in pm.get_plugins():
|
for plugin in pm.get_plugins():
|
||||||
static_path = None
|
static_path = None
|
||||||
templates_path = None
|
templates_path = None
|
||||||
plugin_name = (
|
if plugin.__name__ not in DEFAULT_PLUGINS:
|
||||||
plugin.__name__
|
|
||||||
if hasattr(plugin, "__name__")
|
|
||||||
else plugin.__class__.__name__
|
|
||||||
)
|
|
||||||
if plugin_name not in DEFAULT_PLUGINS:
|
|
||||||
try:
|
try:
|
||||||
if (importlib_resources.files(plugin_name) / "static").is_dir():
|
if (importlib_resources.files(plugin.__name__) / "static").is_dir():
|
||||||
static_path = str(importlib_resources.files(plugin_name) / "static")
|
static_path = str(
|
||||||
if (importlib_resources.files(plugin_name) / "templates").is_dir():
|
importlib_resources.files(plugin.__name__) / "static"
|
||||||
|
)
|
||||||
|
if (importlib_resources.files(plugin.__name__) / "templates").is_dir():
|
||||||
templates_path = str(
|
templates_path = str(
|
||||||
importlib_resources.files(plugin_name) / "templates"
|
importlib_resources.files(plugin.__name__) / "templates"
|
||||||
)
|
)
|
||||||
except (TypeError, ModuleNotFoundError):
|
except (TypeError, ModuleNotFoundError):
|
||||||
# Caused by --plugins_dir= plugins
|
# Caused by --plugins_dir= plugins
|
||||||
pass
|
pass
|
||||||
plugin_info = {
|
plugin_info = {
|
||||||
"name": plugin_name,
|
"name": plugin.__name__,
|
||||||
"static_path": static_path,
|
"static_path": static_path,
|
||||||
"templates_path": templates_path,
|
"templates_path": templates_path,
|
||||||
"hooks": [h.name for h in pm.get_hookcallers(plugin)],
|
"hooks": [h.name for h in pm.get_hookcallers(plugin)],
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@ import click
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
from subprocess import CalledProcessError, check_call, check_output
|
from subprocess import check_call, check_output
|
||||||
|
|
||||||
from .common import (
|
from .common import (
|
||||||
add_common_publish_arguments_and_options,
|
add_common_publish_arguments_and_options,
|
||||||
|
|
@ -23,9 +23,7 @@ def publish_subcommand(publish):
|
||||||
help="Application name to use when building",
|
help="Application name to use when building",
|
||||||
)
|
)
|
||||||
@click.option(
|
@click.option(
|
||||||
"--service",
|
"--service", default="", help="Cloud Run service to deploy (or over-write)"
|
||||||
default="",
|
|
||||||
help="Cloud Run service to deploy (or over-write)",
|
|
||||||
)
|
)
|
||||||
@click.option("--spatialite", is_flag=True, help="Enable SpatialLite extension")
|
@click.option("--spatialite", is_flag=True, help="Enable SpatialLite extension")
|
||||||
@click.option(
|
@click.option(
|
||||||
|
|
@ -57,32 +55,13 @@ def publish_subcommand(publish):
|
||||||
@click.option(
|
@click.option(
|
||||||
"--max-instances",
|
"--max-instances",
|
||||||
type=int,
|
type=int,
|
||||||
default=1,
|
help="Maximum Cloud Run instances",
|
||||||
show_default=True,
|
|
||||||
help="Maximum Cloud Run instances (use 0 to remove the limit)",
|
|
||||||
)
|
)
|
||||||
@click.option(
|
@click.option(
|
||||||
"--min-instances",
|
"--min-instances",
|
||||||
type=int,
|
type=int,
|
||||||
help="Minimum Cloud Run instances",
|
help="Minimum Cloud Run instances",
|
||||||
)
|
)
|
||||||
@click.option(
|
|
||||||
"--artifact-repository",
|
|
||||||
default="datasette",
|
|
||||||
show_default=True,
|
|
||||||
help="Artifact Registry repository to store the image",
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--artifact-region",
|
|
||||||
default="us",
|
|
||||||
show_default=True,
|
|
||||||
help="Artifact Registry location (region or multi-region)",
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--artifact-project",
|
|
||||||
default=None,
|
|
||||||
help="Project ID for Artifact Registry (defaults to the active project)",
|
|
||||||
)
|
|
||||||
def cloudrun(
|
def cloudrun(
|
||||||
files,
|
files,
|
||||||
metadata,
|
metadata,
|
||||||
|
|
@ -112,9 +91,6 @@ def publish_subcommand(publish):
|
||||||
apt_get_extras,
|
apt_get_extras,
|
||||||
max_instances,
|
max_instances,
|
||||||
min_instances,
|
min_instances,
|
||||||
artifact_repository,
|
|
||||||
artifact_region,
|
|
||||||
artifact_project,
|
|
||||||
):
|
):
|
||||||
"Publish databases to Datasette running on Cloud Run"
|
"Publish databases to Datasette running on Cloud Run"
|
||||||
fail_if_publish_binary_not_installed(
|
fail_if_publish_binary_not_installed(
|
||||||
|
|
@ -124,21 +100,6 @@ def publish_subcommand(publish):
|
||||||
"gcloud config get-value project", shell=True, universal_newlines=True
|
"gcloud config get-value project", shell=True, universal_newlines=True
|
||||||
).strip()
|
).strip()
|
||||||
|
|
||||||
artifact_project = artifact_project or project
|
|
||||||
|
|
||||||
# Ensure Artifact Registry exists for the target image
|
|
||||||
_ensure_artifact_registry(
|
|
||||||
artifact_project=artifact_project,
|
|
||||||
artifact_region=artifact_region,
|
|
||||||
artifact_repository=artifact_repository,
|
|
||||||
)
|
|
||||||
|
|
||||||
artifact_host = (
|
|
||||||
artifact_region
|
|
||||||
if artifact_region.endswith("-docker.pkg.dev")
|
|
||||||
else f"{artifact_region}-docker.pkg.dev"
|
|
||||||
)
|
|
||||||
|
|
||||||
if not service:
|
if not service:
|
||||||
# Show the user their current services, then prompt for one
|
# Show the user their current services, then prompt for one
|
||||||
click.echo("Please provide a service name for this deployment\n")
|
click.echo("Please provide a service name for this deployment\n")
|
||||||
|
|
@ -156,11 +117,6 @@ def publish_subcommand(publish):
|
||||||
click.echo("")
|
click.echo("")
|
||||||
service = click.prompt("Service name", type=str)
|
service = click.prompt("Service name", type=str)
|
||||||
|
|
||||||
image_id = (
|
|
||||||
f"{artifact_host}/{artifact_project}/"
|
|
||||||
f"{artifact_repository}/datasette-{service}"
|
|
||||||
)
|
|
||||||
|
|
||||||
extra_metadata = {
|
extra_metadata = {
|
||||||
"title": title,
|
"title": title,
|
||||||
"license": license,
|
"license": license,
|
||||||
|
|
@ -217,6 +173,7 @@ def publish_subcommand(publish):
|
||||||
print(fp.read())
|
print(fp.read())
|
||||||
print("\n====================\n")
|
print("\n====================\n")
|
||||||
|
|
||||||
|
image_id = f"gcr.io/{project}/datasette-{service}"
|
||||||
check_call(
|
check_call(
|
||||||
"gcloud builds submit --tag {}{}".format(
|
"gcloud builds submit --tag {}{}".format(
|
||||||
image_id, " --timeout {}".format(timeout) if timeout else ""
|
image_id, " --timeout {}".format(timeout) if timeout else ""
|
||||||
|
|
@ -230,7 +187,7 @@ def publish_subcommand(publish):
|
||||||
("--max-instances", max_instances),
|
("--max-instances", max_instances),
|
||||||
("--min-instances", min_instances),
|
("--min-instances", min_instances),
|
||||||
):
|
):
|
||||||
if value is not None:
|
if value:
|
||||||
extra_deploy_options.append("{} {}".format(option, value))
|
extra_deploy_options.append("{} {}".format(option, value))
|
||||||
check_call(
|
check_call(
|
||||||
"gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}".format(
|
"gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}".format(
|
||||||
|
|
@ -242,52 +199,6 @@ def publish_subcommand(publish):
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _ensure_artifact_registry(artifact_project, artifact_region, artifact_repository):
|
|
||||||
"""Ensure Artifact Registry API is enabled and the repository exists."""
|
|
||||||
|
|
||||||
enable_cmd = (
|
|
||||||
"gcloud services enable artifactregistry.googleapis.com "
|
|
||||||
f"--project {artifact_project} --quiet"
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
check_call(enable_cmd, shell=True)
|
|
||||||
except CalledProcessError as exc:
|
|
||||||
raise click.ClickException(
|
|
||||||
"Failed to enable artifactregistry.googleapis.com. "
|
|
||||||
"Please ensure you have permissions to manage services."
|
|
||||||
) from exc
|
|
||||||
|
|
||||||
describe_cmd = (
|
|
||||||
"gcloud artifacts repositories describe {repo} --project {project} "
|
|
||||||
"--location {location} --quiet"
|
|
||||||
).format(
|
|
||||||
repo=artifact_repository,
|
|
||||||
project=artifact_project,
|
|
||||||
location=artifact_region,
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
check_call(describe_cmd, shell=True)
|
|
||||||
return
|
|
||||||
except CalledProcessError:
|
|
||||||
create_cmd = (
|
|
||||||
"gcloud artifacts repositories create {repo} --repository-format=docker "
|
|
||||||
'--location {location} --project {project} --description "Datasette Cloud Run images" --quiet'
|
|
||||||
).format(
|
|
||||||
repo=artifact_repository,
|
|
||||||
location=artifact_region,
|
|
||||||
project=artifact_project,
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
check_call(create_cmd, shell=True)
|
|
||||||
click.echo(f"Created Artifact Registry repository '{artifact_repository}'")
|
|
||||||
except CalledProcessError as exc:
|
|
||||||
raise click.ClickException(
|
|
||||||
"Failed to create Artifact Registry repository. "
|
|
||||||
"Use --artifact-repository/--artifact-region to point to an existing repo "
|
|
||||||
"or create one manually."
|
|
||||||
) from exc
|
|
||||||
|
|
||||||
|
|
||||||
def get_existing_services():
|
def get_existing_services():
|
||||||
services = json.loads(
|
services = json.loads(
|
||||||
check_output(
|
check_output(
|
||||||
|
|
@ -303,7 +214,6 @@ def get_existing_services():
|
||||||
"url": service["status"]["address"]["url"],
|
"url": service["status"]["address"]["url"],
|
||||||
}
|
}
|
||||||
for service in services
|
for service in services
|
||||||
if "url" in service["status"]
|
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -3,11 +3,25 @@
|
||||||
from datasette.permissions import Resource
|
from datasette.permissions import Resource
|
||||||
|
|
||||||
|
|
||||||
|
class InstanceResource(Resource):
|
||||||
|
"""The Datasette instance itself."""
|
||||||
|
|
||||||
|
name = "instance"
|
||||||
|
parent_name = None
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
super().__init__(parent=None, child=None)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def resources_sql(cls, datasette) -> str:
|
||||||
|
return "SELECT NULL AS parent, NULL AS child"
|
||||||
|
|
||||||
|
|
||||||
class DatabaseResource(Resource):
|
class DatabaseResource(Resource):
|
||||||
"""A database in Datasette."""
|
"""A database in Datasette."""
|
||||||
|
|
||||||
name = "database"
|
name = "database"
|
||||||
parent_class = None # Top of the resource hierarchy
|
parent_name = "instance"
|
||||||
|
|
||||||
def __init__(self, database: str):
|
def __init__(self, database: str):
|
||||||
super().__init__(parent=database, child=None)
|
super().__init__(parent=database, child=None)
|
||||||
|
|
@ -24,7 +38,7 @@ class TableResource(Resource):
|
||||||
"""A table in a database."""
|
"""A table in a database."""
|
||||||
|
|
||||||
name = "table"
|
name = "table"
|
||||||
parent_class = DatabaseResource
|
parent_name = "database"
|
||||||
|
|
||||||
def __init__(self, database: str, table: str):
|
def __init__(self, database: str, table: str):
|
||||||
super().__init__(parent=database, child=table)
|
super().__init__(parent=database, child=table)
|
||||||
|
|
@ -44,7 +58,7 @@ class QueryResource(Resource):
|
||||||
"""A canned query in a database."""
|
"""A canned query in a database."""
|
||||||
|
|
||||||
name = "query"
|
name = "query"
|
||||||
parent_class = DatabaseResource
|
parent_name = "database"
|
||||||
|
|
||||||
def __init__(self, database: str, query: str):
|
def __init__(self, database: str, query: str):
|
||||||
super().__init__(parent=database, child=query)
|
super().__init__(parent=database, child=query)
|
||||||
|
|
|
||||||
|
|
@ -49,6 +49,5 @@
|
||||||
<a href="{{ urls.path('-/allowed') }}{{ query_string }}" {% if current_tab == "allowed" %}class="active"{% endif %}>Allowed</a>
|
<a href="{{ urls.path('-/allowed') }}{{ query_string }}" {% if current_tab == "allowed" %}class="active"{% endif %}>Allowed</a>
|
||||||
<a href="{{ urls.path('-/rules') }}{{ query_string }}" {% if current_tab == "rules" %}class="active"{% endif %}>Rules</a>
|
<a href="{{ urls.path('-/rules') }}{{ query_string }}" {% if current_tab == "rules" %}class="active"{% endif %}>Rules</a>
|
||||||
<a href="{{ urls.path('-/actions') }}" {% if current_tab == "actions" %}class="active"{% endif %}>Actions</a>
|
<a href="{{ urls.path('-/actions') }}" {% if current_tab == "actions" %}class="active"{% endif %}>Actions</a>
|
||||||
<a href="{{ urls.path('-/allow-debug') }}" {% if current_tab == "allow_debug" %}class="active"{% endif %}>Allow debug</a>
|
|
||||||
</nav>
|
</nav>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
|
||||||
|
|
@ -33,9 +33,6 @@ p.message-warning {
|
||||||
|
|
||||||
<h1>Debug allow rules</h1>
|
<h1>Debug allow rules</h1>
|
||||||
|
|
||||||
{% set current_tab = "allow_debug" %}
|
|
||||||
{% include "_permissions_debug_tabs.html" %}
|
|
||||||
|
|
||||||
<p>Use this tool to try out different actor and allow combinations. See <a href="https://docs.datasette.io/en/stable/authentication.html#defining-permissions-with-allow-blocks">Defining permissions with "allow" blocks</a> for documentation.</p>
|
<p>Use this tool to try out different actor and allow combinations. See <a href="https://docs.datasette.io/en/stable/authentication.html#defining-permissions-with-allow-blocks">Defining permissions with "allow" blocks</a> for documentation.</p>
|
||||||
|
|
||||||
<form class="core" action="{{ urls.path('-/allow-debug') }}" method="get" style="margin-bottom: 1em">
|
<form class="core" action="{{ urls.path('-/allow-debug') }}" method="get" style="margin-bottom: 1em">
|
||||||
|
|
|
||||||
|
|
@ -56,7 +56,7 @@
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% if tables %}
|
{% if tables %}
|
||||||
<h2 id="tables">Tables <a style="font-weight: normal; font-size: 0.75em; padding-left: 0.5em;" href="{{ urls.database(database) }}/-/schema">schema</a></h2>
|
<h2 id="tables">Tables</h2>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% for table in tables %}
|
{% for table in tables %}
|
||||||
|
|
|
||||||
|
|
@ -31,7 +31,7 @@
|
||||||
<td><strong>{{ action.name }}</strong></td>
|
<td><strong>{{ action.name }}</strong></td>
|
||||||
<td>{% if action.abbr %}<code>{{ action.abbr }}</code>{% endif %}</td>
|
<td>{% if action.abbr %}<code>{{ action.abbr }}</code>{% endif %}</td>
|
||||||
<td>{{ action.description or "" }}</td>
|
<td>{{ action.description or "" }}</td>
|
||||||
<td>{% if action.resource_class %}<code>{{ action.resource_class }}</code>{% endif %}</td>
|
<td><code>{{ action.resource_class }}</code></td>
|
||||||
<td>{% if action.takes_parent %}✓{% endif %}</td>
|
<td>{% if action.takes_parent %}✓{% endif %}</td>
|
||||||
<td>{% if action.takes_child %}✓{% endif %}</td>
|
<td>{% if action.takes_child %}✓{% endif %}</td>
|
||||||
<td>{% if action.also_requires %}<code>{{ action.also_requires }}</code>{% endif %}</td>
|
<td>{% if action.also_requires %}<code>{{ action.also_requires }}</code>{% endif %}</td>
|
||||||
|
|
|
||||||
|
|
@ -137,7 +137,6 @@ function displayResults(data) {
|
||||||
html += '<th>Resource Path</th>';
|
html += '<th>Resource Path</th>';
|
||||||
html += '<th>Parent</th>';
|
html += '<th>Parent</th>';
|
||||||
html += '<th>Child</th>';
|
html += '<th>Child</th>';
|
||||||
html += '<th>Source Plugin</th>';
|
|
||||||
html += '<th>Reason</th>';
|
html += '<th>Reason</th>';
|
||||||
html += '</tr></thead>';
|
html += '</tr></thead>';
|
||||||
html += '<tbody>';
|
html += '<tbody>';
|
||||||
|
|
@ -153,7 +152,6 @@ function displayResults(data) {
|
||||||
html += `<td><span class="resource-path">${escapeHtml(item.resource || '/')}</span></td>`;
|
html += `<td><span class="resource-path">${escapeHtml(item.resource || '/')}</span></td>`;
|
||||||
html += `<td>${escapeHtml(item.parent || '—')}</td>`;
|
html += `<td>${escapeHtml(item.parent || '—')}</td>`;
|
||||||
html += `<td>${escapeHtml(item.child || '—')}</td>`;
|
html += `<td>${escapeHtml(item.child || '—')}</td>`;
|
||||||
html += `<td>${escapeHtml(item.source_plugin || '—')}</td>`;
|
|
||||||
html += `<td>${escapeHtml(item.reason || '—')}</td>`;
|
html += `<td>${escapeHtml(item.reason || '—')}</td>`;
|
||||||
html += '</tr>';
|
html += '</tr>';
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,41 +0,0 @@
|
||||||
{% extends "base.html" %}
|
|
||||||
|
|
||||||
{% block title %}{% if is_instance %}Schema for all databases{% elif table_name %}Schema for {{ schemas[0].database }}.{{ table_name }}{% else %}Schema for {{ schemas[0].database }}{% endif %}{% endblock %}
|
|
||||||
|
|
||||||
{% block body_class %}schema{% endblock %}
|
|
||||||
|
|
||||||
{% block crumbs %}
|
|
||||||
{% if is_instance %}
|
|
||||||
{{ crumbs.nav(request=request) }}
|
|
||||||
{% elif table_name %}
|
|
||||||
{{ crumbs.nav(request=request, database=schemas[0].database, table=table_name) }}
|
|
||||||
{% else %}
|
|
||||||
{{ crumbs.nav(request=request, database=schemas[0].database) }}
|
|
||||||
{% endif %}
|
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
{% block content %}
|
|
||||||
<div class="page-header">
|
|
||||||
<h1>{% if is_instance %}Schema for all databases{% elif table_name %}Schema for {{ table_name }}{% else %}Schema for {{ schemas[0].database }}{% endif %}</h1>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{% for item in schemas %}
|
|
||||||
{% if is_instance %}
|
|
||||||
<h2>{{ item.database }}</h2>
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if item.schema %}
|
|
||||||
<pre style="background-color: #f5f5f5; padding: 1em; overflow-x: auto; border: 1px solid #ddd; border-radius: 4px;"><code>{{ item.schema }}</code></pre>
|
|
||||||
{% else %}
|
|
||||||
<p><em>No schema available for this database.</em></p>
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if not loop.last %}
|
|
||||||
<hr style="margin: 2em 0;">
|
|
||||||
{% endif %}
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
{% if not schemas %}
|
|
||||||
<p><em>No databases with viewable schemas found.</em></p>
|
|
||||||
{% endif %}
|
|
||||||
{% endblock %}
|
|
||||||
|
|
@ -4,7 +4,6 @@ import aiofiles
|
||||||
import click
|
import click
|
||||||
from collections import OrderedDict, namedtuple, Counter
|
from collections import OrderedDict, namedtuple, Counter
|
||||||
import copy
|
import copy
|
||||||
import dataclasses
|
|
||||||
import base64
|
import base64
|
||||||
import hashlib
|
import hashlib
|
||||||
import inspect
|
import inspect
|
||||||
|
|
@ -28,58 +27,6 @@ from .sqlite import sqlite3, supports_table_xinfo
|
||||||
|
|
||||||
if typing.TYPE_CHECKING:
|
if typing.TYPE_CHECKING:
|
||||||
from datasette.database import Database
|
from datasette.database import Database
|
||||||
from datasette.permissions import Resource
|
|
||||||
|
|
||||||
|
|
||||||
@dataclasses.dataclass
|
|
||||||
class PaginatedResources:
|
|
||||||
"""Paginated results from allowed_resources query."""
|
|
||||||
|
|
||||||
resources: List["Resource"]
|
|
||||||
next: str | None # Keyset token for next page (None if no more results)
|
|
||||||
_datasette: typing.Any = dataclasses.field(default=None, repr=False)
|
|
||||||
_action: str = dataclasses.field(default=None, repr=False)
|
|
||||||
_actor: typing.Any = dataclasses.field(default=None, repr=False)
|
|
||||||
_parent: str | None = dataclasses.field(default=None, repr=False)
|
|
||||||
_include_is_private: bool = dataclasses.field(default=False, repr=False)
|
|
||||||
_include_reasons: bool = dataclasses.field(default=False, repr=False)
|
|
||||||
_limit: int = dataclasses.field(default=100, repr=False)
|
|
||||||
|
|
||||||
async def all(self):
|
|
||||||
"""
|
|
||||||
Async generator that yields all resources across all pages.
|
|
||||||
|
|
||||||
Automatically handles pagination under the hood. This is useful when you need
|
|
||||||
to iterate through all results without manually managing pagination tokens.
|
|
||||||
|
|
||||||
Yields:
|
|
||||||
Resource objects one at a time
|
|
||||||
|
|
||||||
Example:
|
|
||||||
page = await datasette.allowed_resources("view-table", actor)
|
|
||||||
async for table in page.all():
|
|
||||||
print(f"{table.parent}/{table.child}")
|
|
||||||
"""
|
|
||||||
# Yield all resources from current page
|
|
||||||
for resource in self.resources:
|
|
||||||
yield resource
|
|
||||||
|
|
||||||
# Continue fetching subsequent pages if there are more
|
|
||||||
next_token = self.next
|
|
||||||
while next_token:
|
|
||||||
page = await self._datasette.allowed_resources(
|
|
||||||
self._action,
|
|
||||||
self._actor,
|
|
||||||
parent=self._parent,
|
|
||||||
include_is_private=self._include_is_private,
|
|
||||||
include_reasons=self._include_reasons,
|
|
||||||
limit=self._limit,
|
|
||||||
next=next_token,
|
|
||||||
)
|
|
||||||
for resource in page.resources:
|
|
||||||
yield resource
|
|
||||||
next_token = page.next
|
|
||||||
|
|
||||||
|
|
||||||
# From https://www.sqlite.org/lang_keywords.html
|
# From https://www.sqlite.org/lang_keywords.html
|
||||||
reserved_words = set(
|
reserved_words = set(
|
||||||
|
|
|
||||||
|
|
@ -23,12 +23,42 @@ The core pattern is:
|
||||||
|
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
from datasette.utils.permissions import gather_permission_sql_from_hooks
|
from datasette.plugins import pm
|
||||||
|
from datasette.utils import await_me_maybe
|
||||||
|
from datasette.permissions import PermissionSQL
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from datasette.app import Datasette
|
from datasette.app import Datasette
|
||||||
|
|
||||||
|
|
||||||
|
def _process_permission_results(results) -> tuple[list[str], dict]:
|
||||||
|
"""
|
||||||
|
Process plugin permission results into SQL fragments and parameters.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
results: Results from permission_resources_sql hook (may be list or single PermissionSQL)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A tuple of (list of SQL strings, dict of parameters)
|
||||||
|
"""
|
||||||
|
rule_sqls = []
|
||||||
|
all_params = {}
|
||||||
|
|
||||||
|
if results is None:
|
||||||
|
return rule_sqls, all_params
|
||||||
|
|
||||||
|
if isinstance(results, list):
|
||||||
|
for plugin_sql in results:
|
||||||
|
if isinstance(plugin_sql, PermissionSQL):
|
||||||
|
rule_sqls.append(plugin_sql.sql)
|
||||||
|
all_params.update(plugin_sql.params)
|
||||||
|
elif isinstance(results, PermissionSQL):
|
||||||
|
rule_sqls.append(results.sql)
|
||||||
|
all_params.update(results.params)
|
||||||
|
|
||||||
|
return rule_sqls, all_params
|
||||||
|
|
||||||
|
|
||||||
async def build_allowed_resources_sql(
|
async def build_allowed_resources_sql(
|
||||||
datasette: "Datasette",
|
datasette: "Datasette",
|
||||||
actor: dict | None,
|
actor: dict | None,
|
||||||
|
|
@ -149,44 +179,22 @@ async def _build_single_action_sql(
|
||||||
# Get base resources SQL from the resource class
|
# Get base resources SQL from the resource class
|
||||||
base_resources_sql = await action_obj.resource_class.resources_sql(datasette)
|
base_resources_sql = await action_obj.resource_class.resources_sql(datasette)
|
||||||
|
|
||||||
permission_sqls = await gather_permission_sql_from_hooks(
|
# Get all permission rule fragments from plugins via the hook
|
||||||
|
rule_results = pm.hook.permission_resources_sql(
|
||||||
datasette=datasette,
|
datasette=datasette,
|
||||||
actor=actor,
|
actor=actor,
|
||||||
action=action,
|
action=action,
|
||||||
)
|
)
|
||||||
|
|
||||||
# If permission_sqls is the sentinel, skip all permission checks
|
# Combine rule fragments and collect parameters
|
||||||
# Return SQL that allows all resources
|
|
||||||
from datasette.utils.permissions import SKIP_PERMISSION_CHECKS
|
|
||||||
|
|
||||||
if permission_sqls is SKIP_PERMISSION_CHECKS:
|
|
||||||
cols = "parent, child, 'skip_permission_checks' AS reason"
|
|
||||||
if include_is_private:
|
|
||||||
cols += ", 0 AS is_private"
|
|
||||||
return f"SELECT {cols} FROM ({base_resources_sql})", {}
|
|
||||||
|
|
||||||
all_params = {}
|
all_params = {}
|
||||||
rule_sqls = []
|
rule_sqls = []
|
||||||
restriction_sqls = []
|
|
||||||
|
|
||||||
for permission_sql in permission_sqls:
|
for result in rule_results:
|
||||||
# Always collect params (even from restriction-only plugins)
|
result = await await_me_maybe(result)
|
||||||
all_params.update(permission_sql.params or {})
|
sqls, params = _process_permission_results(result)
|
||||||
|
rule_sqls.extend(sqls)
|
||||||
# Collect restriction SQL filters
|
all_params.update(params)
|
||||||
if permission_sql.restriction_sql:
|
|
||||||
restriction_sqls.append(permission_sql.restriction_sql)
|
|
||||||
|
|
||||||
# Skip plugins that only provide restriction_sql (no permission rules)
|
|
||||||
if permission_sql.sql is None:
|
|
||||||
continue
|
|
||||||
rule_sqls.append(
|
|
||||||
f"""
|
|
||||||
SELECT parent, child, allow, reason, '{permission_sql.source}' AS source_plugin FROM (
|
|
||||||
{permission_sql.sql}
|
|
||||||
)
|
|
||||||
""".strip()
|
|
||||||
)
|
|
||||||
|
|
||||||
# If no rules, return empty result (deny all)
|
# If no rules, return empty result (deny all)
|
||||||
if not rule_sqls:
|
if not rule_sqls:
|
||||||
|
|
@ -211,24 +219,28 @@ async def _build_single_action_sql(
|
||||||
|
|
||||||
# If include_is_private, we need to build anonymous permissions too
|
# If include_is_private, we need to build anonymous permissions too
|
||||||
if include_is_private:
|
if include_is_private:
|
||||||
anon_permission_sqls = await gather_permission_sql_from_hooks(
|
# Get anonymous permission rules
|
||||||
|
anon_rule_results = pm.hook.permission_resources_sql(
|
||||||
datasette=datasette,
|
datasette=datasette,
|
||||||
actor=None,
|
actor=None,
|
||||||
action=action,
|
action=action,
|
||||||
)
|
)
|
||||||
anon_sqls_rewritten = []
|
anon_rule_sqls = []
|
||||||
anon_params = {}
|
anon_params = {}
|
||||||
|
for result in anon_rule_results:
|
||||||
|
result = await await_me_maybe(result)
|
||||||
|
sqls, params = _process_permission_results(result)
|
||||||
|
anon_rule_sqls.extend(sqls)
|
||||||
|
# Namespace anonymous params to avoid conflicts
|
||||||
|
for key, value in params.items():
|
||||||
|
anon_params[f"anon_{key}"] = value
|
||||||
|
|
||||||
for permission_sql in anon_permission_sqls:
|
# Rewrite anonymous SQL to use namespaced params
|
||||||
# Skip plugins that only provide restriction_sql (no permission rules)
|
anon_sqls_rewritten = []
|
||||||
if permission_sql.sql is None:
|
for sql in anon_rule_sqls:
|
||||||
continue
|
for key in params.keys():
|
||||||
rewritten_sql = permission_sql.sql
|
sql = sql.replace(f":{key}", f":anon_{key}")
|
||||||
for key, value in (permission_sql.params or {}).items():
|
anon_sqls_rewritten.append(sql)
|
||||||
anon_key = f"anon_{key}"
|
|
||||||
anon_params[anon_key] = value
|
|
||||||
rewritten_sql = rewritten_sql.replace(f":{key}", f":{anon_key}")
|
|
||||||
anon_sqls_rewritten.append(rewritten_sql)
|
|
||||||
|
|
||||||
all_params.update(anon_params)
|
all_params.update(anon_params)
|
||||||
|
|
||||||
|
|
@ -249,8 +261,8 @@ async def _build_single_action_sql(
|
||||||
" SELECT b.parent, b.child,",
|
" SELECT b.parent, b.child,",
|
||||||
" MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
|
" MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
|
||||||
" MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow,",
|
" MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow,",
|
||||||
" json_group_array(CASE WHEN ar.allow = 0 THEN ar.source_plugin || ': ' || ar.reason END) AS deny_reasons,",
|
" json_group_array(CASE WHEN ar.allow = 0 THEN ar.reason END) AS deny_reasons,",
|
||||||
" json_group_array(CASE WHEN ar.allow = 1 THEN ar.source_plugin || ': ' || ar.reason END) AS allow_reasons",
|
" json_group_array(CASE WHEN ar.allow = 1 THEN ar.reason END) AS allow_reasons",
|
||||||
" FROM base b",
|
" FROM base b",
|
||||||
" LEFT JOIN all_rules ar ON ar.parent = b.parent AND ar.child = b.child",
|
" LEFT JOIN all_rules ar ON ar.parent = b.parent AND ar.child = b.child",
|
||||||
" GROUP BY b.parent, b.child",
|
" GROUP BY b.parent, b.child",
|
||||||
|
|
@ -259,8 +271,8 @@ async def _build_single_action_sql(
|
||||||
" SELECT b.parent, b.child,",
|
" SELECT b.parent, b.child,",
|
||||||
" MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
|
" MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
|
||||||
" MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow,",
|
" MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow,",
|
||||||
" json_group_array(CASE WHEN ar.allow = 0 THEN ar.source_plugin || ': ' || ar.reason END) AS deny_reasons,",
|
" json_group_array(CASE WHEN ar.allow = 0 THEN ar.reason END) AS deny_reasons,",
|
||||||
" json_group_array(CASE WHEN ar.allow = 1 THEN ar.source_plugin || ': ' || ar.reason END) AS allow_reasons",
|
" json_group_array(CASE WHEN ar.allow = 1 THEN ar.reason END) AS allow_reasons",
|
||||||
" FROM base b",
|
" FROM base b",
|
||||||
" LEFT JOIN all_rules ar ON ar.parent = b.parent AND ar.child IS NULL",
|
" LEFT JOIN all_rules ar ON ar.parent = b.parent AND ar.child IS NULL",
|
||||||
" GROUP BY b.parent, b.child",
|
" GROUP BY b.parent, b.child",
|
||||||
|
|
@ -269,8 +281,8 @@ async def _build_single_action_sql(
|
||||||
" SELECT b.parent, b.child,",
|
" SELECT b.parent, b.child,",
|
||||||
" MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
|
" MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
|
||||||
" MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow,",
|
" MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow,",
|
||||||
" json_group_array(CASE WHEN ar.allow = 0 THEN ar.source_plugin || ': ' || ar.reason END) AS deny_reasons,",
|
" json_group_array(CASE WHEN ar.allow = 0 THEN ar.reason END) AS deny_reasons,",
|
||||||
" json_group_array(CASE WHEN ar.allow = 1 THEN ar.source_plugin || ': ' || ar.reason END) AS allow_reasons",
|
" json_group_array(CASE WHEN ar.allow = 1 THEN ar.reason END) AS allow_reasons",
|
||||||
" FROM base b",
|
" FROM base b",
|
||||||
" LEFT JOIN all_rules ar ON ar.parent IS NULL AND ar.child IS NULL",
|
" LEFT JOIN all_rules ar ON ar.parent IS NULL AND ar.child IS NULL",
|
||||||
" GROUP BY b.parent, b.child",
|
" GROUP BY b.parent, b.child",
|
||||||
|
|
@ -383,17 +395,6 @@ async def _build_single_action_sql(
|
||||||
|
|
||||||
query_parts.append(")")
|
query_parts.append(")")
|
||||||
|
|
||||||
# Add restriction list CTE if there are restrictions
|
|
||||||
if restriction_sqls:
|
|
||||||
# Wrap each restriction_sql in a subquery to avoid operator precedence issues
|
|
||||||
# with UNION ALL inside the restriction SQL statements
|
|
||||||
restriction_intersect = "\nINTERSECT\n".join(
|
|
||||||
f"SELECT * FROM ({sql})" for sql in restriction_sqls
|
|
||||||
)
|
|
||||||
query_parts.extend(
|
|
||||||
[",", "restriction_list AS (", f" {restriction_intersect}", ")"]
|
|
||||||
)
|
|
||||||
|
|
||||||
# Final SELECT
|
# Final SELECT
|
||||||
select_cols = "parent, child, reason"
|
select_cols = "parent, child, reason"
|
||||||
if include_is_private:
|
if include_is_private:
|
||||||
|
|
@ -403,17 +404,6 @@ async def _build_single_action_sql(
|
||||||
query_parts.append("FROM decisions")
|
query_parts.append("FROM decisions")
|
||||||
query_parts.append("WHERE is_allowed = 1")
|
query_parts.append("WHERE is_allowed = 1")
|
||||||
|
|
||||||
# Add restriction filter if there are restrictions
|
|
||||||
if restriction_sqls:
|
|
||||||
query_parts.append(
|
|
||||||
"""
|
|
||||||
AND EXISTS (
|
|
||||||
SELECT 1 FROM restriction_list r
|
|
||||||
WHERE (r.parent = decisions.parent OR r.parent IS NULL)
|
|
||||||
AND (r.child = decisions.child OR r.child IS NULL)
|
|
||||||
)"""
|
|
||||||
)
|
|
||||||
|
|
||||||
# Add parent filter if specified
|
# Add parent filter if specified
|
||||||
if parent is not None:
|
if parent is not None:
|
||||||
query_parts.append(" AND parent = :filter_parent")
|
query_parts.append(" AND parent = :filter_parent")
|
||||||
|
|
@ -440,55 +430,33 @@ async def build_permission_rules_sql(
|
||||||
if not action_obj:
|
if not action_obj:
|
||||||
raise ValueError(f"Unknown action: {action}")
|
raise ValueError(f"Unknown action: {action}")
|
||||||
|
|
||||||
permission_sqls = await gather_permission_sql_from_hooks(
|
# Get all permission rule fragments from plugins via the hook
|
||||||
|
rule_results = pm.hook.permission_resources_sql(
|
||||||
datasette=datasette,
|
datasette=datasette,
|
||||||
actor=actor,
|
actor=actor,
|
||||||
action=action,
|
action=action,
|
||||||
)
|
)
|
||||||
|
|
||||||
# If permission_sqls is the sentinel, skip all permission checks
|
# Combine rule fragments and collect parameters
|
||||||
# Return SQL that allows everything
|
all_params = {}
|
||||||
from datasette.utils.permissions import SKIP_PERMISSION_CHECKS
|
rule_sqls = []
|
||||||
|
|
||||||
if permission_sqls is SKIP_PERMISSION_CHECKS:
|
for result in rule_results:
|
||||||
return (
|
result = await await_me_maybe(result)
|
||||||
"SELECT NULL AS parent, NULL AS child, 1 AS allow, 'skip_permission_checks' AS reason, 'skip' AS source_plugin",
|
sqls, params = _process_permission_results(result)
|
||||||
{},
|
rule_sqls.extend(sqls)
|
||||||
[],
|
all_params.update(params)
|
||||||
)
|
|
||||||
|
|
||||||
if not permission_sqls:
|
# Build the UNION query
|
||||||
|
if not rule_sqls:
|
||||||
|
# Return empty result set
|
||||||
return (
|
return (
|
||||||
"SELECT NULL AS parent, NULL AS child, 0 AS allow, NULL AS reason, NULL AS source_plugin WHERE 0",
|
"SELECT NULL AS parent, NULL AS child, 0 AS allow, NULL AS reason, NULL AS source_plugin WHERE 0",
|
||||||
{},
|
{},
|
||||||
[],
|
|
||||||
)
|
)
|
||||||
|
|
||||||
union_parts = []
|
rules_union = " UNION ALL ".join(rule_sqls)
|
||||||
all_params = {}
|
return rules_union, all_params
|
||||||
restriction_sqls = []
|
|
||||||
|
|
||||||
for permission_sql in permission_sqls:
|
|
||||||
all_params.update(permission_sql.params or {})
|
|
||||||
|
|
||||||
# Collect restriction SQL filters
|
|
||||||
if permission_sql.restriction_sql:
|
|
||||||
restriction_sqls.append(permission_sql.restriction_sql)
|
|
||||||
|
|
||||||
# Skip plugins that only provide restriction_sql (no permission rules)
|
|
||||||
if permission_sql.sql is None:
|
|
||||||
continue
|
|
||||||
|
|
||||||
union_parts.append(
|
|
||||||
f"""
|
|
||||||
SELECT parent, child, allow, reason, '{permission_sql.source}' AS source_plugin FROM (
|
|
||||||
{permission_sql.sql}
|
|
||||||
)
|
|
||||||
""".strip()
|
|
||||||
)
|
|
||||||
|
|
||||||
rules_union = " UNION ALL ".join(union_parts)
|
|
||||||
return rules_union, all_params, restriction_sqls
|
|
||||||
|
|
||||||
|
|
||||||
async def check_permission_for_resource(
|
async def check_permission_for_resource(
|
||||||
|
|
@ -515,9 +483,7 @@ async def check_permission_for_resource(
|
||||||
This builds the cascading permission query and checks if the specific
|
This builds the cascading permission query and checks if the specific
|
||||||
resource is in the allowed set.
|
resource is in the allowed set.
|
||||||
"""
|
"""
|
||||||
rules_union, all_params, restriction_sqls = await build_permission_rules_sql(
|
rules_union, all_params = await build_permission_rules_sql(datasette, actor, action)
|
||||||
datasette, actor, action
|
|
||||||
)
|
|
||||||
|
|
||||||
# If no rules (empty SQL), default deny
|
# If no rules (empty SQL), default deny
|
||||||
if not rules_union:
|
if not rules_union:
|
||||||
|
|
@ -527,57 +493,43 @@ async def check_permission_for_resource(
|
||||||
all_params["_check_parent"] = parent
|
all_params["_check_parent"] = parent
|
||||||
all_params["_check_child"] = child
|
all_params["_check_child"] = child
|
||||||
|
|
||||||
# If there are restriction filters, check if the resource passes them first
|
|
||||||
if restriction_sqls:
|
|
||||||
# Check if resource is in restriction allowlist
|
|
||||||
# Database-level restrictions (parent, NULL) should match all children (parent, *)
|
|
||||||
# Wrap each restriction_sql in a subquery to avoid operator precedence issues
|
|
||||||
restriction_check = "\nINTERSECT\n".join(
|
|
||||||
f"SELECT * FROM ({sql})" for sql in restriction_sqls
|
|
||||||
)
|
|
||||||
restriction_query = f"""
|
|
||||||
WITH restriction_list AS (
|
|
||||||
{restriction_check}
|
|
||||||
)
|
|
||||||
SELECT EXISTS (
|
|
||||||
SELECT 1 FROM restriction_list
|
|
||||||
WHERE (parent = :_check_parent OR parent IS NULL)
|
|
||||||
AND (child = :_check_child OR child IS NULL)
|
|
||||||
) AS in_allowlist
|
|
||||||
"""
|
|
||||||
result = await datasette.get_internal_database().execute(
|
|
||||||
restriction_query, all_params
|
|
||||||
)
|
|
||||||
if result.rows and not result.rows[0][0]:
|
|
||||||
# Resource not in restriction allowlist - deny
|
|
||||||
return False
|
|
||||||
|
|
||||||
query = f"""
|
query = f"""
|
||||||
WITH
|
WITH
|
||||||
all_rules AS (
|
all_rules AS (
|
||||||
{rules_union}
|
{rules_union}
|
||||||
),
|
),
|
||||||
matched_rules AS (
|
child_lvl AS (
|
||||||
SELECT ar.*,
|
SELECT
|
||||||
CASE
|
MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,
|
||||||
WHEN ar.child IS NOT NULL THEN 2 -- child-level (most specific)
|
MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow
|
||||||
WHEN ar.parent IS NOT NULL THEN 1 -- parent-level
|
|
||||||
ELSE 0 -- root/global
|
|
||||||
END AS depth
|
|
||||||
FROM all_rules ar
|
FROM all_rules ar
|
||||||
WHERE (ar.parent IS NULL OR ar.parent = :_check_parent)
|
WHERE ar.parent = :_check_parent AND ar.child = :_check_child
|
||||||
AND (ar.child IS NULL OR ar.child = :_check_child)
|
|
||||||
),
|
),
|
||||||
winner AS (
|
parent_lvl AS (
|
||||||
SELECT *
|
SELECT
|
||||||
FROM matched_rules
|
MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,
|
||||||
ORDER BY
|
MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow
|
||||||
depth DESC, -- specificity first (higher depth wins)
|
FROM all_rules ar
|
||||||
CASE WHEN allow=0 THEN 0 ELSE 1 END, -- then deny over allow
|
WHERE ar.parent = :_check_parent AND ar.child IS NULL
|
||||||
source_plugin -- stable tie-break
|
),
|
||||||
LIMIT 1
|
global_lvl AS (
|
||||||
|
SELECT
|
||||||
|
MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,
|
||||||
|
MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow
|
||||||
|
FROM all_rules ar
|
||||||
|
WHERE ar.parent IS NULL AND ar.child IS NULL
|
||||||
)
|
)
|
||||||
SELECT COALESCE((SELECT allow FROM winner), 0) AS is_allowed
|
SELECT
|
||||||
|
CASE
|
||||||
|
WHEN cl.any_deny = 1 THEN 0
|
||||||
|
WHEN cl.any_allow = 1 THEN 1
|
||||||
|
WHEN pl.any_deny = 1 THEN 0
|
||||||
|
WHEN pl.any_allow = 1 THEN 1
|
||||||
|
WHEN gl.any_deny = 1 THEN 0
|
||||||
|
WHEN gl.any_allow = 1 THEN 1
|
||||||
|
ELSE 0
|
||||||
|
END AS is_allowed
|
||||||
|
FROM child_lvl cl, parent_lvl pl, global_lvl gl
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Execute the query against the internal database
|
# Execute the query against the internal database
|
||||||
|
|
|
||||||
|
|
@ -6,7 +6,6 @@ from pathlib import Path
|
||||||
from http.cookies import SimpleCookie, Morsel
|
from http.cookies import SimpleCookie, Morsel
|
||||||
import aiofiles
|
import aiofiles
|
||||||
import aiofiles.os
|
import aiofiles.os
|
||||||
import re
|
|
||||||
|
|
||||||
# Workaround for adding samesite support to pre 3.8 python
|
# Workaround for adding samesite support to pre 3.8 python
|
||||||
Morsel._reserved["samesite"] = "SameSite"
|
Morsel._reserved["samesite"] = "SameSite"
|
||||||
|
|
@ -249,9 +248,6 @@ async def asgi_send_html(send, html, status=200, headers=None):
|
||||||
|
|
||||||
|
|
||||||
async def asgi_send_redirect(send, location, status=302):
|
async def asgi_send_redirect(send, location, status=302):
|
||||||
# Prevent open redirect vulnerability: strip multiple leading slashes
|
|
||||||
# //example.com would be interpreted as a protocol-relative URL (e.g., https://example.com/)
|
|
||||||
location = re.sub(r"^/+", "/", location)
|
|
||||||
await asgi_send(
|
await asgi_send(
|
||||||
send,
|
send,
|
||||||
"",
|
"",
|
||||||
|
|
|
||||||
|
|
@ -6,82 +6,6 @@ from typing import Any, Dict, Iterable, List, Sequence, Tuple
|
||||||
import sqlite3
|
import sqlite3
|
||||||
|
|
||||||
from datasette.permissions import PermissionSQL
|
from datasette.permissions import PermissionSQL
|
||||||
from datasette.plugins import pm
|
|
||||||
from datasette.utils import await_me_maybe
|
|
||||||
|
|
||||||
|
|
||||||
# Sentinel object to indicate permission checks should be skipped
|
|
||||||
SKIP_PERMISSION_CHECKS = object()
|
|
||||||
|
|
||||||
|
|
||||||
async def gather_permission_sql_from_hooks(
|
|
||||||
*, datasette, actor: dict | None, action: str
|
|
||||||
) -> List[PermissionSQL] | object:
|
|
||||||
"""Collect PermissionSQL objects from the permission_resources_sql hook.
|
|
||||||
|
|
||||||
Ensures that each returned PermissionSQL has a populated ``source``.
|
|
||||||
|
|
||||||
Returns SKIP_PERMISSION_CHECKS sentinel if skip_permission_checks context variable
|
|
||||||
is set, signaling that all permission checks should be bypassed.
|
|
||||||
"""
|
|
||||||
from datasette.permissions import _skip_permission_checks
|
|
||||||
|
|
||||||
# Check if we should skip permission checks BEFORE calling hooks
|
|
||||||
# This avoids creating unawaited coroutines
|
|
||||||
if _skip_permission_checks.get():
|
|
||||||
return SKIP_PERMISSION_CHECKS
|
|
||||||
|
|
||||||
hook_caller = pm.hook.permission_resources_sql
|
|
||||||
hookimpls = hook_caller.get_hookimpls()
|
|
||||||
hook_results = list(hook_caller(datasette=datasette, actor=actor, action=action))
|
|
||||||
|
|
||||||
collected: List[PermissionSQL] = []
|
|
||||||
actor_json = json.dumps(actor) if actor is not None else None
|
|
||||||
actor_id = actor.get("id") if isinstance(actor, dict) else None
|
|
||||||
|
|
||||||
for index, result in enumerate(hook_results):
|
|
||||||
hookimpl = hookimpls[index]
|
|
||||||
resolved = await await_me_maybe(result)
|
|
||||||
default_source = _plugin_name_from_hookimpl(hookimpl)
|
|
||||||
for permission_sql in _iter_permission_sql_from_result(resolved, action=action):
|
|
||||||
if not permission_sql.source:
|
|
||||||
permission_sql.source = default_source
|
|
||||||
params = permission_sql.params or {}
|
|
||||||
params.setdefault("action", action)
|
|
||||||
params.setdefault("actor", actor_json)
|
|
||||||
params.setdefault("actor_id", actor_id)
|
|
||||||
collected.append(permission_sql)
|
|
||||||
|
|
||||||
return collected
|
|
||||||
|
|
||||||
|
|
||||||
def _plugin_name_from_hookimpl(hookimpl) -> str:
|
|
||||||
if getattr(hookimpl, "plugin_name", None):
|
|
||||||
return hookimpl.plugin_name
|
|
||||||
plugin = getattr(hookimpl, "plugin", None)
|
|
||||||
if hasattr(plugin, "__name__"):
|
|
||||||
return plugin.__name__
|
|
||||||
return repr(plugin)
|
|
||||||
|
|
||||||
|
|
||||||
def _iter_permission_sql_from_result(
|
|
||||||
result: Any, *, action: str
|
|
||||||
) -> Iterable[PermissionSQL]:
|
|
||||||
if result is None:
|
|
||||||
return []
|
|
||||||
if isinstance(result, PermissionSQL):
|
|
||||||
return [result]
|
|
||||||
if isinstance(result, (list, tuple)):
|
|
||||||
collected: List[PermissionSQL] = []
|
|
||||||
for item in result:
|
|
||||||
collected.extend(_iter_permission_sql_from_result(item, action=action))
|
|
||||||
return collected
|
|
||||||
if callable(result):
|
|
||||||
permission_sql = result(action) # type: ignore[call-arg]
|
|
||||||
return _iter_permission_sql_from_result(permission_sql, action=action)
|
|
||||||
raise TypeError(
|
|
||||||
"Plugin providers must return PermissionSQL instances, sequences, or callables"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
|
|
@ -110,11 +34,7 @@ def build_rules_union(
|
||||||
|
|
||||||
for p in plugins:
|
for p in plugins:
|
||||||
# No namespacing - just use plugin params as-is
|
# No namespacing - just use plugin params as-is
|
||||||
params.update(p.params or {})
|
params.update(p.params)
|
||||||
|
|
||||||
# Skip plugins that only provide restriction_sql (no permission rules)
|
|
||||||
if p.sql is None:
|
|
||||||
continue
|
|
||||||
|
|
||||||
parts.append(
|
parts.append(
|
||||||
f"""
|
f"""
|
||||||
|
|
@ -172,8 +92,6 @@ async def resolve_permissions_from_catalog(
|
||||||
- resource (rendered "/parent/child" or "/parent" or "/")
|
- resource (rendered "/parent/child" or "/parent" or "/")
|
||||||
"""
|
"""
|
||||||
resolved_plugins: List[PermissionSQL] = []
|
resolved_plugins: List[PermissionSQL] = []
|
||||||
restriction_sqls: List[str] = []
|
|
||||||
|
|
||||||
for plugin in plugins:
|
for plugin in plugins:
|
||||||
if callable(plugin) and not isinstance(plugin, PermissionSQL):
|
if callable(plugin) and not isinstance(plugin, PermissionSQL):
|
||||||
resolved = plugin(action) # type: ignore[arg-type]
|
resolved = plugin(action) # type: ignore[arg-type]
|
||||||
|
|
@ -183,10 +101,6 @@ async def resolve_permissions_from_catalog(
|
||||||
raise TypeError("Plugin providers must return PermissionSQL instances")
|
raise TypeError("Plugin providers must return PermissionSQL instances")
|
||||||
resolved_plugins.append(resolved)
|
resolved_plugins.append(resolved)
|
||||||
|
|
||||||
# Collect restriction SQL filters
|
|
||||||
if resolved.restriction_sql:
|
|
||||||
restriction_sqls.append(resolved.restriction_sql)
|
|
||||||
|
|
||||||
union_sql, rule_params = build_rules_union(actor, resolved_plugins)
|
union_sql, rule_params = build_rules_union(actor, resolved_plugins)
|
||||||
all_params = {
|
all_params = {
|
||||||
**(candidate_params or {}),
|
**(candidate_params or {}),
|
||||||
|
|
@ -222,8 +136,8 @@ async def resolve_permissions_from_catalog(
|
||||||
PARTITION BY parent, child
|
PARTITION BY parent, child
|
||||||
ORDER BY
|
ORDER BY
|
||||||
depth DESC, -- specificity first
|
depth DESC, -- specificity first
|
||||||
CASE WHEN allow=0 THEN 0 ELSE 1 END, -- then deny over allow at same depth
|
CASE WHEN allow=0 THEN 0 ELSE 1 END, -- deny over allow at same depth
|
||||||
source_plugin -- stable tie-break
|
source_plugin -- stable tie-break
|
||||||
) AS rn
|
) AS rn
|
||||||
FROM matched
|
FROM matched
|
||||||
),
|
),
|
||||||
|
|
@ -251,145 +165,6 @@ async def resolve_permissions_from_catalog(
|
||||||
ORDER BY c.parent, c.child
|
ORDER BY c.parent, c.child
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# If there are restriction filters, wrap the query with INTERSECT
|
|
||||||
# This ensures only resources in the restriction allowlist are returned
|
|
||||||
if restriction_sqls:
|
|
||||||
# Start with the main query, but select only parent/child for the INTERSECT
|
|
||||||
main_query_for_intersect = f"""
|
|
||||||
WITH
|
|
||||||
cands AS (
|
|
||||||
{candidate_sql}
|
|
||||||
),
|
|
||||||
rules AS (
|
|
||||||
{union_sql}
|
|
||||||
),
|
|
||||||
matched AS (
|
|
||||||
SELECT
|
|
||||||
c.parent, c.child,
|
|
||||||
r.allow, r.reason, r.source_plugin,
|
|
||||||
CASE
|
|
||||||
WHEN r.child IS NOT NULL THEN 2 -- child-level (most specific)
|
|
||||||
WHEN r.parent IS NOT NULL THEN 1 -- parent-level
|
|
||||||
ELSE 0 -- root/global
|
|
||||||
END AS depth
|
|
||||||
FROM cands c
|
|
||||||
JOIN rules r
|
|
||||||
ON (r.parent IS NULL OR r.parent = c.parent)
|
|
||||||
AND (r.child IS NULL OR r.child = c.child)
|
|
||||||
),
|
|
||||||
ranked AS (
|
|
||||||
SELECT *,
|
|
||||||
ROW_NUMBER() OVER (
|
|
||||||
PARTITION BY parent, child
|
|
||||||
ORDER BY
|
|
||||||
depth DESC, -- specificity first
|
|
||||||
CASE WHEN allow=0 THEN 0 ELSE 1 END, -- then deny over allow at same depth
|
|
||||||
source_plugin -- stable tie-break
|
|
||||||
) AS rn
|
|
||||||
FROM matched
|
|
||||||
),
|
|
||||||
winner AS (
|
|
||||||
SELECT parent, child,
|
|
||||||
allow, reason, source_plugin, depth
|
|
||||||
FROM ranked WHERE rn = 1
|
|
||||||
),
|
|
||||||
permitted_resources AS (
|
|
||||||
SELECT c.parent, c.child
|
|
||||||
FROM cands c
|
|
||||||
LEFT JOIN winner w
|
|
||||||
ON ((w.parent = c.parent) OR (w.parent IS NULL AND c.parent IS NULL))
|
|
||||||
AND ((w.child = c.child ) OR (w.child IS NULL AND c.child IS NULL))
|
|
||||||
WHERE COALESCE(w.allow, CASE WHEN :implicit_deny THEN 0 ELSE NULL END) = 1
|
|
||||||
)
|
|
||||||
SELECT parent, child FROM permitted_resources
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Build restriction list with INTERSECT (all must match)
|
|
||||||
# Then filter to resources that match hierarchically
|
|
||||||
# Wrap each restriction_sql in a subquery to avoid operator precedence issues
|
|
||||||
# with UNION ALL inside the restriction SQL statements
|
|
||||||
restriction_intersect = "\nINTERSECT\n".join(
|
|
||||||
f"SELECT * FROM ({sql})" for sql in restriction_sqls
|
|
||||||
)
|
|
||||||
|
|
||||||
# Combine: resources allowed by permissions AND in restriction allowlist
|
|
||||||
# Database-level restrictions (parent, NULL) should match all children (parent, *)
|
|
||||||
filtered_resources = f"""
|
|
||||||
WITH restriction_list AS (
|
|
||||||
{restriction_intersect}
|
|
||||||
),
|
|
||||||
permitted AS (
|
|
||||||
{main_query_for_intersect}
|
|
||||||
),
|
|
||||||
filtered AS (
|
|
||||||
SELECT p.parent, p.child
|
|
||||||
FROM permitted p
|
|
||||||
WHERE EXISTS (
|
|
||||||
SELECT 1 FROM restriction_list r
|
|
||||||
WHERE (r.parent = p.parent OR r.parent IS NULL)
|
|
||||||
AND (r.child = p.child OR r.child IS NULL)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Now join back to get full results for only the filtered resources
|
|
||||||
sql = f"""
|
|
||||||
{filtered_resources}
|
|
||||||
, cands AS (
|
|
||||||
{candidate_sql}
|
|
||||||
),
|
|
||||||
rules AS (
|
|
||||||
{union_sql}
|
|
||||||
),
|
|
||||||
matched AS (
|
|
||||||
SELECT
|
|
||||||
c.parent, c.child,
|
|
||||||
r.allow, r.reason, r.source_plugin,
|
|
||||||
CASE
|
|
||||||
WHEN r.child IS NOT NULL THEN 2 -- child-level (most specific)
|
|
||||||
WHEN r.parent IS NOT NULL THEN 1 -- parent-level
|
|
||||||
ELSE 0 -- root/global
|
|
||||||
END AS depth
|
|
||||||
FROM cands c
|
|
||||||
JOIN rules r
|
|
||||||
ON (r.parent IS NULL OR r.parent = c.parent)
|
|
||||||
AND (r.child IS NULL OR r.child = c.child)
|
|
||||||
),
|
|
||||||
ranked AS (
|
|
||||||
SELECT *,
|
|
||||||
ROW_NUMBER() OVER (
|
|
||||||
PARTITION BY parent, child
|
|
||||||
ORDER BY
|
|
||||||
depth DESC, -- specificity first
|
|
||||||
CASE WHEN allow=0 THEN 0 ELSE 1 END, -- then deny over allow at same depth
|
|
||||||
source_plugin -- stable tie-break
|
|
||||||
) AS rn
|
|
||||||
FROM matched
|
|
||||||
),
|
|
||||||
winner AS (
|
|
||||||
SELECT parent, child,
|
|
||||||
allow, reason, source_plugin, depth
|
|
||||||
FROM ranked WHERE rn = 1
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
c.parent, c.child,
|
|
||||||
COALESCE(w.allow, CASE WHEN :implicit_deny THEN 0 ELSE NULL END) AS allow,
|
|
||||||
COALESCE(w.reason, CASE WHEN :implicit_deny THEN 'implicit deny' ELSE NULL END) AS reason,
|
|
||||||
w.source_plugin,
|
|
||||||
COALESCE(w.depth, -1) AS depth,
|
|
||||||
:action AS action,
|
|
||||||
CASE
|
|
||||||
WHEN c.parent IS NULL THEN '/'
|
|
||||||
WHEN c.child IS NULL THEN '/' || c.parent
|
|
||||||
ELSE '/' || c.parent || '/' || c.child
|
|
||||||
END AS resource
|
|
||||||
FROM filtered c
|
|
||||||
LEFT JOIN winner w
|
|
||||||
ON ((w.parent = c.parent) OR (w.parent IS NULL AND c.parent IS NULL))
|
|
||||||
AND ((w.child = c.child ) OR (w.child IS NULL AND c.child IS NULL))
|
|
||||||
ORDER BY c.parent, c.child
|
|
||||||
"""
|
|
||||||
|
|
||||||
rows_iter: Iterable[sqlite3.Row] = await db.execute(
|
rows_iter: Iterable[sqlite3.Row] = await db.execute(
|
||||||
sql,
|
sql,
|
||||||
{**all_params, "implicit_deny": 1 if implicit_deny else 0},
|
{**all_params, "implicit_deny": 1 if implicit_deny else 0},
|
||||||
|
|
|
||||||
|
|
@ -1,2 +1,2 @@
|
||||||
__version__ = "1.0a23"
|
__version__ = "1.0a19"
|
||||||
__version_info__ = tuple(__version__.split("."))
|
__version_info__ = tuple(__version__.split("."))
|
||||||
|
|
|
||||||
|
|
@ -70,15 +70,12 @@ class DatabaseView(View):
|
||||||
metadata = await datasette.get_database_metadata(database)
|
metadata = await datasette.get_database_metadata(database)
|
||||||
|
|
||||||
# Get all tables/views this actor can see in bulk with private flag
|
# Get all tables/views this actor can see in bulk with private flag
|
||||||
allowed_tables_page = await datasette.allowed_resources(
|
|
||||||
"view-table",
|
allowed_tables = await datasette.allowed_resources(
|
||||||
request.actor,
|
"view-table", request.actor, parent=database, include_is_private=True
|
||||||
parent=database,
|
|
||||||
include_is_private=True,
|
|
||||||
limit=1000,
|
|
||||||
)
|
)
|
||||||
# Create lookup dict for quick access
|
# Create lookup dict for quick access
|
||||||
allowed_dict = {r.child: r for r in allowed_tables_page.resources}
|
allowed_dict = {r.child: r for r in allowed_tables}
|
||||||
|
|
||||||
# Filter to just views
|
# Filter to just views
|
||||||
view_names_set = set(await db.view_names())
|
view_names_set = set(await db.view_names())
|
||||||
|
|
@ -91,18 +88,14 @@ class DatabaseView(View):
|
||||||
tables = await get_tables(datasette, request, db, allowed_dict)
|
tables = await get_tables(datasette, request, db, allowed_dict)
|
||||||
|
|
||||||
# Get allowed queries using the new permission system
|
# Get allowed queries using the new permission system
|
||||||
allowed_query_page = await datasette.allowed_resources(
|
allowed_query_resources = await datasette.allowed_resources(
|
||||||
"view-query",
|
"view-query", request.actor, parent=database, include_is_private=True
|
||||||
request.actor,
|
|
||||||
parent=database,
|
|
||||||
include_is_private=True,
|
|
||||||
limit=1000,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Build canned_queries list by looking up each allowed query
|
# Build canned_queries list by looking up each allowed query
|
||||||
all_queries = await datasette.get_canned_queries(database, request.actor)
|
all_queries = await datasette.get_canned_queries(database, request.actor)
|
||||||
canned_queries = []
|
canned_queries = []
|
||||||
for query_resource in allowed_query_page.resources:
|
for query_resource in allowed_query_resources:
|
||||||
query_name = query_resource.child
|
query_name = query_resource.child
|
||||||
if query_name in all_queries:
|
if query_name in all_queries:
|
||||||
canned_queries.append(
|
canned_queries.append(
|
||||||
|
|
@ -516,15 +509,12 @@ class QueryView(View):
|
||||||
database = db.name
|
database = db.name
|
||||||
|
|
||||||
# Get all tables/views this actor can see in bulk with private flag
|
# Get all tables/views this actor can see in bulk with private flag
|
||||||
allowed_tables_page = await datasette.allowed_resources(
|
|
||||||
"view-table",
|
allowed_tables = await datasette.allowed_resources(
|
||||||
request.actor,
|
"view-table", request.actor, parent=database, include_is_private=True
|
||||||
parent=database,
|
|
||||||
include_is_private=True,
|
|
||||||
limit=1000,
|
|
||||||
)
|
)
|
||||||
# Create lookup dict for quick access
|
# Create lookup dict for quick access
|
||||||
allowed_dict = {r.child: r for r in allowed_tables_page.resources}
|
allowed_dict = {r.child: r for r in allowed_tables}
|
||||||
|
|
||||||
# Are we a canned query?
|
# Are we a canned query?
|
||||||
canned_query = None
|
canned_query = None
|
||||||
|
|
|
||||||
|
|
@ -28,18 +28,17 @@ class IndexView(BaseView):
|
||||||
await self.ds.ensure_permission(action="view-instance", actor=request.actor)
|
await self.ds.ensure_permission(action="view-instance", actor=request.actor)
|
||||||
|
|
||||||
# Get all allowed databases and tables in bulk
|
# Get all allowed databases and tables in bulk
|
||||||
db_page = await self.ds.allowed_resources(
|
allowed_databases = await self.ds.allowed_resources(
|
||||||
"view-database", request.actor, include_is_private=True
|
"view-database", request.actor, include_is_private=True
|
||||||
)
|
)
|
||||||
allowed_databases = [r async for r in db_page.all()]
|
|
||||||
allowed_db_dict = {r.parent: r for r in allowed_databases}
|
allowed_db_dict = {r.parent: r for r in allowed_databases}
|
||||||
|
|
||||||
# Group tables by database
|
allowed_tables = await self.ds.allowed_resources(
|
||||||
tables_by_db = {}
|
|
||||||
table_page = await self.ds.allowed_resources(
|
|
||||||
"view-table", request.actor, include_is_private=True
|
"view-table", request.actor, include_is_private=True
|
||||||
)
|
)
|
||||||
async for t in table_page.all():
|
# Group by database
|
||||||
|
tables_by_db = {}
|
||||||
|
for t in allowed_tables:
|
||||||
if t.parent not in tables_by_db:
|
if t.parent not in tables_by_db:
|
||||||
tables_by_db[t.parent] = {}
|
tables_by_db[t.parent] = {}
|
||||||
tables_by_db[t.parent][t.child] = t
|
tables_by_db[t.parent][t.child] = t
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
from datasette.events import LogoutEvent, LoginEvent, CreateTokenEvent
|
from datasette.events import LogoutEvent, LoginEvent, CreateTokenEvent
|
||||||
from datasette.resources import DatabaseResource, TableResource
|
from datasette.resources import DatabaseResource, TableResource, InstanceResource
|
||||||
from datasette.utils.asgi import Response, Forbidden
|
from datasette.utils.asgi import Response, Forbidden
|
||||||
from datasette.utils import (
|
from datasette.utils import (
|
||||||
actor_matches_allow,
|
actor_matches_allow,
|
||||||
|
|
@ -268,38 +268,19 @@ class AllowedResourcesView(BaseView):
|
||||||
offset = (page - 1) * page_size
|
offset = (page - 1) * page_size
|
||||||
|
|
||||||
# Use the simplified allowed_resources method
|
# Use the simplified allowed_resources method
|
||||||
# Collect all resources with optional reasons for debugging
|
# If user has debug permission, use the with_reasons variant
|
||||||
try:
|
try:
|
||||||
allowed_rows = []
|
if has_debug_permission:
|
||||||
result = await self.ds.allowed_resources(
|
allowed_resources = await self.ds.allowed_resources_with_reasons(
|
||||||
action=action,
|
action=action,
|
||||||
actor=actor,
|
actor=actor,
|
||||||
parent=parent_filter,
|
)
|
||||||
include_reasons=has_debug_permission,
|
else:
|
||||||
)
|
allowed_resources = await self.ds.allowed_resources(
|
||||||
async for resource in result.all():
|
action=action,
|
||||||
parent_val = resource.parent
|
actor=actor,
|
||||||
child_val = resource.child
|
parent=parent_filter,
|
||||||
|
)
|
||||||
# Build resource path
|
|
||||||
if parent_val is None:
|
|
||||||
resource_path = "/"
|
|
||||||
elif child_val is None:
|
|
||||||
resource_path = f"/{parent_val}"
|
|
||||||
else:
|
|
||||||
resource_path = f"/{parent_val}/{child_val}"
|
|
||||||
|
|
||||||
row = {
|
|
||||||
"parent": parent_val,
|
|
||||||
"child": child_val,
|
|
||||||
"resource": resource_path,
|
|
||||||
}
|
|
||||||
|
|
||||||
# Add reason if we have it (from include_reasons=True)
|
|
||||||
if has_debug_permission and hasattr(resource, "reasons"):
|
|
||||||
row["reason"] = resource.reasons
|
|
||||||
|
|
||||||
allowed_rows.append(row)
|
|
||||||
except Exception:
|
except Exception:
|
||||||
# If catalog tables don't exist yet, return empty results
|
# If catalog tables don't exist yet, return empty results
|
||||||
return (
|
return (
|
||||||
|
|
@ -314,6 +295,46 @@ class AllowedResourcesView(BaseView):
|
||||||
200,
|
200,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Convert to list of dicts with resource path
|
||||||
|
allowed_rows = []
|
||||||
|
for item in allowed_resources:
|
||||||
|
# Extract resource and reason depending on what we got back
|
||||||
|
if has_debug_permission:
|
||||||
|
# allowed_resources_with_reasons returns AllowedResource(resource, reason)
|
||||||
|
resource = item.resource
|
||||||
|
reason = item.reason
|
||||||
|
else:
|
||||||
|
# allowed_resources returns plain Resource objects
|
||||||
|
resource = item
|
||||||
|
reason = None
|
||||||
|
|
||||||
|
parent_val = resource.parent
|
||||||
|
child_val = resource.child
|
||||||
|
|
||||||
|
# Apply parent filter if needed (when using with_reasons, we need to filter manually)
|
||||||
|
if parent_filter is not None and parent_val != parent_filter:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Build resource path
|
||||||
|
if parent_val is None:
|
||||||
|
resource_path = "/"
|
||||||
|
elif child_val is None:
|
||||||
|
resource_path = f"/{parent_val}"
|
||||||
|
else:
|
||||||
|
resource_path = f"/{parent_val}/{child_val}"
|
||||||
|
|
||||||
|
row = {
|
||||||
|
"parent": parent_val,
|
||||||
|
"child": child_val,
|
||||||
|
"resource": resource_path,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Add reason if we have it (it's already a list from allowed_resources_with_reasons)
|
||||||
|
if reason is not None:
|
||||||
|
row["reason"] = reason
|
||||||
|
|
||||||
|
allowed_rows.append(row)
|
||||||
|
|
||||||
# Apply child filter if specified
|
# Apply child filter if specified
|
||||||
if child_filter is not None:
|
if child_filter is not None:
|
||||||
allowed_rows = [row for row in allowed_rows if row["child"] == child_filter]
|
allowed_rows = [row for row in allowed_rows if row["child"] == child_filter]
|
||||||
|
|
@ -403,7 +424,7 @@ class PermissionRulesView(BaseView):
|
||||||
|
|
||||||
from datasette.utils.actions_sql import build_permission_rules_sql
|
from datasette.utils.actions_sql import build_permission_rules_sql
|
||||||
|
|
||||||
union_sql, union_params, restriction_sqls = await build_permission_rules_sql(
|
union_sql, union_params = await build_permission_rules_sql(
|
||||||
self.ds, actor, action
|
self.ds, actor, action
|
||||||
)
|
)
|
||||||
await self.ds.refresh_schemas()
|
await self.ds.refresh_schemas()
|
||||||
|
|
@ -423,7 +444,7 @@ class PermissionRulesView(BaseView):
|
||||||
WITH rules AS (
|
WITH rules AS (
|
||||||
{union_sql}
|
{union_sql}
|
||||||
)
|
)
|
||||||
SELECT parent, child, allow, reason, source_plugin
|
SELECT parent, child, allow, reason
|
||||||
FROM rules
|
FROM rules
|
||||||
ORDER BY allow DESC, (parent IS NOT NULL), parent, child
|
ORDER BY allow DESC, (parent IS NOT NULL), parent, child
|
||||||
LIMIT :limit OFFSET :offset
|
LIMIT :limit OFFSET :offset
|
||||||
|
|
@ -442,7 +463,6 @@ class PermissionRulesView(BaseView):
|
||||||
"resource": _resource_path(parent, child),
|
"resource": _resource_path(parent, child),
|
||||||
"allow": row["allow"],
|
"allow": row["allow"],
|
||||||
"reason": row["reason"],
|
"reason": row["reason"],
|
||||||
"source_plugin": row["source_plugin"],
|
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -491,18 +511,12 @@ async def _check_permission_for_actor(ds, action, parent, child, actor):
|
||||||
if not action_obj:
|
if not action_obj:
|
||||||
return {"error": f"Unknown action: {action}"}, 400
|
return {"error": f"Unknown action: {action}"}, 400
|
||||||
|
|
||||||
# Global actions (no resource_class) don't have a resource
|
if action_obj.takes_parent and action_obj.takes_child:
|
||||||
if action_obj.resource_class is None:
|
|
||||||
resource_obj = None
|
|
||||||
elif action_obj.takes_parent and action_obj.takes_child:
|
|
||||||
# Child-level resource (e.g., TableResource, QueryResource)
|
|
||||||
resource_obj = action_obj.resource_class(database=parent, table=child)
|
resource_obj = action_obj.resource_class(database=parent, table=child)
|
||||||
elif action_obj.takes_parent:
|
elif action_obj.takes_parent:
|
||||||
# Parent-level resource (e.g., DatabaseResource)
|
|
||||||
resource_obj = action_obj.resource_class(database=parent)
|
resource_obj = action_obj.resource_class(database=parent)
|
||||||
else:
|
else:
|
||||||
# This shouldn't happen given validation in Action.__post_init__
|
resource_obj = action_obj.resource_class()
|
||||||
return {"error": f"Invalid action configuration: {action}"}, 500
|
|
||||||
|
|
||||||
allowed = await ds.allowed(action=action, resource=resource_obj, actor=actor)
|
allowed = await ds.allowed(action=action, resource=resource_obj, actor=actor)
|
||||||
|
|
||||||
|
|
@ -585,9 +599,6 @@ class AllowDebugView(BaseView):
|
||||||
"error": "\n\n".join(errors) if errors else "",
|
"error": "\n\n".join(errors) if errors else "",
|
||||||
"actor_input": actor_input,
|
"actor_input": actor_input,
|
||||||
"allow_input": allow_input,
|
"allow_input": allow_input,
|
||||||
"has_debug_permission": await self.ds.allowed(
|
|
||||||
action="permissions-debug", actor=request.actor
|
|
||||||
),
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -637,11 +648,10 @@ class CreateTokenView(BaseView):
|
||||||
async def shared(self, request):
|
async def shared(self, request):
|
||||||
self.check_permission(request)
|
self.check_permission(request)
|
||||||
# Build list of databases and tables the user has permission to view
|
# Build list of databases and tables the user has permission to view
|
||||||
db_page = await self.ds.allowed_resources("view-database", request.actor)
|
allowed_databases = await self.ds.allowed_resources(
|
||||||
allowed_databases = [r async for r in db_page.all()]
|
"view-database", request.actor
|
||||||
|
)
|
||||||
table_page = await self.ds.allowed_resources("view-table", request.actor)
|
allowed_tables = await self.ds.allowed_resources("view-table", request.actor)
|
||||||
allowed_tables = [r async for r in table_page.all()]
|
|
||||||
|
|
||||||
# Build database -> tables mapping
|
# Build database -> tables mapping
|
||||||
database_with_tables = []
|
database_with_tables = []
|
||||||
|
|
@ -761,6 +771,8 @@ class ApiExplorerView(BaseView):
|
||||||
async def example_links(self, request):
|
async def example_links(self, request):
|
||||||
databases = []
|
databases = []
|
||||||
for name, db in self.ds.databases.items():
|
for name, db in self.ds.databases.items():
|
||||||
|
if name == "_internal":
|
||||||
|
continue
|
||||||
database_visible, _ = await self.ds.check_visibility(
|
database_visible, _ = await self.ds.check_visibility(
|
||||||
request.actor,
|
request.actor,
|
||||||
action="view-database",
|
action="view-database",
|
||||||
|
|
@ -979,180 +991,3 @@ class TablesView(BaseView):
|
||||||
]
|
]
|
||||||
|
|
||||||
return Response.json({"matches": matches, "truncated": truncated})
|
return Response.json({"matches": matches, "truncated": truncated})
|
||||||
|
|
||||||
|
|
||||||
class SchemaBaseView(BaseView):
|
|
||||||
"""Base class for schema views with common response formatting."""
|
|
||||||
|
|
||||||
has_json_alternate = False
|
|
||||||
|
|
||||||
async def get_database_schema(self, database_name):
|
|
||||||
"""Get schema SQL for a database."""
|
|
||||||
db = self.ds.databases[database_name]
|
|
||||||
result = await db.execute(
|
|
||||||
"select group_concat(sql, ';' || CHAR(10)) as schema from sqlite_master where sql is not null"
|
|
||||||
)
|
|
||||||
row = result.first()
|
|
||||||
return row["schema"] if row and row["schema"] else ""
|
|
||||||
|
|
||||||
def format_json_response(self, data):
|
|
||||||
"""Format data as JSON response with CORS headers if needed."""
|
|
||||||
headers = {}
|
|
||||||
if self.ds.cors:
|
|
||||||
add_cors_headers(headers)
|
|
||||||
return Response.json(data, headers=headers)
|
|
||||||
|
|
||||||
def format_error_response(self, error_message, format_, status=404):
|
|
||||||
"""Format error response based on requested format."""
|
|
||||||
if format_ == "json":
|
|
||||||
headers = {}
|
|
||||||
if self.ds.cors:
|
|
||||||
add_cors_headers(headers)
|
|
||||||
return Response.json(
|
|
||||||
{"ok": False, "error": error_message}, status=status, headers=headers
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return Response.text(error_message, status=status)
|
|
||||||
|
|
||||||
def format_markdown_response(self, heading, schema):
|
|
||||||
"""Format schema as Markdown response."""
|
|
||||||
md_output = f"# {heading}\n\n```sql\n{schema}\n```\n"
|
|
||||||
return Response.text(
|
|
||||||
md_output, headers={"content-type": "text/markdown; charset=utf-8"}
|
|
||||||
)
|
|
||||||
|
|
||||||
async def format_html_response(
|
|
||||||
self, request, schemas, is_instance=False, table_name=None
|
|
||||||
):
|
|
||||||
"""Format schema as HTML response."""
|
|
||||||
context = {
|
|
||||||
"schemas": schemas,
|
|
||||||
"is_instance": is_instance,
|
|
||||||
}
|
|
||||||
if table_name:
|
|
||||||
context["table_name"] = table_name
|
|
||||||
return await self.render(["schema.html"], request=request, context=context)
|
|
||||||
|
|
||||||
|
|
||||||
class InstanceSchemaView(SchemaBaseView):
|
|
||||||
"""
|
|
||||||
Displays schema for all databases in the instance.
|
|
||||||
Supports HTML, JSON, and Markdown formats.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "instance_schema"
|
|
||||||
|
|
||||||
async def get(self, request):
|
|
||||||
format_ = request.url_vars.get("format") or "html"
|
|
||||||
|
|
||||||
# Get all databases the actor can view
|
|
||||||
allowed_databases_page = await self.ds.allowed_resources(
|
|
||||||
"view-database",
|
|
||||||
request.actor,
|
|
||||||
)
|
|
||||||
allowed_databases = [r.parent async for r in allowed_databases_page.all()]
|
|
||||||
|
|
||||||
# Get schema for each database
|
|
||||||
schemas = []
|
|
||||||
for database_name in allowed_databases:
|
|
||||||
schema = await self.get_database_schema(database_name)
|
|
||||||
schemas.append({"database": database_name, "schema": schema})
|
|
||||||
|
|
||||||
if format_ == "json":
|
|
||||||
return self.format_json_response({"schemas": schemas})
|
|
||||||
elif format_ == "md":
|
|
||||||
md_parts = [
|
|
||||||
f"# Schema for {item['database']}\n\n```sql\n{item['schema']}\n```"
|
|
||||||
for item in schemas
|
|
||||||
]
|
|
||||||
return Response.text(
|
|
||||||
"\n\n".join(md_parts),
|
|
||||||
headers={"content-type": "text/markdown; charset=utf-8"},
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return await self.format_html_response(request, schemas, is_instance=True)
|
|
||||||
|
|
||||||
|
|
||||||
class DatabaseSchemaView(SchemaBaseView):
|
|
||||||
"""
|
|
||||||
Displays schema for a specific database.
|
|
||||||
Supports HTML, JSON, and Markdown formats.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "database_schema"
|
|
||||||
|
|
||||||
async def get(self, request):
|
|
||||||
database_name = request.url_vars["database"]
|
|
||||||
format_ = request.url_vars.get("format") or "html"
|
|
||||||
|
|
||||||
# Check if database exists
|
|
||||||
if database_name not in self.ds.databases:
|
|
||||||
return self.format_error_response("Database not found", format_)
|
|
||||||
|
|
||||||
# Check view-database permission
|
|
||||||
await self.ds.ensure_permission(
|
|
||||||
action="view-database",
|
|
||||||
resource=DatabaseResource(database=database_name),
|
|
||||||
actor=request.actor,
|
|
||||||
)
|
|
||||||
|
|
||||||
schema = await self.get_database_schema(database_name)
|
|
||||||
|
|
||||||
if format_ == "json":
|
|
||||||
return self.format_json_response(
|
|
||||||
{"database": database_name, "schema": schema}
|
|
||||||
)
|
|
||||||
elif format_ == "md":
|
|
||||||
return self.format_markdown_response(f"Schema for {database_name}", schema)
|
|
||||||
else:
|
|
||||||
schemas = [{"database": database_name, "schema": schema}]
|
|
||||||
return await self.format_html_response(request, schemas)
|
|
||||||
|
|
||||||
|
|
||||||
class TableSchemaView(SchemaBaseView):
|
|
||||||
"""
|
|
||||||
Displays schema for a specific table.
|
|
||||||
Supports HTML, JSON, and Markdown formats.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "table_schema"
|
|
||||||
|
|
||||||
async def get(self, request):
|
|
||||||
database_name = request.url_vars["database"]
|
|
||||||
table_name = request.url_vars["table"]
|
|
||||||
format_ = request.url_vars.get("format") or "html"
|
|
||||||
|
|
||||||
# Check view-table permission
|
|
||||||
await self.ds.ensure_permission(
|
|
||||||
action="view-table",
|
|
||||||
resource=TableResource(database=database_name, table=table_name),
|
|
||||||
actor=request.actor,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Get schema for the table
|
|
||||||
db = self.ds.databases[database_name]
|
|
||||||
result = await db.execute(
|
|
||||||
"select sql from sqlite_master where name = ? and sql is not null",
|
|
||||||
[table_name],
|
|
||||||
)
|
|
||||||
row = result.first()
|
|
||||||
|
|
||||||
# Return 404 if table doesn't exist
|
|
||||||
if not row or not row["sql"]:
|
|
||||||
return self.format_error_response("Table not found", format_)
|
|
||||||
|
|
||||||
schema = row["sql"]
|
|
||||||
|
|
||||||
if format_ == "json":
|
|
||||||
return self.format_json_response(
|
|
||||||
{"database": database_name, "table": table_name, "schema": schema}
|
|
||||||
)
|
|
||||||
elif format_ == "md":
|
|
||||||
return self.format_markdown_response(
|
|
||||||
f"Schema for {database_name}.{table_name}", schema
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
schemas = [{"database": database_name, "schema": schema}]
|
|
||||||
return await self.format_html_response(
|
|
||||||
request, schemas, table_name=table_name
|
|
||||||
)
|
|
||||||
|
|
|
||||||
|
|
@ -6,18 +6,18 @@
|
||||||
|
|
||||||
Datasette doesn't require authentication by default. Any visitor to a Datasette instance can explore the full data and execute read-only SQL queries.
|
Datasette doesn't require authentication by default. Any visitor to a Datasette instance can explore the full data and execute read-only SQL queries.
|
||||||
|
|
||||||
Datasette can be configured to only allow authenticated users, or to control which databases, tables, and queries can be accessed by the public or by specific users. Datasette's plugin system can be used to add many different styles of authentication, such as user accounts, single sign-on or API keys.
|
Datasette's plugin system can be used to add many different styles of authentication, such as user accounts, single sign-on or API keys.
|
||||||
|
|
||||||
.. _authentication_actor:
|
.. _authentication_actor:
|
||||||
|
|
||||||
Actors
|
Actors
|
||||||
======
|
======
|
||||||
|
|
||||||
Through plugins, Datasette can support both authenticated users (with cookies) and authenticated API clients (via authentication tokens). The word "actor" is used to cover both of these cases.
|
Through plugins, Datasette can support both authenticated users (with cookies) and authenticated API agents (via authentication tokens). The word "actor" is used to cover both of these cases.
|
||||||
|
|
||||||
Every request to Datasette has an associated actor value, available in the code as ``request.actor``. This can be ``None`` for unauthenticated requests, or a JSON compatible Python dictionary for authenticated users or API clients.
|
Every request to Datasette has an associated actor value, available in the code as ``request.actor``. This can be ``None`` for unauthenticated requests, or a JSON compatible Python dictionary for authenticated users or API agents.
|
||||||
|
|
||||||
The actor dictionary can be any shape - the design of that data structure is left up to the plugins. Actors should always include a unique ``"id"`` string, as demonstrated by the "root" actor below.
|
The actor dictionary can be any shape - the design of that data structure is left up to the plugins. A useful convention is to include an ``"id"`` string, as demonstrated by the "root" actor below.
|
||||||
|
|
||||||
Plugins can use the :ref:`plugin_hook_actor_from_request` hook to implement custom logic for authenticating an actor based on the incoming HTTP request.
|
Plugins can use the :ref:`plugin_hook_actor_from_request` hook to implement custom logic for authenticating an actor based on the incoming HTTP request.
|
||||||
|
|
||||||
|
|
@ -32,21 +32,19 @@ The one exception is the "root" account, which you can sign into while using Dat
|
||||||
|
|
||||||
The ``--root`` flag is designed for local development and testing. When you start Datasette with ``--root``, the root user automatically receives every permission, including:
|
The ``--root`` flag is designed for local development and testing. When you start Datasette with ``--root``, the root user automatically receives every permission, including:
|
||||||
|
|
||||||
* All view permissions (``view-instance``, ``view-database``, ``view-table``, etc.)
|
* All view permissions (view-instance, view-database, view-table, etc.)
|
||||||
* All write permissions (``insert-row``, ``update-row``, ``delete-row``, ``create-table``, ``alter-table``, ``drop-table``)
|
* All write permissions (insert-row, update-row, delete-row, create-table, alter-table, drop-table)
|
||||||
* Debug permissions (``permissions-debug``, ``debug-menu``)
|
* Debug permissions (permissions-debug, debug-menu)
|
||||||
* Any custom permissions defined by plugins
|
* Any custom permissions defined by plugins
|
||||||
|
|
||||||
If you add explicit deny rules in ``datasette.yaml`` those can still block the
|
.. warning::
|
||||||
root actor from specific databases or tables.
|
The ``--root`` flag should only be used for local development. Never use it in production or on publicly accessible servers.
|
||||||
|
|
||||||
The ``--root`` flag sets an internal ``root_enabled`` switch—without it, a signed-in user with ``{"id": "root"}`` is treated like any other actor.
|
|
||||||
|
|
||||||
To sign in as root, start Datasette using the ``--root`` command-line option, like this::
|
To sign in as root, start Datasette using the ``--root`` command-line option, like this::
|
||||||
|
|
||||||
datasette --root
|
datasette --root
|
||||||
|
|
||||||
Datasette will output a single-use-only login URL on startup::
|
::
|
||||||
|
|
||||||
http://127.0.0.1:8001/-/auth-token?token=786fc524e0199d70dc9a581d851f466244e114ca92f33aa3b42a139e9388daa7
|
http://127.0.0.1:8001/-/auth-token?token=786fc524e0199d70dc9a581d851f466244e114ca92f33aa3b42a139e9388daa7
|
||||||
INFO: Started server process [25801]
|
INFO: Started server process [25801]
|
||||||
|
|
@ -54,7 +52,7 @@ Datasette will output a single-use-only login URL on startup::
|
||||||
INFO: Application startup complete.
|
INFO: Application startup complete.
|
||||||
INFO: Uvicorn running on http://127.0.0.1:8001 (Press CTRL+C to quit)
|
INFO: Uvicorn running on http://127.0.0.1:8001 (Press CTRL+C to quit)
|
||||||
|
|
||||||
Click on that link and then visit ``http://127.0.0.1:8001/-/actor`` to confirm that you are authenticated as an actor that looks like this:
|
The URL on the first line includes a one-use token which can be used to sign in as the "root" actor in your browser. Click on that link and then visit ``http://127.0.0.1:8001/-/actor`` to confirm that you are authenticated as an actor that looks like this:
|
||||||
|
|
||||||
.. code-block:: json
|
.. code-block:: json
|
||||||
|
|
||||||
|
|
@ -67,7 +65,7 @@ Click on that link and then visit ``http://127.0.0.1:8001/-/actor`` to confirm t
|
||||||
Permissions
|
Permissions
|
||||||
===========
|
===========
|
||||||
|
|
||||||
Datasette's permissions system is built around SQL queries. Datasette and its plugins construct SQL queries to resolve the list of resources that an actor can access.
|
Datasette has an extensive permissions system built-in, which can be further extended and customized by plugins.
|
||||||
|
|
||||||
The key question the permissions system answers is this:
|
The key question the permissions system answers is this:
|
||||||
|
|
||||||
|
|
@ -75,80 +73,37 @@ The key question the permissions system answers is this:
|
||||||
|
|
||||||
**Actors** are :ref:`described above <authentication_actor>`.
|
**Actors** are :ref:`described above <authentication_actor>`.
|
||||||
|
|
||||||
An **action** is a string describing the action the actor would like to perform. A full list is :ref:`provided below <actions>` - examples include ``view-table`` and ``execute-sql``.
|
An **action** is a string describing the action the actor would like to perform. A full list is :ref:`provided below <permissions>` - examples include ``view-table`` and ``execute-sql``.
|
||||||
|
|
||||||
A **resource** is the item the actor wishes to interact with - for example a specific database or table. Some actions, such as ``permissions-debug``, are not associated with a particular resource.
|
A **resource** is the item the actor wishes to interact with - for example a specific database or table. Some actions, such as ``permissions-debug``, are not associated with a particular resource.
|
||||||
|
|
||||||
Datasette's built-in view actions (``view-database``, ``view-table`` etc) are allowed by Datasette's default configuration: unless you :ref:`configure additional permission rules <authentication_permissions_config>` unauthenticated users will be allowed to access content.
|
Datasette's built-in view permissions (``view-database``, ``view-table`` etc) default to *allow* - unless you :ref:`configure additional permission rules <authentication_permissions_config>` unauthenticated users will be allowed to access content.
|
||||||
|
|
||||||
Other actions, including those introduced by plugins, will default to *deny*.
|
Permissions with potentially harmful effects should default to *deny*. Plugin authors should account for this when designing new plugins - for example, the `datasette-upload-csvs <https://github.com/simonw/datasette-upload-csvs>`__ plugin defaults to deny so that installations don't accidentally allow unauthenticated users to create new tables by uploading a CSV file.
|
||||||
|
|
||||||
.. _authentication_default_deny:
|
|
||||||
|
|
||||||
Denying all permissions by default
|
|
||||||
----------------------------------
|
|
||||||
|
|
||||||
By default, Datasette allows unauthenticated access to view databases, tables, and execute SQL queries.
|
|
||||||
|
|
||||||
You may want to run Datasette in a mode where **all** access is denied by default, and you explicitly grant permissions only to authenticated users, either using the :ref:`--root mechanism <authentication_root>` or through :ref:`configuration file rules <authentication_permissions_config>` or plugins.
|
|
||||||
|
|
||||||
Use the ``--default-deny`` command-line option to run Datasette in this mode::
|
|
||||||
|
|
||||||
datasette --default-deny data.db --root
|
|
||||||
|
|
||||||
With ``--default-deny`` enabled:
|
|
||||||
|
|
||||||
* Anonymous users are denied access to view the instance, databases, tables, and queries
|
|
||||||
* Authenticated users are also denied access unless they're explicitly granted permissions
|
|
||||||
* The root user (when using ``--root``) still has access to everything
|
|
||||||
* You can grant permissions using :ref:`configuration file rules <authentication_permissions_config>` or plugins
|
|
||||||
|
|
||||||
For example, to allow only a specific user to access your instance::
|
|
||||||
|
|
||||||
datasette --default-deny data.db --config datasette.yaml
|
|
||||||
|
|
||||||
Where ``datasette.yaml`` contains:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
allow:
|
|
||||||
id: alice
|
|
||||||
|
|
||||||
This configuration will deny access to everyone except the user with ``id`` of ``alice``.
|
|
||||||
|
|
||||||
.. _authentication_permissions_explained:
|
.. _authentication_permissions_explained:
|
||||||
|
|
||||||
How permissions are resolved
|
How permissions are resolved
|
||||||
----------------------------
|
----------------------------
|
||||||
|
|
||||||
Datasette performs permission checks using the internal :ref:`datasette_allowed` method, which accepts keyword arguments for ``action``, ``resource`` and an optional ``actor``.
|
The :ref:`datasette.permission_allowed(actor, action, resource=None, default=...)<datasette_permission_allowed>` method is called to check if an actor is allowed to perform a specific action.
|
||||||
|
|
||||||
``resource`` should be an instance of the appropriate ``Resource`` subclass from :mod:`datasette.resources` - for example ``InstanceResource()``, ``DatabaseResource(database="...")`` or ``TableResource(database="...", table="...")``. This defaults to ``InstanceResource()`` if not specified.
|
This method asks every plugin that implements the :ref:`plugin_hook_permission_allowed` hook if the actor is allowed to perform the action.
|
||||||
|
|
||||||
When a check runs Datasette gathers allow/deny rules from multiple sources and
|
Each plugin can return ``True`` to indicate that the actor is allowed to perform the action, ``False`` if they are not allowed and ``None`` if the plugin has no opinion on the matter.
|
||||||
compiles them into a SQL query. The resulting query describes all of the
|
|
||||||
resources an actor may access for that action, together with the reasons those
|
|
||||||
resources were allowed or denied. The combined sources are:
|
|
||||||
|
|
||||||
* ``allow`` blocks configured in :ref:`datasette.yaml <authentication_permissions_config>`.
|
``False`` acts as a veto - if any plugin returns ``False`` then the permission check is denied. Otherwise, if any plugin returns ``True`` then the permission check is allowed.
|
||||||
* :ref:`Actor restrictions <authentication_cli_create_token_restrict>` encoded into the actor dictionary or API token.
|
|
||||||
* The "root" user shortcut when ``--root`` (or :attr:`Datasette.root_enabled <datasette.app.Datasette.root_enabled>`) is active, answering ``True`` to all permission checks unless configuration rules deny them at a more specific level.
|
|
||||||
* Any additional SQL provided by plugins implementing :ref:`plugin_hook_permission_resources_sql`.
|
|
||||||
|
|
||||||
Datasette evaluates the SQL to determine if the requested ``resource`` is
|
The ``resource`` argument can be used to specify a specific resource that the action is being performed against. Some permissions, such as ``view-instance``, do not involve a resource. Others such as ``view-database`` have a resource that is a string naming the database. Permissions that take both a database name and the name of a table, view or canned query within that database use a resource that is a tuple of two strings, ``(database_name, resource_name)``.
|
||||||
included. Explicit deny rules returned by configuration or plugins will block
|
|
||||||
access even if other rules allowed it.
|
Plugins that implement the ``permission_allowed()`` hook can decide if they are going to consider the provided resource or not.
|
||||||
|
|
||||||
.. _authentication_permissions_allow:
|
.. _authentication_permissions_allow:
|
||||||
|
|
||||||
Defining permissions with "allow" blocks
|
Defining permissions with "allow" blocks
|
||||||
----------------------------------------
|
----------------------------------------
|
||||||
|
|
||||||
One way to define permissions in Datasette is to use an ``"allow"`` block :ref:`in the datasette.yaml file <authentication_permissions_config>`. This is a JSON document describing which actors are allowed to perform an action against a specific resource.
|
The standard way to define permissions in Datasette is to use an ``"allow"`` block :ref:`in the datasette.yaml file <authentication_permissions_config>`. This is a JSON document describing which actors are allowed to perform a permission.
|
||||||
|
|
||||||
Each ``allow`` block is compiled into SQL and combined with any
|
|
||||||
:ref:`plugin-provided rules <plugin_hook_permission_resources_sql>` to produce
|
|
||||||
the cascading allow/deny decisions that power :ref:`datasette_allowed`.
|
|
||||||
|
|
||||||
The most basic form of allow block is this (`allow demo <https://latest.datasette.io/-/allow-debug?actor=%7B%22id%22%3A+%22root%22%7D&allow=%7B%0D%0A++++++++%22id%22%3A+%22root%22%0D%0A++++%7D>`__, `deny demo <https://latest.datasette.io/-/allow-debug?actor=%7B%22id%22%3A+%22trevor%22%7D&allow=%7B%0D%0A++++++++%22id%22%3A+%22root%22%0D%0A++++%7D>`__):
|
The most basic form of allow block is this (`allow demo <https://latest.datasette.io/-/allow-debug?actor=%7B%22id%22%3A+%22root%22%7D&allow=%7B%0D%0A++++++++%22id%22%3A+%22root%22%0D%0A++++%7D>`__, `deny demo <https://latest.datasette.io/-/allow-debug?actor=%7B%22id%22%3A+%22trevor%22%7D&allow=%7B%0D%0A++++++++%22id%22%3A+%22root%22%0D%0A++++%7D>`__):
|
||||||
|
|
||||||
|
|
@ -470,7 +425,7 @@ You can control the following:
|
||||||
* Access to specific tables and views
|
* Access to specific tables and views
|
||||||
* Access to specific :ref:`canned_queries`
|
* Access to specific :ref:`canned_queries`
|
||||||
|
|
||||||
If a user has permission to view a table they will be able to view that table, regardless of whether they have permission to view the database or instance that the table exists within.
|
If a user cannot access a specific database, they will not be able to access tables, views or queries within that database. If a user cannot access the instance they will not be able to access any of the databases, tables, views or queries.
|
||||||
|
|
||||||
.. _authentication_permissions_instance:
|
.. _authentication_permissions_instance:
|
||||||
|
|
||||||
|
|
@ -708,7 +663,7 @@ Controlling the ability to execute arbitrary SQL
|
||||||
|
|
||||||
Datasette defaults to allowing any site visitor to execute their own custom SQL queries, for example using the form on `the database page <https://latest.datasette.io/fixtures>`__ or by appending a ``?_where=`` parameter to the table page `like this <https://latest.datasette.io/fixtures/facetable?_where=_city_id=1>`__.
|
Datasette defaults to allowing any site visitor to execute their own custom SQL queries, for example using the form on `the database page <https://latest.datasette.io/fixtures>`__ or by appending a ``?_where=`` parameter to the table page `like this <https://latest.datasette.io/fixtures/facetable?_where=_city_id=1>`__.
|
||||||
|
|
||||||
Access to this ability is controlled by the :ref:`actions_execute_sql` permission.
|
Access to this ability is controlled by the :ref:`permissions_execute_sql` permission.
|
||||||
|
|
||||||
The easiest way to disable arbitrary SQL queries is using the :ref:`default_allow_sql setting <setting_default_allow_sql>` when you first start Datasette running.
|
The easiest way to disable arbitrary SQL queries is using the :ref:`default_allow_sql setting <setting_default_allow_sql>` when you first start Datasette running.
|
||||||
|
|
||||||
|
|
@ -1066,37 +1021,15 @@ This example outputs the following::
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Restrictions act as an allowlist layered on top of the actor's existing
|
|
||||||
permissions. They can only remove access the actor would otherwise have—they
|
|
||||||
cannot grant new access. If the underlying actor is denied by ``allow`` rules in
|
|
||||||
``datasette.yaml`` or by a plugin, a token that lists that resource in its
|
|
||||||
``"_r"`` section will still be denied.
|
|
||||||
|
|
||||||
|
|
||||||
.. _permissions_plugins:
|
.. _permissions_plugins:
|
||||||
|
|
||||||
Checking permissions in plugins
|
Checking permissions in plugins
|
||||||
===============================
|
===============================
|
||||||
|
|
||||||
Datasette plugins can check if an actor has permission to perform an action using :ref:`datasette_allowed`—for example::
|
Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)<datasette_permission_allowed>` method.
|
||||||
|
|
||||||
from datasette.resources import TableResource
|
Datasette core performs a number of permission checks, :ref:`documented below <permissions>`. Plugins can implement the :ref:`plugin_hook_permission_allowed` plugin hook to participate in decisions about whether an actor should be able to perform a specified action.
|
||||||
|
|
||||||
can_edit = await datasette.allowed(
|
|
||||||
action="update-row",
|
|
||||||
resource=TableResource(database="fixtures", table="facetable"),
|
|
||||||
actor=request.actor,
|
|
||||||
)
|
|
||||||
|
|
||||||
Use :ref:`datasette_ensure_permission` when you need to enforce a permission and
|
|
||||||
raise a ``Forbidden`` error automatically.
|
|
||||||
|
|
||||||
Plugins that define new operations should return :class:`~datasette.permissions.Action`
|
|
||||||
objects from :ref:`plugin_register_actions` and can supply additional allow/deny
|
|
||||||
rules by returning :class:`~datasette.permissions.PermissionSQL` objects from the
|
|
||||||
:ref:`plugin_hook_permission_resources_sql` hook. Those rules are merged with
|
|
||||||
configuration ``allow`` blocks and actor restrictions to determine the final
|
|
||||||
result for each check.
|
|
||||||
|
|
||||||
.. _authentication_actor_matches_allow:
|
.. _authentication_actor_matches_allow:
|
||||||
|
|
||||||
|
|
@ -1116,14 +1049,12 @@ The currently authenticated actor is made available to plugins as ``request.acto
|
||||||
|
|
||||||
.. _PermissionsDebugView:
|
.. _PermissionsDebugView:
|
||||||
|
|
||||||
Permissions debug tools
|
The permissions debug tool
|
||||||
=======================
|
==========================
|
||||||
|
|
||||||
The debug tool at ``/-/permissions`` is available to any actor with the ``permissions-debug`` permission. By default this is just the :ref:`authenticated root user <authentication_root>` but you can open it up to all users by starting Datasette like this::
|
The debug tool at ``/-/permissions`` is only available to the :ref:`authenticated root user <authentication_root>` (or any actor granted the ``permissions-debug`` action).
|
||||||
|
|
||||||
datasette -s permissions.permissions-debug true data.db
|
It shows the thirty most recent permission checks that have been carried out by the Datasette instance.
|
||||||
|
|
||||||
The page shows the permission checks that have been carried out by the Datasette instance.
|
|
||||||
|
|
||||||
It also provides an interface for running hypothetical permission checks against a hypothetical actor. This is a useful way of confirming that your configured permissions work in the way you expect.
|
It also provides an interface for running hypothetical permission checks against a hypothetical actor. This is a useful way of confirming that your configured permissions work in the way you expect.
|
||||||
|
|
||||||
|
|
@ -1132,20 +1063,37 @@ This is designed to help administrators and plugin authors understand exactly ho
|
||||||
.. _AllowedResourcesView:
|
.. _AllowedResourcesView:
|
||||||
|
|
||||||
Allowed resources view
|
Allowed resources view
|
||||||
----------------------
|
======================
|
||||||
|
|
||||||
The ``/-/allowed`` endpoint displays resources that the current actor can access for a specified ``action``.
|
The ``/-/allowed`` endpoint displays resources that the current actor can access for a supplied ``action`` query string argument.
|
||||||
|
|
||||||
This endpoint provides an interactive HTML form interface. Add ``.json`` to the URL path (e.g. ``/-/allowed.json``) to get the raw JSON response instead.
|
This endpoint provides an interactive HTML form interface. Add ``.json`` to the URL path (e.g. ``/-/allowed.json``) to get the raw JSON response instead.
|
||||||
|
|
||||||
Pass ``?action=view-table`` (or another action) to select the action. Optional ``parent=`` and ``child=`` query parameters can narrow the results to a specific database/table pair.
|
Pass ``?action=view-table`` (or another action) to select the action. Optional ``parent=`` and ``child=`` query parameters can narrow the results to a specific database/table pair.
|
||||||
|
|
||||||
This endpoint is publicly accessible to help users understand their own permissions. The potentially sensitive ``reason`` field is only shown to users with the ``permissions-debug`` permission - it shows the plugins and explanatory reasons that were responsible for each decision.
|
This endpoint is publicly accessible to help users understand their own permissions. However, potentially sensitive fields (``reason`` and ``source_plugin``) are only included in responses for users with the ``permissions-debug`` permission.
|
||||||
|
|
||||||
|
Datasette includes helper endpoints for exploring the action-based permission resolver:
|
||||||
|
|
||||||
|
``/-/allowed``
|
||||||
|
Returns a paginated list of resources that the current actor is allowed to access for a given action. Pass ``?action=view-table`` (or another action) to select the action, and optional ``parent=``/``child=`` query parameters to narrow the results to a specific database/table pair.
|
||||||
|
|
||||||
|
``/-/rules``
|
||||||
|
Lists the raw permission rules (both allow and deny) contributing to each resource for the supplied action. This includes configuration-derived and plugin-provided rules. **Requires the permissions-debug permission** (only available to the root user by default).
|
||||||
|
|
||||||
|
``/-/check``
|
||||||
|
Evaluates whether the current actor can perform ``action`` against an optional ``parent``/``child`` resource tuple, returning the winning rule and reason.
|
||||||
|
|
||||||
|
These endpoints work in conjunction with :ref:`plugin_hook_permission_resources_sql` and make it easier to verify that configuration allow blocks and plugins are behaving as intended.
|
||||||
|
|
||||||
|
All three endpoints support both HTML and JSON responses. Visit the endpoint directly for an interactive HTML form interface, or add ``.json`` to the URL for a raw JSON response.
|
||||||
|
|
||||||
|
**Security note:** The ``/-/check`` and ``/-/allowed`` endpoints are publicly accessible to help users understand their own permissions. However, potentially sensitive fields (``reason`` and ``source_plugin``) are only included in responses for users with the ``permissions-debug`` permission. The ``/-/rules`` endpoint requires the ``permissions-debug`` permission for all access.
|
||||||
|
|
||||||
.. _PermissionRulesView:
|
.. _PermissionRulesView:
|
||||||
|
|
||||||
Permission rules view
|
Permission rules view
|
||||||
---------------------
|
=====================
|
||||||
|
|
||||||
The ``/-/rules`` endpoint displays all permission rules (both allow and deny) for each candidate resource for the requested action.
|
The ``/-/rules`` endpoint displays all permission rules (both allow and deny) for each candidate resource for the requested action.
|
||||||
|
|
||||||
|
|
@ -1153,12 +1101,12 @@ This endpoint provides an interactive HTML form interface. Add ``.json`` to the
|
||||||
|
|
||||||
Pass ``?action=`` as a query parameter to specify which action to check.
|
Pass ``?action=`` as a query parameter to specify which action to check.
|
||||||
|
|
||||||
This endpoint requires the ``permissions-debug`` permission.
|
**Requires the permissions-debug permission** - this endpoint returns a 403 Forbidden error for users without this permission.
|
||||||
|
|
||||||
.. _PermissionCheckView:
|
.. _PermissionCheckView:
|
||||||
|
|
||||||
Permission check view
|
Permission check view
|
||||||
---------------------
|
=====================
|
||||||
|
|
||||||
The ``/-/check`` endpoint evaluates a single action/resource pair and returns information indicating whether the access was allowed along with diagnostic information.
|
The ``/-/check`` endpoint evaluates a single action/resource pair and returns information indicating whether the access was allowed along with diagnostic information.
|
||||||
|
|
||||||
|
|
@ -1166,6 +1114,8 @@ This endpoint provides an interactive HTML form interface. Add ``.json`` to the
|
||||||
|
|
||||||
Pass ``?action=`` to specify the action to check, and optional ``?parent=`` and ``?child=`` parameters to specify the resource.
|
Pass ``?action=`` to specify the action to check, and optional ``?parent=`` and ``?child=`` parameters to specify the resource.
|
||||||
|
|
||||||
|
This endpoint is publicly accessible to help users understand their own permissions. However, potentially sensitive fields (``reason`` and ``source_plugin``) are only included in responses for users with the ``permissions-debug`` permission.
|
||||||
|
|
||||||
.. _authentication_ds_actor:
|
.. _authentication_ds_actor:
|
||||||
|
|
||||||
The ds_actor cookie
|
The ds_actor cookie
|
||||||
|
|
@ -1231,156 +1181,168 @@ The /-/logout page
|
||||||
|
|
||||||
The page at ``/-/logout`` provides the ability to log out of a ``ds_actor`` cookie authentication session.
|
The page at ``/-/logout`` provides the ability to log out of a ``ds_actor`` cookie authentication session.
|
||||||
|
|
||||||
.. _actions:
|
.. _permissions:
|
||||||
|
|
||||||
Built-in actions
|
Built-in permissions
|
||||||
================
|
====================
|
||||||
|
|
||||||
This section lists all of the permission checks that are carried out by Datasette core, along with the ``resource`` if it was passed.
|
This section lists all of the permission checks that are carried out by Datasette core, along with the ``resource`` if it was passed.
|
||||||
|
|
||||||
.. _actions_view_instance:
|
.. _permissions_view_instance:
|
||||||
|
|
||||||
view-instance
|
view-instance
|
||||||
-------------
|
-------------
|
||||||
|
|
||||||
Top level permission - Actor is allowed to view any pages within this instance, starting at https://latest.datasette.io/
|
Top level permission - Actor is allowed to view any pages within this instance, starting at https://latest.datasette.io/
|
||||||
|
|
||||||
.. _actions_view_database:
|
Default *allow*.
|
||||||
|
|
||||||
|
.. _permissions_view_database:
|
||||||
|
|
||||||
view-database
|
view-database
|
||||||
-------------
|
-------------
|
||||||
|
|
||||||
Actor is allowed to view a database page, e.g. https://latest.datasette.io/fixtures
|
Actor is allowed to view a database page, e.g. https://latest.datasette.io/fixtures
|
||||||
|
|
||||||
``resource`` - ``datasette.permissions.DatabaseResource(database)``
|
``resource`` - string
|
||||||
``database`` is the name of the database (string)
|
The name of the database
|
||||||
|
|
||||||
.. _actions_view_database_download:
|
Default *allow*.
|
||||||
|
|
||||||
|
.. _permissions_view_database_download:
|
||||||
|
|
||||||
view-database-download
|
view-database-download
|
||||||
----------------------
|
----------------------
|
||||||
|
|
||||||
Actor is allowed to download a database, e.g. https://latest.datasette.io/fixtures.db
|
Actor is allowed to download a database, e.g. https://latest.datasette.io/fixtures.db
|
||||||
|
|
||||||
``resource`` - ``datasette.resources.DatabaseResource(database)``
|
``resource`` - string
|
||||||
``database`` is the name of the database (string)
|
The name of the database
|
||||||
|
|
||||||
.. _actions_view_table:
|
Default *allow*.
|
||||||
|
|
||||||
|
.. _permissions_view_table:
|
||||||
|
|
||||||
view-table
|
view-table
|
||||||
----------
|
----------
|
||||||
|
|
||||||
Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.io/fixtures/complex_foreign_keys
|
Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.io/fixtures/complex_foreign_keys
|
||||||
|
|
||||||
``resource`` - ``datasette.resources.TableResource(database, table)``
|
``resource`` - tuple: (string, string)
|
||||||
``database`` is the name of the database (string)
|
The name of the database, then the name of the table
|
||||||
|
|
||||||
``table`` is the name of the table (string)
|
Default *allow*.
|
||||||
|
|
||||||
.. _actions_view_query:
|
.. _permissions_view_query:
|
||||||
|
|
||||||
view-query
|
view-query
|
||||||
----------
|
----------
|
||||||
|
|
||||||
Actor is allowed to view (and execute) a :ref:`canned query <canned_queries>` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size - this includes executing :ref:`canned_queries_writable`.
|
Actor is allowed to view (and execute) a :ref:`canned query <canned_queries>` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size - this includes executing :ref:`canned_queries_writable`.
|
||||||
|
|
||||||
``resource`` - ``datasette.resources.QueryResource(database, query)``
|
``resource`` - tuple: (string, string)
|
||||||
``database`` is the name of the database (string)
|
The name of the database, then the name of the canned query
|
||||||
|
|
||||||
``query`` is the name of the canned query (string)
|
|
||||||
|
|
||||||
.. _actions_insert_row:
|
Default *allow*.
|
||||||
|
|
||||||
|
.. _permissions_insert_row:
|
||||||
|
|
||||||
insert-row
|
insert-row
|
||||||
----------
|
----------
|
||||||
|
|
||||||
Actor is allowed to insert rows into a table.
|
Actor is allowed to insert rows into a table.
|
||||||
|
|
||||||
``resource`` - ``datasette.resources.TableResource(database, table)``
|
``resource`` - tuple: (string, string)
|
||||||
``database`` is the name of the database (string)
|
The name of the database, then the name of the table
|
||||||
|
|
||||||
``table`` is the name of the table (string)
|
Default *deny*.
|
||||||
|
|
||||||
.. _actions_delete_row:
|
.. _permissions_delete_row:
|
||||||
|
|
||||||
delete-row
|
delete-row
|
||||||
----------
|
----------
|
||||||
|
|
||||||
Actor is allowed to delete rows from a table.
|
Actor is allowed to delete rows from a table.
|
||||||
|
|
||||||
``resource`` - ``datasette.resources.TableResource(database, table)``
|
``resource`` - tuple: (string, string)
|
||||||
``database`` is the name of the database (string)
|
The name of the database, then the name of the table
|
||||||
|
|
||||||
``table`` is the name of the table (string)
|
Default *deny*.
|
||||||
|
|
||||||
.. _actions_update_row:
|
.. _permissions_update_row:
|
||||||
|
|
||||||
update-row
|
update-row
|
||||||
----------
|
----------
|
||||||
|
|
||||||
Actor is allowed to update rows in a table.
|
Actor is allowed to update rows in a table.
|
||||||
|
|
||||||
``resource`` - ``datasette.resources.TableResource(database, table)``
|
``resource`` - tuple: (string, string)
|
||||||
``database`` is the name of the database (string)
|
The name of the database, then the name of the table
|
||||||
|
|
||||||
``table`` is the name of the table (string)
|
Default *deny*.
|
||||||
|
|
||||||
.. _actions_create_table:
|
.. _permissions_create_table:
|
||||||
|
|
||||||
create-table
|
create-table
|
||||||
------------
|
------------
|
||||||
|
|
||||||
Actor is allowed to create a database table.
|
Actor is allowed to create a database table.
|
||||||
|
|
||||||
``resource`` - ``datasette.resources.DatabaseResource(database)``
|
``resource`` - string
|
||||||
``database`` is the name of the database (string)
|
The name of the database
|
||||||
|
|
||||||
.. _actions_alter_table:
|
Default *deny*.
|
||||||
|
|
||||||
|
.. _permissions_alter_table:
|
||||||
|
|
||||||
alter-table
|
alter-table
|
||||||
-----------
|
-----------
|
||||||
|
|
||||||
Actor is allowed to alter a database table.
|
Actor is allowed to alter a database table.
|
||||||
|
|
||||||
``resource`` - ``datasette.resources.TableResource(database, table)``
|
``resource`` - tuple: (string, string)
|
||||||
``database`` is the name of the database (string)
|
The name of the database, then the name of the table
|
||||||
|
|
||||||
``table`` is the name of the table (string)
|
Default *deny*.
|
||||||
|
|
||||||
.. _actions_drop_table:
|
.. _permissions_drop_table:
|
||||||
|
|
||||||
drop-table
|
drop-table
|
||||||
----------
|
----------
|
||||||
|
|
||||||
Actor is allowed to drop a database table.
|
Actor is allowed to drop a database table.
|
||||||
|
|
||||||
``resource`` - ``datasette.resources.TableResource(database, table)``
|
``resource`` - tuple: (string, string)
|
||||||
``database`` is the name of the database (string)
|
The name of the database, then the name of the table
|
||||||
|
|
||||||
``table`` is the name of the table (string)
|
Default *deny*.
|
||||||
|
|
||||||
.. _actions_execute_sql:
|
.. _permissions_execute_sql:
|
||||||
|
|
||||||
execute-sql
|
execute-sql
|
||||||
-----------
|
-----------
|
||||||
|
|
||||||
Actor is allowed to run arbitrary SQL queries against a specific database, e.g. https://latest.datasette.io/fixtures/-/query?sql=select+100
|
Actor is allowed to run arbitrary SQL queries against a specific database, e.g. https://latest.datasette.io/fixtures?sql=select+100
|
||||||
|
|
||||||
``resource`` - ``datasette.resources.DatabaseResource(database)``
|
``resource`` - string
|
||||||
``database`` is the name of the database (string)
|
The name of the database
|
||||||
|
|
||||||
See also :ref:`the default_allow_sql setting <setting_default_allow_sql>`.
|
Default *allow*. See also :ref:`the default_allow_sql setting <setting_default_allow_sql>`.
|
||||||
|
|
||||||
.. _actions_permissions_debug:
|
.. _permissions_permissions_debug:
|
||||||
|
|
||||||
permissions-debug
|
permissions-debug
|
||||||
-----------------
|
-----------------
|
||||||
|
|
||||||
Actor is allowed to view the ``/-/permissions`` debug tools.
|
Actor is allowed to view the ``/-/permissions`` debug page.
|
||||||
|
|
||||||
.. _actions_debug_menu:
|
Default *deny*.
|
||||||
|
|
||||||
|
.. _permissions_debug_menu:
|
||||||
|
|
||||||
debug-menu
|
debug-menu
|
||||||
----------
|
----------
|
||||||
|
|
||||||
Controls if the various debug pages are displayed in the navigation menu.
|
Controls if the various debug pages are displayed in the navigation menu.
|
||||||
|
|
||||||
|
Default *deny*.
|
||||||
|
|
|
||||||
|
|
@ -4,93 +4,6 @@
|
||||||
Changelog
|
Changelog
|
||||||
=========
|
=========
|
||||||
|
|
||||||
.. _v1_0_a23:
|
|
||||||
|
|
||||||
1.0a23 (2025-12-02)
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
- Fix for bug where a stale database entry in ``internal.db`` could cause a 500 error on the homepage. (:issue:`2605`)
|
|
||||||
- Cosmetic improvement to ``/-/actions`` page. (:issue:`2599`)
|
|
||||||
|
|
||||||
.. _v1_0_a22:
|
|
||||||
|
|
||||||
1.0a22 (2025-11-13)
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
- ``datasette serve --default-deny`` option for running Datasette configured to :ref:`deny all permissions by default <authentication_default_deny>`. (:issue:`2592`)
|
|
||||||
- ``datasette.is_client()`` method for detecting if code is :ref:`executing inside a datasette.client request <internals_datasette_is_client>`. (:issue:`2594`)
|
|
||||||
- ``datasette.pm`` property can now be used to :ref:`register and unregister plugins in tests <testing_plugins_register_in_test>`. (:issue:`2595`)
|
|
||||||
|
|
||||||
.. _v1_0_a21:
|
|
||||||
|
|
||||||
1.0a21 (2025-11-05)
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
- Fixes an **open redirect** security issue: Datasette instances would redirect to ``example.com/foo/bar`` if you accessed the path ``//example.com/foo/bar``. Thanks to `James Jefferies <https://github.com/jamesjefferies>`__ for the fix. (:issue:`2429`)
|
|
||||||
- Fixed ``datasette publish cloudrun`` to work with changes to the underlying Cloud Run architecture. (:issue:`2511`)
|
|
||||||
- New ``datasette --get /path --headers`` option for inspecting the headers returned by a path. (:issue:`2578`)
|
|
||||||
- New ``datasette.client.get(..., skip_permission_checks=True)`` parameter to bypass permission checks when making requests using the internal client. (:issue:`2583`)
|
|
||||||
|
|
||||||
.. _v0_65_2:
|
|
||||||
|
|
||||||
0.65.2 (2025-11-05)
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
- Fixes an **open redirect** security issue: Datasette instances would redirect to ``example.com/foo/bar`` if you accessed the path ``//example.com/foo/bar``. Thanks to `James Jefferies <https://github.com/jamesjefferies>`__ for the fix. (:issue:`2429`)
|
|
||||||
- Upgraded for compatibility with Python 3.14.
|
|
||||||
- Fixed ``datasette publish cloudrun`` to work with changes to the underlying Cloud Run architecture. (:issue:`2511`)
|
|
||||||
- Minor upgrades to fix warnings, including ``pkg_resources`` deprecation.
|
|
||||||
|
|
||||||
.. _v1_0_a20:
|
|
||||||
|
|
||||||
1.0a20 (2025-11-03)
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
This alpha introduces a major breaking change prior to the 1.0 release of Datasette concerning how Datasette's permission system works.
|
|
||||||
|
|
||||||
Permission system redesign
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Previously the permission system worked using ``datasette.permission_allowed()`` checks which consulted all available plugins in turn to determine whether a given actor was allowed to perform a given action on a given resource.
|
|
||||||
|
|
||||||
This approach could become prohibitively expensive for large lists of items - for example to determine the list of tables that a user could view in a large Datasette instance each plugin implementation of that hook would be fired for every table.
|
|
||||||
|
|
||||||
The new design uses SQL queries against Datasette's internal :ref:`catalog tables <internals_internal>` to derive the list of resources for which an actor has permission for a given action. This turns an N x M problem (N resources, M plugins) into a single SQL query.
|
|
||||||
|
|
||||||
Plugins can use the new :ref:`plugin_hook_permission_resources_sql` hook to return SQL fragments which will be used as part of that query.
|
|
||||||
|
|
||||||
Plugins that use any of the following features will need to be updated to work with this and following alphas (and Datasette 1.0 stable itself):
|
|
||||||
|
|
||||||
- Checking permissions with ``datasette.permission_allowed()`` - this method has been replaced with :ref:`datasette.allowed() <datasette_allowed>`.
|
|
||||||
- Implementing the ``permission_allowed()`` plugin hook - this hook has been removed in favor of :ref:`permission_resources_sql() <plugin_hook_permission_resources_sql>`.
|
|
||||||
- Using ``register_permissions()`` to register permissions - this hook has been removed in favor of :ref:`register_actions() <plugin_register_actions>`.
|
|
||||||
|
|
||||||
Consult the :ref:`v1.0a20 upgrade guide <upgrade_guide_v1_a20>` for further details on how to upgrade affected plugins.
|
|
||||||
|
|
||||||
Plugins can now make use of two new internal methods to help resolve permission checks:
|
|
||||||
|
|
||||||
- :ref:`datasette.allowed_resources() <datasette_allowed_resources>` returns a ``PaginatedResources`` object with a ``.resources`` list of ``Resource`` instances that an actor is allowed to access for a given action (and a ``.next`` token for pagination).
|
|
||||||
- :ref:`datasette.allowed_resources_sql() <datasette_allowed_resources_sql>` returns the SQL and parameters that can be executed against the internal catalog tables to determine which resources an actor is allowed to access for a given action. This can be combined with further SQL to perform advanced custom filtering.
|
|
||||||
|
|
||||||
Related changes:
|
|
||||||
|
|
||||||
- The way ``datasette --root`` works has changed. Running Datasette with this flag now causes the root actor to pass *all* permission checks. (:issue:`2521`)
|
|
||||||
|
|
||||||
- Permission debugging improvements:
|
|
||||||
|
|
||||||
- The ``/-/allowed`` endpoint shows resources the user is allowed to interact with for different actions.
|
|
||||||
- ``/-/rules`` shows the raw allow/deny rules that apply to different permission checks.
|
|
||||||
- ``/-/actions`` lists every available action.
|
|
||||||
- ``/-/check`` can be used to try out different permission checks for the current actor.
|
|
||||||
|
|
||||||
Other changes
|
|
||||||
~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
- The internal ``catalog_views`` table now tracks SQLite views alongside tables in the introspection database. (:issue:`2495`)
|
|
||||||
- Hitting the ``/`` brings up a search interface for navigating to tables that the current user can view. A new ``/-/tables`` endpoint supports this functionality. (:issue:`2523`)
|
|
||||||
- Datasette attempts to detect some configuration errors on startup.
|
|
||||||
- Datasette now supports Python 3.14 and no longer tests against Python 3.9.
|
|
||||||
|
|
||||||
.. _v1_0_a19:
|
.. _v1_0_a19:
|
||||||
|
|
||||||
1.0a19 (2025-04-21)
|
1.0a19 (2025-04-21)
|
||||||
|
|
@ -275,7 +188,7 @@ This alpha release adds basic alter table support to the Datasette Write API and
|
||||||
Alter table support for create, insert, upsert and update
|
Alter table support for create, insert, upsert and update
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
The :ref:`JSON write API <json_api_write>` can now be used to apply simple alter table schema changes, provided the acting actor has the new :ref:`actions_alter_table` permission. (:issue:`2101`)
|
The :ref:`JSON write API <json_api_write>` can now be used to apply simple alter table schema changes, provided the acting actor has the new :ref:`permissions_alter_table` permission. (:issue:`2101`)
|
||||||
|
|
||||||
The only alter operation supported so far is adding new columns to an existing table.
|
The only alter operation supported so far is adding new columns to an existing table.
|
||||||
|
|
||||||
|
|
@ -290,12 +203,12 @@ Permissions fix for the upsert API
|
||||||
|
|
||||||
The :ref:`/database/table/-/upsert API <TableUpsertView>` had a minor permissions bug, only affecting Datasette instances that had configured the ``insert-row`` and ``update-row`` permissions to apply to a specific table rather than the database or instance as a whole. Full details in issue :issue:`2262`.
|
The :ref:`/database/table/-/upsert API <TableUpsertView>` had a minor permissions bug, only affecting Datasette instances that had configured the ``insert-row`` and ``update-row`` permissions to apply to a specific table rather than the database or instance as a whole. Full details in issue :issue:`2262`.
|
||||||
|
|
||||||
To avoid similar mistakes in the future the ``datasette.permission_allowed()`` method now specifies ``default=`` as a keyword-only argument.
|
To avoid similar mistakes in the future the :ref:`datasette.permission_allowed() <datasette_permission_allowed>` method now specifies ``default=`` as a keyword-only argument.
|
||||||
|
|
||||||
Permission checks now consider opinions from every plugin
|
Permission checks now consider opinions from every plugin
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
The ``datasette.permission_allowed()`` method previously consulted every plugin that implemented the ``permission_allowed()`` plugin hook and obeyed the opinion of the last plugin to return a value. (:issue:`2275`)
|
The :ref:`datasette.permission_allowed() <datasette_permission_allowed>` method previously consulted every plugin that implemented the :ref:`permission_allowed() <plugin_hook_permission_allowed>` plugin hook and obeyed the opinion of the last plugin to return a value. (:issue:`2275`)
|
||||||
|
|
||||||
Datasette now consults every plugin and checks to see if any of them returned ``False`` (the veto rule), and if none of them did, it then checks to see if any of them returned ``True``.
|
Datasette now consults every plugin and checks to see if any of them returned ``False`` (the veto rule), and if none of them did, it then checks to see if any of them returned ``True``.
|
||||||
|
|
||||||
|
|
@ -555,7 +468,7 @@ The third Datasette 1.0 alpha release adds upsert support to the JSON API, plus
|
||||||
See `Datasette 1.0a2: Upserts and finely grained permissions <https://simonwillison.net/2022/Dec/15/datasette-1a2/>`__ for an extended, annotated version of these release notes.
|
See `Datasette 1.0a2: Upserts and finely grained permissions <https://simonwillison.net/2022/Dec/15/datasette-1a2/>`__ for an extended, annotated version of these release notes.
|
||||||
|
|
||||||
- New ``/db/table/-/upsert`` API, :ref:`documented here <TableUpsertView>`. upsert is an update-or-insert: existing rows will have specified keys updated, but if no row matches the incoming primary key a brand new row will be inserted instead. (:issue:`1878`)
|
- New ``/db/table/-/upsert`` API, :ref:`documented here <TableUpsertView>`. upsert is an update-or-insert: existing rows will have specified keys updated, but if no row matches the incoming primary key a brand new row will be inserted instead. (:issue:`1878`)
|
||||||
- New ``register_permissions()`` plugin hook. Plugins can now register named permissions, which will then be listed in various interfaces that show available permissions. (:issue:`1940`)
|
- New :ref:`plugin_register_permissions` plugin hook. Plugins can now register named permissions, which will then be listed in various interfaces that show available permissions. (:issue:`1940`)
|
||||||
- The ``/db/-/create`` API for :ref:`creating a table <TableCreateView>` now accepts ``"ignore": true`` and ``"replace": true`` options when called with the ``"rows"`` property that creates a new table based on an example set of rows. This means the API can be called multiple times with different rows, setting rules for what should happen if a primary key collides with an existing row. (:issue:`1927`)
|
- The ``/db/-/create`` API for :ref:`creating a table <TableCreateView>` now accepts ``"ignore": true`` and ``"replace": true`` options when called with the ``"rows"`` property that creates a new table based on an example set of rows. This means the API can be called multiple times with different rows, setting rules for what should happen if a primary key collides with an existing row. (:issue:`1927`)
|
||||||
- Arbitrary permissions can now be configured at the instance, database and resource (table, SQL view or canned query) level in Datasette's :ref:`metadata` JSON and YAML files. The new ``"permissions"`` key can be used to specify which actors should have which permissions. See :ref:`authentication_permissions_other` for details. (:issue:`1636`)
|
- Arbitrary permissions can now be configured at the instance, database and resource (table, SQL view or canned query) level in Datasette's :ref:`metadata` JSON and YAML files. The new ``"permissions"`` key can be used to specify which actors should have which permissions. See :ref:`authentication_permissions_other` for details. (:issue:`1636`)
|
||||||
- The ``/-/create-token`` page can now be used to create API tokens which are restricted to just a subset of actions, including against specific databases or resources. See :ref:`CreateTokenView` for details. (:issue:`1947`)
|
- The ``/-/create-token`` page can now be used to create API tokens which are restricted to just a subset of actions, including against specific databases or resources. See :ref:`CreateTokenView` for details. (:issue:`1947`)
|
||||||
|
|
@ -1155,7 +1068,7 @@ Smaller changes
|
||||||
~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
- Wide tables shown within Datasette now scroll horizontally (:issue:`998`). This is achieved using a new ``<div class="table-wrapper">`` element which may impact the implementation of some plugins (for example `this change to datasette-cluster-map <https://github.com/simonw/datasette-cluster-map/commit/fcb4abbe7df9071c5ab57defd39147de7145b34e>`__).
|
- Wide tables shown within Datasette now scroll horizontally (:issue:`998`). This is achieved using a new ``<div class="table-wrapper">`` element which may impact the implementation of some plugins (for example `this change to datasette-cluster-map <https://github.com/simonw/datasette-cluster-map/commit/fcb4abbe7df9071c5ab57defd39147de7145b34e>`__).
|
||||||
- New :ref:`actions_debug_menu` permission. (:issue:`1068`)
|
- New :ref:`permissions_debug_menu` permission. (:issue:`1068`)
|
||||||
- Removed ``--debug`` option, which didn't do anything. (:issue:`814`)
|
- Removed ``--debug`` option, which didn't do anything. (:issue:`814`)
|
||||||
- ``Link:`` HTTP header pagination. (:issue:`1014`)
|
- ``Link:`` HTTP header pagination. (:issue:`1014`)
|
||||||
- ``x`` button for clearing filters. (:issue:`1016`)
|
- ``x`` button for clearing filters. (:issue:`1016`)
|
||||||
|
|
@ -1414,7 +1327,7 @@ You can use the new ``"allow"`` block syntax in ``metadata.json`` (or ``metadata
|
||||||
|
|
||||||
See :ref:`authentication_permissions_allow` for more details.
|
See :ref:`authentication_permissions_allow` for more details.
|
||||||
|
|
||||||
Plugins can implement their own custom permission checks using the new ``plugin_hook_permission_allowed()`` plugin hook.
|
Plugins can implement their own custom permission checks using the new :ref:`plugin_hook_permission_allowed` hook.
|
||||||
|
|
||||||
A new debug page at ``/-/permissions`` shows recent permission checks, to help administrators and plugin authors understand exactly what checks are being performed. This tool defaults to only being available to the root user, but can be exposed to other users by plugins that respond to the ``permissions-debug`` permission. (:issue:`788`)
|
A new debug page at ``/-/permissions`` shows recent permission checks, to help administrators and plugin authors understand exactly what checks are being performed. This tool defaults to only being available to the root user, but can be exposed to other users by plugins that respond to the ``permissions-debug`` permission. (:issue:`788`)
|
||||||
|
|
||||||
|
|
@ -1490,7 +1403,7 @@ Smaller changes
|
||||||
- New :ref:`datasette.get_database() <datasette_get_database>` method.
|
- New :ref:`datasette.get_database() <datasette_get_database>` method.
|
||||||
- Added ``_`` prefix to many private, undocumented methods of the Datasette class. (:issue:`576`)
|
- Added ``_`` prefix to many private, undocumented methods of the Datasette class. (:issue:`576`)
|
||||||
- Removed the ``db.get_outbound_foreign_keys()`` method which duplicated the behaviour of ``db.foreign_keys_for_table()``.
|
- Removed the ``db.get_outbound_foreign_keys()`` method which duplicated the behaviour of ``db.foreign_keys_for_table()``.
|
||||||
- New ``await datasette.permission_allowed()`` method.
|
- New :ref:`await datasette.permission_allowed() <datasette_permission_allowed>` method.
|
||||||
- ``/-/actor`` debugging endpoint for viewing the currently authenticated actor.
|
- ``/-/actor`` debugging endpoint for viewing the currently authenticated actor.
|
||||||
- New ``request.cookies`` property.
|
- New ``request.cookies`` property.
|
||||||
- ``/-/plugins`` endpoint now shows a list of hooks implemented by each plugin, e.g. https://latest.datasette.io/-/plugins?all=1
|
- ``/-/plugins`` endpoint now shows a list of hooks implemented by each plugin, e.g. https://latest.datasette.io/-/plugins?all=1
|
||||||
|
|
|
||||||
|
|
@ -119,10 +119,8 @@ Once started you can access it at ``http://localhost:8001``
|
||||||
signed cookies
|
signed cookies
|
||||||
--root Output URL that sets a cookie authenticating
|
--root Output URL that sets a cookie authenticating
|
||||||
the root user
|
the root user
|
||||||
--default-deny Deny all permissions by default
|
|
||||||
--get TEXT Run an HTTP GET request against this path,
|
--get TEXT Run an HTTP GET request against this path,
|
||||||
print results and exit
|
print results and exit
|
||||||
--headers Include HTTP headers in --get output
|
|
||||||
--token TEXT API token to send with --get requests
|
--token TEXT API token to send with --get requests
|
||||||
--actor TEXT Actor to use for --get requests (JSON string)
|
--actor TEXT Actor to use for --get requests (JSON string)
|
||||||
--version-note TEXT Additional note to show on /-/versions
|
--version-note TEXT Additional note to show on /-/versions
|
||||||
|
|
@ -490,15 +488,8 @@ See :ref:`publish_cloud_run`.
|
||||||
--cpu [1|2|4] Number of vCPUs to allocate in Cloud Run
|
--cpu [1|2|4] Number of vCPUs to allocate in Cloud Run
|
||||||
--timeout INTEGER Build timeout in seconds
|
--timeout INTEGER Build timeout in seconds
|
||||||
--apt-get-install TEXT Additional packages to apt-get install
|
--apt-get-install TEXT Additional packages to apt-get install
|
||||||
--max-instances INTEGER Maximum Cloud Run instances (use 0 to remove
|
--max-instances INTEGER Maximum Cloud Run instances
|
||||||
the limit) [default: 1]
|
|
||||||
--min-instances INTEGER Minimum Cloud Run instances
|
--min-instances INTEGER Minimum Cloud Run instances
|
||||||
--artifact-repository TEXT Artifact Registry repository to store the
|
|
||||||
image [default: datasette]
|
|
||||||
--artifact-region TEXT Artifact Registry location (region or multi-
|
|
||||||
region) [default: us]
|
|
||||||
--artifact-project TEXT Project ID for Artifact Registry (defaults to
|
|
||||||
the active project)
|
|
||||||
--help Show this message and exit.
|
--help Show this message and exit.
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
12
docs/conf.py
12
docs/conf.py
|
|
@ -36,19 +36,12 @@ extensions = [
|
||||||
"sphinx.ext.extlinks",
|
"sphinx.ext.extlinks",
|
||||||
"sphinx.ext.autodoc",
|
"sphinx.ext.autodoc",
|
||||||
"sphinx_copybutton",
|
"sphinx_copybutton",
|
||||||
"myst_parser",
|
|
||||||
"sphinx_markdown_builder",
|
|
||||||
]
|
]
|
||||||
if not os.environ.get("DISABLE_SPHINX_INLINE_TABS"):
|
if not os.environ.get("DISABLE_SPHINX_INLINE_TABS"):
|
||||||
extensions += ["sphinx_inline_tabs"]
|
extensions += ["sphinx_inline_tabs"]
|
||||||
|
|
||||||
autodoc_member_order = "bysource"
|
autodoc_member_order = "bysource"
|
||||||
|
|
||||||
myst_enable_extensions = ["colon_fence"]
|
|
||||||
|
|
||||||
markdown_http_base = "https://docs.datasette.io/en/stable"
|
|
||||||
markdown_uri_doc_suffix = ".html"
|
|
||||||
|
|
||||||
extlinks = {
|
extlinks = {
|
||||||
"issue": ("https://github.com/simonw/datasette/issues/%s", "#%s"),
|
"issue": ("https://github.com/simonw/datasette/issues/%s", "#%s"),
|
||||||
}
|
}
|
||||||
|
|
@ -60,10 +53,7 @@ templates_path = ["_templates"]
|
||||||
# You can specify multiple suffix as a list of string:
|
# You can specify multiple suffix as a list of string:
|
||||||
#
|
#
|
||||||
# source_suffix = ['.rst', '.md']
|
# source_suffix = ['.rst', '.md']
|
||||||
source_suffix = {
|
source_suffix = ".rst"
|
||||||
".rst": "restructuredtext",
|
|
||||||
".md": "markdown",
|
|
||||||
}
|
|
||||||
|
|
||||||
# The master toctree document.
|
# The master toctree document.
|
||||||
master_doc = "index"
|
master_doc = "index"
|
||||||
|
|
|
||||||
|
|
@ -20,7 +20,7 @@ General guidelines
|
||||||
Setting up a development environment
|
Setting up a development environment
|
||||||
------------------------------------
|
------------------------------------
|
||||||
|
|
||||||
If you have Python 3.10 or higher installed on your computer (on OS X the quickest way to do this `is using homebrew <https://docs.python-guide.org/starting/install3/osx/>`__) you can install an editable copy of Datasette using the following steps.
|
If you have Python 3.8 or higher installed on your computer (on OS X the quickest way to do this `is using homebrew <https://docs.python-guide.org/starting/install3/osx/>`__) you can install an editable copy of Datasette using the following steps.
|
||||||
|
|
||||||
If you want to use GitHub to publish your changes, first `create a fork of datasette <https://github.com/simonw/datasette/fork>`__ under your own GitHub account.
|
If you want to use GitHub to publish your changes, first `create a fork of datasette <https://github.com/simonw/datasette/fork>`__ under your own GitHub account.
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,14 @@
|
||||||
(events)=
|
.. _events:
|
||||||
# Events
|
|
||||||
|
Events
|
||||||
|
======
|
||||||
|
|
||||||
Datasette includes a mechanism for tracking events that occur while the software is running. This is primarily intended to be used by plugins, which can both trigger events and listen for events.
|
Datasette includes a mechanism for tracking events that occur while the software is running. This is primarily intended to be used by plugins, which can both trigger events and listen for events.
|
||||||
|
|
||||||
The core Datasette application triggers events when certain things happen. This page describes those events.
|
The core Datasette application triggers events when certain things happen. This page describes those events.
|
||||||
|
|
||||||
Plugins can listen for events using the {ref}`plugin_hook_track_event` plugin hook, which will be called with instances of the following classes - or additional classes {ref}`registered by other plugins <plugin_hook_register_events>`.
|
Plugins can listen for events using the :ref:`plugin_hook_track_event` plugin hook, which will be called with instances of the following classes - or additional classes :ref:`registered by other plugins <plugin_hook_register_events>`.
|
||||||
|
|
||||||
```{eval-rst}
|
|
||||||
.. automodule:: datasette.events
|
.. automodule:: datasette.events
|
||||||
:members:
|
:members:
|
||||||
:exclude-members: Event
|
:exclude-members: Event
|
||||||
```
|
|
||||||
|
|
@ -272,14 +272,14 @@ The dictionary keys are the name of the database that is used in the URL - e.g.
|
||||||
|
|
||||||
All databases are listed, irrespective of user permissions.
|
All databases are listed, irrespective of user permissions.
|
||||||
|
|
||||||
.. _datasette_actions:
|
.. _datasette_permissions:
|
||||||
|
|
||||||
.actions
|
.permissions
|
||||||
--------
|
------------
|
||||||
|
|
||||||
Property exposing a dictionary of actions that have been registered using the :ref:`plugin_register_actions` plugin hook.
|
Property exposing a dictionary of permissions that have been registered using the :ref:`plugin_register_permissions` plugin hook.
|
||||||
|
|
||||||
The dictionary keys are the action names - e.g. ``view-instance`` - and the values are ``Action()`` objects describing the permission.
|
The dictionary keys are the permission names - e.g. ``view-instance`` - and the values are ``Permission()`` objects describing the permission. Here is a :ref:`description of that object <plugin_register_permissions>`.
|
||||||
|
|
||||||
.. _datasette_plugin_config:
|
.. _datasette_plugin_config:
|
||||||
|
|
||||||
|
|
@ -342,6 +342,33 @@ If no plugins that implement that hook are installed, the default return value l
|
||||||
"2": {"id": "2"}
|
"2": {"id": "2"}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.. _datasette_permission_allowed:
|
||||||
|
|
||||||
|
await .permission_allowed(actor, action, resource=None, default=...)
|
||||||
|
--------------------------------------------------------------------
|
||||||
|
|
||||||
|
``actor`` - dictionary
|
||||||
|
The authenticated actor. This is usually ``request.actor``.
|
||||||
|
|
||||||
|
``action`` - string
|
||||||
|
The name of the action that is being permission checked.
|
||||||
|
|
||||||
|
``resource`` - string or tuple, optional
|
||||||
|
The resource, e.g. the name of the database, or a tuple of two strings containing the name of the database and the name of the table. Only some permissions apply to a resource.
|
||||||
|
|
||||||
|
``default`` - optional: True, False or None
|
||||||
|
What value should be returned by default if nothing provides an opinion on this permission check.
|
||||||
|
Set to ``True`` for default allow or ``False`` for default deny.
|
||||||
|
If not specified the ``default`` from the ``Permission()`` tuple that was registered using :ref:`plugin_register_permissions` will be used.
|
||||||
|
|
||||||
|
Check if the given actor has :ref:`permission <authentication_permissions>` to perform the given action on the given resource.
|
||||||
|
|
||||||
|
Some permission checks are carried out against :ref:`rules defined in datasette.yaml <authentication_permissions_config>`, while other custom permissions may be decided by plugins that implement the :ref:`plugin_hook_permission_allowed` plugin hook.
|
||||||
|
|
||||||
|
If neither ``metadata.json`` nor any of the plugins provide an answer to the permission query the ``default`` argument will be returned.
|
||||||
|
|
||||||
|
See :ref:`permissions` for a full list of permission actions included in Datasette core.
|
||||||
|
|
||||||
.. _datasette_allowed:
|
.. _datasette_allowed:
|
||||||
|
|
||||||
await .allowed(\*, action, resource, actor=None)
|
await .allowed(\*, action, resource, actor=None)
|
||||||
|
|
@ -358,6 +385,8 @@ await .allowed(\*, action, resource, actor=None)
|
||||||
|
|
||||||
This method checks if the given actor has permission to perform the given action on the given resource. All parameters must be passed as keyword arguments.
|
This method checks if the given actor has permission to perform the given action on the given resource. All parameters must be passed as keyword arguments.
|
||||||
|
|
||||||
|
This is the modern resource-based permission checking method. It works with Resource objects that provide structured information about what is being accessed.
|
||||||
|
|
||||||
Example usage:
|
Example usage:
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
@ -385,98 +414,7 @@ Example usage:
|
||||||
|
|
||||||
The method returns ``True`` if the permission is granted, ``False`` if denied.
|
The method returns ``True`` if the permission is granted, ``False`` if denied.
|
||||||
|
|
||||||
.. _datasette_allowed_resources:
|
For legacy string/tuple based permission checking, use :ref:`datasette_permission_allowed` instead.
|
||||||
|
|
||||||
await .allowed_resources(action, actor=None, \*, parent=None, include_is_private=False, include_reasons=False, limit=100, next=None)
|
|
||||||
------------------------------------------------------------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
Returns a ``PaginatedResources`` object containing resources that the actor can access for the specified action, with support for keyset pagination.
|
|
||||||
|
|
||||||
``action`` - string
|
|
||||||
The action name (e.g., "view-table", "view-database")
|
|
||||||
|
|
||||||
``actor`` - dictionary, optional
|
|
||||||
The authenticated actor. Defaults to ``None`` for unauthenticated requests.
|
|
||||||
|
|
||||||
``parent`` - string, optional
|
|
||||||
Optional parent filter (e.g., database name) to limit results
|
|
||||||
|
|
||||||
``include_is_private`` - boolean, optional
|
|
||||||
If True, adds a ``.private`` attribute to each Resource indicating whether anonymous users can access it
|
|
||||||
|
|
||||||
``include_reasons`` - boolean, optional
|
|
||||||
If True, adds a ``.reasons`` attribute with a list of strings describing why access was granted (useful for debugging)
|
|
||||||
|
|
||||||
``limit`` - integer, optional
|
|
||||||
Maximum number of results to return per page (1-1000, default 100)
|
|
||||||
|
|
||||||
``next`` - string, optional
|
|
||||||
Keyset token from a previous page for pagination
|
|
||||||
|
|
||||||
The method returns a ``PaginatedResources`` object (from ``datasette.utils``) with the following attributes:
|
|
||||||
|
|
||||||
``resources`` - list
|
|
||||||
List of ``Resource`` objects for the current page
|
|
||||||
|
|
||||||
``next`` - string or None
|
|
||||||
Token for the next page, or ``None`` if no more results exist
|
|
||||||
|
|
||||||
Example usage:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
# Get first page of tables
|
|
||||||
page = await datasette.allowed_resources(
|
|
||||||
"view-table",
|
|
||||||
actor=request.actor,
|
|
||||||
parent="fixtures",
|
|
||||||
limit=50,
|
|
||||||
)
|
|
||||||
|
|
||||||
for table in page.resources:
|
|
||||||
print(table.parent, table.child)
|
|
||||||
if hasattr(table, "private"):
|
|
||||||
print(f" Private: {table.private}")
|
|
||||||
|
|
||||||
# Get next page if available
|
|
||||||
if page.next:
|
|
||||||
next_page = await datasette.allowed_resources(
|
|
||||||
"view-table", actor=request.actor, next=page.next
|
|
||||||
)
|
|
||||||
|
|
||||||
# Iterate through all results automatically
|
|
||||||
page = await datasette.allowed_resources(
|
|
||||||
"view-table", actor=request.actor
|
|
||||||
)
|
|
||||||
async for table in page.all():
|
|
||||||
print(table.parent, table.child)
|
|
||||||
|
|
||||||
# With reasons for debugging
|
|
||||||
page = await datasette.allowed_resources(
|
|
||||||
"view-table", actor=request.actor, include_reasons=True
|
|
||||||
)
|
|
||||||
for table in page.resources:
|
|
||||||
print(f"{table.child}: {table.reasons}")
|
|
||||||
|
|
||||||
The ``page.all()`` async generator automatically handles pagination, fetching additional pages and yielding all resources one at a time.
|
|
||||||
|
|
||||||
This method uses :ref:`datasette_allowed_resources_sql` under the hood and is an efficient way to list the databases, tables or other resources that an actor can access for a specific action.
|
|
||||||
|
|
||||||
.. _datasette_allowed_resources_sql:
|
|
||||||
|
|
||||||
await .allowed_resources_sql(\*, action, actor=None, parent=None, include_is_private=False)
|
|
||||||
-------------------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
Builds the SQL query that Datasette uses to determine which resources an actor may access for a specific action. Returns a ``(sql: str, params: dict)`` namedtuple that can be executed against the internal ``catalog_*`` database tables. ``parent`` can be used to limit results to a specific database, and ``include_is_private`` adds a column indicating whether anonymous users would be denied access to that resource.
|
|
||||||
|
|
||||||
Plugins that need to execute custom analysis over the raw allow/deny rules can use this helper to run the same query that powers the ``/-/allowed`` debugging interface.
|
|
||||||
|
|
||||||
The SQL query built by this method will return the following columns:
|
|
||||||
|
|
||||||
- ``parent``: The parent resource identifier (or NULL)
|
|
||||||
- ``child``: The child resource identifier (or NULL)
|
|
||||||
- ``reason``: The reason from the rule that granted access
|
|
||||||
- ``is_private``: (if ``include_is_private``) 1 if anonymous users cannot access, 0 otherwise
|
|
||||||
|
|
||||||
.. _datasette_ensure_permission:
|
.. _datasette_ensure_permission:
|
||||||
|
|
||||||
|
|
@ -484,7 +422,7 @@ await .ensure_permission(action, resource=None, actor=None)
|
||||||
-----------------------------------------------------------
|
-----------------------------------------------------------
|
||||||
|
|
||||||
``action`` - string
|
``action`` - string
|
||||||
The action to check. See :ref:`actions` for a list of available actions.
|
The action to check. See :ref:`permissions` for a list of available actions.
|
||||||
|
|
||||||
``resource`` - Resource object (optional)
|
``resource`` - Resource object (optional)
|
||||||
The resource to check the permission against. Must be an instance of ``InstanceResource``, ``DatabaseResource``, or ``TableResource`` from the ``datasette.resources`` module. If omitted, defaults to ``InstanceResource()`` for instance-level permissions.
|
The resource to check the permission against. Must be an instance of ``InstanceResource``, ``DatabaseResource``, or ``TableResource`` from the ``datasette.resources`` module. If omitted, defaults to ``InstanceResource()`` for instance-level permissions.
|
||||||
|
|
@ -594,6 +532,16 @@ The following example creates a token that can access ``view-instance`` and ``vi
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
.. _datasette_get_permission:
|
||||||
|
|
||||||
|
.get_permission(name_or_abbr)
|
||||||
|
-----------------------------
|
||||||
|
|
||||||
|
``name_or_abbr`` - string
|
||||||
|
The name or abbreviation of the permission to look up, e.g. ``view-table`` or ``vt``.
|
||||||
|
|
||||||
|
Returns a :ref:`Permission object <plugin_register_permissions>` representing the permission, or raises a ``KeyError`` if one is not found.
|
||||||
|
|
||||||
.. _datasette_get_database:
|
.. _datasette_get_database:
|
||||||
|
|
||||||
.get_database(name)
|
.get_database(name)
|
||||||
|
|
@ -781,8 +729,8 @@ Use ``is_mutable=False`` to add an immutable database.
|
||||||
|
|
||||||
.. _datasette_add_memory_database:
|
.. _datasette_add_memory_database:
|
||||||
|
|
||||||
.add_memory_database(memory_name, name=None, route=None)
|
.add_memory_database(name)
|
||||||
--------------------------------------------------------
|
--------------------------
|
||||||
|
|
||||||
Adds a shared in-memory database with the specified name:
|
Adds a shared in-memory database with the specified name:
|
||||||
|
|
||||||
|
|
@ -800,9 +748,7 @@ This is a shortcut for the following:
|
||||||
Database(datasette, memory_name="statistics")
|
Database(datasette, memory_name="statistics")
|
||||||
)
|
)
|
||||||
|
|
||||||
Using either of these patterns will result in the in-memory database being served at ``/statistics``.
|
Using either of these patterns will result in the in-memory database being served at ``/statistics``.
|
||||||
|
|
||||||
The ``name`` and ``route`` parameters are optional and work the same way as they do for :ref:`datasette_add_database`.
|
|
||||||
|
|
||||||
.. _datasette_remove_database:
|
.. _datasette_remove_database:
|
||||||
|
|
||||||
|
|
@ -1047,60 +993,6 @@ These methods can be used with :ref:`internals_datasette_urls` - for example:
|
||||||
|
|
||||||
For documentation on available ``**kwargs`` options and the shape of the HTTPX Response object refer to the `HTTPX Async documentation <https://www.python-httpx.org/async/>`__.
|
For documentation on available ``**kwargs`` options and the shape of the HTTPX Response object refer to the `HTTPX Async documentation <https://www.python-httpx.org/async/>`__.
|
||||||
|
|
||||||
Bypassing permission checks
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
All ``datasette.client`` methods accept an optional ``skip_permission_checks=True`` parameter. When set, all permission checks will be bypassed for that request, allowing access to any resource regardless of the configured permissions.
|
|
||||||
|
|
||||||
This is useful for plugins and internal operations that need to access all resources without being subject to permission restrictions.
|
|
||||||
|
|
||||||
Example usage:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
# Regular request - respects permissions
|
|
||||||
response = await datasette.client.get(
|
|
||||||
"/private-db/secret-table.json"
|
|
||||||
)
|
|
||||||
# May return 403 Forbidden if access is denied
|
|
||||||
|
|
||||||
# With skip_permission_checks - bypasses all permission checks
|
|
||||||
response = await datasette.client.get(
|
|
||||||
"/private-db/secret-table.json",
|
|
||||||
skip_permission_checks=True,
|
|
||||||
)
|
|
||||||
# Will return 200 OK and the data, regardless of permissions
|
|
||||||
|
|
||||||
This parameter works with all HTTP methods (``get``, ``post``, ``put``, ``patch``, ``delete``, ``options``, ``head``) and the generic ``request`` method.
|
|
||||||
|
|
||||||
.. warning::
|
|
||||||
|
|
||||||
Use ``skip_permission_checks=True`` with caution. It completely bypasses Datasette's permission system and should only be used in trusted plugin code or internal operations where you need guaranteed access to resources.
|
|
||||||
|
|
||||||
.. _internals_datasette_is_client:
|
|
||||||
|
|
||||||
Detecting internal client requests
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
``datasette.in_client()`` - returns bool
|
|
||||||
Returns ``True`` if the current code is executing within a ``datasette.client`` request, ``False`` otherwise.
|
|
||||||
|
|
||||||
This method is useful for plugins that need to behave differently when called through ``datasette.client`` versus when handling external HTTP requests.
|
|
||||||
|
|
||||||
Example usage:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
async def fetch_documents(datasette):
|
|
||||||
if not datasette.in_client():
|
|
||||||
return Response.text(
|
|
||||||
"Only available via internal client requests",
|
|
||||||
status=403,
|
|
||||||
)
|
|
||||||
...
|
|
||||||
|
|
||||||
Note that ``datasette.in_client()`` is independent of ``skip_permission_checks``. A request made through ``datasette.client`` will always have ``in_client()`` return ``True``, regardless of whether ``skip_permission_checks`` is set.
|
|
||||||
|
|
||||||
.. _internals_datasette_urls:
|
.. _internals_datasette_urls:
|
||||||
|
|
||||||
datasette.urls
|
datasette.urls
|
||||||
|
|
|
||||||
|
|
@ -347,7 +347,7 @@ Special table arguments
|
||||||
though this could potentially result in errors if the wrong syntax is used.
|
though this could potentially result in errors if the wrong syntax is used.
|
||||||
|
|
||||||
``?_where=SQL-fragment``
|
``?_where=SQL-fragment``
|
||||||
If the :ref:`actions_execute_sql` permission is enabled, this parameter
|
If the :ref:`permissions_execute_sql` permission is enabled, this parameter
|
||||||
can be used to pass one or more additional SQL fragments to be used in the
|
can be used to pass one or more additional SQL fragments to be used in the
|
||||||
`WHERE` clause of the SQL used to query the table.
|
`WHERE` clause of the SQL used to query the table.
|
||||||
|
|
||||||
|
|
@ -510,7 +510,7 @@ Datasette provides a write API for JSON data. This is a POST-only API that requi
|
||||||
Inserting rows
|
Inserting rows
|
||||||
~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~
|
||||||
|
|
||||||
This requires the :ref:`actions_insert_row` permission.
|
This requires the :ref:`permissions_insert_row` permission.
|
||||||
|
|
||||||
A single row can be inserted using the ``"row"`` key:
|
A single row can be inserted using the ``"row"`` key:
|
||||||
|
|
||||||
|
|
@ -621,9 +621,9 @@ Pass ``"ignore": true`` to ignore these errors and insert the other rows:
|
||||||
"ignore": true
|
"ignore": true
|
||||||
}
|
}
|
||||||
|
|
||||||
Or you can pass ``"replace": true`` to replace any rows with conflicting primary keys with the new values. This requires the :ref:`actions_update_row` permission.
|
Or you can pass ``"replace": true`` to replace any rows with conflicting primary keys with the new values. This requires the :ref:`permissions_update_row` permission.
|
||||||
|
|
||||||
Pass ``"alter": true`` to automatically add any missing columns to the table. This requires the :ref:`actions_alter_table` permission.
|
Pass ``"alter": true`` to automatically add any missing columns to the table. This requires the :ref:`permissions_alter_table` permission.
|
||||||
|
|
||||||
.. _TableUpsertView:
|
.. _TableUpsertView:
|
||||||
|
|
||||||
|
|
@ -632,7 +632,7 @@ Upserting rows
|
||||||
|
|
||||||
An upsert is an insert or update operation. If a row with a matching primary key already exists it will be updated - otherwise a new row will be inserted.
|
An upsert is an insert or update operation. If a row with a matching primary key already exists it will be updated - otherwise a new row will be inserted.
|
||||||
|
|
||||||
The upsert API is mostly the same shape as the :ref:`insert API <TableInsertView>`. It requires both the :ref:`actions_insert_row` and :ref:`actions_update_row` permissions.
|
The upsert API is mostly the same shape as the :ref:`insert API <TableInsertView>`. It requires both the :ref:`permissions_insert_row` and :ref:`permissions_update_row` permissions.
|
||||||
|
|
||||||
::
|
::
|
||||||
|
|
||||||
|
|
@ -735,14 +735,14 @@ When using upsert you must provide the primary key column (or columns if the tab
|
||||||
|
|
||||||
If your table does not have an explicit primary key you should pass the SQLite ``rowid`` key instead.
|
If your table does not have an explicit primary key you should pass the SQLite ``rowid`` key instead.
|
||||||
|
|
||||||
Pass ``"alter": true`` to automatically add any missing columns to the table. This requires the :ref:`actions_alter_table` permission.
|
Pass ``"alter": true`` to automatically add any missing columns to the table. This requires the :ref:`permissions_alter_table` permission.
|
||||||
|
|
||||||
.. _RowUpdateView:
|
.. _RowUpdateView:
|
||||||
|
|
||||||
Updating a row
|
Updating a row
|
||||||
~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~
|
||||||
|
|
||||||
To update a row, make a ``POST`` to ``/<database>/<table>/<row-pks>/-/update``. This requires the :ref:`actions_update_row` permission.
|
To update a row, make a ``POST`` to ``/<database>/<table>/<row-pks>/-/update``. This requires the :ref:`permissions_update_row` permission.
|
||||||
|
|
||||||
::
|
::
|
||||||
|
|
||||||
|
|
@ -792,14 +792,14 @@ The returned JSON will look like this:
|
||||||
|
|
||||||
Any errors will return ``{"errors": ["... descriptive message ..."], "ok": false}``, and a ``400`` status code for a bad input or a ``403`` status code for an authentication or permission error.
|
Any errors will return ``{"errors": ["... descriptive message ..."], "ok": false}``, and a ``400`` status code for a bad input or a ``403`` status code for an authentication or permission error.
|
||||||
|
|
||||||
Pass ``"alter": true`` to automatically add any missing columns to the table. This requires the :ref:`actions_alter_table` permission.
|
Pass ``"alter": true`` to automatically add any missing columns to the table. This requires the :ref:`permissions_alter_table` permission.
|
||||||
|
|
||||||
.. _RowDeleteView:
|
.. _RowDeleteView:
|
||||||
|
|
||||||
Deleting a row
|
Deleting a row
|
||||||
~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~
|
||||||
|
|
||||||
To delete a row, make a ``POST`` to ``/<database>/<table>/<row-pks>/-/delete``. This requires the :ref:`actions_delete_row` permission.
|
To delete a row, make a ``POST`` to ``/<database>/<table>/<row-pks>/-/delete``. This requires the :ref:`permissions_delete_row` permission.
|
||||||
|
|
||||||
::
|
::
|
||||||
|
|
||||||
|
|
@ -818,7 +818,7 @@ Any errors will return ``{"errors": ["... descriptive message ..."], "ok": false
|
||||||
Creating a table
|
Creating a table
|
||||||
~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
To create a table, make a ``POST`` to ``/<database>/-/create``. This requires the :ref:`actions_create_table` permission.
|
To create a table, make a ``POST`` to ``/<database>/-/create``. This requires the :ref:`permissions_create_table` permission.
|
||||||
|
|
||||||
::
|
::
|
||||||
|
|
||||||
|
|
@ -859,8 +859,8 @@ The JSON here describes the table that will be created:
|
||||||
|
|
||||||
* ``pks`` can be used instead of ``pk`` to create a compound primary key. It should be a JSON list of column names to use in that primary key.
|
* ``pks`` can be used instead of ``pk`` to create a compound primary key. It should be a JSON list of column names to use in that primary key.
|
||||||
* ``ignore`` can be set to ``true`` to ignore existing rows by primary key if the table already exists.
|
* ``ignore`` can be set to ``true`` to ignore existing rows by primary key if the table already exists.
|
||||||
* ``replace`` can be set to ``true`` to replace existing rows by primary key if the table already exists. This requires the :ref:`actions_update_row` permission.
|
* ``replace`` can be set to ``true`` to replace existing rows by primary key if the table already exists. This requires the :ref:`permissions_update_row` permission.
|
||||||
* ``alter`` can be set to ``true`` if you want to automatically add any missing columns to the table. This requires the :ref:`actions_alter_table` permission.
|
* ``alter`` can be set to ``true`` if you want to automatically add any missing columns to the table. This requires the :ref:`permissions_alter_table` permission.
|
||||||
|
|
||||||
If the table is successfully created this will return a ``201`` status code and the following response:
|
If the table is successfully created this will return a ``201`` status code and the following response:
|
||||||
|
|
||||||
|
|
@ -906,7 +906,7 @@ Datasette will create a table with a schema that matches those rows and insert t
|
||||||
"pk": "id"
|
"pk": "id"
|
||||||
}
|
}
|
||||||
|
|
||||||
Doing this requires both the :ref:`actions_create_table` and :ref:`actions_insert_row` permissions.
|
Doing this requires both the :ref:`permissions_create_table` and :ref:`permissions_insert_row` permissions.
|
||||||
|
|
||||||
The ``201`` response here will be similar to the ``columns`` form, but will also include the number of rows that were inserted as ``row_count``:
|
The ``201`` response here will be similar to the ``columns`` form, but will also include the number of rows that were inserted as ``row_count``:
|
||||||
|
|
||||||
|
|
@ -937,16 +937,16 @@ If you pass a row to the create endpoint with a primary key that already exists
|
||||||
|
|
||||||
You can avoid this error by passing the same ``"ignore": true`` or ``"replace": true`` options to the create endpoint as you can to the :ref:`insert endpoint <TableInsertView>`.
|
You can avoid this error by passing the same ``"ignore": true`` or ``"replace": true`` options to the create endpoint as you can to the :ref:`insert endpoint <TableInsertView>`.
|
||||||
|
|
||||||
To use the ``"replace": true`` option you will also need the :ref:`actions_update_row` permission.
|
To use the ``"replace": true`` option you will also need the :ref:`permissions_update_row` permission.
|
||||||
|
|
||||||
Pass ``"alter": true`` to automatically add any missing columns to the existing table that are present in the rows you are submitting. This requires the :ref:`actions_alter_table` permission.
|
Pass ``"alter": true`` to automatically add any missing columns to the existing table that are present in the rows you are submitting. This requires the :ref:`permissions_alter_table` permission.
|
||||||
|
|
||||||
.. _TableDropView:
|
.. _TableDropView:
|
||||||
|
|
||||||
Dropping tables
|
Dropping tables
|
||||||
~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
To drop a table, make a ``POST`` to ``/<database>/<table>/-/drop``. This requires the :ref:`actions_drop_table` permission.
|
To drop a table, make a ``POST`` to ``/<database>/<table>/-/drop``. This requires the :ref:`permissions_drop_table` permission.
|
||||||
|
|
||||||
::
|
::
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -28,7 +28,7 @@ The index page can also be accessed at ``/-/``, useful for if the default index
|
||||||
Database
|
Database
|
||||||
========
|
========
|
||||||
|
|
||||||
Each database has a page listing the tables, views and canned queries available for that database. If the :ref:`actions_execute_sql` permission is enabled (it's on by default) there will also be an interface for executing arbitrary SQL select queries against the data.
|
Each database has a page listing the tables, views and canned queries available for that database. If the :ref:`permissions_execute_sql` permission is enabled (it's on by default) there will also be an interface for executing arbitrary SQL select queries against the data.
|
||||||
|
|
||||||
Examples:
|
Examples:
|
||||||
|
|
||||||
|
|
@ -60,7 +60,7 @@ The following tables are hidden by default:
|
||||||
Queries
|
Queries
|
||||||
=======
|
=======
|
||||||
|
|
||||||
The ``/database-name/-/query`` page can be used to execute an arbitrary SQL query against that database, if the :ref:`actions_execute_sql` permission is enabled. This query is passed as the ``?sql=`` query string parameter.
|
The ``/database-name/-/query`` page can be used to execute an arbitrary SQL query against that database, if the :ref:`permissions_execute_sql` permission is enabled. This query is passed as the ``?sql=`` query string parameter.
|
||||||
|
|
||||||
This means you can link directly to a query by constructing the following URL:
|
This means you can link directly to a query by constructing the following URL:
|
||||||
|
|
||||||
|
|
@ -107,46 +107,3 @@ Note that this URL includes the encoded primary key of the record.
|
||||||
Here's that same page as JSON:
|
Here's that same page as JSON:
|
||||||
|
|
||||||
`../people/uk~2Eorg~2Epublicwhip~2Fperson~2F10001.json <https://register-of-members-interests.datasettes.com/regmem/people/uk~2Eorg~2Epublicwhip~2Fperson~2F10001.json>`_
|
`../people/uk~2Eorg~2Epublicwhip~2Fperson~2F10001.json <https://register-of-members-interests.datasettes.com/regmem/people/uk~2Eorg~2Epublicwhip~2Fperson~2F10001.json>`_
|
||||||
|
|
||||||
|
|
||||||
.. _pages_schemas:
|
|
||||||
|
|
||||||
Schemas
|
|
||||||
=======
|
|
||||||
|
|
||||||
Datasette offers ``/-/schema`` endpoints to expose the SQL schema for databases and tables.
|
|
||||||
|
|
||||||
.. _InstanceSchemaView:
|
|
||||||
|
|
||||||
Instance schema
|
|
||||||
---------------
|
|
||||||
|
|
||||||
Access ``/-/schema`` to see the complete schema for all attached databases in the Datasette instance.
|
|
||||||
|
|
||||||
Use ``/-/schema.md`` to get the same information as Markdown.
|
|
||||||
|
|
||||||
Use ``/-/schema.json`` to get the same information as JSON, which looks like this:
|
|
||||||
|
|
||||||
.. code-block:: json
|
|
||||||
|
|
||||||
{
|
|
||||||
"schemas": [
|
|
||||||
{
|
|
||||||
"database": "content",
|
|
||||||
"schema": "create table posts ..."
|
|
||||||
}
|
|
||||||
  ]
}
|
|
||||||
|
|
||||||
.. _DatabaseSchemaView:
|
|
||||||
|
|
||||||
Database schema
|
|
||||||
---------------
|
|
||||||
|
|
||||||
Use ``/database-name/-/schema`` to see the complete schema for a specific database. The ``.md`` and ``.json`` extensions work here too. The JSON returns an object with ``"database"`` and ``"schema"`` keys.
|
|
||||||
|
|
||||||
.. _TableSchemaView:
|
|
||||||
|
|
||||||
Table schema
|
|
||||||
------------
|
|
||||||
|
|
||||||
Use ``/database-name/table-name/-/schema`` to see the schema for a specific table. The ``.md`` and ``.json`` extensions work here too. The JSON returns an object with ``"database"``, ``"table"``, and ``"schema"`` keys.
|
|
||||||
|
|
|
||||||
|
|
@ -777,6 +777,56 @@ The plugin hook can then be used to register the new facet class like this:
|
||||||
def register_facet_classes():
|
def register_facet_classes():
|
||||||
return [SpecialFacet]
|
return [SpecialFacet]
|
||||||
|
|
||||||
|
.. _plugin_register_permissions:
|
||||||
|
|
||||||
|
register_permissions(datasette)
|
||||||
|
-------------------------------
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
This hook is deprecated. Use :ref:`plugin_register_actions` instead, which provides a more flexible resource-based permission system.
|
||||||
|
|
||||||
|
If your plugin needs to register additional permissions unique to that plugin - ``upload-csvs`` for example - you can return a list of those permissions from this hook.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
from datasette import hookimpl, Permission
|
||||||
|
|
||||||
|
|
||||||
|
@hookimpl
|
||||||
|
def register_permissions(datasette):
|
||||||
|
return [
|
||||||
|
Permission(
|
||||||
|
name="upload-csvs",
|
||||||
|
abbr=None,
|
||||||
|
description="Upload CSV files",
|
||||||
|
takes_database=True,
|
||||||
|
takes_resource=False,
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
The fields of the ``Permission`` class are as follows:
|
||||||
|
|
||||||
|
``name`` - string
|
||||||
|
The name of the permission, e.g. ``upload-csvs``. This should be unique across all plugins that the user might have installed, so choose carefully.
|
||||||
|
|
||||||
|
``abbr`` - string or None
|
||||||
|
An abbreviation of the permission, e.g. ``uc``. This is optional - you can set it to ``None`` if you do not want to pick an abbreviation. Since this needs to be unique across all installed plugins it's best not to specify an abbreviation at all. If an abbreviation is provided it will be used when creating restricted signed API tokens.
|
||||||
|
|
||||||
|
``description`` - string or None
|
||||||
|
A human-readable description of what the permission lets you do. Should make sense as the second part of a sentence that starts "A user with this permission can ...".
|
||||||
|
|
||||||
|
``takes_database`` - boolean
|
||||||
|
``True`` if this permission can be granted on a per-database basis, ``False`` if it is only valid at the overall Datasette instance level.
|
||||||
|
|
||||||
|
``takes_resource`` - boolean
|
||||||
|
``True`` if this permission can be granted on a per-resource basis. A resource is a database table, SQL view or :ref:`canned query <canned_queries>`.
|
||||||
|
|
||||||
|
``default`` - boolean
|
||||||
|
The default value for this permission if it is not explicitly granted to a user. ``True`` means the permission is granted by default, ``False`` means it is not.
|
||||||
|
|
||||||
|
This should only be ``True`` if you want anonymous users to be able to take this action.
|
||||||
|
|
||||||
.. _plugin_register_actions:
|
.. _plugin_register_actions:
|
||||||
|
|
||||||
register_actions(datasette)
|
register_actions(datasette)
|
||||||
|
|
@ -833,18 +883,24 @@ Actions define what operations can be performed on resources (like viewing a tab
|
||||||
name="list-documents",
|
name="list-documents",
|
||||||
abbr="ld",
|
abbr="ld",
|
||||||
description="List documents in a collection",
|
description="List documents in a collection",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=False,
|
||||||
resource_class=DocumentCollectionResource,
|
resource_class=DocumentCollectionResource,
|
||||||
),
|
),
|
||||||
Action(
|
Action(
|
||||||
name="view-document",
|
name="view-document",
|
||||||
abbr="vdoc",
|
abbr="vdoc",
|
||||||
description="View document",
|
description="View document",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=True,
|
||||||
resource_class=DocumentResource,
|
resource_class=DocumentResource,
|
||||||
),
|
),
|
||||||
Action(
|
Action(
|
||||||
name="edit-document",
|
name="edit-document",
|
||||||
abbr="edoc",
|
abbr="edoc",
|
||||||
description="Edit document",
|
description="Edit document",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=True,
|
||||||
resource_class=DocumentResource,
|
resource_class=DocumentResource,
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|
@ -855,25 +911,31 @@ The fields of the ``Action`` dataclass are as follows:
|
||||||
The name of the action, e.g. ``view-document``. This should be unique across all plugins.
|
The name of the action, e.g. ``view-document``. This should be unique across all plugins.
|
||||||
|
|
||||||
``abbr`` - string or None
|
``abbr`` - string or None
|
||||||
An abbreviation of the action, e.g. ``vdoc``. This is optional. Since this needs to be unique across all installed plugins it's best to choose carefully or omit it entirely (same as setting it to ``None``).
|
An abbreviation of the action, e.g. ``vdoc``. This is optional. Since this needs to be unique across all installed plugins it's best to choose carefully or use ``None``.
|
||||||
|
|
||||||
``description`` - string or None
|
``description`` - string or None
|
||||||
A human-readable description of what the action allows you to do.
|
A human-readable description of what the action allows you to do.
|
||||||
|
|
||||||
``resource_class`` - type[Resource] or None
|
``takes_parent`` - boolean
|
||||||
The Resource subclass that defines what kind of resource this action applies to. Omit this (or set to ``None``) for global actions that apply only at the instance level with no associated resources (like ``debug-menu`` or ``permissions-debug``). Your Resource subclass must:
|
``True`` if this action requires a parent identifier (like a database name).
|
||||||
|
|
||||||
|
``takes_child`` - boolean
|
||||||
|
``True`` if this action requires a child identifier (like a table or document name).
|
||||||
|
|
||||||
|
``resource_class`` - type[Resource]
|
||||||
|
The Resource subclass that defines what kind of resource this action applies to. Your Resource subclass must:
|
||||||
|
|
||||||
- Define a ``name`` class attribute (e.g., ``"document"``)
|
- Define a ``name`` class attribute (e.g., ``"document"``)
|
||||||
- Define a ``parent_class`` class attribute (``None`` for top-level resources like databases, or the parent ``Resource`` subclass for child resources)
|
- Optionally define a ``parent_name`` class attribute (e.g., ``"collection"``)
|
||||||
- Implement a ``resources_sql()`` classmethod that returns SQL returning all resources as ``(parent, child)`` columns
|
- Implement a ``resources_sql()`` classmethod that returns SQL returning all resources as ``(parent, child)`` columns
|
||||||
- Have an ``__init__`` method that accepts appropriate parameters and calls ``super().__init__(parent=..., child=...)``
|
- Have an ``__init__`` method that accepts appropriate parameters and calls ``super().__init__(parent=..., child=...)``
|
||||||
|
|
||||||
The ``resources_sql()`` method
|
The ``resources_sql()`` method
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
The ``resources_sql()`` classmethod returns a SQL query that lists all resources of that type that exist in the system.
|
The ``resources_sql()`` classmethod is crucial to Datasette's permission system. It returns a SQL query that lists all resources of that type that exist in the system.
|
||||||
|
|
||||||
This query is used by Datasette to efficiently check permissions across multiple resources at once. When a user requests a list of resources (like tables, documents, or other entities), Datasette uses this SQL to:
|
This SQL query is used by Datasette to efficiently check permissions across multiple resources at once. When a user requests a list of resources (like tables, documents, or other entities), Datasette uses this SQL to:
|
||||||
|
|
||||||
1. Get all resources of this type from your data catalog
|
1. Get all resources of this type from your data catalog
|
||||||
2. Combine it with permission rules from the ``permission_resources_sql`` hook
|
2. Combine it with permission rules from the ``permission_resources_sql`` hook
|
||||||
|
|
@ -1314,6 +1376,72 @@ This example plugin causes 0 results to be returned if ``?_nothing=1`` is added
|
||||||
|
|
||||||
Example: `datasette-leaflet-freedraw <https://datasette.io/plugins/datasette-leaflet-freedraw>`_
|
Example: `datasette-leaflet-freedraw <https://datasette.io/plugins/datasette-leaflet-freedraw>`_
|
||||||
|
|
||||||
|
.. _plugin_hook_permission_allowed:
|
||||||
|
|
||||||
|
permission_allowed(datasette, actor, action, resource)
|
||||||
|
------------------------------------------------------
|
||||||
|
|
||||||
|
``datasette`` - :ref:`internals_datasette`
|
||||||
|
You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.
|
||||||
|
|
||||||
|
``actor`` - dictionary
|
||||||
|
The current actor, as decided by :ref:`plugin_hook_actor_from_request`.
|
||||||
|
|
||||||
|
``action`` - string
|
||||||
|
The action to be performed, e.g. ``"edit-table"``.
|
||||||
|
|
||||||
|
``resource`` - string or None
|
||||||
|
An identifier for the individual resource, e.g. the name of the table.
|
||||||
|
|
||||||
|
Called to check that an actor has permission to perform an action on a resource. Can return ``True`` if the action is allowed, ``False`` if the action is not allowed or ``None`` if the plugin does not have an opinion one way or the other.
|
||||||
|
|
||||||
|
Here's an example plugin which randomly selects if a permission should be allowed or denied, except for ``view-instance`` which always uses the default permission scheme instead.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
from datasette import hookimpl
|
||||||
|
import random
|
||||||
|
|
||||||
|
|
||||||
|
@hookimpl
|
||||||
|
def permission_allowed(action):
|
||||||
|
if action != "view-instance":
|
||||||
|
# Return True or False at random
|
||||||
|
return random.random() > 0.5
|
||||||
|
# Returning None falls back to default permissions
|
||||||
|
|
||||||
|
This function can alternatively return an awaitable function which itself returns ``True``, ``False`` or ``None``. You can use this option if you need to execute additional database queries using ``await datasette.execute(...)``.
|
||||||
|
|
||||||
|
Here's an example that allows users to view the ``admin_log`` table only if their actor ``id`` is present in the ``admin_users`` table. It also disallows arbitrary SQL queries for the ``staff.db`` database for all users.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
@hookimpl
|
||||||
|
def permission_allowed(datasette, actor, action, resource):
|
||||||
|
async def inner():
|
||||||
|
if action == "execute-sql" and resource == "staff":
|
||||||
|
return False
|
||||||
|
if action == "view-table" and resource == (
|
||||||
|
"staff",
|
||||||
|
"admin_log",
|
||||||
|
):
|
||||||
|
if not actor:
|
||||||
|
return False
|
||||||
|
user_id = actor["id"]
|
||||||
|
result = await datasette.get_database(
|
||||||
|
"staff"
|
||||||
|
).execute(
|
||||||
|
"select count(*) from admin_users where user_id = :user_id",
|
||||||
|
{"user_id": user_id},
|
||||||
|
)
|
||||||
|
return result.first()[0] > 0
|
||||||
|
|
||||||
|
return inner
|
||||||
|
|
||||||
|
See :ref:`built-in permissions <permissions>` for a full list of permissions that are included in Datasette core.
|
||||||
|
|
||||||
|
Example: `datasette-permissions-sql <https://datasette.io/plugins/datasette-permissions-sql>`_
|
||||||
|
|
||||||
.. _plugin_hook_permission_resources_sql:
|
.. _plugin_hook_permission_resources_sql:
|
||||||
|
|
||||||
permission_resources_sql(datasette, actor, action)
|
permission_resources_sql(datasette, actor, action)
|
||||||
|
|
@ -1329,28 +1457,17 @@ permission_resources_sql(datasette, actor, action)
|
||||||
The permission action being evaluated. Examples include ``"view-table"`` or ``"insert-row"``.
|
The permission action being evaluated. Examples include ``"view-table"`` or ``"insert-row"``.
|
||||||
|
|
||||||
Return value
|
Return value
|
||||||
A :class:`datasette.permissions.PermissionSQL` object, ``None`` or an iterable of ``PermissionSQL`` objects.
|
A :class:`datasette.utils.permissions.PluginSQL` object, ``None`` or an iterable of ``PluginSQL`` objects.
|
||||||
|
|
||||||
Datasette's action-based permission resolver calls this hook to gather SQL rows describing which
|
Datasette's action-based permission resolver calls this hook to gather SQL rows describing which
|
||||||
resources an actor may access (``allow = 1``) or should be denied (``allow = 0``) for a specific action.
|
resources an actor may access (``allow = 1``) or should be denied (``allow = 0``) for a specific action.
|
||||||
Each SQL snippet should return ``parent``, ``child``, ``allow`` and ``reason`` columns.
|
Each SQL snippet should return ``parent``, ``child``, ``allow`` and ``reason`` columns.
|
||||||
|
|
||||||
**Parameter naming convention:** Plugin parameters in ``PermissionSQL.params`` should use unique names
|
**Parameter naming convention:** Plugin parameters in ``PluginSQL.params`` should use unique names
|
||||||
to avoid conflicts with other plugins. The recommended convention is to prefix parameters with your
|
to avoid conflicts with other plugins. The recommended convention is to prefix parameters with your
|
||||||
plugin's source name (e.g., ``myplugin_user_id``). The system reserves these parameter names:
|
plugin's source name (e.g., ``myplugin_user_id``). The system reserves these parameter names:
|
||||||
``:actor``, ``:actor_id``, ``:action``, and ``:filter_parent``.
|
``:actor``, ``:actor_id``, ``:action``, and ``:filter_parent``.
|
||||||
|
|
||||||
You can also return ``PermissionSQL.allow(reason="reason goes here")`` or ``PermissionSQL.deny(reason="reason goes here")`` as shortcuts for simple root-level allow or deny rules. These will create SQL snippets that look like this:
|
|
||||||
|
|
||||||
.. code-block:: sql
|
|
||||||
|
|
||||||
SELECT
|
|
||||||
NULL AS parent,
|
|
||||||
NULL AS child,
|
|
||||||
1 AS allow,
|
|
||||||
'reason goes here' AS reason
|
|
||||||
|
|
||||||
Or ``0 AS allow`` for denies.
|
|
||||||
|
|
||||||
Permission plugin examples
|
Permission plugin examples
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
@ -1358,7 +1475,7 @@ Permission plugin examples
|
||||||
These snippets show how to use the new ``permission_resources_sql`` hook to
|
These snippets show how to use the new ``permission_resources_sql`` hook to
|
||||||
contribute rows to the action-based permission resolver. Each hook receives the
|
contribute rows to the action-based permission resolver. Each hook receives the
|
||||||
current actor dictionary (or ``None``) and must return ``None`` or an instance or list of
|
current actor dictionary (or ``None``) and must return ``None`` or an instance or list of
|
||||||
``datasette.permissions.PermissionSQL`` (or a coroutine that resolves to that).
|
``datasette.utils.permissions.PluginSQL`` (or a coroutine that resolves to that).
|
||||||
|
|
||||||
Allow Alice to view a specific table
|
Allow Alice to view a specific table
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
@ -1369,7 +1486,7 @@ This plugin grants the actor with ``id == "alice"`` permission to perform the
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
from datasette import hookimpl
|
from datasette import hookimpl
|
||||||
from datasette.permissions import PermissionSQL
|
from datasette.utils.permissions import PluginSQL
|
||||||
|
|
||||||
|
|
||||||
@hookimpl
|
@hookimpl
|
||||||
|
|
@ -1379,7 +1496,8 @@ This plugin grants the actor with ``id == "alice"`` permission to perform the
|
||||||
if not actor or actor.get("id") != "alice":
|
if not actor or actor.get("id") != "alice":
|
||||||
return None
|
return None
|
||||||
|
|
||||||
return PermissionSQL(
|
return PluginSQL(
|
||||||
|
source="alice_sales_allow",
|
||||||
sql="""
|
sql="""
|
||||||
SELECT
|
SELECT
|
||||||
'accounting' AS parent,
|
'accounting' AS parent,
|
||||||
|
|
@ -1387,6 +1505,7 @@ This plugin grants the actor with ``id == "alice"`` permission to perform the
|
||||||
1 AS allow,
|
1 AS allow,
|
||||||
'alice can view accounting/sales' AS reason
|
'alice can view accounting/sales' AS reason
|
||||||
""",
|
""",
|
||||||
|
params={},
|
||||||
)
|
)
|
||||||
|
|
||||||
Restrict execute-sql to a database prefix
|
Restrict execute-sql to a database prefix
|
||||||
|
|
@ -1399,7 +1518,7 @@ will pass through to the SQL snippet.
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
from datasette import hookimpl
|
from datasette import hookimpl
|
||||||
from datasette.permissions import PermissionSQL
|
from datasette.utils.permissions import PluginSQL
|
||||||
|
|
||||||
|
|
||||||
@hookimpl
|
@hookimpl
|
||||||
|
|
@ -1407,7 +1526,8 @@ will pass through to the SQL snippet.
|
||||||
if action != "execute-sql":
|
if action != "execute-sql":
|
||||||
return None
|
return None
|
||||||
|
|
||||||
return PermissionSQL(
|
return PluginSQL(
|
||||||
|
source="analytics_execute_sql",
|
||||||
sql="""
|
sql="""
|
||||||
SELECT
|
SELECT
|
||||||
parent,
|
parent,
|
||||||
|
|
@ -1431,7 +1551,7 @@ with columns ``(actor_id, action, parent, child, allow, reason)``.
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
from datasette import hookimpl
|
from datasette import hookimpl
|
||||||
from datasette.permissions import PermissionSQL
|
from datasette.utils.permissions import PluginSQL
|
||||||
|
|
||||||
|
|
||||||
@hookimpl
|
@hookimpl
|
||||||
|
|
@ -1439,7 +1559,8 @@ with columns ``(actor_id, action, parent, child, allow, reason)``.
|
||||||
if not actor:
|
if not actor:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
return PermissionSQL(
|
return PluginSQL(
|
||||||
|
source="permission_grants_table",
|
||||||
sql="""
|
sql="""
|
||||||
SELECT
|
SELECT
|
||||||
parent,
|
parent,
|
||||||
|
|
@ -1465,7 +1586,7 @@ The resolver will automatically apply the most specific rule.
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
from datasette import hookimpl
|
from datasette import hookimpl
|
||||||
from datasette.permissions import PermissionSQL
|
from datasette.utils.permissions import PluginSQL
|
||||||
|
|
||||||
|
|
||||||
TRUSTED = {"alice", "bob"}
|
TRUSTED = {"alice", "bob"}
|
||||||
|
|
@ -1479,14 +1600,17 @@ The resolver will automatically apply the most specific rule.
|
||||||
actor_id = (actor or {}).get("id")
|
actor_id = (actor or {}).get("id")
|
||||||
|
|
||||||
if actor_id not in TRUSTED:
|
if actor_id not in TRUSTED:
|
||||||
return PermissionSQL(
|
return PluginSQL(
|
||||||
|
source="view_table_root_deny",
|
||||||
sql="""
|
sql="""
|
||||||
SELECT NULL AS parent, NULL AS child, 0 AS allow,
|
SELECT NULL AS parent, NULL AS child, 0 AS allow,
|
||||||
'default deny view-table' AS reason
|
'default deny view-table' AS reason
|
||||||
""",
|
""",
|
||||||
|
params={},
|
||||||
)
|
)
|
||||||
|
|
||||||
return PermissionSQL(
|
return PluginSQL(
|
||||||
|
source="trusted_allow",
|
||||||
sql="""
|
sql="""
|
||||||
SELECT NULL AS parent, NULL AS child, 1 AS allow,
|
SELECT NULL AS parent, NULL AS child, 1 AS allow,
|
||||||
'trusted actor may view tables' AS reason
|
'trusted actor may view tables' AS reason
|
||||||
|
|
@ -1915,16 +2039,16 @@ This example adds a new database action for creating a table, if the user has th
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
from datasette import hookimpl
|
from datasette import hookimpl
|
||||||
from datasette.resources import DatabaseResource
|
|
||||||
|
|
||||||
|
|
||||||
@hookimpl
|
@hookimpl
|
||||||
def database_actions(datasette, actor, database):
|
def database_actions(datasette, actor, database):
|
||||||
async def inner():
|
async def inner():
|
||||||
if not await datasette.allowed(
|
if not await datasette.permission_allowed(
|
||||||
actor,
|
actor,
|
||||||
"edit-schema",
|
"edit-schema",
|
||||||
resource=DatabaseResource("database"),
|
resource=database,
|
||||||
|
default=False,
|
||||||
):
|
):
|
||||||
return []
|
return []
|
||||||
return [
|
return [
|
||||||
|
|
|
||||||
|
|
@ -69,7 +69,7 @@ default_allow_sql
|
||||||
|
|
||||||
Should users be able to execute arbitrary SQL queries by default?
|
Should users be able to execute arbitrary SQL queries by default?
|
||||||
|
|
||||||
Setting this to ``off`` causes permission checks for :ref:`actions_execute_sql` to fail by default.
|
Setting this to ``off`` causes permission checks for :ref:`permissions_execute_sql` to fail by default.
|
||||||
|
|
||||||
::
|
::
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,7 @@ Datasette treats SQLite database files as read-only and immutable. This means it
|
||||||
|
|
||||||
The easiest way to execute custom SQL against Datasette is through the web UI. The database index page includes a SQL editor that lets you run any SELECT query you like. You can also construct queries using the filter interface on the tables page, then click "View and edit SQL" to open that query in the custom SQL editor.
|
The easiest way to execute custom SQL against Datasette is through the web UI. The database index page includes a SQL editor that lets you run any SELECT query you like. You can also construct queries using the filter interface on the tables page, then click "View and edit SQL" to open that query in the custom SQL editor.
|
||||||
|
|
||||||
Note that this interface is only available if the :ref:`actions_execute_sql` permission is allowed. See :ref:`authentication_permissions_execute_sql`.
|
Note that this interface is only available if the :ref:`permissions_execute_sql` permission is allowed. See :ref:`authentication_permissions_execute_sql`.
|
||||||
|
|
||||||
Any Datasette SQL query is reflected in the URL of the page, allowing you to bookmark them, share them with others and navigate through previous queries using your browser back button.
|
Any Datasette SQL query is reflected in the URL of the page, allowing you to bookmark them, share them with others and navigate through previous queries using your browser back button.
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -283,12 +283,13 @@ Here's a test for that plugin that mocks the HTTPX outbound request:
|
||||||
Registering a plugin for the duration of a test
|
Registering a plugin for the duration of a test
|
||||||
-----------------------------------------------
|
-----------------------------------------------
|
||||||
|
|
||||||
When writing tests for plugins you may find it useful to register a test plugin just for the duration of a single test. You can do this using ``datasette.pm.register()`` and ``datasette.pm.unregister()`` like this:
|
When writing tests for plugins you may find it useful to register a test plugin just for the duration of a single test. You can do this using ``pm.register()`` and ``pm.unregister()`` like this:
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
from datasette import hookimpl
|
from datasette import hookimpl
|
||||||
from datasette.app import Datasette
|
from datasette.app import Datasette
|
||||||
|
from datasette.plugins import pm
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -304,14 +305,14 @@ When writing tests for plugins you may find it useful to register a test plugin
|
||||||
(r"^/error$", lambda: 1 / 0),
|
(r"^/error$", lambda: 1 / 0),
|
||||||
]
|
]
|
||||||
|
|
||||||
datasette = Datasette()
|
pm.register(TestPlugin(), name="undo")
|
||||||
try:
|
try:
|
||||||
# The test implementation goes here
|
# The test implementation goes here
|
||||||
datasette.pm.register(TestPlugin(), name="undo")
|
datasette = Datasette()
|
||||||
response = await datasette.client.get("/error")
|
response = await datasette.client.get("/error")
|
||||||
assert response.status_code == 500
|
assert response.status_code == 500
|
||||||
finally:
|
finally:
|
||||||
datasette.pm.unregister(name="undo")
|
pm.unregister(name="undo")
|
||||||
|
|
||||||
To reuse the same temporary plugin in multiple tests, you can register it inside a fixture in your ``conftest.py`` file like this:
|
To reuse the same temporary plugin in multiple tests, you can register it inside a fixture in your ``conftest.py`` file like this:
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,289 +0,0 @@
|
||||||
---
|
|
||||||
orphan: true
|
|
||||||
---
|
|
||||||
|
|
||||||
(upgrade_guide_v1_a20)=
|
|
||||||
# Datasette 1.0a20 plugin upgrade guide
|
|
||||||
|
|
||||||
Datasette 1.0a20 makes some breaking changes to Datasette's permission system. Plugins need to be updated if they use **any of the following**:
|
|
||||||
|
|
||||||
- The `register_permissions()` plugin hook - this should be replaced with `register_actions`
|
|
||||||
- The `permission_allowed()` plugin hook - this should be upgraded to use `permission_resources_sql()`.
|
|
||||||
- The `datasette.permission_allowed()` internal method - this should be replaced with `datasette.allowed()`
|
|
||||||
- Logic that grants access to the `"root"` actor can be removed.
|
|
||||||
|
|
||||||
## Permissions are now actions
|
|
||||||
|
|
||||||
The `register_permissions()` hook should be replaced with `register_actions()`.
|
|
||||||
|
|
||||||
Old code:
|
|
||||||
|
|
||||||
```python
|
|
||||||
@hookimpl
|
|
||||||
def register_permissions(datasette):
|
|
||||||
return [
|
|
||||||
Permission(
|
|
||||||
name="explain-sql",
|
|
||||||
abbr=None,
|
|
||||||
description="Can explain SQL queries",
|
|
||||||
takes_database=True,
|
|
||||||
takes_resource=False,
|
|
||||||
default=False,
|
|
||||||
),
|
|
||||||
Permission(
|
|
||||||
name="annotate-rows",
|
|
||||||
abbr=None,
|
|
||||||
description="Can annotate rows",
|
|
||||||
takes_database=True,
|
|
||||||
takes_resource=True,
|
|
||||||
default=False,
|
|
||||||
),
|
|
||||||
Permission(
|
|
||||||
name="view-debug-info",
|
|
||||||
abbr=None,
|
|
||||||
description="Can view debug information",
|
|
||||||
takes_database=False,
|
|
||||||
takes_resource=False,
|
|
||||||
default=False,
|
|
||||||
),
|
|
||||||
]
|
|
||||||
```
|
|
||||||
The new `Action` does not have a `default=` parameter.
|
|
||||||
|
|
||||||
Here's the equivalent new code:
|
|
||||||
|
|
||||||
```python
|
|
||||||
from datasette import hookimpl
|
|
||||||
from datasette.permissions import Action
|
|
||||||
from datasette.resources import DatabaseResource, TableResource
|
|
||||||
|
|
||||||
@hookimpl
|
|
||||||
def register_actions(datasette):
|
|
||||||
return [
|
|
||||||
Action(
|
|
||||||
name="explain-sql",
|
|
||||||
description="Explain SQL queries",
|
|
||||||
resource_class=DatabaseResource,
|
|
||||||
),
|
|
||||||
Action(
|
|
||||||
name="annotate-rows",
|
|
||||||
description="Annotate rows",
|
|
||||||
resource_class=TableResource,
|
|
||||||
),
|
|
||||||
Action(
|
|
||||||
name="view-debug-info",
|
|
||||||
description="View debug information",
|
|
||||||
),
|
|
||||||
]
|
|
||||||
```
|
|
||||||
The `abbr=` is now optional and defaults to `None`.
|
|
||||||
|
|
||||||
For actions that apply to specific resources (like databases or tables), specify the `resource_class` instead of `takes_database` and `takes_resource`. Note that `view-debug-info` does not specify a `resource_class` because it applies globally.
|
|
||||||
|
|
||||||
## permission_allowed() hook is replaced by permission_resources_sql()
|
|
||||||
|
|
||||||
The following old code:
|
|
||||||
```python
|
|
||||||
@hookimpl
|
|
||||||
def permission_allowed(action):
|
|
||||||
if action == "permissions-debug":
|
|
||||||
return True
|
|
||||||
```
|
|
||||||
Can be replaced by:
|
|
||||||
```python
|
|
||||||
from datasette.permissions import PermissionSQL
|
|
||||||
|
|
||||||
@hookimpl
|
|
||||||
def permission_resources_sql(action):
|
|
||||||
return PermissionSQL.allow(reason="datasette-allow-permissions-debug")
|
|
||||||
```
|
|
||||||
A `.deny(reason="")` class method is also available.
|
|
||||||
|
|
||||||
For more complex permission checks consult the documentation for that plugin hook:
|
|
||||||
<https://docs.datasette.io/en/latest/plugin_hooks.html#permission-resources-sql-datasette-actor-action>
|
|
||||||
|
|
||||||
## Using datasette.allowed() to check permissions instead of datasette.permission_allowed()
|
|
||||||
|
|
||||||
The internal method `datasette.permission_allowed()` has been replaced by `datasette.allowed()`.
|
|
||||||
|
|
||||||
The old method looked like this:
|
|
||||||
```python
|
|
||||||
can_debug = await datasette.permission_allowed(
|
|
||||||
request.actor,
|
|
||||||
"view-debug-info",
|
|
||||||
)
|
|
||||||
can_explain_sql = await datasette.permission_allowed(
|
|
||||||
request.actor,
|
|
||||||
"explain-sql",
|
|
||||||
resource="database_name",
|
|
||||||
)
|
|
||||||
can_annotate_rows = await datasette.permission_allowed(
|
|
||||||
request.actor,
|
|
||||||
"annotate-rows",
|
|
||||||
resource=(database_name, table_name),
|
|
||||||
)
|
|
||||||
```
|
|
||||||
Note the confusing design here where `resource` could be either a string or a tuple depending on the permission being checked.
|
|
||||||
|
|
||||||
The new keyword-only design makes this a lot more clear:
|
|
||||||
```python
|
|
||||||
from datasette.resources import DatabaseResource, TableResource
|
|
||||||
can_debug = await datasette.allowed(
|
|
||||||
actor=request.actor,
|
|
||||||
action="view-debug-info",
|
|
||||||
)
|
|
||||||
can_explain_sql = await datasette.allowed(
|
|
||||||
actor=request.actor,
|
|
||||||
action="explain-sql",
|
|
||||||
resource=DatabaseResource(database_name),
|
|
||||||
)
|
|
||||||
can_annotate_rows = await datasette.allowed(
|
|
||||||
actor=request.actor,
|
|
||||||
action="annotate-rows",
|
|
||||||
resource=TableResource(database_name, table_name),
|
|
||||||
)
|
|
||||||
```
|
|
||||||
|
|
||||||
## Root user checks are no longer necessary
|
|
||||||
|
|
||||||
Some plugins would introduce their own custom permission and then ensure the `"root"` actor had access to it using a pattern like this:
|
|
||||||
|
|
||||||
```python
|
|
||||||
@hookimpl
|
|
||||||
def register_permissions(datasette):
|
|
||||||
return [
|
|
||||||
Permission(
|
|
||||||
name="upload-dbs",
|
|
||||||
abbr=None,
|
|
||||||
description="Upload SQLite database files",
|
|
||||||
takes_database=False,
|
|
||||||
takes_resource=False,
|
|
||||||
default=False,
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
@hookimpl
|
|
||||||
def permission_allowed(actor, action):
|
|
||||||
if action == "upload-dbs" and actor and actor.get("id") == "root":
|
|
||||||
return True
|
|
||||||
```
|
|
||||||
This is no longer necessary in Datasette 1.0a20 - the `"root"` actor automatically has all permissions when Datasette is started with the `datasette --root` option.
|
|
||||||
|
|
||||||
The `permission_allowed()` hook in this example can be entirely removed.
|
|
||||||
|
|
||||||
### Root-enabled instances during testing
|
|
||||||
|
|
||||||
When writing tests that exercise root-only functionality, make sure to set `datasette.root_enabled = True` on the `Datasette` instance. Root permissions are only granted automatically when Datasette is started with `datasette --root` or when the flag is enabled directly in tests.
|
|
||||||
|
|
||||||
## Target the new APIs exclusively
|
|
||||||
|
|
||||||
Datasette 1.0a20’s permission system is substantially different from previous releases. Attempting to keep plugin code compatible with both the old `permission_allowed()` and the new `allowed()` interfaces leads to brittle workarounds. Prefer to adopt the 1.0a20 APIs (`register_actions`, `permission_resources_sql()`, and `datasette.allowed()`) outright and drop legacy fallbacks.
|
|
||||||
|
|
||||||
## Fixing async with httpx.AsyncClient(app=app)
|
|
||||||
|
|
||||||
Some older plugins may use the following pattern in their tests, which is no longer supported:
|
|
||||||
```python
|
|
||||||
app = Datasette([], memory=True).app()
|
|
||||||
async with httpx.AsyncClient(app=app) as client:
|
|
||||||
response = await client.get("http://localhost/path")
|
|
||||||
```
|
|
||||||
The new pattern is to use `ds.client` like this:
|
|
||||||
```python
|
|
||||||
ds = Datasette([], memory=True)
|
|
||||||
response = await ds.client.get("/path")
|
|
||||||
```
|
|
||||||
|
|
||||||
## Migrating from metadata= to config=
|
|
||||||
|
|
||||||
Datasette 1.0 separates metadata (titles, descriptions, licenses) from configuration (settings, plugins, queries, permissions). Plugin tests and code need to be updated accordingly.
|
|
||||||
|
|
||||||
### Update test constructors
|
|
||||||
|
|
||||||
Old code:
|
|
||||||
```python
|
|
||||||
ds = Datasette(
|
|
||||||
memory=True,
|
|
||||||
metadata={
|
|
||||||
"databases": {
|
|
||||||
"_memory": {"queries": {"my_query": {"sql": "select 1", "title": "My Query"}}}
|
|
||||||
},
|
|
||||||
"plugins": {
|
|
||||||
"my-plugin": {"setting": "value"}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
```
|
|
||||||
|
|
||||||
New code:
|
|
||||||
```python
|
|
||||||
ds = Datasette(
|
|
||||||
memory=True,
|
|
||||||
config={
|
|
||||||
"databases": {
|
|
||||||
"_memory": {"queries": {"my_query": {"sql": "select 1", "title": "My Query"}}}
|
|
||||||
},
|
|
||||||
"plugins": {
|
|
||||||
"my-plugin": {"setting": "value"}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
```
|
|
||||||
|
|
||||||
### Update datasette.metadata() calls
|
|
||||||
|
|
||||||
The `datasette.metadata()` method has been removed. Use these methods instead:
|
|
||||||
|
|
||||||
Old code:
|
|
||||||
```python
|
|
||||||
try:
|
|
||||||
title = datasette.metadata(database=database)["queries"][query_name]["title"]
|
|
||||||
except (KeyError, TypeError):
|
|
||||||
pass
|
|
||||||
```
|
|
||||||
|
|
||||||
New code:
|
|
||||||
```python
|
|
||||||
try:
|
|
||||||
query_info = await datasette.get_canned_query(database, query_name, request.actor)
|
|
||||||
if query_info and "title" in query_info:
|
|
||||||
title = query_info["title"]
|
|
||||||
except (KeyError, TypeError):
|
|
||||||
pass
|
|
||||||
```
|
|
||||||
|
|
||||||
### Update render functions to async
|
|
||||||
|
|
||||||
If your plugin's render function needs to call `datasette.get_canned_query()` or other async Datasette methods, it must be declared as async:
|
|
||||||
|
|
||||||
Old code:
|
|
||||||
```python
|
|
||||||
def render_atom(datasette, request, sql, columns, rows, database, table, query_name, view_name, data):
|
|
||||||
# ...
|
|
||||||
if query_name:
|
|
||||||
title = datasette.metadata(database=database)["queries"][query_name]["title"]
|
|
||||||
```
|
|
||||||
|
|
||||||
New code:
|
|
||||||
```python
|
|
||||||
async def render_atom(datasette, request, sql, columns, rows, database, table, query_name, view_name, data):
|
|
||||||
# ...
|
|
||||||
if query_name:
|
|
||||||
query_info = await datasette.get_canned_query(database, query_name, request.actor)
|
|
||||||
if query_info and "title" in query_info:
|
|
||||||
title = query_info["title"]
|
|
||||||
```
|
|
||||||
|
|
||||||
### Update query URLs in tests
|
|
||||||
|
|
||||||
Datasette now redirects `?sql=` parameters from database pages to the query view:
|
|
||||||
|
|
||||||
Old code:
|
|
||||||
```python
|
|
||||||
response = await ds.client.get("/_memory.atom?sql=select+1")
|
|
||||||
```
|
|
||||||
|
|
||||||
New code:
|
|
||||||
```python
|
|
||||||
response = await ds.client.get("/_memory/-/query.atom?sql=select+1")
|
|
||||||
```
|
|
||||||
|
|
@ -1,116 +0,0 @@
|
||||||
(upgrade_guide)=
|
|
||||||
# Upgrade guide
|
|
||||||
|
|
||||||
(upgrade_guide_v1)=
|
|
||||||
## Datasette 0.X -> 1.0
|
|
||||||
|
|
||||||
This section reviews breaking changes Datasette ``1.0`` has when upgrading from a ``0.XX`` version. For new features that ``1.0`` offers, see the {ref}`changelog`.
|
|
||||||
|
|
||||||
(upgrade_guide_v1_sql_queries)=
|
|
||||||
### New URL for SQL queries
|
|
||||||
|
|
||||||
Prior to ``1.0a14`` the URL for executing a SQL query looked like this:
|
|
||||||
|
|
||||||
```text
|
|
||||||
/databasename?sql=select+1
|
|
||||||
# Or for JSON:
|
|
||||||
/databasename.json?sql=select+1
|
|
||||||
```
|
|
||||||
|
|
||||||
This endpoint served two purposes: without a ``?sql=`` it would list the tables in the database, but with that option it would return results of a query instead.
|
|
||||||
|
|
||||||
The URL for executing a SQL query now looks like this:
|
|
||||||
|
|
||||||
```text
|
|
||||||
/databasename/-/query?sql=select+1
|
|
||||||
# Or for JSON:
|
|
||||||
/databasename/-/query.json?sql=select+1
|
|
||||||
```
|
|
||||||
|
|
||||||
**This isn't a breaking change.** API calls to the older ``/databasename?sql=...`` endpoint will redirect to the new ``databasename/-/query?sql=...`` endpoint. Upgrading to the new URL is recommended to avoid the overhead of the additional redirect.
|
|
||||||
|
|
||||||
(upgrade_guide_v1_metadata)=
|
|
||||||
### Metadata changes
|
|
||||||
|
|
||||||
Metadata was completely revamped for Datasette 1.0. There are a number of related breaking changes, from the ``metadata.yaml`` file to Python APIs, that you'll need to consider when upgrading.
|
|
||||||
|
|
||||||
(upgrade_guide_v1_metadata_split)=
|
|
||||||
#### ``metadata.yaml`` split into ``datasette.yaml``
|
|
||||||
|
|
||||||
Before Datasette 1.0, the ``metadata.yaml`` file became a kitchen sink of a mix of metadata, configuration, and settings. Now ``metadata.yaml`` is strictly for metadata (e.g. titles and descriptions of databases and tables, licensing info, etc.). Other settings have been moved to a ``datasette.yml`` configuration file, described in {ref}`configuration`.
|
|
||||||
|
|
||||||
To start Datasette with both metadata and configuration files, run it like this:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
datasette --metadata metadata.yaml --config datasette.yaml
|
|
||||||
# Or the shortened version:
|
|
||||||
datasette -m metadata.yml -c datasette.yml
|
|
||||||
```
|
|
||||||
|
|
||||||
(upgrade_guide_v1_metadata_upgrade)=
|
|
||||||
#### Upgrading an existing ``metadata.yaml`` file
|
|
||||||
|
|
||||||
The [datasette-upgrade plugin](https://github.com/datasette/datasette-upgrade) can be used to split a Datasette 0.x.x ``metadata.yaml`` (or ``.json``) file into separate ``metadata.yaml`` and ``datasette.yaml`` files. First, install the plugin:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
datasette install datasette-upgrade
|
|
||||||
```
|
|
||||||
|
|
||||||
Then run it like this to produce the two new files:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
datasette upgrade metadata-to-config metadata.json -m metadata.yml -c datasette.yml
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Metadata "fallback" has been removed
|
|
||||||
|
|
||||||
Certain keys in metadata like ``license`` used to "fallback" up the chain of ownership.
|
|
||||||
For example, if you set an ``MIT`` to a database and a table within that database did not have a specified license, then that table would inherit an ``MIT`` license.
|
|
||||||
|
|
||||||
This behavior has been removed in Datasette 1.0. Now license fields must be placed on all items, including individual databases and tables.
|
|
||||||
|
|
||||||
(upgrade_guide_v1_metadata_removed)=
|
|
||||||
#### The ``get_metadata()`` plugin hook has been removed
|
|
||||||
|
|
||||||
In Datasette ``0.x`` plugins could implement a ``get_metadata()`` plugin hook to customize how metadata was retrieved for different instances, databases and tables.
|
|
||||||
|
|
||||||
This hook could be inefficient, since some pages might load metadata for many different items (to list a large number of tables, for example) which could result in a large number of calls to potentially expensive plugin hook implementations.
|
|
||||||
|
|
||||||
As of Datasette ``1.0a14`` (2024-08-05), the ``get_metadata()`` hook has been deprecated:
|
|
||||||
|
|
||||||
```python
|
|
||||||
# ❌ DEPRECATED in Datasette 1.0
|
|
||||||
@hookimpl
|
|
||||||
def get_metadata(datasette, key, database, table):
|
|
||||||
pass
|
|
||||||
```
|
|
||||||
|
|
||||||
Instead, plugins are encouraged to interact directly with Datasette's in-memory metadata tables in SQLite using the following methods on the {ref}`internals_datasette`:
|
|
||||||
|
|
||||||
- {ref}`get_instance_metadata() <datasette_get_instance_metadata>` and {ref}`set_instance_metadata() <datasette_set_instance_metadata>`
|
|
||||||
- {ref}`get_database_metadata() <datasette_get_database_metadata>` and {ref}`set_database_metadata() <datasette_set_database_metadata>`
|
|
||||||
- {ref}`get_resource_metadata() <datasette_get_resource_metadata>` and {ref}`set_resource_metadata() <datasette_set_resource_metadata>`
|
|
||||||
- {ref}`get_column_metadata() <datasette_get_column_metadata>` and {ref}`set_column_metadata() <datasette_set_column_metadata>`
|
|
||||||
|
|
||||||
A plugin that stores or calculates its own metadata can implement the {ref}`plugin_hook_startup` hook to populate those items on startup, and then call those methods while it is running to persist any new metadata changes.
|
|
||||||
|
|
||||||
(upgrade_guide_v1_metadata_json_removed)=
|
|
||||||
#### The ``/metadata.json`` endpoint has been removed
|
|
||||||
|
|
||||||
As of Datasette ``1.0a14``, the root level ``/metadata.json`` endpoint has been removed. Metadata for tables will become available through currently in-development extras in a future alpha.
|
|
||||||
|
|
||||||
(upgrade_guide_v1_metadata_method_removed)=
|
|
||||||
#### The ``metadata()`` method on the Datasette class has been removed
|
|
||||||
|
|
||||||
As of Datasette ``1.0a14``, the ``.metadata()`` method on the Datasette Python API has been removed.
|
|
||||||
|
|
||||||
Instead, one should use the following methods on a Datasette class:
|
|
||||||
|
|
||||||
- {ref}`get_instance_metadata() <datasette_get_instance_metadata>`
|
|
||||||
- {ref}`get_database_metadata() <datasette_get_database_metadata>`
|
|
||||||
- {ref}`get_resource_metadata() <datasette_get_resource_metadata>`
|
|
||||||
- {ref}`get_column_metadata() <datasette_get_column_metadata>`
|
|
||||||
|
|
||||||
```{include} upgrade-1.0a20.md
|
|
||||||
:heading-offset: 1
|
|
||||||
```
|
|
||||||
130
docs/upgrade_guide.rst
Normal file
130
docs/upgrade_guide.rst
Normal file
|
|
@ -0,0 +1,130 @@
|
||||||
|
.. _upgrade_guide:
|
||||||
|
|
||||||
|
===============
|
||||||
|
Upgrade guide
|
||||||
|
===============
|
||||||
|
|
||||||
|
.. _upgrade_guide_v1:
|
||||||
|
|
||||||
|
Datasette 0.X -> 1.0
|
||||||
|
====================
|
||||||
|
|
||||||
|
This section reviews breaking changes Datasette ``1.0`` has when upgrading from a ``0.XX`` version. For new features that ``1.0`` offers, see the :ref:`changelog`.
|
||||||
|
|
||||||
|
.. _upgrade_guide_v1_sql_queries:
|
||||||
|
|
||||||
|
New URL for SQL queries
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
Prior to ``1.0a14`` the URL for executing a SQL query looked like this:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
/databasename?sql=select+1
|
||||||
|
# Or for JSON:
|
||||||
|
/databasename.json?sql=select+1
|
||||||
|
|
||||||
|
This endpoint served two purposes: without a ``?sql=`` it would list the tables in the database, but with that option it would return results of a query instead.
|
||||||
|
|
||||||
|
The URL for executing a SQL query now looks like this::
|
||||||
|
|
||||||
|
/databasename/-/query?sql=select+1
|
||||||
|
# Or for JSON:
|
||||||
|
/databasename/-/query.json?sql=select+1
|
||||||
|
|
||||||
|
**This isn't a breaking change.** API calls to the older ``/databasename?sql=...`` endpoint will redirect to the new ``databasename/-/query?sql=...`` endpoint. Upgrading to the new URL is recommended to avoid the overhead of the additional redirect.
|
||||||
|
|
||||||
|
.. _upgrade_guide_v1_metadata:
|
||||||
|
|
||||||
|
Metadata changes
|
||||||
|
----------------
|
||||||
|
|
||||||
|
Metadata was completely revamped for Datasette 1.0. There are a number of related breaking changes, from the ``metadata.yaml`` file to Python APIs, that you'll need to consider when upgrading.
|
||||||
|
|
||||||
|
.. _upgrade_guide_v1_metadata_split:
|
||||||
|
|
||||||
|
``metadata.yaml`` split into ``datasette.yaml``
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Before Datasette 1.0, the ``metadata.yaml`` file became a kitchen sink if a mix of metadata, configuration, and settings. Now ``metadata.yaml`` is strictly for metaata (ex title and descriptions of database and tables, licensing info, etc). Other settings have been moved to a ``datasette.yml`` configuration file, described in :ref:`configuration`.
|
||||||
|
|
||||||
|
To start Datasette with both metadata and configuration files, run it like this:
|
||||||
|
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
datasette --metadata metadata.yaml --config datasette.yaml
|
||||||
|
# Or the shortened version:
|
||||||
|
datasette -m metadata.yml -c datasette.yml
|
||||||
|
|
||||||
|
.. _upgrade_guide_v1_metadata_upgrade:
|
||||||
|
|
||||||
|
Upgrading an existing ``metadata.yaml`` file
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
The `datasette-upgrade plugin <https://github.com/datasette/datasette-upgrade>`__ can be used to split a Datasette 0.x.x ``metadata.yaml`` (or ``.json``) file into separate ``metadata.yaml`` and ``datasette.yaml`` files. First, install the plugin:
|
||||||
|
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
datasette install datasette-upgrade
|
||||||
|
|
||||||
|
Then run it like this to produce the two new files:
|
||||||
|
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
datasette upgrade metadata-to-config metadata.json -m metadata.yml -c datasette.yml
|
||||||
|
|
||||||
|
Metadata "fallback" has been removed
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Certain keys in metadata like ``license`` used to "fallback" up the chain of ownership.
|
||||||
|
For example, if you set an ``MIT`` to a database and a table within that database did not have a specified license, then that table would inherit an ``MIT`` license.
|
||||||
|
|
||||||
|
This behavior has been removed in Datasette 1.0. Now license fields must be placed on all items, including individual databases and tables.
|
||||||
|
|
||||||
|
.. _upgrade_guide_v1_metadata_removed:
|
||||||
|
|
||||||
|
The ``get_metadata()`` plugin hook has been removed
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
In Datasette ``0.x`` plugins could implement a ``get_metadata()`` plugin hook to customize how metadata was retrieved for different instances, databases and tables.
|
||||||
|
|
||||||
|
This hook could be inefficient, since some pages might load metadata for many different items (to list a large number of tables, for example) which could result in a large number of calls to potentially expensive plugin hook implementations.
|
||||||
|
|
||||||
|
As of Datasette ``1.0a14`` (2024-08-05), the ``get_metadata()`` hook has been deprecated:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
# ❌ DEPRECATED in Datasette 1.0
|
||||||
|
@hookimpl
|
||||||
|
def get_metadata(datasette, key, database, table):
|
||||||
|
pass
|
||||||
|
|
||||||
|
Instead, plugins are encouraged to interact directly with Datasette's in-memory metadata tables in SQLite using the following methods on the :ref:`internals_datasette`:
|
||||||
|
|
||||||
|
- :ref:`get_instance_metadata() <datasette_get_instance_metadata>` and :ref:`set_instance_metadata() <datasette_set_instance_metadata>`
|
||||||
|
- :ref:`get_database_metadata() <datasette_get_database_metadata>` and :ref:`set_database_metadata() <datasette_set_database_metadata>`
|
||||||
|
- :ref:`get_resource_metadata() <datasette_get_resource_metadata>` and :ref:`set_resource_metadata() <datasette_set_resource_metadata>`
|
||||||
|
- :ref:`get_column_metadata() <datasette_get_column_metadata>` and :ref:`set_column_metadata() <datasette_set_column_metadata>`
|
||||||
|
|
||||||
|
A plugin that stores or calculates its own metadata can implement the :ref:`plugin_hook_startup` hook to populate those items on startup, and then call those methods while it is running to persist any new metadata changes.
|
||||||
|
|
||||||
|
.. _upgrade_guide_v1_metadata_json_removed:
|
||||||
|
|
||||||
|
The ``/metadata.json`` endpoint has been removed
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
As of Datasette ``1.0a14``, the root level ``/metadata.json`` endpoint has been removed. Metadata for tables will become available through currently in-development extras in a future alpha.
|
||||||
|
|
||||||
|
.. _upgrade_guide_v1_metadata_method_removed:
|
||||||
|
|
||||||
|
The ``metadata()`` method on the Datasette class has been removed
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
As of Datasette ``1.0a14``, the ``.metadata()`` method on the Datasette Python API has been removed.
|
||||||
|
|
||||||
|
Instead, one should use the following methods on a Datasette class:
|
||||||
|
|
||||||
|
- :ref:`get_instance_metadata() <datasette_get_instance_metadata>`
|
||||||
|
- :ref:`get_database_metadata() <datasette_get_database_metadata>`
|
||||||
|
- :ref:`get_resource_metadata() <datasette_get_resource_metadata>`
|
||||||
|
- :ref:`get_column_metadata() <datasette_get_column_metadata>`
|
||||||
|
|
@ -28,7 +28,7 @@ dependencies = [
|
||||||
"click-default-group>=1.2.3",
|
"click-default-group>=1.2.3",
|
||||||
"Jinja2>=2.10.3",
|
"Jinja2>=2.10.3",
|
||||||
"hupper>=1.9",
|
"hupper>=1.9",
|
||||||
"httpx>=0.20,<1.0",
|
"httpx>=0.20",
|
||||||
"pluggy>=1.0",
|
"pluggy>=1.0",
|
||||||
"uvicorn>=0.11",
|
"uvicorn>=0.11",
|
||||||
"aiofiles>=0.4",
|
"aiofiles>=0.4",
|
||||||
|
|
@ -64,16 +64,14 @@ docs = [
|
||||||
"blacken-docs",
|
"blacken-docs",
|
||||||
"sphinx-copybutton",
|
"sphinx-copybutton",
|
||||||
"sphinx-inline-tabs",
|
"sphinx-inline-tabs",
|
||||||
"myst-parser",
|
|
||||||
"sphinx-markdown-builder",
|
|
||||||
"ruamel.yaml",
|
"ruamel.yaml",
|
||||||
]
|
]
|
||||||
test = [
|
test = [
|
||||||
"pytest>=9",
|
"pytest>=5.2.2",
|
||||||
"pytest-xdist>=2.2.1",
|
"pytest-xdist>=2.2.1",
|
||||||
"pytest-asyncio>=1.2.0",
|
"pytest-asyncio>=1.2.0",
|
||||||
"beautifulsoup4>=4.8.1",
|
"beautifulsoup4>=4.8.1",
|
||||||
"black==25.11.0",
|
"black==25.9.0",
|
||||||
"blacken-docs==1.20.0",
|
"blacken-docs==1.20.0",
|
||||||
"pytest-timeout>=1.4.2",
|
"pytest-timeout>=1.4.2",
|
||||||
"trustme>=0.7",
|
"trustme>=0.7",
|
||||||
|
|
@ -93,6 +91,3 @@ datasette = ["templates/*.html"]
|
||||||
|
|
||||||
[tool.setuptools.dynamic]
|
[tool.setuptools.dynamic]
|
||||||
version = {attr = "datasette.version.__version__"}
|
version = {attr = "datasette.version.__version__"}
|
||||||
|
|
||||||
[tool.uv]
|
|
||||||
package = true
|
|
||||||
|
|
|
||||||
|
|
@ -23,10 +23,6 @@ UNDOCUMENTED_PERMISSIONS = {
|
||||||
"this_is_allowed_async",
|
"this_is_allowed_async",
|
||||||
"this_is_denied_async",
|
"this_is_denied_async",
|
||||||
"no_match",
|
"no_match",
|
||||||
# Test actions from test_hook_register_actions_with_custom_resources
|
|
||||||
"manage_documents",
|
|
||||||
"view_document_collection",
|
|
||||||
"view_document",
|
|
||||||
}
|
}
|
||||||
|
|
||||||
_ds_client = None
|
_ds_client = None
|
||||||
|
|
@ -62,7 +58,6 @@ async def ds_client():
|
||||||
"default_page_size": 50,
|
"default_page_size": 50,
|
||||||
"max_returned_rows": 100,
|
"max_returned_rows": 100,
|
||||||
"sql_time_limit_ms": 200,
|
"sql_time_limit_ms": 200,
|
||||||
"facet_suggest_time_limit_ms": 200, # Up from 50 default
|
|
||||||
# Default is 3 but this results in "too many open files"
|
# Default is 3 but this results in "too many open files"
|
||||||
# errors when running the full test suite:
|
# errors when running the full test suite:
|
||||||
"num_sql_threads": 1,
|
"num_sql_threads": 1,
|
||||||
|
|
@ -143,14 +138,14 @@ def restore_working_directory(tmpdir, request):
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session", autouse=True)
|
@pytest.fixture(scope="session", autouse=True)
|
||||||
def check_actions_are_documented():
|
def check_permission_actions_are_documented():
|
||||||
from datasette.plugins import pm
|
from datasette.plugins import pm
|
||||||
|
|
||||||
content = (
|
content = (
|
||||||
pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst"
|
pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst"
|
||||||
).read_text()
|
).read_text()
|
||||||
permissions_re = re.compile(r"\.\. _actions_([^\s:]+):")
|
permissions_re = re.compile(r"\.\. _permissions_([^\s:]+):")
|
||||||
documented_actions = set(permissions_re.findall(content)).union(
|
documented_permission_actions = set(permissions_re.findall(content)).union(
|
||||||
UNDOCUMENTED_PERMISSIONS
|
UNDOCUMENTED_PERMISSIONS
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -165,7 +160,7 @@ def check_actions_are_documented():
|
||||||
)
|
)
|
||||||
action = kwargs.get("action").replace("-", "_")
|
action = kwargs.get("action").replace("-", "_")
|
||||||
assert (
|
assert (
|
||||||
action in documented_actions
|
action in documented_permission_actions
|
||||||
), "Undocumented permission action: {}".format(action)
|
), "Undocumented permission action: {}".format(action)
|
||||||
|
|
||||||
pm.add_hookcall_monitoring(
|
pm.add_hookcall_monitoring(
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@ from datasette import hookimpl
|
||||||
from datasette.facets import Facet
|
from datasette.facets import Facet
|
||||||
from datasette import tracer
|
from datasette import tracer
|
||||||
from datasette.permissions import Action
|
from datasette.permissions import Action
|
||||||
from datasette.resources import DatabaseResource
|
from datasette.resources import DatabaseResource, InstanceResource
|
||||||
from datasette.utils import path_with_added_args
|
from datasette.utils import path_with_added_args
|
||||||
from datasette.utils.asgi import asgi_send_json, Response
|
from datasette.utils.asgi import asgi_send_json, Response
|
||||||
import base64
|
import base64
|
||||||
|
|
@ -461,90 +461,61 @@ def register_actions(datasette):
|
||||||
name="action-from-plugin",
|
name="action-from-plugin",
|
||||||
abbr="ap",
|
abbr="ap",
|
||||||
description="New action added by a plugin",
|
description="New action added by a plugin",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=False,
|
||||||
resource_class=DatabaseResource,
|
resource_class=DatabaseResource,
|
||||||
),
|
),
|
||||||
Action(
|
Action(
|
||||||
name="view-collection",
|
name="view-collection",
|
||||||
abbr="vc",
|
abbr="vc",
|
||||||
description="View a collection",
|
description="View a collection",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=False,
|
||||||
resource_class=DatabaseResource,
|
resource_class=DatabaseResource,
|
||||||
),
|
),
|
||||||
# Test actions for test_hook_custom_allowed (global actions - no resource_class)
|
|
||||||
Action(
|
|
||||||
name="this_is_allowed",
|
|
||||||
abbr=None,
|
|
||||||
description=None,
|
|
||||||
),
|
|
||||||
Action(
|
|
||||||
name="this_is_denied",
|
|
||||||
abbr=None,
|
|
||||||
description=None,
|
|
||||||
),
|
|
||||||
Action(
|
|
||||||
name="this_is_allowed_async",
|
|
||||||
abbr=None,
|
|
||||||
description=None,
|
|
||||||
),
|
|
||||||
Action(
|
|
||||||
name="this_is_denied_async",
|
|
||||||
abbr=None,
|
|
||||||
description=None,
|
|
||||||
),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
# Support old-style config for backwards compatibility
|
# Support old-style config for backwards compatibility
|
||||||
if extras_old:
|
if extras_old:
|
||||||
for p in extras_old["permissions"]:
|
for p in extras_old["permissions"]:
|
||||||
# Map old takes_database/takes_resource to new global/resource_class
|
# Map old takes_database/takes_resource to new takes_parent/takes_child
|
||||||
if p.get("takes_database"):
|
actions.append(
|
||||||
# Has database -> DatabaseResource
|
Action(
|
||||||
actions.append(
|
name=p["name"],
|
||||||
Action(
|
abbr=p["abbr"],
|
||||||
name=p["name"],
|
description=p["description"],
|
||||||
abbr=p["abbr"],
|
takes_parent=p.get("takes_database", False),
|
||||||
description=p["description"],
|
takes_child=p.get("takes_resource", False),
|
||||||
resource_class=DatabaseResource,
|
resource_class=(
|
||||||
)
|
DatabaseResource
|
||||||
)
|
if p.get("takes_database")
|
||||||
else:
|
else InstanceResource
|
||||||
# No database -> global action (no resource_class)
|
),
|
||||||
actions.append(
|
|
||||||
Action(
|
|
||||||
name=p["name"],
|
|
||||||
abbr=p["abbr"],
|
|
||||||
description=p["description"],
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
)
|
||||||
|
|
||||||
# Support new-style config
|
# Support new-style config
|
||||||
if extras_new:
|
if extras_new:
|
||||||
for a in extras_new["actions"]:
|
for a in extras_new["actions"]:
|
||||||
# Check if this is a global action (no resource_class specified)
|
# Map string resource_class to actual class
|
||||||
if not a.get("resource_class"):
|
resource_class_map = {
|
||||||
actions.append(
|
"InstanceResource": InstanceResource,
|
||||||
Action(
|
"DatabaseResource": DatabaseResource,
|
||||||
name=a["name"],
|
}
|
||||||
abbr=a["abbr"],
|
resource_class = resource_class_map.get(
|
||||||
description=a["description"],
|
a.get("resource_class", "InstanceResource"), InstanceResource
|
||||||
)
|
)
|
||||||
)
|
|
||||||
else:
|
|
||||||
# Map string resource_class to actual class
|
|
||||||
resource_class_map = {
|
|
||||||
"DatabaseResource": DatabaseResource,
|
|
||||||
}
|
|
||||||
resource_class = resource_class_map.get(
|
|
||||||
a.get("resource_class", "DatabaseResource"), DatabaseResource
|
|
||||||
)
|
|
||||||
|
|
||||||
actions.append(
|
actions.append(
|
||||||
Action(
|
Action(
|
||||||
name=a["name"],
|
name=a["name"],
|
||||||
abbr=a["abbr"],
|
abbr=a["abbr"],
|
||||||
description=a["description"],
|
description=a["description"],
|
||||||
resource_class=resource_class,
|
takes_parent=a.get("takes_parent", False),
|
||||||
)
|
takes_child=a.get("takes_child", False),
|
||||||
|
resource_class=resource_class,
|
||||||
)
|
)
|
||||||
|
)
|
||||||
|
|
||||||
return actions
|
return actions
|
||||||
|
|
||||||
|
|
@ -553,29 +524,32 @@ def register_actions(datasette):
|
||||||
def permission_resources_sql(datasette, actor, action):
|
def permission_resources_sql(datasette, actor, action):
|
||||||
from datasette.permissions import PermissionSQL
|
from datasette.permissions import PermissionSQL
|
||||||
|
|
||||||
# Handle test actions used in test_hook_custom_allowed
|
# Handle test actions used in test_hook_permission_allowed
|
||||||
if action == "this_is_allowed":
|
if action == "this_is_allowed":
|
||||||
return PermissionSQL.allow(reason="test plugin allows this_is_allowed")
|
sql = "SELECT NULL AS parent, NULL AS child, 1 AS allow, 'test plugin allows this_is_allowed' AS reason"
|
||||||
|
return PermissionSQL(source="my_plugin", sql=sql, params={})
|
||||||
elif action == "this_is_denied":
|
elif action == "this_is_denied":
|
||||||
return PermissionSQL.deny(reason="test plugin denies this_is_denied")
|
sql = "SELECT NULL AS parent, NULL AS child, 0 AS allow, 'test plugin denies this_is_denied' AS reason"
|
||||||
|
return PermissionSQL(source="my_plugin", sql=sql, params={})
|
||||||
elif action == "this_is_allowed_async":
|
elif action == "this_is_allowed_async":
|
||||||
return PermissionSQL.allow(reason="test plugin allows this_is_allowed_async")
|
sql = "SELECT NULL AS parent, NULL AS child, 1 AS allow, 'test plugin allows this_is_allowed_async' AS reason"
|
||||||
|
return PermissionSQL(source="my_plugin", sql=sql, params={})
|
||||||
elif action == "this_is_denied_async":
|
elif action == "this_is_denied_async":
|
||||||
return PermissionSQL.deny(reason="test plugin denies this_is_denied_async")
|
sql = "SELECT NULL AS parent, NULL AS child, 0 AS allow, 'test plugin denies this_is_denied_async' AS reason"
|
||||||
|
return PermissionSQL(source="my_plugin", sql=sql, params={})
|
||||||
elif action == "view-database-download":
|
elif action == "view-database-download":
|
||||||
# Return rule based on actor's can_download permission
|
# Return rule based on actor's can_download permission
|
||||||
if actor and actor.get("can_download"):
|
if actor and actor.get("can_download"):
|
||||||
return PermissionSQL.allow(reason="actor has can_download")
|
sql = "SELECT NULL AS parent, NULL AS child, 1 AS allow, 'actor has can_download' AS reason"
|
||||||
else:
|
else:
|
||||||
return None # No opinion
|
return None # No opinion
|
||||||
|
return PermissionSQL(source="my_plugin", sql=sql, params={})
|
||||||
elif action == "view-database":
|
elif action == "view-database":
|
||||||
# Also grant view-database if actor has can_download (needed for download to work)
|
# Also grant view-database if actor has can_download (needed for download to work)
|
||||||
if actor and actor.get("can_download"):
|
if actor and actor.get("can_download"):
|
||||||
return PermissionSQL.allow(
|
sql = "SELECT NULL AS parent, NULL AS child, 1 AS allow, 'actor has can_download, grants view-database' AS reason"
|
||||||
reason="actor has can_download, grants view-database"
|
return PermissionSQL(source="my_plugin", sql=sql, params={})
|
||||||
)
|
return None
|
||||||
else:
|
|
||||||
return None
|
|
||||||
elif action in (
|
elif action in (
|
||||||
"insert-row",
|
"insert-row",
|
||||||
"create-table",
|
"create-table",
|
||||||
|
|
@ -586,6 +560,7 @@ def permission_resources_sql(datasette, actor, action):
|
||||||
# Special permissions for latest.datasette.io demos
|
# Special permissions for latest.datasette.io demos
|
||||||
actor_id = actor.get("id") if actor else None
|
actor_id = actor.get("id") if actor else None
|
||||||
if actor_id == "todomvc":
|
if actor_id == "todomvc":
|
||||||
return PermissionSQL.allow(reason=f"todomvc actor allowed for {action}")
|
sql = f"SELECT NULL AS parent, NULL AS child, 1 AS allow, 'todomvc actor allowed for {action}' AS reason"
|
||||||
|
return PermissionSQL(source="my_plugin", sql=sql, params={})
|
||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
|
||||||
|
|
@ -2,15 +2,16 @@
|
||||||
Tests for the new Resource-based permission system.
|
Tests for the new Resource-based permission system.
|
||||||
|
|
||||||
These tests verify:
|
These tests verify:
|
||||||
1. The new Datasette.allowed_resources() method (with pagination)
|
1. The new Datasette.allowed_resources() method
|
||||||
2. The new Datasette.allowed() method
|
2. The new Datasette.allowed() method
|
||||||
3. The include_reasons parameter for debugging
|
3. The new Datasette.allowed_resources_with_reasons() method
|
||||||
4. That SQL does the heavy lifting (no Python filtering)
|
4. That SQL does the heavy lifting (no Python filtering)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import pytest_asyncio
|
import pytest_asyncio
|
||||||
from datasette.app import Datasette
|
from datasette.app import Datasette
|
||||||
|
from datasette.plugins import pm
|
||||||
from datasette.permissions import PermissionSQL
|
from datasette.permissions import PermissionSQL
|
||||||
from datasette.resources import TableResource
|
from datasette.resources import TableResource
|
||||||
from datasette import hookimpl
|
from datasette import hookimpl
|
||||||
|
|
@ -62,16 +63,15 @@ async def test_allowed_resources_global_allow(test_ds):
|
||||||
def rules_callback(datasette, actor, action):
|
def rules_callback(datasette, actor, action):
|
||||||
if actor and actor.get("id") == "alice":
|
if actor and actor.get("id") == "alice":
|
||||||
sql = "SELECT NULL AS parent, NULL AS child, 1 AS allow, 'global: alice has access' AS reason"
|
sql = "SELECT NULL AS parent, NULL AS child, 1 AS allow, 'global: alice has access' AS reason"
|
||||||
return PermissionSQL(sql=sql)
|
return PermissionSQL(source="test", sql=sql, params={})
|
||||||
return None
|
return None
|
||||||
|
|
||||||
plugin = PermissionRulesPlugin(rules_callback)
|
plugin = PermissionRulesPlugin(rules_callback)
|
||||||
test_ds.pm.register(plugin, name="test_plugin")
|
pm.register(plugin, name="test_plugin")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Use the new allowed_resources() method
|
# Use the new allowed_resources() method
|
||||||
result = await test_ds.allowed_resources("view-table", {"id": "alice"})
|
tables = await test_ds.allowed_resources("view-table", {"id": "alice"})
|
||||||
tables = result.resources
|
|
||||||
|
|
||||||
# Alice should see all tables
|
# Alice should see all tables
|
||||||
assert len(tables) == 5
|
assert len(tables) == 5
|
||||||
|
|
@ -86,7 +86,7 @@ async def test_allowed_resources_global_allow(test_ds):
|
||||||
assert ("production", "orders") in table_set
|
assert ("production", "orders") in table_set
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
test_ds.pm.unregister(plugin, name="test_plugin")
|
pm.unregister(plugin, name="test_plugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -101,11 +101,11 @@ async def test_allowed_specific_resource(test_ds):
|
||||||
UNION ALL
|
UNION ALL
|
||||||
SELECT 'analytics' AS parent, NULL AS child, 1 AS allow, 'analyst access' AS reason
|
SELECT 'analytics' AS parent, NULL AS child, 1 AS allow, 'analyst access' AS reason
|
||||||
"""
|
"""
|
||||||
return PermissionSQL(sql=sql)
|
return PermissionSQL(source="test", sql=sql, params={})
|
||||||
return None
|
return None
|
||||||
|
|
||||||
plugin = PermissionRulesPlugin(rules_callback)
|
plugin = PermissionRulesPlugin(rules_callback)
|
||||||
test_ds.pm.register(plugin, name="test_plugin")
|
pm.register(plugin, name="test_plugin")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
actor = {"id": "bob", "role": "analyst"}
|
actor = {"id": "bob", "role": "analyst"}
|
||||||
|
|
@ -129,11 +129,13 @@ async def test_allowed_specific_resource(test_ds):
|
||||||
)
|
)
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
test_ds.pm.unregister(plugin, name="test_plugin")
|
pm.unregister(plugin, name="test_plugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_allowed_resources_include_reasons(test_ds):
|
async def test_allowed_resources_with_reasons(test_ds):
|
||||||
|
"""Test allowed_resources_with_reasons() exposes debugging info"""
|
||||||
|
|
||||||
def rules_callback(datasette, actor, action):
|
def rules_callback(datasette, actor, action):
|
||||||
if actor and actor.get("role") == "analyst":
|
if actor and actor.get("role") == "analyst":
|
||||||
sql = """
|
sql = """
|
||||||
|
|
@ -143,33 +145,32 @@ async def test_allowed_resources_include_reasons(test_ds):
|
||||||
SELECT 'analytics' AS parent, 'sensitive' AS child, 0 AS allow,
|
SELECT 'analytics' AS parent, 'sensitive' AS child, 0 AS allow,
|
||||||
'child: sensitive data denied' AS reason
|
'child: sensitive data denied' AS reason
|
||||||
"""
|
"""
|
||||||
return PermissionSQL(sql=sql)
|
return PermissionSQL(source="test", sql=sql, params={})
|
||||||
return None
|
return None
|
||||||
|
|
||||||
plugin = PermissionRulesPlugin(rules_callback)
|
plugin = PermissionRulesPlugin(rules_callback)
|
||||||
test_ds.pm.register(plugin, name="test_plugin")
|
pm.register(plugin, name="test_plugin")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Use allowed_resources with include_reasons to get debugging info
|
# Use allowed_resources_with_reasons to get debugging info
|
||||||
result = await test_ds.allowed_resources(
|
allowed = await test_ds.allowed_resources_with_reasons(
|
||||||
"view-table", {"id": "bob", "role": "analyst"}, include_reasons=True
|
"view-table", {"id": "bob", "role": "analyst"}
|
||||||
)
|
)
|
||||||
allowed = result.resources
|
|
||||||
|
|
||||||
# Should get analytics tables except sensitive
|
# Should get analytics tables except sensitive
|
||||||
assert len(allowed) >= 2 # At least users and events
|
assert len(allowed) >= 2 # At least users and events
|
||||||
|
|
||||||
# Check we can access both resource and reason
|
# Check we can access both resource and reason
|
||||||
for resource in allowed:
|
for item in allowed:
|
||||||
assert isinstance(resource, TableResource)
|
assert isinstance(item.resource, TableResource)
|
||||||
assert isinstance(resource.reasons, list)
|
assert isinstance(item.reason, list)
|
||||||
if resource.parent == "analytics":
|
if item.resource.parent == "analytics":
|
||||||
# Should mention parent-level reason in at least one of the reasons
|
# Should mention parent-level reason in at least one of the reasons
|
||||||
reasons_text = " ".join(resource.reasons).lower()
|
reasons_text = " ".join(item.reason).lower()
|
||||||
assert "analyst access" in reasons_text
|
assert "analyst access" in reasons_text
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
test_ds.pm.unregister(plugin, name="test_plugin")
|
pm.unregister(plugin, name="test_plugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -185,16 +186,15 @@ async def test_child_deny_overrides_parent_allow(test_ds):
|
||||||
SELECT 'analytics' AS parent, 'sensitive' AS child, 0 AS allow,
|
SELECT 'analytics' AS parent, 'sensitive' AS child, 0 AS allow,
|
||||||
'child: deny sensitive' AS reason
|
'child: deny sensitive' AS reason
|
||||||
"""
|
"""
|
||||||
return PermissionSQL(sql=sql)
|
return PermissionSQL(source="test", sql=sql, params={})
|
||||||
return None
|
return None
|
||||||
|
|
||||||
plugin = PermissionRulesPlugin(rules_callback)
|
plugin = PermissionRulesPlugin(rules_callback)
|
||||||
test_ds.pm.register(plugin, name="test_plugin")
|
pm.register(plugin, name="test_plugin")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
actor = {"id": "bob", "role": "analyst"}
|
actor = {"id": "bob", "role": "analyst"}
|
||||||
result = await test_ds.allowed_resources("view-table", actor)
|
tables = await test_ds.allowed_resources("view-table", actor)
|
||||||
tables = result.resources
|
|
||||||
|
|
||||||
# Should see analytics tables except sensitive
|
# Should see analytics tables except sensitive
|
||||||
analytics_tables = [t for t in tables if t.parent == "analytics"]
|
analytics_tables = [t for t in tables if t.parent == "analytics"]
|
||||||
|
|
@ -218,7 +218,7 @@ async def test_child_deny_overrides_parent_allow(test_ds):
|
||||||
)
|
)
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
test_ds.pm.unregister(plugin, name="test_plugin")
|
pm.unregister(plugin, name="test_plugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -234,16 +234,15 @@ async def test_child_allow_overrides_parent_deny(test_ds):
|
||||||
SELECT 'production' AS parent, 'orders' AS child, 1 AS allow,
|
SELECT 'production' AS parent, 'orders' AS child, 1 AS allow,
|
||||||
'child: carol can see orders' AS reason
|
'child: carol can see orders' AS reason
|
||||||
"""
|
"""
|
||||||
return PermissionSQL(sql=sql)
|
return PermissionSQL(source="test", sql=sql, params={})
|
||||||
return None
|
return None
|
||||||
|
|
||||||
plugin = PermissionRulesPlugin(rules_callback)
|
plugin = PermissionRulesPlugin(rules_callback)
|
||||||
test_ds.pm.register(plugin, name="test_plugin")
|
pm.register(plugin, name="test_plugin")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
actor = {"id": "carol"}
|
actor = {"id": "carol"}
|
||||||
result = await test_ds.allowed_resources("view-table", actor)
|
tables = await test_ds.allowed_resources("view-table", actor)
|
||||||
tables = result.resources
|
|
||||||
|
|
||||||
# Should only see production.orders
|
# Should only see production.orders
|
||||||
production_tables = [t for t in tables if t.parent == "production"]
|
production_tables = [t for t in tables if t.parent == "production"]
|
||||||
|
|
@ -263,7 +262,7 @@ async def test_child_allow_overrides_parent_deny(test_ds):
|
||||||
)
|
)
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
test_ds.pm.unregister(plugin, name="test_plugin")
|
pm.unregister(plugin, name="test_plugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -284,10 +283,10 @@ async def test_sql_does_filtering_not_python(test_ds):
|
||||||
SELECT 'analytics' AS parent, 'users' AS child, 1 AS allow,
|
SELECT 'analytics' AS parent, 'users' AS child, 1 AS allow,
|
||||||
'specific allow' AS reason
|
'specific allow' AS reason
|
||||||
"""
|
"""
|
||||||
return PermissionSQL(sql=sql)
|
return PermissionSQL(source="test", sql=sql, params={})
|
||||||
|
|
||||||
plugin = PermissionRulesPlugin(rules_callback)
|
plugin = PermissionRulesPlugin(rules_callback)
|
||||||
test_ds.pm.register(plugin, name="test_plugin")
|
pm.register(plugin, name="test_plugin")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
actor = {"id": "dave"}
|
actor = {"id": "dave"}
|
||||||
|
|
@ -306,11 +305,66 @@ async def test_sql_does_filtering_not_python(test_ds):
|
||||||
)
|
)
|
||||||
|
|
||||||
# allowed_resources() should also use SQL filtering
|
# allowed_resources() should also use SQL filtering
|
||||||
result = await test_ds.allowed_resources("view-table", actor)
|
tables = await test_ds.allowed_resources("view-table", actor)
|
||||||
tables = result.resources
|
|
||||||
assert len(tables) == 1
|
assert len(tables) == 1
|
||||||
assert tables[0].parent == "analytics"
|
assert tables[0].parent == "analytics"
|
||||||
assert tables[0].child == "users"
|
assert tables[0].child == "users"
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
test_ds.pm.unregister(plugin, name="test_plugin")
|
pm.unregister(plugin, name="test_plugin")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_no_permission_rules_returns_correct_schema():
|
||||||
|
"""
|
||||||
|
Test that when no permission rules exist, the empty result has correct schema.
|
||||||
|
|
||||||
|
This is a regression test for a bug where the empty result returned only
|
||||||
|
2 columns (parent, child) instead of the documented 3 columns
|
||||||
|
(parent, child, reason), causing schema mismatches.
|
||||||
|
|
||||||
|
See: https://github.com/simonw/datasette/pull/2515#discussion_r2457803901
|
||||||
|
"""
|
||||||
|
from datasette.utils.actions_sql import build_allowed_resources_sql
|
||||||
|
|
||||||
|
# Create a fresh datasette instance
|
||||||
|
ds = Datasette()
|
||||||
|
await ds.invoke_startup()
|
||||||
|
|
||||||
|
# Add a test database
|
||||||
|
db = ds.add_memory_database("testdb")
|
||||||
|
await db.execute_write(
|
||||||
|
"CREATE TABLE IF NOT EXISTS test_table (id INTEGER PRIMARY KEY)"
|
||||||
|
)
|
||||||
|
await ds._refresh_schemas()
|
||||||
|
|
||||||
|
# Temporarily block all permission_resources_sql hooks to simulate no rules
|
||||||
|
original_hook = pm.hook.permission_resources_sql
|
||||||
|
|
||||||
|
def empty_hook(*args, **kwargs):
|
||||||
|
return []
|
||||||
|
|
||||||
|
pm.hook.permission_resources_sql = empty_hook
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Call build_allowed_resources_sql directly which will hit the no-rules code path
|
||||||
|
sql, params = await build_allowed_resources_sql(
|
||||||
|
ds, actor={"id": "nobody"}, action="view-table"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Execute the query to verify it has correct column structure
|
||||||
|
result = await ds.get_internal_database().execute(sql, params)
|
||||||
|
|
||||||
|
# Should have 3 columns: parent, child, reason
|
||||||
|
# This assertion would fail if the empty result only had 2 columns
|
||||||
|
assert (
|
||||||
|
len(result.columns) == 3
|
||||||
|
), f"Expected 3 columns, got {len(result.columns)}: {result.columns}"
|
||||||
|
assert result.columns == ["parent", "child", "reason"]
|
||||||
|
|
||||||
|
# Should have no rows (no rules = no access)
|
||||||
|
assert len(result.rows) == 0
|
||||||
|
|
||||||
|
finally:
|
||||||
|
# Restore original hook
|
||||||
|
pm.hook.permission_resources_sql = original_hook
|
||||||
|
|
|
||||||
|
|
@ -1,133 +0,0 @@
|
||||||
"""
|
|
||||||
Test for actor restrictions bug with database-level config.
|
|
||||||
|
|
||||||
This test currently FAILS, demonstrating the bug where database-level
|
|
||||||
config allow blocks can bypass table-level restrictions.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from datasette.app import Datasette
|
|
||||||
from datasette.resources import TableResource
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_table_restrictions_not_bypassed_by_database_level_config():
|
|
||||||
"""
|
|
||||||
Actor restrictions should act as hard limits that config cannot override.
|
|
||||||
|
|
||||||
BUG: When an actor has table-level restrictions (e.g., only table2 and table3)
|
|
||||||
but config has a database-level allow block, the database-level config rule
|
|
||||||
currently allows ALL tables, not just those in the restriction allowlist.
|
|
||||||
|
|
||||||
This test documents the expected behavior and will FAIL until the bug is fixed.
|
|
||||||
"""
|
|
||||||
# Config grants access at DATABASE level (not table level)
|
|
||||||
config = {
|
|
||||||
"databases": {
|
|
||||||
"test_db_rnbbdlc": {
|
|
||||||
"allow": {
|
|
||||||
"id": "user"
|
|
||||||
} # Database-level allow - grants access to all tables
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
ds = Datasette(config=config)
|
|
||||||
await ds.invoke_startup()
|
|
||||||
db = ds.add_memory_database("test_db_rnbbdlc")
|
|
||||||
await db.execute_write("create table table1 (id integer primary key)")
|
|
||||||
await db.execute_write("create table table2 (id integer primary key)")
|
|
||||||
await db.execute_write("create table table3 (id integer primary key)")
|
|
||||||
await db.execute_write("create table table4 (id integer primary key)")
|
|
||||||
|
|
||||||
# Actor restricted to ONLY table2 and table3
|
|
||||||
# Even though config allows the whole database, restrictions should limit access
|
|
||||||
actor = {
|
|
||||||
"id": "user",
|
|
||||||
"_r": {
|
|
||||||
"r": { # Resource-level (table-level) restrictions
|
|
||||||
"test_db_rnbbdlc": {
|
|
||||||
"table2": ["vt"], # vt = view-table abbreviation
|
|
||||||
"table3": ["vt"],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
# table2 should be allowed (in restriction allowlist AND config allows)
|
|
||||||
result = await ds.allowed(
|
|
||||||
action="view-table",
|
|
||||||
resource=TableResource("test_db_rnbbdlc", "table2"),
|
|
||||||
actor=actor,
|
|
||||||
)
|
|
||||||
assert result is True, "table2 should be allowed - in restriction allowlist"
|
|
||||||
|
|
||||||
# table3 should be allowed (in restriction allowlist AND config allows)
|
|
||||||
result = await ds.allowed(
|
|
||||||
action="view-table",
|
|
||||||
resource=TableResource("test_db_rnbbdlc", "table3"),
|
|
||||||
actor=actor,
|
|
||||||
)
|
|
||||||
assert result is True, "table3 should be allowed - in restriction allowlist"
|
|
||||||
|
|
||||||
# table1 should be DENIED (NOT in restriction allowlist)
|
|
||||||
# Even though database-level config allows it, restrictions should deny it
|
|
||||||
result = await ds.allowed(
|
|
||||||
action="view-table",
|
|
||||||
resource=TableResource("test_db_rnbbdlc", "table1"),
|
|
||||||
actor=actor,
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
result is False
|
|
||||||
), "table1 should be DENIED - not in restriction allowlist, config cannot override"
|
|
||||||
|
|
||||||
# table4 should be DENIED (NOT in restriction allowlist)
|
|
||||||
# Even though database-level config allows it, restrictions should deny it
|
|
||||||
result = await ds.allowed(
|
|
||||||
action="view-table",
|
|
||||||
resource=TableResource("test_db_rnbbdlc", "table4"),
|
|
||||||
actor=actor,
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
result is False
|
|
||||||
), "table4 should be DENIED - not in restriction allowlist, config cannot override"
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_database_restrictions_with_database_level_config():
|
|
||||||
"""
|
|
||||||
Verify that database-level restrictions work correctly with database-level config.
|
|
||||||
|
|
||||||
This should pass - it's testing the case where restriction granularity
|
|
||||||
matches config granularity.
|
|
||||||
"""
|
|
||||||
config = {
|
|
||||||
"databases": {"test_db_rwdl": {"allow": {"id": "user"}}} # Database-level allow
|
|
||||||
}
|
|
||||||
|
|
||||||
ds = Datasette(config=config)
|
|
||||||
await ds.invoke_startup()
|
|
||||||
db = ds.add_memory_database("test_db_rwdl")
|
|
||||||
await db.execute_write("create table table1 (id integer primary key)")
|
|
||||||
await db.execute_write("create table table2 (id integer primary key)")
|
|
||||||
|
|
||||||
# Actor has database-level restriction (all tables in test_db_rwdl)
|
|
||||||
actor = {
|
|
||||||
"id": "user",
|
|
||||||
"_r": {"d": {"test_db_rwdl": ["vt"]}}, # Database-level restrictions
|
|
||||||
}
|
|
||||||
|
|
||||||
# Both tables should be allowed (database-level restriction matches database-level config)
|
|
||||||
result = await ds.allowed(
|
|
||||||
action="view-table",
|
|
||||||
resource=TableResource("test_db_rwdl", "table1"),
|
|
||||||
actor=actor,
|
|
||||||
)
|
|
||||||
assert result is True, "table1 should be allowed"
|
|
||||||
|
|
||||||
result = await ds.allowed(
|
|
||||||
action="view-table",
|
|
||||||
resource=TableResource("test_db_rwdl", "table2"),
|
|
||||||
actor=actor,
|
|
||||||
)
|
|
||||||
assert result is True, "table2 should be allowed"
|
|
||||||
|
|
@ -8,6 +8,7 @@ based on permission rules from plugins and configuration.
|
||||||
import pytest
|
import pytest
|
||||||
import pytest_asyncio
|
import pytest_asyncio
|
||||||
from datasette.app import Datasette
|
from datasette.app import Datasette
|
||||||
|
from datasette.plugins import pm
|
||||||
from datasette.permissions import PermissionSQL
|
from datasette.permissions import PermissionSQL
|
||||||
from datasette import hookimpl
|
from datasette import hookimpl
|
||||||
|
|
||||||
|
|
@ -57,15 +58,15 @@ async def test_tables_endpoint_global_access(test_ds):
|
||||||
def rules_callback(datasette, actor, action):
|
def rules_callback(datasette, actor, action):
|
||||||
if actor and actor.get("id") == "alice":
|
if actor and actor.get("id") == "alice":
|
||||||
sql = "SELECT NULL AS parent, NULL AS child, 1 AS allow, 'global: alice has access' AS reason"
|
sql = "SELECT NULL AS parent, NULL AS child, 1 AS allow, 'global: alice has access' AS reason"
|
||||||
return PermissionSQL(sql=sql)
|
return PermissionSQL(source="test", sql=sql, params={})
|
||||||
return None
|
return None
|
||||||
|
|
||||||
plugin = PermissionRulesPlugin(rules_callback)
|
plugin = PermissionRulesPlugin(rules_callback)
|
||||||
test_ds.pm.register(plugin, name="test_plugin")
|
pm.register(plugin, name="test_plugin")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Use the allowed_resources API directly
|
# Use the allowed_resources API directly
|
||||||
page = await test_ds.allowed_resources("view-table", {"id": "alice"})
|
tables = await test_ds.allowed_resources("view-table", {"id": "alice"})
|
||||||
|
|
||||||
# Convert to the format the endpoint returns
|
# Convert to the format the endpoint returns
|
||||||
result = [
|
result = [
|
||||||
|
|
@ -73,7 +74,7 @@ async def test_tables_endpoint_global_access(test_ds):
|
||||||
"name": f"{t.parent}/{t.child}",
|
"name": f"{t.parent}/{t.child}",
|
||||||
"url": test_ds.urls.table(t.parent, t.child),
|
"url": test_ds.urls.table(t.parent, t.child),
|
||||||
}
|
}
|
||||||
for t in page.resources
|
for t in tables
|
||||||
]
|
]
|
||||||
|
|
||||||
# Alice should see all tables
|
# Alice should see all tables
|
||||||
|
|
@ -86,7 +87,7 @@ async def test_tables_endpoint_global_access(test_ds):
|
||||||
assert "production/orders" in table_names
|
assert "production/orders" in table_names
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
test_ds.pm.unregister(plugin, name="test_plugin")
|
pm.unregister(plugin, name="test_plugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -97,14 +98,14 @@ async def test_tables_endpoint_database_restriction(test_ds):
|
||||||
if actor and actor.get("role") == "analyst":
|
if actor and actor.get("role") == "analyst":
|
||||||
# Allow only analytics database
|
# Allow only analytics database
|
||||||
sql = "SELECT 'analytics' AS parent, NULL AS child, 1 AS allow, 'analyst access' AS reason"
|
sql = "SELECT 'analytics' AS parent, NULL AS child, 1 AS allow, 'analyst access' AS reason"
|
||||||
return PermissionSQL(sql=sql)
|
return PermissionSQL(source="test", sql=sql, params={})
|
||||||
return None
|
return None
|
||||||
|
|
||||||
plugin = PermissionRulesPlugin(rules_callback)
|
plugin = PermissionRulesPlugin(rules_callback)
|
||||||
test_ds.pm.register(plugin, name="test_plugin")
|
pm.register(plugin, name="test_plugin")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
page = await test_ds.allowed_resources(
|
tables = await test_ds.allowed_resources(
|
||||||
"view-table", {"id": "bob", "role": "analyst"}
|
"view-table", {"id": "bob", "role": "analyst"}
|
||||||
)
|
)
|
||||||
result = [
|
result = [
|
||||||
|
|
@ -112,7 +113,7 @@ async def test_tables_endpoint_database_restriction(test_ds):
|
||||||
"name": f"{t.parent}/{t.child}",
|
"name": f"{t.parent}/{t.child}",
|
||||||
"url": test_ds.urls.table(t.parent, t.child),
|
"url": test_ds.urls.table(t.parent, t.child),
|
||||||
}
|
}
|
||||||
for t in page.resources
|
for t in tables
|
||||||
]
|
]
|
||||||
|
|
||||||
# Bob should only see analytics tables
|
# Bob should only see analytics tables
|
||||||
|
|
@ -129,7 +130,7 @@ async def test_tables_endpoint_database_restriction(test_ds):
|
||||||
# Note: default_permissions.py provides default allows, so we just check analytics are present
|
# Note: default_permissions.py provides default allows, so we just check analytics are present
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
test_ds.pm.unregister(plugin, name="test_plugin")
|
pm.unregister(plugin, name="test_plugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -144,20 +145,20 @@ async def test_tables_endpoint_table_exception(test_ds):
|
||||||
UNION ALL
|
UNION ALL
|
||||||
SELECT 'analytics' AS parent, 'users' AS child, 1 AS allow, 'carol exception' AS reason
|
SELECT 'analytics' AS parent, 'users' AS child, 1 AS allow, 'carol exception' AS reason
|
||||||
"""
|
"""
|
||||||
return PermissionSQL(sql=sql)
|
return PermissionSQL(source="test", sql=sql, params={})
|
||||||
return None
|
return None
|
||||||
|
|
||||||
plugin = PermissionRulesPlugin(rules_callback)
|
plugin = PermissionRulesPlugin(rules_callback)
|
||||||
test_ds.pm.register(plugin, name="test_plugin")
|
pm.register(plugin, name="test_plugin")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
page = await test_ds.allowed_resources("view-table", {"id": "carol"})
|
tables = await test_ds.allowed_resources("view-table", {"id": "carol"})
|
||||||
result = [
|
result = [
|
||||||
{
|
{
|
||||||
"name": f"{t.parent}/{t.child}",
|
"name": f"{t.parent}/{t.child}",
|
||||||
"url": test_ds.urls.table(t.parent, t.child),
|
"url": test_ds.urls.table(t.parent, t.child),
|
||||||
}
|
}
|
||||||
for t in page.resources
|
for t in tables
|
||||||
]
|
]
|
||||||
|
|
||||||
# Carol should see analytics.users but not other analytics tables
|
# Carol should see analytics.users but not other analytics tables
|
||||||
|
|
@ -171,7 +172,7 @@ async def test_tables_endpoint_table_exception(test_ds):
|
||||||
assert "analytics/sensitive" not in table_names
|
assert "analytics/sensitive" not in table_names
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
test_ds.pm.unregister(plugin, name="test_plugin")
|
pm.unregister(plugin, name="test_plugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -186,14 +187,14 @@ async def test_tables_endpoint_deny_overrides_allow(test_ds):
|
||||||
UNION ALL
|
UNION ALL
|
||||||
SELECT 'analytics' AS parent, 'sensitive' AS child, 0 AS allow, 'deny sensitive' AS reason
|
SELECT 'analytics' AS parent, 'sensitive' AS child, 0 AS allow, 'deny sensitive' AS reason
|
||||||
"""
|
"""
|
||||||
return PermissionSQL(sql=sql)
|
return PermissionSQL(source="test", sql=sql, params={})
|
||||||
return None
|
return None
|
||||||
|
|
||||||
plugin = PermissionRulesPlugin(rules_callback)
|
plugin = PermissionRulesPlugin(rules_callback)
|
||||||
test_ds.pm.register(plugin, name="test_plugin")
|
pm.register(plugin, name="test_plugin")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
page = await test_ds.allowed_resources(
|
tables = await test_ds.allowed_resources(
|
||||||
"view-table", {"id": "bob", "role": "analyst"}
|
"view-table", {"id": "bob", "role": "analyst"}
|
||||||
)
|
)
|
||||||
result = [
|
result = [
|
||||||
|
|
@ -201,7 +202,7 @@ async def test_tables_endpoint_deny_overrides_allow(test_ds):
|
||||||
"name": f"{t.parent}/{t.child}",
|
"name": f"{t.parent}/{t.child}",
|
||||||
"url": test_ds.urls.table(t.parent, t.child),
|
"url": test_ds.urls.table(t.parent, t.child),
|
||||||
}
|
}
|
||||||
for t in page.resources
|
for t in tables
|
||||||
]
|
]
|
||||||
|
|
||||||
analytics_tables = [m for m in result if m["name"].startswith("analytics/")]
|
analytics_tables = [m for m in result if m["name"].startswith("analytics/")]
|
||||||
|
|
@ -213,7 +214,7 @@ async def test_tables_endpoint_deny_overrides_allow(test_ds):
|
||||||
assert "analytics/sensitive" not in table_names
|
assert "analytics/sensitive" not in table_names
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
test_ds.pm.unregister(plugin, name="test_plugin")
|
pm.unregister(plugin, name="test_plugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -229,10 +230,10 @@ async def test_tables_endpoint_no_permissions():
|
||||||
await ds._refresh_schemas()
|
await ds._refresh_schemas()
|
||||||
|
|
||||||
# Unknown actor with no custom permissions
|
# Unknown actor with no custom permissions
|
||||||
page = await ds.allowed_resources("view-table", {"id": "unknown"})
|
tables = await ds.allowed_resources("view-table", {"id": "unknown"})
|
||||||
result = [
|
result = [
|
||||||
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
||||||
for t in page.resources
|
for t in tables
|
||||||
]
|
]
|
||||||
|
|
||||||
# Should see tables (due to default_permissions.py providing default allow)
|
# Should see tables (due to default_permissions.py providing default allow)
|
||||||
|
|
@ -252,20 +253,20 @@ async def test_tables_endpoint_specific_table_only(test_ds):
|
||||||
UNION ALL
|
UNION ALL
|
||||||
SELECT 'production' AS parent, 'orders' AS child, 1 AS allow, 'specific table 2' AS reason
|
SELECT 'production' AS parent, 'orders' AS child, 1 AS allow, 'specific table 2' AS reason
|
||||||
"""
|
"""
|
||||||
return PermissionSQL(sql=sql)
|
return PermissionSQL(source="test", sql=sql, params={})
|
||||||
return None
|
return None
|
||||||
|
|
||||||
plugin = PermissionRulesPlugin(rules_callback)
|
plugin = PermissionRulesPlugin(rules_callback)
|
||||||
test_ds.pm.register(plugin, name="test_plugin")
|
pm.register(plugin, name="test_plugin")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
page = await test_ds.allowed_resources("view-table", {"id": "dave"})
|
tables = await test_ds.allowed_resources("view-table", {"id": "dave"})
|
||||||
result = [
|
result = [
|
||||||
{
|
{
|
||||||
"name": f"{t.parent}/{t.child}",
|
"name": f"{t.parent}/{t.child}",
|
||||||
"url": test_ds.urls.table(t.parent, t.child),
|
"url": test_ds.urls.table(t.parent, t.child),
|
||||||
}
|
}
|
||||||
for t in page.resources
|
for t in tables
|
||||||
]
|
]
|
||||||
|
|
||||||
# Should see only the two specifically allowed tables
|
# Should see only the two specifically allowed tables
|
||||||
|
|
@ -279,7 +280,7 @@ async def test_tables_endpoint_specific_table_only(test_ds):
|
||||||
assert "production/orders" in table_names
|
assert "production/orders" in table_names
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
test_ds.pm.unregister(plugin, name="test_plugin")
|
pm.unregister(plugin, name="test_plugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -290,27 +291,27 @@ async def test_tables_endpoint_empty_result(test_ds):
|
||||||
if actor and actor.get("id") == "blocked":
|
if actor and actor.get("id") == "blocked":
|
||||||
# Global deny
|
# Global deny
|
||||||
sql = "SELECT NULL AS parent, NULL AS child, 0 AS allow, 'global deny' AS reason"
|
sql = "SELECT NULL AS parent, NULL AS child, 0 AS allow, 'global deny' AS reason"
|
||||||
return PermissionSQL(sql=sql)
|
return PermissionSQL(source="test", sql=sql, params={})
|
||||||
return None
|
return None
|
||||||
|
|
||||||
plugin = PermissionRulesPlugin(rules_callback)
|
plugin = PermissionRulesPlugin(rules_callback)
|
||||||
test_ds.pm.register(plugin, name="test_plugin")
|
pm.register(plugin, name="test_plugin")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
page = await test_ds.allowed_resources("view-table", {"id": "blocked"})
|
tables = await test_ds.allowed_resources("view-table", {"id": "blocked"})
|
||||||
result = [
|
result = [
|
||||||
{
|
{
|
||||||
"name": f"{t.parent}/{t.child}",
|
"name": f"{t.parent}/{t.child}",
|
||||||
"url": test_ds.urls.table(t.parent, t.child),
|
"url": test_ds.urls.table(t.parent, t.child),
|
||||||
}
|
}
|
||||||
for t in page.resources
|
for t in tables
|
||||||
]
|
]
|
||||||
|
|
||||||
# Global deny should block access to all tables
|
# Global deny should block access to all tables
|
||||||
assert len(result) == 0
|
assert len(result) == 0
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
test_ds.pm.unregister(plugin, name="test_plugin")
|
pm.unregister(plugin, name="test_plugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -327,11 +328,11 @@ async def test_tables_endpoint_no_query_returns_all():
|
||||||
await ds._refresh_schemas()
|
await ds._refresh_schemas()
|
||||||
|
|
||||||
# Get all tables without query
|
# Get all tables without query
|
||||||
page = await ds.allowed_resources("view-table", None)
|
all_tables = await ds.allowed_resources("view-table", None)
|
||||||
|
|
||||||
# Should return all tables with truncated: false
|
# Should return all tables with truncated: false
|
||||||
assert len(page.resources) >= 3
|
assert len(all_tables) >= 3
|
||||||
table_names = {f"{t.parent}/{t.child}" for t in page.resources}
|
table_names = {f"{t.parent}/{t.child}" for t in all_tables}
|
||||||
assert "test_db/users" in table_names
|
assert "test_db/users" in table_names
|
||||||
assert "test_db/posts" in table_names
|
assert "test_db/posts" in table_names
|
||||||
assert "test_db/comments" in table_names
|
assert "test_db/comments" in table_names
|
||||||
|
|
@ -349,13 +350,12 @@ async def test_tables_endpoint_truncation():
|
||||||
await db.execute_write(f"CREATE TABLE table_{i:03d} (id INTEGER)")
|
await db.execute_write(f"CREATE TABLE table_{i:03d} (id INTEGER)")
|
||||||
await ds._refresh_schemas()
|
await ds._refresh_schemas()
|
||||||
|
|
||||||
# Get all tables - should be paginated with limit=100 by default
|
# Get all tables - should be truncated
|
||||||
page = await ds.allowed_resources("view-table", None)
|
all_tables = await ds.allowed_resources("view-table", None)
|
||||||
big_db_tables = [t for t in page.resources if t.parent == "big_db"]
|
big_db_tables = [t for t in all_tables if t.parent == "big_db"]
|
||||||
|
|
||||||
# Should have exactly 100 tables in first page (default limit)
|
# Should have exactly 105 tables in the database
|
||||||
assert len(big_db_tables) == 100
|
assert len(big_db_tables) == 105
|
||||||
assert page.next is not None # More results available
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -374,10 +374,10 @@ async def test_tables_endpoint_search_single_term():
|
||||||
await ds._refresh_schemas()
|
await ds._refresh_schemas()
|
||||||
|
|
||||||
# Get all tables in the new format
|
# Get all tables in the new format
|
||||||
page = await ds.allowed_resources("view-table", None)
|
all_tables = await ds.allowed_resources("view-table", None)
|
||||||
matches = [
|
matches = [
|
||||||
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
||||||
for t in page.resources
|
for t in all_tables
|
||||||
]
|
]
|
||||||
|
|
||||||
# Filter for "user" (extract table name from "db/table")
|
# Filter for "user" (extract table name from "db/table")
|
||||||
|
|
@ -411,10 +411,10 @@ async def test_tables_endpoint_search_multiple_terms():
|
||||||
await ds._refresh_schemas()
|
await ds._refresh_schemas()
|
||||||
|
|
||||||
# Get all tables in the new format
|
# Get all tables in the new format
|
||||||
page = await ds.allowed_resources("view-table", None)
|
all_tables = await ds.allowed_resources("view-table", None)
|
||||||
matches = [
|
matches = [
|
||||||
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
||||||
for t in page.resources
|
for t in all_tables
|
||||||
]
|
]
|
||||||
|
|
||||||
# Filter for "user profile" (two terms, extract table name from "db/table")
|
# Filter for "user profile" (two terms, extract table name from "db/table")
|
||||||
|
|
@ -453,10 +453,10 @@ async def test_tables_endpoint_search_ordering():
|
||||||
await ds._refresh_schemas()
|
await ds._refresh_schemas()
|
||||||
|
|
||||||
# Get all tables in the new format
|
# Get all tables in the new format
|
||||||
page = await ds.allowed_resources("view-table", None)
|
all_tables = await ds.allowed_resources("view-table", None)
|
||||||
matches = [
|
matches = [
|
||||||
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
||||||
for t in page.resources
|
for t in all_tables
|
||||||
]
|
]
|
||||||
|
|
||||||
# Filter for "user" and sort by table name length
|
# Filter for "user" and sort by table name length
|
||||||
|
|
@ -490,10 +490,10 @@ async def test_tables_endpoint_search_case_insensitive():
|
||||||
await ds._refresh_schemas()
|
await ds._refresh_schemas()
|
||||||
|
|
||||||
# Get all tables in the new format
|
# Get all tables in the new format
|
||||||
page = await ds.allowed_resources("view-table", None)
|
all_tables = await ds.allowed_resources("view-table", None)
|
||||||
matches = [
|
matches = [
|
||||||
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
||||||
for t in page.resources
|
for t in all_tables
|
||||||
]
|
]
|
||||||
|
|
||||||
# Filter for "user" (lowercase) should match all case variants
|
# Filter for "user" (lowercase) should match all case variants
|
||||||
|
|
@ -525,10 +525,10 @@ async def test_tables_endpoint_search_no_matches():
|
||||||
await ds._refresh_schemas()
|
await ds._refresh_schemas()
|
||||||
|
|
||||||
# Get all tables in the new format
|
# Get all tables in the new format
|
||||||
page = await ds.allowed_resources("view-table", None)
|
all_tables = await ds.allowed_resources("view-table", None)
|
||||||
matches = [
|
matches = [
|
||||||
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
||||||
for t in page.resources
|
for t in all_tables
|
||||||
]
|
]
|
||||||
|
|
||||||
# Filter for "zzz" which doesn't exist
|
# Filter for "zzz" which doesn't exist
|
||||||
|
|
@ -563,10 +563,10 @@ async def test_tables_endpoint_config_database_allow():
|
||||||
await ds._refresh_schemas()
|
await ds._refresh_schemas()
|
||||||
|
|
||||||
# Root user should see restricted_db tables
|
# Root user should see restricted_db tables
|
||||||
root_page = await ds.allowed_resources("view-table", {"id": "root"})
|
root_tables = await ds.allowed_resources("view-table", {"id": "root"})
|
||||||
root_list = [
|
root_list = [
|
||||||
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
||||||
for t in root_page.resources
|
for t in root_tables
|
||||||
]
|
]
|
||||||
restricted_tables_root = [
|
restricted_tables_root = [
|
||||||
m for m in root_list if m["name"].startswith("restricted_db/")
|
m for m in root_list if m["name"].startswith("restricted_db/")
|
||||||
|
|
@ -577,10 +577,10 @@ async def test_tables_endpoint_config_database_allow():
|
||||||
assert "restricted_db/posts" in table_names
|
assert "restricted_db/posts" in table_names
|
||||||
|
|
||||||
# Alice should NOT see restricted_db tables
|
# Alice should NOT see restricted_db tables
|
||||||
alice_page = await ds.allowed_resources("view-table", {"id": "alice"})
|
alice_tables = await ds.allowed_resources("view-table", {"id": "alice"})
|
||||||
alice_list = [
|
alice_list = [
|
||||||
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
{"name": f"{t.parent}/{t.child}", "url": ds.urls.table(t.parent, t.child)}
|
||||||
for t in alice_page.resources
|
for t in alice_tables
|
||||||
]
|
]
|
||||||
restricted_tables_alice = [
|
restricted_tables_alice = [
|
||||||
m for m in alice_list if m["name"].startswith("restricted_db/")
|
m for m in alice_list if m["name"].startswith("restricted_db/")
|
||||||
|
|
|
||||||
|
|
@ -875,7 +875,7 @@ async def test_settings_json(ds_client):
|
||||||
"default_page_size": 50,
|
"default_page_size": 50,
|
||||||
"default_facet_size": 30,
|
"default_facet_size": 30,
|
||||||
"default_allow_sql": True,
|
"default_allow_sql": True,
|
||||||
"facet_suggest_time_limit_ms": 200,
|
"facet_suggest_time_limit_ms": 50,
|
||||||
"facet_time_limit_ms": 200,
|
"facet_time_limit_ms": 200,
|
||||||
"max_returned_rows": 100,
|
"max_returned_rows": 100,
|
||||||
"max_insert_rows": 100,
|
"max_insert_rows": 100,
|
||||||
|
|
|
||||||
|
|
@ -142,12 +142,10 @@ def test_metadata_yaml():
|
||||||
settings=[],
|
settings=[],
|
||||||
secret=None,
|
secret=None,
|
||||||
root=False,
|
root=False,
|
||||||
default_deny=False,
|
|
||||||
token=None,
|
token=None,
|
||||||
actor=None,
|
actor=None,
|
||||||
version_note=None,
|
version_note=None,
|
||||||
get=None,
|
get=None,
|
||||||
headers=False,
|
|
||||||
help_settings=False,
|
help_settings=False,
|
||||||
pdb=False,
|
pdb=False,
|
||||||
crossdb=False,
|
crossdb=False,
|
||||||
|
|
@ -449,6 +447,17 @@ def test_serve_duplicate_database_names(tmpdir):
|
||||||
assert {db["name"] for db in databases} == {"db", "db_2"}
|
assert {db["name"] for db in databases} == {"db", "db_2"}
|
||||||
|
|
||||||
|
|
||||||
|
def test_serve_deduplicate_same_database_path(tmpdir):
|
||||||
|
"'datasette db.db db.db' should only attach one database, /db"
|
||||||
|
runner = CliRunner()
|
||||||
|
db_path = str(tmpdir / "db.db")
|
||||||
|
sqlite3.connect(db_path).execute("vacuum")
|
||||||
|
result = runner.invoke(cli, [db_path, db_path, "--get", "/-/databases.json"])
|
||||||
|
assert result.exit_code == 0, result.output
|
||||||
|
databases = json.loads(result.output)
|
||||||
|
assert {db["name"] for db in databases} == {"db"}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"filename", ["test-database (1).sqlite", "database (1).sqlite"]
|
"filename", ["test-database (1).sqlite", "database (1).sqlite"]
|
||||||
)
|
)
|
||||||
|
|
@ -487,57 +496,3 @@ def test_internal_db(tmpdir):
|
||||||
)
|
)
|
||||||
assert result.exit_code == 0
|
assert result.exit_code == 0
|
||||||
assert internal_path.exists()
|
assert internal_path.exists()
|
||||||
|
|
||||||
|
|
||||||
def test_duplicate_database_files_error(tmpdir):
|
|
||||||
"""Test that passing the same database file multiple times raises an error"""
|
|
||||||
runner = CliRunner()
|
|
||||||
db_path = str(tmpdir / "test.db")
|
|
||||||
sqlite3.connect(db_path).execute("vacuum")
|
|
||||||
|
|
||||||
# Test with exact duplicate
|
|
||||||
result = runner.invoke(cli, ["serve", db_path, db_path, "--get", "/"])
|
|
||||||
assert result.exit_code == 1
|
|
||||||
assert "Duplicate database file" in result.output
|
|
||||||
assert "both refer to" in result.output
|
|
||||||
|
|
||||||
# Test with different paths to same file (relative vs absolute)
|
|
||||||
result2 = runner.invoke(
|
|
||||||
cli, ["serve", db_path, str(pathlib.Path(db_path).resolve()), "--get", "/"]
|
|
||||||
)
|
|
||||||
assert result2.exit_code == 1
|
|
||||||
assert "Duplicate database file" in result2.output
|
|
||||||
|
|
||||||
# Test that a file in the config_dir can't also be passed explicitly
|
|
||||||
config_dir = tmpdir / "config"
|
|
||||||
config_dir.mkdir()
|
|
||||||
config_db_path = str(config_dir / "data.db")
|
|
||||||
sqlite3.connect(config_db_path).execute("vacuum")
|
|
||||||
|
|
||||||
result3 = runner.invoke(
|
|
||||||
cli, ["serve", config_db_path, str(config_dir), "--get", "/"]
|
|
||||||
)
|
|
||||||
assert result3.exit_code == 1
|
|
||||||
assert "Duplicate database file" in result3.output
|
|
||||||
assert "both refer to" in result3.output
|
|
||||||
|
|
||||||
# Test that mixing a file NOT in the directory with a directory works fine
|
|
||||||
other_db_path = str(tmpdir / "other.db")
|
|
||||||
sqlite3.connect(other_db_path).execute("vacuum")
|
|
||||||
|
|
||||||
result4 = runner.invoke(
|
|
||||||
cli, ["serve", other_db_path, str(config_dir), "--get", "/-/databases.json"]
|
|
||||||
)
|
|
||||||
assert result4.exit_code == 0
|
|
||||||
databases = json.loads(result4.output)
|
|
||||||
assert {db["name"] for db in databases} == {"other", "data"}
|
|
||||||
|
|
||||||
# Test that multiple directories raise an error
|
|
||||||
config_dir2 = tmpdir / "config2"
|
|
||||||
config_dir2.mkdir()
|
|
||||||
|
|
||||||
result5 = runner.invoke(
|
|
||||||
cli, ["serve", str(config_dir), str(config_dir2), "--get", "/"]
|
|
||||||
)
|
|
||||||
assert result5.exit_code == 1
|
|
||||||
assert "Cannot pass multiple directories" in result5.output
|
|
||||||
|
|
|
||||||
|
|
@ -52,26 +52,6 @@ def test_serve_with_get(tmp_path_factory):
|
||||||
pm.unregister(to_unregister)
|
pm.unregister(to_unregister)
|
||||||
|
|
||||||
|
|
||||||
def test_serve_with_get_headers():
|
|
||||||
runner = CliRunner()
|
|
||||||
result = runner.invoke(
|
|
||||||
cli,
|
|
||||||
[
|
|
||||||
"serve",
|
|
||||||
"--memory",
|
|
||||||
"--get",
|
|
||||||
"/_memory/",
|
|
||||||
"--headers",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
# exit_code is 1 because it wasn't a 200 response
|
|
||||||
assert result.exit_code == 1, result.output
|
|
||||||
lines = result.output.splitlines()
|
|
||||||
assert lines and lines[0] == "HTTP/1.1 302"
|
|
||||||
assert "location: /_memory" in lines
|
|
||||||
assert "content-type: text/html; charset=utf-8" in lines
|
|
||||||
|
|
||||||
|
|
||||||
def test_serve_with_get_and_token():
|
def test_serve_with_get_and_token():
|
||||||
runner = CliRunner()
|
runner = CliRunner()
|
||||||
result1 = runner.invoke(
|
result1 = runner.invoke(
|
||||||
|
|
|
||||||
|
|
@ -161,3 +161,130 @@ async def test_view_instance_allow_block():
|
||||||
|
|
||||||
assert await ds.allowed(action="view-instance", actor={"id": "alice"})
|
assert await ds.allowed(action="view-instance", actor={"id": "alice"})
|
||||||
assert not await ds.allowed(action="view-instance", actor={"id": "bob"})
|
assert not await ds.allowed(action="view-instance", actor={"id": "bob"})
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_private_mode_denies_all_by_default():
|
||||||
|
"""Test --private flag blocks all access unless explicitly allowed"""
|
||||||
|
ds = Datasette(memory=True, private=True)
|
||||||
|
ds.add_database(Database(ds, memory_name="test_memory"), name="test")
|
||||||
|
await ds.invoke_startup()
|
||||||
|
await ds.refresh_schemas()
|
||||||
|
|
||||||
|
# Unauthenticated access should be denied for all default actions
|
||||||
|
assert not await ds.allowed(action="view-instance", actor=None)
|
||||||
|
assert not await ds.allowed(
|
||||||
|
action="view-database", resource=DatabaseResource(database="test"), actor=None
|
||||||
|
)
|
||||||
|
assert not await ds.allowed(
|
||||||
|
action="view-table",
|
||||||
|
resource=TableResource(database="test", table="test"),
|
||||||
|
actor=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Even authenticated users should be denied in private mode
|
||||||
|
assert not await ds.allowed(action="view-instance", actor={"id": "alice"})
|
||||||
|
assert not await ds.allowed(
|
||||||
|
action="view-database",
|
||||||
|
resource=DatabaseResource(database="test"),
|
||||||
|
actor={"id": "alice"},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_private_mode_with_explicit_allow():
|
||||||
|
"""Test --private flag allows explicitly configured permissions"""
|
||||||
|
config = {"permissions": {"view-instance": {"id": "alice"}}}
|
||||||
|
ds = Datasette(memory=True, private=True, config=config)
|
||||||
|
ds.add_database(Database(ds, memory_name="test_memory"), name="test")
|
||||||
|
await ds.invoke_startup()
|
||||||
|
await ds.refresh_schemas()
|
||||||
|
|
||||||
|
# Alice should be allowed due to explicit config
|
||||||
|
assert await ds.allowed(action="view-instance", actor={"id": "alice"})
|
||||||
|
|
||||||
|
# Bob should still be denied
|
||||||
|
assert not await ds.allowed(action="view-instance", actor={"id": "bob"})
|
||||||
|
|
||||||
|
# Unauthenticated should be denied
|
||||||
|
assert not await ds.allowed(action="view-instance", actor=None)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_require_auth_mode_allows_authenticated():
|
||||||
|
"""Test --require-auth flag allows actors with id"""
|
||||||
|
ds = Datasette(memory=True, require_auth=True)
|
||||||
|
ds.add_database(Database(ds, memory_name="test_memory"), name="test")
|
||||||
|
await ds.invoke_startup()
|
||||||
|
await ds.refresh_schemas()
|
||||||
|
|
||||||
|
# Authenticated users should be allowed
|
||||||
|
assert await ds.allowed(action="view-instance", actor={"id": "alice"})
|
||||||
|
assert await ds.allowed(
|
||||||
|
action="view-database",
|
||||||
|
resource=DatabaseResource(database="test"),
|
||||||
|
actor={"id": "bob"},
|
||||||
|
)
|
||||||
|
assert await ds.allowed(
|
||||||
|
action="view-table",
|
||||||
|
resource=TableResource(database="test", table="test"),
|
||||||
|
actor={"id": "charlie"},
|
||||||
|
)
|
||||||
|
|
||||||
|
# Unauthenticated access should be denied
|
||||||
|
assert not await ds.allowed(action="view-instance", actor=None)
|
||||||
|
assert not await ds.allowed(
|
||||||
|
action="view-database", resource=DatabaseResource(database="test"), actor=None
|
||||||
|
)
|
||||||
|
|
||||||
|
# Actor without id should be denied
|
||||||
|
assert not await ds.allowed(action="view-instance", actor={"name": "anonymous"})
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_require_auth_mode_with_restrictions():
|
||||||
|
"""Test --require-auth mode works with actor restrictions"""
|
||||||
|
# Test with actor that has restrictions
|
||||||
|
ds = Datasette(memory=True, require_auth=True)
|
||||||
|
ds.add_database(Database(ds, memory_name="test_memory"), name="test")
|
||||||
|
await ds.invoke_startup()
|
||||||
|
await ds.refresh_schemas()
|
||||||
|
|
||||||
|
# Actor with restrictions should have those restrictions applied
|
||||||
|
restricted_actor = {"id": "alice", "_r": {"a": ["view-table"]}}
|
||||||
|
# This actor has restrictions, so default allow won't apply
|
||||||
|
# Instead their restrictions define what they can do
|
||||||
|
assert await ds.allowed(
|
||||||
|
action="view-table",
|
||||||
|
resource=TableResource(database="test", table="test"),
|
||||||
|
actor=restricted_actor,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Regular authenticated actor without restrictions should get default allow
|
||||||
|
normal_actor = {"id": "bob"}
|
||||||
|
assert await ds.allowed(
|
||||||
|
action="view-database",
|
||||||
|
resource=DatabaseResource(database="test"),
|
||||||
|
actor=normal_actor,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_normal_mode_allows_all():
|
||||||
|
"""Test default behavior without --private or --require-auth"""
|
||||||
|
ds = Datasette(memory=True, private=False, require_auth=False)
|
||||||
|
ds.add_database(Database(ds, memory_name="test_memory"), name="test")
|
||||||
|
await ds.invoke_startup()
|
||||||
|
await ds.refresh_schemas()
|
||||||
|
|
||||||
|
# Everyone should be allowed in normal mode
|
||||||
|
assert await ds.allowed(action="view-instance", actor=None)
|
||||||
|
assert await ds.allowed(
|
||||||
|
action="view-database", resource=DatabaseResource(database="test"), actor=None
|
||||||
|
)
|
||||||
|
assert await ds.allowed(action="view-instance", actor={"id": "alice"})
|
||||||
|
assert await ds.allowed(
|
||||||
|
action="view-database",
|
||||||
|
resource=DatabaseResource(database="test"),
|
||||||
|
actor={"id": "bob"},
|
||||||
|
)
|
||||||
|
|
|
||||||
|
|
@ -97,10 +97,3 @@ def test_custom_route_pattern_404(custom_pages_client):
|
||||||
assert response.status == 404
|
assert response.status == 404
|
||||||
assert "<h1>Error 404</h1>" in response.text
|
assert "<h1>Error 404</h1>" in response.text
|
||||||
assert ">Oh no</" in response.text
|
assert ">Oh no</" in response.text
|
||||||
|
|
||||||
|
|
||||||
def test_custom_route_pattern_with_slash_slash_302(custom_pages_client):
|
|
||||||
# https://github.com/simonw/datasette/issues/2429
|
|
||||||
response = custom_pages_client.get("//example.com/")
|
|
||||||
assert response.status == 302
|
|
||||||
assert response.headers["location"] == "/example.com"
|
|
||||||
|
|
|
||||||
|
|
@ -1,129 +0,0 @@
|
||||||
import pytest
|
|
||||||
from datasette.app import Datasette
|
|
||||||
from datasette.resources import DatabaseResource, TableResource
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_default_deny_denies_default_permissions():
|
|
||||||
"""Test that default_deny=True denies default permissions"""
|
|
||||||
# Without default_deny, anonymous users can view instance/database/tables
|
|
||||||
ds_normal = Datasette()
|
|
||||||
await ds_normal.invoke_startup()
|
|
||||||
|
|
||||||
# Add a test database
|
|
||||||
db = ds_normal.add_memory_database("test_db_normal")
|
|
||||||
await db.execute_write("create table test_table (id integer primary key)")
|
|
||||||
await ds_normal._refresh_schemas() # Trigger catalog refresh
|
|
||||||
|
|
||||||
# Test default behavior - anonymous user should be able to view
|
|
||||||
response = await ds_normal.client.get("/")
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
response = await ds_normal.client.get("/test_db_normal")
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
response = await ds_normal.client.get("/test_db_normal/test_table")
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
# With default_deny=True, anonymous users should be denied
|
|
||||||
ds_deny = Datasette(default_deny=True)
|
|
||||||
await ds_deny.invoke_startup()
|
|
||||||
|
|
||||||
# Add the same test database
|
|
||||||
db = ds_deny.add_memory_database("test_db_deny")
|
|
||||||
await db.execute_write("create table test_table (id integer primary key)")
|
|
||||||
await ds_deny._refresh_schemas() # Trigger catalog refresh
|
|
||||||
|
|
||||||
# Anonymous user should be denied
|
|
||||||
response = await ds_deny.client.get("/")
|
|
||||||
assert response.status_code == 403
|
|
||||||
|
|
||||||
response = await ds_deny.client.get("/test_db_deny")
|
|
||||||
assert response.status_code == 403
|
|
||||||
|
|
||||||
response = await ds_deny.client.get("/test_db_deny/test_table")
|
|
||||||
assert response.status_code == 403
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_default_deny_with_root_user():
|
|
||||||
"""Test that root user still has access when default_deny=True"""
|
|
||||||
ds = Datasette(default_deny=True)
|
|
||||||
ds.root_enabled = True
|
|
||||||
await ds.invoke_startup()
|
|
||||||
|
|
||||||
root_actor = {"id": "root"}
|
|
||||||
|
|
||||||
# Root user should have all permissions even with default_deny
|
|
||||||
assert await ds.allowed(action="view-instance", actor=root_actor) is True
|
|
||||||
assert (
|
|
||||||
await ds.allowed(
|
|
||||||
action="view-database",
|
|
||||||
actor=root_actor,
|
|
||||||
resource=DatabaseResource("test_db"),
|
|
||||||
)
|
|
||||||
is True
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
await ds.allowed(
|
|
||||||
action="view-table",
|
|
||||||
actor=root_actor,
|
|
||||||
resource=TableResource("test_db", "test_table"),
|
|
||||||
)
|
|
||||||
is True
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
await ds.allowed(
|
|
||||||
action="execute-sql", actor=root_actor, resource=DatabaseResource("test_db")
|
|
||||||
)
|
|
||||||
is True
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_default_deny_with_config_allow():
|
|
||||||
"""Test that config allow rules still work with default_deny=True"""
|
|
||||||
ds = Datasette(default_deny=True, config={"allow": {"id": "user1"}})
|
|
||||||
await ds.invoke_startup()
|
|
||||||
|
|
||||||
# Anonymous user should be denied
|
|
||||||
assert await ds.allowed(action="view-instance", actor=None) is False
|
|
||||||
|
|
||||||
# Authenticated user with explicit permission should have access
|
|
||||||
assert await ds.allowed(action="view-instance", actor={"id": "user1"}) is True
|
|
||||||
|
|
||||||
# Different user should be denied
|
|
||||||
assert await ds.allowed(action="view-instance", actor={"id": "user2"}) is False
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_default_deny_basic_permissions():
|
|
||||||
"""Test that default_deny=True denies basic permissions"""
|
|
||||||
ds = Datasette(default_deny=True)
|
|
||||||
await ds.invoke_startup()
|
|
||||||
|
|
||||||
# Anonymous user should be denied all default permissions
|
|
||||||
assert await ds.allowed(action="view-instance", actor=None) is False
|
|
||||||
assert (
|
|
||||||
await ds.allowed(
|
|
||||||
action="view-database", actor=None, resource=DatabaseResource("test_db")
|
|
||||||
)
|
|
||||||
is False
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
await ds.allowed(
|
|
||||||
action="view-table",
|
|
||||||
actor=None,
|
|
||||||
resource=TableResource("test_db", "test_table"),
|
|
||||||
)
|
|
||||||
is False
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
await ds.allowed(
|
|
||||||
action="execute-sql", actor=None, resource=DatabaseResource("test_db")
|
|
||||||
)
|
|
||||||
is False
|
|
||||||
)
|
|
||||||
|
|
||||||
# Authenticated user without explicit permission should also be denied
|
|
||||||
assert await ds.allowed(action="view-instance", actor={"id": "user"}) is False
|
|
||||||
|
|
@ -28,10 +28,9 @@ def settings_headings():
|
||||||
return get_headings((docs_path / "settings.rst").read_text(), "~")
|
return get_headings((docs_path / "settings.rst").read_text(), "~")
|
||||||
|
|
||||||
|
|
||||||
def test_settings_are_documented(settings_headings, subtests):
|
@pytest.mark.parametrize("setting", app.SETTINGS)
|
||||||
for setting in app.SETTINGS:
|
def test_settings_are_documented(settings_headings, setting):
|
||||||
with subtests.test(setting=setting.name):
|
assert setting.name in settings_headings
|
||||||
assert setting.name in settings_headings
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
@pytest.fixture(scope="session")
|
||||||
|
|
@ -39,21 +38,21 @@ def plugin_hooks_content():
|
||||||
return (docs_path / "plugin_hooks.rst").read_text()
|
return (docs_path / "plugin_hooks.rst").read_text()
|
||||||
|
|
||||||
|
|
||||||
def test_plugin_hooks_are_documented(plugin_hooks_content, subtests):
|
@pytest.mark.parametrize(
|
||||||
|
"plugin", [name for name in dir(app.pm.hook) if not name.startswith("_")]
|
||||||
|
)
|
||||||
|
def test_plugin_hooks_are_documented(plugin, plugin_hooks_content):
|
||||||
headings = set()
|
headings = set()
|
||||||
headings.update(get_headings(plugin_hooks_content, "-"))
|
headings.update(get_headings(plugin_hooks_content, "-"))
|
||||||
headings.update(get_headings(plugin_hooks_content, "~"))
|
headings.update(get_headings(plugin_hooks_content, "~"))
|
||||||
plugins = [name for name in dir(app.pm.hook) if not name.startswith("_")]
|
assert plugin in headings
|
||||||
for plugin in plugins:
|
hook_caller = getattr(app.pm.hook, plugin)
|
||||||
with subtests.test(plugin=plugin):
|
arg_names = [a for a in hook_caller.spec.argnames if a != "__multicall__"]
|
||||||
assert plugin in headings
|
# Check for plugin_name(arg1, arg2, arg3)
|
||||||
hook_caller = getattr(app.pm.hook, plugin)
|
expected = f"{plugin}({', '.join(arg_names)})"
|
||||||
arg_names = [a for a in hook_caller.spec.argnames if a != "__multicall__"]
|
assert (
|
||||||
# Check for plugin_name(arg1, arg2, arg3)
|
expected in plugin_hooks_content
|
||||||
expected = f"{plugin}({', '.join(arg_names)})"
|
), f"Missing from plugin hook documentation: {expected}"
|
||||||
assert (
|
|
||||||
expected in plugin_hooks_content
|
|
||||||
), f"Missing from plugin hook documentation: {expected}"
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
@pytest.fixture(scope="session")
|
||||||
|
|
@ -69,11 +68,9 @@ def documented_views():
|
||||||
return view_labels
|
return view_labels
|
||||||
|
|
||||||
|
|
||||||
def test_view_classes_are_documented(documented_views, subtests):
|
@pytest.mark.parametrize("view_class", [v for v in dir(app) if v.endswith("View")])
|
||||||
view_classes = [v for v in dir(app) if v.endswith("View")]
|
def test_view_classes_are_documented(documented_views, view_class):
|
||||||
for view_class in view_classes:
|
assert view_class in documented_views
|
||||||
with subtests.test(view_class=view_class):
|
|
||||||
assert view_class in documented_views
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
@pytest.fixture(scope="session")
|
||||||
|
|
@ -88,10 +85,9 @@ def documented_table_filters():
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def test_table_filters_are_documented(documented_table_filters, subtests):
|
@pytest.mark.parametrize("filter", [f.key for f in Filters._filters])
|
||||||
for f in Filters._filters:
|
def test_table_filters_are_documented(documented_table_filters, filter):
|
||||||
with subtests.test(filter=f.key):
|
assert filter in documented_table_filters
|
||||||
assert f.key in documented_table_filters
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
@pytest.fixture(scope="session")
|
||||||
|
|
@ -105,10 +101,9 @@ def documented_fns():
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def test_functions_marked_with_documented_are_documented(documented_fns, subtests):
|
@pytest.mark.parametrize("fn", utils.functions_marked_as_documented)
|
||||||
for fn in utils.functions_marked_as_documented:
|
def test_functions_marked_with_documented_are_documented(documented_fns, fn):
|
||||||
with subtests.test(fn=fn.__name__):
|
assert fn.__name__ in documented_fns
|
||||||
assert fn.__name__ in documented_fns
|
|
||||||
|
|
||||||
|
|
||||||
def test_rst_heading_underlines_match_title_length():
|
def test_rst_heading_underlines_match_title_length():
|
||||||
|
|
|
||||||
|
|
@ -2,6 +2,7 @@
|
||||||
# -- start datasette_with_plugin_fixture --
|
# -- start datasette_with_plugin_fixture --
|
||||||
from datasette import hookimpl
|
from datasette import hookimpl
|
||||||
from datasette.app import Datasette
|
from datasette.app import Datasette
|
||||||
|
from datasette.plugins import pm
|
||||||
import pytest
|
import pytest
|
||||||
import pytest_asyncio
|
import pytest_asyncio
|
||||||
|
|
||||||
|
|
@ -17,12 +18,11 @@ async def datasette_with_plugin():
|
||||||
(r"^/error$", lambda: 1 / 0),
|
(r"^/error$", lambda: 1 / 0),
|
||||||
]
|
]
|
||||||
|
|
||||||
datasette = Datasette()
|
pm.register(TestPlugin(), name="undo")
|
||||||
datasette.pm.register(TestPlugin(), name="undo")
|
|
||||||
try:
|
try:
|
||||||
yield datasette
|
yield Datasette()
|
||||||
finally:
|
finally:
|
||||||
datasette.pm.unregister(name="undo")
|
pm.unregister(name="undo")
|
||||||
# -- end datasette_with_plugin_fixture --
|
# -- end datasette_with_plugin_fixture --
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -142,7 +142,7 @@ async def test_database_page(ds_client):
|
||||||
|
|
||||||
# And a list of tables
|
# And a list of tables
|
||||||
for fragment in (
|
for fragment in (
|
||||||
'<h2 id="tables">Tables',
|
'<h2 id="tables">Tables</h2>',
|
||||||
'<h3><a href="/fixtures/sortable">sortable</a></h3>',
|
'<h3><a href="/fixtures/sortable">sortable</a></h3>',
|
||||||
"<p><em>pk, foreign_key_with_label, foreign_key_with_blank_label, ",
|
"<p><em>pk, foreign_key_with_label, foreign_key_with_blank_label, ",
|
||||||
):
|
):
|
||||||
|
|
@ -935,7 +935,7 @@ async def test_edit_sql_link_on_canned_queries(ds_client, path, expected):
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"has_permission",
|
"permission_allowed",
|
||||||
[
|
[
|
||||||
pytest.param(
|
pytest.param(
|
||||||
True,
|
True,
|
||||||
|
|
@ -943,15 +943,15 @@ async def test_edit_sql_link_on_canned_queries(ds_client, path, expected):
|
||||||
False,
|
False,
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
def test_edit_sql_link_not_shown_if_user_lacks_permission(has_permission):
|
def test_edit_sql_link_not_shown_if_user_lacks_permission(permission_allowed):
|
||||||
with make_app_client(
|
with make_app_client(
|
||||||
config={
|
config={
|
||||||
"allow_sql": None if has_permission else {"id": "not-you"},
|
"allow_sql": None if permission_allowed else {"id": "not-you"},
|
||||||
"databases": {"fixtures": {"queries": {"simple": "select 1 + 1"}}},
|
"databases": {"fixtures": {"queries": {"simple": "select 1 + 1"}}},
|
||||||
}
|
}
|
||||||
) as client:
|
) as client:
|
||||||
response = client.get("/fixtures/simple")
|
response = client.get("/fixtures/simple")
|
||||||
if has_permission:
|
if permission_allowed:
|
||||||
assert "Edit SQL" in response.text
|
assert "Edit SQL" in response.text
|
||||||
else:
|
else:
|
||||||
assert "Edit SQL" not in response.text
|
assert "Edit SQL" not in response.text
|
||||||
|
|
@ -1194,21 +1194,6 @@ async def test_actions_page(ds_client):
|
||||||
ds_client.ds.root_enabled = original_root_enabled
|
ds_client.ds.root_enabled = original_root_enabled
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_actions_page_does_not_display_none_string(ds_client):
|
|
||||||
"""Ensure the Resource column doesn't display the string 'None' for null values."""
|
|
||||||
# https://github.com/simonw/datasette/issues/2599
|
|
||||||
original_root_enabled = ds_client.ds.root_enabled
|
|
||||||
try:
|
|
||||||
ds_client.ds.root_enabled = True
|
|
||||||
cookies = {"ds_actor": ds_client.actor_cookie({"id": "root"})}
|
|
||||||
response = await ds_client.get("/-/actions", cookies=cookies)
|
|
||||||
assert response.status_code == 200
|
|
||||||
assert "<code>None</code>" not in response.text
|
|
||||||
finally:
|
|
||||||
ds_client.ds.root_enabled = original_root_enabled
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_permission_debug_tabs_with_query_string(ds_client):
|
async def test_permission_debug_tabs_with_query_string(ds_client):
|
||||||
"""Test that navigation tabs persist query strings across Check, Allowed, and Rules pages"""
|
"""Test that navigation tabs persist query strings across Check, Allowed, and Rules pages"""
|
||||||
|
|
|
||||||
|
|
@ -91,51 +91,3 @@ async def test_internal_foreign_key_references(ds_client):
|
||||||
)
|
)
|
||||||
|
|
||||||
await internal_db.execute_fn(inner)
|
await internal_db.execute_fn(inner)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_stale_catalog_entry_database_fix(tmp_path):
|
|
||||||
"""
|
|
||||||
Test for https://github.com/simonw/datasette/issues/2605
|
|
||||||
|
|
||||||
When the internal database persists across restarts and has entries in
|
|
||||||
catalog_databases for databases that no longer exist, accessing the
|
|
||||||
index page should not cause a 500 error (KeyError).
|
|
||||||
"""
|
|
||||||
from datasette.app import Datasette
|
|
||||||
|
|
||||||
internal_db_path = str(tmp_path / "internal.db")
|
|
||||||
data_db_path = str(tmp_path / "data.db")
|
|
||||||
|
|
||||||
# Create a data database file
|
|
||||||
import sqlite3
|
|
||||||
|
|
||||||
conn = sqlite3.connect(data_db_path)
|
|
||||||
conn.execute("CREATE TABLE test_table (id INTEGER PRIMARY KEY)")
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
# First Datasette instance: with the data database and persistent internal db
|
|
||||||
ds1 = Datasette(files=[data_db_path], internal=internal_db_path)
|
|
||||||
await ds1.invoke_startup()
|
|
||||||
|
|
||||||
# Access the index page to populate the internal catalog
|
|
||||||
response = await ds1.client.get("/")
|
|
||||||
assert "data" in ds1.databases
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
# Second Datasette instance: reusing internal.db but WITHOUT the data database
|
|
||||||
# This simulates restarting Datasette after removing a database
|
|
||||||
ds2 = Datasette(internal=internal_db_path)
|
|
||||||
await ds2.invoke_startup()
|
|
||||||
|
|
||||||
# The database is not in ds2.databases
|
|
||||||
assert "data" not in ds2.databases
|
|
||||||
|
|
||||||
# Accessing the index page should NOT cause a 500 error
|
|
||||||
# This is the bug: it currently raises KeyError when trying to
|
|
||||||
# access ds.databases["data"] for the stale catalog entry
|
|
||||||
response = await ds2.client.get("/")
|
|
||||||
assert response.status_code == 200, (
|
|
||||||
f"Index page should return 200, not {response.status_code}. "
|
|
||||||
"This fails due to stale catalog entries causing KeyError."
|
|
||||||
)
|
|
||||||
|
|
|
||||||
|
|
@ -4,7 +4,7 @@ Tests for the datasette.app.Datasette class
|
||||||
|
|
||||||
import dataclasses
|
import dataclasses
|
||||||
from datasette import Context
|
from datasette import Context
|
||||||
from datasette.app import Datasette, Database, ResourcesSQL
|
from datasette.app import Datasette, Database
|
||||||
from datasette.resources import DatabaseResource
|
from datasette.resources import DatabaseResource
|
||||||
from itsdangerous import BadSignature
|
from itsdangerous import BadSignature
|
||||||
import pytest
|
import pytest
|
||||||
|
|
@ -195,14 +195,3 @@ async def test_apply_metadata_json():
|
||||||
assert (await ds.client.get("/")).status_code == 200
|
assert (await ds.client.get("/")).status_code == 200
|
||||||
value = (await ds.get_instance_metadata()).get("weird_instance_value")
|
value = (await ds.get_instance_metadata()).get("weird_instance_value")
|
||||||
assert value == '{"nested": [1, 2, 3]}'
|
assert value == '{"nested": [1, 2, 3]}'
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_allowed_resources_sql(datasette):
|
|
||||||
result = await datasette.allowed_resources_sql(
|
|
||||||
action="view-table",
|
|
||||||
actor=None,
|
|
||||||
)
|
|
||||||
assert isinstance(result, ResourcesSQL)
|
|
||||||
assert "all_rules AS" in result.sql
|
|
||||||
assert result.params["action"] == "view-table"
|
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,6 @@
|
||||||
import httpx
|
import httpx
|
||||||
import pytest
|
import pytest
|
||||||
import pytest_asyncio
|
import pytest_asyncio
|
||||||
from datasette.app import Datasette
|
|
||||||
|
|
||||||
|
|
||||||
@pytest_asyncio.fixture
|
@pytest_asyncio.fixture
|
||||||
|
|
@ -10,23 +9,6 @@ async def datasette(ds_client):
|
||||||
return ds_client.ds
|
return ds_client.ds
|
||||||
|
|
||||||
|
|
||||||
@pytest_asyncio.fixture
|
|
||||||
async def datasette_with_permissions():
|
|
||||||
"""A datasette instance with permission restrictions for testing"""
|
|
||||||
ds = Datasette(config={"databases": {"test_db": {"allow": {"id": "admin"}}}})
|
|
||||||
await ds.invoke_startup()
|
|
||||||
db = ds.add_memory_database("test_datasette_with_permissions", name="test_db")
|
|
||||||
await db.execute_write(
|
|
||||||
"create table if not exists test_table (id integer primary key, name text)"
|
|
||||||
)
|
|
||||||
await db.execute_write(
|
|
||||||
"insert or ignore into test_table (id, name) values (1, 'Alice')"
|
|
||||||
)
|
|
||||||
# Trigger catalog refresh
|
|
||||||
await ds.client.get("/")
|
|
||||||
return ds
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"method,path,expected_status",
|
"method,path,expected_status",
|
||||||
|
|
@ -83,231 +65,3 @@ async def test_client_path(datasette, prefix, expected_path):
|
||||||
assert path == expected_path
|
assert path == expected_path
|
||||||
finally:
|
finally:
|
||||||
datasette._settings["base_url"] = original_base_url
|
datasette._settings["base_url"] = original_base_url
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_skip_permission_checks_allows_forbidden_access(
|
|
||||||
datasette_with_permissions,
|
|
||||||
):
|
|
||||||
"""Test that skip_permission_checks=True bypasses permission checks"""
|
|
||||||
ds = datasette_with_permissions
|
|
||||||
|
|
||||||
# Without skip_permission_checks, anonymous user should get 403 for protected database
|
|
||||||
response = await ds.client.get("/test_db.json")
|
|
||||||
assert response.status_code == 403
|
|
||||||
|
|
||||||
# With skip_permission_checks=True, should get 200
|
|
||||||
response = await ds.client.get("/test_db.json", skip_permission_checks=True)
|
|
||||||
assert response.status_code == 200
|
|
||||||
data = response.json()
|
|
||||||
assert data["database"] == "test_db"
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_skip_permission_checks_on_table(datasette_with_permissions):
|
|
||||||
"""Test skip_permission_checks works for table access"""
|
|
||||||
ds = datasette_with_permissions
|
|
||||||
|
|
||||||
# Without skip_permission_checks, should get 403
|
|
||||||
response = await ds.client.get("/test_db/test_table.json")
|
|
||||||
assert response.status_code == 403
|
|
||||||
|
|
||||||
# With skip_permission_checks=True, should get table data
|
|
||||||
response = await ds.client.get(
|
|
||||||
"/test_db/test_table.json", skip_permission_checks=True
|
|
||||||
)
|
|
||||||
assert response.status_code == 200
|
|
||||||
data = response.json()
|
|
||||||
assert data["rows"] == [{"id": 1, "name": "Alice"}]
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"method", ["get", "post", "put", "patch", "delete", "options", "head"]
|
|
||||||
)
|
|
||||||
async def test_skip_permission_checks_all_methods(datasette_with_permissions, method):
|
|
||||||
"""Test that skip_permission_checks works with all HTTP methods"""
|
|
||||||
ds = datasette_with_permissions
|
|
||||||
|
|
||||||
# All methods should work with skip_permission_checks=True
|
|
||||||
client_method = getattr(ds.client, method)
|
|
||||||
response = await client_method("/test_db.json", skip_permission_checks=True)
|
|
||||||
# We don't check status code since some methods might not be allowed,
|
|
||||||
# but we verify the request doesn't fail due to permissions
|
|
||||||
assert isinstance(response, httpx.Response)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_skip_permission_checks_request_method(datasette_with_permissions):
|
|
||||||
"""Test that skip_permission_checks works with client.request()"""
|
|
||||||
ds = datasette_with_permissions
|
|
||||||
|
|
||||||
# Without skip_permission_checks
|
|
||||||
response = await ds.client.request("GET", "/test_db.json")
|
|
||||||
assert response.status_code == 403
|
|
||||||
|
|
||||||
# With skip_permission_checks=True
|
|
||||||
response = await ds.client.request(
|
|
||||||
"GET", "/test_db.json", skip_permission_checks=True
|
|
||||||
)
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_skip_permission_checks_isolated_to_request(datasette_with_permissions):
|
|
||||||
"""Test that skip_permission_checks doesn't affect other concurrent requests"""
|
|
||||||
ds = datasette_with_permissions
|
|
||||||
|
|
||||||
# First request with skip_permission_checks=True should succeed
|
|
||||||
response1 = await ds.client.get("/test_db.json", skip_permission_checks=True)
|
|
||||||
assert response1.status_code == 200
|
|
||||||
|
|
||||||
# Subsequent request without it should still get 403
|
|
||||||
response2 = await ds.client.get("/test_db.json")
|
|
||||||
assert response2.status_code == 403
|
|
||||||
|
|
||||||
# And another with skip should succeed again
|
|
||||||
response3 = await ds.client.get("/test_db.json", skip_permission_checks=True)
|
|
||||||
assert response3.status_code == 200
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_skip_permission_checks_with_admin_actor(datasette_with_permissions):
|
|
||||||
"""Test that skip_permission_checks works even when actor is provided"""
|
|
||||||
ds = datasette_with_permissions
|
|
||||||
|
|
||||||
# Admin actor should normally have access
|
|
||||||
admin_cookies = {"ds_actor": ds.client.actor_cookie({"id": "admin"})}
|
|
||||||
response = await ds.client.get("/test_db.json", cookies=admin_cookies)
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
# Non-admin actor should get 403
|
|
||||||
user_cookies = {"ds_actor": ds.client.actor_cookie({"id": "user"})}
|
|
||||||
response = await ds.client.get("/test_db.json", cookies=user_cookies)
|
|
||||||
assert response.status_code == 403
|
|
||||||
|
|
||||||
# Non-admin actor with skip_permission_checks=True should get 200
|
|
||||||
response = await ds.client.get(
|
|
||||||
"/test_db.json", cookies=user_cookies, skip_permission_checks=True
|
|
||||||
)
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_skip_permission_checks_shows_denied_tables():
|
|
||||||
"""Test that skip_permission_checks=True shows tables from denied databases in /-/tables.json"""
|
|
||||||
ds = Datasette(
|
|
||||||
config={
|
|
||||||
"databases": {
|
|
||||||
"fixtures": {"allow": False} # Deny all access to this database
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
await ds.invoke_startup()
|
|
||||||
db = ds.add_memory_database("fixtures")
|
|
||||||
await db.execute_write(
|
|
||||||
"CREATE TABLE test_table (id INTEGER PRIMARY KEY, name TEXT)"
|
|
||||||
)
|
|
||||||
await db.execute_write("INSERT INTO test_table (id, name) VALUES (1, 'Alice')")
|
|
||||||
await ds._refresh_schemas()
|
|
||||||
|
|
||||||
# Without skip_permission_checks, tables from denied database should not appear in /-/tables.json
|
|
||||||
response = await ds.client.get("/-/tables.json")
|
|
||||||
assert response.status_code == 200
|
|
||||||
data = response.json()
|
|
||||||
table_names = [match["name"] for match in data["matches"]]
|
|
||||||
# Should not see any fixtures tables since access is denied
|
|
||||||
fixtures_tables = [name for name in table_names if name.startswith("fixtures:")]
|
|
||||||
assert len(fixtures_tables) == 0
|
|
||||||
|
|
||||||
# With skip_permission_checks=True, tables from denied database SHOULD appear
|
|
||||||
response = await ds.client.get("/-/tables.json", skip_permission_checks=True)
|
|
||||||
assert response.status_code == 200
|
|
||||||
data = response.json()
|
|
||||||
table_names = [match["name"] for match in data["matches"]]
|
|
||||||
# Should see fixtures tables when permission checks are skipped
|
|
||||||
assert "fixtures: test_table" in table_names
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_in_client_returns_false_outside_request(datasette):
|
|
||||||
"""Test that datasette.in_client() returns False outside of a client request"""
|
|
||||||
assert datasette.in_client() is False
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_in_client_returns_true_inside_request():
|
|
||||||
"""Test that datasette.in_client() returns True inside a client request"""
|
|
||||||
from datasette import hookimpl, Response
|
|
||||||
|
|
||||||
class TestPlugin:
|
|
||||||
__name__ = "test_in_client_plugin"
|
|
||||||
|
|
||||||
@hookimpl
|
|
||||||
def register_routes(self):
|
|
||||||
async def test_view(datasette):
|
|
||||||
# Assert in_client() returns True within the view
|
|
||||||
assert datasette.in_client() is True
|
|
||||||
return Response.json({"in_client": datasette.in_client()})
|
|
||||||
|
|
||||||
return [
|
|
||||||
(r"^/-/test-in-client$", test_view),
|
|
||||||
]
|
|
||||||
|
|
||||||
ds = Datasette()
|
|
||||||
await ds.invoke_startup()
|
|
||||||
ds.pm.register(TestPlugin(), name="test_in_client_plugin")
|
|
||||||
try:
|
|
||||||
|
|
||||||
# Outside of a client request, should be False
|
|
||||||
assert ds.in_client() is False
|
|
||||||
|
|
||||||
# Make a request via datasette.client
|
|
||||||
response = await ds.client.get("/-/test-in-client")
|
|
||||||
assert response.status_code == 200
|
|
||||||
assert response.json()["in_client"] is True
|
|
||||||
|
|
||||||
# After the request, should be False again
|
|
||||||
assert ds.in_client() is False
|
|
||||||
finally:
|
|
||||||
ds.pm.unregister(name="test_in_client_plugin")
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_in_client_with_skip_permission_checks():
|
|
||||||
"""Test that in_client() works regardless of skip_permission_checks value"""
|
|
||||||
from datasette import hookimpl
|
|
||||||
from datasette.utils.asgi import Response
|
|
||||||
|
|
||||||
in_client_values = []
|
|
||||||
|
|
||||||
class TestPlugin:
|
|
||||||
__name__ = "test_in_client_skip_plugin"
|
|
||||||
|
|
||||||
@hookimpl
|
|
||||||
def register_routes(self):
|
|
||||||
async def test_view(datasette):
|
|
||||||
in_client_values.append(datasette.in_client())
|
|
||||||
return Response.json({"in_client": datasette.in_client()})
|
|
||||||
|
|
||||||
return [
|
|
||||||
(r"^/-/test-in-client$", test_view),
|
|
||||||
]
|
|
||||||
|
|
||||||
ds = Datasette(config={"databases": {"test_db": {"allow": {"id": "admin"}}}})
|
|
||||||
await ds.invoke_startup()
|
|
||||||
ds.pm.register(TestPlugin(), name="test_in_client_skip_plugin")
|
|
||||||
try:
|
|
||||||
|
|
||||||
# Request without skip_permission_checks
|
|
||||||
await ds.client.get("/-/test-in-client")
|
|
||||||
# Request with skip_permission_checks=True
|
|
||||||
await ds.client.get("/-/test-in-client", skip_permission_checks=True)
|
|
||||||
|
|
||||||
# Both should have detected in_client as True
|
|
||||||
assert (
|
|
||||||
len(in_client_values) == 2
|
|
||||||
), f"Expected 2 values, got {len(in_client_values)}"
|
|
||||||
assert all(in_client_values), f"Expected all True, got {in_client_values}"
|
|
||||||
finally:
|
|
||||||
ds.pm.unregister(name="test_in_client_skip_plugin")
|
|
||||||
|
|
|
||||||
|
|
@ -439,6 +439,7 @@ async def test_execute_sql_requires_view_database():
|
||||||
be able to execute SQL on that database.
|
be able to execute SQL on that database.
|
||||||
"""
|
"""
|
||||||
from datasette.permissions import PermissionSQL
|
from datasette.permissions import PermissionSQL
|
||||||
|
from datasette.plugins import pm
|
||||||
from datasette import hookimpl
|
from datasette import hookimpl
|
||||||
|
|
||||||
class TestPermissionPlugin:
|
class TestPermissionPlugin:
|
||||||
|
|
@ -452,23 +453,26 @@ async def test_execute_sql_requires_view_database():
|
||||||
if action == "execute-sql":
|
if action == "execute-sql":
|
||||||
# Grant execute-sql on the "secret" database
|
# Grant execute-sql on the "secret" database
|
||||||
return PermissionSQL(
|
return PermissionSQL(
|
||||||
|
source="test_plugin",
|
||||||
sql="SELECT 'secret' AS parent, NULL AS child, 1 AS allow, 'can execute sql' AS reason",
|
sql="SELECT 'secret' AS parent, NULL AS child, 1 AS allow, 'can execute sql' AS reason",
|
||||||
|
params={},
|
||||||
)
|
)
|
||||||
elif action == "view-database":
|
elif action == "view-database":
|
||||||
# Deny view-database on the "secret" database
|
# Deny view-database on the "secret" database
|
||||||
return PermissionSQL(
|
return PermissionSQL(
|
||||||
|
source="test_plugin",
|
||||||
sql="SELECT 'secret' AS parent, NULL AS child, 0 AS allow, 'cannot view db' AS reason",
|
sql="SELECT 'secret' AS parent, NULL AS child, 0 AS allow, 'cannot view db' AS reason",
|
||||||
|
params={},
|
||||||
)
|
)
|
||||||
|
|
||||||
return []
|
return []
|
||||||
|
|
||||||
plugin = TestPermissionPlugin()
|
plugin = TestPermissionPlugin()
|
||||||
|
pm.register(plugin, name="test_plugin")
|
||||||
ds = Datasette()
|
|
||||||
await ds.invoke_startup()
|
|
||||||
ds.pm.register(plugin, name="test_plugin")
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
ds = Datasette()
|
||||||
|
await ds.invoke_startup()
|
||||||
ds.add_memory_database("secret")
|
ds.add_memory_database("secret")
|
||||||
await ds.refresh_schemas()
|
await ds.refresh_schemas()
|
||||||
|
|
||||||
|
|
@ -498,4 +502,4 @@ async def test_execute_sql_requires_view_database():
|
||||||
f"but got {response.status_code}"
|
f"but got {response.status_code}"
|
||||||
)
|
)
|
||||||
finally:
|
finally:
|
||||||
ds.pm.unregister(plugin)
|
pm.unregister(plugin)
|
||||||
|
|
|
||||||
|
|
@ -1117,29 +1117,16 @@ async def test_api_explorer_visibility(
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_view_table_token_cannot_gain_access_without_base_permission(perms_ds):
|
async def test_view_table_token_can_access_table(perms_ds):
|
||||||
# Only allow a different actor to view this table
|
actor = {
|
||||||
previous_config = perms_ds.config
|
"id": "restricted-token",
|
||||||
perms_ds.config = {
|
"token": "dstok",
|
||||||
"databases": {
|
# Restricted to just view-table on perms_ds_two/t1
|
||||||
"perms_ds_two": {
|
"_r": {"r": {"perms_ds_two": {"t1": ["vt"]}}},
|
||||||
# Only someone-else can see anything in this database
|
|
||||||
"allow": {"id": "someone-else"},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
try:
|
cookies = {"ds_actor": perms_ds.client.actor_cookie(actor)}
|
||||||
actor = {
|
response = await perms_ds.client.get("/perms_ds_two/t1.json", cookies=cookies)
|
||||||
"id": "restricted-token",
|
assert response.status_code == 200
|
||||||
"token": "dstok",
|
|
||||||
# Restricted token claims access to perms_ds_two/t1 only
|
|
||||||
"_r": {"r": {"perms_ds_two": {"t1": ["vt"]}}},
|
|
||||||
}
|
|
||||||
cookies = {"ds_actor": perms_ds.client.actor_cookie(actor)}
|
|
||||||
response = await perms_ds.client.get("/perms_ds_two/t1.json", cookies=cookies)
|
|
||||||
assert response.status_code == 403
|
|
||||||
finally:
|
|
||||||
perms_ds.config = previous_config
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -1323,20 +1310,6 @@ async def test_actor_restrictions(
|
||||||
("dbname2", "tablename"),
|
("dbname2", "tablename"),
|
||||||
False,
|
False,
|
||||||
),
|
),
|
||||||
# Table-level restriction allows access to that specific table
|
|
||||||
(
|
|
||||||
{"r": {"dbname": {"tablename": ["view-table"]}}},
|
|
||||||
"view-table",
|
|
||||||
("dbname", "tablename"),
|
|
||||||
True,
|
|
||||||
),
|
|
||||||
# But not to a different table in the same database
|
|
||||||
(
|
|
||||||
{"r": {"dbname": {"tablename": ["view-table"]}}},
|
|
||||||
"view-table",
|
|
||||||
("dbname", "other_table"),
|
|
||||||
False,
|
|
||||||
),
|
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
async def test_restrictions_allow_action(restrictions, action, resource, expected):
|
async def test_restrictions_allow_action(restrictions, action, resource, expected):
|
||||||
|
|
@ -1354,43 +1327,14 @@ async def test_actor_restrictions_filters_allowed_resources(perms_ds):
|
||||||
actor = {"id": "user", "_r": {"r": {"perms_ds_one": {"t1": ["vt"]}}}}
|
actor = {"id": "user", "_r": {"r": {"perms_ds_one": {"t1": ["vt"]}}}}
|
||||||
|
|
||||||
# Should only return t1
|
# Should only return t1
|
||||||
page = await perms_ds.allowed_resources("view-table", actor)
|
allowed_tables = await perms_ds.allowed_resources("view-table", actor)
|
||||||
assert len(page.resources) == 1
|
assert len(allowed_tables) == 1
|
||||||
assert page.resources[0].parent == "perms_ds_one"
|
assert allowed_tables[0].parent == "perms_ds_one"
|
||||||
assert page.resources[0].child == "t1"
|
assert allowed_tables[0].child == "t1"
|
||||||
|
|
||||||
# Database listing should be empty (no view-database permission)
|
# Database listing should be empty (no view-database permission)
|
||||||
db_page = await perms_ds.allowed_resources("view-database", actor)
|
allowed_dbs = await perms_ds.allowed_resources("view-database", actor)
|
||||||
assert len(db_page.resources) == 0
|
assert len(allowed_dbs) == 0
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_actor_restrictions_do_not_expand_allowed_resources(perms_ds):
|
|
||||||
"""Restrictions cannot grant access not already allowed to the actor."""
|
|
||||||
|
|
||||||
previous_config = perms_ds.config
|
|
||||||
perms_ds.config = {
|
|
||||||
"databases": {
|
|
||||||
"perms_ds_one": {
|
|
||||||
"allow": {"id": "someone-else"},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
try:
|
|
||||||
actor = {"id": "user", "_r": {"r": {"perms_ds_one": {"t1": ["vt"]}}}}
|
|
||||||
|
|
||||||
# Base actor is not allowed to see t1, so restrictions should not change that
|
|
||||||
page = await perms_ds.allowed_resources("view-table", actor)
|
|
||||||
assert len(page.resources) == 0
|
|
||||||
|
|
||||||
# And explicit permission checks should still deny
|
|
||||||
response = await perms_ds.client.get(
|
|
||||||
"/perms_ds_one/t1.json",
|
|
||||||
cookies={"ds_actor": perms_ds.client.actor_cookie(actor)},
|
|
||||||
)
|
|
||||||
assert response.status_code == 403
|
|
||||||
finally:
|
|
||||||
perms_ds.config = previous_config
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -1399,10 +1343,12 @@ async def test_actor_restrictions_database_level(perms_ds):
|
||||||
|
|
||||||
actor = {"id": "user", "_r": {"d": {"perms_ds_one": ["vt"]}}}
|
actor = {"id": "user", "_r": {"d": {"perms_ds_one": ["vt"]}}}
|
||||||
|
|
||||||
page = await perms_ds.allowed_resources("view-table", actor, parent="perms_ds_one")
|
allowed_tables = await perms_ds.allowed_resources(
|
||||||
|
"view-table", actor, parent="perms_ds_one"
|
||||||
|
)
|
||||||
|
|
||||||
# Should return all tables in perms_ds_one
|
# Should return all tables in perms_ds_one
|
||||||
table_names = {r.child for r in page.resources}
|
table_names = {r.child for r in allowed_tables}
|
||||||
assert "t1" in table_names
|
assert "t1" in table_names
|
||||||
assert "t2" in table_names
|
assert "t2" in table_names
|
||||||
assert "v1" in table_names # views too
|
assert "v1" in table_names # views too
|
||||||
|
|
@ -1414,11 +1360,11 @@ async def test_actor_restrictions_global_level(perms_ds):
|
||||||
|
|
||||||
actor = {"id": "user", "_r": {"a": ["vt"]}}
|
actor = {"id": "user", "_r": {"a": ["vt"]}}
|
||||||
|
|
||||||
page = await perms_ds.allowed_resources("view-table", actor)
|
allowed_tables = await perms_ds.allowed_resources("view-table", actor)
|
||||||
|
|
||||||
# Should return all tables in all databases
|
# Should return all tables in all databases
|
||||||
assert len(page.resources) > 0
|
assert len(allowed_tables) > 0
|
||||||
dbs = {r.parent for r in page.resources}
|
dbs = {r.parent for r in allowed_tables}
|
||||||
assert "perms_ds_one" in dbs
|
assert "perms_ds_one" in dbs
|
||||||
assert "perms_ds_two" in dbs
|
assert "perms_ds_two" in dbs
|
||||||
|
|
||||||
|
|
@ -1484,8 +1430,8 @@ async def test_actor_restrictions_view_instance_only(perms_ds):
|
||||||
data = response.json()
|
data = response.json()
|
||||||
# The instance is visible but databases list should be empty or minimal
|
# The instance is visible but databases list should be empty or minimal
|
||||||
# Actually, let's check via allowed_resources
|
# Actually, let's check via allowed_resources
|
||||||
page = await perms_ds.allowed_resources("view-database", actor)
|
allowed_dbs = await perms_ds.allowed_resources("view-database", actor)
|
||||||
assert len(page.resources) == 0
|
assert len(allowed_dbs) == 0
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -1495,11 +1441,11 @@ async def test_actor_restrictions_empty_allowlist(perms_ds):
|
||||||
actor = {"id": "user", "_r": {}}
|
actor = {"id": "user", "_r": {}}
|
||||||
|
|
||||||
# No actions in allowlist, so everything should be denied
|
# No actions in allowlist, so everything should be denied
|
||||||
page1 = await perms_ds.allowed_resources("view-table", actor)
|
allowed_tables = await perms_ds.allowed_resources("view-table", actor)
|
||||||
assert len(page1.resources) == 0
|
assert len(allowed_tables) == 0
|
||||||
|
|
||||||
page2 = await perms_ds.allowed_resources("view-database", actor)
|
allowed_dbs = await perms_ds.allowed_resources("view-database", actor)
|
||||||
assert len(page2.resources) == 0
|
assert len(allowed_dbs) == 0
|
||||||
|
|
||||||
result = await perms_ds.allowed(action="view-instance", actor=actor)
|
result = await perms_ds.allowed(action="view-instance", actor=actor)
|
||||||
assert result is False
|
assert result is False
|
||||||
|
|
@ -1667,48 +1613,3 @@ async def test_permission_check_view_requires_debug_permission():
|
||||||
data = response.json()
|
data = response.json()
|
||||||
assert data["action"] == "view-instance"
|
assert data["action"] == "view-instance"
|
||||||
assert data["allowed"] is True
|
assert data["allowed"] is True
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_root_allow_block_with_table_restricted_actor():
|
|
||||||
"""
|
|
||||||
Test that root-level allow: blocks are processed for actors with
|
|
||||||
table-level restrictions.
|
|
||||||
|
|
||||||
This covers the case in config.py is_in_restriction_allowlist() where
|
|
||||||
parent=None, child=None and actor has table restrictions but not global.
|
|
||||||
"""
|
|
||||||
from datasette.resources import TableResource
|
|
||||||
|
|
||||||
# Config with root-level allow block that denies non-admin users
|
|
||||||
ds = Datasette(
|
|
||||||
config={
|
|
||||||
"allow": {"id": "admin"}, # Root-level allow block
|
|
||||||
}
|
|
||||||
)
|
|
||||||
await ds.invoke_startup()
|
|
||||||
db = ds.add_memory_database("mydb")
|
|
||||||
await db.execute_write("create table t1 (id integer primary key)")
|
|
||||||
await ds.client.get("/") # Trigger catalog refresh
|
|
||||||
|
|
||||||
# Actor with table-level restrictions only (not global)
|
|
||||||
actor = {"id": "user", "_r": {"r": {"mydb": {"t1": ["view-table"]}}}}
|
|
||||||
|
|
||||||
# The root-level allow: {id: admin} should be processed and deny this user
|
|
||||||
# because they're not "admin", even though they have table restrictions
|
|
||||||
result = await ds.allowed(
|
|
||||||
action="view-table",
|
|
||||||
resource=TableResource("mydb", "t1"),
|
|
||||||
actor=actor,
|
|
||||||
)
|
|
||||||
# Should be False because root allow: {id: admin} denies non-admin users
|
|
||||||
assert result is False
|
|
||||||
|
|
||||||
# But admin with same restrictions should be allowed
|
|
||||||
admin_actor = {"id": "admin", "_r": {"r": {"mydb": {"t1": ["view-table"]}}}}
|
|
||||||
result = await ds.allowed(
|
|
||||||
action="view-table",
|
|
||||||
resource=TableResource("mydb", "t1"),
|
|
||||||
actor=admin_actor,
|
|
||||||
)
|
|
||||||
assert result is True
|
|
||||||
|
|
|
||||||
|
|
@ -11,8 +11,7 @@ from datasette.app import Datasette
|
||||||
from datasette import cli, hookimpl
|
from datasette import cli, hookimpl
|
||||||
from datasette.filters import FilterArguments
|
from datasette.filters import FilterArguments
|
||||||
from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm
|
from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm
|
||||||
from datasette.permissions import PermissionSQL, Action
|
from datasette.permissions import PermissionSQL
|
||||||
from datasette.resources import DatabaseResource
|
|
||||||
from datasette.utils.sqlite import sqlite3
|
from datasette.utils.sqlite import sqlite3
|
||||||
from datasette.utils import StartupError, await_me_maybe
|
from datasette.utils import StartupError, await_me_maybe
|
||||||
from jinja2 import ChoiceLoader, FileSystemLoader
|
from jinja2 import ChoiceLoader, FileSystemLoader
|
||||||
|
|
@ -326,11 +325,7 @@ async def test_plugin_config_file(ds_client):
|
||||||
)
|
)
|
||||||
def test_hook_extra_body_script(app_client, path, expected_extra_body_script):
|
def test_hook_extra_body_script(app_client, path, expected_extra_body_script):
|
||||||
r = re.compile(r"<script type=\"module\">var extra_body_script = (.*?);</script>")
|
r = re.compile(r"<script type=\"module\">var extra_body_script = (.*?);</script>")
|
||||||
response = app_client.get(path)
|
json_data = r.search(app_client.get(path).text).group(1)
|
||||||
assert response.status_code == 200, response.text
|
|
||||||
match = r.search(response.text)
|
|
||||||
assert match is not None, "No extra_body_script found in HTML"
|
|
||||||
json_data = match.group(1)
|
|
||||||
actual_data = json.loads(json_data)
|
actual_data = json.loads(json_data)
|
||||||
assert expected_extra_body_script == actual_data
|
assert expected_extra_body_script == actual_data
|
||||||
|
|
||||||
|
|
@ -677,12 +672,40 @@ async def test_existing_scope_actor_respected(ds_client):
|
||||||
("this_is_denied_async", False),
|
("this_is_denied_async", False),
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
async def test_hook_custom_allowed(action, expected):
|
async def test_hook_permission_allowed(action, expected):
|
||||||
# Test actions and permission logic are defined in tests/plugins/my_plugin.py
|
from datasette.permissions import Action
|
||||||
ds = Datasette(plugins_dir=PLUGINS_DIR)
|
from datasette.resources import InstanceResource
|
||||||
await ds.invoke_startup()
|
|
||||||
actual = await ds.allowed(action=action, actor={"id": "actor"})
|
class TestPlugin:
|
||||||
assert expected == actual
|
__name__ = "TestPlugin"
|
||||||
|
|
||||||
|
@hookimpl
|
||||||
|
def register_actions(self):
|
||||||
|
return [
|
||||||
|
Action(
|
||||||
|
name=name,
|
||||||
|
abbr=None,
|
||||||
|
description=None,
|
||||||
|
takes_parent=False,
|
||||||
|
takes_child=False,
|
||||||
|
resource_class=InstanceResource,
|
||||||
|
)
|
||||||
|
for name in (
|
||||||
|
"this_is_allowed",
|
||||||
|
"this_is_denied",
|
||||||
|
"this_is_allowed_async",
|
||||||
|
"this_is_denied_async",
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
pm.register(TestPlugin(), name="undo_register_extras")
|
||||||
|
try:
|
||||||
|
ds = Datasette(plugins_dir=PLUGINS_DIR)
|
||||||
|
await ds.invoke_startup()
|
||||||
|
actual = await ds.allowed(action=action, actor={"id": "actor"})
|
||||||
|
assert expected == actual
|
||||||
|
finally:
|
||||||
|
pm.unregister(name="undo_register_extras")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -691,7 +714,7 @@ async def test_hook_permission_resources_sql():
|
||||||
await ds.invoke_startup()
|
await ds.invoke_startup()
|
||||||
|
|
||||||
collected = []
|
collected = []
|
||||||
for block in ds.pm.hook.permission_resources_sql(
|
for block in pm.hook.permission_resources_sql(
|
||||||
datasette=ds,
|
datasette=ds,
|
||||||
actor={"id": "alice"},
|
actor={"id": "alice"},
|
||||||
action="view-table",
|
action="view-table",
|
||||||
|
|
@ -1161,12 +1184,12 @@ async def test_hook_filters_from_request(ds_client):
|
||||||
if request.args.get("_nothing"):
|
if request.args.get("_nothing"):
|
||||||
return FilterArguments(["1 = 0"], human_descriptions=["NOTHING"])
|
return FilterArguments(["1 = 0"], human_descriptions=["NOTHING"])
|
||||||
|
|
||||||
ds_client.ds.pm.register(ReturnNothingPlugin(), name="ReturnNothingPlugin")
|
pm.register(ReturnNothingPlugin(), name="ReturnNothingPlugin")
|
||||||
response = await ds_client.get("/fixtures/facetable?_nothing=1")
|
response = await ds_client.get("/fixtures/facetable?_nothing=1")
|
||||||
assert "0 rows\n where NOTHING" in response.text
|
assert "0 rows\n where NOTHING" in response.text
|
||||||
json_response = await ds_client.get("/fixtures/facetable.json?_nothing=1")
|
json_response = await ds_client.get("/fixtures/facetable.json?_nothing=1")
|
||||||
assert json_response.json()["rows"] == []
|
assert json_response.json()["rows"] == []
|
||||||
ds_client.ds.pm.unregister(name="ReturnNothingPlugin")
|
pm.unregister(name="ReturnNothingPlugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -1185,6 +1208,9 @@ async def test_hook_register_actions(extra_metadata):
|
||||||
"name": "extra-from-metadata",
|
"name": "extra-from-metadata",
|
||||||
"abbr": "efm",
|
"abbr": "efm",
|
||||||
"description": "Extra from metadata",
|
"description": "Extra from metadata",
|
||||||
|
"takes_parent": False,
|
||||||
|
"takes_child": False,
|
||||||
|
"resource_class": "InstanceResource",
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
@ -1200,6 +1226,8 @@ async def test_hook_register_actions(extra_metadata):
|
||||||
name="action-from-plugin",
|
name="action-from-plugin",
|
||||||
abbr="ap",
|
abbr="ap",
|
||||||
description="New action added by a plugin",
|
description="New action added by a plugin",
|
||||||
|
takes_parent=True,
|
||||||
|
takes_child=False,
|
||||||
resource_class=DatabaseResource,
|
resource_class=DatabaseResource,
|
||||||
)
|
)
|
||||||
if extra_metadata:
|
if extra_metadata:
|
||||||
|
|
@ -1207,6 +1235,9 @@ async def test_hook_register_actions(extra_metadata):
|
||||||
name="extra-from-metadata",
|
name="extra-from-metadata",
|
||||||
abbr="efm",
|
abbr="efm",
|
||||||
description="Extra from metadata",
|
description="Extra from metadata",
|
||||||
|
takes_parent=False,
|
||||||
|
takes_child=False,
|
||||||
|
resource_class=InstanceResource,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
assert "extra-from-metadata" not in ds.actions
|
assert "extra-from-metadata" not in ds.actions
|
||||||
|
|
@ -1230,11 +1261,17 @@ async def test_hook_register_actions_no_duplicates(duplicate):
|
||||||
"name": name1,
|
"name": name1,
|
||||||
"abbr": abbr1,
|
"abbr": abbr1,
|
||||||
"description": None,
|
"description": None,
|
||||||
|
"takes_parent": False,
|
||||||
|
"takes_child": False,
|
||||||
|
"resource_class": "InstanceResource",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": name2,
|
"name": name2,
|
||||||
"abbr": abbr2,
|
"abbr": abbr2,
|
||||||
"description": None,
|
"description": None,
|
||||||
|
"takes_parent": False,
|
||||||
|
"takes_child": False,
|
||||||
|
"resource_class": "InstanceResource",
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
@ -1259,11 +1296,17 @@ async def test_hook_register_actions_allows_identical_duplicates():
|
||||||
"name": "name1",
|
"name": "name1",
|
||||||
"abbr": "abbr1",
|
"abbr": "abbr1",
|
||||||
"description": None,
|
"description": None,
|
||||||
|
"takes_parent": False,
|
||||||
|
"takes_child": False,
|
||||||
|
"resource_class": "InstanceResource",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "name1",
|
"name": "name1",
|
||||||
"abbr": "abbr1",
|
"abbr": "abbr1",
|
||||||
"description": None,
|
"description": None,
|
||||||
|
"takes_parent": False,
|
||||||
|
"takes_child": False,
|
||||||
|
"resource_class": "InstanceResource",
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
@ -1327,7 +1370,7 @@ async def test_hook_actors_from_ids():
|
||||||
return inner
|
return inner
|
||||||
|
|
||||||
try:
|
try:
|
||||||
ds.pm.register(ActorsFromIdsPlugin(), name="ActorsFromIdsPlugin")
|
pm.register(ActorsFromIdsPlugin(), name="ActorsFromIdsPlugin")
|
||||||
actors2 = await ds.actors_from_ids(["3", "5", "7"])
|
actors2 = await ds.actors_from_ids(["3", "5", "7"])
|
||||||
assert actors2 == {
|
assert actors2 == {
|
||||||
"3": {"id": "3", "name": "Cate Blanchett"},
|
"3": {"id": "3", "name": "Cate Blanchett"},
|
||||||
|
|
@ -1335,7 +1378,7 @@ async def test_hook_actors_from_ids():
|
||||||
"7": {"id": "7", "name": "Sarah Paulson"},
|
"7": {"id": "7", "name": "Sarah Paulson"},
|
||||||
}
|
}
|
||||||
finally:
|
finally:
|
||||||
ds.pm.unregister(name="ReturnNothingPlugin")
|
pm.unregister(name="ReturnNothingPlugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -1350,14 +1393,14 @@ async def test_plugin_is_installed():
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
datasette.pm.register(DummyPlugin(), name="DummyPlugin")
|
pm.register(DummyPlugin(), name="DummyPlugin")
|
||||||
response = await datasette.client.get("/-/plugins.json")
|
response = await datasette.client.get("/-/plugins.json")
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
installed_plugins = {p["name"] for p in response.json()}
|
installed_plugins = {p["name"] for p in response.json()}
|
||||||
assert "DummyPlugin" in installed_plugins
|
assert "DummyPlugin" in installed_plugins
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
datasette.pm.unregister(name="DummyPlugin")
|
pm.unregister(name="DummyPlugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -1384,7 +1427,7 @@ async def test_hook_jinja2_environment_from_request(tmpdir):
|
||||||
datasette = Datasette(memory=True)
|
datasette = Datasette(memory=True)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
datasette.pm.register(EnvironmentPlugin(), name="EnvironmentPlugin")
|
pm.register(EnvironmentPlugin(), name="EnvironmentPlugin")
|
||||||
response = await datasette.client.get("/")
|
response = await datasette.client.get("/")
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert "Hello museums!" not in response.text
|
assert "Hello museums!" not in response.text
|
||||||
|
|
@ -1395,7 +1438,7 @@ async def test_hook_jinja2_environment_from_request(tmpdir):
|
||||||
assert response2.status_code == 200
|
assert response2.status_code == 200
|
||||||
assert "Hello museums!" in response2.text
|
assert "Hello museums!" in response2.text
|
||||||
finally:
|
finally:
|
||||||
datasette.pm.unregister(name="EnvironmentPlugin")
|
pm.unregister(name="EnvironmentPlugin")
|
||||||
|
|
||||||
|
|
||||||
class SlotPlugin:
|
class SlotPlugin:
|
||||||
|
|
@ -1433,48 +1476,48 @@ class SlotPlugin:
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_hook_top_homepage():
|
async def test_hook_top_homepage():
|
||||||
datasette = Datasette(memory=True)
|
|
||||||
try:
|
try:
|
||||||
datasette.pm.register(SlotPlugin(), name="SlotPlugin")
|
pm.register(SlotPlugin(), name="SlotPlugin")
|
||||||
|
datasette = Datasette(memory=True)
|
||||||
response = await datasette.client.get("/?z=foo")
|
response = await datasette.client.get("/?z=foo")
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert "Xtop_homepage:foo" in response.text
|
assert "Xtop_homepage:foo" in response.text
|
||||||
finally:
|
finally:
|
||||||
datasette.pm.unregister(name="SlotPlugin")
|
pm.unregister(name="SlotPlugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_hook_top_database():
|
async def test_hook_top_database():
|
||||||
datasette = Datasette(memory=True)
|
|
||||||
try:
|
try:
|
||||||
datasette.pm.register(SlotPlugin(), name="SlotPlugin")
|
pm.register(SlotPlugin(), name="SlotPlugin")
|
||||||
|
datasette = Datasette(memory=True)
|
||||||
response = await datasette.client.get("/_memory?z=bar")
|
response = await datasette.client.get("/_memory?z=bar")
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert "Xtop_database:_memory:bar" in response.text
|
assert "Xtop_database:_memory:bar" in response.text
|
||||||
finally:
|
finally:
|
||||||
datasette.pm.unregister(name="SlotPlugin")
|
pm.unregister(name="SlotPlugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_hook_top_table(ds_client):
|
async def test_hook_top_table(ds_client):
|
||||||
try:
|
try:
|
||||||
ds_client.ds.pm.register(SlotPlugin(), name="SlotPlugin")
|
pm.register(SlotPlugin(), name="SlotPlugin")
|
||||||
response = await ds_client.get("/fixtures/facetable?z=baz")
|
response = await ds_client.get("/fixtures/facetable?z=baz")
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert "Xtop_table:fixtures:facetable:baz" in response.text
|
assert "Xtop_table:fixtures:facetable:baz" in response.text
|
||||||
finally:
|
finally:
|
||||||
ds_client.ds.pm.unregister(name="SlotPlugin")
|
pm.unregister(name="SlotPlugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_hook_top_row(ds_client):
|
async def test_hook_top_row(ds_client):
|
||||||
try:
|
try:
|
||||||
ds_client.ds.pm.register(SlotPlugin(), name="SlotPlugin")
|
pm.register(SlotPlugin(), name="SlotPlugin")
|
||||||
response = await ds_client.get("/fixtures/facet_cities/1?z=bax")
|
response = await ds_client.get("/fixtures/facet_cities/1?z=bax")
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert "Xtop_row:fixtures:facet_cities:San Francisco:bax" in response.text
|
assert "Xtop_row:fixtures:facet_cities:San Francisco:bax" in response.text
|
||||||
finally:
|
finally:
|
||||||
ds_client.ds.pm.unregister(name="SlotPlugin")
|
pm.unregister(name="SlotPlugin")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
@ -1537,253 +1580,6 @@ async def test_hook_register_actions():
|
||||||
assert action.description == "View a collection"
|
assert action.description == "View a collection"
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_hook_register_actions_with_custom_resources():
|
|
||||||
"""
|
|
||||||
Test registering actions with custom Resource classes:
|
|
||||||
- A global action (no resource)
|
|
||||||
- A parent-level action (DocumentCollectionResource)
|
|
||||||
- A child-level action (DocumentResource)
|
|
||||||
"""
|
|
||||||
from datasette.permissions import Resource, Action
|
|
||||||
|
|
||||||
# Define custom Resource classes
|
|
||||||
class DocumentCollectionResource(Resource):
|
|
||||||
"""A collection of documents."""
|
|
||||||
|
|
||||||
name = "document_collection"
|
|
||||||
parent_class = None # Top-level resource
|
|
||||||
|
|
||||||
def __init__(self, collection: str):
|
|
||||||
super().__init__(parent=collection, child=None)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def resources_sql(cls, datasette) -> str:
|
|
||||||
return """
|
|
||||||
SELECT 'collection1' AS parent, NULL AS child
|
|
||||||
UNION ALL
|
|
||||||
SELECT 'collection2' AS parent, NULL AS child
|
|
||||||
"""
|
|
||||||
|
|
||||||
class DocumentResource(Resource):
|
|
||||||
"""A document in a collection."""
|
|
||||||
|
|
||||||
name = "document"
|
|
||||||
parent_class = DocumentCollectionResource # Child of DocumentCollectionResource
|
|
||||||
|
|
||||||
def __init__(self, collection: str, document: str):
|
|
||||||
super().__init__(parent=collection, child=document)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def resources_sql(cls, datasette) -> str:
|
|
||||||
return """
|
|
||||||
SELECT 'collection1' AS parent, 'doc1' AS child
|
|
||||||
UNION ALL
|
|
||||||
SELECT 'collection1' AS parent, 'doc2' AS child
|
|
||||||
UNION ALL
|
|
||||||
SELECT 'collection2' AS parent, 'doc3' AS child
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Define a test plugin that registers these actions
|
|
||||||
class TestPlugin:
|
|
||||||
__name__ = "test_custom_resources_plugin"
|
|
||||||
|
|
||||||
@hookimpl
|
|
||||||
def register_actions(self, datasette):
|
|
||||||
return [
|
|
||||||
# Global action - no resource_class
|
|
||||||
Action(
|
|
||||||
name="manage-documents",
|
|
||||||
abbr="md",
|
|
||||||
description="Manage the document system",
|
|
||||||
),
|
|
||||||
# Parent-level action - collection only
|
|
||||||
Action(
|
|
||||||
name="view-document-collection",
|
|
||||||
description="View a document collection",
|
|
||||||
resource_class=DocumentCollectionResource,
|
|
||||||
),
|
|
||||||
# Child-level action - collection + document
|
|
||||||
Action(
|
|
||||||
name="view-document",
|
|
||||||
abbr="vdoc",
|
|
||||||
description="View a document",
|
|
||||||
resource_class=DocumentResource,
|
|
||||||
),
|
|
||||||
]
|
|
||||||
|
|
||||||
@hookimpl
|
|
||||||
def permission_resources_sql(self, datasette, actor, action):
|
|
||||||
from datasette.permissions import PermissionSQL
|
|
||||||
|
|
||||||
# Grant user2 access to manage-documents globally
|
|
||||||
if actor and actor.get("id") == "user2" and action == "manage-documents":
|
|
||||||
return PermissionSQL.allow(reason="user2 granted manage-documents")
|
|
||||||
|
|
||||||
# Grant user2 access to view-document-collection globally
|
|
||||||
if (
|
|
||||||
actor
|
|
||||||
and actor.get("id") == "user2"
|
|
||||||
and action == "view-document-collection"
|
|
||||||
):
|
|
||||||
return PermissionSQL.allow(
|
|
||||||
reason="user2 granted view-document-collection"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Default allow for view-document-collection (like other view-* actions)
|
|
||||||
if action == "view-document-collection":
|
|
||||||
return PermissionSQL.allow(
|
|
||||||
reason="default allow for view-document-collection"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Default allow for view-document (like other view-* actions)
|
|
||||||
if action == "view-document":
|
|
||||||
return PermissionSQL.allow(reason="default allow for view-document")
|
|
||||||
|
|
||||||
# Register the plugin temporarily
|
|
||||||
plugin = TestPlugin()
|
|
||||||
pm.register(plugin, name="test_custom_resources_plugin")
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Create datasette instance and invoke startup
|
|
||||||
datasette = Datasette(memory=True)
|
|
||||||
await datasette.invoke_startup()
|
|
||||||
|
|
||||||
# Test global action
|
|
||||||
manage_docs = datasette.actions["manage-documents"]
|
|
||||||
assert manage_docs.name == "manage-documents"
|
|
||||||
assert manage_docs.abbr == "md"
|
|
||||||
assert manage_docs.resource_class is None
|
|
||||||
assert manage_docs.takes_parent is False
|
|
||||||
assert manage_docs.takes_child is False
|
|
||||||
|
|
||||||
# Test parent-level action
|
|
||||||
view_collection = datasette.actions["view-document-collection"]
|
|
||||||
assert view_collection.name == "view-document-collection"
|
|
||||||
assert view_collection.abbr is None
|
|
||||||
assert view_collection.resource_class is DocumentCollectionResource
|
|
||||||
assert view_collection.takes_parent is True
|
|
||||||
assert view_collection.takes_child is False
|
|
||||||
|
|
||||||
# Test child-level action
|
|
||||||
view_doc = datasette.actions["view-document"]
|
|
||||||
assert view_doc.name == "view-document"
|
|
||||||
assert view_doc.abbr == "vdoc"
|
|
||||||
assert view_doc.resource_class is DocumentResource
|
|
||||||
assert view_doc.takes_parent is True
|
|
||||||
assert view_doc.takes_child is True
|
|
||||||
|
|
||||||
# Verify the resource classes have correct hierarchy
|
|
||||||
assert DocumentCollectionResource.parent_class is None
|
|
||||||
assert DocumentResource.parent_class is DocumentCollectionResource
|
|
||||||
|
|
||||||
# Test that resources can be instantiated correctly
|
|
||||||
collection_resource = DocumentCollectionResource(collection="collection1")
|
|
||||||
assert collection_resource.parent == "collection1"
|
|
||||||
assert collection_resource.child is None
|
|
||||||
|
|
||||||
doc_resource = DocumentResource(collection="collection1", document="doc1")
|
|
||||||
assert doc_resource.parent == "collection1"
|
|
||||||
assert doc_resource.child == "doc1"
|
|
||||||
|
|
||||||
# Test permission checks with restricted actors
|
|
||||||
|
|
||||||
# Test 1: Global action - no restrictions (custom actions default to deny)
|
|
||||||
unrestricted_actor = {"id": "user1"}
|
|
||||||
allowed = await datasette.allowed(
|
|
||||||
action="manage-documents",
|
|
||||||
actor=unrestricted_actor,
|
|
||||||
)
|
|
||||||
assert allowed is False # Custom actions have no default allow
|
|
||||||
|
|
||||||
# Test 2: Global action - user2 has explicit permission via plugin hook
|
|
||||||
restricted_global = {"id": "user2", "_r": {"a": ["md"]}}
|
|
||||||
allowed = await datasette.allowed(
|
|
||||||
action="manage-documents",
|
|
||||||
actor=restricted_global,
|
|
||||||
)
|
|
||||||
assert allowed is True # Granted by plugin hook for user2
|
|
||||||
|
|
||||||
# Test 3: Global action - restricted but not in allowlist
|
|
||||||
restricted_no_access = {"id": "user3", "_r": {"a": ["vdc"]}}
|
|
||||||
allowed = await datasette.allowed(
|
|
||||||
action="manage-documents",
|
|
||||||
actor=restricted_no_access,
|
|
||||||
)
|
|
||||||
assert allowed is False # Not in allowlist
|
|
||||||
|
|
||||||
# Test 4: Collection-level action - allowed for specific collection
|
|
||||||
collection_resource = DocumentCollectionResource(collection="collection1")
|
|
||||||
# This one does not have an abbreviation:
|
|
||||||
restricted_collection = {
|
|
||||||
"id": "user4",
|
|
||||||
"_r": {"d": {"collection1": ["view-document-collection"]}},
|
|
||||||
}
|
|
||||||
allowed = await datasette.allowed(
|
|
||||||
action="view-document-collection",
|
|
||||||
resource=collection_resource,
|
|
||||||
actor=restricted_collection,
|
|
||||||
)
|
|
||||||
assert allowed is True # Allowed for collection1
|
|
||||||
|
|
||||||
# Test 5: Collection-level action - denied for different collection
|
|
||||||
collection2_resource = DocumentCollectionResource(collection="collection2")
|
|
||||||
allowed = await datasette.allowed(
|
|
||||||
action="view-document-collection",
|
|
||||||
resource=collection2_resource,
|
|
||||||
actor=restricted_collection,
|
|
||||||
)
|
|
||||||
assert allowed is False # Not allowed for collection2
|
|
||||||
|
|
||||||
# Test 6: Document-level action - allowed for specific document
|
|
||||||
doc1_resource = DocumentResource(collection="collection1", document="doc1")
|
|
||||||
restricted_document = {
|
|
||||||
"id": "user5",
|
|
||||||
"_r": {"r": {"collection1": {"doc1": ["vdoc"]}}},
|
|
||||||
}
|
|
||||||
allowed = await datasette.allowed(
|
|
||||||
action="view-document",
|
|
||||||
resource=doc1_resource,
|
|
||||||
actor=restricted_document,
|
|
||||||
)
|
|
||||||
assert allowed is True # Allowed for collection1/doc1
|
|
||||||
|
|
||||||
# Test 7: Document-level action - denied for different document
|
|
||||||
doc2_resource = DocumentResource(collection="collection1", document="doc2")
|
|
||||||
allowed = await datasette.allowed(
|
|
||||||
action="view-document",
|
|
||||||
resource=doc2_resource,
|
|
||||||
actor=restricted_document,
|
|
||||||
)
|
|
||||||
assert allowed is False # Not allowed for collection1/doc2
|
|
||||||
|
|
||||||
# Test 8: Document-level action - globally allowed
|
|
||||||
doc_resource = DocumentResource(collection="collection2", document="doc3")
|
|
||||||
restricted_all_docs = {"id": "user6", "_r": {"a": ["vdoc"]}}
|
|
||||||
allowed = await datasette.allowed(
|
|
||||||
action="view-document",
|
|
||||||
resource=doc_resource,
|
|
||||||
actor=restricted_all_docs,
|
|
||||||
)
|
|
||||||
assert allowed is True # Globally allowed for all documents
|
|
||||||
|
|
||||||
# Test 9: Verify hierarchy - collection access doesn't grant document access
|
|
||||||
collection_only_actor = {"id": "user7", "_r": {"d": {"collection1": ["vdc"]}}}
|
|
||||||
doc_resource = DocumentResource(collection="collection1", document="doc1")
|
|
||||||
allowed = await datasette.allowed(
|
|
||||||
action="view-document",
|
|
||||||
resource=doc_resource,
|
|
||||||
actor=collection_only_actor,
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
allowed is False
|
|
||||||
) # Collection permission doesn't grant document permission
|
|
||||||
|
|
||||||
finally:
|
|
||||||
# Unregister the plugin
|
|
||||||
pm.unregister(plugin)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.skip(reason="TODO")
|
@pytest.mark.skip(reason="TODO")
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"metadata,config,expected_metadata,expected_config",
|
"metadata,config,expected_metadata,expected_config",
|
||||||
|
|
|
||||||
|
|
@ -57,20 +57,12 @@ def test_publish_cloudrun_prompts_for_service(
|
||||||
"Service name: input-service"
|
"Service name: input-service"
|
||||||
) == result.output.strip()
|
) == result.output.strip()
|
||||||
assert 0 == result.exit_code
|
assert 0 == result.exit_code
|
||||||
tag = "us-docker.pkg.dev/myproject/datasette/datasette-input-service"
|
tag = "gcr.io/myproject/datasette-input-service"
|
||||||
mock_call.assert_has_calls(
|
mock_call.assert_has_calls(
|
||||||
[
|
[
|
||||||
mock.call(
|
|
||||||
"gcloud services enable artifactregistry.googleapis.com --project myproject --quiet",
|
|
||||||
shell=True,
|
|
||||||
),
|
|
||||||
mock.call(
|
|
||||||
"gcloud artifacts repositories describe datasette --project myproject --location us --quiet",
|
|
||||||
shell=True,
|
|
||||||
),
|
|
||||||
mock.call(f"gcloud builds submit --tag {tag}", shell=True),
|
mock.call(f"gcloud builds submit --tag {tag}", shell=True),
|
||||||
mock.call(
|
mock.call(
|
||||||
"gcloud run deploy --allow-unauthenticated --platform=managed --image {} input-service --max-instances 1".format(
|
"gcloud run deploy --allow-unauthenticated --platform=managed --image {} input-service".format(
|
||||||
tag
|
tag
|
||||||
),
|
),
|
||||||
shell=True,
|
shell=True,
|
||||||
|
|
@ -94,20 +86,12 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which, tmp_path_factory):
|
||||||
cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"]
|
cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"]
|
||||||
)
|
)
|
||||||
assert 0 == result.exit_code
|
assert 0 == result.exit_code
|
||||||
tag = f"us-docker.pkg.dev/{mock_output.return_value}/datasette/datasette-test"
|
tag = f"gcr.io/{mock_output.return_value}/datasette-test"
|
||||||
mock_call.assert_has_calls(
|
mock_call.assert_has_calls(
|
||||||
[
|
[
|
||||||
mock.call(
|
|
||||||
f"gcloud services enable artifactregistry.googleapis.com --project {mock_output.return_value} --quiet",
|
|
||||||
shell=True,
|
|
||||||
),
|
|
||||||
mock.call(
|
|
||||||
f"gcloud artifacts repositories describe datasette --project {mock_output.return_value} --location us --quiet",
|
|
||||||
shell=True,
|
|
||||||
),
|
|
||||||
mock.call(f"gcloud builds submit --tag {tag}", shell=True),
|
mock.call(f"gcloud builds submit --tag {tag}", shell=True),
|
||||||
mock.call(
|
mock.call(
|
||||||
"gcloud run deploy --allow-unauthenticated --platform=managed --image {} test --max-instances 1".format(
|
"gcloud run deploy --allow-unauthenticated --platform=managed --image {} test".format(
|
||||||
tag
|
tag
|
||||||
),
|
),
|
||||||
shell=True,
|
shell=True,
|
||||||
|
|
@ -183,7 +167,7 @@ def test_publish_cloudrun_memory_cpu(
|
||||||
assert 2 == result.exit_code
|
assert 2 == result.exit_code
|
||||||
return
|
return
|
||||||
assert 0 == result.exit_code
|
assert 0 == result.exit_code
|
||||||
tag = f"us-docker.pkg.dev/{mock_output.return_value}/datasette/datasette-test"
|
tag = f"gcr.io/{mock_output.return_value}/datasette-test"
|
||||||
expected_call = (
|
expected_call = (
|
||||||
"gcloud run deploy --allow-unauthenticated --platform=managed"
|
"gcloud run deploy --allow-unauthenticated --platform=managed"
|
||||||
" --image {} test".format(tag)
|
" --image {} test".format(tag)
|
||||||
|
|
@ -195,18 +179,8 @@ def test_publish_cloudrun_memory_cpu(
|
||||||
expected_call += " --cpu {}".format(cpu)
|
expected_call += " --cpu {}".format(cpu)
|
||||||
if timeout:
|
if timeout:
|
||||||
expected_build_call += f" --timeout {timeout}"
|
expected_build_call += f" --timeout {timeout}"
|
||||||
# max_instances defaults to 1
|
|
||||||
expected_call += " --max-instances 1"
|
|
||||||
mock_call.assert_has_calls(
|
mock_call.assert_has_calls(
|
||||||
[
|
[
|
||||||
mock.call(
|
|
||||||
f"gcloud services enable artifactregistry.googleapis.com --project {mock_output.return_value} --quiet",
|
|
||||||
shell=True,
|
|
||||||
),
|
|
||||||
mock.call(
|
|
||||||
f"gcloud artifacts repositories describe datasette --project {mock_output.return_value} --location us --quiet",
|
|
||||||
shell=True,
|
|
||||||
),
|
|
||||||
mock.call(expected_build_call, shell=True),
|
mock.call(expected_build_call, shell=True),
|
||||||
mock.call(
|
mock.call(
|
||||||
expected_call,
|
expected_call,
|
||||||
|
|
|
||||||
|
|
@ -1,315 +0,0 @@
|
||||||
import pytest
|
|
||||||
from datasette.app import Datasette
|
|
||||||
from datasette.permissions import PermissionSQL
|
|
||||||
from datasette.resources import TableResource
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_multiple_restriction_sources_intersect():
|
|
||||||
"""
|
|
||||||
Test that when multiple plugins return restriction_sql, they are INTERSECTed.
|
|
||||||
|
|
||||||
This tests the case where both actor _r restrictions AND a plugin
|
|
||||||
provide restriction_sql - both must pass for access to be granted.
|
|
||||||
"""
|
|
||||||
from datasette import hookimpl
|
|
||||||
|
|
||||||
class RestrictivePlugin:
|
|
||||||
__name__ = "RestrictivePlugin"
|
|
||||||
|
|
||||||
@hookimpl
|
|
||||||
def permission_resources_sql(self, datasette, actor, action):
|
|
||||||
# Plugin adds additional restriction: only db1_multi_intersect allowed
|
|
||||||
if action == "view-table":
|
|
||||||
return PermissionSQL(
|
|
||||||
restriction_sql="SELECT 'db1_multi_intersect' AS parent, NULL AS child",
|
|
||||||
params={},
|
|
||||||
)
|
|
||||||
return None
|
|
||||||
|
|
||||||
plugin = RestrictivePlugin()
|
|
||||||
|
|
||||||
ds = Datasette()
|
|
||||||
await ds.invoke_startup()
|
|
||||||
ds.pm.register(plugin, name="restrictive_plugin")
|
|
||||||
|
|
||||||
try:
|
|
||||||
db1 = ds.add_memory_database("db1_multi_intersect")
|
|
||||||
db2 = ds.add_memory_database("db2_multi_intersect")
|
|
||||||
await db1.execute_write("CREATE TABLE t1 (id INTEGER)")
|
|
||||||
await db2.execute_write("CREATE TABLE t1 (id INTEGER)")
|
|
||||||
await ds._refresh_schemas() # Populate catalog tables
|
|
||||||
|
|
||||||
# Actor has restrictions allowing both databases
|
|
||||||
# But plugin only allows db1_multi_intersect
|
|
||||||
# INTERSECT means only db1_multi_intersect/t1 should pass
|
|
||||||
actor = {
|
|
||||||
"id": "user",
|
|
||||||
"_r": {"d": {"db1_multi_intersect": ["vt"], "db2_multi_intersect": ["vt"]}},
|
|
||||||
}
|
|
||||||
|
|
||||||
page = await ds.allowed_resources("view-table", actor)
|
|
||||||
resources = {(r.parent, r.child) for r in page.resources}
|
|
||||||
|
|
||||||
# Should only see db1_multi_intersect/t1 (intersection of actor restrictions and plugin restrictions)
|
|
||||||
assert ("db1_multi_intersect", "t1") in resources
|
|
||||||
assert ("db2_multi_intersect", "t1") not in resources
|
|
||||||
finally:
|
|
||||||
ds.pm.unregister(name="restrictive_plugin")
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_restriction_sql_with_overlapping_databases_and_tables():
|
|
||||||
"""
|
|
||||||
Test actor with both database-level and table-level restrictions for same database.
|
|
||||||
|
|
||||||
When actor has:
|
|
||||||
- Database-level: db1_overlapping allowed (all tables)
|
|
||||||
- Table-level: db1_overlapping/t1 allowed
|
|
||||||
|
|
||||||
Both entries are UNION'd (OR'ed) within the actor's restrictions.
|
|
||||||
Database-level restriction allows ALL tables, so table-level is redundant.
|
|
||||||
"""
|
|
||||||
ds = Datasette()
|
|
||||||
await ds.invoke_startup()
|
|
||||||
db = ds.add_memory_database("db1_overlapping")
|
|
||||||
await db.execute_write("CREATE TABLE t1 (id INTEGER)")
|
|
||||||
await db.execute_write("CREATE TABLE t2 (id INTEGER)")
|
|
||||||
await ds._refresh_schemas()
|
|
||||||
|
|
||||||
# Actor has BOTH database-level (db1_overlapping all tables) AND table-level (db1_overlapping/t1 only)
|
|
||||||
actor = {
|
|
||||||
"id": "user",
|
|
||||||
"_r": {
|
|
||||||
"d": {
|
|
||||||
"db1_overlapping": ["vt"]
|
|
||||||
}, # Database-level: all tables in db1_overlapping
|
|
||||||
"r": {
|
|
||||||
"db1_overlapping": {"t1": ["vt"]}
|
|
||||||
}, # Table-level: only t1 in db1_overlapping
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
# Within actor restrictions, entries are UNION'd (OR'ed):
|
|
||||||
# - Database level allows: (db1_overlapping, NULL) → matches all tables via hierarchical matching
|
|
||||||
# - Table level allows: (db1_overlapping, t1) → redundant, already covered by database level
|
|
||||||
# Result: Both tables are allowed
|
|
||||||
page = await ds.allowed_resources("view-table", actor)
|
|
||||||
resources = {(r.parent, r.child) for r in page.resources}
|
|
||||||
|
|
||||||
assert ("db1_overlapping", "t1") in resources
|
|
||||||
# Database-level restriction allows all tables, so t2 is also allowed
|
|
||||||
assert ("db1_overlapping", "t2") in resources
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_restriction_sql_empty_allowlist_query():
|
|
||||||
"""
|
|
||||||
Test the specific SQL query generated when action is not in allowlist.
|
|
||||||
|
|
||||||
actor_restrictions_sql() returns "SELECT NULL AS parent, NULL AS child WHERE 0"
|
|
||||||
Verify this produces an empty result set.
|
|
||||||
"""
|
|
||||||
ds = Datasette()
|
|
||||||
await ds.invoke_startup()
|
|
||||||
db = ds.add_memory_database("db1_empty_allowlist")
|
|
||||||
await db.execute_write("CREATE TABLE t1 (id INTEGER)")
|
|
||||||
await ds._refresh_schemas()
|
|
||||||
|
|
||||||
# Actor has restrictions but action not in allowlist
|
|
||||||
actor = {"id": "user", "_r": {"r": {"db1_empty_allowlist": {"t1": ["vt"]}}}}
|
|
||||||
|
|
||||||
# Try to view-database (only view-table is in allowlist)
|
|
||||||
page = await ds.allowed_resources("view-database", actor)
|
|
||||||
|
|
||||||
# Should be empty
|
|
||||||
assert len(page.resources) == 0
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_restriction_sql_with_pagination():
|
|
||||||
"""
|
|
||||||
Test that restrictions work correctly with keyset pagination.
|
|
||||||
"""
|
|
||||||
ds = Datasette()
|
|
||||||
await ds.invoke_startup()
|
|
||||||
db = ds.add_memory_database("db1_pagination")
|
|
||||||
|
|
||||||
# Create many tables
|
|
||||||
for i in range(10):
|
|
||||||
await db.execute_write(f"CREATE TABLE t{i:02d} (id INTEGER)")
|
|
||||||
await ds._refresh_schemas()
|
|
||||||
|
|
||||||
# Actor restricted to only odd-numbered tables
|
|
||||||
restrictions = {"r": {"db1_pagination": {}}}
|
|
||||||
for i in range(10):
|
|
||||||
if i % 2 == 1: # Only odd tables
|
|
||||||
restrictions["r"]["db1_pagination"][f"t{i:02d}"] = ["vt"]
|
|
||||||
|
|
||||||
actor = {"id": "user", "_r": restrictions}
|
|
||||||
|
|
||||||
# Get first page with small limit
|
|
||||||
page1 = await ds.allowed_resources(
|
|
||||||
"view-table", actor, parent="db1_pagination", limit=2
|
|
||||||
)
|
|
||||||
assert len(page1.resources) == 2
|
|
||||||
assert page1.next is not None
|
|
||||||
|
|
||||||
# Get second page using next token
|
|
||||||
page2 = await ds.allowed_resources(
|
|
||||||
"view-table", actor, parent="db1_pagination", limit=2, next=page1.next
|
|
||||||
)
|
|
||||||
assert len(page2.resources) == 2
|
|
||||||
|
|
||||||
# Should have no overlap
|
|
||||||
page1_ids = {r.child for r in page1.resources}
|
|
||||||
page2_ids = {r.child for r in page2.resources}
|
|
||||||
assert page1_ids.isdisjoint(page2_ids)
|
|
||||||
|
|
||||||
# All should be odd-numbered tables
|
|
||||||
all_ids = page1_ids | page2_ids
|
|
||||||
for table_id in all_ids:
|
|
||||||
table_num = int(table_id[1:]) # Extract number from "t01", "t03", etc.
|
|
||||||
assert table_num % 2 == 1, f"Table {table_id} should be odd-numbered"
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_also_requires_with_restrictions():
|
|
||||||
"""
|
|
||||||
Test that also_requires actions properly respect restrictions.
|
|
||||||
|
|
||||||
execute-sql requires view-database. With restrictions, both must pass.
|
|
||||||
"""
|
|
||||||
ds = Datasette()
|
|
||||||
await ds.invoke_startup()
|
|
||||||
db1 = ds.add_memory_database("db1_also_requires")
|
|
||||||
db2 = ds.add_memory_database("db2_also_requires")
|
|
||||||
await ds._refresh_schemas()
|
|
||||||
|
|
||||||
# Actor restricted to only db1_also_requires for view-database
|
|
||||||
# execute-sql requires view-database, so should only work on db1_also_requires
|
|
||||||
actor = {
|
|
||||||
"id": "user",
|
|
||||||
"_r": {
|
|
||||||
"d": {
|
|
||||||
"db1_also_requires": ["vd", "es"],
|
|
||||||
"db2_also_requires": [
|
|
||||||
"es"
|
|
||||||
], # They have execute-sql but not view-database
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
# db1_also_requires should allow execute-sql
|
|
||||||
result = await ds.allowed(
|
|
||||||
action="execute-sql",
|
|
||||||
resource=TableResource("db1_also_requires", None),
|
|
||||||
actor=actor,
|
|
||||||
)
|
|
||||||
assert result is True
|
|
||||||
|
|
||||||
# db2_also_requires should not (they have execute-sql but not view-database)
|
|
||||||
result = await ds.allowed(
|
|
||||||
action="execute-sql",
|
|
||||||
resource=TableResource("db2_also_requires", None),
|
|
||||||
actor=actor,
|
|
||||||
)
|
|
||||||
assert result is False
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_restriction_abbreviations_and_full_names():
|
|
||||||
"""
|
|
||||||
Test that both abbreviations and full action names work in restrictions.
|
|
||||||
"""
|
|
||||||
ds = Datasette()
|
|
||||||
await ds.invoke_startup()
|
|
||||||
db = ds.add_memory_database("db1_abbrev")
|
|
||||||
await db.execute_write("CREATE TABLE t1 (id INTEGER)")
|
|
||||||
await ds._refresh_schemas()
|
|
||||||
|
|
||||||
# Test with abbreviation
|
|
||||||
actor_abbr = {"id": "user", "_r": {"r": {"db1_abbrev": {"t1": ["vt"]}}}}
|
|
||||||
result = await ds.allowed(
|
|
||||||
action="view-table",
|
|
||||||
resource=TableResource("db1_abbrev", "t1"),
|
|
||||||
actor=actor_abbr,
|
|
||||||
)
|
|
||||||
assert result is True
|
|
||||||
|
|
||||||
# Test with full name
|
|
||||||
actor_full = {"id": "user", "_r": {"r": {"db1_abbrev": {"t1": ["view-table"]}}}}
|
|
||||||
result = await ds.allowed(
|
|
||||||
action="view-table",
|
|
||||||
resource=TableResource("db1_abbrev", "t1"),
|
|
||||||
actor=actor_full,
|
|
||||||
)
|
|
||||||
assert result is True
|
|
||||||
|
|
||||||
# Test with mixed
|
|
||||||
actor_mixed = {"id": "user", "_r": {"d": {"db1_abbrev": ["view-database", "vt"]}}}
|
|
||||||
result = await ds.allowed(
|
|
||||||
action="view-table",
|
|
||||||
resource=TableResource("db1_abbrev", "t1"),
|
|
||||||
actor=actor_mixed,
|
|
||||||
)
|
|
||||||
assert result is True
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_permission_resources_sql_multiple_restriction_sources_intersect():
|
|
||||||
"""
|
|
||||||
Test that when multiple plugins return restriction_sql, they are INTERSECTed.
|
|
||||||
|
|
||||||
This tests the case where both actor _r restrictions AND a plugin
|
|
||||||
provide restriction_sql - both must pass for access to be granted.
|
|
||||||
"""
|
|
||||||
from datasette import hookimpl
|
|
||||||
|
|
||||||
class RestrictivePlugin:
|
|
||||||
__name__ = "RestrictivePlugin"
|
|
||||||
|
|
||||||
@hookimpl
|
|
||||||
def permission_resources_sql(self, datasette, actor, action):
|
|
||||||
# Plugin adds additional restriction: only db1_multi_restrictions allowed
|
|
||||||
if action == "view-table":
|
|
||||||
return PermissionSQL(
|
|
||||||
restriction_sql="SELECT 'db1_multi_restrictions' AS parent, NULL AS child",
|
|
||||||
params={},
|
|
||||||
)
|
|
||||||
return None
|
|
||||||
|
|
||||||
plugin = RestrictivePlugin()
|
|
||||||
|
|
||||||
ds = Datasette()
|
|
||||||
await ds.invoke_startup()
|
|
||||||
ds.pm.register(plugin, name="restrictive_plugin")
|
|
||||||
|
|
||||||
try:
|
|
||||||
db1 = ds.add_memory_database("db1_multi_restrictions")
|
|
||||||
db2 = ds.add_memory_database("db2_multi_restrictions")
|
|
||||||
await db1.execute_write("CREATE TABLE t1 (id INTEGER)")
|
|
||||||
await db2.execute_write("CREATE TABLE t1 (id INTEGER)")
|
|
||||||
await ds._refresh_schemas() # Populate catalog tables
|
|
||||||
|
|
||||||
# Actor has restrictions allowing both databases
|
|
||||||
# But plugin only allows db1
|
|
||||||
# INTERSECT means only db1/t1 should pass
|
|
||||||
actor = {
|
|
||||||
"id": "user",
|
|
||||||
"_r": {
|
|
||||||
"d": {
|
|
||||||
"db1_multi_restrictions": ["vt"],
|
|
||||||
"db2_multi_restrictions": ["vt"],
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
page = await ds.allowed_resources("view-table", actor)
|
|
||||||
resources = {(r.parent, r.child) for r in page.resources}
|
|
||||||
|
|
||||||
# Should only see db1/t1 (intersection of actor restrictions and plugin restrictions)
|
|
||||||
assert ("db1_multi_restrictions", "t1") in resources
|
|
||||||
assert ("db2_multi_restrictions", "t1") not in resources
|
|
||||||
finally:
|
|
||||||
ds.pm.unregister(name="restrictive_plugin")
|
|
||||||
|
|
@ -1,248 +0,0 @@
|
||||||
import asyncio
|
|
||||||
import pytest
|
|
||||||
import pytest_asyncio
|
|
||||||
from datasette.app import Datasette
|
|
||||||
|
|
||||||
|
|
||||||
@pytest_asyncio.fixture(scope="module")
|
|
||||||
async def schema_ds():
|
|
||||||
"""Create a Datasette instance with test databases and permission config."""
|
|
||||||
ds = Datasette(
|
|
||||||
config={
|
|
||||||
"databases": {
|
|
||||||
"schema_private_db": {"allow": {"id": "root"}},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create public database with multiple tables
|
|
||||||
public_db = ds.add_memory_database("schema_public_db")
|
|
||||||
await public_db.execute_write(
|
|
||||||
"CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY, name TEXT)"
|
|
||||||
)
|
|
||||||
await public_db.execute_write(
|
|
||||||
"CREATE TABLE IF NOT EXISTS posts (id INTEGER PRIMARY KEY, title TEXT)"
|
|
||||||
)
|
|
||||||
await public_db.execute_write(
|
|
||||||
"CREATE VIEW IF NOT EXISTS recent_posts AS SELECT * FROM posts ORDER BY id DESC"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create a database with restricted access (requires root permission)
|
|
||||||
private_db = ds.add_memory_database("schema_private_db")
|
|
||||||
await private_db.execute_write(
|
|
||||||
"CREATE TABLE IF NOT EXISTS secret_data (id INTEGER PRIMARY KEY, value TEXT)"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create an empty database
|
|
||||||
ds.add_memory_database("schema_empty_db")
|
|
||||||
|
|
||||||
return ds
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"format_ext,expected_in_content",
|
|
||||||
[
|
|
||||||
("json", None),
|
|
||||||
("md", ["# Schema for", "```sql"]),
|
|
||||||
("", ["Schema for", "CREATE TABLE"]),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
async def test_database_schema_formats(schema_ds, format_ext, expected_in_content):
|
|
||||||
"""Test /database/-/schema endpoint in different formats."""
|
|
||||||
url = "/schema_public_db/-/schema"
|
|
||||||
if format_ext:
|
|
||||||
url += f".{format_ext}"
|
|
||||||
response = await schema_ds.client.get(url)
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
if format_ext == "json":
|
|
||||||
data = response.json()
|
|
||||||
assert "database" in data
|
|
||||||
assert data["database"] == "schema_public_db"
|
|
||||||
assert "schema" in data
|
|
||||||
assert "CREATE TABLE users" in data["schema"]
|
|
||||||
else:
|
|
||||||
content = response.text
|
|
||||||
for expected in expected_in_content:
|
|
||||||
assert expected in content
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"format_ext,expected_in_content",
|
|
||||||
[
|
|
||||||
("json", None),
|
|
||||||
("md", ["# Schema for", "```sql"]),
|
|
||||||
("", ["Schema for all databases"]),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
async def test_instance_schema_formats(schema_ds, format_ext, expected_in_content):
|
|
||||||
"""Test /-/schema endpoint in different formats."""
|
|
||||||
url = "/-/schema"
|
|
||||||
if format_ext:
|
|
||||||
url += f".{format_ext}"
|
|
||||||
response = await schema_ds.client.get(url)
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
if format_ext == "json":
|
|
||||||
data = response.json()
|
|
||||||
assert "schemas" in data
|
|
||||||
assert isinstance(data["schemas"], list)
|
|
||||||
db_names = [item["database"] for item in data["schemas"]]
|
|
||||||
# Should see schema_public_db and schema_empty_db, but not schema_private_db (anonymous user)
|
|
||||||
assert "schema_public_db" in db_names
|
|
||||||
assert "schema_empty_db" in db_names
|
|
||||||
assert "schema_private_db" not in db_names
|
|
||||||
# Check schemas are present
|
|
||||||
for item in data["schemas"]:
|
|
||||||
if item["database"] == "schema_public_db":
|
|
||||||
assert "CREATE TABLE users" in item["schema"]
|
|
||||||
else:
|
|
||||||
content = response.text
|
|
||||||
for expected in expected_in_content:
|
|
||||||
assert expected in content
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"format_ext,expected_in_content",
|
|
||||||
[
|
|
||||||
("json", None),
|
|
||||||
("md", ["# Schema for", "```sql"]),
|
|
||||||
("", ["Schema for users"]),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
async def test_table_schema_formats(schema_ds, format_ext, expected_in_content):
|
|
||||||
"""Test /database/table/-/schema endpoint in different formats."""
|
|
||||||
url = "/schema_public_db/users/-/schema"
|
|
||||||
if format_ext:
|
|
||||||
url += f".{format_ext}"
|
|
||||||
response = await schema_ds.client.get(url)
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
if format_ext == "json":
|
|
||||||
data = response.json()
|
|
||||||
assert "database" in data
|
|
||||||
assert data["database"] == "schema_public_db"
|
|
||||||
assert "table" in data
|
|
||||||
assert data["table"] == "users"
|
|
||||||
assert "schema" in data
|
|
||||||
assert "CREATE TABLE users" in data["schema"]
|
|
||||||
else:
|
|
||||||
content = response.text
|
|
||||||
for expected in expected_in_content:
|
|
||||||
assert expected in content
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"url",
|
|
||||||
[
|
|
||||||
"/schema_private_db/-/schema.json",
|
|
||||||
"/schema_private_db/secret_data/-/schema.json",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
async def test_schema_permission_enforcement(schema_ds, url):
|
|
||||||
"""Test that permissions are enforced for schema endpoints."""
|
|
||||||
# Anonymous user should get 403
|
|
||||||
response = await schema_ds.client.get(url)
|
|
||||||
assert response.status_code == 403
|
|
||||||
|
|
||||||
# Authenticated user with permission should succeed
|
|
||||||
response = await schema_ds.client.get(
|
|
||||||
url,
|
|
||||||
cookies={"ds_actor": schema_ds.client.actor_cookie({"id": "root"})},
|
|
||||||
)
|
|
||||||
assert response.status_code == 200
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_instance_schema_respects_database_permissions(schema_ds):
|
|
||||||
"""Test that /-/schema only shows databases the user can view."""
|
|
||||||
# Anonymous user should only see public databases
|
|
||||||
response = await schema_ds.client.get("/-/schema.json")
|
|
||||||
assert response.status_code == 200
|
|
||||||
data = response.json()
|
|
||||||
db_names = [item["database"] for item in data["schemas"]]
|
|
||||||
assert "schema_public_db" in db_names
|
|
||||||
assert "schema_empty_db" in db_names
|
|
||||||
assert "schema_private_db" not in db_names
|
|
||||||
|
|
||||||
# Authenticated user should see all databases
|
|
||||||
response = await schema_ds.client.get(
|
|
||||||
"/-/schema.json",
|
|
||||||
cookies={"ds_actor": schema_ds.client.actor_cookie({"id": "root"})},
|
|
||||||
)
|
|
||||||
assert response.status_code == 200
|
|
||||||
data = response.json()
|
|
||||||
db_names = [item["database"] for item in data["schemas"]]
|
|
||||||
assert "schema_public_db" in db_names
|
|
||||||
assert "schema_empty_db" in db_names
|
|
||||||
assert "schema_private_db" in db_names
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_database_schema_with_multiple_tables(schema_ds):
|
|
||||||
"""Test schema with multiple tables in a database."""
|
|
||||||
response = await schema_ds.client.get("/schema_public_db/-/schema.json")
|
|
||||||
assert response.status_code == 200
|
|
||||||
data = response.json()
|
|
||||||
schema = data["schema"]
|
|
||||||
|
|
||||||
# All objects should be in the schema
|
|
||||||
assert "CREATE TABLE users" in schema
|
|
||||||
assert "CREATE TABLE posts" in schema
|
|
||||||
assert "CREATE VIEW recent_posts" in schema
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_empty_database_schema(schema_ds):
|
|
||||||
"""Test schema for an empty database."""
|
|
||||||
response = await schema_ds.client.get("/schema_empty_db/-/schema.json")
|
|
||||||
assert response.status_code == 200
|
|
||||||
data = response.json()
|
|
||||||
assert data["database"] == "schema_empty_db"
|
|
||||||
assert data["schema"] == ""
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_database_not_exists(schema_ds):
|
|
||||||
"""Test schema for a non-existent database returns 404."""
|
|
||||||
# Test JSON format
|
|
||||||
response = await schema_ds.client.get("/nonexistent_db/-/schema.json")
|
|
||||||
assert response.status_code == 404
|
|
||||||
data = response.json()
|
|
||||||
assert data["ok"] is False
|
|
||||||
assert "not found" in data["error"].lower()
|
|
||||||
|
|
||||||
# Test HTML format (returns text)
|
|
||||||
response = await schema_ds.client.get("/nonexistent_db/-/schema")
|
|
||||||
assert response.status_code == 404
|
|
||||||
assert "not found" in response.text.lower()
|
|
||||||
|
|
||||||
# Test Markdown format (returns text)
|
|
||||||
response = await schema_ds.client.get("/nonexistent_db/-/schema.md")
|
|
||||||
assert response.status_code == 404
|
|
||||||
assert "not found" in response.text.lower()
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_table_not_exists(schema_ds):
|
|
||||||
"""Test schema for a non-existent table returns 404."""
|
|
||||||
# Test JSON format
|
|
||||||
response = await schema_ds.client.get("/schema_public_db/nonexistent/-/schema.json")
|
|
||||||
assert response.status_code == 404
|
|
||||||
data = response.json()
|
|
||||||
assert data["ok"] is False
|
|
||||||
assert "not found" in data["error"].lower()
|
|
||||||
|
|
||||||
# Test HTML format (returns text)
|
|
||||||
response = await schema_ds.client.get("/schema_public_db/nonexistent/-/schema")
|
|
||||||
assert response.status_code == 404
|
|
||||||
assert "not found" in response.text.lower()
|
|
||||||
|
|
||||||
# Test Markdown format (returns text)
|
|
||||||
response = await schema_ds.client.get("/schema_public_db/nonexistent/-/schema.md")
|
|
||||||
assert response.status_code == 404
|
|
||||||
assert "not found" in response.text.lower()
|
|
||||||
|
|
@ -383,7 +383,6 @@ async def test_sortable_columns_metadata(ds_client):
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
@pytest.mark.xfail
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"path,expected_rows",
|
"path,expected_rows",
|
||||||
[
|
[
|
||||||
|
|
|
||||||
|
|
@ -13,6 +13,7 @@ def db():
|
||||||
|
|
||||||
path = tempfile.mktemp(suffix="demo.db")
|
path = tempfile.mktemp(suffix="demo.db")
|
||||||
db = ds.add_database(Database(ds, path=path))
|
db = ds.add_database(Database(ds, path=path))
|
||||||
|
print(path)
|
||||||
return db
|
return db
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -24,6 +25,7 @@ NO_RULES_SQL = (
|
||||||
def plugin_allow_all_for_user(user: str) -> Callable[[str], PermissionSQL]:
|
def plugin_allow_all_for_user(user: str) -> Callable[[str], PermissionSQL]:
|
||||||
def provider(action: str) -> PermissionSQL:
|
def provider(action: str) -> PermissionSQL:
|
||||||
return PermissionSQL(
|
return PermissionSQL(
|
||||||
|
"allow_all",
|
||||||
"""
|
"""
|
||||||
SELECT NULL AS parent, NULL AS child, 1 AS allow,
|
SELECT NULL AS parent, NULL AS child, 1 AS allow,
|
||||||
'global allow for ' || :allow_all_user || ' on ' || :allow_all_action AS reason
|
'global allow for ' || :allow_all_user || ' on ' || :allow_all_action AS reason
|
||||||
|
|
@ -40,6 +42,7 @@ def plugin_deny_specific_table(
|
||||||
) -> Callable[[str], PermissionSQL]:
|
) -> Callable[[str], PermissionSQL]:
|
||||||
def provider(action: str) -> PermissionSQL:
|
def provider(action: str) -> PermissionSQL:
|
||||||
return PermissionSQL(
|
return PermissionSQL(
|
||||||
|
"deny_specific_table",
|
||||||
"""
|
"""
|
||||||
SELECT :deny_specific_table_parent AS parent, :deny_specific_table_child AS child, 0 AS allow,
|
SELECT :deny_specific_table_parent AS parent, :deny_specific_table_child AS child, 0 AS allow,
|
||||||
'deny ' || :deny_specific_table_parent || '/' || :deny_specific_table_child || ' for ' || :deny_specific_table_user || ' on ' || :deny_specific_table_action AS reason
|
'deny ' || :deny_specific_table_parent || '/' || :deny_specific_table_child || ' for ' || :deny_specific_table_user || ' on ' || :deny_specific_table_action AS reason
|
||||||
|
|
@ -59,6 +62,7 @@ def plugin_deny_specific_table(
|
||||||
def plugin_org_policy_deny_parent(parent: str) -> Callable[[str], PermissionSQL]:
|
def plugin_org_policy_deny_parent(parent: str) -> Callable[[str], PermissionSQL]:
|
||||||
def provider(action: str) -> PermissionSQL:
|
def provider(action: str) -> PermissionSQL:
|
||||||
return PermissionSQL(
|
return PermissionSQL(
|
||||||
|
"org_policy_parent_deny",
|
||||||
"""
|
"""
|
||||||
SELECT :org_policy_parent_deny_parent AS parent, NULL AS child, 0 AS allow,
|
SELECT :org_policy_parent_deny_parent AS parent, NULL AS child, 0 AS allow,
|
||||||
'org policy: parent ' || :org_policy_parent_deny_parent || ' denied on ' || :org_policy_parent_deny_action AS reason
|
'org policy: parent ' || :org_policy_parent_deny_parent || ' denied on ' || :org_policy_parent_deny_action AS reason
|
||||||
|
|
@ -77,6 +81,7 @@ def plugin_allow_parent_for_user(
|
||||||
) -> Callable[[str], PermissionSQL]:
|
) -> Callable[[str], PermissionSQL]:
|
||||||
def provider(action: str) -> PermissionSQL:
|
def provider(action: str) -> PermissionSQL:
|
||||||
return PermissionSQL(
|
return PermissionSQL(
|
||||||
|
"allow_parent",
|
||||||
"""
|
"""
|
||||||
SELECT :allow_parent_parent AS parent, NULL AS child, 1 AS allow,
|
SELECT :allow_parent_parent AS parent, NULL AS child, 1 AS allow,
|
||||||
'allow full parent for ' || :allow_parent_user || ' on ' || :allow_parent_action AS reason
|
'allow full parent for ' || :allow_parent_user || ' on ' || :allow_parent_action AS reason
|
||||||
|
|
@ -97,6 +102,7 @@ def plugin_child_allow_for_user(
|
||||||
) -> Callable[[str], PermissionSQL]:
|
) -> Callable[[str], PermissionSQL]:
|
||||||
def provider(action: str) -> PermissionSQL:
|
def provider(action: str) -> PermissionSQL:
|
||||||
return PermissionSQL(
|
return PermissionSQL(
|
||||||
|
"allow_child",
|
||||||
"""
|
"""
|
||||||
SELECT :allow_child_parent AS parent, :allow_child_child AS child, 1 AS allow,
|
SELECT :allow_child_parent AS parent, :allow_child_child AS child, 1 AS allow,
|
||||||
'allow child for ' || :allow_child_user || ' on ' || :allow_child_action AS reason
|
'allow child for ' || :allow_child_user || ' on ' || :allow_child_action AS reason
|
||||||
|
|
@ -116,6 +122,7 @@ def plugin_child_allow_for_user(
|
||||||
def plugin_root_deny_for_all() -> Callable[[str], PermissionSQL]:
|
def plugin_root_deny_for_all() -> Callable[[str], PermissionSQL]:
|
||||||
def provider(action: str) -> PermissionSQL:
|
def provider(action: str) -> PermissionSQL:
|
||||||
return PermissionSQL(
|
return PermissionSQL(
|
||||||
|
"root_deny",
|
||||||
"""
|
"""
|
||||||
SELECT NULL AS parent, NULL AS child, 0 AS allow, 'root deny for all on ' || :root_deny_action AS reason
|
SELECT NULL AS parent, NULL AS child, 0 AS allow, 'root deny for all on ' || :root_deny_action AS reason
|
||||||
""",
|
""",
|
||||||
|
|
@ -130,6 +137,7 @@ def plugin_conflicting_same_child_rules(
|
||||||
) -> List[Callable[[str], PermissionSQL]]:
|
) -> List[Callable[[str], PermissionSQL]]:
|
||||||
def allow_provider(action: str) -> PermissionSQL:
|
def allow_provider(action: str) -> PermissionSQL:
|
||||||
return PermissionSQL(
|
return PermissionSQL(
|
||||||
|
"conflict_child_allow",
|
||||||
"""
|
"""
|
||||||
SELECT :conflict_child_allow_parent AS parent, :conflict_child_allow_child AS child, 1 AS allow,
|
SELECT :conflict_child_allow_parent AS parent, :conflict_child_allow_child AS child, 1 AS allow,
|
||||||
'team grant at child for ' || :conflict_child_allow_user || ' on ' || :conflict_child_allow_action AS reason
|
'team grant at child for ' || :conflict_child_allow_user || ' on ' || :conflict_child_allow_action AS reason
|
||||||
|
|
@ -145,6 +153,7 @@ def plugin_conflicting_same_child_rules(
|
||||||
|
|
||||||
def deny_provider(action: str) -> PermissionSQL:
|
def deny_provider(action: str) -> PermissionSQL:
|
||||||
return PermissionSQL(
|
return PermissionSQL(
|
||||||
|
"conflict_child_deny",
|
||||||
"""
|
"""
|
||||||
SELECT :conflict_child_deny_parent AS parent, :conflict_child_deny_child AS child, 0 AS allow,
|
SELECT :conflict_child_deny_parent AS parent, :conflict_child_deny_child AS child, 0 AS allow,
|
||||||
'exception deny at child for ' || :conflict_child_deny_user || ' on ' || :conflict_child_deny_action AS reason
|
'exception deny at child for ' || :conflict_child_deny_user || ' on ' || :conflict_child_deny_action AS reason
|
||||||
|
|
@ -166,10 +175,16 @@ def plugin_allow_all_for_action(
|
||||||
) -> Callable[[str], PermissionSQL]:
|
) -> Callable[[str], PermissionSQL]:
|
||||||
def provider(action: str) -> PermissionSQL:
|
def provider(action: str) -> PermissionSQL:
|
||||||
if action != allowed_action:
|
if action != allowed_action:
|
||||||
return PermissionSQL(NO_RULES_SQL)
|
return PermissionSQL(
|
||||||
|
f"allow_all_{allowed_action}_noop",
|
||||||
|
NO_RULES_SQL,
|
||||||
|
{},
|
||||||
|
)
|
||||||
|
source_name = f"allow_all_{allowed_action}"
|
||||||
# Sanitize parameter names by replacing hyphens with underscores
|
# Sanitize parameter names by replacing hyphens with underscores
|
||||||
param_prefix = action.replace("-", "_")
|
param_prefix = source_name.replace("-", "_")
|
||||||
return PermissionSQL(
|
return PermissionSQL(
|
||||||
|
source_name,
|
||||||
f"""
|
f"""
|
||||||
SELECT NULL AS parent, NULL AS child, 1 AS allow,
|
SELECT NULL AS parent, NULL AS child, 1 AS allow,
|
||||||
'global allow for ' || :{param_prefix}_user || ' on ' || :{param_prefix}_action AS reason
|
'global allow for ' || :{param_prefix}_user || ' on ' || :{param_prefix}_action AS reason
|
||||||
|
|
@ -498,6 +513,7 @@ async def test_actor_actor_id_action_parameters_available(db):
|
||||||
def plugin_using_all_parameters() -> Callable[[str], PermissionSQL]:
|
def plugin_using_all_parameters() -> Callable[[str], PermissionSQL]:
|
||||||
def provider(action: str) -> PermissionSQL:
|
def provider(action: str) -> PermissionSQL:
|
||||||
return PermissionSQL(
|
return PermissionSQL(
|
||||||
|
"test_all_params",
|
||||||
"""
|
"""
|
||||||
SELECT NULL AS parent, NULL AS child, 1 AS allow,
|
SELECT NULL AS parent, NULL AS child, 1 AS allow,
|
||||||
'Actor ID: ' || COALESCE(:actor_id, 'null') ||
|
'Actor ID: ' || COALESCE(:actor_id, 'null') ||
|
||||||
|
|
@ -505,7 +521,8 @@ async def test_actor_actor_id_action_parameters_available(db):
|
||||||
', Action: ' || :action AS reason
|
', Action: ' || :action AS reason
|
||||||
WHERE :actor_id = 'test_user' AND :action = 'view-table'
|
WHERE :actor_id = 'test_user' AND :action = 'view-table'
|
||||||
AND json_extract(:actor, '$.role') = 'admin'
|
AND json_extract(:actor, '$.role') = 'admin'
|
||||||
"""
|
""",
|
||||||
|
{},
|
||||||
)
|
)
|
||||||
|
|
||||||
return provider
|
return provider
|
||||||
|
|
@ -550,6 +567,7 @@ async def test_multiple_plugins_with_own_parameters(db):
|
||||||
if action != "view-table":
|
if action != "view-table":
|
||||||
return PermissionSQL("plugin_one", "SELECT NULL WHERE 0", {})
|
return PermissionSQL("plugin_one", "SELECT NULL WHERE 0", {})
|
||||||
return PermissionSQL(
|
return PermissionSQL(
|
||||||
|
"plugin_one",
|
||||||
"""
|
"""
|
||||||
SELECT database_name AS parent, table_name AS child,
|
SELECT database_name AS parent, table_name AS child,
|
||||||
1 AS allow, 'Plugin one used param: ' || :plugin1_param AS reason
|
1 AS allow, 'Plugin one used param: ' || :plugin1_param AS reason
|
||||||
|
|
@ -568,6 +586,7 @@ async def test_multiple_plugins_with_own_parameters(db):
|
||||||
if action != "view-table":
|
if action != "view-table":
|
||||||
return PermissionSQL("plugin_two", "SELECT NULL WHERE 0", {})
|
return PermissionSQL("plugin_two", "SELECT NULL WHERE 0", {})
|
||||||
return PermissionSQL(
|
return PermissionSQL(
|
||||||
|
"plugin_two",
|
||||||
"""
|
"""
|
||||||
SELECT database_name AS parent, table_name AS child,
|
SELECT database_name AS parent, table_name AS child,
|
||||||
1 AS allow, 'Plugin two used param: ' || :plugin2_param AS reason
|
1 AS allow, 'Plugin two used param: ' || :plugin2_param AS reason
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue