mirror of
https://github.com/simonw/datasette.git
synced 2025-12-10 16:51:24 +01:00
Compare commits
3 commits
main
...
asg017/hid
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
999b9f0353 | ||
|
|
86c5203451 | ||
|
|
751abbcc57 |
154 changed files with 2669 additions and 14168 deletions
2
.github/workflows/deploy-branch-preview.yml
vendored
2
.github/workflows/deploy-branch-preview.yml
vendored
|
|
@ -14,7 +14,7 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python 3.11
|
||||
uses: actions/setup-python@v6
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.11"
|
||||
- name: Install dependencies
|
||||
|
|
|
|||
29
.github/workflows/deploy-latest.yml
vendored
29
.github/workflows/deploy-latest.yml
vendored
|
|
@ -1,11 +1,10 @@
|
|||
name: Deploy latest.datasette.io
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
# - 1.0-dev
|
||||
- 1.0-dev
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
|
@ -15,12 +14,19 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out datasette
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v3
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
uses: actions/setup-python@v4
|
||||
# gcloud commmand breaks on higher Python versions, so stick with 3.9:
|
||||
with:
|
||||
python-version: "3.13"
|
||||
cache: pip
|
||||
python-version: "3.9"
|
||||
- uses: actions/cache@v3
|
||||
name: Configure pip caching
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pip-
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
|
|
@ -95,13 +101,12 @@ jobs:
|
|||
# jq '.plugins |= . + {"datasette-ephemeral-tables": {"table_ttl": 900}}' \
|
||||
# > metadata.json
|
||||
# cat metadata.json
|
||||
- id: auth
|
||||
name: Authenticate to Google Cloud
|
||||
uses: google-github-actions/auth@v3
|
||||
- name: Set up Cloud Run
|
||||
uses: google-github-actions/setup-gcloud@v0
|
||||
with:
|
||||
credentials_json: ${{ secrets.GCP_SA_KEY }}
|
||||
- name: Set up Cloud SDK
|
||||
uses: google-github-actions/setup-gcloud@v3
|
||||
version: '318.0.0'
|
||||
service_account_email: ${{ secrets.GCP_SA_EMAIL }}
|
||||
service_account_key: ${{ secrets.GCP_SA_KEY }}
|
||||
- name: Deploy to Cloud Run
|
||||
env:
|
||||
LATEST_DATASETTE_SECRET: ${{ secrets.LATEST_DATASETTE_SECRET }}
|
||||
|
|
|
|||
4
.github/workflows/prettier.yml
vendored
4
.github/workflows/prettier.yml
vendored
|
|
@ -10,8 +10,8 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out repo
|
||||
uses: actions/checkout@v4
|
||||
- uses: actions/cache@v4
|
||||
uses: actions/checkout@v2
|
||||
- uses: actions/cache@v2
|
||||
name: Configure npm caching
|
||||
with:
|
||||
path: ~/.npm
|
||||
|
|
|
|||
29
.github/workflows/publish.yml
vendored
29
.github/workflows/publish.yml
vendored
|
|
@ -12,15 +12,15 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
|
||||
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v6
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: pip
|
||||
cache-dependency-path: pyproject.toml
|
||||
cache-dependency-path: setup.py
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pip install -e '.[test]'
|
||||
|
|
@ -37,11 +37,11 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.13'
|
||||
python-version: '3.12'
|
||||
cache: pip
|
||||
cache-dependency-path: pyproject.toml
|
||||
cache-dependency-path: setup.py
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pip install setuptools wheel build
|
||||
|
|
@ -58,11 +58,11 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.10'
|
||||
python-version: '3.9'
|
||||
cache: pip
|
||||
cache-dependency-path: pyproject.toml
|
||||
cache-dependency-path: setup.py
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install -e .[docs]
|
||||
|
|
@ -73,13 +73,12 @@ jobs:
|
|||
DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build
|
||||
sphinx-to-sqlite ../docs.db _build
|
||||
cd ..
|
||||
- id: auth
|
||||
name: Authenticate to Google Cloud
|
||||
uses: google-github-actions/auth@v2
|
||||
- name: Set up Cloud Run
|
||||
uses: google-github-actions/setup-gcloud@v0
|
||||
with:
|
||||
credentials_json: ${{ secrets.GCP_SA_KEY }}
|
||||
- name: Set up Cloud SDK
|
||||
uses: google-github-actions/setup-gcloud@v3
|
||||
version: '318.0.0'
|
||||
service_account_email: ${{ secrets.GCP_SA_EMAIL }}
|
||||
service_account_key: ${{ secrets.GCP_SA_KEY }}
|
||||
- name: Deploy stable-docs.datasette.io to Cloud Run
|
||||
run: |-
|
||||
gcloud config set run/region us-central1
|
||||
|
|
|
|||
4
.github/workflows/spellcheck.yml
vendored
4
.github/workflows/spellcheck.yml
vendored
|
|
@ -11,11 +11,11 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
cache-dependency-path: '**/pyproject.toml'
|
||||
cache-dependency-path: '**/setup.py'
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pip install -e '.[docs]'
|
||||
|
|
|
|||
76
.github/workflows/stable-docs.yml
vendored
76
.github/workflows/stable-docs.yml
vendored
|
|
@ -1,76 +0,0 @@
|
|||
name: Update Stable Docs
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
update_stable_docs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0 # We need all commits to find docs/ changes
|
||||
- name: Set up Git user
|
||||
run: |
|
||||
git config user.name "Automated"
|
||||
git config user.email "actions@users.noreply.github.com"
|
||||
- name: Create stable branch if it does not yet exist
|
||||
run: |
|
||||
if ! git ls-remote --heads origin stable | grep -qE '\bstable\b'; then
|
||||
# Make sure we have all tags locally
|
||||
git fetch --tags --quiet
|
||||
|
||||
# Latest tag that is just numbers and dots (optionally prefixed with 'v')
|
||||
# e.g., 0.65.2 or v0.65.2 — excludes 1.0a20, 1.0-rc1, etc.
|
||||
LATEST_RELEASE=$(
|
||||
git tag -l --sort=-v:refname \
|
||||
| grep -E '^v?[0-9]+(\.[0-9]+){1,3}$' \
|
||||
| head -n1
|
||||
)
|
||||
|
||||
git checkout -b stable
|
||||
|
||||
# If there are any stable releases, copy docs/ from the most recent
|
||||
if [ -n "$LATEST_RELEASE" ]; then
|
||||
rm -rf docs/
|
||||
git checkout "$LATEST_RELEASE" -- docs/ || true
|
||||
fi
|
||||
|
||||
git commit -m "Populate docs/ from $LATEST_RELEASE" || echo "No changes"
|
||||
git push -u origin stable
|
||||
fi
|
||||
- name: Handle Release
|
||||
if: github.event_name == 'release' && !github.event.release.prerelease
|
||||
run: |
|
||||
git fetch --all
|
||||
git checkout stable
|
||||
git reset --hard ${GITHUB_REF#refs/tags/}
|
||||
git push origin stable --force
|
||||
- name: Handle Commit to Main
|
||||
if: contains(github.event.head_commit.message, '!stable-docs')
|
||||
run: |
|
||||
git fetch origin
|
||||
git checkout -b stable origin/stable
|
||||
# Get the list of modified files in docs/ from the current commit
|
||||
FILES=$(git diff-tree --no-commit-id --name-only -r ${{ github.sha }} -- docs/)
|
||||
# Check if the list of files is non-empty
|
||||
if [[ -n "$FILES" ]]; then
|
||||
# Checkout those files to the stable branch to over-write with their contents
|
||||
for FILE in $FILES; do
|
||||
git checkout ${{ github.sha }} -- $FILE
|
||||
done
|
||||
git add docs/
|
||||
git commit -m "Doc changes from ${{ github.sha }}"
|
||||
git push origin stable
|
||||
else
|
||||
echo "No changes to docs/ in this commit."
|
||||
exit 0
|
||||
fi
|
||||
6
.github/workflows/test-coverage.yml
vendored
6
.github/workflows/test-coverage.yml
vendored
|
|
@ -17,11 +17,11 @@ jobs:
|
|||
- name: Check out datasette
|
||||
uses: actions/checkout@v4
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.12'
|
||||
cache: 'pip'
|
||||
cache-dependency-path: '**/pyproject.toml'
|
||||
cache-dependency-path: '**/setup.py'
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
|
|
@ -31,7 +31,7 @@ jobs:
|
|||
run: |-
|
||||
ls -lah
|
||||
cat .coveragerc
|
||||
pytest -m "not serial" --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term -x
|
||||
pytest --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term
|
||||
ls -lah
|
||||
- name: Upload coverage report
|
||||
uses: codecov/codecov-action@v1
|
||||
|
|
|
|||
8
.github/workflows/test-pyodide.yml
vendored
8
.github/workflows/test-pyodide.yml
vendored
|
|
@ -12,15 +12,15 @@ jobs:
|
|||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python 3.10
|
||||
uses: actions/setup-python@v6
|
||||
uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: "3.10"
|
||||
cache: 'pip'
|
||||
cache-dependency-path: '**/pyproject.toml'
|
||||
cache-dependency-path: '**/setup.py'
|
||||
- name: Cache Playwright browsers
|
||||
uses: actions/cache@v4
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: ~/.cache/ms-playwright/
|
||||
key: ${{ runner.os }}-browsers
|
||||
|
|
|
|||
6
.github/workflows/test-sqlite-support.yml
vendored
6
.github/workflows/test-sqlite-support.yml
vendored
|
|
@ -12,7 +12,7 @@ jobs:
|
|||
strategy:
|
||||
matrix:
|
||||
platform: [ubuntu-latest]
|
||||
python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
|
||||
python-version: [ "3.8", "3.9", "3.10", "3.11", "3.12"]
|
||||
sqlite-version: [
|
||||
#"3", # latest version
|
||||
"3.46",
|
||||
|
|
@ -27,12 +27,12 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v6
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
allow-prereleases: true
|
||||
cache: pip
|
||||
cache-dependency-path: pyproject.toml
|
||||
cache-dependency-path: setup.py
|
||||
- name: Set up SQLite ${{ matrix.sqlite-version }}
|
||||
uses: asg017/sqlite-versions@71ea0de37ae739c33e447af91ba71dda8fcf22e6
|
||||
with:
|
||||
|
|
|
|||
13
.github/workflows/test.yml
vendored
13
.github/workflows/test.yml
vendored
|
|
@ -10,16 +10,16 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
|
||||
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v6
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
allow-prereleases: true
|
||||
cache: pip
|
||||
cache-dependency-path: pyproject.toml
|
||||
cache-dependency-path: setup.py
|
||||
- name: Build extension for --load-extension test
|
||||
run: |-
|
||||
(cd tests && gcc ext.c -fPIC -shared -o ext.so)
|
||||
|
|
@ -33,15 +33,16 @@ jobs:
|
|||
pytest -m "serial"
|
||||
# And the test that exceeds a localhost HTTPS server
|
||||
tests/test_datasette_https_server.sh
|
||||
- name: Install docs dependencies
|
||||
- name: Install docs dependencies on Python 3.9+
|
||||
if: matrix.python-version != '3.8'
|
||||
run: |
|
||||
pip install -e '.[docs]'
|
||||
- name: Black
|
||||
run: black --check .
|
||||
- name: Check if cog needs to be run
|
||||
if: matrix.python-version != '3.8'
|
||||
run: |
|
||||
cog --check docs/*.rst
|
||||
- name: Check if blacken-docs needs to be run
|
||||
if: matrix.python-version != '3.8'
|
||||
run: |
|
||||
# This fails on syntax errors, or a diff was applied
|
||||
blacken-docs -l 60 docs/*.rst
|
||||
|
|
|
|||
3
.github/workflows/tmate.yml
vendored
3
.github/workflows/tmate.yml
vendored
|
|
@ -5,7 +5,6 @@ on:
|
|||
|
||||
permissions:
|
||||
contents: read
|
||||
models: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
|
@ -14,5 +13,3 @@ jobs:
|
|||
- uses: actions/checkout@v2
|
||||
- name: Setup tmate session
|
||||
uses: mxschmitt/action-tmate@v3
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
|
|
|||
5
.gitignore
vendored
5
.gitignore
vendored
|
|
@ -5,9 +5,6 @@ scratchpad
|
|||
|
||||
.vscode
|
||||
|
||||
uv.lock
|
||||
data.db
|
||||
|
||||
# We don't use Pipfile, so ignore them
|
||||
Pipfile
|
||||
Pipfile.lock
|
||||
|
|
@ -126,4 +123,4 @@ node_modules
|
|||
# include it in source control.
|
||||
tests/*.dylib
|
||||
tests/*.so
|
||||
tests/*.dll
|
||||
tests/*.dll
|
||||
52
Justfile
52
Justfile
|
|
@ -5,52 +5,38 @@ export DATASETTE_SECRET := "not_a_secret"
|
|||
|
||||
# Setup project
|
||||
@init:
|
||||
uv sync --extra test --extra docs
|
||||
pipenv run pip install -e '.[test,docs]'
|
||||
|
||||
# Run pytest with supplied options
|
||||
@test *options: init
|
||||
uv run pytest -n auto {{options}}
|
||||
@test *options:
|
||||
pipenv run pytest {{options}}
|
||||
|
||||
@codespell:
|
||||
uv run codespell README.md --ignore-words docs/codespell-ignore-words.txt
|
||||
uv run codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt
|
||||
uv run codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt
|
||||
uv run codespell tests --ignore-words docs/codespell-ignore-words.txt
|
||||
pipenv run codespell README.md --ignore-words docs/codespell-ignore-words.txt
|
||||
pipenv run codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt
|
||||
pipenv run codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt
|
||||
pipenv run codespell tests --ignore-words docs/codespell-ignore-words.txt
|
||||
|
||||
# Run linters: black, flake8, mypy, cog
|
||||
@lint: codespell
|
||||
uv run black . --check
|
||||
uv run flake8
|
||||
uv run --extra test cog --check README.md docs/*.rst
|
||||
pipenv run black . --check
|
||||
pipenv run flake8
|
||||
pipenv run cog --check README.md docs/*.rst
|
||||
|
||||
# Rebuild docs with cog
|
||||
@cog:
|
||||
uv run --extra test cog -r README.md docs/*.rst
|
||||
pipenv run cog -r README.md docs/*.rst
|
||||
|
||||
# Serve live docs on localhost:8000
|
||||
@docs: cog blacken-docs
|
||||
uv run --extra docs make -C docs livehtml
|
||||
|
||||
# Build docs as static HTML
|
||||
@docs-build: cog blacken-docs
|
||||
rm -rf docs/_build && cd docs && uv run make html
|
||||
@docs: cog
|
||||
pipenv run blacken-docs -l 60 docs/*.rst
|
||||
cd docs && pipenv run make livehtml
|
||||
|
||||
# Apply Black
|
||||
@black:
|
||||
uv run black .
|
||||
pipenv run black .
|
||||
|
||||
# Apply blacken-docs
|
||||
@blacken-docs:
|
||||
uv run blacken-docs -l 60 docs/*.rst
|
||||
|
||||
# Apply prettier
|
||||
@prettier:
|
||||
npm run fix
|
||||
|
||||
# Format code with both black and prettier
|
||||
@format: black prettier blacken-docs
|
||||
|
||||
@serve *options:
|
||||
uv run sqlite-utils create-database data.db
|
||||
uv run sqlite-utils create-table data.db docs id integer title text --pk id --ignore
|
||||
uv run python -m datasette data.db --root --reload {{options}}
|
||||
@serve:
|
||||
pipenv run sqlite-utils create-database data.db
|
||||
pipenv run sqlite-utils create-table data.db docs id integer title text --pk id --ignore
|
||||
pipenv run python -m datasette data.db --root --reload
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ Datasette is a tool for exploring and publishing data. It helps people take data
|
|||
|
||||
Datasette is aimed at data journalists, museum curators, archivists, local governments, scientists, researchers and anyone else who has data that they wish to share with the world.
|
||||
|
||||
[Explore a demo](https://datasette.io/global-power-plants/global-power-plants), watch [a video about the project](https://simonwillison.net/2021/Feb/7/video/) or try it out [on GitHub Codespaces](https://github.com/datasette/datasette-studio).
|
||||
[Explore a demo](https://global-power-plants.datasettes.com/global-power-plants/global-power-plants), watch [a video about the project](https://simonwillison.net/2021/Feb/7/video/) or try it out by [uploading and publishing your own CSV data](https://docs.datasette.io/en/stable/getting_started.html#try-datasette-without-installing-anything-using-glitch).
|
||||
|
||||
* [datasette.io](https://datasette.io/) is the official project website
|
||||
* Latest [Datasette News](https://datasette.io/news)
|
||||
|
|
|
|||
893
datasette/app.py
893
datasette/app.py
File diff suppressed because it is too large
Load diff
114
datasette/cli.py
114
datasette/cli.py
|
|
@ -42,18 +42,6 @@ from .utils.sqlite import sqlite3
|
|||
from .utils.testing import TestClient
|
||||
from .version import __version__
|
||||
|
||||
|
||||
def run_sync(coro_func):
|
||||
"""Run an async callable to completion on a fresh event loop."""
|
||||
loop = asyncio.new_event_loop()
|
||||
try:
|
||||
asyncio.set_event_loop(loop)
|
||||
return loop.run_until_complete(coro_func())
|
||||
finally:
|
||||
asyncio.set_event_loop(None)
|
||||
loop.close()
|
||||
|
||||
|
||||
# Use Rich for tracebacks if it is installed
|
||||
try:
|
||||
from rich.traceback import install
|
||||
|
|
@ -97,7 +85,7 @@ def sqlite_extensions(fn):
|
|||
"sqlite_extensions",
|
||||
"--load-extension",
|
||||
type=LoadExtension(),
|
||||
envvar="DATASETTE_LOAD_EXTENSION",
|
||||
envvar="SQLITE_EXTENSIONS",
|
||||
multiple=True,
|
||||
help="Path to a SQLite extension to load, and optional entrypoint",
|
||||
)(fn)
|
||||
|
|
@ -146,7 +134,9 @@ def inspect(files, inspect_file, sqlite_extensions):
|
|||
This can then be passed to "datasette --inspect-file" to speed up count
|
||||
operations against immutable database files.
|
||||
"""
|
||||
inspect_data = run_sync(lambda: inspect_(files, sqlite_extensions))
|
||||
app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions)
|
||||
loop = asyncio.get_event_loop()
|
||||
inspect_data = loop.run_until_complete(inspect_(files, sqlite_extensions))
|
||||
if inspect_file == "-":
|
||||
sys.stdout.write(json.dumps(inspect_data, indent=2))
|
||||
else:
|
||||
|
|
@ -438,20 +428,10 @@ def uninstall(packages, yes):
|
|||
help="Output URL that sets a cookie authenticating the root user",
|
||||
is_flag=True,
|
||||
)
|
||||
@click.option(
|
||||
"--default-deny",
|
||||
help="Deny all permissions by default",
|
||||
is_flag=True,
|
||||
)
|
||||
@click.option(
|
||||
"--get",
|
||||
help="Run an HTTP GET request against this path, print results and exit",
|
||||
)
|
||||
@click.option(
|
||||
"--headers",
|
||||
is_flag=True,
|
||||
help="Include HTTP headers in --get output",
|
||||
)
|
||||
@click.option(
|
||||
"--token",
|
||||
help="API token to send with --get requests",
|
||||
|
|
@ -488,12 +468,10 @@ def uninstall(packages, yes):
|
|||
@click.option(
|
||||
"--ssl-keyfile",
|
||||
help="SSL key file",
|
||||
envvar="DATASETTE_SSL_KEYFILE",
|
||||
)
|
||||
@click.option(
|
||||
"--ssl-certfile",
|
||||
help="SSL certificate file",
|
||||
envvar="DATASETTE_SSL_CERTFILE",
|
||||
)
|
||||
@click.option(
|
||||
"--internal",
|
||||
|
|
@ -519,9 +497,7 @@ def serve(
|
|||
settings,
|
||||
secret,
|
||||
root,
|
||||
default_deny,
|
||||
get,
|
||||
headers,
|
||||
token,
|
||||
actor,
|
||||
version_note,
|
||||
|
|
@ -600,23 +576,15 @@ def serve(
|
|||
crossdb=crossdb,
|
||||
nolock=nolock,
|
||||
internal=internal,
|
||||
default_deny=default_deny,
|
||||
)
|
||||
|
||||
# Separate directories from files
|
||||
directories = [f for f in files if os.path.isdir(f)]
|
||||
file_paths = [f for f in files if not os.path.isdir(f)]
|
||||
|
||||
# Handle config_dir - only one directory allowed
|
||||
if len(directories) > 1:
|
||||
raise click.ClickException(
|
||||
"Cannot pass multiple directories. Pass a single directory as config_dir."
|
||||
)
|
||||
elif len(directories) == 1:
|
||||
kwargs["config_dir"] = pathlib.Path(directories[0])
|
||||
# if files is a single directory, use that as config_dir=
|
||||
if 1 == len(files) and os.path.isdir(files[0]):
|
||||
kwargs["config_dir"] = pathlib.Path(files[0])
|
||||
files = []
|
||||
|
||||
# Verify list of files, create if needed (and --create)
|
||||
for file in file_paths:
|
||||
for file in files:
|
||||
if not pathlib.Path(file).exists():
|
||||
if create:
|
||||
sqlite3.connect(file).execute("vacuum")
|
||||
|
|
@ -627,32 +595,8 @@ def serve(
|
|||
)
|
||||
)
|
||||
|
||||
# Check for duplicate files by resolving all paths to their absolute forms
|
||||
# Collect all database files that will be loaded (explicit files + config_dir files)
|
||||
all_db_files = []
|
||||
|
||||
# Add explicit files
|
||||
for file in file_paths:
|
||||
all_db_files.append((file, pathlib.Path(file).resolve()))
|
||||
|
||||
# Add config_dir databases if config_dir is set
|
||||
if "config_dir" in kwargs:
|
||||
config_dir = kwargs["config_dir"]
|
||||
for ext in ("db", "sqlite", "sqlite3"):
|
||||
for db_file in config_dir.glob(f"*.{ext}"):
|
||||
all_db_files.append((str(db_file), db_file.resolve()))
|
||||
|
||||
# Check for duplicates
|
||||
seen = {}
|
||||
for original_path, resolved_path in all_db_files:
|
||||
if resolved_path in seen:
|
||||
raise click.ClickException(
|
||||
f"Duplicate database file: '{original_path}' and '{seen[resolved_path]}' "
|
||||
f"both refer to {resolved_path}"
|
||||
)
|
||||
seen[resolved_path] = original_path
|
||||
|
||||
files = file_paths
|
||||
# De-duplicate files so 'datasette db.db db.db' only attaches one /db
|
||||
files = list(dict.fromkeys(files))
|
||||
|
||||
try:
|
||||
ds = Datasette(files, **kwargs)
|
||||
|
|
@ -666,38 +610,24 @@ def serve(
|
|||
return ds
|
||||
|
||||
# Run the "startup" plugin hooks
|
||||
run_sync(ds.invoke_startup)
|
||||
asyncio.get_event_loop().run_until_complete(ds.invoke_startup())
|
||||
|
||||
# Run async soundness checks - but only if we're not under pytest
|
||||
run_sync(lambda: check_databases(ds))
|
||||
|
||||
if headers and not get:
|
||||
raise click.ClickException("--headers can only be used with --get")
|
||||
asyncio.get_event_loop().run_until_complete(check_databases(ds))
|
||||
|
||||
if token and not get:
|
||||
raise click.ClickException("--token can only be used with --get")
|
||||
|
||||
if get:
|
||||
client = TestClient(ds)
|
||||
request_headers = {}
|
||||
headers = {}
|
||||
if token:
|
||||
request_headers["Authorization"] = "Bearer {}".format(token)
|
||||
headers["Authorization"] = "Bearer {}".format(token)
|
||||
cookies = {}
|
||||
if actor:
|
||||
cookies["ds_actor"] = client.actor_cookie(json.loads(actor))
|
||||
response = client.get(get, headers=request_headers, cookies=cookies)
|
||||
|
||||
if headers:
|
||||
# Output HTTP status code, headers, two newlines, then the response body
|
||||
click.echo(f"HTTP/1.1 {response.status}")
|
||||
for key, value in response.headers.items():
|
||||
click.echo(f"{key}: {value}")
|
||||
if response.text:
|
||||
click.echo()
|
||||
click.echo(response.text)
|
||||
else:
|
||||
click.echo(response.text)
|
||||
|
||||
response = client.get(get, headers=headers, cookies=cookies)
|
||||
click.echo(response.text)
|
||||
exit_code = 0 if response.status == 200 else 1
|
||||
sys.exit(exit_code)
|
||||
return
|
||||
|
|
@ -705,7 +635,6 @@ def serve(
|
|||
# Start the server
|
||||
url = None
|
||||
if root:
|
||||
ds.root_enabled = True
|
||||
url = "http://{}:{}{}?token={}".format(
|
||||
host, port, ds.urls.path("-/auth-token"), ds._root_token
|
||||
)
|
||||
|
|
@ -713,7 +642,9 @@ def serve(
|
|||
if open_browser:
|
||||
if url is None:
|
||||
# Figure out most convenient URL - to table, database or homepage
|
||||
path = run_sync(lambda: initial_path_for_datasette(ds))
|
||||
path = asyncio.get_event_loop().run_until_complete(
|
||||
initial_path_for_datasette(ds)
|
||||
)
|
||||
url = f"http://{host}:{port}{path}"
|
||||
webbrowser.open(url)
|
||||
uvicorn_kwargs = dict(
|
||||
|
|
@ -815,7 +746,8 @@ def create_token(
|
|||
ds = Datasette(secret=secret, plugins_dir=plugins_dir)
|
||||
|
||||
# Run ds.invoke_startup() in an event loop
|
||||
run_sync(ds.invoke_startup)
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.run_until_complete(ds.invoke_startup())
|
||||
|
||||
# Warn about any unknown actions
|
||||
actions = []
|
||||
|
|
@ -823,7 +755,7 @@ def create_token(
|
|||
actions.extend([p[1] for p in databases])
|
||||
actions.extend([p[2] for p in resources])
|
||||
for action in actions:
|
||||
if not ds.actions.get(action):
|
||||
if not ds.permissions.get(action):
|
||||
click.secho(
|
||||
f" Unknown permission: {action} ",
|
||||
fg="red",
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@ from collections import namedtuple
|
|||
from pathlib import Path
|
||||
import janus
|
||||
import queue
|
||||
import sqlite_utils
|
||||
import sys
|
||||
import threading
|
||||
import uuid
|
||||
|
|
@ -30,10 +29,6 @@ AttachedDatabase = namedtuple("AttachedDatabase", ("seq", "name", "file"))
|
|||
|
||||
|
||||
class Database:
|
||||
# For table counts stop at this many rows:
|
||||
count_limit = 10000
|
||||
_thread_local_id_counter = 1
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ds,
|
||||
|
|
@ -44,8 +39,6 @@ class Database:
|
|||
mode=None,
|
||||
):
|
||||
self.name = None
|
||||
self._thread_local_id = f"x{self._thread_local_id_counter}"
|
||||
Database._thread_local_id_counter += 1
|
||||
self.route = None
|
||||
self.ds = ds
|
||||
self.path = path
|
||||
|
|
@ -143,9 +136,7 @@ class Database:
|
|||
return conn.executescript(sql)
|
||||
|
||||
with trace("sql", database=self.name, sql=sql.strip(), executescript=True):
|
||||
results = await self.execute_write_fn(
|
||||
_inner, block=block, transaction=False
|
||||
)
|
||||
results = await self.execute_write_fn(_inner, block=block)
|
||||
return results
|
||||
|
||||
async def execute_write_many(self, sql, params_seq, block=True):
|
||||
|
|
@ -283,11 +274,11 @@ class Database:
|
|||
|
||||
# threaded mode
|
||||
def in_thread():
|
||||
conn = getattr(connections, self._thread_local_id, None)
|
||||
conn = getattr(connections, self.name, None)
|
||||
if not conn:
|
||||
conn = self.connect()
|
||||
self.ds._prepare_connection(conn, self.name)
|
||||
setattr(connections, self._thread_local_id, conn)
|
||||
setattr(connections, self.name, conn)
|
||||
return fn(conn)
|
||||
|
||||
return await asyncio.get_event_loop().run_in_executor(
|
||||
|
|
@ -385,7 +376,7 @@ class Database:
|
|||
try:
|
||||
table_count = (
|
||||
await self.execute(
|
||||
f"select count(*) from (select * from [{table}] limit {self.count_limit + 1})",
|
||||
f"select count(*) from [{table}]",
|
||||
custom_time_limit=limit,
|
||||
)
|
||||
).rows[0][0]
|
||||
|
|
@ -410,12 +401,7 @@ class Database:
|
|||
# But SQLite prior to 3.16.0 doesn't support pragma functions
|
||||
results = await self.execute("PRAGMA database_list;")
|
||||
# {'seq': 0, 'name': 'main', 'file': ''}
|
||||
return [
|
||||
AttachedDatabase(*row)
|
||||
for row in results.rows
|
||||
# Filter out the SQLite internal "temp" database, refs #2557
|
||||
if row["seq"] > 0 and row["name"] != "temp"
|
||||
]
|
||||
return [AttachedDatabase(*row) for row in results.rows if row["seq"] > 0]
|
||||
|
||||
async def table_exists(self, table):
|
||||
results = await self.execute(
|
||||
|
|
@ -453,33 +439,7 @@ class Database:
|
|||
)
|
||||
if explicit_label_column:
|
||||
return explicit_label_column
|
||||
|
||||
def column_details(conn):
|
||||
# Returns {column_name: (type, is_unique)}
|
||||
db = sqlite_utils.Database(conn)
|
||||
columns = db[table].columns_dict
|
||||
indexes = db[table].indexes
|
||||
details = {}
|
||||
for name in columns:
|
||||
is_unique = any(
|
||||
index
|
||||
for index in indexes
|
||||
if index.columns == [name] and index.unique
|
||||
)
|
||||
details[name] = (columns[name], is_unique)
|
||||
return details
|
||||
|
||||
column_details = await self.execute_fn(column_details)
|
||||
# Is there just one unique column that's text?
|
||||
unique_text_columns = [
|
||||
name
|
||||
for name, (type_, is_unique) in column_details.items()
|
||||
if is_unique and type_ is str
|
||||
]
|
||||
if len(unique_text_columns) == 1:
|
||||
return unique_text_columns[0]
|
||||
|
||||
column_names = list(column_details.keys())
|
||||
column_names = await self.execute_fn(lambda conn: table_columns(conn, table))
|
||||
# Is there a name or title column?
|
||||
name_or_title = [c for c in column_names if c.lower() in ("name", "title")]
|
||||
if name_or_title:
|
||||
|
|
@ -489,7 +449,6 @@ class Database:
|
|||
column_names
|
||||
and len(column_names) == 2
|
||||
and ("id" in column_names or "pk" in column_names)
|
||||
and not set(column_names) == {"id", "pk"}
|
||||
):
|
||||
return [c for c in column_names if c not in ("id", "pk")][0]
|
||||
# Couldn't find a label:
|
||||
|
|
@ -540,67 +499,16 @@ class Database:
|
|||
x[0]
|
||||
for x in await self.execute(
|
||||
"""
|
||||
WITH base AS (
|
||||
SELECT name
|
||||
FROM sqlite_master
|
||||
WHERE name IN ('sqlite_stat1', 'sqlite_stat2', 'sqlite_stat3', 'sqlite_stat4')
|
||||
with final as (
|
||||
select name
|
||||
from sqlite_master
|
||||
WHERE name in ('sqlite_stat1', 'sqlite_stat2', 'sqlite_stat3', 'sqlite_stat4')
|
||||
OR substr(name, 1, 1) == '_'
|
||||
),
|
||||
fts_suffixes AS (
|
||||
SELECT column1 AS suffix
|
||||
FROM (VALUES ('_data'), ('_idx'), ('_docsize'), ('_content'), ('_config'))
|
||||
),
|
||||
fts5_names AS (
|
||||
SELECT name
|
||||
FROM sqlite_master
|
||||
WHERE sql LIKE '%VIRTUAL TABLE%USING FTS%'
|
||||
),
|
||||
fts5_shadow_tables AS (
|
||||
SELECT
|
||||
printf('%s%s', fts5_names.name, fts_suffixes.suffix) AS name
|
||||
FROM fts5_names
|
||||
JOIN fts_suffixes
|
||||
),
|
||||
fts3_suffixes AS (
|
||||
SELECT column1 AS suffix
|
||||
FROM (VALUES ('_content'), ('_segdir'), ('_segments'), ('_stat'), ('_docsize'))
|
||||
),
|
||||
fts3_names AS (
|
||||
SELECT name
|
||||
FROM sqlite_master
|
||||
WHERE sql LIKE '%VIRTUAL TABLE%USING FTS3%'
|
||||
OR sql LIKE '%VIRTUAL TABLE%USING FTS4%'
|
||||
),
|
||||
fts3_shadow_tables AS (
|
||||
SELECT
|
||||
printf('%s%s', fts3_names.name, fts3_suffixes.suffix) AS name
|
||||
FROM fts3_names
|
||||
JOIN fts3_suffixes
|
||||
),
|
||||
final AS (
|
||||
SELECT name FROM base
|
||||
UNION ALL
|
||||
SELECT name FROM fts5_shadow_tables
|
||||
UNION ALL
|
||||
SELECT name FROM fts3_shadow_tables
|
||||
)
|
||||
SELECT name FROM final ORDER BY 1
|
||||
select name from final order by 1
|
||||
"""
|
||||
)
|
||||
]
|
||||
# Also hide any FTS tables that have a content= argument
|
||||
hidden_tables += [
|
||||
x[0]
|
||||
for x in await self.execute(
|
||||
"""
|
||||
SELECT name
|
||||
FROM sqlite_master
|
||||
WHERE sql LIKE '%VIRTUAL TABLE%'
|
||||
AND sql LIKE '%USING FTS%'
|
||||
AND sql LIKE '%content=%'
|
||||
"""
|
||||
)
|
||||
]
|
||||
|
||||
has_spatialite = await self.execute_fn(detect_spatialite)
|
||||
if has_spatialite:
|
||||
|
|
@ -697,9 +605,6 @@ class QueryInterrupted(Exception):
|
|||
self.sql = sql
|
||||
self.params = params
|
||||
|
||||
def __str__(self):
|
||||
return "QueryInterrupted: {}".format(self.e)
|
||||
|
||||
|
||||
class MultipleValues(Exception):
|
||||
pass
|
||||
|
|
@ -727,9 +632,6 @@ class Results:
|
|||
else:
|
||||
raise MultipleValues
|
||||
|
||||
def dicts(self):
|
||||
return [dict(row) for row in self.rows]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.rows)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,101 +0,0 @@
|
|||
from datasette import hookimpl
|
||||
from datasette.permissions import Action
|
||||
from datasette.resources import (
|
||||
DatabaseResource,
|
||||
TableResource,
|
||||
QueryResource,
|
||||
)
|
||||
|
||||
|
||||
@hookimpl
|
||||
def register_actions():
|
||||
"""Register the core Datasette actions."""
|
||||
return (
|
||||
# Global actions (no resource_class)
|
||||
Action(
|
||||
name="view-instance",
|
||||
abbr="vi",
|
||||
description="View Datasette instance",
|
||||
),
|
||||
Action(
|
||||
name="permissions-debug",
|
||||
abbr="pd",
|
||||
description="Access permission debug tool",
|
||||
),
|
||||
Action(
|
||||
name="debug-menu",
|
||||
abbr="dm",
|
||||
description="View debug menu items",
|
||||
),
|
||||
# Database-level actions (parent-level)
|
||||
Action(
|
||||
name="view-database",
|
||||
abbr="vd",
|
||||
description="View database",
|
||||
resource_class=DatabaseResource,
|
||||
),
|
||||
Action(
|
||||
name="view-database-download",
|
||||
abbr="vdd",
|
||||
description="Download database file",
|
||||
resource_class=DatabaseResource,
|
||||
also_requires="view-database",
|
||||
),
|
||||
Action(
|
||||
name="execute-sql",
|
||||
abbr="es",
|
||||
description="Execute read-only SQL queries",
|
||||
resource_class=DatabaseResource,
|
||||
also_requires="view-database",
|
||||
),
|
||||
Action(
|
||||
name="create-table",
|
||||
abbr="ct",
|
||||
description="Create tables",
|
||||
resource_class=DatabaseResource,
|
||||
),
|
||||
# Table-level actions (child-level)
|
||||
Action(
|
||||
name="view-table",
|
||||
abbr="vt",
|
||||
description="View table",
|
||||
resource_class=TableResource,
|
||||
),
|
||||
Action(
|
||||
name="insert-row",
|
||||
abbr="ir",
|
||||
description="Insert rows",
|
||||
resource_class=TableResource,
|
||||
),
|
||||
Action(
|
||||
name="delete-row",
|
||||
abbr="dr",
|
||||
description="Delete rows",
|
||||
resource_class=TableResource,
|
||||
),
|
||||
Action(
|
||||
name="update-row",
|
||||
abbr="ur",
|
||||
description="Update rows",
|
||||
resource_class=TableResource,
|
||||
),
|
||||
Action(
|
||||
name="alter-table",
|
||||
abbr="at",
|
||||
description="Alter tables",
|
||||
resource_class=TableResource,
|
||||
),
|
||||
Action(
|
||||
name="drop-table",
|
||||
abbr="dt",
|
||||
description="Drop tables",
|
||||
resource_class=TableResource,
|
||||
),
|
||||
# Query-level actions (child-level)
|
||||
Action(
|
||||
name="view-query",
|
||||
abbr="vq",
|
||||
description="View named query results",
|
||||
resource_class=QueryResource,
|
||||
),
|
||||
)
|
||||
|
|
@ -4,7 +4,7 @@ from datasette import hookimpl
|
|||
@hookimpl
|
||||
def menu_links(datasette, actor):
|
||||
async def inner():
|
||||
if not await datasette.allowed(action="debug-menu", actor=actor):
|
||||
if not await datasette.permission_allowed(actor, "debug-menu"):
|
||||
return []
|
||||
|
||||
return [
|
||||
|
|
|
|||
420
datasette/default_permissions.py
Normal file
420
datasette/default_permissions.py
Normal file
|
|
@ -0,0 +1,420 @@
|
|||
from datasette import hookimpl, Permission
|
||||
from datasette.utils import actor_matches_allow
|
||||
import itsdangerous
|
||||
import time
|
||||
from typing import Union, Tuple
|
||||
|
||||
|
||||
@hookimpl
|
||||
def register_permissions():
|
||||
return (
|
||||
Permission(
|
||||
name="view-instance",
|
||||
abbr="vi",
|
||||
description="View Datasette instance",
|
||||
takes_database=False,
|
||||
takes_resource=False,
|
||||
default=True,
|
||||
),
|
||||
Permission(
|
||||
name="view-database",
|
||||
abbr="vd",
|
||||
description="View database",
|
||||
takes_database=True,
|
||||
takes_resource=False,
|
||||
default=True,
|
||||
implies_can_view=True,
|
||||
),
|
||||
Permission(
|
||||
name="view-database-download",
|
||||
abbr="vdd",
|
||||
description="Download database file",
|
||||
takes_database=True,
|
||||
takes_resource=False,
|
||||
default=True,
|
||||
),
|
||||
Permission(
|
||||
name="view-table",
|
||||
abbr="vt",
|
||||
description="View table",
|
||||
takes_database=True,
|
||||
takes_resource=True,
|
||||
default=True,
|
||||
implies_can_view=True,
|
||||
),
|
||||
Permission(
|
||||
name="view-query",
|
||||
abbr="vq",
|
||||
description="View named query results",
|
||||
takes_database=True,
|
||||
takes_resource=True,
|
||||
default=True,
|
||||
implies_can_view=True,
|
||||
),
|
||||
Permission(
|
||||
name="execute-sql",
|
||||
abbr="es",
|
||||
description="Execute read-only SQL queries",
|
||||
takes_database=True,
|
||||
takes_resource=False,
|
||||
default=True,
|
||||
implies_can_view=True,
|
||||
),
|
||||
Permission(
|
||||
name="permissions-debug",
|
||||
abbr="pd",
|
||||
description="Access permission debug tool",
|
||||
takes_database=False,
|
||||
takes_resource=False,
|
||||
default=False,
|
||||
),
|
||||
Permission(
|
||||
name="debug-menu",
|
||||
abbr="dm",
|
||||
description="View debug menu items",
|
||||
takes_database=False,
|
||||
takes_resource=False,
|
||||
default=False,
|
||||
),
|
||||
Permission(
|
||||
name="insert-row",
|
||||
abbr="ir",
|
||||
description="Insert rows",
|
||||
takes_database=True,
|
||||
takes_resource=True,
|
||||
default=False,
|
||||
),
|
||||
Permission(
|
||||
name="delete-row",
|
||||
abbr="dr",
|
||||
description="Delete rows",
|
||||
takes_database=True,
|
||||
takes_resource=True,
|
||||
default=False,
|
||||
),
|
||||
Permission(
|
||||
name="update-row",
|
||||
abbr="ur",
|
||||
description="Update rows",
|
||||
takes_database=True,
|
||||
takes_resource=True,
|
||||
default=False,
|
||||
),
|
||||
Permission(
|
||||
name="create-table",
|
||||
abbr="ct",
|
||||
description="Create tables",
|
||||
takes_database=True,
|
||||
takes_resource=False,
|
||||
default=False,
|
||||
),
|
||||
Permission(
|
||||
name="alter-table",
|
||||
abbr="at",
|
||||
description="Alter tables",
|
||||
takes_database=True,
|
||||
takes_resource=True,
|
||||
default=False,
|
||||
),
|
||||
Permission(
|
||||
name="drop-table",
|
||||
abbr="dt",
|
||||
description="Drop tables",
|
||||
takes_database=True,
|
||||
takes_resource=True,
|
||||
default=False,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@hookimpl(tryfirst=True, specname="permission_allowed")
|
||||
def permission_allowed_default(datasette, actor, action, resource):
|
||||
async def inner():
|
||||
# id=root gets some special permissions:
|
||||
if action in (
|
||||
"permissions-debug",
|
||||
"debug-menu",
|
||||
"insert-row",
|
||||
"create-table",
|
||||
"alter-table",
|
||||
"drop-table",
|
||||
"delete-row",
|
||||
"update-row",
|
||||
):
|
||||
if actor and actor.get("id") == "root":
|
||||
return True
|
||||
|
||||
# Resolve view permissions in allow blocks in configuration
|
||||
if action in (
|
||||
"view-instance",
|
||||
"view-database",
|
||||
"view-table",
|
||||
"view-query",
|
||||
"execute-sql",
|
||||
):
|
||||
result = await _resolve_config_view_permissions(
|
||||
datasette, actor, action, resource
|
||||
)
|
||||
if result is not None:
|
||||
return result
|
||||
|
||||
# Resolve custom permissions: blocks in configuration
|
||||
result = await _resolve_config_permissions_blocks(
|
||||
datasette, actor, action, resource
|
||||
)
|
||||
if result is not None:
|
||||
return result
|
||||
|
||||
# --setting default_allow_sql
|
||||
if action == "execute-sql" and not datasette.setting("default_allow_sql"):
|
||||
return False
|
||||
|
||||
return inner
|
||||
|
||||
|
||||
async def _resolve_config_permissions_blocks(datasette, actor, action, resource):
|
||||
# Check custom permissions: blocks
|
||||
config = datasette.config or {}
|
||||
root_block = (config.get("permissions", None) or {}).get(action)
|
||||
if root_block:
|
||||
root_result = actor_matches_allow(actor, root_block)
|
||||
if root_result is not None:
|
||||
return root_result
|
||||
# Now try database-specific blocks
|
||||
if not resource:
|
||||
return None
|
||||
if isinstance(resource, str):
|
||||
database = resource
|
||||
else:
|
||||
database = resource[0]
|
||||
database_block = (
|
||||
(config.get("databases", {}).get(database, {}).get("permissions", None)) or {}
|
||||
).get(action)
|
||||
if database_block:
|
||||
database_result = actor_matches_allow(actor, database_block)
|
||||
if database_result is not None:
|
||||
return database_result
|
||||
# Finally try table/query specific blocks
|
||||
if not isinstance(resource, tuple):
|
||||
return None
|
||||
database, table_or_query = resource
|
||||
table_block = (
|
||||
(
|
||||
config.get("databases", {})
|
||||
.get(database, {})
|
||||
.get("tables", {})
|
||||
.get(table_or_query, {})
|
||||
.get("permissions", None)
|
||||
)
|
||||
or {}
|
||||
).get(action)
|
||||
if table_block:
|
||||
table_result = actor_matches_allow(actor, table_block)
|
||||
if table_result is not None:
|
||||
return table_result
|
||||
# Finally the canned queries
|
||||
query_block = (
|
||||
(
|
||||
config.get("databases", {})
|
||||
.get(database, {})
|
||||
.get("queries", {})
|
||||
.get(table_or_query, {})
|
||||
.get("permissions", None)
|
||||
)
|
||||
or {}
|
||||
).get(action)
|
||||
if query_block:
|
||||
query_result = actor_matches_allow(actor, query_block)
|
||||
if query_result is not None:
|
||||
return query_result
|
||||
return None
|
||||
|
||||
|
||||
async def _resolve_config_view_permissions(datasette, actor, action, resource):
|
||||
config = datasette.config or {}
|
||||
if action == "view-instance":
|
||||
allow = config.get("allow")
|
||||
if allow is not None:
|
||||
return actor_matches_allow(actor, allow)
|
||||
elif action == "view-database":
|
||||
database_allow = ((config.get("databases") or {}).get(resource) or {}).get(
|
||||
"allow"
|
||||
)
|
||||
if database_allow is None:
|
||||
return None
|
||||
return actor_matches_allow(actor, database_allow)
|
||||
elif action == "view-table":
|
||||
database, table = resource
|
||||
tables = ((config.get("databases") or {}).get(database) or {}).get(
|
||||
"tables"
|
||||
) or {}
|
||||
table_allow = (tables.get(table) or {}).get("allow")
|
||||
if table_allow is None:
|
||||
return None
|
||||
return actor_matches_allow(actor, table_allow)
|
||||
elif action == "view-query":
|
||||
# Check if this query has a "allow" block in config
|
||||
database, query_name = resource
|
||||
query = await datasette.get_canned_query(database, query_name, actor)
|
||||
assert query is not None
|
||||
allow = query.get("allow")
|
||||
if allow is None:
|
||||
return None
|
||||
return actor_matches_allow(actor, allow)
|
||||
elif action == "execute-sql":
|
||||
# Use allow_sql block from database block, or from top-level
|
||||
database_allow_sql = ((config.get("databases") or {}).get(resource) or {}).get(
|
||||
"allow_sql"
|
||||
)
|
||||
if database_allow_sql is None:
|
||||
database_allow_sql = config.get("allow_sql")
|
||||
if database_allow_sql is None:
|
||||
return None
|
||||
return actor_matches_allow(actor, database_allow_sql)
|
||||
|
||||
|
||||
def restrictions_allow_action(
|
||||
datasette: "Datasette",
|
||||
restrictions: dict,
|
||||
action: str,
|
||||
resource: Union[str, Tuple[str, str]],
|
||||
):
|
||||
"Do these restrictions allow the requested action against the requested resource?"
|
||||
if action == "view-instance":
|
||||
# Special case for view-instance: it's allowed if the restrictions include any
|
||||
# permissions that have the implies_can_view=True flag set
|
||||
all_rules = restrictions.get("a") or []
|
||||
for database_rules in (restrictions.get("d") or {}).values():
|
||||
all_rules += database_rules
|
||||
for database_resource_rules in (restrictions.get("r") or {}).values():
|
||||
for resource_rules in database_resource_rules.values():
|
||||
all_rules += resource_rules
|
||||
permissions = [datasette.get_permission(action) for action in all_rules]
|
||||
if any(p for p in permissions if p.implies_can_view):
|
||||
return True
|
||||
|
||||
if action == "view-database":
|
||||
# Special case for view-database: it's allowed if the restrictions include any
|
||||
# permissions that have the implies_can_view=True flag set AND takes_database
|
||||
all_rules = restrictions.get("a") or []
|
||||
database_rules = list((restrictions.get("d") or {}).get(resource) or [])
|
||||
all_rules += database_rules
|
||||
resource_rules = ((restrictions.get("r") or {}).get(resource) or {}).values()
|
||||
for resource_rules in (restrictions.get("r") or {}).values():
|
||||
for table_rules in resource_rules.values():
|
||||
all_rules += table_rules
|
||||
permissions = [datasette.get_permission(action) for action in all_rules]
|
||||
if any(p for p in permissions if p.implies_can_view and p.takes_database):
|
||||
return True
|
||||
|
||||
# Does this action have an abbreviation?
|
||||
to_check = {action}
|
||||
permission = datasette.permissions.get(action)
|
||||
if permission and permission.abbr:
|
||||
to_check.add(permission.abbr)
|
||||
|
||||
# If restrictions is defined then we use those to further restrict the actor
|
||||
# Crucially, we only use this to say NO (return False) - we never
|
||||
# use it to return YES (True) because that might over-ride other
|
||||
# restrictions placed on this actor
|
||||
all_allowed = restrictions.get("a")
|
||||
if all_allowed is not None:
|
||||
assert isinstance(all_allowed, list)
|
||||
if to_check.intersection(all_allowed):
|
||||
return True
|
||||
# How about for the current database?
|
||||
if resource:
|
||||
if isinstance(resource, str):
|
||||
database_name = resource
|
||||
else:
|
||||
database_name = resource[0]
|
||||
database_allowed = restrictions.get("d", {}).get(database_name)
|
||||
if database_allowed is not None:
|
||||
assert isinstance(database_allowed, list)
|
||||
if to_check.intersection(database_allowed):
|
||||
return True
|
||||
# Or the current table? That's any time the resource is (database, table)
|
||||
if resource is not None and not isinstance(resource, str) and len(resource) == 2:
|
||||
database, table = resource
|
||||
table_allowed = restrictions.get("r", {}).get(database, {}).get(table)
|
||||
# TODO: What should this do for canned queries?
|
||||
if table_allowed is not None:
|
||||
assert isinstance(table_allowed, list)
|
||||
if to_check.intersection(table_allowed):
|
||||
return True
|
||||
|
||||
# This action is not specifically allowed, so reject it
|
||||
return False
|
||||
|
||||
|
||||
@hookimpl(specname="permission_allowed")
|
||||
def permission_allowed_actor_restrictions(datasette, actor, action, resource):
|
||||
if actor is None:
|
||||
return None
|
||||
if "_r" not in actor:
|
||||
# No restrictions, so we have no opinion
|
||||
return None
|
||||
_r = actor.get("_r")
|
||||
if restrictions_allow_action(datasette, _r, action, resource):
|
||||
# Return None because we do not have an opinion here
|
||||
return None
|
||||
else:
|
||||
# Block this permission check
|
||||
return False
|
||||
|
||||
|
||||
@hookimpl
|
||||
def actor_from_request(datasette, request):
|
||||
prefix = "dstok_"
|
||||
if not datasette.setting("allow_signed_tokens"):
|
||||
return None
|
||||
max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl")
|
||||
authorization = request.headers.get("authorization")
|
||||
if not authorization:
|
||||
return None
|
||||
if not authorization.startswith("Bearer "):
|
||||
return None
|
||||
token = authorization[len("Bearer ") :]
|
||||
if not token.startswith(prefix):
|
||||
return None
|
||||
token = token[len(prefix) :]
|
||||
try:
|
||||
decoded = datasette.unsign(token, namespace="token")
|
||||
except itsdangerous.BadSignature:
|
||||
return None
|
||||
if "t" not in decoded:
|
||||
# Missing timestamp
|
||||
return None
|
||||
created = decoded["t"]
|
||||
if not isinstance(created, int):
|
||||
# Invalid timestamp
|
||||
return None
|
||||
duration = decoded.get("d")
|
||||
if duration is not None and not isinstance(duration, int):
|
||||
# Invalid duration
|
||||
return None
|
||||
if (duration is None and max_signed_tokens_ttl) or (
|
||||
duration is not None
|
||||
and max_signed_tokens_ttl
|
||||
and duration > max_signed_tokens_ttl
|
||||
):
|
||||
duration = max_signed_tokens_ttl
|
||||
if duration:
|
||||
if time.time() - created > duration:
|
||||
# Expired
|
||||
return None
|
||||
actor = {"id": decoded["a"], "token": "dstok"}
|
||||
if "_r" in decoded:
|
||||
actor["_r"] = decoded["_r"]
|
||||
if duration:
|
||||
actor["token_expires"] = created + duration
|
||||
return actor
|
||||
|
||||
|
||||
@hookimpl
|
||||
def skip_csrf(scope):
|
||||
# Skip CSRF check for requests with content-type: application/json
|
||||
if scope["type"] == "http":
|
||||
headers = scope.get("headers") or {}
|
||||
if dict(headers).get(b"content-type") == b"application/json":
|
||||
return True
|
||||
|
|
@ -1,59 +0,0 @@
|
|||
"""
|
||||
Default permission implementations for Datasette.
|
||||
|
||||
This module provides the built-in permission checking logic through implementations
|
||||
of the permission_resources_sql hook. The hooks are organized by their purpose:
|
||||
|
||||
1. Actor Restrictions - Enforces _r allowlists embedded in actor tokens
|
||||
2. Root User - Grants full access when --root flag is used
|
||||
3. Config Rules - Applies permissions from datasette.yaml
|
||||
4. Default Settings - Enforces default_allow_sql and default view permissions
|
||||
|
||||
IMPORTANT: These hooks return PermissionSQL objects that are combined using SQL
|
||||
UNION/INTERSECT operations. The order of evaluation is:
|
||||
- restriction_sql fields are INTERSECTed (all must match)
|
||||
- Regular sql fields are UNIONed and evaluated with cascading priority
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from datasette.app import Datasette
|
||||
|
||||
from datasette import hookimpl
|
||||
|
||||
# Re-export all hooks and public utilities
|
||||
from .restrictions import (
|
||||
actor_restrictions_sql,
|
||||
restrictions_allow_action,
|
||||
ActorRestrictions,
|
||||
)
|
||||
from .root import root_user_permissions_sql
|
||||
from .config import config_permissions_sql
|
||||
from .defaults import (
|
||||
default_allow_sql_check,
|
||||
default_action_permissions_sql,
|
||||
DEFAULT_ALLOW_ACTIONS,
|
||||
)
|
||||
from .tokens import actor_from_signed_api_token
|
||||
|
||||
|
||||
@hookimpl
|
||||
def skip_csrf(scope) -> Optional[bool]:
|
||||
"""Skip CSRF check for JSON content-type requests."""
|
||||
if scope["type"] == "http":
|
||||
headers = scope.get("headers") or {}
|
||||
if dict(headers).get(b"content-type") == b"application/json":
|
||||
return True
|
||||
return None
|
||||
|
||||
|
||||
@hookimpl
|
||||
def canned_queries(datasette: "Datasette", database: str, actor) -> dict:
|
||||
"""Return canned queries defined in datasette.yaml configuration."""
|
||||
queries = (
|
||||
((datasette.config or {}).get("databases") or {}).get(database) or {}
|
||||
).get("queries") or {}
|
||||
return queries
|
||||
|
|
@ -1,442 +0,0 @@
|
|||
"""
|
||||
Config-based permission handling for Datasette.
|
||||
|
||||
Applies permission rules from datasette.yaml configuration.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any, List, Optional, Set, Tuple
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from datasette.app import Datasette
|
||||
|
||||
from datasette import hookimpl
|
||||
from datasette.permissions import PermissionSQL
|
||||
from datasette.utils import actor_matches_allow
|
||||
|
||||
from .helpers import PermissionRowCollector, get_action_name_variants
|
||||
|
||||
|
||||
class ConfigPermissionProcessor:
|
||||
"""
|
||||
Processes permission rules from datasette.yaml configuration.
|
||||
|
||||
Configuration structure:
|
||||
|
||||
permissions: # Root-level permissions block
|
||||
view-instance:
|
||||
id: admin
|
||||
|
||||
databases:
|
||||
mydb:
|
||||
permissions: # Database-level permissions
|
||||
view-database:
|
||||
id: admin
|
||||
allow: # Database-level allow block (for view-*)
|
||||
id: viewer
|
||||
allow_sql: # execute-sql allow block
|
||||
id: analyst
|
||||
tables:
|
||||
users:
|
||||
permissions: # Table-level permissions
|
||||
view-table:
|
||||
id: admin
|
||||
allow: # Table-level allow block
|
||||
id: viewer
|
||||
queries:
|
||||
my_query:
|
||||
permissions: # Query-level permissions
|
||||
view-query:
|
||||
id: admin
|
||||
allow: # Query-level allow block
|
||||
id: viewer
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
datasette: "Datasette",
|
||||
actor: Optional[dict],
|
||||
action: str,
|
||||
):
|
||||
self.datasette = datasette
|
||||
self.actor = actor
|
||||
self.action = action
|
||||
self.config = datasette.config or {}
|
||||
self.collector = PermissionRowCollector(prefix="cfg")
|
||||
|
||||
# Pre-compute action variants
|
||||
self.action_checks = get_action_name_variants(datasette, action)
|
||||
self.action_obj = datasette.actions.get(action)
|
||||
|
||||
# Parse restrictions if present
|
||||
self.has_restrictions = actor and "_r" in actor if actor else False
|
||||
self.restrictions = actor.get("_r", {}) if actor else {}
|
||||
|
||||
# Pre-compute restriction info for efficiency
|
||||
self.restricted_databases: Set[str] = set()
|
||||
self.restricted_tables: Set[Tuple[str, str]] = set()
|
||||
|
||||
if self.has_restrictions:
|
||||
self.restricted_databases = {
|
||||
db_name
|
||||
for db_name, db_actions in (self.restrictions.get("d") or {}).items()
|
||||
if self.action_checks.intersection(db_actions)
|
||||
}
|
||||
self.restricted_tables = {
|
||||
(db_name, table_name)
|
||||
for db_name, tables in (self.restrictions.get("r") or {}).items()
|
||||
for table_name, table_actions in tables.items()
|
||||
if self.action_checks.intersection(table_actions)
|
||||
}
|
||||
# Tables implicitly reference their parent databases
|
||||
self.restricted_databases.update(db for db, _ in self.restricted_tables)
|
||||
|
||||
def evaluate_allow_block(self, allow_block: Any) -> Optional[bool]:
|
||||
"""Evaluate an allow block against the current actor."""
|
||||
if allow_block is None:
|
||||
return None
|
||||
return actor_matches_allow(self.actor, allow_block)
|
||||
|
||||
def is_in_restriction_allowlist(
|
||||
self,
|
||||
parent: Optional[str],
|
||||
child: Optional[str],
|
||||
) -> bool:
|
||||
"""Check if resource is allowed by actor restrictions."""
|
||||
if not self.has_restrictions:
|
||||
return True # No restrictions, all resources allowed
|
||||
|
||||
# Check global allowlist
|
||||
if self.action_checks.intersection(self.restrictions.get("a", [])):
|
||||
return True
|
||||
|
||||
# Check database-level allowlist
|
||||
if parent and self.action_checks.intersection(
|
||||
self.restrictions.get("d", {}).get(parent, [])
|
||||
):
|
||||
return True
|
||||
|
||||
# Check table-level allowlist
|
||||
if parent:
|
||||
table_restrictions = (self.restrictions.get("r", {}) or {}).get(parent, {})
|
||||
if child:
|
||||
table_actions = table_restrictions.get(child, [])
|
||||
if self.action_checks.intersection(table_actions):
|
||||
return True
|
||||
else:
|
||||
# Parent query should proceed if any child in this database is allowlisted
|
||||
for table_actions in table_restrictions.values():
|
||||
if self.action_checks.intersection(table_actions):
|
||||
return True
|
||||
|
||||
# Parent/child both None: include if any restrictions exist for this action
|
||||
if parent is None and child is None:
|
||||
if self.action_checks.intersection(self.restrictions.get("a", [])):
|
||||
return True
|
||||
if self.restricted_databases:
|
||||
return True
|
||||
if self.restricted_tables:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def add_permissions_rule(
|
||||
self,
|
||||
parent: Optional[str],
|
||||
child: Optional[str],
|
||||
permissions_block: Optional[dict],
|
||||
scope_desc: str,
|
||||
) -> None:
|
||||
"""Add a rule from a permissions:{action} block."""
|
||||
if permissions_block is None:
|
||||
return
|
||||
|
||||
action_allow_block = permissions_block.get(self.action)
|
||||
result = self.evaluate_allow_block(action_allow_block)
|
||||
|
||||
self.collector.add(
|
||||
parent=parent,
|
||||
child=child,
|
||||
allow=result,
|
||||
reason=f"config {'allow' if result else 'deny'} {scope_desc}",
|
||||
if_not_none=True,
|
||||
)
|
||||
|
||||
def add_allow_block_rule(
|
||||
self,
|
||||
parent: Optional[str],
|
||||
child: Optional[str],
|
||||
allow_block: Any,
|
||||
scope_desc: str,
|
||||
) -> None:
|
||||
"""
|
||||
Add rules from an allow:{} block.
|
||||
|
||||
For allow blocks, if the block exists but doesn't match the actor,
|
||||
this is treated as a deny. We also handle the restriction-gate logic.
|
||||
"""
|
||||
if allow_block is None:
|
||||
return
|
||||
|
||||
# Skip if resource is not in restriction allowlist
|
||||
if not self.is_in_restriction_allowlist(parent, child):
|
||||
return
|
||||
|
||||
result = self.evaluate_allow_block(allow_block)
|
||||
bool_result = bool(result)
|
||||
|
||||
self.collector.add(
|
||||
parent,
|
||||
child,
|
||||
bool_result,
|
||||
f"config {'allow' if result else 'deny'} {scope_desc}",
|
||||
)
|
||||
|
||||
# Handle restriction-gate: add explicit denies for restricted resources
|
||||
self._add_restriction_gate_denies(parent, child, bool_result, scope_desc)
|
||||
|
||||
def _add_restriction_gate_denies(
|
||||
self,
|
||||
parent: Optional[str],
|
||||
child: Optional[str],
|
||||
is_allowed: bool,
|
||||
scope_desc: str,
|
||||
) -> None:
|
||||
"""
|
||||
When a config rule denies at a higher level, add explicit denies
|
||||
for restricted resources to prevent child-level allows from
|
||||
incorrectly granting access.
|
||||
"""
|
||||
if is_allowed or child is not None or not self.has_restrictions:
|
||||
return
|
||||
|
||||
if not self.action_obj:
|
||||
return
|
||||
|
||||
reason = f"config deny {scope_desc} (restriction gate)"
|
||||
|
||||
if parent is None:
|
||||
# Root-level deny: add denies for all restricted resources
|
||||
if self.action_obj.takes_parent:
|
||||
for db_name in self.restricted_databases:
|
||||
self.collector.add(db_name, None, False, reason)
|
||||
if self.action_obj.takes_child:
|
||||
for db_name, table_name in self.restricted_tables:
|
||||
self.collector.add(db_name, table_name, False, reason)
|
||||
else:
|
||||
# Database-level deny: add denies for tables in that database
|
||||
if self.action_obj.takes_child:
|
||||
for db_name, table_name in self.restricted_tables:
|
||||
if db_name == parent:
|
||||
self.collector.add(db_name, table_name, False, reason)
|
||||
|
||||
def process(self) -> Optional[PermissionSQL]:
|
||||
"""Process all config rules and return combined PermissionSQL."""
|
||||
self._process_root_permissions()
|
||||
self._process_databases()
|
||||
self._process_root_allow_blocks()
|
||||
|
||||
return self.collector.to_permission_sql()
|
||||
|
||||
def _process_root_permissions(self) -> None:
|
||||
"""Process root-level permissions block."""
|
||||
root_perms = self.config.get("permissions") or {}
|
||||
self.add_permissions_rule(
|
||||
None,
|
||||
None,
|
||||
root_perms,
|
||||
f"permissions for {self.action}",
|
||||
)
|
||||
|
||||
def _process_databases(self) -> None:
|
||||
"""Process database-level and nested configurations."""
|
||||
databases = self.config.get("databases") or {}
|
||||
|
||||
for db_name, db_config in databases.items():
|
||||
self._process_database(db_name, db_config or {})
|
||||
|
||||
def _process_database(self, db_name: str, db_config: dict) -> None:
|
||||
"""Process a single database's configuration."""
|
||||
# Database-level permissions block
|
||||
db_perms = db_config.get("permissions") or {}
|
||||
self.add_permissions_rule(
|
||||
db_name,
|
||||
None,
|
||||
db_perms,
|
||||
f"permissions for {self.action} on {db_name}",
|
||||
)
|
||||
|
||||
# Process tables
|
||||
for table_name, table_config in (db_config.get("tables") or {}).items():
|
||||
self._process_table(db_name, table_name, table_config or {})
|
||||
|
||||
# Process queries
|
||||
for query_name, query_config in (db_config.get("queries") or {}).items():
|
||||
self._process_query(db_name, query_name, query_config)
|
||||
|
||||
# Database-level allow blocks
|
||||
self._process_database_allow_blocks(db_name, db_config)
|
||||
|
||||
def _process_table(
|
||||
self,
|
||||
db_name: str,
|
||||
table_name: str,
|
||||
table_config: dict,
|
||||
) -> None:
|
||||
"""Process a single table's configuration."""
|
||||
# Table-level permissions block
|
||||
table_perms = table_config.get("permissions") or {}
|
||||
self.add_permissions_rule(
|
||||
db_name,
|
||||
table_name,
|
||||
table_perms,
|
||||
f"permissions for {self.action} on {db_name}/{table_name}",
|
||||
)
|
||||
|
||||
# Table-level allow block (for view-table)
|
||||
if self.action == "view-table":
|
||||
self.add_allow_block_rule(
|
||||
db_name,
|
||||
table_name,
|
||||
table_config.get("allow"),
|
||||
f"allow for {self.action} on {db_name}/{table_name}",
|
||||
)
|
||||
|
||||
def _process_query(
|
||||
self,
|
||||
db_name: str,
|
||||
query_name: str,
|
||||
query_config: Any,
|
||||
) -> None:
|
||||
"""Process a single query's configuration."""
|
||||
# Query config can be a string (just SQL) or dict
|
||||
if not isinstance(query_config, dict):
|
||||
return
|
||||
|
||||
# Query-level permissions block
|
||||
query_perms = query_config.get("permissions") or {}
|
||||
self.add_permissions_rule(
|
||||
db_name,
|
||||
query_name,
|
||||
query_perms,
|
||||
f"permissions for {self.action} on {db_name}/{query_name}",
|
||||
)
|
||||
|
||||
# Query-level allow block (for view-query)
|
||||
if self.action == "view-query":
|
||||
self.add_allow_block_rule(
|
||||
db_name,
|
||||
query_name,
|
||||
query_config.get("allow"),
|
||||
f"allow for {self.action} on {db_name}/{query_name}",
|
||||
)
|
||||
|
||||
def _process_database_allow_blocks(
|
||||
self,
|
||||
db_name: str,
|
||||
db_config: dict,
|
||||
) -> None:
|
||||
"""Process database-level allow/allow_sql blocks."""
|
||||
# view-database allow block
|
||||
if self.action == "view-database":
|
||||
self.add_allow_block_rule(
|
||||
db_name,
|
||||
None,
|
||||
db_config.get("allow"),
|
||||
f"allow for {self.action} on {db_name}",
|
||||
)
|
||||
|
||||
# execute-sql allow_sql block
|
||||
if self.action == "execute-sql":
|
||||
self.add_allow_block_rule(
|
||||
db_name,
|
||||
None,
|
||||
db_config.get("allow_sql"),
|
||||
f"allow_sql for {db_name}",
|
||||
)
|
||||
|
||||
# view-table uses database-level allow for inheritance
|
||||
if self.action == "view-table":
|
||||
self.add_allow_block_rule(
|
||||
db_name,
|
||||
None,
|
||||
db_config.get("allow"),
|
||||
f"allow for {self.action} on {db_name}",
|
||||
)
|
||||
|
||||
# view-query uses database-level allow for inheritance
|
||||
if self.action == "view-query":
|
||||
self.add_allow_block_rule(
|
||||
db_name,
|
||||
None,
|
||||
db_config.get("allow"),
|
||||
f"allow for {self.action} on {db_name}",
|
||||
)
|
||||
|
||||
def _process_root_allow_blocks(self) -> None:
|
||||
"""Process root-level allow/allow_sql blocks."""
|
||||
root_allow = self.config.get("allow")
|
||||
|
||||
if self.action == "view-instance":
|
||||
self.add_allow_block_rule(
|
||||
None,
|
||||
None,
|
||||
root_allow,
|
||||
"allow for view-instance",
|
||||
)
|
||||
|
||||
if self.action == "view-database":
|
||||
self.add_allow_block_rule(
|
||||
None,
|
||||
None,
|
||||
root_allow,
|
||||
"allow for view-database",
|
||||
)
|
||||
|
||||
if self.action == "view-table":
|
||||
self.add_allow_block_rule(
|
||||
None,
|
||||
None,
|
||||
root_allow,
|
||||
"allow for view-table",
|
||||
)
|
||||
|
||||
if self.action == "view-query":
|
||||
self.add_allow_block_rule(
|
||||
None,
|
||||
None,
|
||||
root_allow,
|
||||
"allow for view-query",
|
||||
)
|
||||
|
||||
if self.action == "execute-sql":
|
||||
self.add_allow_block_rule(
|
||||
None,
|
||||
None,
|
||||
self.config.get("allow_sql"),
|
||||
"allow_sql",
|
||||
)
|
||||
|
||||
|
||||
@hookimpl(specname="permission_resources_sql")
|
||||
async def config_permissions_sql(
|
||||
datasette: "Datasette",
|
||||
actor: Optional[dict],
|
||||
action: str,
|
||||
) -> Optional[List[PermissionSQL]]:
|
||||
"""
|
||||
Apply permission rules from datasette.yaml configuration.
|
||||
|
||||
This processes:
|
||||
- permissions: blocks at root, database, table, and query levels
|
||||
- allow: blocks for view-* actions
|
||||
- allow_sql: blocks for execute-sql action
|
||||
"""
|
||||
processor = ConfigPermissionProcessor(datasette, actor, action)
|
||||
result = processor.process()
|
||||
|
||||
if result is None:
|
||||
return []
|
||||
|
||||
return [result]
|
||||
|
|
@ -1,70 +0,0 @@
|
|||
"""
|
||||
Default permission settings for Datasette.
|
||||
|
||||
Provides default allow rules for standard view/execute actions.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from datasette.app import Datasette
|
||||
|
||||
from datasette import hookimpl
|
||||
from datasette.permissions import PermissionSQL
|
||||
|
||||
|
||||
# Actions that are allowed by default (unless --default-deny is used)
|
||||
DEFAULT_ALLOW_ACTIONS = frozenset(
|
||||
{
|
||||
"view-instance",
|
||||
"view-database",
|
||||
"view-database-download",
|
||||
"view-table",
|
||||
"view-query",
|
||||
"execute-sql",
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@hookimpl(specname="permission_resources_sql")
|
||||
async def default_allow_sql_check(
|
||||
datasette: "Datasette",
|
||||
actor: Optional[dict],
|
||||
action: str,
|
||||
) -> Optional[PermissionSQL]:
|
||||
"""
|
||||
Enforce the default_allow_sql setting.
|
||||
|
||||
When default_allow_sql is false (the default), execute-sql is denied
|
||||
unless explicitly allowed by config or other rules.
|
||||
"""
|
||||
if action == "execute-sql":
|
||||
if not datasette.setting("default_allow_sql"):
|
||||
return PermissionSQL.deny(reason="default_allow_sql is false")
|
||||
|
||||
return None
|
||||
|
||||
|
||||
@hookimpl(specname="permission_resources_sql")
|
||||
async def default_action_permissions_sql(
|
||||
datasette: "Datasette",
|
||||
actor: Optional[dict],
|
||||
action: str,
|
||||
) -> Optional[PermissionSQL]:
|
||||
"""
|
||||
Provide default allow rules for standard view/execute actions.
|
||||
|
||||
These defaults are skipped when datasette is started with --default-deny.
|
||||
The restriction_sql mechanism (from actor_restrictions_sql) will still
|
||||
filter these results if the actor has restrictions.
|
||||
"""
|
||||
if datasette.default_deny:
|
||||
return None
|
||||
|
||||
if action in DEFAULT_ALLOW_ACTIONS:
|
||||
reason = f"default allow for {action}".replace("'", "''")
|
||||
return PermissionSQL.allow(reason=reason)
|
||||
|
||||
return None
|
||||
|
|
@ -1,85 +0,0 @@
|
|||
"""
|
||||
Shared helper utilities for default permission implementations.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, List, Optional, Set
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from datasette.app import Datasette
|
||||
|
||||
from datasette.permissions import PermissionSQL
|
||||
|
||||
|
||||
def get_action_name_variants(datasette: "Datasette", action: str) -> Set[str]:
|
||||
"""
|
||||
Get all name variants for an action (full name and abbreviation).
|
||||
|
||||
Example:
|
||||
get_action_name_variants(ds, "view-table") -> {"view-table", "vt"}
|
||||
"""
|
||||
variants = {action}
|
||||
action_obj = datasette.actions.get(action)
|
||||
if action_obj and action_obj.abbr:
|
||||
variants.add(action_obj.abbr)
|
||||
return variants
|
||||
|
||||
|
||||
def action_in_list(datasette: "Datasette", action: str, action_list: list) -> bool:
|
||||
"""Check if an action (or its abbreviation) is in a list."""
|
||||
return bool(get_action_name_variants(datasette, action).intersection(action_list))
|
||||
|
||||
|
||||
@dataclass
|
||||
class PermissionRow:
|
||||
"""A single permission rule row."""
|
||||
|
||||
parent: Optional[str]
|
||||
child: Optional[str]
|
||||
allow: bool
|
||||
reason: str
|
||||
|
||||
|
||||
class PermissionRowCollector:
|
||||
"""Collects permission rows and converts them to PermissionSQL."""
|
||||
|
||||
def __init__(self, prefix: str = "row"):
|
||||
self.rows: List[PermissionRow] = []
|
||||
self.prefix = prefix
|
||||
|
||||
def add(
|
||||
self,
|
||||
parent: Optional[str],
|
||||
child: Optional[str],
|
||||
allow: Optional[bool],
|
||||
reason: str,
|
||||
if_not_none: bool = False,
|
||||
) -> None:
|
||||
"""Add a permission row. If if_not_none=True, only add if allow is not None."""
|
||||
if if_not_none and allow is None:
|
||||
return
|
||||
self.rows.append(PermissionRow(parent, child, allow, reason))
|
||||
|
||||
def to_permission_sql(self) -> Optional[PermissionSQL]:
|
||||
"""Convert collected rows to a PermissionSQL object."""
|
||||
if not self.rows:
|
||||
return None
|
||||
|
||||
parts = []
|
||||
params = {}
|
||||
|
||||
for idx, row in enumerate(self.rows):
|
||||
key = f"{self.prefix}_{idx}"
|
||||
parts.append(
|
||||
f"SELECT :{key}_parent AS parent, :{key}_child AS child, "
|
||||
f":{key}_allow AS allow, :{key}_reason AS reason"
|
||||
)
|
||||
params[f"{key}_parent"] = row.parent
|
||||
params[f"{key}_child"] = row.child
|
||||
params[f"{key}_allow"] = 1 if row.allow else 0
|
||||
params[f"{key}_reason"] = row.reason
|
||||
|
||||
sql = "\nUNION ALL\n".join(parts)
|
||||
return PermissionSQL(sql=sql, params=params)
|
||||
|
|
@ -1,195 +0,0 @@
|
|||
"""
|
||||
Actor restriction handling for Datasette permissions.
|
||||
|
||||
This module handles the _r (restrictions) key in actor dictionaries, which
|
||||
contains allowlists of resources the actor can access.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, List, Optional, Set, Tuple
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from datasette.app import Datasette
|
||||
|
||||
from datasette import hookimpl
|
||||
from datasette.permissions import PermissionSQL
|
||||
|
||||
from .helpers import action_in_list, get_action_name_variants
|
||||
|
||||
|
||||
@dataclass
|
||||
class ActorRestrictions:
|
||||
"""Parsed actor restrictions from the _r key."""
|
||||
|
||||
global_actions: List[str] # _r.a - globally allowed actions
|
||||
database_actions: dict # _r.d - {db_name: [actions]}
|
||||
table_actions: dict # _r.r - {db_name: {table: [actions]}}
|
||||
|
||||
@classmethod
|
||||
def from_actor(cls, actor: Optional[dict]) -> Optional["ActorRestrictions"]:
|
||||
"""Parse restrictions from actor dict. Returns None if no restrictions."""
|
||||
if not actor:
|
||||
return None
|
||||
assert isinstance(actor, dict), "actor must be a dictionary"
|
||||
|
||||
restrictions = actor.get("_r")
|
||||
if restrictions is None:
|
||||
return None
|
||||
|
||||
return cls(
|
||||
global_actions=restrictions.get("a", []),
|
||||
database_actions=restrictions.get("d", {}),
|
||||
table_actions=restrictions.get("r", {}),
|
||||
)
|
||||
|
||||
def is_action_globally_allowed(self, datasette: "Datasette", action: str) -> bool:
|
||||
"""Check if action is in the global allowlist."""
|
||||
return action_in_list(datasette, action, self.global_actions)
|
||||
|
||||
def get_allowed_databases(self, datasette: "Datasette", action: str) -> Set[str]:
|
||||
"""Get database names where this action is allowed."""
|
||||
allowed = set()
|
||||
for db_name, db_actions in self.database_actions.items():
|
||||
if action_in_list(datasette, action, db_actions):
|
||||
allowed.add(db_name)
|
||||
return allowed
|
||||
|
||||
def get_allowed_tables(
|
||||
self, datasette: "Datasette", action: str
|
||||
) -> Set[Tuple[str, str]]:
|
||||
"""Get (database, table) pairs where this action is allowed."""
|
||||
allowed = set()
|
||||
for db_name, tables in self.table_actions.items():
|
||||
for table_name, table_actions in tables.items():
|
||||
if action_in_list(datasette, action, table_actions):
|
||||
allowed.add((db_name, table_name))
|
||||
return allowed
|
||||
|
||||
|
||||
@hookimpl(specname="permission_resources_sql")
|
||||
async def actor_restrictions_sql(
|
||||
datasette: "Datasette",
|
||||
actor: Optional[dict],
|
||||
action: str,
|
||||
) -> Optional[List[PermissionSQL]]:
|
||||
"""
|
||||
Handle actor restriction-based permission rules.
|
||||
|
||||
When an actor has an "_r" key, it contains an allowlist of resources they
|
||||
can access. This function returns restriction_sql that filters the final
|
||||
results to only include resources in that allowlist.
|
||||
|
||||
The _r structure:
|
||||
{
|
||||
"a": ["vi", "pd"], # Global actions allowed
|
||||
"d": {"mydb": ["vt", "es"]}, # Database-level actions
|
||||
"r": {"mydb": {"users": ["vt"]}} # Table-level actions
|
||||
}
|
||||
"""
|
||||
if not actor:
|
||||
return None
|
||||
|
||||
restrictions = ActorRestrictions.from_actor(actor)
|
||||
|
||||
if restrictions is None:
|
||||
# No restrictions - all resources allowed
|
||||
return []
|
||||
|
||||
# If globally allowed, no filtering needed
|
||||
if restrictions.is_action_globally_allowed(datasette, action):
|
||||
return []
|
||||
|
||||
# Build restriction SQL
|
||||
allowed_dbs = restrictions.get_allowed_databases(datasette, action)
|
||||
allowed_tables = restrictions.get_allowed_tables(datasette, action)
|
||||
|
||||
# If nothing is allowed for this action, return empty-set restriction
|
||||
if not allowed_dbs and not allowed_tables:
|
||||
return [
|
||||
PermissionSQL(
|
||||
params={"deny": f"actor restrictions: {action} not in allowlist"},
|
||||
restriction_sql="SELECT NULL AS parent, NULL AS child WHERE 0",
|
||||
)
|
||||
]
|
||||
|
||||
# Build UNION of allowed resources
|
||||
selects = []
|
||||
params = {}
|
||||
counter = 0
|
||||
|
||||
# Database-level entries (parent, NULL) - allows all children
|
||||
for db_name in allowed_dbs:
|
||||
key = f"restr_{counter}"
|
||||
counter += 1
|
||||
selects.append(f"SELECT :{key}_parent AS parent, NULL AS child")
|
||||
params[f"{key}_parent"] = db_name
|
||||
|
||||
# Table-level entries (parent, child)
|
||||
for db_name, table_name in allowed_tables:
|
||||
key = f"restr_{counter}"
|
||||
counter += 1
|
||||
selects.append(f"SELECT :{key}_parent AS parent, :{key}_child AS child")
|
||||
params[f"{key}_parent"] = db_name
|
||||
params[f"{key}_child"] = table_name
|
||||
|
||||
restriction_sql = "\nUNION ALL\n".join(selects)
|
||||
|
||||
return [PermissionSQL(params=params, restriction_sql=restriction_sql)]
|
||||
|
||||
|
||||
def restrictions_allow_action(
|
||||
datasette: "Datasette",
|
||||
restrictions: dict,
|
||||
action: str,
|
||||
resource: Optional[str | Tuple[str, str]],
|
||||
) -> bool:
|
||||
"""
|
||||
Check if restrictions allow the requested action on the requested resource.
|
||||
|
||||
This is a synchronous utility function for use by other code that needs
|
||||
to quickly check restriction allowlists.
|
||||
|
||||
Args:
|
||||
datasette: The Datasette instance
|
||||
restrictions: The _r dict from an actor
|
||||
action: The action name to check
|
||||
resource: None for global, str for database, (db, table) tuple for table
|
||||
|
||||
Returns:
|
||||
True if allowed, False if denied
|
||||
"""
|
||||
# Does this action have an abbreviation?
|
||||
to_check = get_action_name_variants(datasette, action)
|
||||
|
||||
# Check global level (any resource)
|
||||
all_allowed = restrictions.get("a")
|
||||
if all_allowed is not None:
|
||||
assert isinstance(all_allowed, list)
|
||||
if to_check.intersection(all_allowed):
|
||||
return True
|
||||
|
||||
# Check database level
|
||||
if resource:
|
||||
if isinstance(resource, str):
|
||||
database_name = resource
|
||||
else:
|
||||
database_name = resource[0]
|
||||
database_allowed = restrictions.get("d", {}).get(database_name)
|
||||
if database_allowed is not None:
|
||||
assert isinstance(database_allowed, list)
|
||||
if to_check.intersection(database_allowed):
|
||||
return True
|
||||
|
||||
# Check table/resource level
|
||||
if resource is not None and not isinstance(resource, str) and len(resource) == 2:
|
||||
database, table = resource
|
||||
table_allowed = restrictions.get("r", {}).get(database, {}).get(table)
|
||||
if table_allowed is not None:
|
||||
assert isinstance(table_allowed, list)
|
||||
if to_check.intersection(table_allowed):
|
||||
return True
|
||||
|
||||
# This action is not explicitly allowed, so reject it
|
||||
return False
|
||||
|
|
@ -1,29 +0,0 @@
|
|||
"""
|
||||
Root user permission handling for Datasette.
|
||||
|
||||
Grants full permissions to the root user when --root flag is used.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from datasette.app import Datasette
|
||||
|
||||
from datasette import hookimpl
|
||||
from datasette.permissions import PermissionSQL
|
||||
|
||||
|
||||
@hookimpl(specname="permission_resources_sql")
|
||||
async def root_user_permissions_sql(
|
||||
datasette: "Datasette",
|
||||
actor: Optional[dict],
|
||||
) -> Optional[PermissionSQL]:
|
||||
"""
|
||||
Grant root user full permissions when --root flag is used.
|
||||
"""
|
||||
if not datasette.root_enabled:
|
||||
return None
|
||||
if actor is not None and actor.get("id") == "root":
|
||||
return PermissionSQL.allow(reason="root user")
|
||||
|
|
@ -1,95 +0,0 @@
|
|||
"""
|
||||
Token authentication for Datasette.
|
||||
|
||||
Handles signed API tokens (dstok_ prefix).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from datasette.app import Datasette
|
||||
|
||||
import itsdangerous
|
||||
|
||||
from datasette import hookimpl
|
||||
|
||||
|
||||
@hookimpl(specname="actor_from_request")
|
||||
def actor_from_signed_api_token(datasette: "Datasette", request) -> Optional[dict]:
|
||||
"""
|
||||
Authenticate requests using signed API tokens (dstok_ prefix).
|
||||
|
||||
Token structure (signed JSON):
|
||||
{
|
||||
"a": "actor_id", # Actor ID
|
||||
"t": 1234567890, # Timestamp (Unix epoch)
|
||||
"d": 3600, # Optional: Duration in seconds
|
||||
"_r": {...} # Optional: Restrictions
|
||||
}
|
||||
"""
|
||||
prefix = "dstok_"
|
||||
|
||||
# Check if tokens are enabled
|
||||
if not datasette.setting("allow_signed_tokens"):
|
||||
return None
|
||||
|
||||
max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl")
|
||||
|
||||
# Get authorization header
|
||||
authorization = request.headers.get("authorization")
|
||||
if not authorization:
|
||||
return None
|
||||
if not authorization.startswith("Bearer "):
|
||||
return None
|
||||
|
||||
token = authorization[len("Bearer ") :]
|
||||
if not token.startswith(prefix):
|
||||
return None
|
||||
|
||||
# Remove prefix and verify signature
|
||||
token = token[len(prefix) :]
|
||||
try:
|
||||
decoded = datasette.unsign(token, namespace="token")
|
||||
except itsdangerous.BadSignature:
|
||||
return None
|
||||
|
||||
# Validate timestamp
|
||||
if "t" not in decoded:
|
||||
return None
|
||||
created = decoded["t"]
|
||||
if not isinstance(created, int):
|
||||
return None
|
||||
|
||||
# Handle duration/expiry
|
||||
duration = decoded.get("d")
|
||||
if duration is not None and not isinstance(duration, int):
|
||||
return None
|
||||
|
||||
# Apply max TTL if configured
|
||||
if (duration is None and max_signed_tokens_ttl) or (
|
||||
duration is not None
|
||||
and max_signed_tokens_ttl
|
||||
and duration > max_signed_tokens_ttl
|
||||
):
|
||||
duration = max_signed_tokens_ttl
|
||||
|
||||
# Check expiry
|
||||
if duration:
|
||||
if time.time() - created > duration:
|
||||
return None
|
||||
|
||||
# Build actor dict
|
||||
actor = {"id": decoded["a"], "token": "dstok"}
|
||||
|
||||
# Copy restrictions if present
|
||||
if "_r" in decoded:
|
||||
actor["_r"] = decoded["_r"]
|
||||
|
||||
# Add expiry timestamp if applicable
|
||||
if duration:
|
||||
actor["token_expires"] = created + duration
|
||||
|
||||
return actor
|
||||
|
|
@ -2,6 +2,7 @@ from abc import ABC, abstractproperty
|
|||
from dataclasses import asdict, dataclass, field
|
||||
from datasette.hookspecs import hookimpl
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional
|
||||
|
||||
|
||||
@dataclass
|
||||
|
|
@ -13,7 +14,7 @@ class Event(ABC):
|
|||
created: datetime = field(
|
||||
init=False, default_factory=lambda: datetime.now(timezone.utc)
|
||||
)
|
||||
actor: dict | None
|
||||
actor: Optional[dict]
|
||||
|
||||
def properties(self):
|
||||
properties = asdict(self)
|
||||
|
|
@ -62,7 +63,7 @@ class CreateTokenEvent(Event):
|
|||
"""
|
||||
|
||||
name = "create-token"
|
||||
expires_after: int | None
|
||||
expires_after: Optional[int]
|
||||
restrict_all: list
|
||||
restrict_database: dict
|
||||
restrict_resource: dict
|
||||
|
|
|
|||
|
|
@ -65,8 +65,6 @@ def register_facet_classes():
|
|||
|
||||
class Facet:
|
||||
type = None
|
||||
# How many rows to consider when suggesting facets:
|
||||
suggest_consider = 1000
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
|
|
@ -147,6 +145,17 @@ class Facet:
|
|||
)
|
||||
).columns
|
||||
|
||||
async def get_row_count(self):
|
||||
if self.row_count is None:
|
||||
self.row_count = (
|
||||
await self.ds.execute(
|
||||
self.database,
|
||||
f"select count(*) from ({self.sql})",
|
||||
self.params,
|
||||
)
|
||||
).rows[0][0]
|
||||
return self.row_count
|
||||
|
||||
|
||||
class ColumnFacet(Facet):
|
||||
type = "column"
|
||||
|
|
@ -161,16 +170,13 @@ class ColumnFacet(Facet):
|
|||
if column in already_enabled:
|
||||
continue
|
||||
suggested_facet_sql = """
|
||||
with limited as (select * from ({sql}) limit {suggest_consider})
|
||||
select {column} as value, count(*) as n from limited
|
||||
where value is not null
|
||||
select {column} as value, count(*) as n from (
|
||||
{sql}
|
||||
) where value is not null
|
||||
group by value
|
||||
limit {limit}
|
||||
""".format(
|
||||
column=escape_sqlite(column),
|
||||
sql=self.sql,
|
||||
limit=facet_size + 1,
|
||||
suggest_consider=self.suggest_consider,
|
||||
column=escape_sqlite(column), sql=self.sql, limit=facet_size + 1
|
||||
)
|
||||
distinct_values = None
|
||||
try:
|
||||
|
|
@ -205,17 +211,6 @@ class ColumnFacet(Facet):
|
|||
continue
|
||||
return suggested_facets
|
||||
|
||||
async def get_row_count(self):
|
||||
if self.row_count is None:
|
||||
self.row_count = (
|
||||
await self.ds.execute(
|
||||
self.database,
|
||||
f"select count(*) from (select * from ({self.sql}) limit {self.suggest_consider})",
|
||||
self.params,
|
||||
)
|
||||
).rows[0][0]
|
||||
return self.row_count
|
||||
|
||||
async def facet_results(self):
|
||||
facet_results = []
|
||||
facets_timed_out = []
|
||||
|
|
@ -318,14 +313,11 @@ class ArrayFacet(Facet):
|
|||
continue
|
||||
# Is every value in this column either null or a JSON array?
|
||||
suggested_facet_sql = """
|
||||
with limited as (select * from ({sql}) limit {suggest_consider})
|
||||
select distinct json_type({column})
|
||||
from limited
|
||||
from ({sql})
|
||||
where {column} is not null and {column} != ''
|
||||
""".format(
|
||||
column=escape_sqlite(column),
|
||||
sql=self.sql,
|
||||
suggest_consider=self.suggest_consider,
|
||||
column=escape_sqlite(column), sql=self.sql
|
||||
)
|
||||
try:
|
||||
results = await self.ds.execute(
|
||||
|
|
@ -410,9 +402,7 @@ class ArrayFacet(Facet):
|
|||
order by
|
||||
count(*) desc, value limit {limit}
|
||||
""".format(
|
||||
col=escape_sqlite(column),
|
||||
sql=self.sql,
|
||||
limit=facet_size + 1,
|
||||
col=escape_sqlite(column), sql=self.sql, limit=facet_size + 1
|
||||
)
|
||||
try:
|
||||
facet_rows_results = await self.ds.execute(
|
||||
|
|
@ -480,8 +470,8 @@ class DateFacet(Facet):
|
|||
# Does this column contain any dates in the first 100 rows?
|
||||
suggested_facet_sql = """
|
||||
select date({column}) from (
|
||||
select * from ({sql}) limit 100
|
||||
) where {column} glob "????-??-*"
|
||||
{sql}
|
||||
) where {column} glob "????-??-*" limit 100;
|
||||
""".format(
|
||||
column=escape_sqlite(column), sql=self.sql
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
from datasette import hookimpl
|
||||
from datasette.resources import DatabaseResource
|
||||
from datasette.views.base import DatasetteError
|
||||
from datasette.utils.asgi import BadRequest
|
||||
import json
|
||||
import numbers
|
||||
from .utils import detect_json1, escape_sqlite, path_with_removed_args
|
||||
|
||||
|
||||
|
|
@ -13,10 +13,11 @@ def where_filters(request, database, datasette):
|
|||
where_clauses = []
|
||||
extra_wheres_for_ui = []
|
||||
if "_where" in request.args:
|
||||
if not await datasette.allowed(
|
||||
action="execute-sql",
|
||||
resource=DatabaseResource(database=database),
|
||||
actor=request.actor,
|
||||
if not await datasette.permission_allowed(
|
||||
request.actor,
|
||||
"execute-sql",
|
||||
resource=database,
|
||||
default=True,
|
||||
):
|
||||
raise DatasetteError("_where= is not allowed", status=403)
|
||||
else:
|
||||
|
|
@ -367,8 +368,12 @@ class Filters:
|
|||
)
|
||||
_filters_by_key = {f.key: f for f in _filters}
|
||||
|
||||
def __init__(self, pairs):
|
||||
def __init__(self, pairs, units=None, ureg=None):
|
||||
if units is None:
|
||||
units = {}
|
||||
self.pairs = pairs
|
||||
self.units = units
|
||||
self.ureg = ureg
|
||||
|
||||
def lookups(self):
|
||||
"""Yields (lookup, display, no_argument) pairs"""
|
||||
|
|
@ -408,6 +413,20 @@ class Filters:
|
|||
def has_selections(self):
|
||||
return bool(self.pairs)
|
||||
|
||||
def convert_unit(self, column, value):
|
||||
"""If the user has provided a unit in the query, convert it into the column unit, if present."""
|
||||
if column not in self.units:
|
||||
return value
|
||||
|
||||
# Try to interpret the value as a unit
|
||||
value = self.ureg(value)
|
||||
if isinstance(value, numbers.Number):
|
||||
# It's just a bare number, assume it's the column unit
|
||||
return value
|
||||
|
||||
column_unit = self.ureg(self.units[column])
|
||||
return value.to(column_unit).magnitude
|
||||
|
||||
def build_where_clauses(self, table):
|
||||
sql_bits = []
|
||||
params = {}
|
||||
|
|
@ -415,7 +434,9 @@ class Filters:
|
|||
for column, lookup, value in self.selections():
|
||||
filter = self._filters_by_key.get(lookup, None)
|
||||
if filter:
|
||||
sql_bit, param = filter.where_clause(table, column, value, i)
|
||||
sql_bit, param = filter.where_clause(
|
||||
table, column, self.convert_unit(column, value), i
|
||||
)
|
||||
sql_bits.append(sql_bit)
|
||||
if param is not None:
|
||||
if not isinstance(param, list):
|
||||
|
|
|
|||
|
|
@ -5,13 +5,9 @@ from .utils.asgi import (
|
|||
)
|
||||
from .views.base import DatasetteError
|
||||
from markupsafe import Markup
|
||||
import pdb
|
||||
import traceback
|
||||
|
||||
try:
|
||||
import ipdb as pdb
|
||||
except ImportError:
|
||||
import pdb
|
||||
|
||||
try:
|
||||
import rich
|
||||
except ImportError:
|
||||
|
|
|
|||
|
|
@ -70,8 +70,8 @@ def register_facet_classes():
|
|||
|
||||
|
||||
@hookspec
|
||||
def register_actions(datasette):
|
||||
"""Register actions: returns a list of datasette.permission.Action objects"""
|
||||
def register_permissions(datasette):
|
||||
"""Register permissions: returns a list of datasette.permission.Permission named tuples"""
|
||||
|
||||
|
||||
@hookspec
|
||||
|
|
@ -111,15 +111,8 @@ def filters_from_request(request, database, table, datasette):
|
|||
|
||||
|
||||
@hookspec
|
||||
def permission_resources_sql(datasette, actor, action):
|
||||
"""Return SQL query fragments for permission checks on resources.
|
||||
|
||||
Returns None, a PermissionSQL object, or a list of PermissionSQL objects.
|
||||
Each PermissionSQL contains SQL that should return rows with columns:
|
||||
parent (str|None), child (str|None), allow (int), reason (str).
|
||||
|
||||
Used to efficiently check permissions across multiple resources at once.
|
||||
"""
|
||||
def permission_allowed(datasette, actor, action, resource):
|
||||
"""Check if actor is allowed to perform this action - return True, False or None"""
|
||||
|
||||
|
||||
@hookspec
|
||||
|
|
|
|||
|
|
@ -1,206 +1,12 @@
|
|||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, NamedTuple
|
||||
import contextvars
|
||||
from typing import Optional
|
||||
|
||||
|
||||
# Context variable to track when permission checks should be skipped
|
||||
_skip_permission_checks = contextvars.ContextVar(
|
||||
"skip_permission_checks", default=False
|
||||
)
|
||||
|
||||
|
||||
class SkipPermissions:
|
||||
"""Context manager to temporarily skip permission checks.
|
||||
|
||||
This is not a stable API and may change in future releases.
|
||||
|
||||
Usage:
|
||||
with SkipPermissions():
|
||||
# Permission checks are skipped within this block
|
||||
response = await datasette.client.get("/protected")
|
||||
"""
|
||||
|
||||
def __enter__(self):
|
||||
self.token = _skip_permission_checks.set(True)
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
_skip_permission_checks.reset(self.token)
|
||||
return False
|
||||
|
||||
|
||||
class Resource(ABC):
|
||||
"""
|
||||
Base class for all resource types.
|
||||
|
||||
Each subclass represents a type of resource (e.g., TableResource, DatabaseResource).
|
||||
The class itself carries metadata about the resource type.
|
||||
Instances represent specific resources.
|
||||
"""
|
||||
|
||||
# Class-level metadata (subclasses must define these)
|
||||
name: str = None # e.g., "table", "database", "model"
|
||||
parent_class: type["Resource"] | None = None # e.g., DatabaseResource for tables
|
||||
|
||||
# Instance-level optional extra attributes
|
||||
reasons: list[str] | None = None
|
||||
include_reasons: bool | None = None
|
||||
|
||||
def __init__(self, parent: str | None = None, child: str | None = None):
|
||||
"""
|
||||
Create a resource instance.
|
||||
|
||||
Args:
|
||||
parent: The parent identifier (meaning depends on resource type)
|
||||
child: The child identifier (meaning depends on resource type)
|
||||
"""
|
||||
self.parent = parent
|
||||
self.child = child
|
||||
self._private = None # Sentinel to track if private was set
|
||||
|
||||
@property
|
||||
def private(self) -> bool:
|
||||
"""
|
||||
Whether this resource is private (accessible to actor but not anonymous).
|
||||
|
||||
This property is only available on Resource objects returned from
|
||||
allowed_resources() when include_is_private=True is used.
|
||||
|
||||
Raises:
|
||||
AttributeError: If accessed without calling include_is_private=True
|
||||
"""
|
||||
if self._private is None:
|
||||
raise AttributeError(
|
||||
"The 'private' attribute is only available when using "
|
||||
"allowed_resources(..., include_is_private=True)"
|
||||
)
|
||||
return self._private
|
||||
|
||||
@private.setter
|
||||
def private(self, value: bool):
|
||||
self._private = value
|
||||
|
||||
@classmethod
|
||||
def __init_subclass__(cls):
|
||||
"""
|
||||
Validate resource hierarchy doesn't exceed 2 levels.
|
||||
|
||||
Raises:
|
||||
ValueError: If this resource would create a 3-level hierarchy
|
||||
"""
|
||||
super().__init_subclass__()
|
||||
|
||||
if cls.parent_class is None:
|
||||
return # Top of hierarchy, nothing to validate
|
||||
|
||||
# Check if our parent has a parent - that would create 3 levels
|
||||
if cls.parent_class.parent_class is not None:
|
||||
# We have a parent, and that parent has a parent
|
||||
# This creates a 3-level hierarchy, which is not allowed
|
||||
raise ValueError(
|
||||
f"Resource {cls.__name__} creates a 3-level hierarchy: "
|
||||
f"{cls.parent_class.parent_class.__name__} -> {cls.parent_class.__name__} -> {cls.__name__}. "
|
||||
f"Maximum 2 levels allowed (parent -> child)."
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def resources_sql(cls) -> str:
|
||||
"""
|
||||
Return SQL query that returns all resources of this type.
|
||||
|
||||
Must return two columns: parent, child
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class AllowedResource(NamedTuple):
|
||||
"""A resource with the reason it was allowed (for debugging)."""
|
||||
|
||||
resource: Resource
|
||||
reason: str
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class Action:
|
||||
name: str
|
||||
description: str | None
|
||||
abbr: str | None = None
|
||||
resource_class: type[Resource] | None = None
|
||||
also_requires: str | None = None # Optional action name that must also be allowed
|
||||
|
||||
@property
|
||||
def takes_parent(self) -> bool:
|
||||
"""
|
||||
Whether this action requires a parent identifier when instantiating its resource.
|
||||
|
||||
Returns False for global-only actions (no resource_class).
|
||||
Returns True for all actions with a resource_class (all resources require a parent identifier).
|
||||
"""
|
||||
return self.resource_class is not None
|
||||
|
||||
@property
|
||||
def takes_child(self) -> bool:
|
||||
"""
|
||||
Whether this action requires a child identifier when instantiating its resource.
|
||||
|
||||
Returns False for global actions (no resource_class).
|
||||
Returns False for parent-level resources (DatabaseResource - parent_class is None).
|
||||
Returns True for child-level resources (TableResource, QueryResource - have a parent_class).
|
||||
"""
|
||||
if self.resource_class is None:
|
||||
return False
|
||||
return self.resource_class.parent_class is not None
|
||||
|
||||
|
||||
_reason_id = 1
|
||||
|
||||
|
||||
@dataclass
|
||||
class PermissionSQL:
|
||||
"""
|
||||
A plugin contributes SQL that yields:
|
||||
parent TEXT NULL,
|
||||
child TEXT NULL,
|
||||
allow INTEGER, -- 1 allow, 0 deny
|
||||
reason TEXT
|
||||
|
||||
For restriction-only plugins, sql can be None and only restriction_sql is provided.
|
||||
"""
|
||||
|
||||
sql: str | None = (
|
||||
None # SQL that SELECTs the 4 columns above (can be None for restriction-only)
|
||||
)
|
||||
params: dict[str, Any] | None = (
|
||||
None # bound params for the SQL (values only; no ':' prefix)
|
||||
)
|
||||
source: str | None = None # System will set this to the plugin name
|
||||
restriction_sql: str | None = (
|
||||
None # Optional SQL that returns (parent, child) for restriction filtering
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def allow(cls, reason: str, _allow: bool = True) -> "PermissionSQL":
|
||||
global _reason_id
|
||||
i = _reason_id
|
||||
_reason_id += 1
|
||||
return cls(
|
||||
sql=f"SELECT NULL AS parent, NULL AS child, {1 if _allow else 0} AS allow, :reason_{i} AS reason",
|
||||
params={f"reason_{i}": reason},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def deny(cls, reason: str) -> "PermissionSQL":
|
||||
return cls.allow(reason=reason, _allow=False)
|
||||
|
||||
|
||||
# This is obsolete, replaced by Action and ResourceType
|
||||
@dataclass
|
||||
class Permission:
|
||||
name: str
|
||||
abbr: str | None
|
||||
description: str | None
|
||||
abbr: Optional[str]
|
||||
description: Optional[str]
|
||||
takes_database: bool
|
||||
takes_resource: bool
|
||||
default: bool
|
||||
|
|
|
|||
|
|
@ -23,7 +23,6 @@ DEFAULT_PLUGINS = (
|
|||
"datasette.sql_functions",
|
||||
"datasette.actor_auth_cookie",
|
||||
"datasette.default_permissions",
|
||||
"datasette.default_actions",
|
||||
"datasette.default_magic_parameters",
|
||||
"datasette.blob_renderer",
|
||||
"datasette.default_menu_links",
|
||||
|
|
@ -50,7 +49,7 @@ def after(outcome, hook_name, hook_impls, kwargs):
|
|||
results = outcome.get_result()
|
||||
if not isinstance(results, list):
|
||||
results = [results]
|
||||
print("Results:", file=sys.stderr)
|
||||
print(f"Results:", file=sys.stderr)
|
||||
pprint(results, width=40, indent=4, stream=sys.stderr)
|
||||
|
||||
|
||||
|
|
@ -94,24 +93,21 @@ def get_plugins():
|
|||
for plugin in pm.get_plugins():
|
||||
static_path = None
|
||||
templates_path = None
|
||||
plugin_name = (
|
||||
plugin.__name__
|
||||
if hasattr(plugin, "__name__")
|
||||
else plugin.__class__.__name__
|
||||
)
|
||||
if plugin_name not in DEFAULT_PLUGINS:
|
||||
if plugin.__name__ not in DEFAULT_PLUGINS:
|
||||
try:
|
||||
if (importlib_resources.files(plugin_name) / "static").is_dir():
|
||||
static_path = str(importlib_resources.files(plugin_name) / "static")
|
||||
if (importlib_resources.files(plugin_name) / "templates").is_dir():
|
||||
if (importlib_resources.files(plugin.__name__) / "static").is_dir():
|
||||
static_path = str(
|
||||
importlib_resources.files(plugin.__name__) / "static"
|
||||
)
|
||||
if (importlib_resources.files(plugin.__name__) / "templates").is_dir():
|
||||
templates_path = str(
|
||||
importlib_resources.files(plugin_name) / "templates"
|
||||
importlib_resources.files(plugin.__name__) / "templates"
|
||||
)
|
||||
except (TypeError, ModuleNotFoundError):
|
||||
# Caused by --plugins_dir= plugins
|
||||
pass
|
||||
plugin_info = {
|
||||
"name": plugin_name,
|
||||
"name": plugin.__name__,
|
||||
"static_path": static_path,
|
||||
"templates_path": templates_path,
|
||||
"hooks": [h.name for h in pm.get_hookcallers(plugin)],
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ import click
|
|||
import json
|
||||
import os
|
||||
import re
|
||||
from subprocess import CalledProcessError, check_call, check_output
|
||||
from subprocess import check_call, check_output
|
||||
|
||||
from .common import (
|
||||
add_common_publish_arguments_and_options,
|
||||
|
|
@ -23,9 +23,7 @@ def publish_subcommand(publish):
|
|||
help="Application name to use when building",
|
||||
)
|
||||
@click.option(
|
||||
"--service",
|
||||
default="",
|
||||
help="Cloud Run service to deploy (or over-write)",
|
||||
"--service", default="", help="Cloud Run service to deploy (or over-write)"
|
||||
)
|
||||
@click.option("--spatialite", is_flag=True, help="Enable SpatialLite extension")
|
||||
@click.option(
|
||||
|
|
@ -57,32 +55,13 @@ def publish_subcommand(publish):
|
|||
@click.option(
|
||||
"--max-instances",
|
||||
type=int,
|
||||
default=1,
|
||||
show_default=True,
|
||||
help="Maximum Cloud Run instances (use 0 to remove the limit)",
|
||||
help="Maximum Cloud Run instances",
|
||||
)
|
||||
@click.option(
|
||||
"--min-instances",
|
||||
type=int,
|
||||
help="Minimum Cloud Run instances",
|
||||
)
|
||||
@click.option(
|
||||
"--artifact-repository",
|
||||
default="datasette",
|
||||
show_default=True,
|
||||
help="Artifact Registry repository to store the image",
|
||||
)
|
||||
@click.option(
|
||||
"--artifact-region",
|
||||
default="us",
|
||||
show_default=True,
|
||||
help="Artifact Registry location (region or multi-region)",
|
||||
)
|
||||
@click.option(
|
||||
"--artifact-project",
|
||||
default=None,
|
||||
help="Project ID for Artifact Registry (defaults to the active project)",
|
||||
)
|
||||
def cloudrun(
|
||||
files,
|
||||
metadata,
|
||||
|
|
@ -112,9 +91,6 @@ def publish_subcommand(publish):
|
|||
apt_get_extras,
|
||||
max_instances,
|
||||
min_instances,
|
||||
artifact_repository,
|
||||
artifact_region,
|
||||
artifact_project,
|
||||
):
|
||||
"Publish databases to Datasette running on Cloud Run"
|
||||
fail_if_publish_binary_not_installed(
|
||||
|
|
@ -124,21 +100,6 @@ def publish_subcommand(publish):
|
|||
"gcloud config get-value project", shell=True, universal_newlines=True
|
||||
).strip()
|
||||
|
||||
artifact_project = artifact_project or project
|
||||
|
||||
# Ensure Artifact Registry exists for the target image
|
||||
_ensure_artifact_registry(
|
||||
artifact_project=artifact_project,
|
||||
artifact_region=artifact_region,
|
||||
artifact_repository=artifact_repository,
|
||||
)
|
||||
|
||||
artifact_host = (
|
||||
artifact_region
|
||||
if artifact_region.endswith("-docker.pkg.dev")
|
||||
else f"{artifact_region}-docker.pkg.dev"
|
||||
)
|
||||
|
||||
if not service:
|
||||
# Show the user their current services, then prompt for one
|
||||
click.echo("Please provide a service name for this deployment\n")
|
||||
|
|
@ -156,11 +117,6 @@ def publish_subcommand(publish):
|
|||
click.echo("")
|
||||
service = click.prompt("Service name", type=str)
|
||||
|
||||
image_id = (
|
||||
f"{artifact_host}/{artifact_project}/"
|
||||
f"{artifact_repository}/datasette-{service}"
|
||||
)
|
||||
|
||||
extra_metadata = {
|
||||
"title": title,
|
||||
"license": license,
|
||||
|
|
@ -217,6 +173,7 @@ def publish_subcommand(publish):
|
|||
print(fp.read())
|
||||
print("\n====================\n")
|
||||
|
||||
image_id = f"gcr.io/{project}/datasette-{service}"
|
||||
check_call(
|
||||
"gcloud builds submit --tag {}{}".format(
|
||||
image_id, " --timeout {}".format(timeout) if timeout else ""
|
||||
|
|
@ -230,7 +187,7 @@ def publish_subcommand(publish):
|
|||
("--max-instances", max_instances),
|
||||
("--min-instances", min_instances),
|
||||
):
|
||||
if value is not None:
|
||||
if value:
|
||||
extra_deploy_options.append("{} {}".format(option, value))
|
||||
check_call(
|
||||
"gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}".format(
|
||||
|
|
@ -242,52 +199,6 @@ def publish_subcommand(publish):
|
|||
)
|
||||
|
||||
|
||||
def _ensure_artifact_registry(artifact_project, artifact_region, artifact_repository):
|
||||
"""Ensure Artifact Registry API is enabled and the repository exists."""
|
||||
|
||||
enable_cmd = (
|
||||
"gcloud services enable artifactregistry.googleapis.com "
|
||||
f"--project {artifact_project} --quiet"
|
||||
)
|
||||
try:
|
||||
check_call(enable_cmd, shell=True)
|
||||
except CalledProcessError as exc:
|
||||
raise click.ClickException(
|
||||
"Failed to enable artifactregistry.googleapis.com. "
|
||||
"Please ensure you have permissions to manage services."
|
||||
) from exc
|
||||
|
||||
describe_cmd = (
|
||||
"gcloud artifacts repositories describe {repo} --project {project} "
|
||||
"--location {location} --quiet"
|
||||
).format(
|
||||
repo=artifact_repository,
|
||||
project=artifact_project,
|
||||
location=artifact_region,
|
||||
)
|
||||
try:
|
||||
check_call(describe_cmd, shell=True)
|
||||
return
|
||||
except CalledProcessError:
|
||||
create_cmd = (
|
||||
"gcloud artifacts repositories create {repo} --repository-format=docker "
|
||||
'--location {location} --project {project} --description "Datasette Cloud Run images" --quiet'
|
||||
).format(
|
||||
repo=artifact_repository,
|
||||
location=artifact_region,
|
||||
project=artifact_project,
|
||||
)
|
||||
try:
|
||||
check_call(create_cmd, shell=True)
|
||||
click.echo(f"Created Artifact Registry repository '{artifact_repository}'")
|
||||
except CalledProcessError as exc:
|
||||
raise click.ClickException(
|
||||
"Failed to create Artifact Registry repository. "
|
||||
"Use --artifact-repository/--artifact-region to point to an existing repo "
|
||||
"or create one manually."
|
||||
) from exc
|
||||
|
||||
|
||||
def get_existing_services():
|
||||
services = json.loads(
|
||||
check_output(
|
||||
|
|
@ -303,7 +214,6 @@ def get_existing_services():
|
|||
"url": service["status"]["address"]["url"],
|
||||
}
|
||||
for service in services
|
||||
if "url" in service["status"]
|
||||
]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ def convert_specific_columns_to_json(rows, columns, json_cols):
|
|||
if column in json_cols:
|
||||
try:
|
||||
value = json.loads(value)
|
||||
except (TypeError, ValueError):
|
||||
except (TypeError, ValueError) as e:
|
||||
pass
|
||||
new_row.append(value)
|
||||
new_rows.append(new_row)
|
||||
|
|
|
|||
|
|
@ -1,90 +0,0 @@
|
|||
"""Core resource types for Datasette's permission system."""
|
||||
|
||||
from datasette.permissions import Resource
|
||||
|
||||
|
||||
class DatabaseResource(Resource):
|
||||
"""A database in Datasette."""
|
||||
|
||||
name = "database"
|
||||
parent_class = None # Top of the resource hierarchy
|
||||
|
||||
def __init__(self, database: str):
|
||||
super().__init__(parent=database, child=None)
|
||||
|
||||
@classmethod
|
||||
async def resources_sql(cls, datasette) -> str:
|
||||
return """
|
||||
SELECT database_name AS parent, NULL AS child
|
||||
FROM catalog_databases
|
||||
"""
|
||||
|
||||
|
||||
class TableResource(Resource):
|
||||
"""A table in a database."""
|
||||
|
||||
name = "table"
|
||||
parent_class = DatabaseResource
|
||||
|
||||
def __init__(self, database: str, table: str):
|
||||
super().__init__(parent=database, child=table)
|
||||
|
||||
@classmethod
|
||||
async def resources_sql(cls, datasette) -> str:
|
||||
return """
|
||||
SELECT database_name AS parent, table_name AS child
|
||||
FROM catalog_tables
|
||||
UNION ALL
|
||||
SELECT database_name AS parent, view_name AS child
|
||||
FROM catalog_views
|
||||
"""
|
||||
|
||||
|
||||
class QueryResource(Resource):
|
||||
"""A canned query in a database."""
|
||||
|
||||
name = "query"
|
||||
parent_class = DatabaseResource
|
||||
|
||||
def __init__(self, database: str, query: str):
|
||||
super().__init__(parent=database, child=query)
|
||||
|
||||
@classmethod
|
||||
async def resources_sql(cls, datasette) -> str:
|
||||
from datasette.plugins import pm
|
||||
from datasette.utils import await_me_maybe
|
||||
|
||||
# Get all databases from catalog
|
||||
db = datasette.get_internal_database()
|
||||
result = await db.execute("SELECT database_name FROM catalog_databases")
|
||||
databases = [row[0] for row in result.rows]
|
||||
|
||||
# Gather all canned queries from all databases
|
||||
query_pairs = []
|
||||
for database_name in databases:
|
||||
# Call the hook to get queries (including from config via default plugin)
|
||||
for queries_result in pm.hook.canned_queries(
|
||||
datasette=datasette,
|
||||
database=database_name,
|
||||
actor=None, # Get ALL queries for resource enumeration
|
||||
):
|
||||
queries = await await_me_maybe(queries_result)
|
||||
if queries:
|
||||
for query_name in queries.keys():
|
||||
query_pairs.append((database_name, query_name))
|
||||
|
||||
# Build SQL
|
||||
if not query_pairs:
|
||||
return "SELECT NULL AS parent, NULL AS child WHERE 0"
|
||||
|
||||
# Generate UNION ALL query
|
||||
selects = []
|
||||
for db_name, query_name in query_pairs:
|
||||
# Escape single quotes by doubling them
|
||||
db_escaped = db_name.replace("'", "''")
|
||||
query_escaped = query_name.replace("'", "''")
|
||||
selects.append(
|
||||
f"SELECT '{db_escaped}' AS parent, '{query_escaped}' AS child"
|
||||
)
|
||||
|
||||
return " UNION ALL ".join(selects)
|
||||
|
|
@ -222,6 +222,12 @@ button.button-as-link:focus {
|
|||
color: #67C98D;
|
||||
}
|
||||
|
||||
a img {
|
||||
display: block;
|
||||
max-width: 100%;
|
||||
border: 0;
|
||||
}
|
||||
|
||||
code,
|
||||
pre {
|
||||
font-family: monospace;
|
||||
|
|
@ -259,8 +265,8 @@ a.not-underlined {
|
|||
|
||||
/* Page Furniture ========================================================= */
|
||||
/* Header */
|
||||
header.hd,
|
||||
footer.ft {
|
||||
header,
|
||||
footer {
|
||||
padding: 0.6rem 1rem 0.5rem 1rem;
|
||||
background-color: #276890;
|
||||
background: linear-gradient(180deg, rgba(96,144,173,1) 0%, rgba(39,104,144,1) 50%);
|
||||
|
|
@ -269,18 +275,15 @@ footer.ft {
|
|||
box-sizing: border-box;
|
||||
min-height: 2.6rem;
|
||||
}
|
||||
footer.ft {
|
||||
margin-top: 1rem;
|
||||
}
|
||||
header.hd p,
|
||||
footer.ft p {
|
||||
header p,
|
||||
footer p {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
header.hd .crumbs {
|
||||
header .crumbs {
|
||||
float: left;
|
||||
}
|
||||
header.hd .actor {
|
||||
header .actor {
|
||||
float: right;
|
||||
text-align: right;
|
||||
padding-left: 1rem;
|
||||
|
|
@ -289,32 +292,32 @@ header.hd .actor {
|
|||
top: -3px;
|
||||
}
|
||||
|
||||
footer.ft a:link,
|
||||
footer.ft a:visited,
|
||||
footer.ft a:hover,
|
||||
footer.ft a:focus,
|
||||
footer.ft a:active,
|
||||
footer.ft button.button-as-link {
|
||||
footer a:link,
|
||||
footer a:visited,
|
||||
footer a:hover,
|
||||
footer a:focus,
|
||||
footer a:active,
|
||||
footer button.button-as-link {
|
||||
color: rgba(255,255,244,0.8);
|
||||
}
|
||||
header.hd a:link,
|
||||
header.hd a:visited,
|
||||
header.hd a:hover,
|
||||
header.hd a:focus,
|
||||
header.hd a:active,
|
||||
header.hd button.button-as-link {
|
||||
header a:link,
|
||||
header a:visited,
|
||||
header a:hover,
|
||||
header a:focus,
|
||||
header a:active,
|
||||
header button.button-as-link {
|
||||
color: rgba(255,255,244,0.8);
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
footer.ft a:hover,
|
||||
footer.ft a:focus,
|
||||
footer.ft a:active,
|
||||
footer.ft .button-as-link:hover,
|
||||
footer.ft .button-as-link:focus,
|
||||
header.hd a:hover,
|
||||
header.hd a:focus,
|
||||
header.hd a:active,
|
||||
footer a:hover,
|
||||
footer a:focus,
|
||||
footer a:active,
|
||||
footer.button-as-link:hover,
|
||||
footer.button-as-link:focus,
|
||||
header a:hover,
|
||||
header a:focus,
|
||||
header a:active,
|
||||
button.button-as-link:hover,
|
||||
button.button-as-link:focus {
|
||||
color: rgba(255,255,244,1);
|
||||
|
|
@ -326,6 +329,11 @@ section.content {
|
|||
margin: 0 1rem;
|
||||
}
|
||||
|
||||
/* Footer */
|
||||
footer {
|
||||
margin-top: 1rem;
|
||||
}
|
||||
|
||||
/* Navigation menu */
|
||||
details.nav-menu > summary {
|
||||
list-style: none;
|
||||
|
|
@ -444,30 +452,36 @@ h2 em {
|
|||
.table-wrapper {
|
||||
overflow-x: auto;
|
||||
}
|
||||
table.rows-and-columns {
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
table.rows-and-columns td {
|
||||
td {
|
||||
border-top: 1px solid #aaa;
|
||||
border-right: 1px solid #eee;
|
||||
padding: 4px;
|
||||
vertical-align: top;
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
table.rows-and-columns td.type-pk {
|
||||
td.type-pk {
|
||||
font-weight: bold;
|
||||
}
|
||||
table.rows-and-columns td em {
|
||||
td em {
|
||||
font-style: normal;
|
||||
font-size: 0.8em;
|
||||
color: #aaa;
|
||||
}
|
||||
table.rows-and-columns th {
|
||||
th {
|
||||
padding-right: 1em;
|
||||
}
|
||||
table.rows-and-columns a:link {
|
||||
table a:link {
|
||||
text-decoration: none;
|
||||
}
|
||||
.rows-and-columns td:before {
|
||||
display: block;
|
||||
color: black;
|
||||
margin-left: -10%;
|
||||
font-size: 0.8em;
|
||||
}
|
||||
.rows-and-columns td ol,
|
||||
.rows-and-columns td ul {
|
||||
list-style: initial;
|
||||
|
|
@ -485,8 +499,10 @@ a.blob-download {
|
|||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
|
||||
/* Forms =================================================================== */
|
||||
|
||||
|
||||
form.sql textarea {
|
||||
border: 1px solid #ccc;
|
||||
width: 70%;
|
||||
|
|
@ -498,6 +514,10 @@ form.sql textarea {
|
|||
form.sql label {
|
||||
width: 15%;
|
||||
}
|
||||
form label {
|
||||
font-weight: bold;
|
||||
display: inline-block;
|
||||
}
|
||||
.advanced-export input[type=submit] {
|
||||
font-size: 0.6em;
|
||||
margin-left: 1em;
|
||||
|
|
@ -508,17 +528,8 @@ label.sort_by_desc {
|
|||
pre#sql-query {
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
.core label,
|
||||
label.core {
|
||||
font-weight: bold;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.core input[type=text],
|
||||
input.core[type=text],
|
||||
.core input[type=search],
|
||||
input.core[type=search] {
|
||||
form input[type=text],
|
||||
form input[type=search] {
|
||||
border: 1px solid #ccc;
|
||||
border-radius: 3px;
|
||||
width: 60%;
|
||||
|
|
@ -527,27 +538,19 @@ input.core[type=search] {
|
|||
font-size: 1em;
|
||||
font-family: Helvetica, sans-serif;
|
||||
}
|
||||
.core input[type=search],
|
||||
input.core[type=search] {
|
||||
/* Stop Webkit from styling search boxes in an inconsistent way */
|
||||
/* https://css-tricks.com/webkit-html5-search-inputs/ comments */
|
||||
/* Stop Webkit from styling search boxes in an inconsistent way */
|
||||
/* https://css-tricks.com/webkit-html5-search-inputs/ comments */
|
||||
input[type=search] {
|
||||
-webkit-appearance: textfield;
|
||||
}
|
||||
.core input[type="search"]::-webkit-search-decoration,
|
||||
input.core[type="search"]::-webkit-search-decoration,
|
||||
.core input[type="search"]::-webkit-search-cancel-button,
|
||||
input.core[type="search"]::-webkit-search-cancel-button,
|
||||
.core input[type="search"]::-webkit-search-results-button,
|
||||
input.core[type="search"]::-webkit-search-results-button,
|
||||
.core input[type="search"]::-webkit-search-results-decoration,
|
||||
input.core[type="search"]::-webkit-search-results-decoration {
|
||||
input[type="search"]::-webkit-search-decoration,
|
||||
input[type="search"]::-webkit-search-cancel-button,
|
||||
input[type="search"]::-webkit-search-results-button,
|
||||
input[type="search"]::-webkit-search-results-decoration {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.core input[type=submit],
|
||||
.core button[type=button],
|
||||
input.core[type=submit],
|
||||
button.core[type=button] {
|
||||
form input[type=submit], form button[type=button] {
|
||||
font-weight: 400;
|
||||
cursor: pointer;
|
||||
text-align: center;
|
||||
|
|
@ -560,16 +563,14 @@ button.core[type=button] {
|
|||
border-radius: .25rem;
|
||||
}
|
||||
|
||||
.core input[type=submit],
|
||||
input.core[type=submit] {
|
||||
form input[type=submit] {
|
||||
color: #fff;
|
||||
background: linear-gradient(180deg, #007bff 0%, #4E79C7 100%);
|
||||
border-color: #007bff;
|
||||
-webkit-appearance: button;
|
||||
}
|
||||
|
||||
.core button[type=button],
|
||||
button.core[type=button] {
|
||||
form button[type=button] {
|
||||
color: #007bff;
|
||||
background-color: #fff;
|
||||
border-color: #007bff;
|
||||
|
|
@ -759,7 +760,7 @@ p.zero-results {
|
|||
left: -9999px;
|
||||
}
|
||||
|
||||
table.rows-and-columns tr {
|
||||
.rows-and-columns tr {
|
||||
border: 1px solid #ccc;
|
||||
margin-bottom: 1em;
|
||||
border-radius: 10px;
|
||||
|
|
@ -767,7 +768,7 @@ p.zero-results {
|
|||
padding: 0.2rem;
|
||||
}
|
||||
|
||||
table.rows-and-columns td {
|
||||
.rows-and-columns td {
|
||||
/* Behave like a "row" */
|
||||
border: none;
|
||||
border-bottom: 1px solid #eee;
|
||||
|
|
@ -775,7 +776,7 @@ p.zero-results {
|
|||
padding-left: 10%;
|
||||
}
|
||||
|
||||
table.rows-and-columns td:before {
|
||||
.rows-and-columns td:before {
|
||||
display: block;
|
||||
color: black;
|
||||
margin-left: -10%;
|
||||
|
|
|
|||
|
|
@ -93,12 +93,12 @@ const datasetteManager = {
|
|||
*/
|
||||
renderAboveTablePanel: () => {
|
||||
const aboveTablePanel = document.querySelector(
|
||||
DOM_SELECTORS.aboveTablePanel,
|
||||
DOM_SELECTORS.aboveTablePanel
|
||||
);
|
||||
|
||||
if (!aboveTablePanel) {
|
||||
console.warn(
|
||||
"This page does not have a table, the renderAboveTablePanel cannot be used.",
|
||||
"This page does not have a table, the renderAboveTablePanel cannot be used."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,8 +7,8 @@ MIT Licensed
|
|||
typeof exports === "object" && typeof module !== "undefined"
|
||||
? (module.exports = factory())
|
||||
: typeof define === "function" && define.amd
|
||||
? define(factory)
|
||||
: (global.jsonFormatHighlight = factory());
|
||||
? define(factory)
|
||||
: (global.jsonFormatHighlight = factory());
|
||||
})(this, function () {
|
||||
"use strict";
|
||||
|
||||
|
|
@ -42,13 +42,13 @@ MIT Licensed
|
|||
color = /true/.test(match)
|
||||
? colors.trueColor
|
||||
: /false/.test(match)
|
||||
? colors.falseColor
|
||||
: /null/.test(match)
|
||||
? colors.nullColor
|
||||
: color;
|
||||
? colors.falseColor
|
||||
: /null/.test(match)
|
||||
? colors.nullColor
|
||||
: color;
|
||||
}
|
||||
return '<span style="color: ' + color + '">' + match + "</span>";
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,416 +0,0 @@
|
|||
class NavigationSearch extends HTMLElement {
|
||||
constructor() {
|
||||
super();
|
||||
this.attachShadow({ mode: "open" });
|
||||
this.selectedIndex = -1;
|
||||
this.matches = [];
|
||||
this.debounceTimer = null;
|
||||
|
||||
this.render();
|
||||
this.setupEventListeners();
|
||||
}
|
||||
|
||||
render() {
|
||||
this.shadowRoot.innerHTML = `
|
||||
<style>
|
||||
:host {
|
||||
display: contents;
|
||||
}
|
||||
|
||||
dialog {
|
||||
border: none;
|
||||
border-radius: 0.75rem;
|
||||
padding: 0;
|
||||
max-width: 90vw;
|
||||
width: 600px;
|
||||
max-height: 80vh;
|
||||
box-shadow: 0 20px 25px -5px rgba(0, 0, 0, 0.1), 0 10px 10px -5px rgba(0, 0, 0, 0.04);
|
||||
animation: slideIn 0.2s ease-out;
|
||||
}
|
||||
|
||||
dialog::backdrop {
|
||||
background: rgba(0, 0, 0, 0.5);
|
||||
backdrop-filter: blur(4px);
|
||||
animation: fadeIn 0.2s ease-out;
|
||||
}
|
||||
|
||||
@keyframes slideIn {
|
||||
from {
|
||||
opacity: 0;
|
||||
transform: translateY(-20px) scale(0.95);
|
||||
}
|
||||
to {
|
||||
opacity: 1;
|
||||
transform: translateY(0) scale(1);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes fadeIn {
|
||||
from { opacity: 0; }
|
||||
to { opacity: 1; }
|
||||
}
|
||||
|
||||
.search-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.search-input-wrapper {
|
||||
padding: 1.25rem;
|
||||
border-bottom: 1px solid #e5e7eb;
|
||||
}
|
||||
|
||||
.search-input {
|
||||
width: 100%;
|
||||
padding: 0.75rem 1rem;
|
||||
font-size: 1rem;
|
||||
border: 2px solid #e5e7eb;
|
||||
border-radius: 0.5rem;
|
||||
outline: none;
|
||||
transition: border-color 0.2s;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.search-input:focus {
|
||||
border-color: #2563eb;
|
||||
}
|
||||
|
||||
.results-container {
|
||||
overflow-y: auto;
|
||||
height: calc(80vh - 180px);
|
||||
padding: 0.5rem;
|
||||
}
|
||||
|
||||
.result-item {
|
||||
padding: 0.875rem 1rem;
|
||||
cursor: pointer;
|
||||
border-radius: 0.5rem;
|
||||
transition: background-color 0.15s;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.75rem;
|
||||
}
|
||||
|
||||
.result-item:hover {
|
||||
background-color: #f3f4f6;
|
||||
}
|
||||
|
||||
.result-item.selected {
|
||||
background-color: #dbeafe;
|
||||
}
|
||||
|
||||
.result-name {
|
||||
font-weight: 500;
|
||||
color: #111827;
|
||||
}
|
||||
|
||||
.result-url {
|
||||
font-size: 0.875rem;
|
||||
color: #6b7280;
|
||||
}
|
||||
|
||||
.no-results {
|
||||
padding: 2rem;
|
||||
text-align: center;
|
||||
color: #6b7280;
|
||||
}
|
||||
|
||||
.hint-text {
|
||||
padding: 0.75rem 1.25rem;
|
||||
font-size: 0.875rem;
|
||||
color: #6b7280;
|
||||
border-top: 1px solid #e5e7eb;
|
||||
display: flex;
|
||||
gap: 1rem;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.hint-text kbd {
|
||||
background: #f3f4f6;
|
||||
padding: 0.125rem 0.375rem;
|
||||
border-radius: 0.25rem;
|
||||
font-size: 0.75rem;
|
||||
border: 1px solid #d1d5db;
|
||||
font-family: monospace;
|
||||
}
|
||||
|
||||
/* Mobile optimizations */
|
||||
@media (max-width: 640px) {
|
||||
dialog {
|
||||
width: 95vw;
|
||||
max-height: 85vh;
|
||||
border-radius: 0.5rem;
|
||||
}
|
||||
|
||||
.search-input-wrapper {
|
||||
padding: 1rem;
|
||||
}
|
||||
|
||||
.search-input {
|
||||
font-size: 16px; /* Prevents zoom on iOS */
|
||||
}
|
||||
|
||||
.result-item {
|
||||
padding: 1rem 0.75rem;
|
||||
}
|
||||
|
||||
.hint-text {
|
||||
font-size: 0.8rem;
|
||||
padding: 0.5rem 1rem;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
||||
<dialog>
|
||||
<div class="search-container">
|
||||
<div class="search-input-wrapper">
|
||||
<input
|
||||
type="text"
|
||||
class="search-input"
|
||||
placeholder="Search..."
|
||||
aria-label="Search navigation"
|
||||
autocomplete="off"
|
||||
spellcheck="false"
|
||||
>
|
||||
</div>
|
||||
<div class="results-container" role="listbox"></div>
|
||||
<div class="hint-text">
|
||||
<span><kbd>↑</kbd> <kbd>↓</kbd> Navigate</span>
|
||||
<span><kbd>Enter</kbd> Select</span>
|
||||
<span><kbd>Esc</kbd> Close</span>
|
||||
</div>
|
||||
</div>
|
||||
</dialog>
|
||||
`;
|
||||
}
|
||||
|
||||
setupEventListeners() {
|
||||
const dialog = this.shadowRoot.querySelector("dialog");
|
||||
const input = this.shadowRoot.querySelector(".search-input");
|
||||
const resultsContainer =
|
||||
this.shadowRoot.querySelector(".results-container");
|
||||
|
||||
// Global keyboard listener for "/"
|
||||
document.addEventListener("keydown", (e) => {
|
||||
if (e.key === "/" && !this.isInputFocused() && !dialog.open) {
|
||||
e.preventDefault();
|
||||
this.openMenu();
|
||||
}
|
||||
});
|
||||
|
||||
// Input event
|
||||
input.addEventListener("input", (e) => {
|
||||
this.handleSearch(e.target.value);
|
||||
});
|
||||
|
||||
// Keyboard navigation
|
||||
input.addEventListener("keydown", (e) => {
|
||||
if (e.key === "ArrowDown") {
|
||||
e.preventDefault();
|
||||
this.moveSelection(1);
|
||||
} else if (e.key === "ArrowUp") {
|
||||
e.preventDefault();
|
||||
this.moveSelection(-1);
|
||||
} else if (e.key === "Enter") {
|
||||
e.preventDefault();
|
||||
this.selectCurrentItem();
|
||||
} else if (e.key === "Escape") {
|
||||
this.closeMenu();
|
||||
}
|
||||
});
|
||||
|
||||
// Click on result item
|
||||
resultsContainer.addEventListener("click", (e) => {
|
||||
const item = e.target.closest(".result-item");
|
||||
if (item) {
|
||||
const index = parseInt(item.dataset.index);
|
||||
this.selectItem(index);
|
||||
}
|
||||
});
|
||||
|
||||
// Close on backdrop click
|
||||
dialog.addEventListener("click", (e) => {
|
||||
if (e.target === dialog) {
|
||||
this.closeMenu();
|
||||
}
|
||||
});
|
||||
|
||||
// Initial load
|
||||
this.loadInitialData();
|
||||
}
|
||||
|
||||
isInputFocused() {
|
||||
const activeElement = document.activeElement;
|
||||
return (
|
||||
activeElement &&
|
||||
(activeElement.tagName === "INPUT" ||
|
||||
activeElement.tagName === "TEXTAREA" ||
|
||||
activeElement.isContentEditable)
|
||||
);
|
||||
}
|
||||
|
||||
loadInitialData() {
|
||||
const itemsAttr = this.getAttribute("items");
|
||||
if (itemsAttr) {
|
||||
try {
|
||||
this.allItems = JSON.parse(itemsAttr);
|
||||
this.matches = this.allItems;
|
||||
} catch (e) {
|
||||
console.error("Failed to parse items attribute:", e);
|
||||
this.allItems = [];
|
||||
this.matches = [];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
handleSearch(query) {
|
||||
clearTimeout(this.debounceTimer);
|
||||
|
||||
this.debounceTimer = setTimeout(() => {
|
||||
const url = this.getAttribute("url");
|
||||
|
||||
if (url) {
|
||||
// Fetch from API
|
||||
this.fetchResults(url, query);
|
||||
} else {
|
||||
// Filter local items
|
||||
this.filterLocalItems(query);
|
||||
}
|
||||
}, 200);
|
||||
}
|
||||
|
||||
async fetchResults(url, query) {
|
||||
try {
|
||||
const searchUrl = `${url}?q=${encodeURIComponent(query)}`;
|
||||
const response = await fetch(searchUrl);
|
||||
const data = await response.json();
|
||||
this.matches = data.matches || [];
|
||||
this.selectedIndex = this.matches.length > 0 ? 0 : -1;
|
||||
this.renderResults();
|
||||
} catch (e) {
|
||||
console.error("Failed to fetch search results:", e);
|
||||
this.matches = [];
|
||||
this.renderResults();
|
||||
}
|
||||
}
|
||||
|
||||
filterLocalItems(query) {
|
||||
if (!query.trim()) {
|
||||
this.matches = [];
|
||||
} else {
|
||||
const lowerQuery = query.toLowerCase();
|
||||
this.matches = (this.allItems || []).filter(
|
||||
(item) =>
|
||||
item.name.toLowerCase().includes(lowerQuery) ||
|
||||
item.url.toLowerCase().includes(lowerQuery),
|
||||
);
|
||||
}
|
||||
this.selectedIndex = this.matches.length > 0 ? 0 : -1;
|
||||
this.renderResults();
|
||||
}
|
||||
|
||||
renderResults() {
|
||||
const container = this.shadowRoot.querySelector(".results-container");
|
||||
const input = this.shadowRoot.querySelector(".search-input");
|
||||
|
||||
if (this.matches.length === 0) {
|
||||
const message = input.value.trim()
|
||||
? "No results found"
|
||||
: "Start typing to search...";
|
||||
container.innerHTML = `<div class="no-results">${message}</div>`;
|
||||
return;
|
||||
}
|
||||
|
||||
container.innerHTML = this.matches
|
||||
.map(
|
||||
(match, index) => `
|
||||
<div
|
||||
class="result-item ${
|
||||
index === this.selectedIndex ? "selected" : ""
|
||||
}"
|
||||
data-index="${index}"
|
||||
role="option"
|
||||
aria-selected="${index === this.selectedIndex}"
|
||||
>
|
||||
<div>
|
||||
<div class="result-name">${this.escapeHtml(
|
||||
match.name,
|
||||
)}</div>
|
||||
<div class="result-url">${this.escapeHtml(match.url)}</div>
|
||||
</div>
|
||||
</div>
|
||||
`,
|
||||
)
|
||||
.join("");
|
||||
|
||||
// Scroll selected item into view
|
||||
if (this.selectedIndex >= 0) {
|
||||
const selectedItem = container.children[this.selectedIndex];
|
||||
if (selectedItem) {
|
||||
selectedItem.scrollIntoView({ block: "nearest" });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
moveSelection(direction) {
|
||||
const newIndex = this.selectedIndex + direction;
|
||||
if (newIndex >= 0 && newIndex < this.matches.length) {
|
||||
this.selectedIndex = newIndex;
|
||||
this.renderResults();
|
||||
}
|
||||
}
|
||||
|
||||
selectCurrentItem() {
|
||||
if (this.selectedIndex >= 0 && this.selectedIndex < this.matches.length) {
|
||||
this.selectItem(this.selectedIndex);
|
||||
}
|
||||
}
|
||||
|
||||
selectItem(index) {
|
||||
const match = this.matches[index];
|
||||
if (match) {
|
||||
// Dispatch custom event
|
||||
this.dispatchEvent(
|
||||
new CustomEvent("select", {
|
||||
detail: match,
|
||||
bubbles: true,
|
||||
composed: true,
|
||||
}),
|
||||
);
|
||||
|
||||
// Navigate to URL
|
||||
window.location.href = match.url;
|
||||
|
||||
this.closeMenu();
|
||||
}
|
||||
}
|
||||
|
||||
openMenu() {
|
||||
const dialog = this.shadowRoot.querySelector("dialog");
|
||||
const input = this.shadowRoot.querySelector(".search-input");
|
||||
|
||||
dialog.showModal();
|
||||
input.value = "";
|
||||
input.focus();
|
||||
|
||||
// Reset state - start with no items shown
|
||||
this.matches = [];
|
||||
this.selectedIndex = -1;
|
||||
this.renderResults();
|
||||
}
|
||||
|
||||
closeMenu() {
|
||||
const dialog = this.shadowRoot.querySelector("dialog");
|
||||
dialog.close();
|
||||
}
|
||||
|
||||
escapeHtml(text) {
|
||||
const div = document.createElement("div");
|
||||
div.textContent = text;
|
||||
return div.innerHTML;
|
||||
}
|
||||
}
|
||||
|
||||
// Register the custom element
|
||||
customElements.define("navigation-search", NavigationSearch);
|
||||
|
|
@ -132,7 +132,7 @@ const initDatasetteTable = function (manager) {
|
|||
/* Only show "Facet by this" if it's not the first column, not selected,
|
||||
not a single PK and the Datasette allow_facet setting is True */
|
||||
var displayedFacets = Array.from(
|
||||
document.querySelectorAll(".facet-info"),
|
||||
document.querySelectorAll(".facet-info")
|
||||
).map((el) => el.dataset.column);
|
||||
var isFirstColumn =
|
||||
th.parentElement.querySelector("th:first-of-type") == th;
|
||||
|
|
@ -152,7 +152,7 @@ const initDatasetteTable = function (manager) {
|
|||
}
|
||||
/* Show notBlank option if not selected AND at least one visible blank value */
|
||||
var tdsForThisColumn = Array.from(
|
||||
th.closest("table").querySelectorAll("td." + th.className),
|
||||
th.closest("table").querySelectorAll("td." + th.className)
|
||||
);
|
||||
if (
|
||||
params.get(`${column}__notblank`) != "1" &&
|
||||
|
|
@ -191,31 +191,29 @@ const initDatasetteTable = function (manager) {
|
|||
// Plugin hook: allow adding JS-based additional menu items
|
||||
const columnActionsPayload = {
|
||||
columnName: th.dataset.column,
|
||||
columnNotNull: th.dataset.columnNotNull === "1",
|
||||
columnNotNull: th.dataset.columnNotNull === '1',
|
||||
columnType: th.dataset.columnType,
|
||||
isPk: th.dataset.isPk === "1",
|
||||
isPk: th.dataset.isPk === '1'
|
||||
};
|
||||
const columnItemConfigs = manager.makeColumnActions(columnActionsPayload);
|
||||
|
||||
const menuList = menu.querySelector("ul");
|
||||
columnItemConfigs.forEach((itemConfig) => {
|
||||
const menuList = menu.querySelector('ul');
|
||||
columnItemConfigs.forEach(itemConfig => {
|
||||
// Remove items from previous render. We assume entries have unique labels.
|
||||
const existingItems = menuList.querySelectorAll(`li`);
|
||||
Array.from(existingItems)
|
||||
.filter((item) => item.innerText === itemConfig.label)
|
||||
.forEach((node) => {
|
||||
node.remove();
|
||||
});
|
||||
Array.from(existingItems).filter(item => item.innerText === itemConfig.label).forEach(node => {
|
||||
node.remove();
|
||||
});
|
||||
|
||||
const newLink = document.createElement("a");
|
||||
const newLink = document.createElement('a');
|
||||
newLink.textContent = itemConfig.label;
|
||||
newLink.href = itemConfig.href ?? "#";
|
||||
newLink.href = itemConfig.href ?? '#';
|
||||
if (itemConfig.onClick) {
|
||||
newLink.onclick = itemConfig.onClick;
|
||||
}
|
||||
|
||||
// Attach new elements to DOM
|
||||
const menuItem = document.createElement("li");
|
||||
const menuItem = document.createElement('li');
|
||||
menuItem.appendChild(newLink);
|
||||
menuList.appendChild(menuItem);
|
||||
});
|
||||
|
|
@ -227,17 +225,17 @@ const initDatasetteTable = function (manager) {
|
|||
menu.style.left = windowWidth - menuWidth - 20 + "px";
|
||||
}
|
||||
// Align menu .hook arrow with the column cog icon
|
||||
const hook = menu.querySelector(".hook");
|
||||
const icon = th.querySelector(".dropdown-menu-icon");
|
||||
const hook = menu.querySelector('.hook');
|
||||
const icon = th.querySelector('.dropdown-menu-icon');
|
||||
const iconRect = icon.getBoundingClientRect();
|
||||
const hookLeft = iconRect.left - menuLeft + 1 + "px";
|
||||
const hookLeft = (iconRect.left - menuLeft + 1) + 'px';
|
||||
hook.style.left = hookLeft;
|
||||
// Move the whole menu right if the hook is too far right
|
||||
const menuRect = menu.getBoundingClientRect();
|
||||
if (iconRect.right > menuRect.right) {
|
||||
menu.style.left = iconRect.right - menuWidth + "px";
|
||||
menu.style.left = (iconRect.right - menuWidth) + 'px';
|
||||
// And move hook tip as well
|
||||
hook.style.left = menuWidth - 13 + "px";
|
||||
hook.style.left = (menuWidth - 13) + 'px';
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -252,9 +250,7 @@ const initDatasetteTable = function (manager) {
|
|||
menu.style.display = "none";
|
||||
document.body.appendChild(menu);
|
||||
|
||||
var ths = Array.from(
|
||||
document.querySelectorAll(manager.selectors.tableHeaders),
|
||||
);
|
||||
var ths = Array.from(document.querySelectorAll(manager.selectors.tableHeaders));
|
||||
ths.forEach((th) => {
|
||||
if (!th.querySelector("a")) {
|
||||
return;
|
||||
|
|
@ -268,9 +264,9 @@ const initDatasetteTable = function (manager) {
|
|||
/* Add x buttons to the filter rows */
|
||||
function addButtonsToFilterRows(manager) {
|
||||
var x = "✖";
|
||||
var rows = Array.from(
|
||||
document.querySelectorAll(manager.selectors.filterRow),
|
||||
).filter((el) => el.querySelector(".filter-op"));
|
||||
var rows = Array.from(document.querySelectorAll(manager.selectors.filterRow)).filter((el) =>
|
||||
el.querySelector(".filter-op")
|
||||
);
|
||||
rows.forEach((row) => {
|
||||
var a = document.createElement("a");
|
||||
a.setAttribute("href", "#");
|
||||
|
|
@ -291,18 +287,18 @@ function addButtonsToFilterRows(manager) {
|
|||
a.style.display = "none";
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/* Set up datalist autocomplete for filter values */
|
||||
function initAutocompleteForFilterValues(manager) {
|
||||
function createDataLists() {
|
||||
var facetResults = document.querySelectorAll(
|
||||
manager.selectors.facetResults,
|
||||
manager.selectors.facetResults
|
||||
);
|
||||
Array.from(facetResults).forEach(function (facetResult) {
|
||||
// Use link text from all links in the facet result
|
||||
var links = Array.from(
|
||||
facetResult.querySelectorAll("li:not(.facet-truncated) a"),
|
||||
facetResult.querySelectorAll("li:not(.facet-truncated) a")
|
||||
);
|
||||
// Create a datalist element
|
||||
var datalist = document.createElement("datalist");
|
||||
|
|
@ -328,7 +324,7 @@ function initAutocompleteForFilterValues(manager) {
|
|||
.setAttribute("list", "datalist-" + event.target.value);
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// Ensures Table UI is initialized only after the Manager is ready.
|
||||
document.addEventListener("datasette_init", function (evt) {
|
||||
|
|
|
|||
|
|
@ -1,50 +0,0 @@
|
|||
<script>
|
||||
// Common utility functions for debug pages
|
||||
|
||||
// Populate form from URL parameters on page load
|
||||
function populateFormFromURL() {
|
||||
const params = new URLSearchParams(window.location.search);
|
||||
|
||||
const action = params.get('action');
|
||||
if (action) {
|
||||
const actionField = document.getElementById('action');
|
||||
if (actionField) {
|
||||
actionField.value = action;
|
||||
}
|
||||
}
|
||||
|
||||
const parent = params.get('parent');
|
||||
if (parent) {
|
||||
const parentField = document.getElementById('parent');
|
||||
if (parentField) {
|
||||
parentField.value = parent;
|
||||
}
|
||||
}
|
||||
|
||||
const child = params.get('child');
|
||||
if (child) {
|
||||
const childField = document.getElementById('child');
|
||||
if (childField) {
|
||||
childField.value = child;
|
||||
}
|
||||
}
|
||||
|
||||
const pageSize = params.get('page_size');
|
||||
if (pageSize) {
|
||||
const pageSizeField = document.getElementById('page_size');
|
||||
if (pageSizeField) {
|
||||
pageSizeField.value = pageSize;
|
||||
}
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
// HTML escape function
|
||||
function escapeHtml(text) {
|
||||
if (text === null || text === undefined) return '';
|
||||
const div = document.createElement('div');
|
||||
div.textContent = text;
|
||||
return div.innerHTML;
|
||||
}
|
||||
</script>
|
||||
|
|
@ -1,145 +0,0 @@
|
|||
<style>
|
||||
.permission-form {
|
||||
background-color: #f5f5f5;
|
||||
border: 1px solid #ddd;
|
||||
border-radius: 5px;
|
||||
padding: 1.5em;
|
||||
margin-bottom: 2em;
|
||||
}
|
||||
.form-section {
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
.form-section label {
|
||||
display: block;
|
||||
margin-bottom: 0.3em;
|
||||
font-weight: bold;
|
||||
}
|
||||
.form-section input[type="text"],
|
||||
.form-section select {
|
||||
width: 100%;
|
||||
max-width: 500px;
|
||||
padding: 0.5em;
|
||||
box-sizing: border-box;
|
||||
border: 1px solid #ccc;
|
||||
border-radius: 3px;
|
||||
}
|
||||
.form-section input[type="text"]:focus,
|
||||
.form-section select:focus {
|
||||
outline: 2px solid #0066cc;
|
||||
border-color: #0066cc;
|
||||
}
|
||||
.form-section small {
|
||||
display: block;
|
||||
margin-top: 0.3em;
|
||||
color: #666;
|
||||
}
|
||||
.form-actions {
|
||||
margin-top: 1em;
|
||||
}
|
||||
.submit-btn {
|
||||
padding: 0.6em 1.5em;
|
||||
font-size: 1em;
|
||||
background-color: #0066cc;
|
||||
color: white;
|
||||
border: none;
|
||||
border-radius: 3px;
|
||||
cursor: pointer;
|
||||
}
|
||||
.submit-btn:hover {
|
||||
background-color: #0052a3;
|
||||
}
|
||||
.submit-btn:disabled {
|
||||
background-color: #ccc;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
.results-container {
|
||||
margin-top: 2em;
|
||||
}
|
||||
.results-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
.results-count {
|
||||
font-size: 0.9em;
|
||||
color: #666;
|
||||
}
|
||||
.results-table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
background-color: white;
|
||||
box-shadow: 0 1px 3px rgba(0,0,0,0.1);
|
||||
}
|
||||
.results-table th {
|
||||
background-color: #f5f5f5;
|
||||
padding: 0.75em;
|
||||
text-align: left;
|
||||
font-weight: bold;
|
||||
border-bottom: 2px solid #ddd;
|
||||
}
|
||||
.results-table td {
|
||||
padding: 0.75em;
|
||||
border-bottom: 1px solid #eee;
|
||||
}
|
||||
.results-table tr:hover {
|
||||
background-color: #f9f9f9;
|
||||
}
|
||||
.results-table tr.allow-row {
|
||||
background-color: #f1f8f4;
|
||||
}
|
||||
.results-table tr.allow-row:hover {
|
||||
background-color: #e8f5e9;
|
||||
}
|
||||
.results-table tr.deny-row {
|
||||
background-color: #fef5f5;
|
||||
}
|
||||
.results-table tr.deny-row:hover {
|
||||
background-color: #ffebee;
|
||||
}
|
||||
.resource-path {
|
||||
font-family: monospace;
|
||||
background-color: #f5f5f5;
|
||||
padding: 0.2em 0.4em;
|
||||
border-radius: 3px;
|
||||
}
|
||||
.pagination {
|
||||
margin-top: 1.5em;
|
||||
display: flex;
|
||||
gap: 1em;
|
||||
align-items: center;
|
||||
}
|
||||
.pagination a {
|
||||
padding: 0.5em 1em;
|
||||
background-color: #0066cc;
|
||||
color: white;
|
||||
text-decoration: none;
|
||||
border-radius: 3px;
|
||||
}
|
||||
.pagination a:hover {
|
||||
background-color: #0052a3;
|
||||
}
|
||||
.pagination span {
|
||||
color: #666;
|
||||
}
|
||||
.no-results {
|
||||
padding: 2em;
|
||||
text-align: center;
|
||||
color: #666;
|
||||
background-color: #f9f9f9;
|
||||
border: 1px solid #ddd;
|
||||
border-radius: 5px;
|
||||
}
|
||||
.error-message {
|
||||
padding: 1em;
|
||||
background-color: #ffebee;
|
||||
border: 2px solid #f44336;
|
||||
border-radius: 5px;
|
||||
color: #c62828;
|
||||
}
|
||||
.loading {
|
||||
padding: 2em;
|
||||
text-align: center;
|
||||
color: #666;
|
||||
}
|
||||
</style>
|
||||
|
|
@ -1,54 +0,0 @@
|
|||
{% if has_debug_permission %}
|
||||
{% set query_string = '?' + request.query_string if request.query_string else '' %}
|
||||
|
||||
<style>
|
||||
.permissions-debug-tabs {
|
||||
border-bottom: 2px solid #e0e0e0;
|
||||
margin-bottom: 2em;
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 0.5em;
|
||||
}
|
||||
.permissions-debug-tabs a {
|
||||
padding: 0.75em 1.25em;
|
||||
text-decoration: none;
|
||||
color: #333;
|
||||
border-bottom: 3px solid transparent;
|
||||
margin-bottom: -2px;
|
||||
transition: all 0.2s;
|
||||
font-weight: 500;
|
||||
}
|
||||
.permissions-debug-tabs a:hover {
|
||||
background-color: #f5f5f5;
|
||||
border-bottom-color: #999;
|
||||
}
|
||||
.permissions-debug-tabs a.active {
|
||||
color: #0066cc;
|
||||
border-bottom-color: #0066cc;
|
||||
background-color: #f0f7ff;
|
||||
}
|
||||
@media only screen and (max-width: 576px) {
|
||||
.permissions-debug-tabs {
|
||||
flex-direction: column;
|
||||
gap: 0;
|
||||
}
|
||||
.permissions-debug-tabs a {
|
||||
border-bottom: 1px solid #e0e0e0;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
.permissions-debug-tabs a.active {
|
||||
border-left: 3px solid #0066cc;
|
||||
border-bottom: 1px solid #e0e0e0;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
||||
<nav class="permissions-debug-tabs">
|
||||
<a href="{{ urls.path('-/permissions') }}" {% if current_tab == "permissions" %}class="active"{% endif %}>Playground</a>
|
||||
<a href="{{ urls.path('-/check') }}{{ query_string }}" {% if current_tab == "check" %}class="active"{% endif %}>Check</a>
|
||||
<a href="{{ urls.path('-/allowed') }}{{ query_string }}" {% if current_tab == "allowed" %}class="active"{% endif %}>Allowed</a>
|
||||
<a href="{{ urls.path('-/rules') }}{{ query_string }}" {% if current_tab == "rules" %}class="active"{% endif %}>Rules</a>
|
||||
<a href="{{ urls.path('-/actions') }}" {% if current_tab == "actions" %}class="active"{% endif %}>Actions</a>
|
||||
<a href="{{ urls.path('-/allow-debug') }}" {% if current_tab == "allow_debug" %}class="active"{% endif %}>Allow debug</a>
|
||||
</nav>
|
||||
{% endif %}
|
||||
|
|
@ -33,12 +33,9 @@ p.message-warning {
|
|||
|
||||
<h1>Debug allow rules</h1>
|
||||
|
||||
{% set current_tab = "allow_debug" %}
|
||||
{% include "_permissions_debug_tabs.html" %}
|
||||
|
||||
<p>Use this tool to try out different actor and allow combinations. See <a href="https://docs.datasette.io/en/stable/authentication.html#defining-permissions-with-allow-blocks">Defining permissions with "allow" blocks</a> for documentation.</p>
|
||||
|
||||
<form class="core" action="{{ urls.path('-/allow-debug') }}" method="get" style="margin-bottom: 1em">
|
||||
<form action="{{ urls.path('-/allow-debug') }}" method="get" style="margin-bottom: 1em">
|
||||
<div class="two-col">
|
||||
<p><label>Allow block</label></p>
|
||||
<textarea name="allow">{{ allow_input }}</textarea>
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@
|
|||
</p>
|
||||
<details open style="border: 2px solid #ccc; border-bottom: none; padding: 0.5em">
|
||||
<summary style="cursor: pointer;">GET</summary>
|
||||
<form class="core" method="get" id="api-explorer-get" style="margin-top: 0.7em">
|
||||
<form method="get" id="api-explorer-get" style="margin-top: 0.7em">
|
||||
<div>
|
||||
<label for="path">API path:</label>
|
||||
<input type="text" id="path" name="path" style="width: 60%">
|
||||
|
|
@ -29,7 +29,7 @@
|
|||
</details>
|
||||
<details style="border: 2px solid #ccc; padding: 0.5em">
|
||||
<summary style="cursor: pointer">POST</summary>
|
||||
<form class="core" method="post" id="api-explorer-post" style="margin-top: 0.7em">
|
||||
<form method="post" id="api-explorer-post" style="margin-top: 0.7em">
|
||||
<div>
|
||||
<label for="path">API path:</label>
|
||||
<input type="text" id="path" name="path" style="width: 60%">
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@
|
|||
</head>
|
||||
<body class="{% block body_class %}{% endblock %}">
|
||||
<div class="not-footer">
|
||||
<header class="hd"><nav>{% block nav %}{% block crumbs %}{{ crumbs.nav(request=request) }}{% endblock %}
|
||||
<header><nav>{% block nav %}{% block crumbs %}{{ crumbs.nav(request=request) }}{% endblock %}
|
||||
{% set links = menu_links() %}{% if links or show_logout %}
|
||||
<details class="nav-menu details-menu">
|
||||
<summary><svg aria-labelledby="nav-menu-svg-title" role="img"
|
||||
|
|
@ -72,7 +72,5 @@
|
|||
{% endfor %}
|
||||
|
||||
{% if select_templates %}<!-- Templates considered: {{ select_templates|join(", ") }} -->{% endif %}
|
||||
<script src="{{ urls.static('navigation-search.js') }}" defer></script>
|
||||
<navigation-search url="/-/tables"></navigation-search>
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
|||
|
|
@ -39,7 +39,7 @@
|
|||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
<form class="core" action="{{ urls.path('-/create-token') }}" method="post">
|
||||
<form action="{{ urls.path('-/create-token') }}" method="post">
|
||||
<div>
|
||||
<div class="select-wrapper" style="width: unset">
|
||||
<select name="expire_type">
|
||||
|
|
@ -57,7 +57,7 @@
|
|||
<summary style="cursor: pointer;">Restrict actions that can be performed using this token</summary>
|
||||
<h2>All databases and tables</h2>
|
||||
<ul>
|
||||
{% for permission in all_actions %}
|
||||
{% for permission in all_permissions %}
|
||||
<li><label><input type="checkbox" name="all:{{ permission }}"> {{ permission }}</label></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
|
|
@ -65,7 +65,7 @@
|
|||
{% for database in database_with_tables %}
|
||||
<h2>All tables in "{{ database.name }}"</h2>
|
||||
<ul>
|
||||
{% for permission in database_actions %}
|
||||
{% for permission in database_permissions %}
|
||||
<li><label><input type="checkbox" name="database:{{ database.encoded }}:{{ permission }}"> {{ permission }}</label></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
|
|
@ -75,7 +75,7 @@
|
|||
{% for table in database.tables %}
|
||||
<h3>{{ database.name }}: {{ table.name }}</h3>
|
||||
<ul>
|
||||
{% for permission in child_actions %}
|
||||
{% for permission in resource_permissions %}
|
||||
<li><label><input type="checkbox" name="resource:{{ database.encoded }}:{{ table.encoded }}:{{ permission }}"> {{ permission }}</label></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
|
|
|
|||
|
|
@ -1,13 +0,0 @@
|
|||
{% extends "base.html" %}
|
||||
{% block title %}CSRF check failed){% endblock %}
|
||||
{% block content %}
|
||||
<h1>Form origin check failed</h1>
|
||||
|
||||
<p>Your request's origin could not be validated. Please return to the form and submit it again.</p>
|
||||
|
||||
<details><summary>Technical details</summary>
|
||||
<p>Developers: consult Datasette's <a href="https://docs.datasette.io/en/latest/internals.html#csrf-protection">CSRF protection documentation</a>.</p>
|
||||
<p>Error code is {{ message_name }}.</p>
|
||||
</details>
|
||||
|
||||
{% endblock %}
|
||||
|
|
@ -9,10 +9,6 @@
|
|||
|
||||
{% block body_class %}db db-{{ database|to_css_class }}{% endblock %}
|
||||
|
||||
{% block crumbs %}
|
||||
{{ crumbs.nav(request=request, database=database) }}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="page-header" style="border-color: #{{ database_color }}">
|
||||
<h1>{{ metadata.title or database }}{% if private %} 🔒{% endif %}</h1>
|
||||
|
|
@ -25,7 +21,7 @@
|
|||
{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}
|
||||
|
||||
{% if allow_execute_sql %}
|
||||
<form class="sql core" action="{{ urls.database(database) }}/-/query" method="get">
|
||||
<form class="sql" action="{{ urls.database(database) }}/-/query" method="get">
|
||||
<h3>Custom SQL query</h3>
|
||||
<p><textarea id="sql-editor" name="sql">{% if tables %}select * from {{ tables[0].name|escape_sqlite }}{% else %}select sqlite_version(){% endif %}</textarea></p>
|
||||
<p>
|
||||
|
|
@ -56,7 +52,7 @@
|
|||
{% endif %}
|
||||
|
||||
{% if tables %}
|
||||
<h2 id="tables">Tables <a style="font-weight: normal; font-size: 0.75em; padding-left: 0.5em;" href="{{ urls.database(database) }}/-/schema">schema</a></h2>
|
||||
<h2 id="tables">Tables</h2>
|
||||
{% endif %}
|
||||
|
||||
{% for table in tables %}
|
||||
|
|
@ -64,7 +60,7 @@
|
|||
<div class="db-table">
|
||||
<h3><a href="{{ urls.table(database, table.name) }}">{{ table.name }}</a>{% if table.private %} 🔒{% endif %}{% if table.hidden %}<em> (hidden)</em>{% endif %}</h3>
|
||||
<p><em>{% for column in table.columns %}{{ column }}{% if not loop.last %}, {% endif %}{% endfor %}</em></p>
|
||||
<p>{% if table.count is none %}Many rows{% elif table.count == count_limit + 1 %}>{{ "{:,}".format(count_limit) }} rows{% else %}{{ "{:,}".format(table.count) }} row{% if table.count == 1 %}{% else %}s{% endif %}{% endif %}</p>
|
||||
<p>{% if table.count is none %}Many rows{% else %}{{ "{:,}".format(table.count) }} row{% if table.count == 1 %}{% else %}s{% endif %}{% endif %}</p>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
|
|
|||
|
|
@ -1,43 +0,0 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Registered Actions{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<h1>Registered actions</h1>
|
||||
|
||||
{% set current_tab = "actions" %}
|
||||
{% include "_permissions_debug_tabs.html" %}
|
||||
|
||||
<p style="margin-bottom: 2em;">
|
||||
This Datasette instance has registered {{ data|length }} action{{ data|length != 1 and "s" or "" }}.
|
||||
Actions are used by the permission system to control access to different features.
|
||||
</p>
|
||||
|
||||
<table class="rows-and-columns">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Abbr</th>
|
||||
<th>Description</th>
|
||||
<th>Resource</th>
|
||||
<th>Takes Parent</th>
|
||||
<th>Takes Child</th>
|
||||
<th>Also Requires</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for action in data %}
|
||||
<tr>
|
||||
<td><strong>{{ action.name }}</strong></td>
|
||||
<td>{% if action.abbr %}<code>{{ action.abbr }}</code>{% endif %}</td>
|
||||
<td>{{ action.description or "" }}</td>
|
||||
<td>{% if action.resource_class %}<code>{{ action.resource_class }}</code>{% endif %}</td>
|
||||
<td>{% if action.takes_parent %}✓{% endif %}</td>
|
||||
<td>{% if action.takes_child %}✓{% endif %}</td>
|
||||
<td>{% if action.also_requires %}<code>{{ action.also_requires }}</code>{% endif %}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
{% endblock %}
|
||||
|
|
@ -1,229 +0,0 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Allowed Resources{% endblock %}
|
||||
|
||||
{% block extra_head %}
|
||||
<script src="{{ base_url }}-/static/json-format-highlight-1.0.1.js"></script>
|
||||
{% include "_permission_ui_styles.html" %}
|
||||
{% include "_debug_common_functions.html" %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<h1>Allowed resources</h1>
|
||||
|
||||
{% set current_tab = "allowed" %}
|
||||
{% include "_permissions_debug_tabs.html" %}
|
||||
|
||||
<p>Use this tool to check which resources the current actor is allowed to access for a given permission action. It queries the <code>/-/allowed.json</code> API endpoint.</p>
|
||||
|
||||
{% if request.actor %}
|
||||
<p>Current actor: <strong>{{ request.actor.get("id", "anonymous") }}</strong></p>
|
||||
{% else %}
|
||||
<p>Current actor: <strong>anonymous (not logged in)</strong></p>
|
||||
{% endif %}
|
||||
|
||||
<div class="permission-form">
|
||||
<form id="allowed-form" method="get" action="{{ urls.path("-/allowed") }}">
|
||||
<div class="form-section">
|
||||
<label for="action">Action (permission name):</label>
|
||||
<select id="action" name="action" required>
|
||||
<option value="">Select an action...</option>
|
||||
{% for action_name in supported_actions %}
|
||||
<option value="{{ action_name }}">{{ action_name }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
<small>Only certain actions are supported by this endpoint</small>
|
||||
</div>
|
||||
|
||||
<div class="form-section">
|
||||
<label for="parent">Filter by parent (optional):</label>
|
||||
<input type="text" id="parent" name="parent" placeholder="e.g., database name">
|
||||
<small>Filter results to a specific parent resource</small>
|
||||
</div>
|
||||
|
||||
<div class="form-section">
|
||||
<label for="child">Filter by child (optional):</label>
|
||||
<input type="text" id="child" name="child" placeholder="e.g., table name">
|
||||
<small>Filter results to a specific child resource (requires parent to be set)</small>
|
||||
</div>
|
||||
|
||||
<div class="form-section">
|
||||
<label for="page_size">Page size:</label>
|
||||
<input type="number" id="page_size" name="page_size" value="50" min="1" max="200" style="max-width: 100px;">
|
||||
<small>Number of results per page (max 200)</small>
|
||||
</div>
|
||||
|
||||
<div class="form-actions">
|
||||
<button type="submit" class="submit-btn" id="submit-btn">Check Allowed Resources</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="results-container" style="display: none;">
|
||||
<div class="results-header">
|
||||
<h2>Results</h2>
|
||||
<div class="results-count" id="results-count"></div>
|
||||
</div>
|
||||
|
||||
<div id="results-content"></div>
|
||||
|
||||
<div id="pagination" class="pagination"></div>
|
||||
|
||||
<details style="margin-top: 2em;">
|
||||
<summary style="cursor: pointer; font-weight: bold;">Raw JSON response</summary>
|
||||
<pre id="raw-json" style="margin-top: 1em; padding: 1em; background-color: #f5f5f5; border: 1px solid #ddd; border-radius: 3px; overflow-x: auto;"></pre>
|
||||
</details>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
const form = document.getElementById('allowed-form');
|
||||
const resultsContainer = document.getElementById('results-container');
|
||||
const resultsContent = document.getElementById('results-content');
|
||||
const resultsCount = document.getElementById('results-count');
|
||||
const pagination = document.getElementById('pagination');
|
||||
const submitBtn = document.getElementById('submit-btn');
|
||||
const hasDebugPermission = {{ 'true' if has_debug_permission else 'false' }};
|
||||
|
||||
// Populate form on initial load
|
||||
(function() {
|
||||
const params = populateFormFromURL();
|
||||
const action = params.get('action');
|
||||
const page = params.get('page');
|
||||
if (action) {
|
||||
fetchResults(page ? parseInt(page) : 1);
|
||||
}
|
||||
})();
|
||||
|
||||
async function fetchResults(page = 1) {
|
||||
submitBtn.disabled = true;
|
||||
submitBtn.textContent = 'Loading...';
|
||||
|
||||
const formData = new FormData(form);
|
||||
const params = new URLSearchParams();
|
||||
|
||||
for (const [key, value] of formData.entries()) {
|
||||
if (value && key !== 'page_size') {
|
||||
params.append(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
const pageSize = document.getElementById('page_size').value || '50';
|
||||
params.append('page', page.toString());
|
||||
params.append('page_size', pageSize);
|
||||
|
||||
try {
|
||||
const response = await fetch('{{ urls.path("-/allowed.json") }}?' + params.toString(), {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
}
|
||||
});
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
if (response.ok) {
|
||||
displayResults(data);
|
||||
} else {
|
||||
displayError(data);
|
||||
}
|
||||
} catch (error) {
|
||||
displayError({ error: error.message });
|
||||
} finally {
|
||||
submitBtn.disabled = false;
|
||||
submitBtn.textContent = 'Check Allowed Resources';
|
||||
}
|
||||
}
|
||||
|
||||
function displayResults(data) {
|
||||
resultsContainer.style.display = 'block';
|
||||
|
||||
// Update count
|
||||
resultsCount.textContent = `Showing ${data.items.length} of ${data.total} total resources (page ${data.page})`;
|
||||
|
||||
// Display results table
|
||||
if (data.items.length === 0) {
|
||||
resultsContent.innerHTML = '<div class="no-results">No allowed resources found for this action.</div>';
|
||||
} else {
|
||||
let html = '<table class="results-table">';
|
||||
html += '<thead><tr>';
|
||||
html += '<th>Resource Path</th>';
|
||||
html += '<th>Parent</th>';
|
||||
html += '<th>Child</th>';
|
||||
if (hasDebugPermission) {
|
||||
html += '<th>Reason</th>';
|
||||
}
|
||||
html += '</tr></thead>';
|
||||
html += '<tbody>';
|
||||
|
||||
for (const item of data.items) {
|
||||
html += '<tr>';
|
||||
html += `<td><span class="resource-path">${escapeHtml(item.resource || '/')}</span></td>`;
|
||||
html += `<td>${escapeHtml(item.parent || '—')}</td>`;
|
||||
html += `<td>${escapeHtml(item.child || '—')}</td>`;
|
||||
if (hasDebugPermission) {
|
||||
// Display reason as JSON array
|
||||
let reasonHtml = '—';
|
||||
if (item.reason && Array.isArray(item.reason)) {
|
||||
reasonHtml = `<code>${escapeHtml(JSON.stringify(item.reason))}</code>`;
|
||||
}
|
||||
html += `<td>${reasonHtml}</td>`;
|
||||
}
|
||||
html += '</tr>';
|
||||
}
|
||||
|
||||
html += '</tbody></table>';
|
||||
resultsContent.innerHTML = html;
|
||||
}
|
||||
|
||||
// Update pagination
|
||||
pagination.innerHTML = '';
|
||||
if (data.previous_url || data.next_url) {
|
||||
if (data.previous_url) {
|
||||
const prevLink = document.createElement('a');
|
||||
prevLink.href = data.previous_url;
|
||||
prevLink.textContent = '← Previous';
|
||||
pagination.appendChild(prevLink);
|
||||
}
|
||||
|
||||
const pageInfo = document.createElement('span');
|
||||
pageInfo.textContent = `Page ${data.page}`;
|
||||
pagination.appendChild(pageInfo);
|
||||
|
||||
if (data.next_url) {
|
||||
const nextLink = document.createElement('a');
|
||||
nextLink.href = data.next_url;
|
||||
nextLink.textContent = 'Next →';
|
||||
pagination.appendChild(nextLink);
|
||||
}
|
||||
}
|
||||
|
||||
// Update raw JSON
|
||||
document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);
|
||||
}
|
||||
|
||||
function displayError(data) {
|
||||
resultsContainer.style.display = 'block';
|
||||
resultsCount.textContent = '';
|
||||
pagination.innerHTML = '';
|
||||
|
||||
resultsContent.innerHTML = `<div class="error-message">Error: ${escapeHtml(data.error || 'Unknown error')}</div>`;
|
||||
|
||||
document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);
|
||||
}
|
||||
|
||||
// Disable child input if parent is empty
|
||||
const parentInput = document.getElementById('parent');
|
||||
const childInput = document.getElementById('child');
|
||||
|
||||
parentInput.addEventListener('input', () => {
|
||||
childInput.disabled = !parentInput.value;
|
||||
if (!parentInput.value) {
|
||||
childInput.value = '';
|
||||
}
|
||||
});
|
||||
|
||||
// Initialize disabled state
|
||||
childInput.disabled = !parentInput.value;
|
||||
</script>
|
||||
|
||||
{% endblock %}
|
||||
|
|
@ -1,270 +0,0 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Permission Check{% endblock %}
|
||||
|
||||
{% block extra_head %}
|
||||
<script src="{{ base_url }}-/static/json-format-highlight-1.0.1.js"></script>
|
||||
{% include "_permission_ui_styles.html" %}
|
||||
{% include "_debug_common_functions.html" %}
|
||||
<style>
|
||||
#output {
|
||||
margin-top: 2em;
|
||||
padding: 1em;
|
||||
border-radius: 5px;
|
||||
}
|
||||
#output.allowed {
|
||||
background-color: #e8f5e9;
|
||||
border: 2px solid #4caf50;
|
||||
}
|
||||
#output.denied {
|
||||
background-color: #ffebee;
|
||||
border: 2px solid #f44336;
|
||||
}
|
||||
#output h2 {
|
||||
margin-top: 0;
|
||||
}
|
||||
#output .result-badge {
|
||||
display: inline-block;
|
||||
padding: 0.3em 0.8em;
|
||||
border-radius: 3px;
|
||||
font-weight: bold;
|
||||
font-size: 1.1em;
|
||||
}
|
||||
#output .allowed-badge {
|
||||
background-color: #4caf50;
|
||||
color: white;
|
||||
}
|
||||
#output .denied-badge {
|
||||
background-color: #f44336;
|
||||
color: white;
|
||||
}
|
||||
.details-section {
|
||||
margin-top: 1em;
|
||||
}
|
||||
.details-section dt {
|
||||
font-weight: bold;
|
||||
margin-top: 0.5em;
|
||||
}
|
||||
.details-section dd {
|
||||
margin-left: 1em;
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<h1>Permission check</h1>
|
||||
|
||||
{% set current_tab = "check" %}
|
||||
{% include "_permissions_debug_tabs.html" %}
|
||||
|
||||
<p>Use this tool to test permission checks for the current actor. It queries the <code>/-/check.json</code> API endpoint.</p>
|
||||
|
||||
{% if request.actor %}
|
||||
<p>Current actor: <strong>{{ request.actor.get("id", "anonymous") }}</strong></p>
|
||||
{% else %}
|
||||
<p>Current actor: <strong>anonymous (not logged in)</strong></p>
|
||||
{% endif %}
|
||||
|
||||
<div class="permission-form">
|
||||
<form id="check-form" method="get" action="{{ urls.path("-/check") }}">
|
||||
<div class="form-section">
|
||||
<label for="action">Action (permission name):</label>
|
||||
<select id="action" name="action" required>
|
||||
<option value="">Select an action...</option>
|
||||
{% for action_name in sorted_actions %}
|
||||
<option value="{{ action_name }}">{{ action_name }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
<small>The permission action to check</small>
|
||||
</div>
|
||||
|
||||
<div class="form-section">
|
||||
<label for="parent">Parent resource (optional):</label>
|
||||
<input type="text" id="parent" name="parent" placeholder="e.g., database name">
|
||||
<small>For database-level permissions, specify the database name</small>
|
||||
</div>
|
||||
|
||||
<div class="form-section">
|
||||
<label for="child">Child resource (optional):</label>
|
||||
<input type="text" id="child" name="child" placeholder="e.g., table name">
|
||||
<small>For table-level permissions, specify the table name (requires parent)</small>
|
||||
</div>
|
||||
|
||||
<div class="form-actions">
|
||||
<button type="submit" class="submit-btn" id="submit-btn">Check Permission</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="output" style="display: none;">
|
||||
<h2>Result: <span class="result-badge" id="result-badge"></span></h2>
|
||||
|
||||
<dl class="details-section">
|
||||
<dt>Action:</dt>
|
||||
<dd id="result-action"></dd>
|
||||
|
||||
<dt>Resource Path:</dt>
|
||||
<dd id="result-resource"></dd>
|
||||
|
||||
<dt>Actor ID:</dt>
|
||||
<dd id="result-actor"></dd>
|
||||
|
||||
<div id="additional-details"></div>
|
||||
</dl>
|
||||
|
||||
<details style="margin-top: 1em;">
|
||||
<summary style="cursor: pointer; font-weight: bold;">Raw JSON response</summary>
|
||||
<pre id="raw-json" style="margin-top: 1em; padding: 1em; background-color: #f5f5f5; border: 1px solid #ddd; border-radius: 3px; overflow-x: auto;"></pre>
|
||||
</details>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
const form = document.getElementById('check-form');
|
||||
const output = document.getElementById('output');
|
||||
const submitBtn = document.getElementById('submit-btn');
|
||||
|
||||
async function performCheck() {
|
||||
submitBtn.disabled = true;
|
||||
submitBtn.textContent = 'Checking...';
|
||||
|
||||
const formData = new FormData(form);
|
||||
const params = new URLSearchParams();
|
||||
|
||||
for (const [key, value] of formData.entries()) {
|
||||
if (value) {
|
||||
params.append(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch('{{ urls.path("-/check.json") }}?' + params.toString(), {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
}
|
||||
});
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
if (response.ok) {
|
||||
displayResult(data);
|
||||
} else {
|
||||
displayError(data);
|
||||
}
|
||||
} catch (error) {
|
||||
alert('Error: ' + error.message);
|
||||
} finally {
|
||||
submitBtn.disabled = false;
|
||||
submitBtn.textContent = 'Check Permission';
|
||||
}
|
||||
}
|
||||
|
||||
// Populate form on initial load
|
||||
(function() {
|
||||
const params = populateFormFromURL();
|
||||
const action = params.get('action');
|
||||
if (action) {
|
||||
performCheck();
|
||||
}
|
||||
})();
|
||||
|
||||
function displayResult(data) {
|
||||
output.style.display = 'block';
|
||||
|
||||
// Set badge and styling
|
||||
const resultBadge = document.getElementById('result-badge');
|
||||
if (data.allowed) {
|
||||
output.className = 'allowed';
|
||||
resultBadge.className = 'result-badge allowed-badge';
|
||||
resultBadge.textContent = 'ALLOWED ✓';
|
||||
} else {
|
||||
output.className = 'denied';
|
||||
resultBadge.className = 'result-badge denied-badge';
|
||||
resultBadge.textContent = 'DENIED ✗';
|
||||
}
|
||||
|
||||
// Basic details
|
||||
document.getElementById('result-action').textContent = data.action || 'N/A';
|
||||
document.getElementById('result-resource').textContent = data.resource?.path || '/';
|
||||
document.getElementById('result-actor').textContent = data.actor_id || 'anonymous';
|
||||
|
||||
// Additional details
|
||||
const additionalDetails = document.getElementById('additional-details');
|
||||
additionalDetails.innerHTML = '';
|
||||
|
||||
if (data.reason !== undefined) {
|
||||
const dt = document.createElement('dt');
|
||||
dt.textContent = 'Reason:';
|
||||
const dd = document.createElement('dd');
|
||||
dd.textContent = data.reason || 'N/A';
|
||||
additionalDetails.appendChild(dt);
|
||||
additionalDetails.appendChild(dd);
|
||||
}
|
||||
|
||||
if (data.source_plugin !== undefined) {
|
||||
const dt = document.createElement('dt');
|
||||
dt.textContent = 'Source Plugin:';
|
||||
const dd = document.createElement('dd');
|
||||
dd.textContent = data.source_plugin || 'N/A';
|
||||
additionalDetails.appendChild(dt);
|
||||
additionalDetails.appendChild(dd);
|
||||
}
|
||||
|
||||
if (data.used_default !== undefined) {
|
||||
const dt = document.createElement('dt');
|
||||
dt.textContent = 'Used Default:';
|
||||
const dd = document.createElement('dd');
|
||||
dd.textContent = data.used_default ? 'Yes' : 'No';
|
||||
additionalDetails.appendChild(dt);
|
||||
additionalDetails.appendChild(dd);
|
||||
}
|
||||
|
||||
if (data.depth !== undefined) {
|
||||
const dt = document.createElement('dt');
|
||||
dt.textContent = 'Depth:';
|
||||
const dd = document.createElement('dd');
|
||||
dd.textContent = data.depth;
|
||||
additionalDetails.appendChild(dt);
|
||||
additionalDetails.appendChild(dd);
|
||||
}
|
||||
|
||||
// Raw JSON
|
||||
document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);
|
||||
|
||||
// Scroll to output
|
||||
output.scrollIntoView({ behavior: 'smooth', block: 'nearest' });
|
||||
}
|
||||
|
||||
function displayError(data) {
|
||||
output.style.display = 'block';
|
||||
output.className = 'denied';
|
||||
|
||||
const resultBadge = document.getElementById('result-badge');
|
||||
resultBadge.className = 'result-badge denied-badge';
|
||||
resultBadge.textContent = 'ERROR';
|
||||
|
||||
document.getElementById('result-action').textContent = 'N/A';
|
||||
document.getElementById('result-resource').textContent = 'N/A';
|
||||
document.getElementById('result-actor').textContent = 'N/A';
|
||||
|
||||
const additionalDetails = document.getElementById('additional-details');
|
||||
additionalDetails.innerHTML = '<dt>Error:</dt><dd>' + (data.error || 'Unknown error') + '</dd>';
|
||||
|
||||
document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);
|
||||
|
||||
output.scrollIntoView({ behavior: 'smooth', block: 'nearest' });
|
||||
}
|
||||
|
||||
// Disable child input if parent is empty
|
||||
const parentInput = document.getElementById('parent');
|
||||
const childInput = document.getElementById('child');
|
||||
|
||||
childInput.addEventListener('focus', () => {
|
||||
if (!parentInput.value) {
|
||||
alert('Please specify a parent resource first before adding a child resource.');
|
||||
parentInput.focus();
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
{% endblock %}
|
||||
|
|
@ -1,166 +0,0 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Debug permissions{% endblock %}
|
||||
|
||||
{% block extra_head %}
|
||||
{% include "_permission_ui_styles.html" %}
|
||||
<style type="text/css">
|
||||
.check-result-true {
|
||||
color: green;
|
||||
}
|
||||
.check-result-false {
|
||||
color: red;
|
||||
}
|
||||
.check-result-no-opinion {
|
||||
color: #aaa;
|
||||
}
|
||||
.check h2 {
|
||||
font-size: 1em
|
||||
}
|
||||
.check-action, .check-when, .check-result {
|
||||
font-size: 1.3em;
|
||||
}
|
||||
textarea {
|
||||
height: 10em;
|
||||
width: 95%;
|
||||
box-sizing: border-box;
|
||||
padding: 0.5em;
|
||||
border: 2px dotted black;
|
||||
}
|
||||
.two-col {
|
||||
display: inline-block;
|
||||
width: 48%;
|
||||
}
|
||||
.two-col label {
|
||||
width: 48%;
|
||||
}
|
||||
@media only screen and (max-width: 576px) {
|
||||
.two-col {
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<h1>Permission playground</h1>
|
||||
|
||||
{% set current_tab = "permissions" %}
|
||||
{% include "_permissions_debug_tabs.html" %}
|
||||
|
||||
<p>This tool lets you simulate an actor and a permission check for that actor.</p>
|
||||
|
||||
<div class="permission-form">
|
||||
<form action="{{ urls.path('-/permissions') }}" id="debug-post" method="post">
|
||||
<input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
|
||||
<div class="two-col">
|
||||
<div class="form-section">
|
||||
<label>Actor</label>
|
||||
<textarea name="actor">{% if actor_input %}{{ actor_input }}{% else %}{"id": "root"}{% endif %}</textarea>
|
||||
</div>
|
||||
</div>
|
||||
<div class="two-col" style="vertical-align: top">
|
||||
<div class="form-section">
|
||||
<label for="permission">Action</label>
|
||||
<select name="permission" id="permission">
|
||||
{% for permission in permissions %}
|
||||
<option value="{{ permission.name }}">{{ permission.name }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
</div>
|
||||
<div class="form-section">
|
||||
<label for="resource_1">Parent</label>
|
||||
<input type="text" id="resource_1" name="resource_1" placeholder="e.g., database name">
|
||||
</div>
|
||||
<div class="form-section">
|
||||
<label for="resource_2">Child</label>
|
||||
<input type="text" id="resource_2" name="resource_2" placeholder="e.g., table name">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-actions">
|
||||
<button type="submit" class="submit-btn">Simulate permission check</button>
|
||||
</div>
|
||||
<pre style="margin-top: 1em" id="debugResult"></pre>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
var rawPerms = {{ permissions|tojson }};
|
||||
var permissions = Object.fromEntries(rawPerms.map(p => [p.name, p]));
|
||||
var permissionSelect = document.getElementById('permission');
|
||||
var resource1 = document.getElementById('resource_1');
|
||||
var resource2 = document.getElementById('resource_2');
|
||||
var resource1Section = resource1.closest('.form-section');
|
||||
var resource2Section = resource2.closest('.form-section');
|
||||
function updateResourceVisibility() {
|
||||
var permission = permissionSelect.value;
|
||||
var {takes_parent, takes_child} = permissions[permission];
|
||||
resource1Section.style.display = takes_parent ? 'block' : 'none';
|
||||
resource2Section.style.display = takes_child ? 'block' : 'none';
|
||||
}
|
||||
permissionSelect.addEventListener('change', updateResourceVisibility);
|
||||
updateResourceVisibility();
|
||||
|
||||
// When #debug-post form is submitted, use fetch() to POST data
|
||||
var debugPost = document.getElementById('debug-post');
|
||||
var debugResult = document.getElementById('debugResult');
|
||||
debugPost.addEventListener('submit', function(ev) {
|
||||
ev.preventDefault();
|
||||
var formData = new FormData(debugPost);
|
||||
fetch(debugPost.action, {
|
||||
method: 'POST',
|
||||
body: new URLSearchParams(formData),
|
||||
headers: {
|
||||
'Accept': 'application/json'
|
||||
}
|
||||
}).then(function(response) {
|
||||
if (!response.ok) {
|
||||
throw new Error('Request failed with status ' + response.status);
|
||||
}
|
||||
return response.json();
|
||||
}).then(function(data) {
|
||||
debugResult.innerText = JSON.stringify(data, null, 4);
|
||||
}).catch(function(error) {
|
||||
debugResult.innerText = JSON.stringify({ error: error.message }, null, 4);
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<h1>Recent permissions checks</h1>
|
||||
|
||||
<p>
|
||||
{% if filter != "all" %}<a href="?filter=all">All</a>{% else %}<strong>All</strong>{% endif %},
|
||||
{% if filter != "exclude-yours" %}<a href="?filter=exclude-yours">Exclude yours</a>{% else %}<strong>Exclude yours</strong>{% endif %},
|
||||
{% if filter != "only-yours" %}<a href="?filter=only-yours">Only yours</a>{% else %}<strong>Only yours</strong>{% endif %}
|
||||
</p>
|
||||
|
||||
{% if permission_checks %}
|
||||
<table class="rows-and-columns permission-checks-table" id="permission-checks-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>When</th>
|
||||
<th>Action</th>
|
||||
<th>Parent</th>
|
||||
<th>Child</th>
|
||||
<th>Actor</th>
|
||||
<th>Result</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for check in permission_checks %}
|
||||
<tr>
|
||||
<td><span style="font-size: 0.8em">{{ check.when.split('T', 1)[0] }}</span><br>{{ check.when.split('T', 1)[1].split('+', 1)[0].split('-', 1)[0].split('Z', 1)[0] }}</td>
|
||||
<td><code>{{ check.action }}</code></td>
|
||||
<td>{{ check.parent or '—' }}</td>
|
||||
<td>{{ check.child or '—' }}</td>
|
||||
<td>{% if check.actor %}<code>{{ check.actor|tojson }}</code>{% else %}<span class="check-actor-anon">anonymous</span>{% endif %}</td>
|
||||
<td>{% if check.result %}<span class="check-result check-result-true">Allowed</span>{% elif check.result is none %}<span class="check-result check-result-no-opinion">No opinion</span>{% else %}<span class="check-result check-result-false">Denied</span>{% endif %}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
{% else %}
|
||||
<p class="no-results">No permission checks have been recorded yet.</p>
|
||||
{% endif %}
|
||||
|
||||
{% endblock %}
|
||||
|
|
@ -1,203 +0,0 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Permission Rules{% endblock %}
|
||||
|
||||
{% block extra_head %}
|
||||
<script src="{{ base_url }}-/static/json-format-highlight-1.0.1.js"></script>
|
||||
{% include "_permission_ui_styles.html" %}
|
||||
{% include "_debug_common_functions.html" %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<h1>Permission rules</h1>
|
||||
|
||||
{% set current_tab = "rules" %}
|
||||
{% include "_permissions_debug_tabs.html" %}
|
||||
|
||||
<p>Use this tool to view the permission rules that allow the current actor to access resources for a given permission action. It queries the <code>/-/rules.json</code> API endpoint.</p>
|
||||
|
||||
{% if request.actor %}
|
||||
<p>Current actor: <strong>{{ request.actor.get("id", "anonymous") }}</strong></p>
|
||||
{% else %}
|
||||
<p>Current actor: <strong>anonymous (not logged in)</strong></p>
|
||||
{% endif %}
|
||||
|
||||
<div class="permission-form">
|
||||
<form id="rules-form" method="get" action="{{ urls.path("-/rules") }}">
|
||||
<div class="form-section">
|
||||
<label for="action">Action (permission name):</label>
|
||||
<select id="action" name="action" required>
|
||||
<option value="">Select an action...</option>
|
||||
{% for action_name in sorted_actions %}
|
||||
<option value="{{ action_name }}">{{ action_name }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
<small>The permission action to check</small>
|
||||
</div>
|
||||
|
||||
<div class="form-section">
|
||||
<label for="page_size">Page size:</label>
|
||||
<input type="number" id="page_size" name="page_size" value="50" min="1" max="200" style="max-width: 100px;">
|
||||
<small>Number of results per page (max 200)</small>
|
||||
</div>
|
||||
|
||||
<div class="form-actions">
|
||||
<button type="submit" class="submit-btn" id="submit-btn">View Permission Rules</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="results-container" style="display: none;">
|
||||
<div class="results-header">
|
||||
<h2>Results</h2>
|
||||
<div class="results-count" id="results-count"></div>
|
||||
</div>
|
||||
|
||||
<div id="results-content"></div>
|
||||
|
||||
<div id="pagination" class="pagination"></div>
|
||||
|
||||
<details style="margin-top: 2em;">
|
||||
<summary style="cursor: pointer; font-weight: bold;">Raw JSON response</summary>
|
||||
<pre id="raw-json" style="margin-top: 1em; padding: 1em; background-color: #f5f5f5; border: 1px solid #ddd; border-radius: 3px; overflow-x: auto;"></pre>
|
||||
</details>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
const form = document.getElementById('rules-form');
|
||||
const resultsContainer = document.getElementById('results-container');
|
||||
const resultsContent = document.getElementById('results-content');
|
||||
const resultsCount = document.getElementById('results-count');
|
||||
const pagination = document.getElementById('pagination');
|
||||
const submitBtn = document.getElementById('submit-btn');
|
||||
|
||||
// Populate form on initial load
|
||||
(function() {
|
||||
const params = populateFormFromURL();
|
||||
const action = params.get('action');
|
||||
const page = params.get('page');
|
||||
if (action) {
|
||||
fetchResults(page ? parseInt(page) : 1);
|
||||
}
|
||||
})();
|
||||
|
||||
async function fetchResults(page = 1) {
|
||||
submitBtn.disabled = true;
|
||||
submitBtn.textContent = 'Loading...';
|
||||
|
||||
const formData = new FormData(form);
|
||||
const params = new URLSearchParams();
|
||||
|
||||
for (const [key, value] of formData.entries()) {
|
||||
if (value && key !== 'page_size') {
|
||||
params.append(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
const pageSize = document.getElementById('page_size').value || '50';
|
||||
params.append('page', page.toString());
|
||||
params.append('page_size', pageSize);
|
||||
|
||||
try {
|
||||
const response = await fetch('{{ urls.path("-/rules.json") }}?' + params.toString(), {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
}
|
||||
});
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
if (response.ok) {
|
||||
displayResults(data);
|
||||
} else {
|
||||
displayError(data);
|
||||
}
|
||||
} catch (error) {
|
||||
displayError({ error: error.message });
|
||||
} finally {
|
||||
submitBtn.disabled = false;
|
||||
submitBtn.textContent = 'View Permission Rules';
|
||||
}
|
||||
}
|
||||
|
||||
function displayResults(data) {
|
||||
resultsContainer.style.display = 'block';
|
||||
|
||||
// Update count
|
||||
resultsCount.textContent = `Showing ${data.items.length} of ${data.total} total rules (page ${data.page})`;
|
||||
|
||||
// Display results table
|
||||
if (data.items.length === 0) {
|
||||
resultsContent.innerHTML = '<div class="no-results">No permission rules found for this action.</div>';
|
||||
} else {
|
||||
let html = '<table class="results-table">';
|
||||
html += '<thead><tr>';
|
||||
html += '<th>Effect</th>';
|
||||
html += '<th>Resource Path</th>';
|
||||
html += '<th>Parent</th>';
|
||||
html += '<th>Child</th>';
|
||||
html += '<th>Source Plugin</th>';
|
||||
html += '<th>Reason</th>';
|
||||
html += '</tr></thead>';
|
||||
html += '<tbody>';
|
||||
|
||||
for (const item of data.items) {
|
||||
const rowClass = item.allow ? 'allow-row' : 'deny-row';
|
||||
const effectBadge = item.allow
|
||||
? '<span style="background: #4caf50; color: white; padding: 0.2em 0.5em; border-radius: 3px; font-weight: bold;">ALLOW</span>'
|
||||
: '<span style="background: #f44336; color: white; padding: 0.2em 0.5em; border-radius: 3px; font-weight: bold;">DENY</span>';
|
||||
|
||||
html += `<tr class="${rowClass}">`;
|
||||
html += `<td>${effectBadge}</td>`;
|
||||
html += `<td><span class="resource-path">${escapeHtml(item.resource || '/')}</span></td>`;
|
||||
html += `<td>${escapeHtml(item.parent || '—')}</td>`;
|
||||
html += `<td>${escapeHtml(item.child || '—')}</td>`;
|
||||
html += `<td>${escapeHtml(item.source_plugin || '—')}</td>`;
|
||||
html += `<td>${escapeHtml(item.reason || '—')}</td>`;
|
||||
html += '</tr>';
|
||||
}
|
||||
|
||||
html += '</tbody></table>';
|
||||
resultsContent.innerHTML = html;
|
||||
}
|
||||
|
||||
// Update pagination
|
||||
pagination.innerHTML = '';
|
||||
if (data.previous_url || data.next_url) {
|
||||
if (data.previous_url) {
|
||||
const prevLink = document.createElement('a');
|
||||
prevLink.href = data.previous_url;
|
||||
prevLink.textContent = '← Previous';
|
||||
pagination.appendChild(prevLink);
|
||||
}
|
||||
|
||||
const pageInfo = document.createElement('span');
|
||||
pageInfo.textContent = `Page ${data.page}`;
|
||||
pagination.appendChild(pageInfo);
|
||||
|
||||
if (data.next_url) {
|
||||
const nextLink = document.createElement('a');
|
||||
nextLink.href = data.next_url;
|
||||
nextLink.textContent = 'Next →';
|
||||
pagination.appendChild(nextLink);
|
||||
}
|
||||
}
|
||||
|
||||
// Update raw JSON
|
||||
document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);
|
||||
}
|
||||
|
||||
function displayError(data) {
|
||||
resultsContainer.style.display = 'block';
|
||||
resultsCount.textContent = '';
|
||||
pagination.innerHTML = '';
|
||||
|
||||
resultsContent.innerHTML = `<div class="error-message">Error: ${escapeHtml(data.error || 'Unknown error')}</div>`;
|
||||
|
||||
document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);
|
||||
}
|
||||
|
||||
</script>
|
||||
|
||||
{% endblock %}
|
||||
|
|
@ -2,10 +2,6 @@
|
|||
|
||||
{% block title %}{{ metadata.title or "Datasette" }}: {% for database in databases %}{{ database.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% endblock %}
|
||||
|
||||
{% block extra_head %}
|
||||
{% if noindex %}<meta name="robots" content="noindex">{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
{% block body_class %}index{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
|
|
@ -21,7 +17,7 @@
|
|||
{% for database in databases %}
|
||||
<h2 style="padding-left: 10px; border-left: 10px solid #{{ database.color }}"><a href="{{ urls.database(database.name) }}">{{ database.name }}</a>{% if database.private %} 🔒{% endif %}</h2>
|
||||
<p>
|
||||
{% if database.show_table_row_counts %}{{ "{:,}".format(database.table_rows_sum) }} rows in {% endif %}{{ database.tables_count }} table{% if database.tables_count != 1 %}s{% endif %}{% if database.hidden_tables_count %}, {% endif -%}
|
||||
{% if database.show_table_row_counts %}{{ "{:,}".format(database.table_rows_sum) }} rows in {% endif %}{{ database.tables_count }} table{% if database.tables_count != 1 %}s{% endif %}{% if database.tables_count and database.hidden_tables_count %}, {% endif -%}
|
||||
{% if database.hidden_tables_count -%}
|
||||
{% if database.show_table_row_counts %}{{ "{:,}".format(database.hidden_table_rows_sum) }} rows in {% endif %}{{ database.hidden_tables_count }} hidden table{% if database.hidden_tables_count != 1 %}s{% endif -%}
|
||||
{% endif -%}
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@
|
|||
|
||||
<p>You are logged in as <strong>{{ display_actor(actor) }}</strong></p>
|
||||
|
||||
<form class="core" action="{{ urls.logout() }}" method="post">
|
||||
<form action="{{ urls.logout() }}" method="post">
|
||||
<div>
|
||||
<input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
|
||||
<input type="submit" value="Log out">
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@
|
|||
|
||||
<p>Set a message:</p>
|
||||
|
||||
<form class="core" action="{{ urls.path('-/messages') }}" method="post">
|
||||
<form action="{{ urls.path('-/messages') }}" method="post">
|
||||
<div>
|
||||
<input type="text" name="message" style="width: 40%">
|
||||
<div class="select-wrapper">
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@
|
|||
</head>
|
||||
<body>
|
||||
|
||||
<header class="hd"><nav>
|
||||
<header><nav>
|
||||
<p class="crumbs">
|
||||
<a href="/">home</a>
|
||||
</p>
|
||||
|
|
@ -45,7 +45,7 @@
|
|||
|
||||
<h2 class="pattern-heading">Header for /database/table/row and Messages</h2>
|
||||
|
||||
<header class="hd">
|
||||
<header>
|
||||
<nav>
|
||||
<p class="crumbs">
|
||||
<a href="/">home</a> /
|
||||
|
|
|
|||
139
datasette/templates/permissions_debug.html
Normal file
139
datasette/templates/permissions_debug.html
Normal file
|
|
@ -0,0 +1,139 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Debug permissions{% endblock %}
|
||||
|
||||
{% block extra_head %}
|
||||
<style type="text/css">
|
||||
.check-result-true {
|
||||
color: green;
|
||||
}
|
||||
.check-result-false {
|
||||
color: red;
|
||||
}
|
||||
.check-result-no-opinion {
|
||||
color: #aaa;
|
||||
}
|
||||
.check h2 {
|
||||
font-size: 1em
|
||||
}
|
||||
.check-action, .check-when, .check-result {
|
||||
font-size: 1.3em;
|
||||
}
|
||||
textarea {
|
||||
height: 10em;
|
||||
width: 95%;
|
||||
box-sizing: border-box;
|
||||
padding: 0.5em;
|
||||
border: 2px dotted black;
|
||||
}
|
||||
.two-col {
|
||||
display: inline-block;
|
||||
width: 48%;
|
||||
}
|
||||
.two-col label {
|
||||
width: 48%;
|
||||
}
|
||||
@media only screen and (max-width: 576px) {
|
||||
.two-col {
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
|
||||
<h1>Permission check testing tool</h1>
|
||||
|
||||
<p>This tool lets you simulate an actor and a permission check for that actor.</p>
|
||||
|
||||
<form action="{{ urls.path('-/permissions') }}" id="debug-post" method="post" style="margin-bottom: 1em">
|
||||
<input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
|
||||
<div class="two-col">
|
||||
<p><label>Actor</label></p>
|
||||
<textarea name="actor">{% if actor_input %}{{ actor_input }}{% else %}{"id": "root"}{% endif %}</textarea>
|
||||
</div>
|
||||
<div class="two-col" style="vertical-align: top">
|
||||
<p><label for="permission" style="display:block">Permission</label>
|
||||
<select name="permission" id="permission">
|
||||
{% for permission in permissions %}
|
||||
<option value="{{ permission.name }}">{{ permission.name }} (default {{ permission.default }})</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
<p><label for="resource_1">Database name</label><input type="text" id="resource_1" name="resource_1"></p>
|
||||
<p><label for="resource_2">Table or query name</label><input type="text" id="resource_2" name="resource_2"></p>
|
||||
</div>
|
||||
<div style="margin-top: 1em;">
|
||||
<input type="submit" value="Simulate permission check">
|
||||
</div>
|
||||
<pre style="margin-top: 1em" id="debugResult"></pre>
|
||||
</form>
|
||||
|
||||
<script>
|
||||
var rawPerms = {{ permissions|tojson }};
|
||||
var permissions = Object.fromEntries(rawPerms.map(p => [p.name, p]));
|
||||
var permissionSelect = document.getElementById('permission');
|
||||
var resource1 = document.getElementById('resource_1');
|
||||
var resource2 = document.getElementById('resource_2');
|
||||
function updateResourceVisibility() {
|
||||
var permission = permissionSelect.value;
|
||||
var {takes_database, takes_resource} = permissions[permission];
|
||||
if (takes_database) {
|
||||
resource1.closest('p').style.display = 'block';
|
||||
} else {
|
||||
resource1.closest('p').style.display = 'none';
|
||||
}
|
||||
if (takes_resource) {
|
||||
resource2.closest('p').style.display = 'block';
|
||||
} else {
|
||||
resource2.closest('p').style.display = 'none';
|
||||
}
|
||||
}
|
||||
permissionSelect.addEventListener('change', updateResourceVisibility);
|
||||
updateResourceVisibility();
|
||||
|
||||
// When #debug-post form is submitted, use fetch() to POST data
|
||||
var debugPost = document.getElementById('debug-post');
|
||||
var debugResult = document.getElementById('debugResult');
|
||||
debugPost.addEventListener('submit', function(ev) {
|
||||
ev.preventDefault();
|
||||
var formData = new FormData(debugPost);
|
||||
console.log(formData);
|
||||
fetch(debugPost.action, {
|
||||
method: 'POST',
|
||||
body: new URLSearchParams(formData),
|
||||
}).then(function(response) {
|
||||
return response.json();
|
||||
}).then(function(data) {
|
||||
debugResult.innerText = JSON.stringify(data, null, 4);
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<h1>Recent permissions checks</h1>
|
||||
|
||||
{% for check in permission_checks %}
|
||||
<div class="check">
|
||||
<h2>
|
||||
<span class="check-action">{{ check.action }}</span>
|
||||
checked at
|
||||
<span class="check-when">{{ check.when }}</span>
|
||||
{% if check.result %}
|
||||
<span class="check-result check-result-true">✓</span>
|
||||
{% elif check.result is none %}
|
||||
<span class="check-result check-result-no-opinion">none</span>
|
||||
{% else %}
|
||||
<span class="check-result check-result-false">✗</span>
|
||||
{% endif %}
|
||||
{% if check.used_default %}
|
||||
<span class="check-used-default">(used default)</span>
|
||||
{% endif %}
|
||||
</h2>
|
||||
<p><strong>Actor:</strong> {{ check.actor|tojson }}</p>
|
||||
{% if check.resource %}
|
||||
<p><strong>Resource:</strong> {{ check.resource }}</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endfor %}
|
||||
|
||||
{% endblock %}
|
||||
|
|
@ -36,7 +36,7 @@
|
|||
|
||||
{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}
|
||||
|
||||
<form class="sql core" action="{{ urls.database(database) }}{% if canned_query %}/{{ canned_query }}{% endif %}" method="{% if canned_query_write %}post{% else %}get{% endif %}">
|
||||
<form class="sql" action="{{ urls.database(database) }}{% if canned_query %}/{{ canned_query }}{% endif %}" method="{% if canned_query_write %}post{% else %}get{% endif %}">
|
||||
<h3>Custom SQL query{% if display_rows %} returning {% if truncated %}more than {% endif %}{{ "{:,}".format(display_rows|length) }} row{% if display_rows|length == 1 %}{% else %}s{% endif %}{% endif %}{% if not query_error %}
|
||||
<span class="show-hide-sql">(<a href="{{ show_hide_link }}">{{ show_hide_text }}</a>)</span>
|
||||
{% endif %}</h3>
|
||||
|
|
|
|||
|
|
@ -1,41 +0,0 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}{% if is_instance %}Schema for all databases{% elif table_name %}Schema for {{ schemas[0].database }}.{{ table_name }}{% else %}Schema for {{ schemas[0].database }}{% endif %}{% endblock %}
|
||||
|
||||
{% block body_class %}schema{% endblock %}
|
||||
|
||||
{% block crumbs %}
|
||||
{% if is_instance %}
|
||||
{{ crumbs.nav(request=request) }}
|
||||
{% elif table_name %}
|
||||
{{ crumbs.nav(request=request, database=schemas[0].database, table=table_name) }}
|
||||
{% else %}
|
||||
{{ crumbs.nav(request=request, database=schemas[0].database) }}
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="page-header">
|
||||
<h1>{% if is_instance %}Schema for all databases{% elif table_name %}Schema for {{ table_name }}{% else %}Schema for {{ schemas[0].database }}{% endif %}</h1>
|
||||
</div>
|
||||
|
||||
{% for item in schemas %}
|
||||
{% if is_instance %}
|
||||
<h2>{{ item.database }}</h2>
|
||||
{% endif %}
|
||||
|
||||
{% if item.schema %}
|
||||
<pre style="background-color: #f5f5f5; padding: 1em; overflow-x: auto; border: 1px solid #ddd; border-radius: 4px;"><code>{{ item.schema }}</code></pre>
|
||||
{% else %}
|
||||
<p><em>No schema available for this database.</em></p>
|
||||
{% endif %}
|
||||
|
||||
{% if not loop.last %}
|
||||
<hr style="margin: 2em 0;">
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
{% if not schemas %}
|
||||
<p><em>No databases with viewable schemas found.</em></p>
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
|
@ -17,7 +17,7 @@
|
|||
{% block body_class %}table db-{{ database|to_css_class }} table-{{ table|to_css_class }}{% endblock %}
|
||||
|
||||
{% block crumbs %}
|
||||
{{ crumbs.nav(request=request, database=database, table=table) }}
|
||||
{{ crumbs.nav(request=request, database=database) }}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
|
|
@ -40,15 +40,12 @@
|
|||
{% endif %}
|
||||
|
||||
{% if count or human_description_en %}
|
||||
<h3>
|
||||
{% if count == count_limit + 1 %}>{{ "{:,}".format(count_limit) }} rows
|
||||
{% if allow_execute_sql and query.sql %} <a class="count-sql" style="font-size: 0.8em;" href="{{ urls.database_query(database, count_sql) }}">count all</a>{% endif %}
|
||||
{% elif count or count == 0 %}{{ "{:,}".format(count) }} row{% if count == 1 %}{% else %}s{% endif %}{% endif %}
|
||||
<h3>{% if count or count == 0 %}{{ "{:,}".format(count) }} row{% if count == 1 %}{% else %}s{% endif %}{% endif %}
|
||||
{% if human_description_en %}{{ human_description_en }}{% endif %}
|
||||
</h3>
|
||||
{% endif %}
|
||||
|
||||
<form class="core" class="filters" action="{{ urls.table(database, table) }}" method="get">
|
||||
<form class="filters" action="{{ urls.table(database, table) }}" method="get">
|
||||
{% if supports_search %}
|
||||
<div class="search-row"><label for="_search">Search:</label><input id="_search" type="search" name="_search" value="{{ search }}"></div>
|
||||
{% endif %}
|
||||
|
|
@ -152,7 +149,7 @@
|
|||
<a href="{{ append_querystring(renderers['json'], '_shape=object') }}">object</a>
|
||||
{% endif %}
|
||||
</p>
|
||||
<form class="core" action="{{ url_csv_path }}" method="get">
|
||||
<form action="{{ url_csv_path }}" method="get">
|
||||
<p>
|
||||
CSV options:
|
||||
<label><input type="checkbox" name="_dl"> download file</label>
|
||||
|
|
@ -175,41 +172,4 @@
|
|||
<pre class="wrapped-sql">{{ view_definition }}</pre>
|
||||
{% endif %}
|
||||
|
||||
{% if allow_execute_sql and query.sql %}
|
||||
<script>
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
const countLink = document.querySelector('a.count-sql');
|
||||
if (countLink) {
|
||||
countLink.addEventListener('click', async function(ev) {
|
||||
ev.preventDefault();
|
||||
// Replace countLink with span with same style attribute
|
||||
const span = document.createElement('span');
|
||||
span.textContent = 'counting...';
|
||||
span.setAttribute('style', countLink.getAttribute('style'));
|
||||
countLink.replaceWith(span);
|
||||
countLink.setAttribute('disabled', 'disabled');
|
||||
let url = countLink.href.replace(/(\?|$)/, '.json$1');
|
||||
try {
|
||||
const response = await fetch(url);
|
||||
console.log({response});
|
||||
const data = await response.json();
|
||||
console.log({data});
|
||||
if (!response.ok) {
|
||||
console.log('throw error');
|
||||
throw new Error(data.title || data.error);
|
||||
}
|
||||
const count = data['rows'][0]['count(*)'];
|
||||
const formattedCount = count.toLocaleString();
|
||||
span.closest('h3').textContent = formattedCount + ' rows';
|
||||
} catch (error) {
|
||||
console.log('Update', span, 'with error message', error);
|
||||
span.textContent = error.message;
|
||||
span.style.color = 'red';
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
</script>
|
||||
{% endif %}
|
||||
|
||||
{% endblock %}
|
||||
|
|
|
|||
|
|
@ -32,7 +32,7 @@ def trace_child_tasks():
|
|||
|
||||
|
||||
@contextmanager
|
||||
def trace(trace_type, **kwargs):
|
||||
def trace(type, **kwargs):
|
||||
assert not TRACE_RESERVED_KEYS.intersection(
|
||||
kwargs.keys()
|
||||
), f".trace() keyword parameters cannot include {TRACE_RESERVED_KEYS}"
|
||||
|
|
@ -45,24 +45,17 @@ def trace(trace_type, **kwargs):
|
|||
yield kwargs
|
||||
return
|
||||
start = time.perf_counter()
|
||||
captured_error = None
|
||||
try:
|
||||
yield kwargs
|
||||
except Exception as ex:
|
||||
captured_error = ex
|
||||
raise
|
||||
finally:
|
||||
end = time.perf_counter()
|
||||
trace_info = {
|
||||
"type": trace_type,
|
||||
"start": start,
|
||||
"end": end,
|
||||
"duration_ms": (end - start) * 1000,
|
||||
"traceback": traceback.format_list(traceback.extract_stack(limit=6)[:-3]),
|
||||
"error": str(captured_error) if captured_error else None,
|
||||
}
|
||||
trace_info.update(kwargs)
|
||||
tracer.append(trace_info)
|
||||
yield kwargs
|
||||
end = time.perf_counter()
|
||||
trace_info = {
|
||||
"type": type,
|
||||
"start": start,
|
||||
"end": end,
|
||||
"duration_ms": (end - start) * 1000,
|
||||
"traceback": traceback.format_list(traceback.extract_stack(limit=6)[:-3]),
|
||||
}
|
||||
trace_info.update(kwargs)
|
||||
tracer.append(trace_info)
|
||||
|
||||
|
||||
@contextmanager
|
||||
|
|
@ -97,7 +90,6 @@ class AsgiTracer:
|
|||
|
||||
async def wrapped_send(message):
|
||||
nonlocal accumulated_body, size_limit_exceeded, response_headers
|
||||
|
||||
if message["type"] == "http.response.start":
|
||||
response_headers = message["headers"]
|
||||
await send(message)
|
||||
|
|
@ -110,12 +102,11 @@ class AsgiTracer:
|
|||
# Accumulate body until the end or until size is exceeded
|
||||
accumulated_body += message["body"]
|
||||
if len(accumulated_body) > self.max_body_bytes:
|
||||
# Send what we have accumulated so far
|
||||
await send(
|
||||
{
|
||||
"type": "http.response.body",
|
||||
"body": accumulated_body,
|
||||
"more_body": bool(message.get("more_body")),
|
||||
"more_body": True,
|
||||
}
|
||||
)
|
||||
size_limit_exceeded = True
|
||||
|
|
|
|||
|
|
@ -31,12 +31,6 @@ class Urls:
|
|||
db = self.ds.get_database(database)
|
||||
return self.path(tilde_encode(db.route), format=format)
|
||||
|
||||
def database_query(self, database, sql, format=None):
|
||||
path = f"{self.database(database)}/-/query?" + urllib.parse.urlencode(
|
||||
{"sql": sql}
|
||||
)
|
||||
return self.path(path, format=format)
|
||||
|
||||
def table(self, database, table, format=None):
|
||||
path = f"{self.database(database)}/{tilde_encode(table)}"
|
||||
if format is not None:
|
||||
|
|
|
|||
|
|
@ -4,7 +4,6 @@ import aiofiles
|
|||
import click
|
||||
from collections import OrderedDict, namedtuple, Counter
|
||||
import copy
|
||||
import dataclasses
|
||||
import base64
|
||||
import hashlib
|
||||
import inspect
|
||||
|
|
@ -28,58 +27,6 @@ from .sqlite import sqlite3, supports_table_xinfo
|
|||
|
||||
if typing.TYPE_CHECKING:
|
||||
from datasette.database import Database
|
||||
from datasette.permissions import Resource
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class PaginatedResources:
|
||||
"""Paginated results from allowed_resources query."""
|
||||
|
||||
resources: List["Resource"]
|
||||
next: str | None # Keyset token for next page (None if no more results)
|
||||
_datasette: typing.Any = dataclasses.field(default=None, repr=False)
|
||||
_action: str = dataclasses.field(default=None, repr=False)
|
||||
_actor: typing.Any = dataclasses.field(default=None, repr=False)
|
||||
_parent: str | None = dataclasses.field(default=None, repr=False)
|
||||
_include_is_private: bool = dataclasses.field(default=False, repr=False)
|
||||
_include_reasons: bool = dataclasses.field(default=False, repr=False)
|
||||
_limit: int = dataclasses.field(default=100, repr=False)
|
||||
|
||||
async def all(self):
|
||||
"""
|
||||
Async generator that yields all resources across all pages.
|
||||
|
||||
Automatically handles pagination under the hood. This is useful when you need
|
||||
to iterate through all results without manually managing pagination tokens.
|
||||
|
||||
Yields:
|
||||
Resource objects one at a time
|
||||
|
||||
Example:
|
||||
page = await datasette.allowed_resources("view-table", actor)
|
||||
async for table in page.all():
|
||||
print(f"{table.parent}/{table.child}")
|
||||
"""
|
||||
# Yield all resources from current page
|
||||
for resource in self.resources:
|
||||
yield resource
|
||||
|
||||
# Continue fetching subsequent pages if there are more
|
||||
next_token = self.next
|
||||
while next_token:
|
||||
page = await self._datasette.allowed_resources(
|
||||
self._action,
|
||||
self._actor,
|
||||
parent=self._parent,
|
||||
include_is_private=self._include_is_private,
|
||||
include_reasons=self._include_reasons,
|
||||
limit=self._limit,
|
||||
next=next_token,
|
||||
)
|
||||
for resource in page.resources:
|
||||
yield resource
|
||||
next_token = page.next
|
||||
|
||||
|
||||
# From https://www.sqlite.org/lang_keywords.html
|
||||
reserved_words = set(
|
||||
|
|
@ -1107,8 +1054,7 @@ def resolve_env_secrets(config, environ):
|
|||
if list(config.keys()) == ["$env"]:
|
||||
return environ.get(list(config.values())[0])
|
||||
elif list(config.keys()) == ["$file"]:
|
||||
with open(list(config.values())[0]) as fp:
|
||||
return fp.read()
|
||||
return open(list(config.values())[0]).read()
|
||||
else:
|
||||
return {
|
||||
key: resolve_env_secrets(value, environ)
|
||||
|
|
@ -1422,6 +1368,7 @@ _table_config_keys = (
|
|||
"fts_table",
|
||||
"fts_pk",
|
||||
"searchmode",
|
||||
"units",
|
||||
)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,587 +0,0 @@
|
|||
"""
|
||||
SQL query builder for hierarchical permission checking.
|
||||
|
||||
This module implements a cascading permission system based on the pattern
|
||||
from https://github.com/simonw/research/tree/main/sqlite-permissions-poc
|
||||
|
||||
It builds SQL queries that:
|
||||
|
||||
1. Start with all resources of a given type (from resource_type.resources_sql())
|
||||
2. Gather permission rules from plugins (via permission_resources_sql hook)
|
||||
3. Apply cascading logic: child → parent → global
|
||||
4. Apply DENY-beats-ALLOW at each level
|
||||
|
||||
The core pattern is:
|
||||
- Resources are identified by (parent, child) tuples
|
||||
- Rules are evaluated at three levels:
|
||||
- child: exact match on (parent, child)
|
||||
- parent: match on (parent, NULL)
|
||||
- global: match on (NULL, NULL)
|
||||
- At the same level, DENY (allow=0) beats ALLOW (allow=1)
|
||||
- Across levels, child beats parent beats global
|
||||
"""
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from datasette.utils.permissions import gather_permission_sql_from_hooks
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from datasette.app import Datasette
|
||||
|
||||
|
||||
async def build_allowed_resources_sql(
|
||||
datasette: "Datasette",
|
||||
actor: dict | None,
|
||||
action: str,
|
||||
*,
|
||||
parent: str | None = None,
|
||||
include_is_private: bool = False,
|
||||
) -> tuple[str, dict]:
|
||||
"""
|
||||
Build a SQL query that returns all resources the actor can access for this action.
|
||||
|
||||
Args:
|
||||
datasette: The Datasette instance
|
||||
actor: The actor dict (or None for unauthenticated)
|
||||
action: The action name (e.g., "view-table", "view-database")
|
||||
parent: Optional parent filter to limit results (e.g., database name)
|
||||
include_is_private: If True, add is_private column showing if anonymous cannot access
|
||||
|
||||
Returns:
|
||||
A tuple of (sql_query, params_dict)
|
||||
|
||||
The returned SQL query will have three columns (or four with include_is_private):
|
||||
- parent: The parent resource identifier (or NULL)
|
||||
- child: The child resource identifier (or NULL)
|
||||
- reason: The reason from the rule that granted access
|
||||
- is_private: (if include_is_private) 1 if anonymous cannot access, 0 otherwise
|
||||
|
||||
Example:
|
||||
For action="view-table", this might return:
|
||||
SELECT parent, child, reason FROM ... WHERE is_allowed = 1
|
||||
|
||||
Results would be like:
|
||||
('analytics', 'users', 'role-based: analysts can access analytics DB')
|
||||
('analytics', 'events', 'role-based: analysts can access analytics DB')
|
||||
('production', 'orders', 'business-exception: allow production.orders for carol')
|
||||
"""
|
||||
# Get the Action object
|
||||
action_obj = datasette.actions.get(action)
|
||||
if not action_obj:
|
||||
raise ValueError(f"Unknown action: {action}")
|
||||
|
||||
# If this action also_requires another action, we need to combine the queries
|
||||
if action_obj.also_requires:
|
||||
# Build both queries
|
||||
main_sql, main_params = await _build_single_action_sql(
|
||||
datasette,
|
||||
actor,
|
||||
action,
|
||||
parent=parent,
|
||||
include_is_private=include_is_private,
|
||||
)
|
||||
required_sql, required_params = await _build_single_action_sql(
|
||||
datasette,
|
||||
actor,
|
||||
action_obj.also_requires,
|
||||
parent=parent,
|
||||
include_is_private=False,
|
||||
)
|
||||
|
||||
# Merge parameters - they should have identical values for :actor, :actor_id, etc.
|
||||
all_params = {**main_params, **required_params}
|
||||
if parent is not None:
|
||||
all_params["filter_parent"] = parent
|
||||
|
||||
# Combine with INNER JOIN - only resources allowed by both actions
|
||||
combined_sql = f"""
|
||||
WITH
|
||||
main_allowed AS (
|
||||
{main_sql}
|
||||
),
|
||||
required_allowed AS (
|
||||
{required_sql}
|
||||
)
|
||||
SELECT m.parent, m.child, m.reason"""
|
||||
|
||||
if include_is_private:
|
||||
combined_sql += ", m.is_private"
|
||||
|
||||
combined_sql += """
|
||||
FROM main_allowed m
|
||||
INNER JOIN required_allowed r
|
||||
ON ((m.parent = r.parent) OR (m.parent IS NULL AND r.parent IS NULL))
|
||||
AND ((m.child = r.child) OR (m.child IS NULL AND r.child IS NULL))
|
||||
"""
|
||||
|
||||
if parent is not None:
|
||||
combined_sql += "WHERE m.parent = :filter_parent\n"
|
||||
|
||||
combined_sql += "ORDER BY m.parent, m.child"
|
||||
|
||||
return combined_sql, all_params
|
||||
|
||||
# No also_requires, build single action query
|
||||
return await _build_single_action_sql(
|
||||
datasette, actor, action, parent=parent, include_is_private=include_is_private
|
||||
)
|
||||
|
||||
|
||||
async def _build_single_action_sql(
|
||||
datasette: "Datasette",
|
||||
actor: dict | None,
|
||||
action: str,
|
||||
*,
|
||||
parent: str | None = None,
|
||||
include_is_private: bool = False,
|
||||
) -> tuple[str, dict]:
|
||||
"""
|
||||
Build SQL for a single action (internal helper for build_allowed_resources_sql).
|
||||
|
||||
This contains the original logic from build_allowed_resources_sql, extracted
|
||||
to allow combining multiple actions when also_requires is used.
|
||||
"""
|
||||
# Get the Action object
|
||||
action_obj = datasette.actions.get(action)
|
||||
if not action_obj:
|
||||
raise ValueError(f"Unknown action: {action}")
|
||||
|
||||
# Get base resources SQL from the resource class
|
||||
base_resources_sql = await action_obj.resource_class.resources_sql(datasette)
|
||||
|
||||
permission_sqls = await gather_permission_sql_from_hooks(
|
||||
datasette=datasette,
|
||||
actor=actor,
|
||||
action=action,
|
||||
)
|
||||
|
||||
# If permission_sqls is the sentinel, skip all permission checks
|
||||
# Return SQL that allows all resources
|
||||
from datasette.utils.permissions import SKIP_PERMISSION_CHECKS
|
||||
|
||||
if permission_sqls is SKIP_PERMISSION_CHECKS:
|
||||
cols = "parent, child, 'skip_permission_checks' AS reason"
|
||||
if include_is_private:
|
||||
cols += ", 0 AS is_private"
|
||||
return f"SELECT {cols} FROM ({base_resources_sql})", {}
|
||||
|
||||
all_params = {}
|
||||
rule_sqls = []
|
||||
restriction_sqls = []
|
||||
|
||||
for permission_sql in permission_sqls:
|
||||
# Always collect params (even from restriction-only plugins)
|
||||
all_params.update(permission_sql.params or {})
|
||||
|
||||
# Collect restriction SQL filters
|
||||
if permission_sql.restriction_sql:
|
||||
restriction_sqls.append(permission_sql.restriction_sql)
|
||||
|
||||
# Skip plugins that only provide restriction_sql (no permission rules)
|
||||
if permission_sql.sql is None:
|
||||
continue
|
||||
rule_sqls.append(
|
||||
f"""
|
||||
SELECT parent, child, allow, reason, '{permission_sql.source}' AS source_plugin FROM (
|
||||
{permission_sql.sql}
|
||||
)
|
||||
""".strip()
|
||||
)
|
||||
|
||||
# If no rules, return empty result (deny all)
|
||||
if not rule_sqls:
|
||||
empty_cols = "NULL AS parent, NULL AS child, NULL AS reason"
|
||||
if include_is_private:
|
||||
empty_cols += ", NULL AS is_private"
|
||||
return f"SELECT {empty_cols} WHERE 0", {}
|
||||
|
||||
# Build the cascading permission query
|
||||
rules_union = " UNION ALL ".join(rule_sqls)
|
||||
|
||||
# Build the main query
|
||||
query_parts = [
|
||||
"WITH",
|
||||
"base AS (",
|
||||
f" {base_resources_sql}",
|
||||
"),",
|
||||
"all_rules AS (",
|
||||
f" {rules_union}",
|
||||
"),",
|
||||
]
|
||||
|
||||
# If include_is_private, we need to build anonymous permissions too
|
||||
if include_is_private:
|
||||
anon_permission_sqls = await gather_permission_sql_from_hooks(
|
||||
datasette=datasette,
|
||||
actor=None,
|
||||
action=action,
|
||||
)
|
||||
anon_sqls_rewritten = []
|
||||
anon_params = {}
|
||||
|
||||
for permission_sql in anon_permission_sqls:
|
||||
# Skip plugins that only provide restriction_sql (no permission rules)
|
||||
if permission_sql.sql is None:
|
||||
continue
|
||||
rewritten_sql = permission_sql.sql
|
||||
for key, value in (permission_sql.params or {}).items():
|
||||
anon_key = f"anon_{key}"
|
||||
anon_params[anon_key] = value
|
||||
rewritten_sql = rewritten_sql.replace(f":{key}", f":{anon_key}")
|
||||
anon_sqls_rewritten.append(rewritten_sql)
|
||||
|
||||
all_params.update(anon_params)
|
||||
|
||||
if anon_sqls_rewritten:
|
||||
anon_rules_union = " UNION ALL ".join(anon_sqls_rewritten)
|
||||
query_parts.extend(
|
||||
[
|
||||
"anon_rules AS (",
|
||||
f" {anon_rules_union}",
|
||||
"),",
|
||||
]
|
||||
)
|
||||
|
||||
# Continue with the cascading logic
|
||||
query_parts.extend(
|
||||
[
|
||||
"child_lvl AS (",
|
||||
" SELECT b.parent, b.child,",
|
||||
" MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
|
||||
" MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow,",
|
||||
" json_group_array(CASE WHEN ar.allow = 0 THEN ar.source_plugin || ': ' || ar.reason END) AS deny_reasons,",
|
||||
" json_group_array(CASE WHEN ar.allow = 1 THEN ar.source_plugin || ': ' || ar.reason END) AS allow_reasons",
|
||||
" FROM base b",
|
||||
" LEFT JOIN all_rules ar ON ar.parent = b.parent AND ar.child = b.child",
|
||||
" GROUP BY b.parent, b.child",
|
||||
"),",
|
||||
"parent_lvl AS (",
|
||||
" SELECT b.parent, b.child,",
|
||||
" MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
|
||||
" MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow,",
|
||||
" json_group_array(CASE WHEN ar.allow = 0 THEN ar.source_plugin || ': ' || ar.reason END) AS deny_reasons,",
|
||||
" json_group_array(CASE WHEN ar.allow = 1 THEN ar.source_plugin || ': ' || ar.reason END) AS allow_reasons",
|
||||
" FROM base b",
|
||||
" LEFT JOIN all_rules ar ON ar.parent = b.parent AND ar.child IS NULL",
|
||||
" GROUP BY b.parent, b.child",
|
||||
"),",
|
||||
"global_lvl AS (",
|
||||
" SELECT b.parent, b.child,",
|
||||
" MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
|
||||
" MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow,",
|
||||
" json_group_array(CASE WHEN ar.allow = 0 THEN ar.source_plugin || ': ' || ar.reason END) AS deny_reasons,",
|
||||
" json_group_array(CASE WHEN ar.allow = 1 THEN ar.source_plugin || ': ' || ar.reason END) AS allow_reasons",
|
||||
" FROM base b",
|
||||
" LEFT JOIN all_rules ar ON ar.parent IS NULL AND ar.child IS NULL",
|
||||
" GROUP BY b.parent, b.child",
|
||||
"),",
|
||||
]
|
||||
)
|
||||
|
||||
# Add anonymous decision logic if needed
|
||||
if include_is_private:
|
||||
query_parts.extend(
|
||||
[
|
||||
"anon_child_lvl AS (",
|
||||
" SELECT b.parent, b.child,",
|
||||
" MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
|
||||
" MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow",
|
||||
" FROM base b",
|
||||
" LEFT JOIN anon_rules ar ON ar.parent = b.parent AND ar.child = b.child",
|
||||
" GROUP BY b.parent, b.child",
|
||||
"),",
|
||||
"anon_parent_lvl AS (",
|
||||
" SELECT b.parent, b.child,",
|
||||
" MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
|
||||
" MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow",
|
||||
" FROM base b",
|
||||
" LEFT JOIN anon_rules ar ON ar.parent = b.parent AND ar.child IS NULL",
|
||||
" GROUP BY b.parent, b.child",
|
||||
"),",
|
||||
"anon_global_lvl AS (",
|
||||
" SELECT b.parent, b.child,",
|
||||
" MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
|
||||
" MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow",
|
||||
" FROM base b",
|
||||
" LEFT JOIN anon_rules ar ON ar.parent IS NULL AND ar.child IS NULL",
|
||||
" GROUP BY b.parent, b.child",
|
||||
"),",
|
||||
"anon_decisions AS (",
|
||||
" SELECT",
|
||||
" b.parent, b.child,",
|
||||
" CASE",
|
||||
" WHEN acl.any_deny = 1 THEN 0",
|
||||
" WHEN acl.any_allow = 1 THEN 1",
|
||||
" WHEN apl.any_deny = 1 THEN 0",
|
||||
" WHEN apl.any_allow = 1 THEN 1",
|
||||
" WHEN agl.any_deny = 1 THEN 0",
|
||||
" WHEN agl.any_allow = 1 THEN 1",
|
||||
" ELSE 0",
|
||||
" END AS anon_is_allowed",
|
||||
" FROM base b",
|
||||
" JOIN anon_child_lvl acl ON b.parent = acl.parent AND (b.child = acl.child OR (b.child IS NULL AND acl.child IS NULL))",
|
||||
" JOIN anon_parent_lvl apl ON b.parent = apl.parent AND (b.child = apl.child OR (b.child IS NULL AND apl.child IS NULL))",
|
||||
" JOIN anon_global_lvl agl ON b.parent = agl.parent AND (b.child = agl.child OR (b.child IS NULL AND agl.child IS NULL))",
|
||||
"),",
|
||||
]
|
||||
)
|
||||
|
||||
# Final decisions
|
||||
query_parts.extend(
|
||||
[
|
||||
"decisions AS (",
|
||||
" SELECT",
|
||||
" b.parent, b.child,",
|
||||
" -- Cascading permission logic: child → parent → global, DENY beats ALLOW at each level",
|
||||
" -- Priority order:",
|
||||
" -- 1. Child-level deny (most specific, blocks access)",
|
||||
" -- 2. Child-level allow (most specific, grants access)",
|
||||
" -- 3. Parent-level deny (intermediate, blocks access)",
|
||||
" -- 4. Parent-level allow (intermediate, grants access)",
|
||||
" -- 5. Global-level deny (least specific, blocks access)",
|
||||
" -- 6. Global-level allow (least specific, grants access)",
|
||||
" -- 7. Default deny (no rules match)",
|
||||
" CASE",
|
||||
" WHEN cl.any_deny = 1 THEN 0",
|
||||
" WHEN cl.any_allow = 1 THEN 1",
|
||||
" WHEN pl.any_deny = 1 THEN 0",
|
||||
" WHEN pl.any_allow = 1 THEN 1",
|
||||
" WHEN gl.any_deny = 1 THEN 0",
|
||||
" WHEN gl.any_allow = 1 THEN 1",
|
||||
" ELSE 0",
|
||||
" END AS is_allowed,",
|
||||
" CASE",
|
||||
" WHEN cl.any_deny = 1 THEN cl.deny_reasons",
|
||||
" WHEN cl.any_allow = 1 THEN cl.allow_reasons",
|
||||
" WHEN pl.any_deny = 1 THEN pl.deny_reasons",
|
||||
" WHEN pl.any_allow = 1 THEN pl.allow_reasons",
|
||||
" WHEN gl.any_deny = 1 THEN gl.deny_reasons",
|
||||
" WHEN gl.any_allow = 1 THEN gl.allow_reasons",
|
||||
" ELSE '[]'",
|
||||
" END AS reason",
|
||||
]
|
||||
)
|
||||
|
||||
if include_is_private:
|
||||
query_parts.append(
|
||||
" , CASE WHEN ad.anon_is_allowed = 0 THEN 1 ELSE 0 END AS is_private"
|
||||
)
|
||||
|
||||
query_parts.extend(
|
||||
[
|
||||
" FROM base b",
|
||||
" JOIN child_lvl cl ON b.parent = cl.parent AND (b.child = cl.child OR (b.child IS NULL AND cl.child IS NULL))",
|
||||
" JOIN parent_lvl pl ON b.parent = pl.parent AND (b.child = pl.child OR (b.child IS NULL AND pl.child IS NULL))",
|
||||
" JOIN global_lvl gl ON b.parent = gl.parent AND (b.child = gl.child OR (b.child IS NULL AND gl.child IS NULL))",
|
||||
]
|
||||
)
|
||||
|
||||
if include_is_private:
|
||||
query_parts.append(
|
||||
" JOIN anon_decisions ad ON b.parent = ad.parent AND (b.child = ad.child OR (b.child IS NULL AND ad.child IS NULL))"
|
||||
)
|
||||
|
||||
query_parts.append(")")
|
||||
|
||||
# Add restriction list CTE if there are restrictions
|
||||
if restriction_sqls:
|
||||
# Wrap each restriction_sql in a subquery to avoid operator precedence issues
|
||||
# with UNION ALL inside the restriction SQL statements
|
||||
restriction_intersect = "\nINTERSECT\n".join(
|
||||
f"SELECT * FROM ({sql})" for sql in restriction_sqls
|
||||
)
|
||||
query_parts.extend(
|
||||
[",", "restriction_list AS (", f" {restriction_intersect}", ")"]
|
||||
)
|
||||
|
||||
# Final SELECT
|
||||
select_cols = "parent, child, reason"
|
||||
if include_is_private:
|
||||
select_cols += ", is_private"
|
||||
|
||||
query_parts.append(f"SELECT {select_cols}")
|
||||
query_parts.append("FROM decisions")
|
||||
query_parts.append("WHERE is_allowed = 1")
|
||||
|
||||
# Add restriction filter if there are restrictions
|
||||
if restriction_sqls:
|
||||
query_parts.append(
|
||||
"""
|
||||
AND EXISTS (
|
||||
SELECT 1 FROM restriction_list r
|
||||
WHERE (r.parent = decisions.parent OR r.parent IS NULL)
|
||||
AND (r.child = decisions.child OR r.child IS NULL)
|
||||
)"""
|
||||
)
|
||||
|
||||
# Add parent filter if specified
|
||||
if parent is not None:
|
||||
query_parts.append(" AND parent = :filter_parent")
|
||||
all_params["filter_parent"] = parent
|
||||
|
||||
query_parts.append("ORDER BY parent, child")
|
||||
|
||||
query = "\n".join(query_parts)
|
||||
return query, all_params
|
||||
|
||||
|
||||
async def build_permission_rules_sql(
|
||||
datasette: "Datasette", actor: dict | None, action: str
|
||||
) -> tuple[str, dict]:
|
||||
"""
|
||||
Build the UNION SQL and params for all permission rules for a given actor and action.
|
||||
|
||||
Returns:
|
||||
A tuple of (sql, params) where sql is a UNION ALL query that returns
|
||||
(parent, child, allow, reason, source_plugin) rows.
|
||||
"""
|
||||
# Get the Action object
|
||||
action_obj = datasette.actions.get(action)
|
||||
if not action_obj:
|
||||
raise ValueError(f"Unknown action: {action}")
|
||||
|
||||
permission_sqls = await gather_permission_sql_from_hooks(
|
||||
datasette=datasette,
|
||||
actor=actor,
|
||||
action=action,
|
||||
)
|
||||
|
||||
# If permission_sqls is the sentinel, skip all permission checks
|
||||
# Return SQL that allows everything
|
||||
from datasette.utils.permissions import SKIP_PERMISSION_CHECKS
|
||||
|
||||
if permission_sqls is SKIP_PERMISSION_CHECKS:
|
||||
return (
|
||||
"SELECT NULL AS parent, NULL AS child, 1 AS allow, 'skip_permission_checks' AS reason, 'skip' AS source_plugin",
|
||||
{},
|
||||
[],
|
||||
)
|
||||
|
||||
if not permission_sqls:
|
||||
return (
|
||||
"SELECT NULL AS parent, NULL AS child, 0 AS allow, NULL AS reason, NULL AS source_plugin WHERE 0",
|
||||
{},
|
||||
[],
|
||||
)
|
||||
|
||||
union_parts = []
|
||||
all_params = {}
|
||||
restriction_sqls = []
|
||||
|
||||
for permission_sql in permission_sqls:
|
||||
all_params.update(permission_sql.params or {})
|
||||
|
||||
# Collect restriction SQL filters
|
||||
if permission_sql.restriction_sql:
|
||||
restriction_sqls.append(permission_sql.restriction_sql)
|
||||
|
||||
# Skip plugins that only provide restriction_sql (no permission rules)
|
||||
if permission_sql.sql is None:
|
||||
continue
|
||||
|
||||
union_parts.append(
|
||||
f"""
|
||||
SELECT parent, child, allow, reason, '{permission_sql.source}' AS source_plugin FROM (
|
||||
{permission_sql.sql}
|
||||
)
|
||||
""".strip()
|
||||
)
|
||||
|
||||
rules_union = " UNION ALL ".join(union_parts)
|
||||
return rules_union, all_params, restriction_sqls
|
||||
|
||||
|
||||
async def check_permission_for_resource(
|
||||
*,
|
||||
datasette: "Datasette",
|
||||
actor: dict | None,
|
||||
action: str,
|
||||
parent: str | None,
|
||||
child: str | None,
|
||||
) -> bool:
|
||||
"""
|
||||
Check if an actor has permission for a specific action on a specific resource.
|
||||
|
||||
Args:
|
||||
datasette: The Datasette instance
|
||||
actor: The actor dict (or None)
|
||||
action: The action name
|
||||
parent: The parent resource identifier (e.g., database name, or None)
|
||||
child: The child resource identifier (e.g., table name, or None)
|
||||
|
||||
Returns:
|
||||
True if the actor is allowed, False otherwise
|
||||
|
||||
This builds the cascading permission query and checks if the specific
|
||||
resource is in the allowed set.
|
||||
"""
|
||||
rules_union, all_params, restriction_sqls = await build_permission_rules_sql(
|
||||
datasette, actor, action
|
||||
)
|
||||
|
||||
# If no rules (empty SQL), default deny
|
||||
if not rules_union:
|
||||
return False
|
||||
|
||||
# Add parameters for the resource we're checking
|
||||
all_params["_check_parent"] = parent
|
||||
all_params["_check_child"] = child
|
||||
|
||||
# If there are restriction filters, check if the resource passes them first
|
||||
if restriction_sqls:
|
||||
# Check if resource is in restriction allowlist
|
||||
# Database-level restrictions (parent, NULL) should match all children (parent, *)
|
||||
# Wrap each restriction_sql in a subquery to avoid operator precedence issues
|
||||
restriction_check = "\nINTERSECT\n".join(
|
||||
f"SELECT * FROM ({sql})" for sql in restriction_sqls
|
||||
)
|
||||
restriction_query = f"""
|
||||
WITH restriction_list AS (
|
||||
{restriction_check}
|
||||
)
|
||||
SELECT EXISTS (
|
||||
SELECT 1 FROM restriction_list
|
||||
WHERE (parent = :_check_parent OR parent IS NULL)
|
||||
AND (child = :_check_child OR child IS NULL)
|
||||
) AS in_allowlist
|
||||
"""
|
||||
result = await datasette.get_internal_database().execute(
|
||||
restriction_query, all_params
|
||||
)
|
||||
if result.rows and not result.rows[0][0]:
|
||||
# Resource not in restriction allowlist - deny
|
||||
return False
|
||||
|
||||
query = f"""
|
||||
WITH
|
||||
all_rules AS (
|
||||
{rules_union}
|
||||
),
|
||||
matched_rules AS (
|
||||
SELECT ar.*,
|
||||
CASE
|
||||
WHEN ar.child IS NOT NULL THEN 2 -- child-level (most specific)
|
||||
WHEN ar.parent IS NOT NULL THEN 1 -- parent-level
|
||||
ELSE 0 -- root/global
|
||||
END AS depth
|
||||
FROM all_rules ar
|
||||
WHERE (ar.parent IS NULL OR ar.parent = :_check_parent)
|
||||
AND (ar.child IS NULL OR ar.child = :_check_child)
|
||||
),
|
||||
winner AS (
|
||||
SELECT *
|
||||
FROM matched_rules
|
||||
ORDER BY
|
||||
depth DESC, -- specificity first (higher depth wins)
|
||||
CASE WHEN allow=0 THEN 0 ELSE 1 END, -- then deny over allow
|
||||
source_plugin -- stable tie-break
|
||||
LIMIT 1
|
||||
)
|
||||
SELECT COALESCE((SELECT allow FROM winner), 0) AS is_allowed
|
||||
"""
|
||||
|
||||
# Execute the query against the internal database
|
||||
result = await datasette.get_internal_database().execute(query, all_params)
|
||||
if result.rows:
|
||||
return bool(result.rows[0][0])
|
||||
return False
|
||||
|
|
@ -1,3 +1,4 @@
|
|||
import hashlib
|
||||
import json
|
||||
from datasette.utils import MultiParams, calculate_etag
|
||||
from mimetypes import guess_type
|
||||
|
|
@ -6,7 +7,6 @@ from pathlib import Path
|
|||
from http.cookies import SimpleCookie, Morsel
|
||||
import aiofiles
|
||||
import aiofiles.os
|
||||
import re
|
||||
|
||||
# Workaround for adding samesite support to pre 3.8 python
|
||||
Morsel._reserved["samesite"] = "SameSite"
|
||||
|
|
@ -249,9 +249,6 @@ async def asgi_send_html(send, html, status=200, headers=None):
|
|||
|
||||
|
||||
async def asgi_send_redirect(send, location, status=302):
|
||||
# Prevent open redirect vulnerability: strip multiple leading slashes
|
||||
# //example.com would be interpreted as a protocol-relative URL (e.g., https://example.com/)
|
||||
location = re.sub(r"^/+", "/", location)
|
||||
await asgi_send(
|
||||
send,
|
||||
"",
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import inspect
|
||||
import asyncio
|
||||
import types
|
||||
from typing import NamedTuple, Any
|
||||
|
||||
|
|
@ -17,9 +17,9 @@ def check_callable(obj: Any) -> CallableStatus:
|
|||
return CallableStatus(True, False)
|
||||
|
||||
if isinstance(obj, types.FunctionType):
|
||||
return CallableStatus(True, inspect.iscoroutinefunction(obj))
|
||||
return CallableStatus(True, asyncio.iscoroutinefunction(obj))
|
||||
|
||||
if hasattr(obj, "__call__"):
|
||||
return CallableStatus(True, inspect.iscoroutinefunction(obj.__call__))
|
||||
return CallableStatus(True, asyncio.iscoroutinefunction(obj.__call__))
|
||||
|
||||
assert False, "obj {} is somehow callable with no __call__ method".format(repr(obj))
|
||||
|
|
|
|||
|
|
@ -17,15 +17,7 @@ async def init_internal_db(db):
|
|||
rootpage INTEGER,
|
||||
sql TEXT,
|
||||
PRIMARY KEY (database_name, table_name),
|
||||
FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name)
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS catalog_views (
|
||||
database_name TEXT,
|
||||
view_name TEXT,
|
||||
rootpage INTEGER,
|
||||
sql TEXT,
|
||||
PRIMARY KEY (database_name, view_name),
|
||||
FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name)
|
||||
FOREIGN KEY (database_name) REFERENCES databases(database_name)
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS catalog_columns (
|
||||
database_name TEXT,
|
||||
|
|
@ -38,8 +30,8 @@ async def init_internal_db(db):
|
|||
is_pk INTEGER, -- renamed from pk
|
||||
hidden INTEGER,
|
||||
PRIMARY KEY (database_name, table_name, name),
|
||||
FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name),
|
||||
FOREIGN KEY (database_name, table_name) REFERENCES catalog_tables(database_name, table_name)
|
||||
FOREIGN KEY (database_name) REFERENCES databases(database_name),
|
||||
FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name)
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS catalog_indexes (
|
||||
database_name TEXT,
|
||||
|
|
@ -50,8 +42,8 @@ async def init_internal_db(db):
|
|||
origin TEXT,
|
||||
partial INTEGER,
|
||||
PRIMARY KEY (database_name, table_name, name),
|
||||
FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name),
|
||||
FOREIGN KEY (database_name, table_name) REFERENCES catalog_tables(database_name, table_name)
|
||||
FOREIGN KEY (database_name) REFERENCES databases(database_name),
|
||||
FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name)
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS catalog_foreign_keys (
|
||||
database_name TEXT,
|
||||
|
|
@ -65,8 +57,8 @@ async def init_internal_db(db):
|
|||
on_delete TEXT,
|
||||
match TEXT,
|
||||
PRIMARY KEY (database_name, table_name, id, seq),
|
||||
FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name),
|
||||
FOREIGN KEY (database_name, table_name) REFERENCES catalog_tables(database_name, table_name)
|
||||
FOREIGN KEY (database_name) REFERENCES databases(database_name),
|
||||
FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name)
|
||||
);
|
||||
"""
|
||||
).strip()
|
||||
|
|
@ -119,9 +111,6 @@ async def populate_schema_tables(internal_db, db):
|
|||
conn.execute(
|
||||
"DELETE FROM catalog_tables WHERE database_name = ?", [database_name]
|
||||
)
|
||||
conn.execute(
|
||||
"DELETE FROM catalog_views WHERE database_name = ?", [database_name]
|
||||
)
|
||||
conn.execute(
|
||||
"DELETE FROM catalog_columns WHERE database_name = ?", [database_name]
|
||||
)
|
||||
|
|
@ -136,21 +125,13 @@ async def populate_schema_tables(internal_db, db):
|
|||
await internal_db.execute_write_fn(delete_everything)
|
||||
|
||||
tables = (await db.execute("select * from sqlite_master WHERE type = 'table'")).rows
|
||||
views = (await db.execute("select * from sqlite_master WHERE type = 'view'")).rows
|
||||
|
||||
def collect_info(conn):
|
||||
tables_to_insert = []
|
||||
views_to_insert = []
|
||||
columns_to_insert = []
|
||||
foreign_keys_to_insert = []
|
||||
indexes_to_insert = []
|
||||
|
||||
for view in views:
|
||||
view_name = view["name"]
|
||||
views_to_insert.append(
|
||||
(database_name, view_name, view["rootpage"], view["sql"])
|
||||
)
|
||||
|
||||
for table in tables:
|
||||
table_name = table["name"]
|
||||
tables_to_insert.append(
|
||||
|
|
@ -184,7 +165,6 @@ async def populate_schema_tables(internal_db, db):
|
|||
)
|
||||
return (
|
||||
tables_to_insert,
|
||||
views_to_insert,
|
||||
columns_to_insert,
|
||||
foreign_keys_to_insert,
|
||||
indexes_to_insert,
|
||||
|
|
@ -192,7 +172,6 @@ async def populate_schema_tables(internal_db, db):
|
|||
|
||||
(
|
||||
tables_to_insert,
|
||||
views_to_insert,
|
||||
columns_to_insert,
|
||||
foreign_keys_to_insert,
|
||||
indexes_to_insert,
|
||||
|
|
@ -205,13 +184,6 @@ async def populate_schema_tables(internal_db, db):
|
|||
""",
|
||||
tables_to_insert,
|
||||
)
|
||||
await internal_db.execute_write_many(
|
||||
"""
|
||||
INSERT INTO catalog_views (database_name, view_name, rootpage, sql)
|
||||
values (?, ?, ?, ?)
|
||||
""",
|
||||
views_to_insert,
|
||||
)
|
||||
await internal_db.execute_write_many(
|
||||
"""
|
||||
INSERT INTO catalog_columns (
|
||||
|
|
|
|||
|
|
@ -1,439 +0,0 @@
|
|||
# perm_utils.py
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from typing import Any, Dict, Iterable, List, Sequence, Tuple
|
||||
import sqlite3
|
||||
|
||||
from datasette.permissions import PermissionSQL
|
||||
from datasette.plugins import pm
|
||||
from datasette.utils import await_me_maybe
|
||||
|
||||
|
||||
# Sentinel object to indicate permission checks should be skipped
|
||||
SKIP_PERMISSION_CHECKS = object()
|
||||
|
||||
|
||||
async def gather_permission_sql_from_hooks(
|
||||
*, datasette, actor: dict | None, action: str
|
||||
) -> List[PermissionSQL] | object:
|
||||
"""Collect PermissionSQL objects from the permission_resources_sql hook.
|
||||
|
||||
Ensures that each returned PermissionSQL has a populated ``source``.
|
||||
|
||||
Returns SKIP_PERMISSION_CHECKS sentinel if skip_permission_checks context variable
|
||||
is set, signaling that all permission checks should be bypassed.
|
||||
"""
|
||||
from datasette.permissions import _skip_permission_checks
|
||||
|
||||
# Check if we should skip permission checks BEFORE calling hooks
|
||||
# This avoids creating unawaited coroutines
|
||||
if _skip_permission_checks.get():
|
||||
return SKIP_PERMISSION_CHECKS
|
||||
|
||||
hook_caller = pm.hook.permission_resources_sql
|
||||
hookimpls = hook_caller.get_hookimpls()
|
||||
hook_results = list(hook_caller(datasette=datasette, actor=actor, action=action))
|
||||
|
||||
collected: List[PermissionSQL] = []
|
||||
actor_json = json.dumps(actor) if actor is not None else None
|
||||
actor_id = actor.get("id") if isinstance(actor, dict) else None
|
||||
|
||||
for index, result in enumerate(hook_results):
|
||||
hookimpl = hookimpls[index]
|
||||
resolved = await await_me_maybe(result)
|
||||
default_source = _plugin_name_from_hookimpl(hookimpl)
|
||||
for permission_sql in _iter_permission_sql_from_result(resolved, action=action):
|
||||
if not permission_sql.source:
|
||||
permission_sql.source = default_source
|
||||
params = permission_sql.params or {}
|
||||
params.setdefault("action", action)
|
||||
params.setdefault("actor", actor_json)
|
||||
params.setdefault("actor_id", actor_id)
|
||||
collected.append(permission_sql)
|
||||
|
||||
return collected
|
||||
|
||||
|
||||
def _plugin_name_from_hookimpl(hookimpl) -> str:
|
||||
if getattr(hookimpl, "plugin_name", None):
|
||||
return hookimpl.plugin_name
|
||||
plugin = getattr(hookimpl, "plugin", None)
|
||||
if hasattr(plugin, "__name__"):
|
||||
return plugin.__name__
|
||||
return repr(plugin)
|
||||
|
||||
|
||||
def _iter_permission_sql_from_result(
|
||||
result: Any, *, action: str
|
||||
) -> Iterable[PermissionSQL]:
|
||||
if result is None:
|
||||
return []
|
||||
if isinstance(result, PermissionSQL):
|
||||
return [result]
|
||||
if isinstance(result, (list, tuple)):
|
||||
collected: List[PermissionSQL] = []
|
||||
for item in result:
|
||||
collected.extend(_iter_permission_sql_from_result(item, action=action))
|
||||
return collected
|
||||
if callable(result):
|
||||
permission_sql = result(action) # type: ignore[call-arg]
|
||||
return _iter_permission_sql_from_result(permission_sql, action=action)
|
||||
raise TypeError(
|
||||
"Plugin providers must return PermissionSQL instances, sequences, or callables"
|
||||
)
|
||||
|
||||
|
||||
# -----------------------------
|
||||
# Plugin interface & utilities
|
||||
# -----------------------------
|
||||
|
||||
|
||||
def build_rules_union(
|
||||
actor: dict | None, plugins: Sequence[PermissionSQL]
|
||||
) -> Tuple[str, Dict[str, Any]]:
|
||||
"""
|
||||
Compose plugin SQL into a UNION ALL.
|
||||
|
||||
Returns:
|
||||
union_sql: a SELECT with columns (parent, child, allow, reason, source_plugin)
|
||||
params: dict of bound parameters including :actor (JSON), :actor_id, and plugin params
|
||||
|
||||
Note: Plugins are responsible for ensuring their parameter names don't conflict.
|
||||
The system reserves these parameter names: :actor, :actor_id, :action, :filter_parent
|
||||
Plugin parameters should be prefixed with a unique identifier (e.g., source name).
|
||||
"""
|
||||
parts: List[str] = []
|
||||
actor_json = json.dumps(actor) if actor else None
|
||||
actor_id = actor.get("id") if actor else None
|
||||
params: Dict[str, Any] = {"actor": actor_json, "actor_id": actor_id}
|
||||
|
||||
for p in plugins:
|
||||
# No namespacing - just use plugin params as-is
|
||||
params.update(p.params or {})
|
||||
|
||||
# Skip plugins that only provide restriction_sql (no permission rules)
|
||||
if p.sql is None:
|
||||
continue
|
||||
|
||||
parts.append(
|
||||
f"""
|
||||
SELECT parent, child, allow, reason, '{p.source}' AS source_plugin FROM (
|
||||
{p.sql}
|
||||
)
|
||||
""".strip()
|
||||
)
|
||||
|
||||
if not parts:
|
||||
# Empty UNION that returns no rows
|
||||
union_sql = "SELECT NULL parent, NULL child, NULL allow, NULL reason, 'none' source_plugin WHERE 0"
|
||||
else:
|
||||
union_sql = "\nUNION ALL\n".join(parts)
|
||||
|
||||
return union_sql, params
|
||||
|
||||
|
||||
# -----------------------------------------------
|
||||
# Core resolvers (no temp tables, no custom UDFs)
|
||||
# -----------------------------------------------
|
||||
|
||||
|
||||
async def resolve_permissions_from_catalog(
|
||||
db,
|
||||
actor: dict | None,
|
||||
plugins: Sequence[Any],
|
||||
action: str,
|
||||
candidate_sql: str,
|
||||
candidate_params: Dict[str, Any] | None = None,
|
||||
*,
|
||||
implicit_deny: bool = True,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Resolve permissions by embedding the provided *candidate_sql* in a CTE.
|
||||
|
||||
Expectations:
|
||||
- candidate_sql SELECTs: parent TEXT, child TEXT
|
||||
(Use child=NULL for parent-scoped actions like "execute-sql".)
|
||||
- *db* exposes: rows = await db.execute(sql, params)
|
||||
where rows is an iterable of sqlite3.Row
|
||||
- plugins: hook results handled by await_me_maybe - can be sync/async,
|
||||
single PermissionSQL, list, or callable returning PermissionSQL
|
||||
- actor is the actor dict (or None), made available as :actor (JSON), :actor_id, and :action
|
||||
|
||||
Decision policy:
|
||||
1) Specificity first: child (depth=2) > parent (depth=1) > root (depth=0)
|
||||
2) Within the same depth: deny (0) beats allow (1)
|
||||
3) If no matching rule:
|
||||
- implicit_deny=True -> treat as allow=0, reason='implicit deny'
|
||||
- implicit_deny=False -> allow=None, reason=None
|
||||
|
||||
Returns: list of dict rows
|
||||
- parent, child, allow, reason, source_plugin, depth
|
||||
- resource (rendered "/parent/child" or "/parent" or "/")
|
||||
"""
|
||||
resolved_plugins: List[PermissionSQL] = []
|
||||
restriction_sqls: List[str] = []
|
||||
|
||||
for plugin in plugins:
|
||||
if callable(plugin) and not isinstance(plugin, PermissionSQL):
|
||||
resolved = plugin(action) # type: ignore[arg-type]
|
||||
else:
|
||||
resolved = plugin # type: ignore[assignment]
|
||||
if not isinstance(resolved, PermissionSQL):
|
||||
raise TypeError("Plugin providers must return PermissionSQL instances")
|
||||
resolved_plugins.append(resolved)
|
||||
|
||||
# Collect restriction SQL filters
|
||||
if resolved.restriction_sql:
|
||||
restriction_sqls.append(resolved.restriction_sql)
|
||||
|
||||
union_sql, rule_params = build_rules_union(actor, resolved_plugins)
|
||||
all_params = {
|
||||
**(candidate_params or {}),
|
||||
**rule_params,
|
||||
"action": action,
|
||||
}
|
||||
|
||||
sql = f"""
|
||||
WITH
|
||||
cands AS (
|
||||
{candidate_sql}
|
||||
),
|
||||
rules AS (
|
||||
{union_sql}
|
||||
),
|
||||
matched AS (
|
||||
SELECT
|
||||
c.parent, c.child,
|
||||
r.allow, r.reason, r.source_plugin,
|
||||
CASE
|
||||
WHEN r.child IS NOT NULL THEN 2 -- child-level (most specific)
|
||||
WHEN r.parent IS NOT NULL THEN 1 -- parent-level
|
||||
ELSE 0 -- root/global
|
||||
END AS depth
|
||||
FROM cands c
|
||||
JOIN rules r
|
||||
ON (r.parent IS NULL OR r.parent = c.parent)
|
||||
AND (r.child IS NULL OR r.child = c.child)
|
||||
),
|
||||
ranked AS (
|
||||
SELECT *,
|
||||
ROW_NUMBER() OVER (
|
||||
PARTITION BY parent, child
|
||||
ORDER BY
|
||||
depth DESC, -- specificity first
|
||||
CASE WHEN allow=0 THEN 0 ELSE 1 END, -- then deny over allow at same depth
|
||||
source_plugin -- stable tie-break
|
||||
) AS rn
|
||||
FROM matched
|
||||
),
|
||||
winner AS (
|
||||
SELECT parent, child,
|
||||
allow, reason, source_plugin, depth
|
||||
FROM ranked WHERE rn = 1
|
||||
)
|
||||
SELECT
|
||||
c.parent, c.child,
|
||||
COALESCE(w.allow, CASE WHEN :implicit_deny THEN 0 ELSE NULL END) AS allow,
|
||||
COALESCE(w.reason, CASE WHEN :implicit_deny THEN 'implicit deny' ELSE NULL END) AS reason,
|
||||
w.source_plugin,
|
||||
COALESCE(w.depth, -1) AS depth,
|
||||
:action AS action,
|
||||
CASE
|
||||
WHEN c.parent IS NULL THEN '/'
|
||||
WHEN c.child IS NULL THEN '/' || c.parent
|
||||
ELSE '/' || c.parent || '/' || c.child
|
||||
END AS resource
|
||||
FROM cands c
|
||||
LEFT JOIN winner w
|
||||
ON ((w.parent = c.parent) OR (w.parent IS NULL AND c.parent IS NULL))
|
||||
AND ((w.child = c.child ) OR (w.child IS NULL AND c.child IS NULL))
|
||||
ORDER BY c.parent, c.child
|
||||
"""
|
||||
|
||||
# If there are restriction filters, wrap the query with INTERSECT
|
||||
# This ensures only resources in the restriction allowlist are returned
|
||||
if restriction_sqls:
|
||||
# Start with the main query, but select only parent/child for the INTERSECT
|
||||
main_query_for_intersect = f"""
|
||||
WITH
|
||||
cands AS (
|
||||
{candidate_sql}
|
||||
),
|
||||
rules AS (
|
||||
{union_sql}
|
||||
),
|
||||
matched AS (
|
||||
SELECT
|
||||
c.parent, c.child,
|
||||
r.allow, r.reason, r.source_plugin,
|
||||
CASE
|
||||
WHEN r.child IS NOT NULL THEN 2 -- child-level (most specific)
|
||||
WHEN r.parent IS NOT NULL THEN 1 -- parent-level
|
||||
ELSE 0 -- root/global
|
||||
END AS depth
|
||||
FROM cands c
|
||||
JOIN rules r
|
||||
ON (r.parent IS NULL OR r.parent = c.parent)
|
||||
AND (r.child IS NULL OR r.child = c.child)
|
||||
),
|
||||
ranked AS (
|
||||
SELECT *,
|
||||
ROW_NUMBER() OVER (
|
||||
PARTITION BY parent, child
|
||||
ORDER BY
|
||||
depth DESC, -- specificity first
|
||||
CASE WHEN allow=0 THEN 0 ELSE 1 END, -- then deny over allow at same depth
|
||||
source_plugin -- stable tie-break
|
||||
) AS rn
|
||||
FROM matched
|
||||
),
|
||||
winner AS (
|
||||
SELECT parent, child,
|
||||
allow, reason, source_plugin, depth
|
||||
FROM ranked WHERE rn = 1
|
||||
),
|
||||
permitted_resources AS (
|
||||
SELECT c.parent, c.child
|
||||
FROM cands c
|
||||
LEFT JOIN winner w
|
||||
ON ((w.parent = c.parent) OR (w.parent IS NULL AND c.parent IS NULL))
|
||||
AND ((w.child = c.child ) OR (w.child IS NULL AND c.child IS NULL))
|
||||
WHERE COALESCE(w.allow, CASE WHEN :implicit_deny THEN 0 ELSE NULL END) = 1
|
||||
)
|
||||
SELECT parent, child FROM permitted_resources
|
||||
"""
|
||||
|
||||
# Build restriction list with INTERSECT (all must match)
|
||||
# Then filter to resources that match hierarchically
|
||||
# Wrap each restriction_sql in a subquery to avoid operator precedence issues
|
||||
# with UNION ALL inside the restriction SQL statements
|
||||
restriction_intersect = "\nINTERSECT\n".join(
|
||||
f"SELECT * FROM ({sql})" for sql in restriction_sqls
|
||||
)
|
||||
|
||||
# Combine: resources allowed by permissions AND in restriction allowlist
|
||||
# Database-level restrictions (parent, NULL) should match all children (parent, *)
|
||||
filtered_resources = f"""
|
||||
WITH restriction_list AS (
|
||||
{restriction_intersect}
|
||||
),
|
||||
permitted AS (
|
||||
{main_query_for_intersect}
|
||||
),
|
||||
filtered AS (
|
||||
SELECT p.parent, p.child
|
||||
FROM permitted p
|
||||
WHERE EXISTS (
|
||||
SELECT 1 FROM restriction_list r
|
||||
WHERE (r.parent = p.parent OR r.parent IS NULL)
|
||||
AND (r.child = p.child OR r.child IS NULL)
|
||||
)
|
||||
)
|
||||
"""
|
||||
|
||||
# Now join back to get full results for only the filtered resources
|
||||
sql = f"""
|
||||
{filtered_resources}
|
||||
, cands AS (
|
||||
{candidate_sql}
|
||||
),
|
||||
rules AS (
|
||||
{union_sql}
|
||||
),
|
||||
matched AS (
|
||||
SELECT
|
||||
c.parent, c.child,
|
||||
r.allow, r.reason, r.source_plugin,
|
||||
CASE
|
||||
WHEN r.child IS NOT NULL THEN 2 -- child-level (most specific)
|
||||
WHEN r.parent IS NOT NULL THEN 1 -- parent-level
|
||||
ELSE 0 -- root/global
|
||||
END AS depth
|
||||
FROM cands c
|
||||
JOIN rules r
|
||||
ON (r.parent IS NULL OR r.parent = c.parent)
|
||||
AND (r.child IS NULL OR r.child = c.child)
|
||||
),
|
||||
ranked AS (
|
||||
SELECT *,
|
||||
ROW_NUMBER() OVER (
|
||||
PARTITION BY parent, child
|
||||
ORDER BY
|
||||
depth DESC, -- specificity first
|
||||
CASE WHEN allow=0 THEN 0 ELSE 1 END, -- then deny over allow at same depth
|
||||
source_plugin -- stable tie-break
|
||||
) AS rn
|
||||
FROM matched
|
||||
),
|
||||
winner AS (
|
||||
SELECT parent, child,
|
||||
allow, reason, source_plugin, depth
|
||||
FROM ranked WHERE rn = 1
|
||||
)
|
||||
SELECT
|
||||
c.parent, c.child,
|
||||
COALESCE(w.allow, CASE WHEN :implicit_deny THEN 0 ELSE NULL END) AS allow,
|
||||
COALESCE(w.reason, CASE WHEN :implicit_deny THEN 'implicit deny' ELSE NULL END) AS reason,
|
||||
w.source_plugin,
|
||||
COALESCE(w.depth, -1) AS depth,
|
||||
:action AS action,
|
||||
CASE
|
||||
WHEN c.parent IS NULL THEN '/'
|
||||
WHEN c.child IS NULL THEN '/' || c.parent
|
||||
ELSE '/' || c.parent || '/' || c.child
|
||||
END AS resource
|
||||
FROM filtered c
|
||||
LEFT JOIN winner w
|
||||
ON ((w.parent = c.parent) OR (w.parent IS NULL AND c.parent IS NULL))
|
||||
AND ((w.child = c.child ) OR (w.child IS NULL AND c.child IS NULL))
|
||||
ORDER BY c.parent, c.child
|
||||
"""
|
||||
|
||||
rows_iter: Iterable[sqlite3.Row] = await db.execute(
|
||||
sql,
|
||||
{**all_params, "implicit_deny": 1 if implicit_deny else 0},
|
||||
)
|
||||
return [dict(r) for r in rows_iter]
|
||||
|
||||
|
||||
async def resolve_permissions_with_candidates(
|
||||
db,
|
||||
actor: dict | None,
|
||||
plugins: Sequence[Any],
|
||||
candidates: List[Tuple[str, str | None]],
|
||||
action: str,
|
||||
*,
|
||||
implicit_deny: bool = True,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Resolve permissions without any external candidate table by embedding
|
||||
the candidates as a UNION of parameterized SELECTs in a CTE.
|
||||
|
||||
candidates: list of (parent, child) where child can be None for parent-scoped actions.
|
||||
actor: actor dict (or None), made available as :actor (JSON), :actor_id, and :action
|
||||
"""
|
||||
# Build a small CTE for candidates.
|
||||
cand_rows_sql: List[str] = []
|
||||
cand_params: Dict[str, Any] = {}
|
||||
for i, (parent, child) in enumerate(candidates):
|
||||
pkey = f"cand_p_{i}"
|
||||
ckey = f"cand_c_{i}"
|
||||
cand_params[pkey] = parent
|
||||
cand_params[ckey] = child
|
||||
cand_rows_sql.append(f"SELECT :{pkey} AS parent, :{ckey} AS child")
|
||||
candidate_sql = (
|
||||
"\nUNION ALL\n".join(cand_rows_sql)
|
||||
if cand_rows_sql
|
||||
else "SELECT NULL AS parent, NULL AS child WHERE 0"
|
||||
)
|
||||
|
||||
return await resolve_permissions_from_catalog(
|
||||
db,
|
||||
actor,
|
||||
plugins,
|
||||
action,
|
||||
candidate_sql=candidate_sql,
|
||||
candidate_params=cand_params,
|
||||
implicit_deny=implicit_deny,
|
||||
)
|
||||
|
|
@ -62,13 +62,10 @@ class TestClient:
|
|||
follow_redirects=False,
|
||||
redirect_count=0,
|
||||
method="GET",
|
||||
params=None,
|
||||
cookies=None,
|
||||
if_none_match=None,
|
||||
headers=None,
|
||||
):
|
||||
if params:
|
||||
path += "?" + urlencode(params, doseq=True)
|
||||
return await self._request(
|
||||
path=path,
|
||||
follow_redirects=follow_redirects,
|
||||
|
|
|
|||
|
|
@ -1,2 +1,2 @@
|
|||
__version__ = "1.0a23"
|
||||
__version__ = "1.0a14"
|
||||
__version_info__ = tuple(__version__.split("."))
|
||||
|
|
|
|||
|
|
@ -1,2 +1,3 @@
|
|||
class Context:
|
||||
"Base class for all documented contexts"
|
||||
pass
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
import asyncio
|
||||
import csv
|
||||
import hashlib
|
||||
import json
|
||||
import sys
|
||||
import textwrap
|
||||
import time
|
||||
|
|
@ -9,6 +8,8 @@ import urllib
|
|||
from markupsafe import escape
|
||||
|
||||
|
||||
import pint
|
||||
|
||||
from datasette.database import QueryInterrupted
|
||||
from datasette.utils.asgi import Request
|
||||
from datasette.utils import (
|
||||
|
|
@ -31,6 +32,8 @@ from datasette.utils.asgi import (
|
|||
BadRequest,
|
||||
)
|
||||
|
||||
ureg = pint.UnitRegistry()
|
||||
|
||||
|
||||
class DatasetteError(Exception):
|
||||
def __init__(
|
||||
|
|
@ -159,7 +162,7 @@ class BaseView:
|
|||
template_context["alternate_url_json"] = alternate_url_json
|
||||
headers.update(
|
||||
{
|
||||
"Link": '<{}>; rel="alternate"; type="application/json+datasette"'.format(
|
||||
"Link": '{}; rel="alternate"; type="application/json+datasette"'.format(
|
||||
alternate_url_json
|
||||
)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,10 +9,10 @@ import os
|
|||
import re
|
||||
import sqlite_utils
|
||||
import textwrap
|
||||
from typing import List
|
||||
|
||||
from datasette.events import AlterTableEvent, CreateTableEvent, InsertRowsEvent
|
||||
from datasette.database import QueryInterrupted
|
||||
from datasette.resources import DatabaseResource, QueryResource
|
||||
from datasette.utils import (
|
||||
add_cors_headers,
|
||||
await_me_maybe,
|
||||
|
|
@ -35,7 +35,6 @@ from datasette.utils.asgi import AsgiFileDownload, NotFound, Response, Forbidden
|
|||
from datasette.plugins import pm
|
||||
|
||||
from .base import BaseView, DatasetteError, View, _error, stream_csv
|
||||
from . import Context
|
||||
|
||||
|
||||
class DatabaseView(View):
|
||||
|
|
@ -49,8 +48,10 @@ class DatabaseView(View):
|
|||
|
||||
visible, private = await datasette.check_visibility(
|
||||
request.actor,
|
||||
action="view-database",
|
||||
resource=DatabaseResource(database=database),
|
||||
permissions=[
|
||||
("view-database", database),
|
||||
"view-instance",
|
||||
],
|
||||
)
|
||||
if not visible:
|
||||
raise Forbidden("You do not have permission to view this database")
|
||||
|
|
@ -69,46 +70,40 @@ class DatabaseView(View):
|
|||
|
||||
metadata = await datasette.get_database_metadata(database)
|
||||
|
||||
# Get all tables/views this actor can see in bulk with private flag
|
||||
allowed_tables_page = await datasette.allowed_resources(
|
||||
"view-table",
|
||||
request.actor,
|
||||
parent=database,
|
||||
include_is_private=True,
|
||||
limit=1000,
|
||||
)
|
||||
# Create lookup dict for quick access
|
||||
allowed_dict = {r.child: r for r in allowed_tables_page.resources}
|
||||
|
||||
# Filter to just views
|
||||
view_names_set = set(await db.view_names())
|
||||
sql_views = [
|
||||
{"name": name, "private": allowed_dict[name].private}
|
||||
for name in allowed_dict
|
||||
if name in view_names_set
|
||||
]
|
||||
|
||||
tables = await get_tables(datasette, request, db, allowed_dict)
|
||||
|
||||
# Get allowed queries using the new permission system
|
||||
allowed_query_page = await datasette.allowed_resources(
|
||||
"view-query",
|
||||
request.actor,
|
||||
parent=database,
|
||||
include_is_private=True,
|
||||
limit=1000,
|
||||
)
|
||||
|
||||
# Build canned_queries list by looking up each allowed query
|
||||
all_queries = await datasette.get_canned_queries(database, request.actor)
|
||||
canned_queries = []
|
||||
for query_resource in allowed_query_page.resources:
|
||||
query_name = query_resource.child
|
||||
if query_name in all_queries:
|
||||
canned_queries.append(
|
||||
dict(all_queries[query_name], private=query_resource.private)
|
||||
sql_views = []
|
||||
for view_name in await db.view_names():
|
||||
view_visible, view_private = await datasette.check_visibility(
|
||||
request.actor,
|
||||
permissions=[
|
||||
("view-table", (database, view_name)),
|
||||
("view-database", database),
|
||||
"view-instance",
|
||||
],
|
||||
)
|
||||
if view_visible:
|
||||
sql_views.append(
|
||||
{
|
||||
"name": view_name,
|
||||
"private": view_private,
|
||||
}
|
||||
)
|
||||
|
||||
tables = await get_tables(datasette, request, db)
|
||||
canned_queries = []
|
||||
for query in (
|
||||
await datasette.get_canned_queries(database, request.actor)
|
||||
).values():
|
||||
query_visible, query_private = await datasette.check_visibility(
|
||||
request.actor,
|
||||
permissions=[
|
||||
("view-query", (database, query["name"])),
|
||||
("view-database", database),
|
||||
"view-instance",
|
||||
],
|
||||
)
|
||||
if query_visible:
|
||||
canned_queries.append(dict(query, private=query_private))
|
||||
|
||||
async def database_actions():
|
||||
links = []
|
||||
for hook in pm.hook.database_actions(
|
||||
|
|
@ -124,10 +119,8 @@ class DatabaseView(View):
|
|||
|
||||
attached_databases = [d.name for d in await db.attached_databases()]
|
||||
|
||||
allow_execute_sql = await datasette.allowed(
|
||||
action="execute-sql",
|
||||
resource=DatabaseResource(database=database),
|
||||
actor=request.actor,
|
||||
allow_execute_sql = await datasette.permission_allowed(
|
||||
request.actor, "execute-sql", database
|
||||
)
|
||||
json_data = {
|
||||
"database": database,
|
||||
|
|
@ -159,48 +152,35 @@ class DatabaseView(View):
|
|||
templates = (f"database-{to_css_class(database)}.html", "database.html")
|
||||
environment = datasette.get_jinja_environment(request)
|
||||
template = environment.select_template(templates)
|
||||
context = {
|
||||
**json_data,
|
||||
"database_color": db.color,
|
||||
"database_actions": database_actions,
|
||||
"show_hidden": request.args.get("_show_hidden"),
|
||||
"editable": True,
|
||||
"metadata": metadata,
|
||||
"allow_download": datasette.setting("allow_download")
|
||||
and not db.is_mutable
|
||||
and not db.is_memory,
|
||||
"attached_databases": attached_databases,
|
||||
"alternate_url_json": alternate_url_json,
|
||||
"select_templates": [
|
||||
f"{'*' if template_name == template.name else ''}{template_name}"
|
||||
for template_name in templates
|
||||
],
|
||||
"top_database": make_slot_function(
|
||||
"top_database", datasette, request, database=database
|
||||
),
|
||||
}
|
||||
return Response.html(
|
||||
await datasette.render_template(
|
||||
templates,
|
||||
DatabaseContext(
|
||||
database=database,
|
||||
private=private,
|
||||
path=datasette.urls.database(database),
|
||||
size=db.size,
|
||||
tables=tables,
|
||||
hidden_count=len([t for t in tables if t["hidden"]]),
|
||||
views=sql_views,
|
||||
queries=canned_queries,
|
||||
allow_execute_sql=allow_execute_sql,
|
||||
table_columns=(
|
||||
await _table_columns(datasette, database)
|
||||
if allow_execute_sql
|
||||
else {}
|
||||
),
|
||||
metadata=metadata,
|
||||
database_color=db.color,
|
||||
database_actions=database_actions,
|
||||
show_hidden=request.args.get("_show_hidden"),
|
||||
editable=True,
|
||||
count_limit=db.count_limit,
|
||||
allow_download=datasette.setting("allow_download")
|
||||
and not db.is_mutable
|
||||
and not db.is_memory,
|
||||
attached_databases=attached_databases,
|
||||
alternate_url_json=alternate_url_json,
|
||||
select_templates=[
|
||||
f"{'*' if template_name == template.name else ''}{template_name}"
|
||||
for template_name in templates
|
||||
],
|
||||
top_database=make_slot_function(
|
||||
"top_database", datasette, request, database=database
|
||||
),
|
||||
),
|
||||
context,
|
||||
request=request,
|
||||
view_name="database",
|
||||
),
|
||||
headers={
|
||||
"Link": '<{}>; rel="alternate"; type="application/json+datasette"'.format(
|
||||
"Link": '{}; rel="alternate"; type="application/json+datasette"'.format(
|
||||
alternate_url_json
|
||||
)
|
||||
},
|
||||
|
|
@ -208,56 +188,7 @@ class DatabaseView(View):
|
|||
|
||||
|
||||
@dataclass
|
||||
class DatabaseContext(Context):
|
||||
database: str = field(metadata={"help": "The name of the database"})
|
||||
private: bool = field(
|
||||
metadata={"help": "Boolean indicating if this is a private database"}
|
||||
)
|
||||
path: str = field(metadata={"help": "The URL path to this database"})
|
||||
size: int = field(metadata={"help": "The size of the database in bytes"})
|
||||
tables: list = field(metadata={"help": "List of table objects in the database"})
|
||||
hidden_count: int = field(metadata={"help": "Count of hidden tables"})
|
||||
views: list = field(metadata={"help": "List of view objects in the database"})
|
||||
queries: list = field(metadata={"help": "List of canned query objects"})
|
||||
allow_execute_sql: bool = field(
|
||||
metadata={"help": "Boolean indicating if custom SQL can be executed"}
|
||||
)
|
||||
table_columns: dict = field(
|
||||
metadata={"help": "Dictionary mapping table names to their column lists"}
|
||||
)
|
||||
metadata: dict = field(metadata={"help": "Metadata for the database"})
|
||||
database_color: str = field(metadata={"help": "The color assigned to the database"})
|
||||
database_actions: callable = field(
|
||||
metadata={
|
||||
"help": "Callable returning list of action links for the database menu"
|
||||
}
|
||||
)
|
||||
show_hidden: str = field(metadata={"help": "Value of _show_hidden query parameter"})
|
||||
editable: bool = field(
|
||||
metadata={"help": "Boolean indicating if the database is editable"}
|
||||
)
|
||||
count_limit: int = field(metadata={"help": "The maximum number of rows to count"})
|
||||
allow_download: bool = field(
|
||||
metadata={"help": "Boolean indicating if database download is allowed"}
|
||||
)
|
||||
attached_databases: list = field(
|
||||
metadata={"help": "List of names of attached databases"}
|
||||
)
|
||||
alternate_url_json: str = field(
|
||||
metadata={"help": "URL for the alternate JSON version of this page"}
|
||||
)
|
||||
select_templates: list = field(
|
||||
metadata={
|
||||
"help": "List of templates that were considered for rendering this page"
|
||||
}
|
||||
)
|
||||
top_database: callable = field(
|
||||
metadata={"help": "Callable to render the top_database slot"}
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class QueryContext(Context):
|
||||
class QueryContext:
|
||||
database: str = field(metadata={"help": "The name of the database being queried"})
|
||||
database_color: str = field(metadata={"help": "The color of the database"})
|
||||
query: dict = field(
|
||||
|
|
@ -338,25 +269,24 @@ class QueryContext(Context):
|
|||
)
|
||||
|
||||
|
||||
async def get_tables(datasette, request, db, allowed_dict):
|
||||
"""
|
||||
Get list of tables with metadata for the database view.
|
||||
|
||||
Args:
|
||||
datasette: The Datasette instance
|
||||
request: The current request
|
||||
db: The database
|
||||
allowed_dict: Dict mapping table name -> Resource object with .private attribute
|
||||
"""
|
||||
async def get_tables(datasette, request, db):
|
||||
tables = []
|
||||
table_counts = await db.table_counts(100)
|
||||
database = db.name
|
||||
table_counts = await db.table_counts(5)
|
||||
hidden_table_names = set(await db.hidden_table_names())
|
||||
all_foreign_keys = await db.get_all_foreign_keys()
|
||||
|
||||
for table in table_counts:
|
||||
if table not in allowed_dict:
|
||||
table_visible, table_private = await datasette.check_visibility(
|
||||
request.actor,
|
||||
permissions=[
|
||||
("view-table", (database, table)),
|
||||
("view-database", database),
|
||||
"view-instance",
|
||||
],
|
||||
)
|
||||
if not table_visible:
|
||||
continue
|
||||
|
||||
table_columns = await db.table_columns(table)
|
||||
tables.append(
|
||||
{
|
||||
|
|
@ -367,7 +297,7 @@ async def get_tables(datasette, request, db, allowed_dict):
|
|||
"hidden": table in hidden_table_names,
|
||||
"fts_table": await db.fts_table(table),
|
||||
"foreign_keys": all_foreign_keys[table],
|
||||
"private": allowed_dict[table].private,
|
||||
"private": table_private,
|
||||
}
|
||||
)
|
||||
tables.sort(key=lambda t: (t["hidden"], t["name"]))
|
||||
|
|
@ -375,13 +305,14 @@ async def get_tables(datasette, request, db, allowed_dict):
|
|||
|
||||
|
||||
async def database_download(request, datasette):
|
||||
from datasette.resources import DatabaseResource
|
||||
|
||||
database = tilde_decode(request.url_vars["database"])
|
||||
await datasette.ensure_permission(
|
||||
action="view-database-download",
|
||||
resource=DatabaseResource(database=database),
|
||||
actor=request.actor,
|
||||
await datasette.ensure_permissions(
|
||||
request.actor,
|
||||
[
|
||||
("view-database-download", database),
|
||||
("view-database", database),
|
||||
"view-instance",
|
||||
],
|
||||
)
|
||||
try:
|
||||
db = datasette.get_database(route=database)
|
||||
|
|
@ -459,10 +390,7 @@ class QueryView(View):
|
|||
or request.args.get("_json")
|
||||
or params.get("_json")
|
||||
)
|
||||
params_for_query = MagicParameters(
|
||||
canned_query["sql"], params, request, datasette
|
||||
)
|
||||
await params_for_query.execute_params()
|
||||
params_for_query = MagicParameters(params, request, datasette)
|
||||
ok = None
|
||||
redirect_url = None
|
||||
try:
|
||||
|
|
@ -515,17 +443,6 @@ class QueryView(View):
|
|||
db = await datasette.resolve_database(request)
|
||||
database = db.name
|
||||
|
||||
# Get all tables/views this actor can see in bulk with private flag
|
||||
allowed_tables_page = await datasette.allowed_resources(
|
||||
"view-table",
|
||||
request.actor,
|
||||
parent=database,
|
||||
include_is_private=True,
|
||||
limit=1000,
|
||||
)
|
||||
# Create lookup dict for quick access
|
||||
allowed_dict = {r.child: r for r in allowed_tables_page.resources}
|
||||
|
||||
# Are we a canned query?
|
||||
canned_query = None
|
||||
canned_query_write = False
|
||||
|
|
@ -546,17 +463,18 @@ class QueryView(View):
|
|||
# Respect canned query permissions
|
||||
visible, private = await datasette.check_visibility(
|
||||
request.actor,
|
||||
action="view-query",
|
||||
resource=QueryResource(database=database, query=canned_query["name"]),
|
||||
permissions=[
|
||||
("view-query", (database, canned_query["name"])),
|
||||
("view-database", database),
|
||||
"view-instance",
|
||||
],
|
||||
)
|
||||
if not visible:
|
||||
raise Forbidden("You do not have permission to view this query")
|
||||
|
||||
else:
|
||||
await datasette.ensure_permission(
|
||||
action="execute-sql",
|
||||
resource=DatabaseResource(database=database),
|
||||
actor=request.actor,
|
||||
await datasette.ensure_permissions(
|
||||
request.actor, [("execute-sql", database)]
|
||||
)
|
||||
|
||||
# Flattened because of ?sql=&name1=value1&name2=value2 feature
|
||||
|
|
@ -604,8 +522,7 @@ class QueryView(View):
|
|||
validate_sql_select(sql)
|
||||
else:
|
||||
# Canned queries can run magic parameters
|
||||
params_for_query = MagicParameters(sql, params, request, datasette)
|
||||
await params_for_query.execute_params()
|
||||
params_for_query = MagicParameters(params, request, datasette)
|
||||
results = await datasette.execute(
|
||||
database, sql, params_for_query, truncate=True, **extra_args
|
||||
)
|
||||
|
|
@ -708,7 +625,7 @@ class QueryView(View):
|
|||
data = {}
|
||||
headers.update(
|
||||
{
|
||||
"Link": '<{}>; rel="alternate"; type="application/json+datasette"'.format(
|
||||
"Link": '{}; rel="alternate"; type="application/json+datasette"'.format(
|
||||
alternate_url_json
|
||||
)
|
||||
}
|
||||
|
|
@ -735,10 +652,8 @@ class QueryView(View):
|
|||
path_with_format(request=request, format=key)
|
||||
)
|
||||
|
||||
allow_execute_sql = await datasette.allowed(
|
||||
action="execute-sql",
|
||||
resource=DatabaseResource(database=database),
|
||||
actor=request.actor,
|
||||
allow_execute_sql = await datasette.permission_allowed(
|
||||
request.actor, "execute-sql", database
|
||||
)
|
||||
|
||||
show_hide_hidden = ""
|
||||
|
|
@ -826,7 +741,7 @@ class QueryView(View):
|
|||
show_hide_text=show_hide_text,
|
||||
editable=not canned_query,
|
||||
allow_execute_sql=allow_execute_sql,
|
||||
tables=await get_tables(datasette, request, db, allowed_dict),
|
||||
tables=await get_tables(datasette, request, db),
|
||||
named_parameter_values=named_parameter_values,
|
||||
edit_sql_url=edit_sql_url,
|
||||
display_rows=await display_rows(
|
||||
|
|
@ -876,26 +791,14 @@ class QueryView(View):
|
|||
|
||||
|
||||
class MagicParameters(dict):
|
||||
def __init__(self, sql, data, request, datasette):
|
||||
def __init__(self, data, request, datasette):
|
||||
super().__init__(data)
|
||||
self._sql = sql
|
||||
self._request = request
|
||||
self._magics = dict(
|
||||
itertools.chain.from_iterable(
|
||||
pm.hook.register_magic_parameters(datasette=datasette)
|
||||
)
|
||||
)
|
||||
self._prepared = {}
|
||||
|
||||
async def execute_params(self):
|
||||
for key in derive_named_parameters(self._sql):
|
||||
if key.startswith("_") and key.count("_") >= 2:
|
||||
prefix, suffix = key[1:].split("_", 1)
|
||||
if prefix in self._magics:
|
||||
result = await await_me_maybe(
|
||||
self._magics[prefix](suffix, self._request)
|
||||
)
|
||||
self._prepared[key] = result
|
||||
|
||||
def __len__(self):
|
||||
# Workaround for 'Incorrect number of bindings' error
|
||||
|
|
@ -904,9 +807,6 @@ class MagicParameters(dict):
|
|||
|
||||
def __getitem__(self, key):
|
||||
if key.startswith("_") and key.count("_") >= 2:
|
||||
if key in self._prepared:
|
||||
return self._prepared[key]
|
||||
# Try the other route
|
||||
prefix, suffix = key[1:].split("_", 1)
|
||||
if prefix in self._magics:
|
||||
try:
|
||||
|
|
@ -948,10 +848,8 @@ class TableCreateView(BaseView):
|
|||
database_name = db.name
|
||||
|
||||
# Must have create-table permission
|
||||
if not await self.ds.allowed(
|
||||
action="create-table",
|
||||
resource=DatabaseResource(database=database_name),
|
||||
actor=request.actor,
|
||||
if not await self.ds.permission_allowed(
|
||||
request.actor, "create-table", resource=database_name
|
||||
):
|
||||
return _error(["Permission denied"], 403)
|
||||
|
||||
|
|
@ -987,10 +885,8 @@ class TableCreateView(BaseView):
|
|||
|
||||
if replace:
|
||||
# Must have update-row permission
|
||||
if not await self.ds.allowed(
|
||||
action="update-row",
|
||||
resource=DatabaseResource(database=database_name),
|
||||
actor=request.actor,
|
||||
if not await self.ds.permission_allowed(
|
||||
request.actor, "update-row", resource=database_name
|
||||
):
|
||||
return _error(["Permission denied: need update-row"], 403)
|
||||
|
||||
|
|
@ -1013,10 +909,8 @@ class TableCreateView(BaseView):
|
|||
|
||||
if rows or row:
|
||||
# Must have insert-row permission
|
||||
if not await self.ds.allowed(
|
||||
action="insert-row",
|
||||
resource=DatabaseResource(database=database_name),
|
||||
actor=request.actor,
|
||||
if not await self.ds.permission_allowed(
|
||||
request.actor, "insert-row", resource=database_name
|
||||
):
|
||||
return _error(["Permission denied: need insert-row"], 403)
|
||||
|
||||
|
|
@ -1028,10 +922,8 @@ class TableCreateView(BaseView):
|
|||
else:
|
||||
# alter=True only if they request it AND they have permission
|
||||
if data.get("alter"):
|
||||
if not await self.ds.allowed(
|
||||
action="alter-table",
|
||||
resource=DatabaseResource(database=database_name),
|
||||
actor=request.actor,
|
||||
if not await self.ds.permission_allowed(
|
||||
request.actor, "alter-table", resource=database_name
|
||||
):
|
||||
return _error(["Permission denied: need alter-table"], 403)
|
||||
alter = True
|
||||
|
|
|
|||
|
|
@ -25,49 +25,28 @@ class IndexView(BaseView):
|
|||
|
||||
async def get(self, request):
|
||||
as_format = request.url_vars["format"]
|
||||
await self.ds.ensure_permission(action="view-instance", actor=request.actor)
|
||||
|
||||
# Get all allowed databases and tables in bulk
|
||||
db_page = await self.ds.allowed_resources(
|
||||
"view-database", request.actor, include_is_private=True
|
||||
)
|
||||
allowed_databases = [r async for r in db_page.all()]
|
||||
allowed_db_dict = {r.parent: r for r in allowed_databases}
|
||||
|
||||
# Group tables by database
|
||||
tables_by_db = {}
|
||||
table_page = await self.ds.allowed_resources(
|
||||
"view-table", request.actor, include_is_private=True
|
||||
)
|
||||
async for t in table_page.all():
|
||||
if t.parent not in tables_by_db:
|
||||
tables_by_db[t.parent] = {}
|
||||
tables_by_db[t.parent][t.child] = t
|
||||
|
||||
await self.ds.ensure_permissions(request.actor, ["view-instance"])
|
||||
databases = []
|
||||
# Iterate over allowed databases instead of all databases
|
||||
for name in allowed_db_dict.keys():
|
||||
db = self.ds.databases[name]
|
||||
database_private = allowed_db_dict[name].private
|
||||
|
||||
# Get allowed tables/views for this database
|
||||
allowed_for_db = tables_by_db.get(name, {})
|
||||
|
||||
# Get table names from allowed set instead of db.table_names()
|
||||
table_names = [child_name for child_name in allowed_for_db.keys()]
|
||||
|
||||
for name, db in self.ds.databases.items():
|
||||
database_visible, database_private = await self.ds.check_visibility(
|
||||
request.actor,
|
||||
"view-database",
|
||||
name,
|
||||
)
|
||||
if not database_visible:
|
||||
continue
|
||||
table_names = await db.table_names()
|
||||
hidden_table_names = set(await db.hidden_table_names())
|
||||
|
||||
# Determine which allowed items are views
|
||||
view_names_set = set(await db.view_names())
|
||||
views = [
|
||||
{"name": child_name, "private": resource.private}
|
||||
for child_name, resource in allowed_for_db.items()
|
||||
if child_name in view_names_set
|
||||
]
|
||||
|
||||
# Filter to just tables (not views) for table processing
|
||||
table_names = [name for name in table_names if name not in view_names_set]
|
||||
views = []
|
||||
for view_name in await db.view_names():
|
||||
view_visible, view_private = await self.ds.check_visibility(
|
||||
request.actor,
|
||||
"view-table",
|
||||
(name, view_name),
|
||||
)
|
||||
if view_visible:
|
||||
views.append({"name": view_name, "private": view_private})
|
||||
|
||||
# Perform counts only for immutable or DBS with <= COUNT_TABLE_LIMIT tables
|
||||
table_counts = {}
|
||||
|
|
@ -79,10 +58,13 @@ class IndexView(BaseView):
|
|||
|
||||
tables = {}
|
||||
for table in table_names:
|
||||
# Check if table is in allowed set
|
||||
if table not in allowed_for_db:
|
||||
visible, private = await self.ds.check_visibility(
|
||||
request.actor,
|
||||
"view-table",
|
||||
(name, table),
|
||||
)
|
||||
if not visible:
|
||||
continue
|
||||
|
||||
table_columns = await db.table_columns(table)
|
||||
tables[table] = {
|
||||
"name": table,
|
||||
|
|
@ -92,7 +74,7 @@ class IndexView(BaseView):
|
|||
"hidden": table in hidden_table_names,
|
||||
"fts_table": await db.fts_table(table),
|
||||
"num_relationships_for_sorting": 0,
|
||||
"private": allowed_for_db[table].private,
|
||||
"private": private,
|
||||
}
|
||||
|
||||
if request.args.get("_sort") == "relationships" or not table_counts:
|
||||
|
|
@ -170,21 +152,19 @@ class IndexView(BaseView):
|
|||
extra_links = await await_me_maybe(hook)
|
||||
if extra_links:
|
||||
homepage_actions.extend(extra_links)
|
||||
alternative_homepage = request.path == "/-/"
|
||||
return await self.render(
|
||||
["default:index.html" if alternative_homepage else "index.html"],
|
||||
["index.html"],
|
||||
request=request,
|
||||
context={
|
||||
"databases": databases,
|
||||
"metadata": await self.ds.get_instance_metadata(),
|
||||
"datasette_version": __version__,
|
||||
"private": not await self.ds.allowed(
|
||||
action="view-instance", actor=None
|
||||
"private": not await self.ds.permission_allowed(
|
||||
None, "view-instance"
|
||||
),
|
||||
"top_homepage": make_slot_function(
|
||||
"top_homepage", self.ds, request
|
||||
),
|
||||
"homepage_actions": homepage_actions,
|
||||
"noindex": request.path == "/-/",
|
||||
},
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
from datasette.utils.asgi import NotFound, Forbidden, Response
|
||||
from datasette.database import QueryInterrupted
|
||||
from datasette.events import UpdateRowEvent, DeleteRowEvent
|
||||
from datasette.resources import TableResource
|
||||
from .base import DataView, BaseView, _error
|
||||
from datasette.utils import (
|
||||
await_me_maybe,
|
||||
|
|
@ -28,8 +27,11 @@ class RowView(DataView):
|
|||
# Ensure user has permission to view this row
|
||||
visible, private = await self.ds.check_visibility(
|
||||
request.actor,
|
||||
action="view-table",
|
||||
resource=TableResource(database=database, table=table),
|
||||
permissions=[
|
||||
("view-table", (database, table)),
|
||||
("view-database", database),
|
||||
"view-instance",
|
||||
],
|
||||
)
|
||||
if not visible:
|
||||
raise Forbidden("You do not have permission to view this table")
|
||||
|
|
@ -101,6 +103,7 @@ class RowView(DataView):
|
|||
"columns": columns,
|
||||
"primary_keys": resolved.pks,
|
||||
"primary_key_values": pk_values,
|
||||
"units": (await self.ds.table_config(database, table)).get("units", {}),
|
||||
}
|
||||
|
||||
if "foreign_key_tables" in (request.args.get("_extras") or "").split(","):
|
||||
|
|
@ -182,10 +185,8 @@ async def _resolve_row_and_check_permission(datasette, request, permission):
|
|||
return False, _error(["Record not found: {}".format(e.pk_values)], 404)
|
||||
|
||||
# Ensure user has permission to delete this row
|
||||
if not await datasette.allowed(
|
||||
action=permission,
|
||||
resource=TableResource(database=resolved.db.name, table=resolved.table),
|
||||
actor=request.actor,
|
||||
if not await datasette.permission_allowed(
|
||||
request.actor, permission, resource=(resolved.db.name, resolved.table)
|
||||
):
|
||||
return False, _error(["Permission denied"], 403)
|
||||
|
||||
|
|
@ -247,7 +248,7 @@ class RowUpdateView(BaseView):
|
|||
|
||||
if not isinstance(data, dict):
|
||||
return _error(["JSON must be a dictionary"])
|
||||
if "update" not in data or not isinstance(data["update"], dict):
|
||||
if not "update" in data or not isinstance(data["update"], dict):
|
||||
return _error(["JSON must contain an update dictionary"])
|
||||
|
||||
invalid_keys = set(data.keys()) - {"update", "return", "alter"}
|
||||
|
|
@ -257,10 +258,8 @@ class RowUpdateView(BaseView):
|
|||
update = data["update"]
|
||||
|
||||
alter = data.get("alter")
|
||||
if alter and not await self.ds.allowed(
|
||||
action="alter-table",
|
||||
resource=TableResource(database=resolved.db.name, table=resolved.table),
|
||||
actor=request.actor,
|
||||
if alter and not await self.ds.permission_allowed(
|
||||
request.actor, "alter-table", resource=(resolved.db.name, resolved.table)
|
||||
):
|
||||
return _error(["Permission denied for alter-table"], 403)
|
||||
|
||||
|
|
@ -279,7 +278,8 @@ class RowUpdateView(BaseView):
|
|||
results = await resolved.db.execute(
|
||||
resolved.sql, resolved.params, truncate=True
|
||||
)
|
||||
result["row"] = results.dicts()[0]
|
||||
rows = list(results.rows)
|
||||
result["row"] = dict(rows[0])
|
||||
|
||||
await self.ds.track_event(
|
||||
UpdateRowEvent(
|
||||
|
|
|
|||
|
|
@ -1,7 +1,5 @@
|
|||
import json
|
||||
import logging
|
||||
from datasette.events import LogoutEvent, LoginEvent, CreateTokenEvent
|
||||
from datasette.resources import DatabaseResource, TableResource
|
||||
from datasette.utils.asgi import Response, Forbidden
|
||||
from datasette.utils import (
|
||||
actor_matches_allow,
|
||||
|
|
@ -14,20 +12,8 @@ import secrets
|
|||
import urllib
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _resource_path(parent, child):
|
||||
if parent is None:
|
||||
return "/"
|
||||
if child is None:
|
||||
return f"/{parent}"
|
||||
return f"/{parent}/{child}"
|
||||
|
||||
|
||||
class JsonDataView(BaseView):
|
||||
name = "json_data"
|
||||
template = "show_json.html" # Can be overridden in subclasses
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
|
|
@ -36,50 +22,45 @@ class JsonDataView(BaseView):
|
|||
data_callback,
|
||||
needs_request=False,
|
||||
permission="view-instance",
|
||||
template=None,
|
||||
):
|
||||
self.ds = datasette
|
||||
self.filename = filename
|
||||
self.data_callback = data_callback
|
||||
self.needs_request = needs_request
|
||||
self.permission = permission
|
||||
if template is not None:
|
||||
self.template = template
|
||||
|
||||
async def get(self, request):
|
||||
as_format = request.url_vars["format"]
|
||||
if self.permission:
|
||||
await self.ds.ensure_permission(action=self.permission, actor=request.actor)
|
||||
await self.ds.ensure_permissions(request.actor, [self.permission])
|
||||
if self.needs_request:
|
||||
data = self.data_callback(request)
|
||||
else:
|
||||
data = self.data_callback()
|
||||
|
||||
# Return JSON or HTML depending on format parameter
|
||||
as_format = request.url_vars.get("format")
|
||||
if as_format:
|
||||
headers = {}
|
||||
if self.ds.cors:
|
||||
add_cors_headers(headers)
|
||||
return Response.json(data, headers=headers)
|
||||
return Response(
|
||||
json.dumps(data, default=repr),
|
||||
content_type="application/json; charset=utf-8",
|
||||
headers=headers,
|
||||
)
|
||||
|
||||
else:
|
||||
context = {
|
||||
"filename": self.filename,
|
||||
"data": data,
|
||||
"data_json": json.dumps(data, indent=4, default=repr),
|
||||
}
|
||||
# Add has_debug_permission if this view requires permissions-debug
|
||||
if self.permission == "permissions-debug":
|
||||
context["has_debug_permission"] = True
|
||||
return await self.render(
|
||||
[self.template],
|
||||
["show_json.html"],
|
||||
request=request,
|
||||
context=context,
|
||||
context={
|
||||
"filename": self.filename,
|
||||
"data_json": json.dumps(data, indent=4, default=repr),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class PatternPortfolioView(View):
|
||||
async def get(self, request, datasette):
|
||||
await datasette.ensure_permission(action="view-instance", actor=request.actor)
|
||||
await datasette.ensure_permissions(request.actor, ["view-instance"])
|
||||
return Response.html(
|
||||
await datasette.render_template(
|
||||
"patterns.html",
|
||||
|
|
@ -104,7 +85,7 @@ class AuthTokenView(BaseView):
|
|||
self.ds._root_token = None
|
||||
response = Response.redirect(self.ds.urls.instance())
|
||||
root_actor = {"id": "root"}
|
||||
self.ds.set_actor_cookie(response, root_actor)
|
||||
response.set_cookie("ds_actor", self.ds.sign({"a": root_actor}, "actor"))
|
||||
await self.ds.track_event(LoginEvent(actor=root_actor))
|
||||
return response
|
||||
else:
|
||||
|
|
@ -126,7 +107,7 @@ class LogoutView(BaseView):
|
|||
|
||||
async def post(self, request):
|
||||
response = Response.redirect(self.ds.urls.instance())
|
||||
self.ds.delete_actor_cookie(response)
|
||||
response.set_cookie("ds_actor", "", expires=0, max_age=0)
|
||||
self.ds.add_message(request, "You are now logged out", self.ds.WARNING)
|
||||
await self.ds.track_event(LogoutEvent(actor=request.actor))
|
||||
return response
|
||||
|
|
@ -137,422 +118,59 @@ class PermissionsDebugView(BaseView):
|
|||
has_json_alternate = False
|
||||
|
||||
async def get(self, request):
|
||||
await self.ds.ensure_permission(action="view-instance", actor=request.actor)
|
||||
await self.ds.ensure_permission(action="permissions-debug", actor=request.actor)
|
||||
filter_ = request.args.get("filter") or "all"
|
||||
permission_checks = list(reversed(self.ds._permission_checks))
|
||||
if filter_ == "exclude-yours":
|
||||
permission_checks = [
|
||||
check
|
||||
for check in permission_checks
|
||||
if (check.actor or {}).get("id") != request.actor["id"]
|
||||
]
|
||||
elif filter_ == "only-yours":
|
||||
permission_checks = [
|
||||
check
|
||||
for check in permission_checks
|
||||
if (check.actor or {}).get("id") == request.actor["id"]
|
||||
]
|
||||
await self.ds.ensure_permissions(request.actor, ["view-instance"])
|
||||
if not await self.ds.permission_allowed(request.actor, "permissions-debug"):
|
||||
raise Forbidden("Permission denied")
|
||||
return await self.render(
|
||||
["debug_permissions_playground.html"],
|
||||
["permissions_debug.html"],
|
||||
request,
|
||||
# list() avoids error if check is performed during template render:
|
||||
{
|
||||
"permission_checks": permission_checks,
|
||||
"filter": filter_,
|
||||
"has_debug_permission": True,
|
||||
"permission_checks": list(reversed(self.ds._permission_checks)),
|
||||
"permissions": [
|
||||
{
|
||||
"name": p.name,
|
||||
"abbr": p.abbr,
|
||||
"description": p.description,
|
||||
"takes_parent": p.takes_parent,
|
||||
"takes_child": p.takes_child,
|
||||
"takes_database": p.takes_database,
|
||||
"takes_resource": p.takes_resource,
|
||||
"default": p.default,
|
||||
}
|
||||
for p in self.ds.actions.values()
|
||||
for p in self.ds.permissions.values()
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
async def post(self, request):
|
||||
await self.ds.ensure_permission(action="view-instance", actor=request.actor)
|
||||
await self.ds.ensure_permission(action="permissions-debug", actor=request.actor)
|
||||
await self.ds.ensure_permissions(request.actor, ["view-instance"])
|
||||
if not await self.ds.permission_allowed(request.actor, "permissions-debug"):
|
||||
raise Forbidden("Permission denied")
|
||||
vars = await request.post_vars()
|
||||
actor = json.loads(vars["actor"])
|
||||
permission = vars["permission"]
|
||||
parent = vars.get("resource_1") or None
|
||||
child = vars.get("resource_2") or None
|
||||
|
||||
response, status = await _check_permission_for_actor(
|
||||
self.ds, permission, parent, child, actor
|
||||
resource_1 = vars["resource_1"]
|
||||
resource_2 = vars["resource_2"]
|
||||
resource = []
|
||||
if resource_1:
|
||||
resource.append(resource_1)
|
||||
if resource_2:
|
||||
resource.append(resource_2)
|
||||
resource = tuple(resource)
|
||||
if len(resource) == 1:
|
||||
resource = resource[0]
|
||||
result = await self.ds.permission_allowed(
|
||||
actor, permission, resource, default="USE_DEFAULT"
|
||||
)
|
||||
return Response.json(response, status=status)
|
||||
|
||||
|
||||
class AllowedResourcesView(BaseView):
|
||||
name = "allowed"
|
||||
has_json_alternate = False
|
||||
|
||||
async def get(self, request):
|
||||
await self.ds.refresh_schemas()
|
||||
|
||||
# Check if user has permissions-debug (to show sensitive fields)
|
||||
has_debug_permission = await self.ds.allowed(
|
||||
action="permissions-debug", actor=request.actor
|
||||
return Response.json(
|
||||
{
|
||||
"actor": actor,
|
||||
"permission": permission,
|
||||
"resource": resource,
|
||||
"result": result,
|
||||
"default": self.ds.permissions[permission].default,
|
||||
}
|
||||
)
|
||||
|
||||
# Check if this is a request for JSON (has .json extension)
|
||||
as_format = request.url_vars.get("format")
|
||||
|
||||
if not as_format:
|
||||
# Render the HTML form (even if query parameters are present)
|
||||
# Put most common/interesting actions first
|
||||
priority_actions = [
|
||||
"view-instance",
|
||||
"view-database",
|
||||
"view-table",
|
||||
"view-query",
|
||||
"execute-sql",
|
||||
"insert-row",
|
||||
"update-row",
|
||||
"delete-row",
|
||||
]
|
||||
actions = list(self.ds.actions.keys())
|
||||
# Priority actions first (in order), then remaining alphabetically
|
||||
sorted_actions = [a for a in priority_actions if a in actions]
|
||||
sorted_actions.extend(
|
||||
sorted(a for a in actions if a not in priority_actions)
|
||||
)
|
||||
|
||||
return await self.render(
|
||||
["debug_allowed.html"],
|
||||
request,
|
||||
{
|
||||
"supported_actions": sorted_actions,
|
||||
"has_debug_permission": has_debug_permission,
|
||||
},
|
||||
)
|
||||
|
||||
payload, status = await self._allowed_payload(request, has_debug_permission)
|
||||
headers = {}
|
||||
if self.ds.cors:
|
||||
add_cors_headers(headers)
|
||||
return Response.json(payload, status=status, headers=headers)
|
||||
|
||||
async def _allowed_payload(self, request, has_debug_permission):
|
||||
action = request.args.get("action")
|
||||
if not action:
|
||||
return {"error": "action parameter is required"}, 400
|
||||
if action not in self.ds.actions:
|
||||
return {"error": f"Unknown action: {action}"}, 404
|
||||
|
||||
actor = request.actor if isinstance(request.actor, dict) else None
|
||||
actor_id = actor.get("id") if actor else None
|
||||
parent_filter = request.args.get("parent")
|
||||
child_filter = request.args.get("child")
|
||||
if child_filter and not parent_filter:
|
||||
return {"error": "parent must be provided when child is specified"}, 400
|
||||
|
||||
try:
|
||||
page = int(request.args.get("page", "1"))
|
||||
page_size = int(request.args.get("page_size", "50"))
|
||||
except ValueError:
|
||||
return {"error": "page and page_size must be integers"}, 400
|
||||
if page < 1:
|
||||
return {"error": "page must be >= 1"}, 400
|
||||
if page_size < 1:
|
||||
return {"error": "page_size must be >= 1"}, 400
|
||||
max_page_size = 200
|
||||
if page_size > max_page_size:
|
||||
page_size = max_page_size
|
||||
offset = (page - 1) * page_size
|
||||
|
||||
# Use the simplified allowed_resources method
|
||||
# Collect all resources with optional reasons for debugging
|
||||
try:
|
||||
allowed_rows = []
|
||||
result = await self.ds.allowed_resources(
|
||||
action=action,
|
||||
actor=actor,
|
||||
parent=parent_filter,
|
||||
include_reasons=has_debug_permission,
|
||||
)
|
||||
async for resource in result.all():
|
||||
parent_val = resource.parent
|
||||
child_val = resource.child
|
||||
|
||||
# Build resource path
|
||||
if parent_val is None:
|
||||
resource_path = "/"
|
||||
elif child_val is None:
|
||||
resource_path = f"/{parent_val}"
|
||||
else:
|
||||
resource_path = f"/{parent_val}/{child_val}"
|
||||
|
||||
row = {
|
||||
"parent": parent_val,
|
||||
"child": child_val,
|
||||
"resource": resource_path,
|
||||
}
|
||||
|
||||
# Add reason if we have it (from include_reasons=True)
|
||||
if has_debug_permission and hasattr(resource, "reasons"):
|
||||
row["reason"] = resource.reasons
|
||||
|
||||
allowed_rows.append(row)
|
||||
except Exception:
|
||||
# If catalog tables don't exist yet, return empty results
|
||||
return (
|
||||
{
|
||||
"action": action,
|
||||
"actor_id": actor_id,
|
||||
"page": page,
|
||||
"page_size": page_size,
|
||||
"total": 0,
|
||||
"items": [],
|
||||
},
|
||||
200,
|
||||
)
|
||||
|
||||
# Apply child filter if specified
|
||||
if child_filter is not None:
|
||||
allowed_rows = [row for row in allowed_rows if row["child"] == child_filter]
|
||||
|
||||
# Pagination
|
||||
total = len(allowed_rows)
|
||||
paged_rows = allowed_rows[offset : offset + page_size]
|
||||
|
||||
# Items are already in the right format
|
||||
items = paged_rows
|
||||
|
||||
def build_page_url(page_number):
|
||||
pairs = []
|
||||
for key in request.args:
|
||||
if key in {"page", "page_size"}:
|
||||
continue
|
||||
for value in request.args.getlist(key):
|
||||
pairs.append((key, value))
|
||||
pairs.append(("page", str(page_number)))
|
||||
pairs.append(("page_size", str(page_size)))
|
||||
query = urllib.parse.urlencode(pairs)
|
||||
return f"{request.path}?{query}"
|
||||
|
||||
response = {
|
||||
"action": action,
|
||||
"actor_id": actor_id,
|
||||
"page": page,
|
||||
"page_size": page_size,
|
||||
"total": total,
|
||||
"items": items,
|
||||
}
|
||||
|
||||
if total > offset + page_size:
|
||||
response["next_url"] = build_page_url(page + 1)
|
||||
if page > 1:
|
||||
response["previous_url"] = build_page_url(page - 1)
|
||||
|
||||
return response, 200
|
||||
|
||||
|
||||
class PermissionRulesView(BaseView):
|
||||
name = "permission_rules"
|
||||
has_json_alternate = False
|
||||
|
||||
async def get(self, request):
|
||||
await self.ds.ensure_permission(action="view-instance", actor=request.actor)
|
||||
await self.ds.ensure_permission(action="permissions-debug", actor=request.actor)
|
||||
|
||||
# Check if this is a request for JSON (has .json extension)
|
||||
as_format = request.url_vars.get("format")
|
||||
|
||||
if not as_format:
|
||||
# Render the HTML form (even if query parameters are present)
|
||||
return await self.render(
|
||||
["debug_rules.html"],
|
||||
request,
|
||||
{
|
||||
"sorted_actions": sorted(self.ds.actions.keys()),
|
||||
"has_debug_permission": True,
|
||||
},
|
||||
)
|
||||
|
||||
# JSON API - action parameter is required
|
||||
action = request.args.get("action")
|
||||
if not action:
|
||||
return Response.json({"error": "action parameter is required"}, status=400)
|
||||
if action not in self.ds.actions:
|
||||
return Response.json({"error": f"Unknown action: {action}"}, status=404)
|
||||
|
||||
actor = request.actor if isinstance(request.actor, dict) else None
|
||||
|
||||
try:
|
||||
page = int(request.args.get("page", "1"))
|
||||
page_size = int(request.args.get("page_size", "50"))
|
||||
except ValueError:
|
||||
return Response.json(
|
||||
{"error": "page and page_size must be integers"}, status=400
|
||||
)
|
||||
if page < 1:
|
||||
return Response.json({"error": "page must be >= 1"}, status=400)
|
||||
if page_size < 1:
|
||||
return Response.json({"error": "page_size must be >= 1"}, status=400)
|
||||
max_page_size = 200
|
||||
if page_size > max_page_size:
|
||||
page_size = max_page_size
|
||||
offset = (page - 1) * page_size
|
||||
|
||||
from datasette.utils.actions_sql import build_permission_rules_sql
|
||||
|
||||
union_sql, union_params, restriction_sqls = await build_permission_rules_sql(
|
||||
self.ds, actor, action
|
||||
)
|
||||
await self.ds.refresh_schemas()
|
||||
db = self.ds.get_internal_database()
|
||||
|
||||
count_query = f"""
|
||||
WITH rules AS (
|
||||
{union_sql}
|
||||
)
|
||||
SELECT COUNT(*) AS count
|
||||
FROM rules
|
||||
"""
|
||||
count_row = (await db.execute(count_query, union_params)).first()
|
||||
total = count_row["count"] if count_row else 0
|
||||
|
||||
data_query = f"""
|
||||
WITH rules AS (
|
||||
{union_sql}
|
||||
)
|
||||
SELECT parent, child, allow, reason, source_plugin
|
||||
FROM rules
|
||||
ORDER BY allow DESC, (parent IS NOT NULL), parent, child
|
||||
LIMIT :limit OFFSET :offset
|
||||
"""
|
||||
params = {**union_params, "limit": page_size, "offset": offset}
|
||||
rows = await db.execute(data_query, params)
|
||||
|
||||
items = []
|
||||
for row in rows:
|
||||
parent = row["parent"]
|
||||
child = row["child"]
|
||||
items.append(
|
||||
{
|
||||
"parent": parent,
|
||||
"child": child,
|
||||
"resource": _resource_path(parent, child),
|
||||
"allow": row["allow"],
|
||||
"reason": row["reason"],
|
||||
"source_plugin": row["source_plugin"],
|
||||
}
|
||||
)
|
||||
|
||||
def build_page_url(page_number):
|
||||
pairs = []
|
||||
for key in request.args:
|
||||
if key in {"page", "page_size"}:
|
||||
continue
|
||||
for value in request.args.getlist(key):
|
||||
pairs.append((key, value))
|
||||
pairs.append(("page", str(page_number)))
|
||||
pairs.append(("page_size", str(page_size)))
|
||||
query = urllib.parse.urlencode(pairs)
|
||||
return f"{request.path}?{query}"
|
||||
|
||||
response = {
|
||||
"action": action,
|
||||
"actor_id": (actor or {}).get("id") if actor else None,
|
||||
"page": page,
|
||||
"page_size": page_size,
|
||||
"total": total,
|
||||
"items": items,
|
||||
}
|
||||
|
||||
if total > offset + page_size:
|
||||
response["next_url"] = build_page_url(page + 1)
|
||||
if page > 1:
|
||||
response["previous_url"] = build_page_url(page - 1)
|
||||
|
||||
headers = {}
|
||||
if self.ds.cors:
|
||||
add_cors_headers(headers)
|
||||
return Response.json(response, headers=headers)
|
||||
|
||||
|
||||
async def _check_permission_for_actor(ds, action, parent, child, actor):
|
||||
"""Shared logic for checking permissions. Returns a dict with check results."""
|
||||
if action not in ds.actions:
|
||||
return {"error": f"Unknown action: {action}"}, 404
|
||||
|
||||
if child and not parent:
|
||||
return {"error": "parent is required when child is provided"}, 400
|
||||
|
||||
# Use the action's properties to create the appropriate resource object
|
||||
action_obj = ds.actions.get(action)
|
||||
if not action_obj:
|
||||
return {"error": f"Unknown action: {action}"}, 400
|
||||
|
||||
# Global actions (no resource_class) don't have a resource
|
||||
if action_obj.resource_class is None:
|
||||
resource_obj = None
|
||||
elif action_obj.takes_parent and action_obj.takes_child:
|
||||
# Child-level resource (e.g., TableResource, QueryResource)
|
||||
resource_obj = action_obj.resource_class(database=parent, table=child)
|
||||
elif action_obj.takes_parent:
|
||||
# Parent-level resource (e.g., DatabaseResource)
|
||||
resource_obj = action_obj.resource_class(database=parent)
|
||||
else:
|
||||
# This shouldn't happen given validation in Action.__post_init__
|
||||
return {"error": f"Invalid action configuration: {action}"}, 500
|
||||
|
||||
allowed = await ds.allowed(action=action, resource=resource_obj, actor=actor)
|
||||
|
||||
response = {
|
||||
"action": action,
|
||||
"allowed": bool(allowed),
|
||||
"resource": {
|
||||
"parent": parent,
|
||||
"child": child,
|
||||
"path": _resource_path(parent, child),
|
||||
},
|
||||
}
|
||||
|
||||
if actor and "id" in actor:
|
||||
response["actor_id"] = actor["id"]
|
||||
|
||||
return response, 200
|
||||
|
||||
|
||||
class PermissionCheckView(BaseView):
|
||||
name = "permission_check"
|
||||
has_json_alternate = False
|
||||
|
||||
async def get(self, request):
|
||||
await self.ds.ensure_permission(action="permissions-debug", actor=request.actor)
|
||||
as_format = request.url_vars.get("format")
|
||||
|
||||
if not as_format:
|
||||
return await self.render(
|
||||
["debug_check.html"],
|
||||
request,
|
||||
{
|
||||
"sorted_actions": sorted(self.ds.actions.keys()),
|
||||
"has_debug_permission": True,
|
||||
},
|
||||
)
|
||||
|
||||
# JSON API - action parameter is required
|
||||
action = request.args.get("action")
|
||||
if not action:
|
||||
return Response.json({"error": "action parameter is required"}, status=400)
|
||||
|
||||
parent = request.args.get("parent")
|
||||
child = request.args.get("child")
|
||||
|
||||
response, status = await _check_permission_for_actor(
|
||||
self.ds, action, parent, child, request.actor
|
||||
)
|
||||
return Response.json(response, status=status)
|
||||
|
||||
|
||||
class AllowDebugView(BaseView):
|
||||
name = "allow_debug"
|
||||
|
|
@ -585,9 +203,6 @@ class AllowDebugView(BaseView):
|
|||
"error": "\n\n".join(errors) if errors else "",
|
||||
"actor_input": actor_input,
|
||||
"allow_input": allow_input,
|
||||
"has_debug_permission": await self.ds.allowed(
|
||||
action="permissions-debug", actor=request.actor
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -597,11 +212,11 @@ class MessagesDebugView(BaseView):
|
|||
has_json_alternate = False
|
||||
|
||||
async def get(self, request):
|
||||
await self.ds.ensure_permission(action="view-instance", actor=request.actor)
|
||||
await self.ds.ensure_permissions(request.actor, ["view-instance"])
|
||||
return await self.render(["messages_debug.html"], request)
|
||||
|
||||
async def post(self, request):
|
||||
await self.ds.ensure_permission(action="view-instance", actor=request.actor)
|
||||
await self.ds.ensure_permissions(request.actor, ["view-instance"])
|
||||
post = await request.post_vars()
|
||||
message = post.get("message", "")
|
||||
message_type = post.get("message_type") or "INFO"
|
||||
|
|
@ -637,45 +252,45 @@ class CreateTokenView(BaseView):
|
|||
async def shared(self, request):
|
||||
self.check_permission(request)
|
||||
# Build list of databases and tables the user has permission to view
|
||||
db_page = await self.ds.allowed_resources("view-database", request.actor)
|
||||
allowed_databases = [r async for r in db_page.all()]
|
||||
|
||||
table_page = await self.ds.allowed_resources("view-table", request.actor)
|
||||
allowed_tables = [r async for r in table_page.all()]
|
||||
|
||||
# Build database -> tables mapping
|
||||
database_with_tables = []
|
||||
for db_resource in allowed_databases:
|
||||
database_name = db_resource.parent
|
||||
if database_name == "_memory":
|
||||
for database in self.ds.databases.values():
|
||||
if database.name == "_memory":
|
||||
continue
|
||||
|
||||
# Find tables for this database
|
||||
if not await self.ds.permission_allowed(
|
||||
request.actor, "view-database", database.name
|
||||
):
|
||||
continue
|
||||
hidden_tables = await database.hidden_table_names()
|
||||
tables = []
|
||||
for table_resource in allowed_tables:
|
||||
if table_resource.parent == database_name:
|
||||
tables.append(
|
||||
{
|
||||
"name": table_resource.child,
|
||||
"encoded": tilde_encode(table_resource.child),
|
||||
}
|
||||
)
|
||||
|
||||
for table in await database.table_names():
|
||||
if table in hidden_tables:
|
||||
continue
|
||||
if not await self.ds.permission_allowed(
|
||||
request.actor,
|
||||
"view-table",
|
||||
resource=(database.name, table),
|
||||
):
|
||||
continue
|
||||
tables.append({"name": table, "encoded": tilde_encode(table)})
|
||||
database_with_tables.append(
|
||||
{
|
||||
"name": database_name,
|
||||
"encoded": tilde_encode(database_name),
|
||||
"name": database.name,
|
||||
"encoded": tilde_encode(database.name),
|
||||
"tables": tables,
|
||||
}
|
||||
)
|
||||
return {
|
||||
"actor": request.actor,
|
||||
"all_actions": self.ds.actions.keys(),
|
||||
"database_actions": [
|
||||
key for key, value in self.ds.actions.items() if value.takes_parent
|
||||
"all_permissions": self.ds.permissions.keys(),
|
||||
"database_permissions": [
|
||||
key
|
||||
for key, value in self.ds.permissions.items()
|
||||
if value.takes_database
|
||||
],
|
||||
"child_actions": [
|
||||
key for key, value in self.ds.actions.items() if value.takes_child
|
||||
"resource_permissions": [
|
||||
key
|
||||
for key, value in self.ds.permissions.items()
|
||||
if value.takes_resource
|
||||
],
|
||||
"database_with_tables": database_with_tables,
|
||||
}
|
||||
|
|
@ -761,10 +376,10 @@ class ApiExplorerView(BaseView):
|
|||
async def example_links(self, request):
|
||||
databases = []
|
||||
for name, db in self.ds.databases.items():
|
||||
if name == "_internal":
|
||||
continue
|
||||
database_visible, _ = await self.ds.check_visibility(
|
||||
request.actor,
|
||||
action="view-database",
|
||||
resource=DatabaseResource(database=name),
|
||||
request.actor, permissions=[("view-database", name), "view-instance"]
|
||||
)
|
||||
if not database_visible:
|
||||
continue
|
||||
|
|
@ -773,8 +388,11 @@ class ApiExplorerView(BaseView):
|
|||
for table in table_names:
|
||||
visible, _ = await self.ds.check_visibility(
|
||||
request.actor,
|
||||
action="view-table",
|
||||
resource=TableResource(database=name, table=table),
|
||||
permissions=[
|
||||
("view-table", (name, table)),
|
||||
("view-database", name),
|
||||
"view-instance",
|
||||
],
|
||||
)
|
||||
if not visible:
|
||||
continue
|
||||
|
|
@ -791,10 +409,8 @@ class ApiExplorerView(BaseView):
|
|||
if not db.is_mutable:
|
||||
continue
|
||||
|
||||
if await self.ds.allowed(
|
||||
action="insert-row",
|
||||
resource=TableResource(database=name, table=table),
|
||||
actor=request.actor,
|
||||
if await self.ds.permission_allowed(
|
||||
request.actor, "insert-row", (name, table)
|
||||
):
|
||||
pks = await db.primary_keys(table)
|
||||
table_links.extend(
|
||||
|
|
@ -829,10 +445,8 @@ class ApiExplorerView(BaseView):
|
|||
},
|
||||
]
|
||||
)
|
||||
if await self.ds.allowed(
|
||||
action="drop-table",
|
||||
resource=TableResource(database=name, table=table),
|
||||
actor=request.actor,
|
||||
if await self.ds.permission_allowed(
|
||||
request.actor, "drop-table", (name, table)
|
||||
):
|
||||
table_links.append(
|
||||
{
|
||||
|
|
@ -844,11 +458,7 @@ class ApiExplorerView(BaseView):
|
|||
)
|
||||
database_links = []
|
||||
if (
|
||||
await self.ds.allowed(
|
||||
action="create-table",
|
||||
resource=DatabaseResource(database=name),
|
||||
actor=request.actor,
|
||||
)
|
||||
await self.ds.permission_allowed(request.actor, "create-table", name)
|
||||
and db.is_mutable
|
||||
):
|
||||
database_links.append(
|
||||
|
|
@ -881,7 +491,7 @@ class ApiExplorerView(BaseView):
|
|||
async def get(self, request):
|
||||
visible, private = await self.ds.check_visibility(
|
||||
request.actor,
|
||||
action="view-instance",
|
||||
permissions=["view-instance"],
|
||||
)
|
||||
if not visible:
|
||||
raise Forbidden("You do not have permission to view this instance")
|
||||
|
|
@ -906,253 +516,3 @@ class ApiExplorerView(BaseView):
|
|||
"private": private,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class TablesView(BaseView):
|
||||
"""
|
||||
Simple endpoint that uses the new allowed_resources() API.
|
||||
Returns JSON list of all tables the actor can view.
|
||||
|
||||
Supports ?q=foo+bar to filter tables matching .*foo.*bar.* pattern,
|
||||
ordered by shortest name first.
|
||||
"""
|
||||
|
||||
name = "tables"
|
||||
has_json_alternate = False
|
||||
|
||||
async def get(self, request):
|
||||
# Get search query parameter
|
||||
q = request.args.get("q", "").strip()
|
||||
|
||||
# Get SQL for allowed resources using the permission system
|
||||
permission_sql, params = await self.ds.allowed_resources_sql(
|
||||
action="view-table", actor=request.actor
|
||||
)
|
||||
|
||||
# Build query based on whether we have a search query
|
||||
if q:
|
||||
# Build SQL LIKE pattern from search terms
|
||||
# Split search terms by whitespace and build pattern: %term1%term2%term3%
|
||||
terms = q.split()
|
||||
pattern = "%" + "%".join(terms) + "%"
|
||||
|
||||
# Build query with CTE to filter by search pattern
|
||||
sql = f"""
|
||||
WITH allowed_tables AS (
|
||||
{permission_sql}
|
||||
)
|
||||
SELECT parent, child
|
||||
FROM allowed_tables
|
||||
WHERE child LIKE :pattern COLLATE NOCASE
|
||||
ORDER BY length(child), child
|
||||
"""
|
||||
all_params = {**params, "pattern": pattern}
|
||||
else:
|
||||
# No search query - return all tables, ordered by name
|
||||
# Fetch 101 to detect if we need to truncate
|
||||
sql = f"""
|
||||
WITH allowed_tables AS (
|
||||
{permission_sql}
|
||||
)
|
||||
SELECT parent, child
|
||||
FROM allowed_tables
|
||||
ORDER BY parent, child
|
||||
LIMIT 101
|
||||
"""
|
||||
all_params = params
|
||||
|
||||
# Execute against internal database
|
||||
result = await self.ds.get_internal_database().execute(sql, all_params)
|
||||
|
||||
# Build response with truncation
|
||||
rows = list(result.rows)
|
||||
truncated = len(rows) > 100
|
||||
if truncated:
|
||||
rows = rows[:100]
|
||||
|
||||
matches = [
|
||||
{
|
||||
"name": f"{row['parent']}: {row['child']}",
|
||||
"url": self.ds.urls.table(row["parent"], row["child"]),
|
||||
}
|
||||
for row in rows
|
||||
]
|
||||
|
||||
return Response.json({"matches": matches, "truncated": truncated})
|
||||
|
||||
|
||||
class SchemaBaseView(BaseView):
|
||||
"""Base class for schema views with common response formatting."""
|
||||
|
||||
has_json_alternate = False
|
||||
|
||||
async def get_database_schema(self, database_name):
|
||||
"""Get schema SQL for a database."""
|
||||
db = self.ds.databases[database_name]
|
||||
result = await db.execute(
|
||||
"select group_concat(sql, ';' || CHAR(10)) as schema from sqlite_master where sql is not null"
|
||||
)
|
||||
row = result.first()
|
||||
return row["schema"] if row and row["schema"] else ""
|
||||
|
||||
def format_json_response(self, data):
|
||||
"""Format data as JSON response with CORS headers if needed."""
|
||||
headers = {}
|
||||
if self.ds.cors:
|
||||
add_cors_headers(headers)
|
||||
return Response.json(data, headers=headers)
|
||||
|
||||
def format_error_response(self, error_message, format_, status=404):
|
||||
"""Format error response based on requested format."""
|
||||
if format_ == "json":
|
||||
headers = {}
|
||||
if self.ds.cors:
|
||||
add_cors_headers(headers)
|
||||
return Response.json(
|
||||
{"ok": False, "error": error_message}, status=status, headers=headers
|
||||
)
|
||||
else:
|
||||
return Response.text(error_message, status=status)
|
||||
|
||||
def format_markdown_response(self, heading, schema):
|
||||
"""Format schema as Markdown response."""
|
||||
md_output = f"# {heading}\n\n```sql\n{schema}\n```\n"
|
||||
return Response.text(
|
||||
md_output, headers={"content-type": "text/markdown; charset=utf-8"}
|
||||
)
|
||||
|
||||
async def format_html_response(
|
||||
self, request, schemas, is_instance=False, table_name=None
|
||||
):
|
||||
"""Format schema as HTML response."""
|
||||
context = {
|
||||
"schemas": schemas,
|
||||
"is_instance": is_instance,
|
||||
}
|
||||
if table_name:
|
||||
context["table_name"] = table_name
|
||||
return await self.render(["schema.html"], request=request, context=context)
|
||||
|
||||
|
||||
class InstanceSchemaView(SchemaBaseView):
|
||||
"""
|
||||
Displays schema for all databases in the instance.
|
||||
Supports HTML, JSON, and Markdown formats.
|
||||
"""
|
||||
|
||||
name = "instance_schema"
|
||||
|
||||
async def get(self, request):
|
||||
format_ = request.url_vars.get("format") or "html"
|
||||
|
||||
# Get all databases the actor can view
|
||||
allowed_databases_page = await self.ds.allowed_resources(
|
||||
"view-database",
|
||||
request.actor,
|
||||
)
|
||||
allowed_databases = [r.parent async for r in allowed_databases_page.all()]
|
||||
|
||||
# Get schema for each database
|
||||
schemas = []
|
||||
for database_name in allowed_databases:
|
||||
schema = await self.get_database_schema(database_name)
|
||||
schemas.append({"database": database_name, "schema": schema})
|
||||
|
||||
if format_ == "json":
|
||||
return self.format_json_response({"schemas": schemas})
|
||||
elif format_ == "md":
|
||||
md_parts = [
|
||||
f"# Schema for {item['database']}\n\n```sql\n{item['schema']}\n```"
|
||||
for item in schemas
|
||||
]
|
||||
return Response.text(
|
||||
"\n\n".join(md_parts),
|
||||
headers={"content-type": "text/markdown; charset=utf-8"},
|
||||
)
|
||||
else:
|
||||
return await self.format_html_response(request, schemas, is_instance=True)
|
||||
|
||||
|
||||
class DatabaseSchemaView(SchemaBaseView):
|
||||
"""
|
||||
Displays schema for a specific database.
|
||||
Supports HTML, JSON, and Markdown formats.
|
||||
"""
|
||||
|
||||
name = "database_schema"
|
||||
|
||||
async def get(self, request):
|
||||
database_name = request.url_vars["database"]
|
||||
format_ = request.url_vars.get("format") or "html"
|
||||
|
||||
# Check if database exists
|
||||
if database_name not in self.ds.databases:
|
||||
return self.format_error_response("Database not found", format_)
|
||||
|
||||
# Check view-database permission
|
||||
await self.ds.ensure_permission(
|
||||
action="view-database",
|
||||
resource=DatabaseResource(database=database_name),
|
||||
actor=request.actor,
|
||||
)
|
||||
|
||||
schema = await self.get_database_schema(database_name)
|
||||
|
||||
if format_ == "json":
|
||||
return self.format_json_response(
|
||||
{"database": database_name, "schema": schema}
|
||||
)
|
||||
elif format_ == "md":
|
||||
return self.format_markdown_response(f"Schema for {database_name}", schema)
|
||||
else:
|
||||
schemas = [{"database": database_name, "schema": schema}]
|
||||
return await self.format_html_response(request, schemas)
|
||||
|
||||
|
||||
class TableSchemaView(SchemaBaseView):
|
||||
"""
|
||||
Displays schema for a specific table.
|
||||
Supports HTML, JSON, and Markdown formats.
|
||||
"""
|
||||
|
||||
name = "table_schema"
|
||||
|
||||
async def get(self, request):
|
||||
database_name = request.url_vars["database"]
|
||||
table_name = request.url_vars["table"]
|
||||
format_ = request.url_vars.get("format") or "html"
|
||||
|
||||
# Check view-table permission
|
||||
await self.ds.ensure_permission(
|
||||
action="view-table",
|
||||
resource=TableResource(database=database_name, table=table_name),
|
||||
actor=request.actor,
|
||||
)
|
||||
|
||||
# Get schema for the table
|
||||
db = self.ds.databases[database_name]
|
||||
result = await db.execute(
|
||||
"select sql from sqlite_master where name = ? and sql is not null",
|
||||
[table_name],
|
||||
)
|
||||
row = result.first()
|
||||
|
||||
# Return 404 if table doesn't exist
|
||||
if not row or not row["sql"]:
|
||||
return self.format_error_response("Table not found", format_)
|
||||
|
||||
schema = row["sql"]
|
||||
|
||||
if format_ == "json":
|
||||
return self.format_json_response(
|
||||
{"database": database_name, "table": table_name, "schema": schema}
|
||||
)
|
||||
elif format_ == "md":
|
||||
return self.format_markdown_response(
|
||||
f"Schema for {database_name}.{table_name}", schema
|
||||
)
|
||||
else:
|
||||
schemas = [{"database": database_name, "schema": schema}]
|
||||
return await self.format_html_response(
|
||||
request, schemas, table_name=table_name
|
||||
)
|
||||
|
|
|
|||
|
|
@ -15,7 +15,6 @@ from datasette.events import (
|
|||
UpsertRowsEvent,
|
||||
)
|
||||
from datasette import tracer
|
||||
from datasette.resources import DatabaseResource, TableResource
|
||||
from datasette.utils import (
|
||||
add_cors_headers,
|
||||
await_me_maybe,
|
||||
|
|
@ -44,7 +43,7 @@ from datasette.utils import (
|
|||
from datasette.utils.asgi import BadRequest, Forbidden, NotFound, Response
|
||||
from datasette.filters import Filters
|
||||
import sqlite_utils
|
||||
from .base import BaseView, DatasetteError, _error, stream_csv
|
||||
from .base import BaseView, DatasetteError, ureg, _error, stream_csv
|
||||
from .database import QueryView
|
||||
|
||||
LINK_WITH_LABEL = (
|
||||
|
|
@ -166,6 +165,7 @@ async def display_columns_and_rows(
|
|||
column_details = {
|
||||
col.name: col for col in await db.table_column_details(table_name)
|
||||
}
|
||||
table_config = await datasette.table_config(database_name, table_name)
|
||||
pks = await db.primary_keys(table_name)
|
||||
pks_for_display = pks
|
||||
if not pks_for_display:
|
||||
|
|
@ -273,7 +273,7 @@ async def display_columns_and_rows(
|
|||
link_template = LINK_WITH_LABEL if (label != value) else LINK_WITH_VALUE
|
||||
display_value = markupsafe.Markup(
|
||||
link_template.format(
|
||||
database=tilde_encode(database_name),
|
||||
database=database_name,
|
||||
base_url=base_url,
|
||||
table=tilde_encode(other_table),
|
||||
link_id=tilde_encode(str(value)),
|
||||
|
|
@ -292,6 +292,14 @@ async def display_columns_and_rows(
|
|||
),
|
||||
)
|
||||
)
|
||||
elif column in table_config.get("units", {}) and value != "":
|
||||
# Interpret units using pint
|
||||
value = value * ureg(table_config["units"][column])
|
||||
# Pint uses floating point which sometimes introduces errors in the compact
|
||||
# representation, which we have to round off to avoid ugliness. In the vast
|
||||
# majority of cases this rounding will be inconsequential. I hope.
|
||||
value = round(value.to_compact(), 6)
|
||||
display_value = markupsafe.Markup(f"{value:~P}".replace(" ", " "))
|
||||
else:
|
||||
display_value = str(value)
|
||||
if truncate_cells and len(display_value) > truncate_cells:
|
||||
|
|
@ -356,7 +364,7 @@ class TableInsertView(BaseView):
|
|||
def _errors(errors):
|
||||
return None, errors, {}
|
||||
|
||||
if not request.headers.get("content-type").startswith("application/json"):
|
||||
if request.headers.get("content-type") != "application/json":
|
||||
# TODO: handle form-encoded data
|
||||
return _errors(["Invalid content-type, must be application/json"])
|
||||
body = await request.post_body()
|
||||
|
|
@ -449,15 +457,11 @@ class TableInsertView(BaseView):
|
|||
if upsert:
|
||||
# Must have insert-row AND upsert-row permissions
|
||||
if not (
|
||||
await self.ds.allowed(
|
||||
action="insert-row",
|
||||
resource=TableResource(database=database_name, table=table_name),
|
||||
actor=request.actor,
|
||||
await self.ds.permission_allowed(
|
||||
request.actor, "insert-row", resource=(database_name, table_name)
|
||||
)
|
||||
and await self.ds.allowed(
|
||||
action="update-row",
|
||||
resource=TableResource(database=database_name, table=table_name),
|
||||
actor=request.actor,
|
||||
and await self.ds.permission_allowed(
|
||||
request.actor, "update-row", resource=(database_name, table_name)
|
||||
)
|
||||
):
|
||||
return _error(
|
||||
|
|
@ -465,10 +469,8 @@ class TableInsertView(BaseView):
|
|||
)
|
||||
else:
|
||||
# Must have insert-row permission
|
||||
if not await self.ds.allowed(
|
||||
action="insert-row",
|
||||
resource=TableResource(database=database_name, table=table_name),
|
||||
actor=request.actor,
|
||||
if not await self.ds.permission_allowed(
|
||||
request.actor, "insert-row", resource=(database_name, table_name)
|
||||
):
|
||||
return _error(["Permission denied"], 403)
|
||||
|
||||
|
|
@ -497,20 +499,16 @@ class TableInsertView(BaseView):
|
|||
if upsert and (ignore or replace):
|
||||
return _error(["Upsert does not support ignore or replace"], 400)
|
||||
|
||||
if replace and not await self.ds.allowed(
|
||||
action="update-row",
|
||||
resource=TableResource(database=database_name, table=table_name),
|
||||
actor=request.actor,
|
||||
if replace and not await self.ds.permission_allowed(
|
||||
request.actor, "update-row", resource=(database_name, table_name)
|
||||
):
|
||||
return _error(['Permission denied: need update-row to use "replace"'], 403)
|
||||
|
||||
initial_schema = None
|
||||
if alter:
|
||||
# Must have alter-table permission
|
||||
if not await self.ds.allowed(
|
||||
action="alter-table",
|
||||
resource=TableResource(database=database_name, table=table_name),
|
||||
actor=request.actor,
|
||||
if not await self.ds.permission_allowed(
|
||||
request.actor, "alter-table", resource=(database_name, table_name)
|
||||
):
|
||||
return _error(["Permission denied for alter-table"], 403)
|
||||
# Track initial schema to check if it changed later
|
||||
|
|
@ -568,7 +566,7 @@ class TableInsertView(BaseView):
|
|||
),
|
||||
args,
|
||||
)
|
||||
result["rows"] = fetched_rows.dicts()
|
||||
result["rows"] = [dict(r) for r in fetched_rows.rows]
|
||||
else:
|
||||
result["rows"] = rows
|
||||
# We track the number of rows requested, but do not attempt to show which were actually
|
||||
|
|
@ -637,10 +635,8 @@ class TableDropView(BaseView):
|
|||
db = self.ds.get_database(database_name)
|
||||
if not await db.table_exists(table_name):
|
||||
return _error(["Table not found: {}".format(table_name)], 404)
|
||||
if not await self.ds.allowed(
|
||||
action="drop-table",
|
||||
resource=TableResource(database=database_name, table=table_name),
|
||||
actor=request.actor,
|
||||
if not await self.ds.permission_allowed(
|
||||
request.actor, "drop-table", resource=(database_name, table_name)
|
||||
):
|
||||
return _error(["Permission denied"], 403)
|
||||
if not db.is_mutable:
|
||||
|
|
@ -906,7 +902,7 @@ async def table_view_traced(datasette, request):
|
|||
)
|
||||
headers.update(
|
||||
{
|
||||
"Link": '<{}>; rel="alternate"; type="application/json+datasette"'.format(
|
||||
"Link": '{}; rel="alternate"; type="application/json+datasette"'.format(
|
||||
alternate_url_json
|
||||
)
|
||||
}
|
||||
|
|
@ -926,10 +922,8 @@ async def table_view_traced(datasette, request):
|
|||
"true" if datasette.setting("allow_facet") else "false"
|
||||
),
|
||||
is_sortable=any(c["sortable"] for c in data["display_columns"]),
|
||||
allow_execute_sql=await datasette.allowed(
|
||||
action="execute-sql",
|
||||
resource=DatabaseResource(database=resolved.db.name),
|
||||
actor=request.actor,
|
||||
allow_execute_sql=await datasette.permission_allowed(
|
||||
request.actor, "execute-sql", resolved.db.name
|
||||
),
|
||||
query_ms=1.2,
|
||||
select_templates=[
|
||||
|
|
@ -943,7 +937,6 @@ async def table_view_traced(datasette, request):
|
|||
database=resolved.db.name,
|
||||
table=resolved.table,
|
||||
),
|
||||
count_limit=resolved.db.count_limit,
|
||||
),
|
||||
request=request,
|
||||
view_name="table",
|
||||
|
|
@ -976,8 +969,11 @@ async def table_view_data(
|
|||
# Can this user view it?
|
||||
visible, private = await datasette.check_visibility(
|
||||
request.actor,
|
||||
action="view-table",
|
||||
resource=TableResource(database=database_name, table=table_name),
|
||||
permissions=[
|
||||
("view-table", (database_name, table_name)),
|
||||
("view-database", database_name),
|
||||
"view-instance",
|
||||
],
|
||||
)
|
||||
if not visible:
|
||||
raise Forbidden("You do not have permission to view this table")
|
||||
|
|
@ -1021,6 +1017,7 @@ async def table_view_data(
|
|||
nofacet = True
|
||||
|
||||
table_metadata = await datasette.table_config(database_name, table_name)
|
||||
units = table_metadata.get("units", {})
|
||||
|
||||
# Arguments that start with _ and don't contain a __ are
|
||||
# special - things like ?_search= - and should not be
|
||||
|
|
@ -1032,7 +1029,7 @@ async def table_view_data(
|
|||
filter_args.append((key, v))
|
||||
|
||||
# Build where clauses from query string arguments
|
||||
filters = Filters(sorted(filter_args))
|
||||
filters = Filters(sorted(filter_args), units, ureg)
|
||||
where_clauses, params = filters.build_where_clauses(table_name)
|
||||
|
||||
# Execute filters_from_request plugin hooks - including the default
|
||||
|
|
@ -1292,9 +1289,6 @@ async def table_view_data(
|
|||
if extra_extras:
|
||||
extras.update(extra_extras)
|
||||
|
||||
async def extra_count_sql():
|
||||
return count_sql
|
||||
|
||||
async def extra_count():
|
||||
"Total count of rows matching these filters"
|
||||
# Calculate the total count for this query
|
||||
|
|
@ -1314,11 +1308,8 @@ async def table_view_data(
|
|||
|
||||
# Otherwise run a select count(*) ...
|
||||
if count_sql and count is None and not nocount:
|
||||
count_sql_limited = (
|
||||
f"select count(*) from (select * {from_sql} limit 10001)"
|
||||
)
|
||||
try:
|
||||
count_rows = list(await db.execute(count_sql_limited, from_sql_params))
|
||||
count_rows = list(await db.execute(count_sql, from_sql_params))
|
||||
count = count_rows[0][0]
|
||||
except QueryInterrupted:
|
||||
pass
|
||||
|
|
@ -1633,7 +1624,6 @@ async def table_view_data(
|
|||
"facet_results",
|
||||
"facets_timed_out",
|
||||
"count",
|
||||
"count_sql",
|
||||
"human_description_en",
|
||||
"next_url",
|
||||
"metadata",
|
||||
|
|
@ -1666,7 +1656,6 @@ async def table_view_data(
|
|||
|
||||
registry = Registry(
|
||||
extra_count,
|
||||
extra_count_sql,
|
||||
extra_facet_results,
|
||||
extra_facets_timed_out,
|
||||
extra_suggested_facets,
|
||||
|
|
|
|||
|
|
@ -6,18 +6,18 @@
|
|||
|
||||
Datasette doesn't require authentication by default. Any visitor to a Datasette instance can explore the full data and execute read-only SQL queries.
|
||||
|
||||
Datasette can be configured to only allow authenticated users, or to control which databases, tables, and queries can be accessed by the public or by specific users. Datasette's plugin system can be used to add many different styles of authentication, such as user accounts, single sign-on or API keys.
|
||||
Datasette's plugin system can be used to add many different styles of authentication, such as user accounts, single sign-on or API keys.
|
||||
|
||||
.. _authentication_actor:
|
||||
|
||||
Actors
|
||||
======
|
||||
|
||||
Through plugins, Datasette can support both authenticated users (with cookies) and authenticated API clients (via authentication tokens). The word "actor" is used to cover both of these cases.
|
||||
Through plugins, Datasette can support both authenticated users (with cookies) and authenticated API agents (via authentication tokens). The word "actor" is used to cover both of these cases.
|
||||
|
||||
Every request to Datasette has an associated actor value, available in the code as ``request.actor``. This can be ``None`` for unauthenticated requests, or a JSON compatible Python dictionary for authenticated users or API clients.
|
||||
Every request to Datasette has an associated actor value, available in the code as ``request.actor``. This can be ``None`` for unauthenticated requests, or a JSON compatible Python dictionary for authenticated users or API agents.
|
||||
|
||||
The actor dictionary can be any shape - the design of that data structure is left up to the plugins. Actors should always include a unique ``"id"`` string, as demonstrated by the "root" actor below.
|
||||
The actor dictionary can be any shape - the design of that data structure is left up to the plugins. A useful convention is to include an ``"id"`` string, as demonstrated by the "root" actor below.
|
||||
|
||||
Plugins can use the :ref:`plugin_hook_actor_from_request` hook to implement custom logic for authenticating an actor based on the incoming HTTP request.
|
||||
|
||||
|
|
@ -28,25 +28,13 @@ Using the "root" actor
|
|||
|
||||
Datasette currently leaves almost all forms of authentication to plugins - `datasette-auth-github <https://github.com/simonw/datasette-auth-github>`__ for example.
|
||||
|
||||
The one exception is the "root" account, which you can sign into while using Datasette on your local machine. The root user has **all permissions** - they can perform any action regardless of other permission rules.
|
||||
|
||||
The ``--root`` flag is designed for local development and testing. When you start Datasette with ``--root``, the root user automatically receives every permission, including:
|
||||
|
||||
* All view permissions (``view-instance``, ``view-database``, ``view-table``, etc.)
|
||||
* All write permissions (``insert-row``, ``update-row``, ``delete-row``, ``create-table``, ``alter-table``, ``drop-table``)
|
||||
* Debug permissions (``permissions-debug``, ``debug-menu``)
|
||||
* Any custom permissions defined by plugins
|
||||
|
||||
If you add explicit deny rules in ``datasette.yaml`` those can still block the
|
||||
root actor from specific databases or tables.
|
||||
|
||||
The ``--root`` flag sets an internal ``root_enabled`` switch—without it, a signed-in user with ``{"id": "root"}`` is treated like any other actor.
|
||||
The one exception is the "root" account, which you can sign into while using Datasette on your local machine. This provides access to a small number of debugging features.
|
||||
|
||||
To sign in as root, start Datasette using the ``--root`` command-line option, like this::
|
||||
|
||||
datasette --root
|
||||
|
||||
Datasette will output a single-use-only login URL on startup::
|
||||
::
|
||||
|
||||
http://127.0.0.1:8001/-/auth-token?token=786fc524e0199d70dc9a581d851f466244e114ca92f33aa3b42a139e9388daa7
|
||||
INFO: Started server process [25801]
|
||||
|
|
@ -54,7 +42,7 @@ Datasette will output a single-use-only login URL on startup::
|
|||
INFO: Application startup complete.
|
||||
INFO: Uvicorn running on http://127.0.0.1:8001 (Press CTRL+C to quit)
|
||||
|
||||
Click on that link and then visit ``http://127.0.0.1:8001/-/actor`` to confirm that you are authenticated as an actor that looks like this:
|
||||
The URL on the first line includes a one-use token which can be used to sign in as the "root" actor in your browser. Click on that link and then visit ``http://127.0.0.1:8001/-/actor`` to confirm that you are authenticated as an actor that looks like this:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
|
@ -67,7 +55,7 @@ Click on that link and then visit ``http://127.0.0.1:8001/-/actor`` to confirm t
|
|||
Permissions
|
||||
===========
|
||||
|
||||
Datasette's permissions system is built around SQL queries. Datasette and its plugins construct SQL queries to resolve the list of resources that an actor cas access.
|
||||
Datasette has an extensive permissions system built-in, which can be further extended and customized by plugins.
|
||||
|
||||
The key question the permissions system answers is this:
|
||||
|
||||
|
|
@ -75,80 +63,37 @@ The key question the permissions system answers is this:
|
|||
|
||||
**Actors** are :ref:`described above <authentication_actor>`.
|
||||
|
||||
An **action** is a string describing the action the actor would like to perform. A full list is :ref:`provided below <actions>` - examples include ``view-table`` and ``execute-sql``.
|
||||
An **action** is a string describing the action the actor would like to perform. A full list is :ref:`provided below <permissions>` - examples include ``view-table`` and ``execute-sql``.
|
||||
|
||||
A **resource** is the item the actor wishes to interact with - for example a specific database or table. Some actions, such as ``permissions-debug``, are not associated with a particular resource.
|
||||
|
||||
Datasette's built-in view actions (``view-database``, ``view-table`` etc) are allowed by Datasette's default configuration: unless you :ref:`configure additional permission rules <authentication_permissions_config>` unauthenticated users will be allowed to access content.
|
||||
Datasette's built-in view permissions (``view-database``, ``view-table`` etc) default to *allow* - unless you :ref:`configure additional permission rules <authentication_permissions_config>` unauthenticated users will be allowed to access content.
|
||||
|
||||
Other actions, including those introduced by plugins, will default to *deny*.
|
||||
|
||||
.. _authentication_default_deny:
|
||||
|
||||
Denying all permissions by default
|
||||
----------------------------------
|
||||
|
||||
By default, Datasette allows unauthenticated access to view databases, tables, and execute SQL queries.
|
||||
|
||||
You may want to run Datasette in a mode where **all** access is denied by default, and you explicitly grant permissions only to authenticated users, either using the :ref:`--root mechanism <authentication_root>` or through :ref:`configuration file rules <authentication_permissions_config>` or plugins.
|
||||
|
||||
Use the ``--default-deny`` command-line option to run Datasette in this mode::
|
||||
|
||||
datasette --default-deny data.db --root
|
||||
|
||||
With ``--default-deny`` enabled:
|
||||
|
||||
* Anonymous users are denied access to view the instance, databases, tables, and queries
|
||||
* Authenticated users are also denied access unless they're explicitly granted permissions
|
||||
* The root user (when using ``--root``) still has access to everything
|
||||
* You can grant permissions using :ref:`configuration file rules <authentication_permissions_config>` or plugins
|
||||
|
||||
For example, to allow only a specific user to access your instance::
|
||||
|
||||
datasette --default-deny data.db --config datasette.yaml
|
||||
|
||||
Where ``datasette.yaml`` contains:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
allow:
|
||||
id: alice
|
||||
|
||||
This configuration will deny access to everyone except the user with ``id`` of ``alice``.
|
||||
Permissions with potentially harmful effects should default to *deny*. Plugin authors should account for this when designing new plugins - for example, the `datasette-upload-csvs <https://github.com/simonw/datasette-upload-csvs>`__ plugin defaults to deny so that installations don't accidentally allow unauthenticated users to create new tables by uploading a CSV file.
|
||||
|
||||
.. _authentication_permissions_explained:
|
||||
|
||||
How permissions are resolved
|
||||
----------------------------
|
||||
|
||||
Datasette performs permission checks using the internal :ref:`datasette_allowed` method, which accepts keyword arguments for ``action``, ``resource`` and an optional ``actor``.
|
||||
The :ref:`datasette.permission_allowed(actor, action, resource=None, default=...)<datasette_permission_allowed>` method is called to check if an actor is allowed to perform a specific action.
|
||||
|
||||
``resource`` should be an instance of the appropriate ``Resource`` subclass from :mod:`datasette.resources`—for example ``InstanceResource()``, ``DatabaseResource(database="...")`` or ``TableResource(database="...", table="...")``. This defaults to ``InstanceResource()`` if not specified.
|
||||
This method asks every plugin that implements the :ref:`plugin_hook_permission_allowed` hook if the actor is allowed to perform the action.
|
||||
|
||||
When a check runs, Datasette gathers allow/deny rules from multiple sources and
|
||||
compiles them into a SQL query. The resulting query describes all of the
|
||||
resources an actor may access for that action, together with the reasons those
|
||||
resources were allowed or denied. The combined sources are:
|
||||
Each plugin can return ``True`` to indicate that the actor is allowed to perform the action, ``False`` if they are not allowed and ``None`` if the plugin has no opinion on the matter.
|
||||
|
||||
* ``allow`` blocks configured in :ref:`datasette.yaml <authentication_permissions_config>`.
|
||||
* :ref:`Actor restrictions <authentication_cli_create_token_restrict>` encoded into the actor dictionary or API token.
|
||||
* The "root" user shortcut when ``--root`` (or :attr:`Datasette.root_enabled <datasette.app.Datasette.root_enabled>`) is active, answering ``True`` to all permission checks unless configuration rules deny them at a more specific level.
|
||||
* Any additional SQL provided by plugins implementing :ref:`plugin_hook_permission_resources_sql`.
|
||||
``False`` acts as a veto - if any plugin returns ``False`` then the permission check is denied. Otherwise, if any plugin returns ``True`` then the permission check is allowed.
|
||||
|
||||
Datasette evaluates the SQL to determine if the requested ``resource`` is
|
||||
included. Explicit deny rules returned by configuration or plugins will block
|
||||
access even if other rules allowed it.
|
||||
The ``resource`` argument can be used to specify a specific resource that the action is being performed against. Some permissions, such as ``view-instance``, do not involve a resource. Others such as ``view-database`` have a resource that is a string naming the database. Permissions that take both a database name and the name of a table, view or canned query within that database use a resource that is a tuple of two strings, ``(database_name, resource_name)``.
|
||||
|
||||
Plugins that implement the ``permission_allowed()`` hook can decide if they are going to consider the provided resource or not.
|
||||
|
||||
.. _authentication_permissions_allow:
|
||||
|
||||
Defining permissions with "allow" blocks
|
||||
----------------------------------------
|
||||
|
||||
One way to define permissions in Datasette is to use an ``"allow"`` block :ref:`in the datasette.yaml file <authentication_permissions_config>`. This is a JSON document describing which actors are allowed to perform an action against a specific resource.
|
||||
|
||||
Each ``allow`` block is compiled into SQL and combined with any
|
||||
:ref:`plugin-provided rules <plugin_hook_permission_resources_sql>` to produce
|
||||
the cascading allow/deny decisions that power :ref:`datasette_allowed`.
|
||||
The standard way to define permissions in Datasette is to use an ``"allow"`` block :ref:`in the datasette.yaml file <authentication_permissions_config>`. This is a JSON document describing which actors are allowed to perform a permission.
|
||||
|
||||
The most basic form of allow block is this (`allow demo <https://latest.datasette.io/-/allow-debug?actor=%7B%22id%22%3A+%22root%22%7D&allow=%7B%0D%0A++++++++%22id%22%3A+%22root%22%0D%0A++++%7D>`__, `deny demo <https://latest.datasette.io/-/allow-debug?actor=%7B%22id%22%3A+%22trevor%22%7D&allow=%7B%0D%0A++++++++%22id%22%3A+%22root%22%0D%0A++++%7D>`__):
|
||||
|
||||
|
|
@ -470,7 +415,7 @@ You can control the following:
|
|||
* Access to specific tables and views
|
||||
* Access to specific :ref:`canned_queries`
|
||||
|
||||
If a user has permission to view a table they will be able to view that table, independent of if they have permission to view the database or instance that the table exists within.
|
||||
If a user cannot access a specific database, they will not be able to access tables, views or queries within that database. If a user cannot access the instance they will not be able to access any of the databases, tables, views or queries.
|
||||
|
||||
.. _authentication_permissions_instance:
|
||||
|
||||
|
|
@ -708,7 +653,7 @@ Controlling the ability to execute arbitrary SQL
|
|||
|
||||
Datasette defaults to allowing any site visitor to execute their own custom SQL queries, for example using the form on `the database page <https://latest.datasette.io/fixtures>`__ or by appending a ``?_where=`` parameter to the table page `like this <https://latest.datasette.io/fixtures/facetable?_where=_city_id=1>`__.
|
||||
|
||||
Access to this ability is controlled by the :ref:`actions_execute_sql` permission.
|
||||
Access to this ability is controlled by the :ref:`permissions_execute_sql` permission.
|
||||
|
||||
The easiest way to disable arbitrary SQL queries is using the :ref:`default_allow_sql setting <setting_default_allow_sql>` when you first start Datasette running.
|
||||
|
||||
|
|
@ -1066,37 +1011,15 @@ This example outputs the following::
|
|||
}
|
||||
}
|
||||
|
||||
Restrictions act as an allowlist layered on top of the actor's existing
|
||||
permissions. They can only remove access the actor would otherwise have—they
|
||||
cannot grant new access. If the underlying actor is denied by ``allow`` rules in
|
||||
``datasette.yaml`` or by a plugin, a token that lists that resource in its
|
||||
``"_r"`` section will still be denied.
|
||||
|
||||
|
||||
.. _permissions_plugins:
|
||||
|
||||
Checking permissions in plugins
|
||||
===============================
|
||||
|
||||
Datasette plugins can check if an actor has permission to perform an action using :ref:`datasette_allowed`—for example::
|
||||
Datasette plugins can check if an actor has permission to perform an action using the :ref:`datasette.permission_allowed(...)<datasette_permission_allowed>` method.
|
||||
|
||||
from datasette.resources import TableResource
|
||||
|
||||
can_edit = await datasette.allowed(
|
||||
action="update-row",
|
||||
resource=TableResource(database="fixtures", table="facetable"),
|
||||
actor=request.actor,
|
||||
)
|
||||
|
||||
Use :ref:`datasette_ensure_permission` when you need to enforce a permission and
|
||||
raise a ``Forbidden`` error automatically.
|
||||
|
||||
Plugins that define new operations should return :class:`~datasette.permissions.Action`
|
||||
objects from :ref:`plugin_register_actions` and can supply additional allow/deny
|
||||
rules by returning :class:`~datasette.permissions.PermissionSQL` objects from the
|
||||
:ref:`plugin_hook_permission_resources_sql` hook. Those rules are merged with
|
||||
configuration ``allow`` blocks and actor restrictions to determine the final
|
||||
result for each check.
|
||||
Datasette core performs a number of permission checks, :ref:`documented below <permissions>`. Plugins can implement the :ref:`plugin_hook_permission_allowed` plugin hook to participate in decisions about whether an actor should be able to perform a specified action.
|
||||
|
||||
.. _authentication_actor_matches_allow:
|
||||
|
||||
|
|
@ -1116,56 +1039,17 @@ The currently authenticated actor is made available to plugins as ``request.acto
|
|||
|
||||
.. _PermissionsDebugView:
|
||||
|
||||
Permissions debug tools
|
||||
=======================
|
||||
The permissions debug tool
|
||||
==========================
|
||||
|
||||
The debug tool at ``/-/permissions`` is available to any actor with the ``permissions-debug`` permission. By default this is just the :ref:`authenticated root user <authentication_root>` but you can open it up to all users by starting Datasette like this::
|
||||
The debug tool at ``/-/permissions`` is only available to the :ref:`authenticated root user <authentication_root>` (or any actor granted the ``permissions-debug`` action).
|
||||
|
||||
datasette -s permissions.permissions-debug true data.db
|
||||
|
||||
The page shows the permission checks that have been carried out by the Datasette instance.
|
||||
It shows the thirty most recent permission checks that have been carried out by the Datasette instance.
|
||||
|
||||
It also provides an interface for running hypothetical permission checks against a hypothetical actor. This is a useful way of confirming that your configured permissions work in the way you expect.
|
||||
|
||||
This is designed to help administrators and plugin authors understand exactly how permission checks are being carried out, in order to effectively configure Datasette's permission system.
|
||||
|
||||
.. _AllowedResourcesView:
|
||||
|
||||
Allowed resources view
|
||||
----------------------
|
||||
|
||||
The ``/-/allowed`` endpoint displays resources that the current actor can access for a specified ``action``.
|
||||
|
||||
This endpoint provides an interactive HTML form interface. Add ``.json`` to the URL path (e.g. ``/-/allowed.json``) to get the raw JSON response instead.
|
||||
|
||||
Pass ``?action=view-table`` (or another action) to select the action. Optional ``parent=`` and ``child=`` query parameters can narrow the results to a specific database/table pair.
|
||||
|
||||
This endpoint is publicly accessible to help users understand their own permissions. The potentially sensitive ``reason`` field is only shown to users with the ``permissions-debug`` permission - it shows the plugins and explanatory reasons that were responsible for each decision.
|
||||
|
||||
.. _PermissionRulesView:
|
||||
|
||||
Permission rules view
|
||||
---------------------
|
||||
|
||||
The ``/-/rules`` endpoint displays all permission rules (both allow and deny) for each candidate resource for the requested action.
|
||||
|
||||
This endpoint provides an interactive HTML form interface. Add ``.json`` to the URL path (e.g. ``/-/rules.json?action=view-table``) to get the raw JSON response instead.
|
||||
|
||||
Pass ``?action=`` as a query parameter to specify which action to check.
|
||||
|
||||
This endpoint requires the ``permissions-debug`` permission.
|
||||
|
||||
.. _PermissionCheckView:
|
||||
|
||||
Permission check view
|
||||
---------------------
|
||||
|
||||
The ``/-/check`` endpoint evaluates a single action/resource pair and returns information indicating whether the access was allowed along with diagnostic information.
|
||||
|
||||
This endpoint provides an interactive HTML form interface. Add ``.json`` to the URL path (e.g. ``/-/check.json?action=view-instance``) to get the raw JSON response instead.
|
||||
|
||||
Pass ``?action=`` to specify the action to check, and optional ``?parent=`` and ``?child=`` parameters to specify the resource.
|
||||
|
||||
.. _authentication_ds_actor:
|
||||
|
||||
The ds_actor cookie
|
||||
|
|
@ -1178,25 +1062,19 @@ Authentication plugins can set signed ``ds_actor`` cookies themselves like so:
|
|||
.. code-block:: python
|
||||
|
||||
response = Response.redirect("/")
|
||||
datasette.set_actor_cookie(response, {"id": "cleopaws"})
|
||||
response.set_cookie(
|
||||
"ds_actor",
|
||||
datasette.sign({"a": {"id": "cleopaws"}}, "actor"),
|
||||
)
|
||||
|
||||
The shape of data encoded in the cookie is as follows:
|
||||
Note that you need to pass ``"actor"`` as the namespace to :ref:`datasette_sign`.
|
||||
|
||||
.. code-block:: json
|
||||
The shape of data encoded in the cookie is as follows::
|
||||
|
||||
{
|
||||
"a": {
|
||||
"id": "cleopaws"
|
||||
}
|
||||
"a": {... actor ...}
|
||||
}
|
||||
|
||||
To implement logout in a plugin, use the ``delete_actor_cookie()`` method:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
response = Response.redirect("/")
|
||||
datasette.delete_actor_cookie(response)
|
||||
|
||||
.. _authentication_ds_actor_expiry:
|
||||
|
||||
Including an expiry time
|
||||
|
|
@ -1204,13 +1082,25 @@ Including an expiry time
|
|||
|
||||
``ds_actor`` cookies can optionally include a signed expiry timestamp, after which the cookies will no longer be valid. Authentication plugins may chose to use this mechanism to limit the lifetime of the cookie. For example, if a plugin implements single-sign-on against another source it may decide to set short-lived cookies so that if the user is removed from the SSO system their existing Datasette cookies will stop working shortly afterwards.
|
||||
|
||||
To include an expiry pass ``expire_after=`` to ``datasette.set_actor_cookie()`` with a number of seconds. For example, to expire in 24 hours:
|
||||
To include an expiry, add a ``"e"`` key to the cookie value containing a base62-encoded integer representing the timestamp when the cookie should expire. For example, here's how to set a cookie that expires after 24 hours:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
import time
|
||||
from datasette.utils import baseconv
|
||||
|
||||
expires_at = int(time.time()) + (24 * 60 * 60)
|
||||
|
||||
response = Response.redirect("/")
|
||||
datasette.set_actor_cookie(
|
||||
response, {"id": "cleopaws"}, expire_after=60 * 60 * 24
|
||||
response.set_cookie(
|
||||
"ds_actor",
|
||||
datasette.sign(
|
||||
{
|
||||
"a": {"id": "cleopaws"},
|
||||
"e": baseconv.base62.encode(expires_at),
|
||||
},
|
||||
"actor",
|
||||
),
|
||||
)
|
||||
|
||||
The resulting cookie will encode data that looks something like this:
|
||||
|
|
@ -1218,12 +1108,13 @@ The resulting cookie will encode data that looks something like this:
|
|||
.. code-block:: json
|
||||
|
||||
{
|
||||
"a": {
|
||||
"id": "cleopaws"
|
||||
},
|
||||
"e": "1jjSji"
|
||||
"a": {
|
||||
"id": "cleopaws"
|
||||
},
|
||||
"e": "1jjSji"
|
||||
}
|
||||
|
||||
|
||||
.. _LogoutView:
|
||||
|
||||
The /-/logout page
|
||||
|
|
@ -1231,156 +1122,168 @@ The /-/logout page
|
|||
|
||||
The page at ``/-/logout`` provides the ability to log out of a ``ds_actor`` cookie authentication session.
|
||||
|
||||
.. _actions:
|
||||
.. _permissions:
|
||||
|
||||
Built-in actions
|
||||
================
|
||||
Built-in permissions
|
||||
====================
|
||||
|
||||
This section lists all of the permission checks that are carried out by Datasette core, along with the ``resource`` if it was passed.
|
||||
|
||||
.. _actions_view_instance:
|
||||
.. _permissions_view_instance:
|
||||
|
||||
view-instance
|
||||
-------------
|
||||
|
||||
Top level permission - Actor is allowed to view any pages within this instance, starting at https://latest.datasette.io/
|
||||
|
||||
.. _actions_view_database:
|
||||
Default *allow*.
|
||||
|
||||
.. _permissions_view_database:
|
||||
|
||||
view-database
|
||||
-------------
|
||||
|
||||
Actor is allowed to view a database page, e.g. https://latest.datasette.io/fixtures
|
||||
|
||||
``resource`` - ``datasette.permissions.DatabaseResource(database)``
|
||||
``database`` is the name of the database (string)
|
||||
``resource`` - string
|
||||
The name of the database
|
||||
|
||||
.. _actions_view_database_download:
|
||||
Default *allow*.
|
||||
|
||||
.. _permissions_view_database_download:
|
||||
|
||||
view-database-download
|
||||
----------------------
|
||||
-----------------------
|
||||
|
||||
Actor is allowed to download a database, e.g. https://latest.datasette.io/fixtures.db
|
||||
|
||||
``resource`` - ``datasette.resources.DatabaseResource(database)``
|
||||
``database`` is the name of the database (string)
|
||||
``resource`` - string
|
||||
The name of the database
|
||||
|
||||
.. _actions_view_table:
|
||||
Default *allow*.
|
||||
|
||||
.. _permissions_view_table:
|
||||
|
||||
view-table
|
||||
----------
|
||||
|
||||
Actor is allowed to view a table (or view) page, e.g. https://latest.datasette.io/fixtures/complex_foreign_keys
|
||||
|
||||
``resource`` - ``datasette.resources.TableResource(database, table)``
|
||||
``database`` is the name of the database (string)
|
||||
``resource`` - tuple: (string, string)
|
||||
The name of the database, then the name of the table
|
||||
|
||||
``table`` is the name of the table (string)
|
||||
Default *allow*.
|
||||
|
||||
.. _actions_view_query:
|
||||
.. _permissions_view_query:
|
||||
|
||||
view-query
|
||||
----------
|
||||
|
||||
Actor is allowed to view (and execute) a :ref:`canned query <canned_queries>` page, e.g. https://latest.datasette.io/fixtures/pragma_cache_size - this includes executing :ref:`canned_queries_writable`.
|
||||
|
||||
``resource`` - ``datasette.resources.QueryResource(database, query)``
|
||||
``database`` is the name of the database (string)
|
||||
|
||||
``query`` is the name of the canned query (string)
|
||||
``resource`` - tuple: (string, string)
|
||||
The name of the database, then the name of the canned query
|
||||
|
||||
.. _actions_insert_row:
|
||||
Default *allow*.
|
||||
|
||||
.. _permissions_insert_row:
|
||||
|
||||
insert-row
|
||||
----------
|
||||
|
||||
Actor is allowed to insert rows into a table.
|
||||
|
||||
``resource`` - ``datasette.resources.TableResource(database, table)``
|
||||
``database`` is the name of the database (string)
|
||||
``resource`` - tuple: (string, string)
|
||||
The name of the database, then the name of the table
|
||||
|
||||
``table`` is the name of the table (string)
|
||||
Default *deny*.
|
||||
|
||||
.. _actions_delete_row:
|
||||
.. _permissions_delete_row:
|
||||
|
||||
delete-row
|
||||
----------
|
||||
|
||||
Actor is allowed to delete rows from a table.
|
||||
|
||||
``resource`` - ``datasette.resources.TableResource(database, table)``
|
||||
``database`` is the name of the database (string)
|
||||
``resource`` - tuple: (string, string)
|
||||
The name of the database, then the name of the table
|
||||
|
||||
``table`` is the name of the table (string)
|
||||
Default *deny*.
|
||||
|
||||
.. _actions_update_row:
|
||||
.. _permissions_update_row:
|
||||
|
||||
update-row
|
||||
----------
|
||||
|
||||
Actor is allowed to update rows in a table.
|
||||
|
||||
``resource`` - ``datasette.resources.TableResource(database, table)``
|
||||
``database`` is the name of the database (string)
|
||||
``resource`` - tuple: (string, string)
|
||||
The name of the database, then the name of the table
|
||||
|
||||
``table`` is the name of the table (string)
|
||||
Default *deny*.
|
||||
|
||||
.. _actions_create_table:
|
||||
.. _permissions_create_table:
|
||||
|
||||
create-table
|
||||
------------
|
||||
|
||||
Actor is allowed to create a database table.
|
||||
|
||||
``resource`` - ``datasette.resources.DatabaseResource(database)``
|
||||
``database`` is the name of the database (string)
|
||||
``resource`` - string
|
||||
The name of the database
|
||||
|
||||
.. _actions_alter_table:
|
||||
Default *deny*.
|
||||
|
||||
.. _permissions_alter_table:
|
||||
|
||||
alter-table
|
||||
-----------
|
||||
|
||||
Actor is allowed to alter a database table.
|
||||
|
||||
``resource`` - ``datasette.resources.TableResource(database, table)``
|
||||
``database`` is the name of the database (string)
|
||||
``resource`` - tuple: (string, string)
|
||||
The name of the database, then the name of the table
|
||||
|
||||
``table`` is the name of the table (string)
|
||||
Default *deny*.
|
||||
|
||||
.. _actions_drop_table:
|
||||
.. _permissions_drop_table:
|
||||
|
||||
drop-table
|
||||
----------
|
||||
|
||||
Actor is allowed to drop a database table.
|
||||
|
||||
``resource`` - ``datasette.resources.TableResource(database, table)``
|
||||
``database`` is the name of the database (string)
|
||||
``resource`` - tuple: (string, string)
|
||||
The name of the database, then the name of the table
|
||||
|
||||
``table`` is the name of the table (string)
|
||||
Default *deny*.
|
||||
|
||||
.. _actions_execute_sql:
|
||||
.. _permissions_execute_sql:
|
||||
|
||||
execute-sql
|
||||
-----------
|
||||
|
||||
Actor is allowed to run arbitrary SQL queries against a specific database, e.g. https://latest.datasette.io/fixtures/-/query?sql=select+100
|
||||
Actor is allowed to run arbitrary SQL queries against a specific database, e.g. https://latest.datasette.io/fixtures?sql=select+100
|
||||
|
||||
``resource`` - ``datasette.resources.DatabaseResource(database)``
|
||||
``database`` is the name of the database (string)
|
||||
``resource`` - string
|
||||
The name of the database
|
||||
|
||||
See also :ref:`the default_allow_sql setting <setting_default_allow_sql>`.
|
||||
Default *allow*. See also :ref:`the default_allow_sql setting <setting_default_allow_sql>`.
|
||||
|
||||
.. _actions_permissions_debug:
|
||||
.. _permissions_permissions_debug:
|
||||
|
||||
permissions-debug
|
||||
-----------------
|
||||
|
||||
Actor is allowed to view the ``/-/permissions`` debug tools.
|
||||
Actor is allowed to view the ``/-/permissions`` debug page.
|
||||
|
||||
.. _actions_debug_menu:
|
||||
Default *deny*.
|
||||
|
||||
.. _permissions_debug_menu:
|
||||
|
||||
debug-menu
|
||||
----------
|
||||
|
||||
Controls if the various debug pages are displayed in the navigation menu.
|
||||
|
||||
Default *deny*.
|
||||
|
|
|
|||
|
|
@ -4,185 +4,6 @@
|
|||
Changelog
|
||||
=========
|
||||
|
||||
.. _v1_0_a23:
|
||||
|
||||
1.0a23 (2025-12-02)
|
||||
-------------------
|
||||
|
||||
- Fix for bug where a stale database entry in ``internal.db`` could cause a 500 error on the homepage. (:issue:`2605`)
|
||||
- Cosmetic improvement to ``/-/actions`` page. (:issue:`2599`)
|
||||
|
||||
.. _v1_0_a22:
|
||||
|
||||
1.0a22 (2025-11-13)
|
||||
-------------------
|
||||
|
||||
- ``datasette serve --default-deny`` option for running Datasette configured to :ref:`deny all permissions by default <authentication_default_deny>`. (:issue:`2592`)
|
||||
- ``datasette.is_client()`` method for detecting if code is :ref:`executing inside a datasette.client request <internals_datasette_is_client>`. (:issue:`2594`)
|
||||
- ``datasette.pm`` property can now be used to :ref:`register and unregister plugins in tests <testing_plugins_register_in_test>`. (:issue:`2595`)
|
||||
|
||||
.. _v1_0_a21:
|
||||
|
||||
1.0a21 (2025-11-05)
|
||||
-------------------
|
||||
|
||||
- Fixes an **open redirect** security issue: Datasette instances would redirect to ``example.com/foo/bar`` if you accessed the path ``//example.com/foo/bar``. Thanks to `James Jefferies <https://github.com/jamesjefferies>`__ for the fix. (:issue:`2429`)
|
||||
- Fixed ``datasette publish cloudrun`` to work with changes to the underlying Cloud Run architecture. (:issue:`2511`)
|
||||
- New ``datasette --get /path --headers`` option for inspecting the headers returned by a path. (:issue:`2578`)
|
||||
- New ``datasette.client.get(..., skip_permission_checks=True)`` parameter to bypass permission checks when making requests using the internal client. (:issue:`2583`)
|
||||
|
||||
.. _v0_65_2:
|
||||
|
||||
0.65.2 (2025-11-05)
|
||||
-------------------
|
||||
|
||||
- Fixes an **open redirect** security issue: Datasette instances would redirect to ``example.com/foo/bar`` if you accessed the path ``//example.com/foo/bar``. Thanks to `James Jefferies <https://github.com/jamesjefferies>`__ for the fix. (:issue:`2429`)
|
||||
- Upgraded for compatibility with Python 3.14.
|
||||
- Fixed ``datasette publish cloudrun`` to work with changes to the underlying Cloud Run architecture. (:issue:`2511`)
|
||||
- Minor upgrades to fix warnings, including ``pkg_resources`` deprecation.
|
||||
|
||||
.. _v1_0_a20:
|
||||
|
||||
1.0a20 (2025-11-03)
|
||||
-------------------
|
||||
|
||||
This alpha introduces a major breaking change prior to the 1.0 release of Datasette concerning how Datasette's permission system works.
|
||||
|
||||
Permission system redesign
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Previously the permission system worked using ``datasette.permission_allowed()`` checks which consulted all available plugins in turn to determine whether a given actor was allowed to perform a given action on a given resource.
|
||||
|
||||
This approach could become prohibitively expensive for large lists of items - for example to determine the list of tables that a user could view in a large Datasette instance each plugin implementation of that hook would be fired for every table.
|
||||
|
||||
The new design uses SQL queries against Datasette's internal :ref:`catalog tables <internals_internal>` to derive the list of resources for which an actor has permission for a given action. This turns an N x M problem (N resources, M plugins) into a single SQL query.
|
||||
|
||||
Plugins can use the new :ref:`plugin_hook_permission_resources_sql` hook to return SQL fragments which will be used as part of that query.
|
||||
|
||||
Plugins that use any of the following features will need to be updated to work with this and following alphas (and Datasette 1.0 stable itself):
|
||||
|
||||
- Checking permissions with ``datasette.permission_allowed()`` - this method has been replaced with :ref:`datasette.allowed() <datasette_allowed>`.
|
||||
- Implementing the ``permission_allowed()`` plugin hook - this hook has been removed in favor of :ref:`permission_resources_sql() <plugin_hook_permission_resources_sql>`.
|
||||
- Using ``register_permissions()`` to register permissions - this hook has been removed in favor of :ref:`register_actions() <plugin_register_actions>`.
|
||||
|
||||
Consult the :ref:`v1.0a20 upgrade guide <upgrade_guide_v1_a20>` for further details on how to upgrade affected plugins.
|
||||
|
||||
Plugins can now make use of two new internal methods to help resolve permission checks:
|
||||
|
||||
- :ref:`datasette.allowed_resources() <datasette_allowed_resources>` returns a ``PaginatedResources`` object with a ``.resources`` list of ``Resource`` instances that an actor is allowed to access for a given action (and a ``.next`` token for pagination).
|
||||
- :ref:`datasette.allowed_resources_sql() <datasette_allowed_resources_sql>` returns the SQL and parameters that can be executed against the internal catalog tables to determine which resources an actor is allowed to access for a given action. This can be combined with further SQL to perform advanced custom filtering.
|
||||
|
||||
Related changes:
|
||||
|
||||
- The way ``datasette --root`` works has changed. Running Datasette with this flag now causes the root actor to pass *all* permission checks. (:issue:`2521`)
|
||||
|
||||
- Permission debugging improvements:
|
||||
|
||||
- The ``/-/allowed`` endpoint shows resources the user is allowed to interact with for different actions.
|
||||
- ``/-/rules`` shows the raw allow/deny rules that apply to different permission checks.
|
||||
- ``/-/actions`` lists every available action.
|
||||
- ``/-/check`` can be used to try out different permission checks for the current actor.
|
||||
|
||||
Other changes
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
- The internal ``catalog_views`` table now tracks SQLite views alongside tables in the introspection database. (:issue:`2495`)
|
||||
- Hitting the ``/`` brings up a search interface for navigating to tables that the current user can view. A new ``/-/tables`` endpoint supports this functionality. (:issue:`2523`)
|
||||
- Datasette attempts to detect some configuration errors on startup.
|
||||
- Datasette now supports Python 3.14 and no longer tests against Python 3.9.
|
||||
|
||||
.. _v1_0_a19:
|
||||
|
||||
1.0a19 (2025-04-21)
|
||||
-------------------
|
||||
|
||||
- Tiny cosmetic bug fix for mobile display of table rows. (:issue:`2479`)
|
||||
|
||||
.. _v1_0_a18:
|
||||
|
||||
1.0a18 (2025-04-16)
|
||||
-------------------
|
||||
|
||||
- Fix for incorrect foreign key references in the internal database schema. (:issue:`2466`)
|
||||
- The ``prepare_connection()`` hook no longer runs for the internal database. (:issue:`2468`)
|
||||
- Fixed bug where ``link:`` HTTP headers used invalid syntax. (:issue:`2470`)
|
||||
- No longer tested against Python 3.8. Now tests against Python 3.13.
|
||||
- FTS tables are now hidden by default if they correspond to a content table. (:issue:`2477`)
|
||||
- Fixed bug with foreign key links to rows in databases with filenames containing a special character. Thanks, `Jack Stratton <https://github.com/phroa>`__. (`#2476 <https://github.com/simonw/datasette/pull/2476>`__)
|
||||
|
||||
.. _v1_0_a17:
|
||||
|
||||
1.0a17 (2025-02-06)
|
||||
-------------------
|
||||
|
||||
- ``DATASETTE_SSL_KEYFILE`` and ``DATASETTE_SSL_CERTFILE`` environment variables as alternatives to ``--ssl-keyfile`` and ``--ssl-certfile``. Thanks, Alex Garcia. (:issue:`2422`)
|
||||
- ``SQLITE_EXTENSIONS`` environment variable has been renamed to ``DATASETTE_LOAD_EXTENSION``. (:issue:`2424`)
|
||||
- ``datasette serve`` environment variables are now :ref:`documented here <cli_datasette_serve_env>`.
|
||||
- The :ref:`plugin_hook_register_magic_parameters` plugin hook can now register async functions. (:issue:`2441`)
|
||||
- Datasette is now tested against Python 3.13.
|
||||
- Breadcrumbs on database and table pages now include a consistent self-link for resetting query string parameters. (:issue:`2454`)
|
||||
- Fixed issue where Datasette could crash on ``metadata.json`` with nested values. (:issue:`2455`)
|
||||
- New internal methods ``datasette.set_actor_cookie()`` and ``datasette.delete_actor_cookie()``, :ref:`described here <authentication_ds_actor>`. (:issue:`1690`)
|
||||
- ``/-/permissions`` page now shows a list of all permissions registered by plugins. (:issue:`1943`)
|
||||
- If a table has a single unique text column Datasette now detects that as the foreign key label for that table. (:issue:`2458`)
|
||||
- The ``/-/permissions`` page now includes options for filtering or excluding permission checks recorded against the current user. (:issue:`2460`)
|
||||
- Fixed a bug where replacing a database with a new one with the same name did not pick up the new database correctly. (:issue:`2465`)
|
||||
|
||||
.. _v0_65_1:
|
||||
|
||||
0.65.1 (2024-11-28)
|
||||
-------------------
|
||||
|
||||
- Fixed bug with upgraded HTTPX 0.28.0 dependency. (:issue:`2443`)
|
||||
|
||||
.. _v0_65:
|
||||
|
||||
0.65 (2024-10-07)
|
||||
-----------------
|
||||
|
||||
- Upgrade for compatibility with Python 3.13 (by vendoring Pint dependency). (:issue:`2434`)
|
||||
- Dropped support for Python 3.8.
|
||||
|
||||
.. _v1_0_a16:
|
||||
|
||||
1.0a16 (2024-09-05)
|
||||
-------------------
|
||||
|
||||
This release focuses on performance, in particular against large tables, and introduces some minor breaking changes for CSS styling in Datasette plugins.
|
||||
|
||||
- Removed the unit conversions feature and its dependency, Pint. This means Datasette is now compatible with the upcoming Python 3.13. (:issue:`2400`, :issue:`2320`)
|
||||
- The ``datasette --pdb`` option now uses the `ipdb <https://github.com/gotcha/ipdb>`__ debugger if it is installed. You can install it using ``datasette install ipdb``. Thanks, `Tiago Ilieve <https://github.com/myhro>`__. (`#2342 <https://github.com/simonw/datasette/pull/2342>`__)
|
||||
- Fixed a confusing error that occurred if ``metadata.json`` contained nested objects. (:issue:`2403`)
|
||||
- Fixed a bug with ``?_trace=1`` where it returned a blank page if the response was larger than 256KB. (:issue:`2404`)
|
||||
- Tracing mechanism now also displays SQL queries that returned errors or ran out of time. `datasette-pretty-traces 0.5 <https://github.com/simonw/datasette-pretty-traces/releases/tag/0.5>`__ includes support for displaying this new type of trace. (:issue:`2405`)
|
||||
- Fixed a text spacing issue with table descriptions on the homepage. (:issue:`2399`)
|
||||
- Performance improvements for large tables:
|
||||
- Suggested facets now only consider the first 1000 rows. (:issue:`2406`)
|
||||
- Improved performance of date facet suggestion against large tables. (:issue:`2407`)
|
||||
- Row counts stop at 10,000 rows when listing tables. (:issue:`2398`)
|
||||
- On table page the count stops at 10,000 rows too, with a "count all" button to execute the full count. (:issue:`2408`)
|
||||
- New ``.dicts()`` internal method on :ref:`database_results` that returns a list of dictionaries representing the results from a SQL query: (:issue:`2414`)
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
rows = (await db.execute("select * from t")).dicts()
|
||||
|
||||
- Default Datasette core CSS that styles inputs and buttons now requires a class of ``"core"`` on the element or a containing element, for example ``<form class="core">``. (:issue:`2415`)
|
||||
- Similarly, default table styles now only apply to ``<table class="rows-and-columns">``. (:issue:`2420`)
|
||||
|
||||
.. _v1_0_a15:
|
||||
|
||||
1.0a15 (2024-08-15)
|
||||
-------------------
|
||||
|
||||
- Datasette now defaults to hiding SQLite "shadow" tables, as seen in extensions such as SQLite FTS and `sqlite-vec <https://github.com/asg017/sqlite-vec>`__. Virtual tables that it makes sense to display, such as FTS core tables, are no longer hidden. Thanks, `Alex Garcia <https://github.com/asg017>`__. (:issue:`2296`)
|
||||
- Fixed bug where running Datasette with one or more ``-s/--setting`` options could over-ride settings that were present in ``datasette.yml``. (:issue:`2389`)
|
||||
- The Datasette homepage is now duplicated at ``/-/``, using the default ``index.html`` template. This ensures that the information on that page is still accessible even if the Datasette homepage has been customized using a custom ``index.html`` template, for example on sites like `datasette.io <https://datasette.io/>`__. (:issue:`2393`)
|
||||
- Failed CSRF checks now display a more user-friendly error page. (:issue:`2390`)
|
||||
- Fixed a bug where the ``json1`` extension was not correctly detected on the ``/-/versions`` page. Thanks, `Seb Bacon <https://github.com/sebbacon>`__. (:issue:`2326`)
|
||||
- Fixed a bug where the Datasette write API did not correctly accept ``Content-Type: application/json; charset=utf-8``. (:issue:`2384`)
|
||||
- Fixed a bug where Datasette would fail to start if ``metadata.yml`` contained a ``queries`` block. (`#2386 <https://github.com/simonw/datasette/pull/2386>`__)
|
||||
|
||||
.. _v1_0_a14:
|
||||
|
||||
1.0a14 (2024-08-05)
|
||||
|
|
@ -205,7 +26,7 @@ This alpha introduces significant changes to Datasette's :ref:`metadata` system,
|
|||
|
||||
.. _v0_64_8:
|
||||
|
||||
0.64.8 (2024-06-21)
|
||||
0.64.8 (2023-06-21)
|
||||
-------------------
|
||||
|
||||
- Security improvement: 404 pages used to reflect content from the URL path, which could be used to display misleading information to Datasette users. 404 errors no longer display additional information from the URL. (:issue:`2359`)
|
||||
|
|
@ -213,7 +34,7 @@ This alpha introduces significant changes to Datasette's :ref:`metadata` system,
|
|||
|
||||
.. _v0_64_7:
|
||||
|
||||
0.64.7 (2024-06-12)
|
||||
0.64.7 (2023-06-12)
|
||||
-------------------
|
||||
|
||||
- Fixed a bug where canned queries with named parameters threw an error when run against SQLite 3.46.0. (:issue:`2353`)
|
||||
|
|
@ -275,7 +96,7 @@ This alpha release adds basic alter table support to the Datasette Write API and
|
|||
Alter table support for create, insert, upsert and update
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The :ref:`JSON write API <json_api_write>` can now be used to apply simple alter table schema changes, provided the acting actor has the new :ref:`actions_alter_table` permission. (:issue:`2101`)
|
||||
The :ref:`JSON write API <json_api_write>` can now be used to apply simple alter table schema changes, provided the acting actor has the new :ref:`permissions_alter_table` permission. (:issue:`2101`)
|
||||
|
||||
The only alter operation supported so far is adding new columns to an existing table.
|
||||
|
||||
|
|
@ -290,12 +111,12 @@ Permissions fix for the upsert API
|
|||
|
||||
The :ref:`/database/table/-/upsert API <TableUpsertView>` had a minor permissions bug, only affecting Datasette instances that had configured the ``insert-row`` and ``update-row`` permissions to apply to a specific table rather than the database or instance as a whole. Full details in issue :issue:`2262`.
|
||||
|
||||
To avoid similar mistakes in the future the ``datasette.permission_allowed()`` method now specifies ``default=`` as a keyword-only argument.
|
||||
To avoid similar mistakes in the future the :ref:`datasette.permission_allowed() <datasette_permission_allowed>` method now specifies ``default=`` as a keyword-only argument.
|
||||
|
||||
Permission checks now consider opinions from every plugin
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The ``datasette.permission_allowed()`` method previously consulted every plugin that implemented the ``permission_allowed()`` plugin hook and obeyed the opinion of the last plugin to return a value. (:issue:`2275`)
|
||||
The :ref:`datasette.permission_allowed() <datasette_permission_allowed>` method previously consulted every plugin that implemented the :ref:`permission_allowed() <plugin_hook_permission_allowed>` plugin hook and obeyed the opinion of the last plugin to return a value. (:issue:`2275`)
|
||||
|
||||
Datasette now consults every plugin and checks to see if any of them returned ``False`` (the veto rule), and if none of them did, it then checks to see if any of them returned ``True``.
|
||||
|
||||
|
|
@ -555,7 +376,7 @@ The third Datasette 1.0 alpha release adds upsert support to the JSON API, plus
|
|||
See `Datasette 1.0a2: Upserts and finely grained permissions <https://simonwillison.net/2022/Dec/15/datasette-1a2/>`__ for an extended, annotated version of these release notes.
|
||||
|
||||
- New ``/db/table/-/upsert`` API, :ref:`documented here <TableUpsertView>`. upsert is an update-or-insert: existing rows will have specified keys updated, but if no row matches the incoming primary key a brand new row will be inserted instead. (:issue:`1878`)
|
||||
- New ``register_permissions()`` plugin hook. Plugins can now register named permissions, which will then be listed in various interfaces that show available permissions. (:issue:`1940`)
|
||||
- New :ref:`plugin_register_permissions` plugin hook. Plugins can now register named permissions, which will then be listed in various interfaces that show available permissions. (:issue:`1940`)
|
||||
- The ``/db/-/create`` API for :ref:`creating a table <TableCreateView>` now accepts ``"ignore": true`` and ``"replace": true`` options when called with the ``"rows"`` property that creates a new table based on an example set of rows. This means the API can be called multiple times with different rows, setting rules for what should happen if a primary key collides with an existing row. (:issue:`1927`)
|
||||
- Arbitrary permissions can now be configured at the instance, database and resource (table, SQL view or canned query) level in Datasette's :ref:`metadata` JSON and YAML files. The new ``"permissions"`` key can be used to specify which actors should have which permissions. See :ref:`authentication_permissions_other` for details. (:issue:`1636`)
|
||||
- The ``/-/create-token`` page can now be used to create API tokens which are restricted to just a subset of actions, including against specific databases or resources. See :ref:`CreateTokenView` for details. (:issue:`1947`)
|
||||
|
|
@ -664,7 +485,7 @@ Documentation
|
|||
.. _v0_62:
|
||||
|
||||
0.62 (2022-08-14)
|
||||
-----------------
|
||||
-------------------
|
||||
|
||||
Datasette can now run entirely in your browser using WebAssembly. Try out `Datasette Lite <https://lite.datasette.io/>`__, take a look `at the code <https://github.com/simonw/datasette-lite>`__ or read more about it in `Datasette Lite: a server-side Python web application running in a browser <https://simonwillison.net/2022/May/4/datasette-lite/>`__.
|
||||
|
||||
|
|
@ -730,7 +551,7 @@ Datasette also now requires Python 3.7 or higher.
|
|||
- Datasette is now covered by a `Code of Conduct <https://github.com/simonw/datasette/blob/main/CODE_OF_CONDUCT.md>`__. (:issue:`1654`)
|
||||
- Python 3.6 is no longer supported. (:issue:`1577`)
|
||||
- Tests now run against Python 3.11-dev. (:issue:`1621`)
|
||||
- New ``datasette.ensure_permissions(actor, permissions)`` internal method for checking multiple permissions at once. (:issue:`1675`)
|
||||
- New :ref:`datasette.ensure_permissions(actor, permissions) <datasette_ensure_permissions>` internal method for checking multiple permissions at once. (:issue:`1675`)
|
||||
- New :ref:`datasette.check_visibility(actor, action, resource=None) <datasette_check_visibility>` internal method for checking if a user can see a resource that would otherwise be invisible to unauthenticated users. (:issue:`1678`)
|
||||
- Table and row HTML pages now include a ``<link rel="alternate" type="application/json+datasette" href="...">`` element and return a ``Link: URL; rel="alternate"; type="application/json+datasette"`` HTTP header pointing to the JSON version of those pages. (:issue:`1533`)
|
||||
- ``Access-Control-Expose-Headers: Link`` is now added to the CORS headers, allowing remote JavaScript to access that header.
|
||||
|
|
@ -1155,7 +976,7 @@ Smaller changes
|
|||
~~~~~~~~~~~~~~~
|
||||
|
||||
- Wide tables shown within Datasette now scroll horizontally (:issue:`998`). This is achieved using a new ``<div class="table-wrapper">`` element which may impact the implementation of some plugins (for example `this change to datasette-cluster-map <https://github.com/simonw/datasette-cluster-map/commit/fcb4abbe7df9071c5ab57defd39147de7145b34e>`__).
|
||||
- New :ref:`actions_debug_menu` permission. (:issue:`1068`)
|
||||
- New :ref:`permissions_debug_menu` permission. (:issue:`1068`)
|
||||
- Removed ``--debug`` option, which didn't do anything. (:issue:`814`)
|
||||
- ``Link:`` HTTP header pagination. (:issue:`1014`)
|
||||
- ``x`` button for clearing filters. (:issue:`1016`)
|
||||
|
|
@ -1414,7 +1235,7 @@ You can use the new ``"allow"`` block syntax in ``metadata.json`` (or ``metadata
|
|||
|
||||
See :ref:`authentication_permissions_allow` for more details.
|
||||
|
||||
Plugins can implement their own custom permission checks using the new ``plugin_hook_permission_allowed()`` plugin hook.
|
||||
Plugins can implement their own custom permission checks using the new :ref:`plugin_hook_permission_allowed` hook.
|
||||
|
||||
A new debug page at ``/-/permissions`` shows recent permission checks, to help administrators and plugin authors understand exactly what checks are being performed. This tool defaults to only being available to the root user, but can be exposed to other users by plugins that respond to the ``permissions-debug`` permission. (:issue:`788`)
|
||||
|
||||
|
|
@ -1490,7 +1311,7 @@ Smaller changes
|
|||
- New :ref:`datasette.get_database() <datasette_get_database>` method.
|
||||
- Added ``_`` prefix to many private, undocumented methods of the Datasette class. (:issue:`576`)
|
||||
- Removed the ``db.get_outbound_foreign_keys()`` method which duplicated the behaviour of ``db.foreign_keys_for_table()``.
|
||||
- New ``await datasette.permission_allowed()`` method.
|
||||
- New :ref:`await datasette.permission_allowed() <datasette_permission_allowed>` method.
|
||||
- ``/-/actor`` debugging endpoint for viewing the currently authenticated actor.
|
||||
- New ``request.cookies`` property.
|
||||
- ``/-/plugins`` endpoint now shows a list of hooks implemented by each plugin, e.g. https://latest.datasette.io/-/plugins?all=1
|
||||
|
|
|
|||
|
|
@ -119,10 +119,8 @@ Once started you can access it at ``http://localhost:8001``
|
|||
signed cookies
|
||||
--root Output URL that sets a cookie authenticating
|
||||
the root user
|
||||
--default-deny Deny all permissions by default
|
||||
--get TEXT Run an HTTP GET request against this path,
|
||||
print results and exit
|
||||
--headers Include HTTP headers in --get output
|
||||
--token TEXT API token to send with --get requests
|
||||
--actor TEXT Actor to use for --get requests (JSON string)
|
||||
--version-note TEXT Additional note to show on /-/versions
|
||||
|
|
@ -143,17 +141,6 @@ Once started you can access it at ``http://localhost:8001``
|
|||
|
||||
.. [[[end]]]
|
||||
|
||||
.. _cli_datasette_serve_env:
|
||||
|
||||
Environment variables
|
||||
---------------------
|
||||
|
||||
Some of the ``datasette serve`` options can be provided by environment variables:
|
||||
|
||||
- ``DATASETTE_SECRET``: Equivalent to the ``--secret`` option.
|
||||
- ``DATASETTE_SSL_KEYFILE``: Equivalent to the ``--ssl-keyfile`` option.
|
||||
- ``DATASETTE_SSL_CERTFILE``: Equivalent to the ``--ssl-certfile`` option.
|
||||
- ``DATASETTE_LOAD_EXTENSION``: Equivalent to the ``--load-extension`` option.
|
||||
|
||||
.. _cli_datasette_get:
|
||||
|
||||
|
|
@ -490,15 +477,8 @@ See :ref:`publish_cloud_run`.
|
|||
--cpu [1|2|4] Number of vCPUs to allocate in Cloud Run
|
||||
--timeout INTEGER Build timeout in seconds
|
||||
--apt-get-install TEXT Additional packages to apt-get install
|
||||
--max-instances INTEGER Maximum Cloud Run instances (use 0 to remove
|
||||
the limit) [default: 1]
|
||||
--max-instances INTEGER Maximum Cloud Run instances
|
||||
--min-instances INTEGER Minimum Cloud Run instances
|
||||
--artifact-repository TEXT Artifact Registry repository to store the
|
||||
image [default: datasette]
|
||||
--artifact-region TEXT Artifact Registry location (region or multi-
|
||||
region) [default: us]
|
||||
--artifact-project TEXT Project ID for Artifact Registry (defaults to
|
||||
the active project)
|
||||
--help Show this message and exit.
|
||||
|
||||
|
||||
|
|
|
|||
12
docs/conf.py
12
docs/conf.py
|
|
@ -36,19 +36,12 @@ extensions = [
|
|||
"sphinx.ext.extlinks",
|
||||
"sphinx.ext.autodoc",
|
||||
"sphinx_copybutton",
|
||||
"myst_parser",
|
||||
"sphinx_markdown_builder",
|
||||
]
|
||||
if not os.environ.get("DISABLE_SPHINX_INLINE_TABS"):
|
||||
extensions += ["sphinx_inline_tabs"]
|
||||
|
||||
autodoc_member_order = "bysource"
|
||||
|
||||
myst_enable_extensions = ["colon_fence"]
|
||||
|
||||
markdown_http_base = "https://docs.datasette.io/en/stable"
|
||||
markdown_uri_doc_suffix = ".html"
|
||||
|
||||
extlinks = {
|
||||
"issue": ("https://github.com/simonw/datasette/issues/%s", "#%s"),
|
||||
}
|
||||
|
|
@ -60,10 +53,7 @@ templates_path = ["_templates"]
|
|||
# You can specify multiple suffix as a list of string:
|
||||
#
|
||||
# source_suffix = ['.rst', '.md']
|
||||
source_suffix = {
|
||||
".rst": "restructuredtext",
|
||||
".md": "markdown",
|
||||
}
|
||||
source_suffix = ".rst"
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = "index"
|
||||
|
|
|
|||
|
|
@ -13,14 +13,13 @@ General guidelines
|
|||
* **main should always be releasable**. Incomplete features should live in branches. This ensures that any small bug fixes can be quickly released.
|
||||
* **The ideal commit** should bundle together the implementation, unit tests and associated documentation updates. The commit message should link to an associated issue.
|
||||
* **New plugin hooks** should only be shipped if accompanied by a separate release of a non-demo plugin that uses them.
|
||||
* **New user-facing views and documentation** should be added or updated alongside their implementation. The `/docs` folder includes pages for plugin hooks and built-in views—please ensure any new hooks or views are reflected there so the documentation tests continue to pass.
|
||||
|
||||
.. _devenvironment:
|
||||
|
||||
Setting up a development environment
|
||||
------------------------------------
|
||||
|
||||
If you have Python 3.10 or higher installed on your computer (on OS X the quickest way to do this `is using homebrew <https://docs.python-guide.org/starting/install3/osx/>`__) you can install an editable copy of Datasette using the following steps.
|
||||
If you have Python 3.8 or higher installed on your computer (on OS X the quickest way to do this `is using homebrew <https://docs.python-guide.org/starting/install3/osx/>`__) you can install an editable copy of Datasette using the following steps.
|
||||
|
||||
If you want to use GitHub to publish your changes, first `create a fork of datasette <https://github.com/simonw/datasette/fork>`__ under your own GitHub account.
|
||||
|
||||
|
|
@ -42,7 +41,7 @@ The next step is to create a virtual environment for your project and use it to
|
|||
# Install Datasette and its testing dependencies
|
||||
python3 -m pip install -e '.[test]'
|
||||
|
||||
That last line does most of the work: ``pip install -e`` means "install this package in a way that allows me to edit the source code in place". The ``.[test]`` option means "install the optional testing dependencies as well".
|
||||
That last line does most of the work: ``pip install -e`` means "install this package in a way that allows me to edit the source code in place". The ``.[test]`` option means "use the setup.py in this directory and install the optional testing dependencies as well".
|
||||
|
||||
.. _contributing_running_tests:
|
||||
|
||||
|
|
@ -112,14 +111,10 @@ Debugging
|
|||
|
||||
Any errors that occur while Datasette is running will display a stack trace on the console.
|
||||
|
||||
You can tell Datasette to open an interactive ``pdb`` (or ``ipdb``, if present) debugger session if an error occurs using the ``--pdb`` option::
|
||||
You can tell Datasette to open an interactive ``pdb`` debugger session if an error occurs using the ``--pdb`` option::
|
||||
|
||||
datasette --pdb fixtures.db
|
||||
|
||||
For `ipdb <https://pypi.org/project/ipdb/>`__, first run this::
|
||||
|
||||
datasette install ipdb
|
||||
|
||||
.. _contributing_formatting:
|
||||
|
||||
Code formatting
|
||||
|
|
@ -131,15 +126,6 @@ These formatters are enforced by Datasette's continuous integration: if a commit
|
|||
|
||||
When developing locally, you can verify and correct the formatting of your code using these tools.
|
||||
|
||||
If you are using `Just <https://github.com/casey/just>`__ the quickest way to run these is like so::
|
||||
|
||||
just black
|
||||
just prettier
|
||||
|
||||
Or run both at the same time::
|
||||
|
||||
just format
|
||||
|
||||
.. _contributing_formatting_black:
|
||||
|
||||
Running Black
|
||||
|
|
@ -160,7 +146,7 @@ If any of your code does not conform to Black you can run this to automatically
|
|||
|
||||
::
|
||||
|
||||
reformatted ../datasette/app.py
|
||||
reformatted ../datasette/setup.py
|
||||
All done! ✨ 🍰 ✨
|
||||
1 file reformatted, 94 files left unchanged.
|
||||
|
||||
|
|
@ -363,4 +349,4 @@ Datasette bundles `CodeMirror <https://codemirror.net/>`__ for the SQL editing i
|
|||
-p @rollup/plugin-node-resolve \
|
||||
-p @rollup/plugin-terser
|
||||
|
||||
* Update the version reference in the ``codemirror.html`` template.
|
||||
* Update the version reference in the ``codemirror.html`` template.
|
||||
|
|
@ -83,15 +83,6 @@ database column they are representing, for example:
|
|||
</tbody>
|
||||
</table>
|
||||
|
||||
.. _customization_css:
|
||||
|
||||
Writing custom CSS
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Custom templates need to take Datasette's default CSS into account. The pattern portfolio at ``/-/patterns`` (`example here <https://latest.datasette.io/-/patterns>`__) is a useful reference for understanding the available CSS classes.
|
||||
|
||||
The ``core`` class is particularly useful - you can apply this directly to a ``<input>`` or ``<button>`` element to get Datasette's default form styles, or you can apply it to a containing element (such as ``<form>``) to apply those styles to all of the form elements within it.
|
||||
|
||||
.. _customization_static_files:
|
||||
|
||||
Serving static files
|
||||
|
|
|
|||
|
|
@ -79,7 +79,7 @@ Datasette will not be accessible from outside the server because it is listening
|
|||
.. _deploying_openrc:
|
||||
|
||||
Running Datasette using OpenRC
|
||||
==============================
|
||||
===============================
|
||||
OpenRC is the service manager on non-systemd Linux distributions like `Alpine Linux <https://www.alpinelinux.org/>`__ and `Gentoo <https://www.gentoo.org/>`__.
|
||||
|
||||
Create an init script at ``/etc/init.d/datasette`` with the following contents:
|
||||
|
|
|
|||
|
|
@ -1,14 +1,14 @@
|
|||
(events)=
|
||||
# Events
|
||||
.. _events:
|
||||
|
||||
Events
|
||||
======
|
||||
|
||||
Datasette includes a mechanism for tracking events that occur while the software is running. This is primarily intended to be used by plugins, which can both trigger events and listen for events.
|
||||
|
||||
The core Datasette application triggers events when certain things happen. This page describes those events.
|
||||
|
||||
Plugins can listen for events using the {ref}`plugin_hook_track_event` plugin hook, which will be called with instances of the following classes - or additional classes {ref}`registered by other plugins <plugin_hook_register_events>`.
|
||||
Plugins can listen for events using the :ref:`plugin_hook_track_event` plugin hook, which will be called with instances of the following classes - or additional classes :ref:`registered by other plugins <plugin_hook_register_events>`.
|
||||
|
||||
```{eval-rst}
|
||||
.. automodule:: datasette.events
|
||||
:members:
|
||||
:exclude-members: Event
|
||||
```
|
||||
|
|
@ -8,7 +8,7 @@ Play with a live demo
|
|||
|
||||
The best way to experience Datasette for the first time is with a demo:
|
||||
|
||||
* `datasette.io/global-power-plants <https://datasette.io/global-power-plants/global-power-plants>`__ provides a searchable database of power plants around the world, using data from the `World Resources Institute <https://www.wri.org/publication/global-power-plant-database>`__ rendered using the `datasette-cluster-map <https://github.com/simonw/datasette-cluster-map>`__ plugin.
|
||||
* `global-power-plants.datasettes.com <https://global-power-plants.datasettes.com/global-power-plants/global-power-plants>`__ provides a searchable database of power plants around the world, using data from the `World Resources Institute <https://www.wri.org/publication/global-power-plant-database>`__ rendered using the `datasette-cluster-map <https://github.com/simonw/datasette-cluster-map>`__ plugin.
|
||||
* `fivethirtyeight.datasettes.com <https://fivethirtyeight.datasettes.com/fivethirtyeight>`__ shows Datasette running against over 400 datasets imported from the `FiveThirtyEight GitHub repository <https://github.com/fivethirtyeight/data>`__.
|
||||
|
||||
.. _getting_started_tutorial:
|
||||
|
|
@ -33,18 +33,29 @@ You can pass a URL to a CSV, SQLite or raw SQL file directly to Datasette Lite t
|
|||
|
||||
This `example link <https://lite.datasette.io/?url=https%3A%2F%2Fraw.githubusercontent.com%2FNUKnightLab%2Fsql-mysteries%2Fmaster%2Fsql-murder-mystery.db#/sql-murder-mystery>`__ opens Datasette Lite and loads the SQL Murder Mystery example database from `Northwestern University Knight Lab <https://github.com/NUKnightLab/sql-mysteries>`__.
|
||||
|
||||
.. _getting_started_codespaces:
|
||||
.. _getting_started_glitch:
|
||||
|
||||
Try Datasette without installing anything with Codespaces
|
||||
---------------------------------------------------------
|
||||
Try Datasette without installing anything using Glitch
|
||||
------------------------------------------------------
|
||||
|
||||
`GitHub Codespaces <https://github.com/features/codespaces/>`__ offers a free browser-based development environment that lets you run a development server without installing any local software.
|
||||
`Glitch <https://glitch.com/>`__ is a free online tool for building web apps directly from your web browser. You can use Glitch to try out Datasette without needing to install any software on your own computer.
|
||||
|
||||
Here's a demo project on GitHub which you can use as the basis for your own experiments:
|
||||
Here's a demo project on Glitch which you can use as the basis for your own experiments:
|
||||
|
||||
`github.com/datasette/datasette-studio <https://github.com/datasette/datasette-studio>`__
|
||||
`glitch.com/~datasette-csvs <https://glitch.com/~datasette-csvs>`__
|
||||
|
||||
The README file in that repository has instructions on how to get started.
|
||||
Glitch allows you to "remix" any project to create your own copy and start editing it in your browser. You can remix the ``datasette-csvs`` project by clicking this button:
|
||||
|
||||
.. image:: https://cdn.glitch.com/2703baf2-b643-4da7-ab91-7ee2a2d00b5b%2Fremix-button.svg
|
||||
:target: https://glitch.com/edit/#!/remix/datasette-csvs
|
||||
|
||||
Find a CSV file and drag it onto the Glitch file explorer panel - ``datasette-csvs`` will automatically convert it to a SQLite database (using `sqlite-utils <https://github.com/simonw/sqlite-utils>`__) and allow you to start exploring it using Datasette.
|
||||
|
||||
If your CSV file has a ``latitude`` and ``longitude`` column you can visualize it on a map by uncommenting the ``datasette-cluster-map`` line in the ``requirements.txt`` file using the Glitch file editor.
|
||||
|
||||
Need some data? Try this `Public Art Data <https://data.seattle.gov/Community/Public-Art-Data/j7sn-tdzk>`__ for the city of Seattle - hit "Export" and select "CSV" to download it as a CSV file.
|
||||
|
||||
For more on how this works, see `Running Datasette on Glitch <https://simonwillison.net/2019/Apr/23/datasette-glitch/>`__.
|
||||
|
||||
.. _getting_started_your_computer:
|
||||
|
||||
|
|
|
|||
|
|
@ -25,7 +25,7 @@ Datasette is a tool for exploring and publishing data. It helps people take data
|
|||
|
||||
Datasette is aimed at data journalists, museum curators, archivists, local governments and anyone else who has data that they wish to share with the world. It is part of a :ref:`wider ecosystem of tools and plugins <ecosystem>` dedicated to making working with structured data as productive as possible.
|
||||
|
||||
`Explore a demo <https://fivethirtyeight.datasettes.com/fivethirtyeight>`__, watch `a presentation about the project <https://static.simonwillison.net/static/2018/pybay-datasette/>`__.
|
||||
`Explore a demo <https://fivethirtyeight.datasettes.com/fivethirtyeight>`__, watch `a presentation about the project <https://static.simonwillison.net/static/2018/pybay-datasette/>`__ or :ref:`getting_started_glitch`.
|
||||
|
||||
Interested in learning Datasette? Start with `the official tutorials <https://datasette.io/tutorials>`__.
|
||||
|
||||
|
|
|
|||
|
|
@ -4,6 +4,9 @@
|
|||
Installation
|
||||
==============
|
||||
|
||||
.. note::
|
||||
If you just want to try Datasette out you don't need to install anything: see :ref:`getting_started_glitch`
|
||||
|
||||
There are two main options for installing Datasette. You can install it directly on to your machine, or you can install it using Docker.
|
||||
|
||||
If you want to start making contributions to the Datasette project by installing a copy that lets you directly modify the code, take a look at our guide to :ref:`devenvironment`.
|
||||
|
|
@ -54,7 +57,7 @@ If the latest packaged release of Datasette has not yet been made available thro
|
|||
Using pip
|
||||
---------
|
||||
|
||||
Datasette requires Python 3.10 or higher. The `Python.org Python For Beginners <https://www.python.org/about/gettingstarted/>`__ page has instructions for getting started.
|
||||
Datasette requires Python 3.8 or higher. The `Python.org Python For Beginners <https://www.python.org/about/gettingstarted/>`__ page has instructions for getting started.
|
||||
|
||||
You can install Datasette and its dependencies using ``pip``::
|
||||
|
||||
|
|
|
|||
|
|
@ -272,14 +272,14 @@ The dictionary keys are the name of the database that is used in the URL - e.g.
|
|||
|
||||
All databases are listed, irrespective of user permissions.
|
||||
|
||||
.. _datasette_actions:
|
||||
.. _datasette_permissions:
|
||||
|
||||
.actions
|
||||
--------
|
||||
.permissions
|
||||
------------
|
||||
|
||||
Property exposing a dictionary of actions that have been registered using the :ref:`plugin_register_actions` plugin hook.
|
||||
Property exposing a dictionary of permissions that have been registered using the :ref:`plugin_register_permissions` plugin hook.
|
||||
|
||||
The dictionary keys are the action names - e.g. ``view-instance`` - and the values are ``Action()`` objects describing the permission.
|
||||
The dictionary keys are the permission names - e.g. ``view-instance`` - and the values are ``Permission()`` objects describing the permission. Here is a :ref:`description of that object <plugin_register_permissions>`.
|
||||
|
||||
.. _datasette_plugin_config:
|
||||
|
||||
|
|
@ -342,182 +342,10 @@ If no plugins that implement that hook are installed, the default return value l
|
|||
"2": {"id": "2"}
|
||||
}
|
||||
|
||||
.. _datasette_allowed:
|
||||
.. _datasette_permission_allowed:
|
||||
|
||||
await .allowed(\*, action, resource, actor=None)
|
||||
------------------------------------------------
|
||||
|
||||
``action`` - string
|
||||
The name of the action that is being permission checked.
|
||||
|
||||
``resource`` - Resource object
|
||||
A Resource object representing the database, table, or other resource. Must be an instance of a Resource class such as ``TableResource``, ``DatabaseResource``, ``QueryResource``, or ``InstanceResource``.
|
||||
|
||||
``actor`` - dictionary, optional
|
||||
The authenticated actor. This is usually ``request.actor``. Defaults to ``None`` for unauthenticated requests.
|
||||
|
||||
This method checks if the given actor has permission to perform the given action on the given resource. All parameters must be passed as keyword arguments.
|
||||
|
||||
Example usage:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette.resources import (
|
||||
TableResource,
|
||||
DatabaseResource,
|
||||
)
|
||||
|
||||
# Check if actor can view a specific table
|
||||
can_view = await datasette.allowed(
|
||||
action="view-table",
|
||||
resource=TableResource(
|
||||
database="fixtures", table="facetable"
|
||||
),
|
||||
actor=request.actor,
|
||||
)
|
||||
|
||||
# Check if actor can execute SQL on a database
|
||||
can_execute = await datasette.allowed(
|
||||
action="execute-sql",
|
||||
resource=DatabaseResource(database="fixtures"),
|
||||
actor=request.actor,
|
||||
)
|
||||
|
||||
The method returns ``True`` if the permission is granted, ``False`` if denied.
|
||||
|
||||
.. _datasette_allowed_resources:
|
||||
|
||||
await .allowed_resources(action, actor=None, \*, parent=None, include_is_private=False, include_reasons=False, limit=100, next=None)
|
||||
------------------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
Returns a ``PaginatedResources`` object containing resources that the actor can access for the specified action, with support for keyset pagination.
|
||||
|
||||
``action`` - string
|
||||
The action name (e.g., "view-table", "view-database")
|
||||
|
||||
``actor`` - dictionary, optional
|
||||
The authenticated actor. Defaults to ``None`` for unauthenticated requests.
|
||||
|
||||
``parent`` - string, optional
|
||||
Optional parent filter (e.g., database name) to limit results
|
||||
|
||||
``include_is_private`` - boolean, optional
|
||||
If True, adds a ``.private`` attribute to each Resource indicating whether anonymous users can access it
|
||||
|
||||
``include_reasons`` - boolean, optional
|
||||
If True, adds a ``.reasons`` attribute with a list of strings describing why access was granted (useful for debugging)
|
||||
|
||||
``limit`` - integer, optional
|
||||
Maximum number of results to return per page (1-1000, default 100)
|
||||
|
||||
``next`` - string, optional
|
||||
Keyset token from a previous page for pagination
|
||||
|
||||
The method returns a ``PaginatedResources`` object (from ``datasette.utils``) with the following attributes:
|
||||
|
||||
``resources`` - list
|
||||
List of ``Resource`` objects for the current page
|
||||
|
||||
``next`` - string or None
|
||||
Token for the next page, or ``None`` if no more results exist
|
||||
|
||||
Example usage:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# Get first page of tables
|
||||
page = await datasette.allowed_resources(
|
||||
"view-table",
|
||||
actor=request.actor,
|
||||
parent="fixtures",
|
||||
limit=50,
|
||||
)
|
||||
|
||||
for table in page.resources:
|
||||
print(table.parent, table.child)
|
||||
if hasattr(table, "private"):
|
||||
print(f" Private: {table.private}")
|
||||
|
||||
# Get next page if available
|
||||
if page.next:
|
||||
next_page = await datasette.allowed_resources(
|
||||
"view-table", actor=request.actor, next=page.next
|
||||
)
|
||||
|
||||
# Iterate through all results automatically
|
||||
page = await datasette.allowed_resources(
|
||||
"view-table", actor=request.actor
|
||||
)
|
||||
async for table in page.all():
|
||||
print(table.parent, table.child)
|
||||
|
||||
# With reasons for debugging
|
||||
page = await datasette.allowed_resources(
|
||||
"view-table", actor=request.actor, include_reasons=True
|
||||
)
|
||||
for table in page.resources:
|
||||
print(f"{table.child}: {table.reasons}")
|
||||
|
||||
The ``page.all()`` async generator automatically handles pagination, fetching additional pages and yielding all resources one at a time.
|
||||
|
||||
This method uses :ref:`datasette_allowed_resources_sql` under the hood and is an efficient way to list the databases, tables or other resources that an actor can access for a specific action.
|
||||
|
||||
.. _datasette_allowed_resources_sql:
|
||||
|
||||
await .allowed_resources_sql(\*, action, actor=None, parent=None, include_is_private=False)
|
||||
-------------------------------------------------------------------------------------------
|
||||
|
||||
Builds the SQL query that Datasette uses to determine which resources an actor may access for a specific action. Returns a ``(sql: str, params: dict)`` namedtuple that can be executed against the internal ``catalog_*`` database tables. ``parent`` can be used to limit results to a specific database, and ``include_is_private`` adds a column indicating whether anonymous users would be denied access to that resource.
|
||||
|
||||
Plugins that need to execute custom analysis over the raw allow/deny rules can use this helper to run the same query that powers the ``/-/allowed`` debugging interface.
|
||||
|
||||
The SQL query built by this method will return the following columns:
|
||||
|
||||
- ``parent``: The parent resource identifier (or NULL)
|
||||
- ``child``: The child resource identifier (or NULL)
|
||||
- ``reason``: The reason from the rule that granted access
|
||||
- ``is_private``: (if ``include_is_private``) 1 if anonymous users cannot access, 0 otherwise
|
||||
|
||||
.. _datasette_ensure_permission:
|
||||
|
||||
await .ensure_permission(action, resource=None, actor=None)
|
||||
-----------------------------------------------------------
|
||||
|
||||
``action`` - string
|
||||
The action to check. See :ref:`actions` for a list of available actions.
|
||||
|
||||
``resource`` - Resource object (optional)
|
||||
The resource to check the permission against. Must be an instance of ``InstanceResource``, ``DatabaseResource``, or ``TableResource`` from the ``datasette.resources`` module. If omitted, defaults to ``InstanceResource()`` for instance-level permissions.
|
||||
|
||||
``actor`` - dictionary (optional)
|
||||
The authenticated actor. This is usually ``request.actor``.
|
||||
|
||||
This is a convenience wrapper around :ref:`datasette_allowed` that raises a ``datasette.Forbidden`` exception if the permission check fails. Use this when you want to enforce a permission check and halt execution if the actor is not authorized.
|
||||
|
||||
Example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette.resources import TableResource
|
||||
|
||||
# Will raise Forbidden if actor cannot view the table
|
||||
await datasette.ensure_permission(
|
||||
action="view-table",
|
||||
resource=TableResource(
|
||||
database="fixtures", table="cities"
|
||||
),
|
||||
actor=request.actor,
|
||||
)
|
||||
|
||||
# For instance-level actions, resource can be omitted:
|
||||
await datasette.ensure_permission(
|
||||
action="permissions-debug", actor=request.actor
|
||||
)
|
||||
|
||||
.. _datasette_check_visibility:
|
||||
|
||||
await .check_visibility(actor, action, resource=None)
|
||||
-----------------------------------------------------
|
||||
await .permission_allowed(actor, action, resource=None, default=...)
|
||||
--------------------------------------------------------------------
|
||||
|
||||
``actor`` - dictionary
|
||||
The authenticated actor. This is usually ``request.actor``.
|
||||
|
|
@ -525,8 +353,64 @@ await .check_visibility(actor, action, resource=None)
|
|||
``action`` - string
|
||||
The name of the action that is being permission checked.
|
||||
|
||||
``resource`` - Resource object, optional
|
||||
The resource being checked, as a Resource object such as ``DatabaseResource(database=...)``, ``TableResource(database=..., table=...)``, or ``QueryResource(database=..., query=...)``. Only some permissions apply to a resource.
|
||||
``resource`` - string or tuple, optional
|
||||
The resource, e.g. the name of the database, or a tuple of two strings containing the name of the database and the name of the table. Only some permissions apply to a resource.
|
||||
|
||||
``default`` - optional: True, False or None
|
||||
What value should be returned by default if nothing provides an opinion on this permission check.
|
||||
Set to ``True`` for default allow or ``False`` for default deny.
|
||||
If not specified the ``default`` from the ``Permission()`` tuple that was registered using :ref:`plugin_register_permissions` will be used.
|
||||
|
||||
Check if the given actor has :ref:`permission <authentication_permissions>` to perform the given action on the given resource.
|
||||
|
||||
Some permission checks are carried out against :ref:`rules defined in datasette.yaml <authentication_permissions_config>`, while other custom permissions may be decided by plugins that implement the :ref:`plugin_hook_permission_allowed` plugin hook.
|
||||
|
||||
If neither ``metadata.json`` nor any of the plugins provide an answer to the permission query the ``default`` argument will be returned.
|
||||
|
||||
See :ref:`permissions` for a full list of permission actions included in Datasette core.
|
||||
|
||||
.. _datasette_ensure_permissions:
|
||||
|
||||
await .ensure_permissions(actor, permissions)
|
||||
---------------------------------------------
|
||||
|
||||
``actor`` - dictionary
|
||||
The authenticated actor. This is usually ``request.actor``.
|
||||
|
||||
``permissions`` - list
|
||||
A list of permissions to check. Each permission in that list can be a string ``action`` name or a 2-tuple of ``(action, resource)``.
|
||||
|
||||
This method allows multiple permissions to be checked at once. It raises a ``datasette.Forbidden`` exception if any of the checks are denied before one of them is explicitly granted.
|
||||
|
||||
This is useful when you need to check multiple permissions at once. For example, an actor should be able to view a table if either one of the following checks returns ``True`` or not a single one of them returns ``False``:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
await datasette.ensure_permissions(
|
||||
request.actor,
|
||||
[
|
||||
("view-table", (database, table)),
|
||||
("view-database", database),
|
||||
"view-instance",
|
||||
],
|
||||
)
|
||||
|
||||
.. _datasette_check_visibility:
|
||||
|
||||
await .check_visibility(actor, action=None, resource=None, permissions=None)
|
||||
----------------------------------------------------------------------------
|
||||
|
||||
``actor`` - dictionary
|
||||
The authenticated actor. This is usually ``request.actor``.
|
||||
|
||||
``action`` - string, optional
|
||||
The name of the action that is being permission checked.
|
||||
|
||||
``resource`` - string or tuple, optional
|
||||
The resource, e.g. the name of the database, or a tuple of two strings containing the name of the database and the name of the table. Only some permissions apply to a resource.
|
||||
|
||||
``permissions`` - list of ``action`` strings or ``(action, resource)`` tuples, optional
|
||||
Provide this instead of ``action`` and ``resource`` to check multiple permissions at once.
|
||||
|
||||
This convenience method can be used to answer the question "should this item be considered private, in that it is visible to me but it is not visible to anonymous users?"
|
||||
|
||||
|
|
@ -536,12 +420,23 @@ This example checks if the user can access a specific table, and sets ``private`
|
|||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette.resources import TableResource
|
||||
|
||||
visible, private = await datasette.check_visibility(
|
||||
request.actor,
|
||||
action="view-table",
|
||||
resource=TableResource(database=database, table=table),
|
||||
resource=(database, table),
|
||||
)
|
||||
|
||||
The following example runs three checks in a row, similar to :ref:`datasette_ensure_permissions`. If any of the checks are denied before one of them is explicitly granted then ``visible`` will be ``False``. ``private`` will be ``True`` if an anonymous user would not be able to view the resource.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
visible, private = await datasette.check_visibility(
|
||||
request.actor,
|
||||
permissions=[
|
||||
("view-table", (database, table)),
|
||||
("view-database", database),
|
||||
"view-instance",
|
||||
],
|
||||
)
|
||||
|
||||
.. _datasette_create_token:
|
||||
|
|
@ -594,6 +489,16 @@ The following example creates a token that can access ``view-instance`` and ``vi
|
|||
},
|
||||
)
|
||||
|
||||
.. _datasette_get_permission:
|
||||
|
||||
.get_permission(name_or_abbr)
|
||||
-----------------------------
|
||||
|
||||
``name_or_abbr`` - string
|
||||
The name or abbreviation of the permission to look up, e.g. ``view-table`` or ``vt``.
|
||||
|
||||
Returns a :ref:`Permission object <plugin_register_permissions>` representing the permission, or raises a ``KeyError`` if one is not found.
|
||||
|
||||
.. _datasette_get_database:
|
||||
|
||||
.get_database(name)
|
||||
|
|
@ -781,8 +686,8 @@ Use ``is_mutable=False`` to add an immutable database.
|
|||
|
||||
.. _datasette_add_memory_database:
|
||||
|
||||
.add_memory_database(memory_name, name=None, route=None)
|
||||
--------------------------------------------------------
|
||||
.add_memory_database(name)
|
||||
--------------------------
|
||||
|
||||
Adds a shared in-memory database with the specified name:
|
||||
|
||||
|
|
@ -800,9 +705,7 @@ This is a shortcut for the following:
|
|||
Database(datasette, memory_name="statistics")
|
||||
)
|
||||
|
||||
Using either of these patterns will result in the in-memory database being served at ``/statistics``.
|
||||
|
||||
The ``name`` and ``route`` parameters are optional and work the same way as they do for :ref:`datasette_add_database`.
|
||||
Using either of these pattern will result in the in-memory database being served at ``/statistics``.
|
||||
|
||||
.. _datasette_remove_database:
|
||||
|
||||
|
|
@ -1047,60 +950,6 @@ These methods can be used with :ref:`internals_datasette_urls` - for example:
|
|||
|
||||
For documentation on available ``**kwargs`` options and the shape of the HTTPX Response object refer to the `HTTPX Async documentation <https://www.python-httpx.org/async/>`__.
|
||||
|
||||
Bypassing permission checks
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
All ``datasette.client`` methods accept an optional ``skip_permission_checks=True`` parameter. When set, all permission checks will be bypassed for that request, allowing access to any resource regardless of the configured permissions.
|
||||
|
||||
This is useful for plugins and internal operations that need to access all resources without being subject to permission restrictions.
|
||||
|
||||
Example usage:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# Regular request - respects permissions
|
||||
response = await datasette.client.get(
|
||||
"/private-db/secret-table.json"
|
||||
)
|
||||
# May return 403 Forbidden if access is denied
|
||||
|
||||
# With skip_permission_checks - bypasses all permission checks
|
||||
response = await datasette.client.get(
|
||||
"/private-db/secret-table.json",
|
||||
skip_permission_checks=True,
|
||||
)
|
||||
# Will return 200 OK and the data, regardless of permissions
|
||||
|
||||
This parameter works with all HTTP methods (``get``, ``post``, ``put``, ``patch``, ``delete``, ``options``, ``head``) and the generic ``request`` method.
|
||||
|
||||
.. warning::
|
||||
|
||||
Use ``skip_permission_checks=True`` with caution. It completely bypasses Datasette's permission system and should only be used in trusted plugin code or internal operations where you need guaranteed access to resources.
|
||||
|
||||
.. _internals_datasette_is_client:
|
||||
|
||||
Detecting internal client requests
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
``datasette.in_client()`` - returns bool
|
||||
Returns ``True`` if the current code is executing within a ``datasette.client`` request, ``False`` otherwise.
|
||||
|
||||
This method is useful for plugins that need to behave differently when called through ``datasette.client`` versus when handling external HTTP requests.
|
||||
|
||||
Example usage:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
async def fetch_documents(datasette):
|
||||
if not datasette.in_client():
|
||||
return Response.text(
|
||||
"Only available via internal client requests",
|
||||
status=403,
|
||||
)
|
||||
...
|
||||
|
||||
Note that ``datasette.in_client()`` is independent of ``skip_permission_checks``. A request made through ``datasette.client`` will always have ``in_client()`` return ``True``, regardless of whether ``skip_permission_checks`` is set.
|
||||
|
||||
.. _internals_datasette_urls:
|
||||
|
||||
datasette.urls
|
||||
|
|
@ -1152,132 +1001,6 @@ Use the ``format="json"`` (or ``"csv"`` or other formats supported by plugins) a
|
|||
|
||||
These methods each return a ``datasette.utils.PrefixedUrlString`` object, which is a subclass of the Python ``str`` type. This allows the logic that considers the ``base_url`` setting to detect if that prefix has already been applied to the path.
|
||||
|
||||
.. _internals_permission_classes:
|
||||
|
||||
Permission classes and utilities
|
||||
================================
|
||||
|
||||
.. _internals_permission_sql:
|
||||
|
||||
PermissionSQL class
|
||||
-------------------
|
||||
|
||||
The ``PermissionSQL`` class is used by plugins to contribute SQL-based permission rules through the :ref:`plugin_hook_permission_resources_sql` hook. This enables efficient permission checking across multiple resources by leveraging SQLite's query engine.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette.permissions import PermissionSQL
|
||||
|
||||
|
||||
@dataclass
|
||||
class PermissionSQL:
|
||||
source: str # Plugin name for auditing
|
||||
sql: str # SQL query returning permission rules
|
||||
params: Dict[str, Any] # Parameters for the SQL query
|
||||
|
||||
**Attributes:**
|
||||
|
||||
``source`` - string
|
||||
An identifier for the source of these permission rules, typically the plugin name. This is used for debugging and auditing.
|
||||
|
||||
``sql`` - string
|
||||
A SQL query that returns permission rules. The query must return rows with the following columns:
|
||||
|
||||
- ``parent`` (TEXT or NULL) - The parent resource identifier (e.g., database name)
|
||||
- ``child`` (TEXT or NULL) - The child resource identifier (e.g., table name)
|
||||
- ``allow`` (INTEGER) - 1 for allow, 0 for deny
|
||||
- ``reason`` (TEXT) - A human-readable explanation of why this permission was granted or denied
|
||||
|
||||
``params`` - dictionary
|
||||
A dictionary of parameters to bind into the SQL query. Parameter names should not include the ``:`` prefix.
|
||||
|
||||
.. _permission_sql_parameters:
|
||||
|
||||
Available SQL parameters
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
When writing SQL for ``PermissionSQL``, the following parameters are automatically available:
|
||||
|
||||
``:actor`` - JSON string or NULL
|
||||
The full actor dictionary serialized as JSON. Use SQLite's ``json_extract()`` function to access fields:
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
json_extract(:actor, '$.role') = 'admin'
|
||||
json_extract(:actor, '$.team') = 'engineering'
|
||||
|
||||
``:actor_id`` - string or NULL
|
||||
The actor's ``id`` field, for simple equality comparisons:
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
:actor_id = 'alice'
|
||||
|
||||
``:action`` - string
|
||||
The action being checked (e.g., ``"view-table"``, ``"insert-row"``, ``"execute-sql"``).
|
||||
|
||||
**Example usage:**
|
||||
|
||||
Here's an example plugin that grants view-table permissions to users with an "analyst" role for tables in the "analytics" database:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette import hookimpl
|
||||
from datasette.permissions import PermissionSQL
|
||||
|
||||
|
||||
@hookimpl
|
||||
def permission_resources_sql(datasette, actor, action):
|
||||
if action != "view-table":
|
||||
return None
|
||||
|
||||
return PermissionSQL(
|
||||
source="my_analytics_plugin",
|
||||
sql="""
|
||||
SELECT 'analytics' AS parent,
|
||||
NULL AS child,
|
||||
1 AS allow,
|
||||
'Analysts can view analytics database' AS reason
|
||||
WHERE json_extract(:actor, '$.role') = 'analyst'
|
||||
AND :action = 'view-table'
|
||||
""",
|
||||
params={},
|
||||
)
|
||||
|
||||
A more complex example that uses custom parameters:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
@hookimpl
|
||||
def permission_resources_sql(datasette, actor, action):
|
||||
if not actor:
|
||||
return None
|
||||
|
||||
user_teams = actor.get("teams", [])
|
||||
|
||||
return PermissionSQL(
|
||||
source="team_permissions_plugin",
|
||||
sql="""
|
||||
SELECT
|
||||
team_database AS parent,
|
||||
team_table AS child,
|
||||
1 AS allow,
|
||||
'User is member of team: ' || team_name AS reason
|
||||
FROM team_permissions
|
||||
WHERE user_id = :user_id
|
||||
AND :action IN ('view-table', 'insert-row', 'update-row')
|
||||
""",
|
||||
params={"user_id": actor.get("id")},
|
||||
)
|
||||
|
||||
**Permission resolution rules:**
|
||||
|
||||
When multiple ``PermissionSQL`` objects return conflicting rules for the same resource, Datasette applies the following precedence:
|
||||
|
||||
1. **Specificity**: Child-level rules (with both ``parent`` and ``child``) override parent-level rules (with only ``parent``), which override root-level rules (with neither ``parent`` nor ``child``)
|
||||
2. **Deny over allow**: At the same specificity level, deny (``allow=0``) takes precedence over allow (``allow=1``)
|
||||
3. **Implicit deny**: If no rules match a resource, access is denied by default
|
||||
|
||||
.. _internals_database:
|
||||
|
||||
Database class
|
||||
|
|
@ -1370,9 +1093,6 @@ The ``Results`` object also has the following properties and methods:
|
|||
``.rows`` - list of ``sqlite3.Row``
|
||||
This property provides direct access to the list of rows returned by the database. You can access specific rows by index using ``results.rows[0]``.
|
||||
|
||||
``.dicts()`` - list of ``dict``
|
||||
This method returns a list of Python dictionaries, one for each row.
|
||||
|
||||
``.first()`` - row or None
|
||||
Returns the first row in the results, or ``None`` if no rows were returned.
|
||||
|
||||
|
|
@ -1404,7 +1124,7 @@ Example usage:
|
|||
.. _database_execute_write:
|
||||
|
||||
await db.execute_write(sql, params=None, block=True)
|
||||
----------------------------------------------------
|
||||
-----------------------------------------------------
|
||||
|
||||
SQLite only allows one database connection to write at a time. Datasette handles this for you by maintaining a queue of writes to be executed against a given database. Plugins can submit write operations to this queue and they will be executed in the order in which they are received.
|
||||
|
||||
|
|
@ -1421,7 +1141,7 @@ Each call to ``execute_write()`` will be executed inside a transaction.
|
|||
.. _database_execute_write_script:
|
||||
|
||||
await db.execute_write_script(sql, block=True)
|
||||
----------------------------------------------
|
||||
-----------------------------------------------
|
||||
|
||||
Like ``execute_write()`` but can be used to send multiple SQL statements in a single string separated by semicolons, using the ``sqlite3`` `conn.executescript() <https://docs.python.org/3/library/sqlite3.html#sqlite3.Cursor.executescript>`__ method.
|
||||
|
||||
|
|
@ -1430,7 +1150,7 @@ Each call to ``execute_write_script()`` will be executed inside a transaction.
|
|||
.. _database_execute_write_many:
|
||||
|
||||
await db.execute_write_many(sql, params_seq, block=True)
|
||||
--------------------------------------------------------
|
||||
---------------------------------------------------------
|
||||
|
||||
Like ``execute_write()`` but uses the ``sqlite3`` `conn.executemany() <https://docs.python.org/3/library/sqlite3.html#sqlite3.Cursor.executemany>`__ method. This will efficiently execute the same SQL statement against each of the parameters in the ``params_seq`` iterator, for example:
|
||||
|
||||
|
|
@ -1571,64 +1291,27 @@ The ``Database`` class also provides properties and methods for introspecting th
|
|||
Returns the SQL definition of the named view.
|
||||
|
||||
``await db.get_all_foreign_keys()`` - dictionary
|
||||
Dictionary representing both incoming and outgoing foreign keys for every table in this database. Each key is a table name that points to a dictionary with two keys, ``"incoming"`` and ``"outgoing"``, each of which is a list of dictionaries with keys ``"column"``, ``"other_table"`` and ``"other_column"``. For example:
|
||||
Dictionary representing both incoming and outgoing foreign keys for this table. It has two keys, ``"incoming"`` and ``"outgoing"``, each of which is a list of dictionaries with keys ``"column"``, ``"other_table"`` and ``"other_column"``. For example:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"documents": {
|
||||
"incoming": [
|
||||
{
|
||||
"other_table": "pages",
|
||||
"column": "id",
|
||||
"other_column": "document_id"
|
||||
}
|
||||
],
|
||||
"outgoing": []
|
||||
},
|
||||
"pages": {
|
||||
"incoming": [
|
||||
{
|
||||
"other_table": "organization_pages",
|
||||
"column": "id",
|
||||
"other_column": "page_id"
|
||||
}
|
||||
],
|
||||
"outgoing": [
|
||||
{
|
||||
"other_table": "documents",
|
||||
"column": "document_id",
|
||||
"other_column": "id"
|
||||
}
|
||||
]
|
||||
},
|
||||
"organization": {
|
||||
"incoming": [
|
||||
{
|
||||
"other_table": "organization_pages",
|
||||
"column": "id",
|
||||
"other_column": "organization_id"
|
||||
}
|
||||
],
|
||||
"outgoing": []
|
||||
},
|
||||
"organization_pages": {
|
||||
"incoming": [],
|
||||
"outgoing": [
|
||||
{
|
||||
"other_table": "pages",
|
||||
"column": "page_id",
|
||||
"other_column": "id"
|
||||
},
|
||||
{
|
||||
"other_table": "organization",
|
||||
"column": "organization_id",
|
||||
"other_column": "id"
|
||||
}
|
||||
{
|
||||
"other_table": "attraction_characteristic",
|
||||
"column": "characteristic_id",
|
||||
"other_column": "pk",
|
||||
},
|
||||
{
|
||||
"other_table": "roadside_attractions",
|
||||
"column": "attraction_id",
|
||||
"other_column": "pk",
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
.. _internals_csrf:
|
||||
|
||||
CSRF protection
|
||||
|
|
@ -1655,7 +1338,7 @@ Datasette's internal database
|
|||
|
||||
Datasette maintains an "internal" SQLite database used for configuration, caching, and storage. Plugins can store configuration, settings, and other data inside this database. By default, Datasette will use a temporary in-memory SQLite database as the internal database, which is created at startup and destroyed at shutdown. Users of Datasette can optionally pass in a ``--internal`` flag to specify the path to a SQLite database to use as the internal database, which will persist internal data across Datasette instances.
|
||||
|
||||
Datasette maintains tables called ``catalog_databases``, ``catalog_tables``, ``catalog_views``, ``catalog_columns``, ``catalog_indexes``, ``catalog_foreign_keys`` with details of the attached databases and their schemas. These tables should not be considered a stable API - they may change between Datasette releases.
|
||||
Datasette maintains tables called ``catalog_databases``, ``catalog_tables``, ``catalog_columns``, ``catalog_indexes``, ``catalog_foreign_keys`` with details of the attached databases and their schemas. These tables should not be considered a stable API - they may change between Datasette releases.
|
||||
|
||||
Metadata is stored in tables ``metadata_instance``, ``metadata_databases``, ``metadata_resources`` and ``metadata_columns``. Plugins can interact with these tables via the :ref:`get_*_metadata() and set_*_metadata() methods <datasette_get_set_metadata>`.
|
||||
|
||||
|
|
@ -1696,15 +1379,7 @@ The internal database schema is as follows:
|
|||
rootpage INTEGER,
|
||||
sql TEXT,
|
||||
PRIMARY KEY (database_name, table_name),
|
||||
FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name)
|
||||
);
|
||||
CREATE TABLE catalog_views (
|
||||
database_name TEXT,
|
||||
view_name TEXT,
|
||||
rootpage INTEGER,
|
||||
sql TEXT,
|
||||
PRIMARY KEY (database_name, view_name),
|
||||
FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name)
|
||||
FOREIGN KEY (database_name) REFERENCES databases(database_name)
|
||||
);
|
||||
CREATE TABLE catalog_columns (
|
||||
database_name TEXT,
|
||||
|
|
@ -1717,8 +1392,8 @@ The internal database schema is as follows:
|
|||
is_pk INTEGER, -- renamed from pk
|
||||
hidden INTEGER,
|
||||
PRIMARY KEY (database_name, table_name, name),
|
||||
FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name),
|
||||
FOREIGN KEY (database_name, table_name) REFERENCES catalog_tables(database_name, table_name)
|
||||
FOREIGN KEY (database_name) REFERENCES databases(database_name),
|
||||
FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name)
|
||||
);
|
||||
CREATE TABLE catalog_indexes (
|
||||
database_name TEXT,
|
||||
|
|
@ -1729,8 +1404,8 @@ The internal database schema is as follows:
|
|||
origin TEXT,
|
||||
partial INTEGER,
|
||||
PRIMARY KEY (database_name, table_name, name),
|
||||
FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name),
|
||||
FOREIGN KEY (database_name, table_name) REFERENCES catalog_tables(database_name, table_name)
|
||||
FOREIGN KEY (database_name) REFERENCES databases(database_name),
|
||||
FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name)
|
||||
);
|
||||
CREATE TABLE catalog_foreign_keys (
|
||||
database_name TEXT,
|
||||
|
|
@ -1744,8 +1419,8 @@ The internal database schema is as follows:
|
|||
on_delete TEXT,
|
||||
match TEXT,
|
||||
PRIMARY KEY (database_name, table_name, id, seq),
|
||||
FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name),
|
||||
FOREIGN KEY (database_name, table_name) REFERENCES catalog_tables(database_name, table_name)
|
||||
FOREIGN KEY (database_name) REFERENCES databases(database_name),
|
||||
FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name)
|
||||
);
|
||||
CREATE TABLE metadata_instance (
|
||||
key text,
|
||||
|
|
|
|||
|
|
@ -144,47 +144,6 @@ Shows currently attached databases. `Databases example <https://latest.datasette
|
|||
}
|
||||
]
|
||||
|
||||
.. _TablesView:
|
||||
|
||||
/-/tables
|
||||
---------
|
||||
|
||||
Returns a JSON list of all tables that the current actor has permission to view. This endpoint uses the resource-based permission system and respects database and table-level access controls.
|
||||
|
||||
The endpoint supports a ``?q=`` query parameter for filtering tables by name using case-insensitive regex matching.
|
||||
|
||||
`Tables example <https://latest.datasette.io/-/tables>`_:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"matches": [
|
||||
{
|
||||
"name": "fixtures/facetable",
|
||||
"url": "/fixtures/facetable"
|
||||
},
|
||||
{
|
||||
"name": "fixtures/searchable",
|
||||
"url": "/fixtures/searchable"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Search example with ``?q=facet`` returns only tables matching ``.*facet.*``:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"matches": [
|
||||
{
|
||||
"name": "fixtures/facetable",
|
||||
"url": "/fixtures/facetable"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
When multiple search terms are provided (e.g., ``?q=user+profile``), tables must match the pattern ``.*user.*profile.*``. Results are ordered by shortest table name first.
|
||||
|
||||
.. _JsonDataView_threads:
|
||||
|
||||
/-/threads
|
||||
|
|
|
|||
|
|
@ -347,7 +347,7 @@ Special table arguments
|
|||
though this could potentially result in errors if the wrong syntax is used.
|
||||
|
||||
``?_where=SQL-fragment``
|
||||
If the :ref:`actions_execute_sql` permission is enabled, this parameter
|
||||
If the :ref:`permissions_execute_sql` permission is enabled, this parameter
|
||||
can be used to pass one or more additional SQL fragments to be used in the
|
||||
`WHERE` clause of the SQL used to query the table.
|
||||
|
||||
|
|
@ -457,7 +457,7 @@ You can find this near the top of the source code of those pages, looking like t
|
|||
|
||||
The JSON URL is also made available in a ``Link`` HTTP header for the page::
|
||||
|
||||
Link: <https://latest.datasette.io/fixtures/sortable.json>; rel="alternate"; type="application/json+datasette"
|
||||
Link: https://latest.datasette.io/fixtures/sortable.json; rel="alternate"; type="application/json+datasette"
|
||||
|
||||
.. _json_api_cors:
|
||||
|
||||
|
|
@ -510,7 +510,7 @@ Datasette provides a write API for JSON data. This is a POST-only API that requi
|
|||
Inserting rows
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
This requires the :ref:`actions_insert_row` permission.
|
||||
This requires the :ref:`permissions_insert_row` permission.
|
||||
|
||||
A single row can be inserted using the ``"row"`` key:
|
||||
|
||||
|
|
@ -621,9 +621,9 @@ Pass ``"ignore": true`` to ignore these errors and insert the other rows:
|
|||
"ignore": true
|
||||
}
|
||||
|
||||
Or you can pass ``"replace": true`` to replace any rows with conflicting primary keys with the new values. This requires the :ref:`actions_update_row` permission.
|
||||
Or you can pass ``"replace": true`` to replace any rows with conflicting primary keys with the new values. This requires the :ref:`permissions_update_row` permission.
|
||||
|
||||
Pass ``"alter: true`` to automatically add any missing columns to the table. This requires the :ref:`actions_alter_table` permission.
|
||||
Pass ``"alter: true`` to automatically add any missing columns to the table. This requires the :ref:`permissions_alter_table` permission.
|
||||
|
||||
.. _TableUpsertView:
|
||||
|
||||
|
|
@ -632,7 +632,7 @@ Upserting rows
|
|||
|
||||
An upsert is an insert or update operation. If a row with a matching primary key already exists it will be updated - otherwise a new row will be inserted.
|
||||
|
||||
The upsert API is mostly the same shape as the :ref:`insert API <TableInsertView>`. It requires both the :ref:`actions_insert_row` and :ref:`actions_update_row` permissions.
|
||||
The upsert API is mostly the same shape as the :ref:`insert API <TableInsertView>`. It requires both the :ref:`permissions_insert_row` and :ref:`permissions_update_row` permissions.
|
||||
|
||||
::
|
||||
|
||||
|
|
@ -735,14 +735,14 @@ When using upsert you must provide the primary key column (or columns if the tab
|
|||
|
||||
If your table does not have an explicit primary key you should pass the SQLite ``rowid`` key instead.
|
||||
|
||||
Pass ``"alter: true`` to automatically add any missing columns to the table. This requires the :ref:`actions_alter_table` permission.
|
||||
Pass ``"alter: true`` to automatically add any missing columns to the table. This requires the :ref:`permissions_alter_table` permission.
|
||||
|
||||
.. _RowUpdateView:
|
||||
|
||||
Updating a row
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
To update a row, make a ``POST`` to ``/<database>/<table>/<row-pks>/-/update``. This requires the :ref:`actions_update_row` permission.
|
||||
To update a row, make a ``POST`` to ``/<database>/<table>/<row-pks>/-/update``. This requires the :ref:`permissions_update_row` permission.
|
||||
|
||||
::
|
||||
|
||||
|
|
@ -792,14 +792,14 @@ The returned JSON will look like this:
|
|||
|
||||
Any errors will return ``{"errors": ["... descriptive message ..."], "ok": false}``, and a ``400`` status code for a bad input or a ``403`` status code for an authentication or permission error.
|
||||
|
||||
Pass ``"alter: true`` to automatically add any missing columns to the table. This requires the :ref:`actions_alter_table` permission.
|
||||
Pass ``"alter: true`` to automatically add any missing columns to the table. This requires the :ref:`permissions_alter_table` permission.
|
||||
|
||||
.. _RowDeleteView:
|
||||
|
||||
Deleting a row
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
To delete a row, make a ``POST`` to ``/<database>/<table>/<row-pks>/-/delete``. This requires the :ref:`actions_delete_row` permission.
|
||||
To delete a row, make a ``POST`` to ``/<database>/<table>/<row-pks>/-/delete``. This requires the :ref:`permissions_delete_row` permission.
|
||||
|
||||
::
|
||||
|
||||
|
|
@ -818,7 +818,7 @@ Any errors will return ``{"errors": ["... descriptive message ..."], "ok": false
|
|||
Creating a table
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
To create a table, make a ``POST`` to ``/<database>/-/create``. This requires the :ref:`actions_create_table` permission.
|
||||
To create a table, make a ``POST`` to ``/<database>/-/create``. This requires the :ref:`permissions_create_table` permission.
|
||||
|
||||
::
|
||||
|
||||
|
|
@ -859,8 +859,8 @@ The JSON here describes the table that will be created:
|
|||
|
||||
* ``pks`` can be used instead of ``pk`` to create a compound primary key. It should be a JSON list of column names to use in that primary key.
|
||||
* ``ignore`` can be set to ``true`` to ignore existing rows by primary key if the table already exists.
|
||||
* ``replace`` can be set to ``true`` to replace existing rows by primary key if the table already exists. This requires the :ref:`actions_update_row` permission.
|
||||
* ``alter`` can be set to ``true`` if you want to automatically add any missing columns to the table. This requires the :ref:`actions_alter_table` permission.
|
||||
* ``replace`` can be set to ``true`` to replace existing rows by primary key if the table already exists. This requires the :ref:`permissions_update_row` permission.
|
||||
* ``alter`` can be set to ``true`` if you want to automatically add any missing columns to the table. This requires the :ref:`permissions_alter_table` permission.
|
||||
|
||||
If the table is successfully created this will return a ``201`` status code and the following response:
|
||||
|
||||
|
|
@ -906,7 +906,7 @@ Datasette will create a table with a schema that matches those rows and insert t
|
|||
"pk": "id"
|
||||
}
|
||||
|
||||
Doing this requires both the :ref:`actions_create_table` and :ref:`actions_insert_row` permissions.
|
||||
Doing this requires both the :ref:`permissions_create_table` and :ref:`permissions_insert_row` permissions.
|
||||
|
||||
The ``201`` response here will be similar to the ``columns`` form, but will also include the number of rows that were inserted as ``row_count``:
|
||||
|
||||
|
|
@ -937,16 +937,16 @@ If you pass a row to the create endpoint with a primary key that already exists
|
|||
|
||||
You can avoid this error by passing the same ``"ignore": true`` or ``"replace": true`` options to the create endpoint as you can to the :ref:`insert endpoint <TableInsertView>`.
|
||||
|
||||
To use the ``"replace": true`` option you will also need the :ref:`actions_update_row` permission.
|
||||
To use the ``"replace": true`` option you will also need the :ref:`permissions_update_row` permission.
|
||||
|
||||
Pass ``"alter": true`` to automatically add any missing columns to the existing table that are present in the rows you are submitting. This requires the :ref:`actions_alter_table` permission.
|
||||
Pass ``"alter": true`` to automatically add any missing columns to the existing table that are present in the rows you are submitting. This requires the :ref:`permissions_alter_table` permission.
|
||||
|
||||
.. _TableDropView:
|
||||
|
||||
Dropping tables
|
||||
~~~~~~~~~~~~~~~
|
||||
|
||||
To drop a table, make a ``POST`` to ``/<database>/<table>/-/drop``. This requires the :ref:`actions_drop_table` permission.
|
||||
To drop a table, make a ``POST`` to ``/<database>/<table>/-/drop``. This requires the :ref:`permissions_drop_table` permission.
|
||||
|
||||
::
|
||||
|
||||
|
|
|
|||
|
|
@ -205,6 +205,100 @@ These will be displayed at the top of the table page, and will also show in the
|
|||
|
||||
You can see an example of how these look at `latest.datasette.io/fixtures/roadside_attractions <https://latest.datasette.io/fixtures/roadside_attractions>`__.
|
||||
|
||||
Specifying units for a column
|
||||
-----------------------------
|
||||
|
||||
Datasette supports attaching units to a column, which will be used when displaying
|
||||
values from that column. SI prefixes will be used where appropriate.
|
||||
|
||||
Column units are configured in the metadata like so:
|
||||
|
||||
.. [[[cog
|
||||
metadata_example(cog, {
|
||||
"databases": {
|
||||
"database1": {
|
||||
"tables": {
|
||||
"example_table": {
|
||||
"units": {
|
||||
"column1": "metres",
|
||||
"column2": "Hz"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
databases:
|
||||
database1:
|
||||
tables:
|
||||
example_table:
|
||||
units:
|
||||
column1: metres
|
||||
column2: Hz
|
||||
|
||||
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"databases": {
|
||||
"database1": {
|
||||
"tables": {
|
||||
"example_table": {
|
||||
"units": {
|
||||
"column1": "metres",
|
||||
"column2": "Hz"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.. [[[end]]]
|
||||
|
||||
|
||||
Units are interpreted using Pint_, and you can see the full list of available units in
|
||||
Pint's `unit registry`_. You can also add `custom units`_ to the metadata, which will be
|
||||
registered with Pint:
|
||||
|
||||
.. [[[cog
|
||||
metadata_example(cog, {
|
||||
"custom_units": [
|
||||
"decibel = [] = dB"
|
||||
]
|
||||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
custom_units:
|
||||
- decibel = [] = dB
|
||||
|
||||
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"custom_units": [
|
||||
"decibel = [] = dB"
|
||||
]
|
||||
}
|
||||
.. [[[end]]]
|
||||
|
||||
.. _Pint: https://pint.readthedocs.io/
|
||||
.. _unit registry: https://github.com/hgrecco/pint/blob/master/pint/default_en.txt
|
||||
.. _custom units: http://pint.readthedocs.io/en/latest/defining.html
|
||||
|
||||
.. _metadata_default_sort:
|
||||
|
||||
Setting a default sort order
|
||||
|
|
|
|||
|
|
@ -14,31 +14,31 @@ Top-level index
|
|||
The root page of any Datasette installation is an index page that lists all of the currently attached databases. Some examples:
|
||||
|
||||
* `fivethirtyeight.datasettes.com <https://fivethirtyeight.datasettes.com/>`_
|
||||
* `global-power-plants.datasettes.com <https://global-power-plants.datasettes.com/>`_
|
||||
* `register-of-members-interests.datasettes.com <https://register-of-members-interests.datasettes.com/>`_
|
||||
|
||||
Add ``/.json`` to the end of the URL for the JSON version of the underlying data:
|
||||
|
||||
* `fivethirtyeight.datasettes.com/.json <https://fivethirtyeight.datasettes.com/.json>`_
|
||||
* `global-power-plants.datasettes.com/.json <https://global-power-plants.datasettes.com/.json>`_
|
||||
* `register-of-members-interests.datasettes.com/.json <https://register-of-members-interests.datasettes.com/.json>`_
|
||||
|
||||
The index page can also be accessed at ``/-/``, useful for if the default index page has been replaced using an :ref:`index.html custom template <customization_custom_templates>`. The ``/-/`` page will always render the default Datasette ``index.html`` template.
|
||||
|
||||
.. _DatabaseView:
|
||||
|
||||
Database
|
||||
========
|
||||
|
||||
Each database has a page listing the tables, views and canned queries available for that database. If the :ref:`actions_execute_sql` permission is enabled (it's on by default) there will also be an interface for executing arbitrary SQL select queries against the data.
|
||||
Each database has a page listing the tables, views and canned queries available for that database. If the :ref:`permissions_execute_sql` permission is enabled (it's on by default) there will also be an interface for executing arbitrary SQL select queries against the data.
|
||||
|
||||
Examples:
|
||||
|
||||
* `fivethirtyeight.datasettes.com/fivethirtyeight <https://fivethirtyeight.datasettes.com/fivethirtyeight>`_
|
||||
* `datasette.io/global-power-plants <https://datasette.io/global-power-plants>`_
|
||||
* `global-power-plants.datasettes.com/global-power-plants <https://global-power-plants.datasettes.com/global-power-plants>`_
|
||||
|
||||
The JSON version of this page provides programmatic access to the underlying data:
|
||||
|
||||
* `fivethirtyeight.datasettes.com/fivethirtyeight.json <https://fivethirtyeight.datasettes.com/fivethirtyeight.json>`_
|
||||
* `datasette.io/global-power-plants.json <https://datasette.io/global-power-plants.json>`_
|
||||
* `global-power-plants.datasettes.com/global-power-plants.json <https://global-power-plants.datasettes.com/global-power-plants.json>`_
|
||||
|
||||
.. _DatabaseView_hidden:
|
||||
|
||||
|
|
@ -60,7 +60,7 @@ The following tables are hidden by default:
|
|||
Queries
|
||||
=======
|
||||
|
||||
The ``/database-name/-/query`` page can be used to execute an arbitrary SQL query against that database, if the :ref:`actions_execute_sql` permission is enabled. This query is passed as the ``?sql=`` query string parameter.
|
||||
The ``/database-name/-/query`` page can be used to execute an arbitrary SQL query against that database, if the :ref:`permissions_execute_sql` permission is enabled. This query is passed as the ``?sql=`` query string parameter.
|
||||
|
||||
This means you can link directly to a query by constructing the following URL:
|
||||
|
||||
|
|
@ -87,7 +87,7 @@ Some examples:
|
|||
|
||||
* `../items <https://register-of-members-interests.datasettes.com/regmem/items>`_ lists all of the line-items registered by UK MPs as potential conflicts of interest. It demonstrates Datasette's support for :ref:`full_text_search`.
|
||||
* `../antiquities-act%2Factions_under_antiquities_act <https://fivethirtyeight.datasettes.com/fivethirtyeight/antiquities-act%2Factions_under_antiquities_act>`_ is an interface for exploring the "actions under the antiquities act" data table published by FiveThirtyEight.
|
||||
* `../global-power-plants?country_long=United+Kingdom&primary_fuel=Gas <https://datasette.io/global-power-plants/global-power-plants?_facet=primary_fuel&_facet=owner&_facet=country_long&country_long__exact=United+Kingdom&primary_fuel=Gas>`_ is a filtered table page showing every Gas power plant in the United Kingdom. It includes some default facets (configured using `its metadata.json <https://datasette.io/-/metadata>`_) and uses the `datasette-cluster-map <https://github.com/simonw/datasette-cluster-map>`_ plugin to show a map of the results.
|
||||
* `../global-power-plants?country_long=United+Kingdom&primary_fuel=Gas <https://global-power-plants.datasettes.com/global-power-plants/global-power-plants?_facet=primary_fuel&_facet=owner&_facet=country_long&country_long__exact=United+Kingdom&primary_fuel=Gas>`_ is a filtered table page showing every Gas power plant in the United Kingdom. It includes some default facets (configured using `its metadata.json <https://global-power-plants.datasettes.com/-/metadata>`_) and uses the `datasette-cluster-map <https://github.com/simonw/datasette-cluster-map>`_ plugin to show a map of the results.
|
||||
|
||||
.. _RowView:
|
||||
|
||||
|
|
@ -107,46 +107,3 @@ Note that this URL includes the encoded primary key of the record.
|
|||
Here's that same page as JSON:
|
||||
|
||||
`../people/uk~2Eorg~2Epublicwhip~2Fperson~2F10001.json <https://register-of-members-interests.datasettes.com/regmem/people/uk~2Eorg~2Epublicwhip~2Fperson~2F10001.json>`_
|
||||
|
||||
|
||||
.. _pages_schemas:
|
||||
|
||||
Schemas
|
||||
=======
|
||||
|
||||
Datasette offers ``/-/schema`` endpoints to expose the SQL schema for databases and tables.
|
||||
|
||||
.. _InstanceSchemaView:
|
||||
|
||||
Instance schema
|
||||
---------------
|
||||
|
||||
Access ``/-/schema`` to see the complete schema for all attached databases in the Datasette instance.
|
||||
|
||||
Use ``/-/schema.md`` to get the same information as Markdown.
|
||||
|
||||
Use ``/-/schema.json`` to get the same information as JSON, which looks like this:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"schemas": [
|
||||
{
|
||||
"database": "content",
|
||||
"schema": "create table posts ..."
|
||||
}
|
||||
}
|
||||
|
||||
.. _DatabaseSchemaView:
|
||||
|
||||
Database schema
|
||||
---------------
|
||||
|
||||
Use ``/database-name/-/schema`` to see the complete schema for a specific database. The ``.md`` and ``.json`` extensions work here too. The JSON returns an object with ``"database"`` and ``"schema"`` keys.
|
||||
|
||||
.. _TableSchemaView:
|
||||
|
||||
Table schema
|
||||
------------
|
||||
|
||||
Use ``/database-name/table-name/-/schema`` to see the schema for a specific table. The ``.md`` and ``.json`` extensions work here too. The JSON returns an object with ``"database"``, ``"table"``, and ``"schema"`` keys.
|
||||
|
|
|
|||
|
|
@ -57,8 +57,6 @@ arguments and can be called like this::
|
|||
|
||||
select random_integer(1, 10);
|
||||
|
||||
``prepare_connection()`` hooks are not called for Datasette's :ref:`internal database <internals_internal>`.
|
||||
|
||||
Examples: `datasette-jellyfish <https://datasette.io/plugins/datasette-jellyfish>`__, `datasette-jq <https://datasette.io/plugins/datasette-jq>`__, `datasette-haversine <https://datasette.io/plugins/datasette-haversine>`__, `datasette-rure <https://datasette.io/plugins/datasette-rure>`__
|
||||
|
||||
.. _plugin_hook_prepare_jinja2_environment:
|
||||
|
|
@ -691,7 +689,7 @@ Help text (from the docstring for the function plus any defined Click arguments
|
|||
|
||||
Plugins can register multiple commands by making multiple calls to the ``@cli.command()`` decorator. Consult the `Click documentation <https://click.palletsprojects.com/>`__ for full details on how to build a CLI command, including how to define arguments and options.
|
||||
|
||||
Note that ``register_commands()`` plugins cannot used with the :ref:`--plugins-dir mechanism <writing_plugins_one_off>` - they need to be installed into the same virtual environment as Datasette using ``pip install``. Provided it has a ``pyproject.toml`` file (see :ref:`writing_plugins_packaging`) you can run ``pip install`` directly against the directory in which you are developing your plugin like so::
|
||||
Note that ``register_commands()`` plugins cannot used with the :ref:`--plugins-dir mechanism <writing_plugins_one_off>` - they need to be installed into the same virtual environment as Datasette using ``pip install``. Provided it has a ``setup.py`` file (see :ref:`writing_plugins_packaging`) you can run ``pip install`` directly against the directory in which you are developing your plugin like so::
|
||||
|
||||
pip install -e path/to/my/datasette-plugin
|
||||
|
||||
|
|
@ -777,128 +775,52 @@ The plugin hook can then be used to register the new facet class like this:
|
|||
def register_facet_classes():
|
||||
return [SpecialFacet]
|
||||
|
||||
.. _plugin_register_actions:
|
||||
.. _plugin_register_permissions:
|
||||
|
||||
register_actions(datasette)
|
||||
---------------------------
|
||||
register_permissions(datasette)
|
||||
--------------------------------
|
||||
|
||||
If your plugin needs to register actions that can be checked with Datasette's new resource-based permission system, return a list of those actions from this hook.
|
||||
|
||||
Actions define what operations can be performed on resources (like viewing a table, executing SQL, or custom plugin actions).
|
||||
If your plugin needs to register additional permissions unique to that plugin - ``upload-csvs`` for example - you can return a list of those permissions from this hook.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette import hookimpl
|
||||
from datasette.permissions import Action, Resource
|
||||
|
||||
|
||||
class DocumentCollectionResource(Resource):
|
||||
"""A collection of documents."""
|
||||
|
||||
name = "document-collection"
|
||||
parent_name = None
|
||||
|
||||
def __init__(self, collection: str):
|
||||
super().__init__(parent=collection, child=None)
|
||||
|
||||
@classmethod
|
||||
def resources_sql(cls) -> str:
|
||||
return """
|
||||
SELECT collection_name AS parent, NULL AS child
|
||||
FROM document_collections
|
||||
"""
|
||||
|
||||
|
||||
class DocumentResource(Resource):
|
||||
"""A document in a collection."""
|
||||
|
||||
name = "document"
|
||||
parent_name = "document-collection"
|
||||
|
||||
def __init__(self, collection: str, document: str):
|
||||
super().__init__(parent=collection, child=document)
|
||||
|
||||
@classmethod
|
||||
def resources_sql(cls) -> str:
|
||||
return """
|
||||
SELECT collection_name AS parent, document_id AS child
|
||||
FROM documents
|
||||
"""
|
||||
from datasette import hookimpl, Permission
|
||||
|
||||
|
||||
@hookimpl
|
||||
def register_actions(datasette):
|
||||
def register_permissions(datasette):
|
||||
return [
|
||||
Action(
|
||||
name="list-documents",
|
||||
abbr="ld",
|
||||
description="List documents in a collection",
|
||||
resource_class=DocumentCollectionResource,
|
||||
),
|
||||
Action(
|
||||
name="view-document",
|
||||
abbr="vdoc",
|
||||
description="View document",
|
||||
resource_class=DocumentResource,
|
||||
),
|
||||
Action(
|
||||
name="edit-document",
|
||||
abbr="edoc",
|
||||
description="Edit document",
|
||||
resource_class=DocumentResource,
|
||||
),
|
||||
Permission(
|
||||
name="upload-csvs",
|
||||
abbr=None,
|
||||
description="Upload CSV files",
|
||||
takes_database=True,
|
||||
takes_resource=False,
|
||||
default=False,
|
||||
)
|
||||
]
|
||||
|
||||
The fields of the ``Action`` dataclass are as follows:
|
||||
The fields of the ``Permission`` class are as follows:
|
||||
|
||||
``name`` - string
|
||||
The name of the action, e.g. ``view-document``. This should be unique across all plugins.
|
||||
The name of the permission, e.g. ``upload-csvs``. This should be unique across all plugins that the user might have installed, so choose carefully.
|
||||
|
||||
``abbr`` - string or None
|
||||
An abbreviation of the action, e.g. ``vdoc``. This is optional. Since this needs to be unique across all installed plugins it's best to choose carefully or omit it entirely (same as setting it to ``None``.)
|
||||
An abbreviation of the permission, e.g. ``uc``. This is optional - you can set it to ``None`` if you do not want to pick an abbreviation. Since this needs to be unique across all installed plugins it's best not to specify an abbreviation at all. If an abbreviation is provided it will be used when creating restricted signed API tokens.
|
||||
|
||||
``description`` - string or None
|
||||
A human-readable description of what the action allows you to do.
|
||||
A human-readable description of what the permission lets you do. Should make sense as the second part of a sentence that starts "A user with this permission can ...".
|
||||
|
||||
``resource_class`` - type[Resource] or None
|
||||
The Resource subclass that defines what kind of resource this action applies to. Omit this (or set to ``None``) for global actions that apply only at the instance level with no associated resources (like ``debug-menu`` or ``permissions-debug``). Your Resource subclass must:
|
||||
``takes_database`` - boolean
|
||||
``True`` if this permission can be granted on a per-database basis, ``False`` if it is only valid at the overall Datasette instance level.
|
||||
|
||||
- Define a ``name`` class attribute (e.g., ``"document"``)
|
||||
- Define a ``parent_class`` class attribute (``None`` for top-level resources like databases, or the parent ``Resource`` subclass for child resources)
|
||||
- Implement a ``resources_sql()`` classmethod that returns SQL returning all resources as ``(parent, child)`` columns
|
||||
- Have an ``__init__`` method that accepts appropriate parameters and calls ``super().__init__(parent=..., child=...)``
|
||||
``takes_resource`` - boolean
|
||||
``True`` if this permission can be granted on a per-resource basis. A resource is a database table, SQL view or :ref:`canned query <canned_queries>`.
|
||||
|
||||
The ``resources_sql()`` method
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
``default`` - boolean
|
||||
The default value for this permission if it is not explicitly granted to a user. ``True`` means the permission is granted by default, ``False`` means it is not.
|
||||
|
||||
The ``resources_sql()`` classmethod returns a SQL query that lists all resources of that type that exist in the system.
|
||||
|
||||
This query is used by Datasette to efficiently check permissions across multiple resources at once. When a user requests a list of resources (like tables, documents, or other entities), Datasette uses this SQL to:
|
||||
|
||||
1. Get all resources of this type from your data catalog
|
||||
2. Combine it with permission rules from the ``permission_resources_sql`` hook
|
||||
3. Use SQL joins and filtering to determine which resources the actor can access
|
||||
4. Return only the permitted resources
|
||||
|
||||
The SQL query **must** return exactly two columns:
|
||||
|
||||
- ``parent`` - The parent identifier (e.g., database name, collection name), or ``NULL`` for top-level resources
|
||||
- ``child`` - The child identifier (e.g., table name, document ID), or ``NULL`` for parent-only resources
|
||||
|
||||
For example, if you're building a document management plugin with collections and documents stored in a ``documents`` table, your ``resources_sql()`` might look like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
@classmethod
|
||||
def resources_sql(cls) -> str:
|
||||
return """
|
||||
SELECT collection_name AS parent, document_id AS child
|
||||
FROM documents
|
||||
"""
|
||||
|
||||
This tells Datasette "here's how to find all documents in the system - look in the documents table and get the collection name and document ID for each one."
|
||||
|
||||
The permission system then uses this query along with rules from plugins to determine which documents each user can access, all efficiently in SQL rather than loading everything into Python.
|
||||
This should only be ``True`` if you want anonymous users to be able to take this action.
|
||||
|
||||
.. _plugin_asgi_wrapper:
|
||||
|
||||
|
|
@ -1102,7 +1024,7 @@ actor_from_request(datasette, request)
|
|||
|
||||
This is part of Datasette's :ref:`authentication and permissions system <authentication>`. The function should attempt to authenticate an actor (either a user or an API actor of some sort) based on information in the request.
|
||||
|
||||
If it cannot authenticate an actor, it should return ``None``, otherwise it should return a dictionary representing that actor. Once a plugin has returned an actor from this hook other plugins will be ignored.
|
||||
If it cannot authenticate an actor, it should return ``None``. Otherwise it should return a dictionary representing that actor.
|
||||
|
||||
Here's an example that authenticates the actor based on an incoming API key:
|
||||
|
||||
|
|
@ -1314,191 +1236,70 @@ This example plugin causes 0 results to be returned if ``?_nothing=1`` is added
|
|||
|
||||
Example: `datasette-leaflet-freedraw <https://datasette.io/plugins/datasette-leaflet-freedraw>`_
|
||||
|
||||
.. _plugin_hook_permission_resources_sql:
|
||||
.. _plugin_hook_permission_allowed:
|
||||
|
||||
permission_resources_sql(datasette, actor, action)
|
||||
--------------------------------------------------
|
||||
permission_allowed(datasette, actor, action, resource)
|
||||
------------------------------------------------------
|
||||
|
||||
``datasette`` - :ref:`internals_datasette`
|
||||
Access to the Datasette instance.
|
||||
You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.
|
||||
|
||||
``actor`` - dictionary or None
|
||||
The current actor dictionary. ``None`` for anonymous requests.
|
||||
``actor`` - dictionary
|
||||
The current actor, as decided by :ref:`plugin_hook_actor_from_request`.
|
||||
|
||||
``action`` - string
|
||||
The permission action being evaluated. Examples include ``"view-table"`` or ``"insert-row"``.
|
||||
The action to be performed, e.g. ``"edit-table"``.
|
||||
|
||||
Return value
|
||||
A :class:`datasette.permissions.PermissionSQL` object, ``None`` or an iterable of ``PermissionSQL`` objects.
|
||||
``resource`` - string or None
|
||||
An identifier for the individual resource, e.g. the name of the table.
|
||||
|
||||
Datasette's action-based permission resolver calls this hook to gather SQL rows describing which
|
||||
resources an actor may access (``allow = 1``) or should be denied (``allow = 0``) for a specific action.
|
||||
Each SQL snippet should return ``parent``, ``child``, ``allow`` and ``reason`` columns.
|
||||
Called to check that an actor has permission to perform an action on a resource. Can return ``True`` if the action is allowed, ``False`` if the action is not allowed or ``None`` if the plugin does not have an opinion one way or the other.
|
||||
|
||||
**Parameter naming convention:** Plugin parameters in ``PermissionSQL.params`` should use unique names
|
||||
to avoid conflicts with other plugins. The recommended convention is to prefix parameters with your
|
||||
plugin's source name (e.g., ``myplugin_user_id``). The system reserves these parameter names:
|
||||
``:actor``, ``:actor_id``, ``:action``, and ``:filter_parent``.
|
||||
|
||||
You can also use return ``PermissionSQL.allow(reason="reason goes here")`` or ``PermissionSQL.deny(reason="reason goes here")`` as shortcuts for simple root-level allow or deny rules. These will create SQL snippets that look like this:
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
SELECT
|
||||
NULL AS parent,
|
||||
NULL AS child,
|
||||
1 AS allow,
|
||||
'reason goes here' AS reason
|
||||
|
||||
Or ``0 AS allow`` for denies.
|
||||
|
||||
Permission plugin examples
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
These snippets show how to use the new ``permission_resources_sql`` hook to
|
||||
contribute rows to the action-based permission resolver. Each hook receives the
|
||||
current actor dictionary (or ``None``) and must return ``None`` or an instance or list of
|
||||
``datasette.permissions.PermissionSQL`` (or a coroutine that resolves to that).
|
||||
|
||||
Allow Alice to view a specific table
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
This plugin grants the actor with ``id == "alice"`` permission to perform the
|
||||
``view-table`` action against the ``sales`` table inside the ``accounting`` database.
|
||||
Here's an example plugin which randomly selects if a permission should be allowed or denied, except for ``view-instance`` which always uses the default permission scheme instead.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette import hookimpl
|
||||
from datasette.permissions import PermissionSQL
|
||||
import random
|
||||
|
||||
|
||||
@hookimpl
|
||||
def permission_resources_sql(datasette, actor, action):
|
||||
if action != "view-table":
|
||||
return None
|
||||
if not actor or actor.get("id") != "alice":
|
||||
return None
|
||||
def permission_allowed(action):
|
||||
if action != "view-instance":
|
||||
# Return True or False at random
|
||||
return random.random() > 0.5
|
||||
# Returning None falls back to default permissions
|
||||
|
||||
return PermissionSQL(
|
||||
sql="""
|
||||
SELECT
|
||||
'accounting' AS parent,
|
||||
'sales' AS child,
|
||||
1 AS allow,
|
||||
'alice can view accounting/sales' AS reason
|
||||
""",
|
||||
)
|
||||
This function can alternatively return an awaitable function which itself returns ``True``, ``False`` or ``None``. You can use this option if you need to execute additional database queries using ``await datasette.execute(...)``.
|
||||
|
||||
Restrict execute-sql to a database prefix
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Only allow ``execute-sql`` against databases whose name begins with
|
||||
``analytics_``. This shows how to use parameters that the permission resolver
|
||||
will pass through to the SQL snippet.
|
||||
Here's an example that allows users to view the ``admin_log`` table only if their actor ``id`` is present in the ``admin_users`` table. It also disallows arbitrary SQL queries for the ``staff.db`` database for all users.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette import hookimpl
|
||||
from datasette.permissions import PermissionSQL
|
||||
|
||||
|
||||
@hookimpl
|
||||
def permission_resources_sql(datasette, actor, action):
|
||||
if action != "execute-sql":
|
||||
return None
|
||||
def permission_allowed(datasette, actor, action, resource):
|
||||
async def inner():
|
||||
if action == "execute-sql" and resource == "staff":
|
||||
return False
|
||||
if action == "view-table" and resource == (
|
||||
"staff",
|
||||
"admin_log",
|
||||
):
|
||||
if not actor:
|
||||
return False
|
||||
user_id = actor["id"]
|
||||
return await datasette.get_database(
|
||||
"staff"
|
||||
).execute(
|
||||
"select count(*) from admin_users where user_id = :user_id",
|
||||
{"user_id": user_id},
|
||||
)
|
||||
|
||||
return PermissionSQL(
|
||||
sql="""
|
||||
SELECT
|
||||
parent,
|
||||
NULL AS child,
|
||||
1 AS allow,
|
||||
'execute-sql allowed for analytics_*' AS reason
|
||||
FROM catalog_databases
|
||||
WHERE database_name LIKE :analytics_prefix
|
||||
""",
|
||||
params={
|
||||
"analytics_prefix": "analytics_%",
|
||||
},
|
||||
)
|
||||
return inner
|
||||
|
||||
Read permissions from a custom table
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
See :ref:`built-in permissions <permissions>` for a full list of permissions that are included in Datasette core.
|
||||
|
||||
This example stores grants in an internal table called ``permission_grants``
|
||||
with columns ``(actor_id, action, parent, child, allow, reason)``.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette import hookimpl
|
||||
from datasette.permissions import PermissionSQL
|
||||
|
||||
|
||||
@hookimpl
|
||||
def permission_resources_sql(datasette, actor, action):
|
||||
if not actor:
|
||||
return None
|
||||
|
||||
return PermissionSQL(
|
||||
sql="""
|
||||
SELECT
|
||||
parent,
|
||||
child,
|
||||
allow,
|
||||
COALESCE(reason, 'permission_grants table') AS reason
|
||||
FROM permission_grants
|
||||
WHERE actor_id = :grants_actor_id
|
||||
AND action = :grants_action
|
||||
""",
|
||||
params={
|
||||
"grants_actor_id": actor.get("id"),
|
||||
"grants_action": action,
|
||||
},
|
||||
)
|
||||
|
||||
Default deny with an exception
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Combine a root-level deny with a specific table allow for trusted users.
|
||||
The resolver will automatically apply the most specific rule.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette import hookimpl
|
||||
from datasette.permissions import PermissionSQL
|
||||
|
||||
|
||||
TRUSTED = {"alice", "bob"}
|
||||
|
||||
|
||||
@hookimpl
|
||||
def permission_resources_sql(datasette, actor, action):
|
||||
if action != "view-table":
|
||||
return None
|
||||
|
||||
actor_id = (actor or {}).get("id")
|
||||
|
||||
if actor_id not in TRUSTED:
|
||||
return PermissionSQL(
|
||||
sql="""
|
||||
SELECT NULL AS parent, NULL AS child, 0 AS allow,
|
||||
'default deny view-table' AS reason
|
||||
""",
|
||||
)
|
||||
|
||||
return PermissionSQL(
|
||||
sql="""
|
||||
SELECT NULL AS parent, NULL AS child, 0 AS allow,
|
||||
'default deny view-table' AS reason
|
||||
UNION ALL
|
||||
SELECT 'reports' AS parent, 'daily_metrics' AS child, 1 AS allow,
|
||||
'trusted user access' AS reason
|
||||
""",
|
||||
params={"actor_id": actor_id},
|
||||
)
|
||||
|
||||
The ``UNION ALL`` ensures the deny rule is always present, while the second row
|
||||
adds the exception for trusted users.
|
||||
Example: `datasette-permissions-sql <https://datasette.io/plugins/datasette-permissions-sql>`_
|
||||
|
||||
.. _plugin_hook_register_magic_parameters:
|
||||
|
||||
|
|
@ -1514,7 +1315,7 @@ Magic parameters all take this format: ``_prefix_rest_of_parameter``. The prefix
|
|||
|
||||
To register a new function, return it as a tuple of ``(string prefix, function)`` from this hook. The function you register should take two arguments: ``key`` and ``request``, where ``key`` is the ``rest_of_parameter`` portion of the parameter and ``request`` is the current :ref:`internals_request`.
|
||||
|
||||
This example registers two new magic parameters: ``:_request_http_version`` returning the HTTP version of the current request, and ``:_uuid_new`` which returns a new UUID. It also registers an ``:_asynclookup_key`` parameter, demonstrating that these functions can be asynchronous:
|
||||
This example registers two new magic parameters: ``:_request_http_version`` returning the HTTP version of the current request, and ``:_uuid_new`` which returns a new UUID:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
|
|
@ -1536,16 +1337,11 @@ This example registers two new magic parameters: ``:_request_http_version`` retu
|
|||
raise KeyError
|
||||
|
||||
|
||||
async def asynclookup(key, request):
|
||||
return await do_something_async(key)
|
||||
|
||||
|
||||
@hookimpl
|
||||
def register_magic_parameters(datasette):
|
||||
return [
|
||||
("request", request),
|
||||
("uuid", uuid),
|
||||
("asynclookup", asynclookup),
|
||||
]
|
||||
|
||||
.. _plugin_hook_forbidden:
|
||||
|
|
@ -1915,16 +1711,16 @@ This example adds a new database action for creating a table, if the user has th
|
|||
.. code-block:: python
|
||||
|
||||
from datasette import hookimpl
|
||||
from datasette.resources import DatabaseResource
|
||||
|
||||
|
||||
@hookimpl
|
||||
def database_actions(datasette, actor, database):
|
||||
async def inner():
|
||||
if not await datasette.allowed(
|
||||
if not await datasette.permission_allowed(
|
||||
actor,
|
||||
"edit-schema",
|
||||
resource=DatabaseResource("database"),
|
||||
resource=database,
|
||||
default=False,
|
||||
):
|
||||
return []
|
||||
return [
|
||||
|
|
|
|||
|
|
@ -198,15 +198,6 @@ If you run ``datasette plugins --all`` it will include default plugins that ship
|
|||
"register_output_renderer"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "datasette.default_actions",
|
||||
"static": false,
|
||||
"templates": false,
|
||||
"version": null,
|
||||
"hooks": [
|
||||
"register_actions"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "datasette.default_magic_parameters",
|
||||
"static": false,
|
||||
|
|
@ -232,8 +223,8 @@ If you run ``datasette plugins --all`` it will include default plugins that ship
|
|||
"version": null,
|
||||
"hooks": [
|
||||
"actor_from_request",
|
||||
"canned_queries",
|
||||
"permission_resources_sql",
|
||||
"permission_allowed",
|
||||
"register_permissions",
|
||||
"skip_csrf"
|
||||
]
|
||||
},
|
||||
|
|
|
|||
|
|
@ -69,7 +69,7 @@ default_allow_sql
|
|||
|
||||
Should users be able to execute arbitrary SQL queries by default?
|
||||
|
||||
Setting this to ``off`` causes permission checks for :ref:`actions_execute_sql` to fail by default.
|
||||
Setting this to ``off`` causes permission checks for :ref:`permissions_execute_sql` to fail by default.
|
||||
|
||||
::
|
||||
|
||||
|
|
@ -374,7 +374,7 @@ One way to generate a secure random secret is to use Python like this::
|
|||
python3 -c 'import secrets; print(secrets.token_hex(32))'
|
||||
cdb19e94283a20f9d42cca50c5a4871c0aa07392db308755d60a1a5b9bb0fa52
|
||||
|
||||
Plugin authors can make use of this signing mechanism in their plugins using the :ref:`datasette.sign() <datasette_sign>` and :ref:`datasette.unsign() <datasette_unsign>` methods.
|
||||
Plugin authors make use of this signing mechanism in their plugins using :ref:`datasette_sign` and :ref:`datasette_unsign`.
|
||||
|
||||
.. _setting_publish_secrets:
|
||||
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue